from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Insidetextfont(_BaseTraceHierarchyType):
# class properties
# --------------------
_parent_path_str = "bar"
_path_str = "bar.insidetextfont"
_valid_props = {"color", "colorsrc", "family", "familysrc", "size", "sizesrc"}
# color
# -----
@property
def color(self):
"""
The 'color' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, rebeccapurple, saddlebrown, salmon,
sandybrown, seagreen, seashell, sienna, silver,
skyblue, slateblue, slategray, slategrey, snow,
springgreen, steelblue, tan, teal, thistle, tomato,
turquoise, violet, wheat, white, whitesmoke,
yellow, yellowgreen
- A list or array of any of the above
Returns
-------
str|numpy.ndarray
"""
return self["color"]
@color.setter
def color(self, val):
self["color"] = val
# colorsrc
# --------
@property
def colorsrc(self):
"""
Sets the source reference on Chart Studio Cloud for `color`.
The 'colorsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["colorsrc"]
@colorsrc.setter
def colorsrc(self, val):
self["colorsrc"] = val
# family
# ------
@property
def family(self):
"""
HTML font family - the typeface that will be applied by the web
browser. The web browser will only be able to apply a font if
it is available on the system on which it operates. Provide
multiple font families, separated by commas, to indicate the
preference in which to apply fonts if they aren't available on
the system. The Chart Studio Cloud (at https://chart-
studio.plotly.com or on-premise) generates images on a server,
where only a select number of fonts are installed and
supported. These include "Arial", "Balto", "Courier New",
"Droid Sans",, "Droid Serif", "Droid Sans Mono", "Gravitas
One", "Old Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
The 'family' property is a string and must be specified as:
- A non-empty string
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
str|numpy.ndarray
"""
return self["family"]
@family.setter
def family(self, val):
self["family"] = val
# familysrc
# ---------
@property
def familysrc(self):
"""
Sets the source reference on Chart Studio Cloud for `family`.
The 'familysrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["familysrc"]
@familysrc.setter
def familysrc(self, val):
self["familysrc"] = val
# size
# ----
@property
def size(self):
"""
The 'size' property is a number and may be specified as:
- An int or float in the interval [1, inf]
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
int|float|numpy.ndarray
"""
return self["size"]
@size.setter
def size(self, val):
self["size"] = val
# sizesrc
# -------
@property
def sizesrc(self):
"""
Sets the source reference on Chart Studio Cloud for `size`.
The 'sizesrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["sizesrc"]
@sizesrc.setter
def sizesrc(self, val):
self["sizesrc"] = val
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
color
colorsrc
Sets the source reference on Chart Studio Cloud for
`color`.
family
HTML font family - the typeface that will be applied by
the web browser. The web browser will only be able to
apply a font if it is available on the system on which it
operates. Provide multiple font families, separated by
commas, to indicate the preference in which to apply
fonts if they aren't available on the system. The Chart
Studio Cloud (at https://chart-studio.plotly.com or on-
premise) generates images on a server, where only a
select number of fonts are installed and supported.
These include "Arial", "Balto", "Courier New", "Droid
Sans",, "Droid Serif", "Droid Sans Mono", "Gravitas
One", "Old Standard TT", "Open Sans", "Overpass", "PT
Sans Narrow", "Raleway", "Times New Roman".
familysrc
Sets the source reference on Chart Studio Cloud for
`family`.
size
sizesrc
Sets the source reference on Chart Studio Cloud for
`size`.
"""
def __init__(
self,
arg=None,
color=None,
colorsrc=None,
family=None,
familysrc=None,
size=None,
sizesrc=None,
**kwargs
):
"""
Construct a new Insidetextfont object
Sets the font used for `text` lying inside the bar.
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of
:class:`plotly.graph_objs.bar.Insidetextfont`
color
colorsrc
Sets the source reference on Chart Studio Cloud for
`color`.
family
HTML font family - the typeface that will be applied by
the web browser. The web browser will only be able to
apply a font if it is available on the system on which it
operates. Provide multiple font families, separated by
commas, to indicate the preference in which to apply
fonts if they aren't available on the system. The Chart
Studio Cloud (at https://chart-studio.plotly.com or on-
premise) generates images on a server, where only a
select number of fonts are installed and supported.
These include "Arial", "Balto", "Courier New", "Droid
Sans",, "Droid Serif", "Droid Sans Mono", "Gravitas
One", "Old Standard TT", "Open Sans", "Overpass", "PT
Sans Narrow", "Raleway", "Times New Roman".
familysrc
Sets the source reference on Chart Studio Cloud for
`family`.
size
sizesrc
Sets the source reference on Chart Studio Cloud for
`size`.
Returns
-------
Insidetextfont
"""
super(Insidetextfont, self).__init__("insidetextfont")
if "_parent" in kwargs:
self._parent = kwargs["_parent"]
return
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.bar.Insidetextfont
constructor must be a dict or
an instance of :class:`plotly.graph_objs.bar.Insidetextfont`"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop("skip_invalid", False)
self._validate = kwargs.pop("_validate", True)
# Populate data dict with properties
# ----------------------------------
_v = arg.pop("color", None)
_v = color if color is not None else _v
if _v is not None:
self["color"] = _v
_v = arg.pop("colorsrc", None)
_v = colorsrc if colorsrc is not None else _v
if _v is not None:
self["colorsrc"] = _v
_v = arg.pop("family", None)
_v = family if family is not None else _v
if _v is not None:
self["family"] = _v
_v = arg.pop("familysrc", None)
_v = familysrc if familysrc is not None else _v
if _v is not None:
self["familysrc"] = _v
_v = arg.pop("size", None)
_v = size if size is not None else _v
if _v is not None:
self["size"] = _v
_v = arg.pop("sizesrc", None)
_v = sizesrc if sizesrc is not None else _v
if _v is not None:
self["sizesrc"] = _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False
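A minimal usage sketch (not part of the generated module above, and assuming plotly is installed): the dict passed as `insidetextfont` to `go.Bar` is validated through this `Insidetextfont` class, so any of the color, family and size forms documented in the properties above are accepted.

import plotly.graph_objects as go

# Hedged example: label text drawn inside the bars uses the given font.
fig = go.Figure(
    go.Bar(
        x=["a", "b", "c"],
        y=[3, 1, 2],
        text=["3", "1", "2"],
        textposition="inside",
        insidetextfont=dict(family="Open Sans, Arial", size=14, color="#ffffff"),
    )
)
# fig.show()  # uncomment to render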
from __future__ import unicode_literals
import json
import boto
import sure # noqa
from moto import (
mock_autoscaling,
mock_cloudformation,
mock_ec2,
mock_elb,
mock_iam,
)
from .fixtures import (
single_instance_with_ebs_volume,
vpc_single_instance_in_subnet,
ec2_classic_eip,
vpc_eip,
fn_join
)
@mock_cloudformation()
def test_stack_sqs_integration():
sqs_template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"QueueGroup": {
"Type": "AWS::SQS::Queue",
"Properties": {
"QueueName": "my-queue",
"VisibilityTimeout": 60,
}
},
},
}
sqs_template_json = json.dumps(sqs_template)
conn = boto.connect_cloudformation()
conn.create_stack(
"test_stack",
template_body=sqs_template_json,
)
stack = conn.describe_stacks()[0]
queue = stack.describe_resources()[0]
queue.resource_type.should.equal('AWS::SQS::Queue')
queue.logical_resource_id.should.equal("QueueGroup")
queue.physical_resource_id.should.equal("my-queue")
@mock_ec2()
@mock_cloudformation()
def test_stack_ec2_integration():
ec2_template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"WebServerGroup": {
"Type": "AWS::EC2::Instance",
"Properties": {
"ImageId": "ami-1234abcd",
"UserData": "some user data",
}
},
},
}
ec2_template_json = json.dumps(ec2_template)
conn = boto.connect_cloudformation()
conn.create_stack(
"ec2_stack",
template_body=ec2_template_json,
)
ec2_conn = boto.connect_ec2()
reservation = ec2_conn.get_all_instances()[0]
ec2_instance = reservation.instances[0]
stack = conn.describe_stacks()[0]
instance = stack.describe_resources()[0]
instance.resource_type.should.equal('AWS::EC2::Instance')
instance.logical_resource_id.should.contain("WebServerGroup")
instance.physical_resource_id.should.equal(ec2_instance.id)
@mock_ec2()
@mock_elb()
@mock_cloudformation()
def test_stack_elb_integration_with_attached_ec2_instances():
elb_template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"MyELB": {
"Type": "AWS::ElasticLoadBalancing::LoadBalancer",
"Instances": [{"Ref": "Ec2Instance1"}],
"Properties": {
"LoadBalancerName": "test-elb",
"AvailabilityZones": ['us-east1'],
}
},
"Ec2Instance1": {
"Type": "AWS::EC2::Instance",
"Properties": {
"ImageId": "ami-1234abcd",
"UserData": "some user data",
}
},
},
}
elb_template_json = json.dumps(elb_template)
conn = boto.connect_cloudformation()
conn.create_stack(
"elb_stack",
template_body=elb_template_json,
)
elb_conn = boto.connect_elb()
load_balancer = elb_conn.get_all_load_balancers()[0]
ec2_conn = boto.connect_ec2()
reservation = ec2_conn.get_all_instances()[0]
ec2_instance = reservation.instances[0]
instance_id = ec2_instance.id
load_balancer.instances[0].id.should.equal(ec2_instance.id)
list(load_balancer.availability_zones).should.equal(['us-east1'])
load_balancer_name = load_balancer.name
stack = conn.describe_stacks()[0]
stack_resources = stack.describe_resources()
stack_resources.should.have.length_of(2)
for resource in stack_resources:
if resource.resource_type == 'AWS::ElasticLoadBalancing::LoadBalancer':
load_balancer = resource
else:
ec2_instance = resource
load_balancer.logical_resource_id.should.equal("MyELB")
load_balancer.physical_resource_id.should.equal(load_balancer_name)
ec2_instance.physical_resource_id.should.equal(instance_id)
@mock_ec2()
@mock_cloudformation()
def test_stack_security_groups():
security_group_template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"my-security-group": {
"Type": "AWS::EC2::SecurityGroup",
"Properties": {
"GroupDescription": "My other group",
},
},
"Ec2Instance2": {
"Type": "AWS::EC2::Instance",
"Properties": {
"SecurityGroups": [{"Ref": "InstanceSecurityGroup"}],
"ImageId": "ami-1234abcd",
}
},
"InstanceSecurityGroup": {
"Type": "AWS::EC2::SecurityGroup",
"Properties": {
"GroupDescription": "My security group",
"SecurityGroupIngress": [{
"IpProtocol": "tcp",
"FromPort": "22",
"ToPort": "22",
"CidrIp": "123.123.123.123/32",
}, {
"IpProtocol": "tcp",
"FromPort": "80",
"ToPort": "8000",
"SourceSecurityGroupId": {"Ref": "my-security-group"},
}]
}
}
},
}
security_group_template_json = json.dumps(security_group_template)
conn = boto.connect_cloudformation()
conn.create_stack(
"security_group_stack",
template_body=security_group_template_json,
)
ec2_conn = boto.connect_ec2()
security_groups = ec2_conn.get_all_security_groups()
for group in security_groups:
if "InstanceSecurityGroup" in group.name:
instance_group = group
else:
other_group = group
reservation = ec2_conn.get_all_instances()[0]
ec2_instance = reservation.instances[0]
ec2_instance.groups[0].id.should.equal(instance_group.id)
instance_group.description.should.equal("My security group")
rule1, rule2 = instance_group.rules
int(rule1.to_port).should.equal(22)
int(rule1.from_port).should.equal(22)
rule1.grants[0].cidr_ip.should.equal("123.123.123.123/32")
rule1.ip_protocol.should.equal('tcp')
int(rule2.to_port).should.equal(8000)
int(rule2.from_port).should.equal(80)
rule2.ip_protocol.should.equal('tcp')
rule2.grants[0].group_id.should.equal(other_group.id)
@mock_autoscaling()
@mock_elb()
@mock_cloudformation()
def test_autoscaling_group_with_elb():
web_setup_template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"my-as-group": {
"Type": "AWS::AutoScaling::AutoScalingGroup",
"Properties": {
"AvailabilityZones": ['us-east1'],
"LaunchConfigurationName": {"Ref": "my-launch-config"},
"MinSize": "2",
"MaxSize": "2",
"LoadBalancerNames": [{"Ref": "my-elb"}]
},
},
"my-launch-config": {
"Type": "AWS::AutoScaling::LaunchConfiguration",
"Properties": {
"ImageId": "ami-1234abcd",
"UserData": "some user data",
}
},
"my-elb": {
"Type": "AWS::ElasticLoadBalancing::LoadBalancer",
"Properties": {
"AvailabilityZones": ['us-east1'],
"Listeners": [{
"LoadBalancerPort": "80",
"InstancePort": "80",
"Protocol": "HTTP"
}],
"LoadBalancerName": "my-elb",
"HealthCheck": {
"Target": "80",
"HealthyThreshold": "3",
"UnhealthyThreshold": "5",
"Interval": "30",
"Timeout": "5",
},
},
},
}
}
web_setup_template_json = json.dumps(web_setup_template)
conn = boto.connect_cloudformation()
conn.create_stack(
"web_stack",
template_body=web_setup_template_json,
)
autoscale_conn = boto.connect_autoscale()
autoscale_group = autoscale_conn.get_all_groups()[0]
autoscale_group.launch_config_name.should.contain("my-launch-config")
autoscale_group.load_balancers[0].should.equal('my-elb')
# Confirm the Launch config was actually created
autoscale_conn.get_all_launch_configurations().should.have.length_of(1)
# Confirm the ELB was actually created
elb_conn = boto.connect_elb()
elb_conn.get_all_load_balancers().should.have.length_of(1)
stack = conn.describe_stacks()[0]
resources = stack.describe_resources()
as_group_resource = [resource for resource in resources if resource.resource_type == 'AWS::AutoScaling::AutoScalingGroup'][0]
as_group_resource.physical_resource_id.should.contain("my-as-group")
launch_config_resource = [resource for resource in resources if resource.resource_type == 'AWS::AutoScaling::LaunchConfiguration'][0]
launch_config_resource.physical_resource_id.should.contain("my-launch-config")
elb_resource = [resource for resource in resources if resource.resource_type == 'AWS::ElasticLoadBalancing::LoadBalancer'][0]
elb_resource.physical_resource_id.should.contain("my-elb")
@mock_ec2()
@mock_cloudformation()
def test_vpc_single_instance_in_subnet():
template_json = json.dumps(vpc_single_instance_in_subnet.template)
conn = boto.connect_cloudformation()
conn.create_stack(
"test_stack",
template_body=template_json,
)
vpc_conn = boto.connect_vpc()
vpc = vpc_conn.get_all_vpcs()[0]
vpc.cidr_block.should.equal("10.0.0.0/16")
# Add this once we implement the endpoint
# vpc_conn.get_all_internet_gateways().should.have.length_of(1)
subnet = vpc_conn.get_all_subnets()[0]
subnet.vpc_id.should.equal(vpc.id)
ec2_conn = boto.connect_ec2()
reservation = ec2_conn.get_all_instances()[0]
instance = reservation.instances[0]
# Check that the EIP is attached to the EC2 instance
eip = ec2_conn.get_all_addresses()[0]
eip.domain.should.equal('vpc')
eip.instance_id.should.equal(instance.id)
security_group = ec2_conn.get_all_security_groups()[0]
security_group.vpc_id.should.equal(vpc.id)
stack = conn.describe_stacks()[0]
resources = stack.describe_resources()
vpc_resource = [resource for resource in resources if resource.resource_type == 'AWS::EC2::VPC'][0]
vpc_resource.physical_resource_id.should.equal(vpc.id)
subnet_resource = [resource for resource in resources if resource.resource_type == 'AWS::EC2::Subnet'][0]
subnet_resource.physical_resource_id.should.equal(subnet.id)
eip_resource = [resource for resource in resources if resource.resource_type == 'AWS::EC2::EIP'][0]
eip_resource.physical_resource_id.should.equal(eip.allocation_id)
@mock_autoscaling()
@mock_iam()
@mock_cloudformation()
def test_iam_roles():
iam_template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Resources": {
"my-launch-config": {
"Properties": {
"IamInstanceProfile": {"Ref": "my-instance-profile"},
"ImageId": "ami-1234abcd",
},
"Type": "AWS::AutoScaling::LaunchConfiguration"
},
"my-instance-profile": {
"Properties": {
"Path": "my-path",
"Roles": [{"Ref": "my-role"}],
},
"Type": "AWS::IAM::InstanceProfile"
},
"my-role": {
"Properties": {
"AssumeRolePolicyDocument": {
"Statement": [
{
"Action": [
"sts:AssumeRole"
],
"Effect": "Allow",
"Principal": {
"Service": [
"ec2.amazonaws.com"
]
}
}
]
},
"Path": "my-path",
"Policies": [
{
"PolicyDocument": {
"Statement": [
{
"Action": [
"ec2:CreateTags",
"ec2:DescribeInstances",
"ec2:DescribeTags"
],
"Effect": "Allow",
"Resource": [
"*"
]
}
],
"Version": "2012-10-17"
},
"PolicyName": "EC2_Tags"
},
{
"PolicyDocument": {
"Statement": [
{
"Action": [
"sqs:*"
],
"Effect": "Allow",
"Resource": [
"*"
]
}
],
"Version": "2012-10-17"
},
"PolicyName": "SQS"
},
]
},
"Type": "AWS::IAM::Role"
}
}
}
iam_template_json = json.dumps(iam_template)
conn = boto.connect_cloudformation()
conn.create_stack(
"test_stack",
template_body=iam_template_json,
)
iam_conn = boto.connect_iam()
role_result = iam_conn.list_roles()['list_roles_response']['list_roles_result']['roles'][0]
role = iam_conn.get_role(role_result.role_name)
role.role_name.should.contain("my-role")
role.path.should.equal("my-path")
instance_profile_response = iam_conn.list_instance_profiles()['list_instance_profiles_response']
cfn_instance_profile = instance_profile_response['list_instance_profiles_result']['instance_profiles'][0]
instance_profile = iam_conn.get_instance_profile(cfn_instance_profile.instance_profile_name)
instance_profile.instance_profile_name.should.contain("my-instance-profile")
instance_profile.path.should.equal("my-path")
instance_profile.role_id.should.equal(role.role_id)
autoscale_conn = boto.connect_autoscale()
launch_config = autoscale_conn.get_all_launch_configurations()[0]
launch_config.instance_profile_name.should.contain("my-instance-profile")
stack = conn.describe_stacks()[0]
resources = stack.describe_resources()
instance_profile_resource = [resource for resource in resources if resource.resource_type == 'AWS::IAM::InstanceProfile'][0]
instance_profile_resource.physical_resource_id.should.equal(instance_profile.instance_profile_name)
role_resource = [resource for resource in resources if resource.resource_type == 'AWS::IAM::Role'][0]
role_resource.physical_resource_id.should.equal(role.role_id)
@mock_ec2()
@mock_cloudformation()
def test_single_instance_with_ebs_volume():
template_json = json.dumps(single_instance_with_ebs_volume.template)
conn = boto.connect_cloudformation()
conn.create_stack(
"test_stack",
template_body=template_json,
)
ec2_conn = boto.connect_ec2()
reservation = ec2_conn.get_all_instances()[0]
ec2_instance = reservation.instances[0]
volume = ec2_conn.get_all_volumes()[0]
volume.volume_state().should.equal('in-use')
volume.attach_data.instance_id.should.equal(ec2_instance.id)
stack = conn.describe_stacks()[0]
resources = stack.describe_resources()
ebs_volume = [resource for resource in resources if resource.resource_type == 'AWS::EC2::Volume'][0]
ebs_volume.physical_resource_id.should.equal(volume.id)
@mock_ec2()
@mock_cloudformation()
def test_classic_eip():
template_json = json.dumps(ec2_classic_eip.template)
conn = boto.connect_cloudformation()
conn.create_stack(
"test_stack",
template_body=template_json,
)
ec2_conn = boto.connect_ec2()
eip = ec2_conn.get_all_addresses()[0]
stack = conn.describe_stacks()[0]
resources = stack.describe_resources()
cfn_eip = [resource for resource in resources if resource.resource_type == 'AWS::EC2::EIP'][0]
cfn_eip.physical_resource_id.should.equal(eip.public_ip)
@mock_ec2()
@mock_cloudformation()
def test_vpc_eip():
template_json = json.dumps(vpc_eip.template)
conn = boto.connect_cloudformation()
conn.create_stack(
"test_stack",
template_body=template_json,
)
ec2_conn = boto.connect_ec2()
eip = ec2_conn.get_all_addresses()[0]
stack = conn.describe_stacks()[0]
resources = stack.describe_resources()
cfn_eip = [resource for resource in resources if resource.resource_type == 'AWS::EC2::EIP'][0]
cfn_eip.physical_resource_id.should.equal(eip.allocation_id)
@mock_ec2()
@mock_cloudformation()
def test_fn_join():
template_json = json.dumps(fn_join.template)
conn = boto.connect_cloudformation()
conn.create_stack(
"test_stack",
template_body=template_json,
)
ec2_conn = boto.connect_ec2()
eip = ec2_conn.get_all_addresses()[0]
stack = conn.describe_stacks()[0]
fn_join_output = stack.outputs[0]
fn_join_output.value.should.equal('test eip:{0}'.format(eip.public_ip))
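The `fn_join` fixture is imported from the local `fixtures` package and is not shown here. Purely as an illustration (not the real fixture), a template of roughly the following shape would produce the output asserted above, since `Ref` on an EC2-Classic `AWS::EC2::EIP` resolves to its public IP:

# Illustrative sketch only; the actual template lives in fixtures.fn_join.
illustrative_fn_join_template = {
    "AWSTemplateFormatVersion": "2010-09-09",
    "Resources": {
        "EIP": {"Type": "AWS::EC2::EIP"},
    },
    "Outputs": {
        "EIPAddress": {
            "Value": {"Fn::Join": ["", ["test eip:", {"Ref": "EIP"}]]},
        },
    },
}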
"""A generic rest serving layer for NDB models."""
import logging
import sys
from google.appengine.api import namespace_manager
from google.appengine.ext import db
import flask
import flask.views
from appengine import user
def command(func):
"""Command decorator - automatically dispatches methods."""
setattr(func, 'is_command', True)
return func
class ClassView(flask.views.MethodView):
"""Implements create, retrieve, update and removed endpoints for models."""
def __init__(self, classname, cls, create_callback):
super(ClassView, self).__init__()
self._classname = classname
self._cls = cls
self._create_callback = create_callback
def get(self, object_id):
"""List objects or just return a single object."""
default_user_authentication()
if object_id is None:
# Return json list of objects.
object_list = self._cls.query().iter()
if object_list is None:
object_list = []
object_list = [obj.to_dict() for obj in object_list]
return flask.jsonify(objects=object_list)
else:
# Return json repr of given object
obj = self._cls.get_by_id(object_id)
if not obj:
flask.abort(404)
return flask.jsonify(**obj.to_dict())
def post(self, object_id):
"""Using json body to create or update a object."""
default_user_authentication()
body = flask.request.get_json()
if body is None:
flask.abort(400, 'JSON body and mime type required.')
logging.info("Creating (or updating) object - %s", body)
obj = self._cls.get_by_id(object_id)
if not obj and self._create_callback is None:
flask.abort(403)
elif not obj:
obj = self._create_callback(object_id, body)
# Update the object; abort with 400 on unknown field
try:
obj.populate(**body)
except AttributeError:
logging.error('Exception populating object', exc_info=sys.exc_info())
flask.abort(400)
obj.sync()
# Put the object - BadValueError if there are uninitialised required fields
try:
obj.put()
except db.BadValueError:
logging.error('Exception saving object', exc_info=sys.exc_info())
flask.abort(400)
values = obj.to_dict()
return flask.jsonify(**values)
def delete(self, object_id):
"""Delete an object."""
default_user_authentication()
obj = self._cls.get_by_id(object_id)
if not obj:
flask.abort(404)
obj.key.delete()
user.send_event(cls=self._classname, id=object_id, event='delete')
return ('', 204)
class CommandView(flask.views.MethodView):
"""Implements /command endpoints for models."""
def __init__(self, classname, cls):
super(CommandView, self).__init__()
self._classname = classname
self._cls = cls
def post(self, object_id):
"""Run a command on a object."""
default_user_authentication()
body = flask.request.get_json()
if body is None:
flask.abort(400, 'JSON body and mime type required.')
logging.info(body)
obj = self._cls.get_by_id(object_id)
if not obj:
flask.abort(404)
func_name = body.pop('command', None)
func = getattr(obj, func_name, None)
if func is None or not getattr(func, 'is_command', False):
logging.error('Command %s does not exist or is not a command',
func_name)
flask.abort(400)
result = func(**body)
obj.put()
return flask.jsonify(result=result)
class HistoryView(flask.views.MethodView):
"""Implements /history endpoints for models."""
def __init__(self, classname, cls):
super(HistoryView, self).__init__()
self._classname = classname
self._cls = cls
def post(self, object_id):
"""Fetch the history for an object."""
default_user_authentication()
body = flask.request.get_json()
if body is None:
flask.abort(400, 'JSON body and mime type required.')
start_time = body.pop('start_time', None)
end_time = body.pop('end_time', None)
if start_time is None or end_time is None:
flask.abort(400, 'start_time and end_time expected.')
obj = self._cls.get_by_id(object_id)
if not obj:
flask.abort(404)
result = obj.get_history(start=start_time, end=end_time)
result = list(result)
return flask.jsonify(result=result)
def default_user_authentication():
"""Ensure user is authenticated, and switch to
appropriate building namespace."""
user_object = user.get_user_object()
if not user_object:
return flask.abort(401)
# Need to pick a building for this user request
person = user.get_person()
buildings = list(person.buildings)
assert len(buildings) > 0
buildings.sort()
if 'building-id' in flask.request.headers:
building_id = flask.request.headers['building-id']
if building_id not in buildings:
flask.abort(401)
else:
building_id = buildings[0]
namespace_manager.set_namespace(building_id)
def register_class(blueprint, cls, create_callback):
"""Register a ndb model for rest endpoints."""
# register some handlers
class_view_func = ClassView.as_view('%s_crud' % cls.__name__,
blueprint.name, cls, create_callback)
blueprint.add_url_rule('/', defaults={'object_id': None},
view_func=class_view_func, methods=['GET',])
blueprint.add_url_rule('/<object_id>', view_func=class_view_func,
methods=['GET', 'POST', 'DELETE'])
command_view_func = CommandView.as_view('%s_command' % cls.__name__,
blueprint.name, cls)
blueprint.add_url_rule('/<object_id>/command', methods=['POST'],
view_func=command_view_func)
history_view_func = HistoryView.as_view('%s_history' % cls.__name__,
blueprint.name, cls)
blueprint.add_url_rule('/<object_id>/history', methods=['POST'],
view_func=history_view_func)
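A hedged usage sketch (the `Room` model and `app` below are hypothetical, not part of this package): an NDB model exposes a method through the `command` decorator and is wired into a Flask blueprint with `register_class`, after which POST `/api/room/<id>/command` with a body such as `{"command": "set_temperature", "value": 21}` dispatches to that method.

from google.appengine.ext import ndb


class Room(ndb.Model):
    """Hypothetical model used only to illustrate the REST layer above."""
    temperature = ndb.FloatProperty()

    @command
    def set_temperature(self, value):
        self.temperature = value
        return {'temperature': self.temperature}

    def sync(self):
        """Called by ClassView.post after populate()."""

    def get_history(self, start, end):
        """Called by HistoryView.post; returns an iterable of samples."""
        return []


app = flask.Flask(__name__)
rooms = flask.Blueprint('room', __name__)
register_class(rooms, Room,
               create_callback=lambda object_id, body: Room(id=object_id))
app.register_blueprint(rooms, url_prefix='/api/room')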
"""Tests for the WiLight integration."""
from unittest.mock import patch
import pytest
import pywilight
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_HS_COLOR,
DOMAIN as LIGHT_DOMAIN,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_OFF,
STATE_ON,
)
from homeassistant.helpers.typing import HomeAssistantType
from tests.components.wilight import (
HOST,
UPNP_MAC_ADDRESS,
UPNP_MODEL_NAME_COLOR,
UPNP_MODEL_NAME_DIMMER,
UPNP_MODEL_NAME_LIGHT_FAN,
UPNP_MODEL_NAME_P_B,
UPNP_MODEL_NUMBER,
UPNP_SERIAL,
WILIGHT_ID,
setup_integration,
)
@pytest.fixture(name="dummy_get_components_from_model_light")
def mock_dummy_get_components_from_model_light():
"""Mock a components list with light."""
components = ["light"]
with patch(
"pywilight.get_components_from_model",
return_value=components,
):
yield components
@pytest.fixture(name="dummy_device_from_host_light_fan")
def mock_dummy_device_from_host_light_fan():
"""Mock a valid api_devce."""
device = pywilight.wilight_from_discovery(
f"http://{HOST}:45995/wilight.xml",
UPNP_MAC_ADDRESS,
UPNP_MODEL_NAME_LIGHT_FAN,
UPNP_SERIAL,
UPNP_MODEL_NUMBER,
)
device.set_dummy(True)
with patch(
"pywilight.device_from_host",
return_value=device,
):
yield device
@pytest.fixture(name="dummy_device_from_host_pb")
def mock_dummy_device_from_host_pb():
"""Mock a valid api_devce."""
device = pywilight.wilight_from_discovery(
f"http://{HOST}:45995/wilight.xml",
UPNP_MAC_ADDRESS,
UPNP_MODEL_NAME_P_B,
UPNP_SERIAL,
UPNP_MODEL_NUMBER,
)
device.set_dummy(True)
with patch(
"pywilight.device_from_host",
return_value=device,
):
yield device
@pytest.fixture(name="dummy_device_from_host_dimmer")
def mock_dummy_device_from_host_dimmer():
"""Mock a valid api_devce."""
device = pywilight.wilight_from_discovery(
f"http://{HOST}:45995/wilight.xml",
UPNP_MAC_ADDRESS,
UPNP_MODEL_NAME_DIMMER,
UPNP_SERIAL,
UPNP_MODEL_NUMBER,
)
device.set_dummy(True)
with patch(
"pywilight.device_from_host",
return_value=device,
):
yield device
@pytest.fixture(name="dummy_device_from_host_color")
def mock_dummy_device_from_host_color():
"""Mock a valid api_devce."""
device = pywilight.wilight_from_discovery(
f"http://{HOST}:45995/wilight.xml",
UPNP_MAC_ADDRESS,
UPNP_MODEL_NAME_COLOR,
UPNP_SERIAL,
UPNP_MODEL_NUMBER,
)
device.set_dummy(True)
with patch(
"pywilight.device_from_host",
return_value=device,
):
yield device
async def test_loading_light(
hass: HomeAssistantType,
dummy_device_from_host_light_fan,
dummy_get_components_from_model_light,
) -> None:
"""Test the WiLight configuration entry loading."""
# Using light_fan and removing fan from get_components_from_model
# to test light.py line 28
entry = await setup_integration(hass)
assert entry
assert entry.unique_id == WILIGHT_ID
entity_registry = await hass.helpers.entity_registry.async_get_registry()
# First segment of the strip
state = hass.states.get("light.wl000000000099_1")
assert state
assert state.state == STATE_OFF
entry = entity_registry.async_get("light.wl000000000099_1")
assert entry
assert entry.unique_id == "WL000000000099_0"
async def test_on_off_light_state(
hass: HomeAssistantType, dummy_device_from_host_pb
) -> None:
"""Test the change of state of the light switches."""
await setup_integration(hass)
# Turn on
await hass.services.async_call(
LIGHT_DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "light.wl000000000099_1"},
blocking=True,
)
await hass.async_block_till_done()
state = hass.states.get("light.wl000000000099_1")
assert state
assert state.state == STATE_ON
# Turn off
await hass.services.async_call(
LIGHT_DOMAIN,
SERVICE_TURN_OFF,
{ATTR_ENTITY_ID: "light.wl000000000099_1"},
blocking=True,
)
await hass.async_block_till_done()
state = hass.states.get("light.wl000000000099_1")
assert state
assert state.state == STATE_OFF
async def test_dimmer_light_state(
hass: HomeAssistantType, dummy_device_from_host_dimmer
) -> None:
"""Test the change of state of the light switches."""
await setup_integration(hass)
await hass.services.async_call(
LIGHT_DOMAIN,
SERVICE_TURN_ON,
{ATTR_BRIGHTNESS: 42, ATTR_ENTITY_ID: "light.wl000000000099_1"},
blocking=True,
)
await hass.async_block_till_done()
state = hass.states.get("light.wl000000000099_1")
assert state
assert state.state == STATE_ON
assert state.attributes.get(ATTR_BRIGHTNESS) == 42
await hass.services.async_call(
LIGHT_DOMAIN,
SERVICE_TURN_ON,
{ATTR_BRIGHTNESS: 0, ATTR_ENTITY_ID: "light.wl000000000099_1"},
blocking=True,
)
await hass.async_block_till_done()
state = hass.states.get("light.wl000000000099_1")
assert state
assert state.state == STATE_OFF
await hass.services.async_call(
LIGHT_DOMAIN,
SERVICE_TURN_ON,
{ATTR_BRIGHTNESS: 100, ATTR_ENTITY_ID: "light.wl000000000099_1"},
blocking=True,
)
await hass.async_block_till_done()
state = hass.states.get("light.wl000000000099_1")
assert state
assert state.state == STATE_ON
assert state.attributes.get(ATTR_BRIGHTNESS) == 100
await hass.services.async_call(
LIGHT_DOMAIN,
SERVICE_TURN_OFF,
{ATTR_ENTITY_ID: "light.wl000000000099_1"},
blocking=True,
)
await hass.async_block_till_done()
state = hass.states.get("light.wl000000000099_1")
assert state
assert state.state == STATE_OFF
# Turn on
await hass.services.async_call(
LIGHT_DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "light.wl000000000099_1"},
blocking=True,
)
await hass.async_block_till_done()
state = hass.states.get("light.wl000000000099_1")
assert state
assert state.state == STATE_ON
async def test_color_light_state(
hass: HomeAssistantType, dummy_device_from_host_color
) -> None:
"""Test the change of state of the light switches."""
await setup_integration(hass)
await hass.services.async_call(
LIGHT_DOMAIN,
SERVICE_TURN_ON,
{
ATTR_BRIGHTNESS: 42,
ATTR_HS_COLOR: [0, 100],
ATTR_ENTITY_ID: "light.wl000000000099_1",
},
blocking=True,
)
await hass.async_block_till_done()
state = hass.states.get("light.wl000000000099_1")
assert state
assert state.state == STATE_ON
assert state.attributes.get(ATTR_BRIGHTNESS) == 42
state_color = [
round(state.attributes.get(ATTR_HS_COLOR)[0]),
round(state.attributes.get(ATTR_HS_COLOR)[1]),
]
assert state_color == [0, 100]
await hass.services.async_call(
LIGHT_DOMAIN,
SERVICE_TURN_ON,
{ATTR_BRIGHTNESS: 0, ATTR_ENTITY_ID: "light.wl000000000099_1"},
blocking=True,
)
await hass.async_block_till_done()
state = hass.states.get("light.wl000000000099_1")
assert state
assert state.state == STATE_OFF
await hass.services.async_call(
LIGHT_DOMAIN,
SERVICE_TURN_ON,
{
ATTR_BRIGHTNESS: 100,
ATTR_HS_COLOR: [270, 50],
ATTR_ENTITY_ID: "light.wl000000000099_1",
},
blocking=True,
)
await hass.async_block_till_done()
state = hass.states.get("light.wl000000000099_1")
assert state
assert state.state == STATE_ON
assert state.attributes.get(ATTR_BRIGHTNESS) == 100
state_color = [
round(state.attributes.get(ATTR_HS_COLOR)[0]),
round(state.attributes.get(ATTR_HS_COLOR)[1]),
]
assert state_color == [270, 50]
await hass.services.async_call(
LIGHT_DOMAIN,
SERVICE_TURN_OFF,
{ATTR_ENTITY_ID: "light.wl000000000099_1"},
blocking=True,
)
await hass.async_block_till_done()
state = hass.states.get("light.wl000000000099_1")
assert state
assert state.state == STATE_OFF
# Turn on
await hass.services.async_call(
LIGHT_DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: "light.wl000000000099_1"},
blocking=True,
)
await hass.async_block_till_done()
state = hass.states.get("light.wl000000000099_1")
assert state
assert state.state == STATE_ON
# Hue = 0, Saturation = 100
await hass.services.async_call(
LIGHT_DOMAIN,
SERVICE_TURN_ON,
{ATTR_HS_COLOR: [0, 100], ATTR_ENTITY_ID: "light.wl000000000099_1"},
blocking=True,
)
await hass.async_block_till_done()
state = hass.states.get("light.wl000000000099_1")
assert state
assert state.state == STATE_ON
state_color = [
round(state.attributes.get(ATTR_HS_COLOR)[0]),
round(state.attributes.get(ATTR_HS_COLOR)[1]),
]
assert state_color == [0, 100]
# Brightness = 60
await hass.services.async_call(
LIGHT_DOMAIN,
SERVICE_TURN_ON,
{ATTR_BRIGHTNESS: 60, ATTR_ENTITY_ID: "light.wl000000000099_1"},
blocking=True,
)
await hass.async_block_till_done()
state = hass.states.get("light.wl000000000099_1")
assert state
assert state.state == STATE_ON
assert state.attributes.get(ATTR_BRIGHTNESS) == 60
import os
import sys
import numpy as np
from ..pakbase import Package
from ..utils import Util2d, Util3d
class ModflowBcf(Package):
"""
MODFLOW Block Centered Flow Package Class.
Parameters
----------
model : model object
The model object (of type :class:`flopy.modflow.Modflow`) to which
this package will be added.
ipakcb : int
A flag that is used to determine if cell-by-cell budget data should be
saved. If ipakcb is non-zero cell-by-cell budget data will be saved.
(default is 53)
intercellt : int
Intercell transmissivities, harmonic mean (0), arithmetic mean (1),
logarithmic mean (2), combination (3). (default is 0)
laycon : int
Layer type, confined (0), unconfined (1), constant T, variable S (2),
variable T, variable S (3) (default is 3)
trpy : float or array of floats (nlay)
horizontal anisotropy ratio (default is 1.0)
hdry : float
head assigned when cell is dry - used as indicator (default is -1E+30)
iwdflg : int
flag to indicate if wetting is inactive (0) or not (non zero)
(default is 0)
wetfct : float
factor used when cell is converted from dry to wet (default is 0.1)
iwetit : int
iteration interval in wetting/drying algorithm (default is 1)
ihdwet : int
flag to indicate how initial head is computed for cells that become
wet (default is 0)
tran : float or array of floats (nlay, nrow, ncol), optional
transmissivity (only read if laycon is 0 or 2) (default is 1.0)
hy : float or array of floats (nlay, nrow, ncol)
hydraulic conductivity (only read if laycon is 1 or 3)
(default is 1.0)
vcont : float or array of floats (nlay-1, nrow, ncol)
vertical leakance between layers (default is 1.0)
sf1 : float or array of floats (nlay, nrow, ncol)
specific storage (confined) or storage coefficient (unconfined),
read when there is at least one transient stress period.
(default is 1e-5)
sf2 : float or array of floats (nrow, ncol)
specific yield, only read when laycon is 2 or 3 and there is at least
one transient stress period (default is 0.15)
wetdry : float
a combination of the wetting threshold and a flag to indicate which
neighboring cells can cause a cell to become wet (default is -0.01)
extension : string
Filename extension (default is 'bcf')
unitnumber : int
File unit number (default is None).
filenames : str or list of str
Filenames to use for the package and the output files. If
filenames=None the package name will be created using the model name
and package extension and the cbc output name will be created using
the model name and .cbc extension (for example, modflowtest.cbc),
if ipakcb is a number greater than zero. If a single string is passed
the package will be set to the string and cbc output name will be
created using the model name and .cbc extension, if ipakcb is a
number greater than zero. To define the names for all package files
(input and output) the length of the list of strings should be 2.
Default is None.
Methods
-------
See Also
--------
Notes
-----
Examples
--------
>>> import flopy
>>> ml = flopy.modflow.Modflow()
>>> bcf = flopy.modflow.ModflowBcf(ml)
"""
def __init__(self, model, ipakcb=None, intercellt=0, laycon=3, trpy=1.0,
hdry=-1E+30, iwdflg=0, wetfct=0.1, iwetit=1, ihdwet=0,
tran=1.0, hy=1.0, vcont=1.0, sf1=1e-5, sf2=0.15, wetdry=-0.01,
extension='bcf', unitnumber=None, filenames=None):
if unitnumber is None:
unitnumber = ModflowBcf.defaultunit()
# set filenames
if filenames is None:
filenames = [None, None]
elif isinstance(filenames, str):
filenames = [filenames, None]
elif isinstance(filenames, list):
if len(filenames) < 2:
filenames.append(None)
# update external file information with cbc output, if necessary
if ipakcb is not None:
fname = filenames[1]
model.add_output_file(ipakcb, fname=fname,
package=ModflowBcf.ftype())
else:
ipakcb = 0
# Fill namefile items
name = [ModflowBcf.ftype()]
units = [unitnumber]
extra = ['']
# set package name
fname = [filenames[0]]
# Call ancestor's init to set self.parent, extension, name and unit number
Package.__init__(self, model, extension=extension, name=name,
unit_number=units, extra=extra, filenames=fname)
self.url = 'bcf.htm'
nrow, ncol, nlay, nper = self.parent.nrow_ncol_nlay_nper
# Set values of all parameters
self.intercellt = Util2d(model, (nlay,), np.int, intercellt,
name='intercellt', locat=self.unit_number[0])
self.laycon = Util2d(model, (nlay,), np.int, laycon, name='laycon',
locat=self.unit_number[0])
self.trpy = Util2d(model, (nlay,), np.float32, trpy,
name='Anisotropy factor', locat=self.unit_number[0])
# item 1
self.ipakcb = ipakcb
self.hdry = hdry
self.iwdflg = iwdflg
self.wetfct = wetfct
self.iwetit = iwetit
self.ihdwet = ihdwet
self.tran = Util3d(model, (nlay, nrow, ncol), np.float32, tran,
'Transmissivity', locat=self.unit_number[0])
self.hy = Util3d(model, (nlay, nrow, ncol), np.float32, hy,
'Horizontal Hydraulic Conductivity',
locat=self.unit_number[0])
if model.nlay > 1:
self.vcont = Util3d(model, (nlay - 1, nrow, ncol), np.float32,
vcont,
'Vertical Conductance',
locat=self.unit_number[0])
else:
self.vcont = None
self.sf1 = Util3d(model, (nlay, nrow, ncol), np.float32, sf1,
'Primary Storage Coefficient',
locat=self.unit_number[0])
self.sf2 = Util3d(model, (nlay, nrow, ncol), np.float32, sf2,
'Secondary Storage Coefficient',
locat=self.unit_number[0])
self.wetdry = Util3d(model, (nlay, nrow, ncol), np.float32, wetdry,
'WETDRY', locat=self.unit_number[0])
self.parent.add_package(self)
return
def write_file(self):
"""
Write the package file.
Returns
-------
None
"""
# get model information
nrow, ncol, nlay, nper = self.parent.nrow_ncol_nlay_nper
dis = self.parent.get_package('DIS')
if dis is None:
dis = self.parent.get_package('DISU')
# Open file for writing
f_bcf = open(self.fn_path, 'w')
# Item 1: ipakcb, HDRY, IWDFLG, WETFCT, IWETIT, IHDWET
f_bcf.write('{:10d}{:10.6G}{:10d}{:10.3f}{:10d}{:10d}\n'.format(
self.ipakcb, self.hdry, self.iwdflg, self.wetfct, self.iwetit,
self.ihdwet))
# LAYCON array
for k in range(nlay):
if self.intercellt[k] > 0:
f_bcf.write('{0:1d}{1:1d} '.format(self.intercellt[k],
self.laycon[k]))
else:
f_bcf.write('{0:1d} '.format(self.laycon[k]))
f_bcf.write('\n')
f_bcf.write(self.trpy.get_file_entry())
transient = not dis.steady.all()
for k in range(nlay):
if transient:
f_bcf.write(self.sf1[k].get_file_entry())
if ((self.laycon[k] == 0) or (self.laycon[k] == 2)):
f_bcf.write(self.tran[k].get_file_entry())
else:
f_bcf.write(self.hy[k].get_file_entry())
if k < nlay - 1:
f_bcf.write(self.vcont[k].get_file_entry())
if ((transient == True) and (
(self.laycon[k] == 2) or (self.laycon[k] == 3))):
f_bcf.write(self.sf2[k].get_file_entry())
if ((self.iwdflg != 0) and (
(self.laycon[k] == 1) or (self.laycon[k] == 3))):
f_bcf.write(self.wetdry[k].get_file_entry())
f_bcf.close()
@staticmethod
def load(f, model, ext_unit_dict=None):
"""
Load an existing package.
Parameters
----------
f : filename or file handle
File to load.
model : model object
The model object (of type :class:`flopy.modflow.mf.Modflow`) to
which this package will be added.
ext_unit_dict : dictionary, optional
If the arrays in the file are specified using EXTERNAL,
or older style array control records, then `f` should be a file
handle. In this case ext_unit_dict is required, which can be
constructed using the function
:class:`flopy.utils.mfreadnam.parsenamefile`.
Returns
-------
bcf : ModflowBcf object
ModflowBcf object.
Examples
--------
>>> import flopy
>>> m = flopy.modflow.Modflow()
>>> bcf = flopy.modflow.ModflowBcf.load('test.bcf', m)
"""
if model.verbose:
sys.stdout.write('loading bcf package file...\n')
if not hasattr(f, 'read'):
filename = f
f = open(filename, 'r')
# dataset 0 -- header
while True:
line = f.readline()
if line[0] != '#':
break
# determine problem dimensions
nr, nc, nlay, nper = model.get_nrow_ncol_nlay_nper()
dis = model.get_package('DIS')
if dis is None:
dis = model.get_package('DISU')
# Item 1: ipakcb, HDRY, IWDFLG, WETFCT, IWETIT, IHDWET - line already read above
if model.verbose:
print(' loading ipakcb, HDRY, IWDFLG, WETFCT, IWETIT, IHDWET...')
t = line.strip().split()
ipakcb, hdry, iwdflg, wetfct, iwetit, ihdwet = int(t[0]), \
float(t[1]), \
int(t[2]), \
float(t[3]), \
int(t[4]), \
int(t[5])
# LAYCON array
ifrefm = model.get_ifrefm()
if model.verbose:
print(' loading LAYCON...')
line = f.readline()
if ifrefm:
t = []
tt = line.strip().split()
for iv in tt:
t.append(iv)
# read the rest of the laycon values
if len(t) < nlay:
while True:
line = f.readline()
tt = line.strip().split()
for iv in tt:
t.append(iv)
if len(t) == nlay:
break
else:
t = []
istart = 0
for k in range(nlay):
lcode = line[istart:istart+2]
lcode = lcode.replace(' ', '0')
t.append(lcode)
istart += 2
intercellt = np.zeros(nlay, dtype=np.int)
laycon = np.zeros(nlay, dtype=np.int)
for k in range(nlay):
if len(t[k]) > 1:
intercellt[k] = int(t[k][0])
laycon[k] = int(t[k][1])
else:
laycon[k] = int(t[k])
# TRPY array
if model.verbose:
print(' loading TRPY...')
trpy = Util2d.load(f, model, (1, nlay), np.float32, 'trpy',
ext_unit_dict)
trpy = trpy.array.reshape((nlay))
# property data for each layer based on options
transient = not dis.steady.all()
sf1 = [0] * nlay
tran = [0] * nlay
hy = [0] * nlay
if nlay > 1:
vcont = [0] * (nlay - 1)
else:
vcont = [0] * nlay
sf2 = [0] * nlay
wetdry = [0] * nlay
for k in range(nlay):
# allow for unstructured changing nodes per layer
if nr is None:
nrow = 1
ncol = nc[k]
else:
nrow = nr
ncol = nc
# sf1
if transient:
if model.verbose:
print(' loading sf1 layer {0:3d}...'.format(k + 1))
t = Util2d.load(f, model, (nrow, ncol), np.float32, 'sf1',
ext_unit_dict)
sf1[k] = t
# tran or hy
if ((laycon[k] == 0) or (laycon[k] == 2)):
if model.verbose:
print(' loading tran layer {0:3d}...'.format(k + 1))
t = Util2d.load(f, model, (nrow, ncol), np.float32, 'tran',
ext_unit_dict)
tran[k] = t
else:
if model.verbose:
print(' loading hy layer {0:3d}...'.format(k + 1))
t = Util2d.load(f, model, (nrow, ncol), np.float32, 'hy',
ext_unit_dict)
hy[k] = t
# vcont
if k < (nlay - 1):
if model.verbose:
print(' loading vcont layer {0:3d}...'.format(k + 1))
t = Util2d.load(f, model, (nrow, ncol), np.float32, 'vcont',
ext_unit_dict)
vcont[k] = t
# sf2
if (transient and ((laycon[k] == 2) or (laycon[k] == 3))):
if model.verbose:
print(' loading sf2 layer {0:3d}...'.format(k + 1))
t = Util2d.load(f, model, (nrow, ncol), np.float32, 'sf2',
ext_unit_dict)
sf2[k] = t
# wetdry
if ((iwdflg != 0) and ((laycon[k] == 1) or (laycon[k] == 3))):
if model.verbose:
print(' loading wetdry layer {0:3d}...'.format(k + 1))
t = Util2d.load(f, model, (nrow, ncol), np.float32, 'wetdry',
ext_unit_dict)
wetdry[k] = t
# set package unit number
unitnumber = None
filenames = [None, None]
if ext_unit_dict is not None:
unitnumber, filenames[0] = \
model.get_ext_dict_attr(ext_unit_dict,
filetype=ModflowBcf.ftype())
if ipakcb > 0:
iu, filenames[1] = \
model.get_ext_dict_attr(ext_unit_dict, unit=ipakcb)
model.add_pop_key_list(ipakcb)
# create instance of bcf object
bcf = ModflowBcf(model, ipakcb=ipakcb, intercellt=intercellt,
laycon=laycon, trpy=trpy, hdry=hdry,
iwdflg=iwdflg, wetfct=wetfct, iwetit=iwetit,
ihdwet=ihdwet,
tran=tran, hy=hy, vcont=vcont, sf1=sf1, sf2=sf2,
wetdry=wetdry,
unitnumber=unitnumber, filenames=filenames)
# return bcf object
return bcf
@staticmethod
def ftype():
return 'BCF6'
@staticmethod
def defaultunit():
return 15
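A slightly fuller sketch than the docstring example above (workspace and names are hypothetical): attach a discretization and a BCF package to a small transient model, then write the input file with the `write_file` method defined above.

import flopy

m = flopy.modflow.Modflow(modelname='bcftest', model_ws='.')
# One transient stress period so that the storage arrays (sf1) are written.
dis = flopy.modflow.ModflowDis(m, nlay=2, nrow=10, ncol=10, nper=1,
                               steady=False)
bcf = flopy.modflow.ModflowBcf(m, laycon=[1, 0], hy=10.0, tran=1000.0,
                               vcont=0.01, sf1=1e-5, sf2=0.15)
bcf.write_file()  # writes bcftest.bcf in the model workspace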
#
# pandas documentation build configuration file, created by
#
# This file is execfile()d with the current directory set to its containing
# dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
from datetime import datetime
import importlib
import inspect
import logging
import os
import sys
import jinja2
from numpydoc.docscrape import NumpyDocString
from sphinx.ext.autosummary import _import_by_name
logger = logging.getLogger(__name__)
# https://github.com/sphinx-doc/sphinx/pull/2325/files
# Workaround for sphinx-build recursion limit overflow:
# pickle.dump(doctree, f, pickle.HIGHEST_PROTOCOL)
# RuntimeError: maximum recursion depth exceeded while pickling an object
#
# Python's default allowed recursion depth is 1000.
sys.setrecursionlimit(5000)
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.append(os.path.abspath('.'))
sys.path.insert(0, os.path.abspath("../sphinxext"))
sys.path.extend(
[
# numpy standard doc extensions
os.path.join(os.path.dirname(__file__), "..", "../..", "sphinxext")
]
)
# -- General configuration -----------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
# sphinxext.
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.autosummary",
"sphinx.ext.doctest",
"sphinx.ext.extlinks",
"sphinx.ext.todo",
"numpydoc", # handle NumPy documentation formatted docstrings
"IPython.sphinxext.ipython_directive",
"IPython.sphinxext.ipython_console_highlighting",
"matplotlib.sphinxext.plot_directive",
"sphinx.ext.intersphinx",
"sphinx.ext.coverage",
"sphinx.ext.mathjax",
"sphinx.ext.ifconfig",
"sphinx.ext.linkcode",
"nbsphinx",
"sphinx_panels",
"contributors", # custom pandas extension
]
exclude_patterns = [
"**.ipynb_checkpoints",
# to ensure that include files (partial pages) aren't built, exclude them
# https://github.com/sphinx-doc/sphinx/issues/1965#issuecomment-124732907
"**/includes/**",
]
try:
import nbconvert
except ImportError:
logger.warning("nbconvert not installed. Skipping notebooks.")
exclude_patterns.append("**/*.ipynb")
else:
try:
nbconvert.utils.pandoc.get_pandoc_version()
except nbconvert.utils.pandoc.PandocMissing:
logger.warning("Pandoc not installed. Skipping notebooks.")
exclude_patterns.append("**/*.ipynb")
# sphinx_pattern can be '-api' to exclude the API pages,
# the path to a file, or a Python object
# (e.g. '10min.rst' or 'pandas.DataFrame.head')
source_path = os.path.dirname(os.path.abspath(__file__))
pattern = os.environ.get("SPHINX_PATTERN")
single_doc = pattern is not None and pattern not in ("-api", "whatsnew")
include_api = pattern is None or pattern == "whatsnew"
if pattern:
for dirname, dirs, fnames in os.walk(source_path):
reldir = os.path.relpath(dirname, source_path)
for fname in fnames:
if os.path.splitext(fname)[-1] in (".rst", ".ipynb"):
fname = os.path.relpath(os.path.join(dirname, fname), source_path)
if fname == "index.rst" and os.path.abspath(dirname) == source_path:
continue
elif pattern == "-api" and reldir.startswith("reference"):
exclude_patterns.append(fname)
elif (
pattern == "whatsnew"
and not reldir.startswith("reference")
and reldir != "whatsnew"
):
exclude_patterns.append(fname)
elif single_doc and fname != pattern:
exclude_patterns.append(fname)
with open(os.path.join(source_path, "index.rst.template")) as f:
t = jinja2.Template(f.read())
with open(os.path.join(source_path, "index.rst"), "w") as f:
f.write(
t.render(
include_api=include_api,
single_doc=(pattern if single_doc else None),
)
)
autosummary_generate = True if include_api else ["index"]
autodoc_typehints = "none"
# numpydoc
numpydoc_attributes_as_param_list = False
# matplotlib plot directive
plot_include_source = True
plot_formats = [("png", 90)]
plot_html_show_formats = False
plot_html_show_source_link = False
plot_pre_code = """import numpy as np
import pandas as pd"""
# nbsphinx: do not use requirejs (it breaks bootstrap)
nbsphinx_requirejs_path = ""
# sphinx-panels shouldn't add bootstrap css since the pydata-sphinx-theme
# already loads it
panels_add_bootstrap_css = False
# Add any paths that contain templates here, relative to this directory.
templates_path = ["../_templates"]
# The suffix of source filenames.
source_suffix = [".rst"]
# The encoding of source files.
source_encoding = "utf-8"
# The master toctree document.
master_doc = "index"
# General information about the project.
project = "pandas"
copyright = f"2008-{datetime.now().year}, the pandas development team"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
import pandas # isort:skip
# version = '%s r%s' % (pandas.__version__, svn_version())
version = str(pandas.__version__)
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
language = "en"
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
# unused_docs = []
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = "pydata_sphinx_theme"
# The style sheet to use for HTML and HTML Help pages. A file of that name
# must exist either in Sphinx' static/ path, or in one of the custom paths
# given in html_static_path.
# html_style = 'statsmodels.css'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
"external_links": [],
"github_url": "https://github.com/pandas-dev/pandas",
"twitter_url": "https://twitter.com/pandas_dev",
"google_analytics_id": "UA-27880019-2",
}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = ["themes"]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = "../../web/pandas/static/img/pandas.svg"
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
html_css_files = [
"css/getting_started.css",
"css/pandas.css",
]
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
html_favicon = "../../web/pandas/static/img/favicon.ico"
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# Add redirect for previously existing API pages
# each item is like `(from_old, to_new)`
# To redirect a class and all its methods, see below
# https://github.com/pandas-dev/pandas/issues/16186
moved_api_pages = [
("pandas.core.common.isnull", "pandas.isna"),
("pandas.core.common.notnull", "pandas.notna"),
("pandas.core.reshape.get_dummies", "pandas.get_dummies"),
("pandas.tools.merge.concat", "pandas.concat"),
("pandas.tools.merge.merge", "pandas.merge"),
("pandas.tools.pivot.pivot_table", "pandas.pivot_table"),
("pandas.tseries.tools.to_datetime", "pandas.to_datetime"),
("pandas.io.clipboard.read_clipboard", "pandas.read_clipboard"),
("pandas.io.excel.ExcelFile.parse", "pandas.ExcelFile.parse"),
("pandas.io.excel.read_excel", "pandas.read_excel"),
("pandas.io.gbq.read_gbq", "pandas.read_gbq"),
("pandas.io.html.read_html", "pandas.read_html"),
("pandas.io.json.read_json", "pandas.read_json"),
("pandas.io.parsers.read_csv", "pandas.read_csv"),
("pandas.io.parsers.read_fwf", "pandas.read_fwf"),
("pandas.io.parsers.read_table", "pandas.read_table"),
("pandas.io.pickle.read_pickle", "pandas.read_pickle"),
("pandas.io.pytables.HDFStore.append", "pandas.HDFStore.append"),
("pandas.io.pytables.HDFStore.get", "pandas.HDFStore.get"),
("pandas.io.pytables.HDFStore.put", "pandas.HDFStore.put"),
("pandas.io.pytables.HDFStore.select", "pandas.HDFStore.select"),
("pandas.io.pytables.read_hdf", "pandas.read_hdf"),
("pandas.io.sql.read_sql", "pandas.read_sql"),
("pandas.io.sql.read_frame", "pandas.read_frame"),
("pandas.io.sql.write_frame", "pandas.write_frame"),
("pandas.io.stata.read_stata", "pandas.read_stata"),
]
# Again, tuples of (from_old, to_new)
moved_classes = [
("pandas.tseries.resample.Resampler", "pandas.core.resample.Resampler"),
("pandas.formats.style.Styler", "pandas.io.formats.style.Styler"),
]
for old, new in moved_classes:
# the class itself...
moved_api_pages.append((old, new))
mod, classname = new.rsplit(".", 1)
klass = getattr(importlib.import_module(mod), classname)
methods = [
x for x in dir(klass) if not x.startswith("_") or x in ("__iter__", "__array__")
]
for method in methods:
# ... and each of its public methods
moved_api_pages.append((f"{old}.{method}", f"{new}.{method}"))
if include_api:
html_additional_pages = {
"generated/" + page[0]: "api_redirect.html" for page in moved_api_pages
}
header = f"""\
.. currentmodule:: pandas
.. ipython:: python
:suppress:
import numpy as np
import pandas as pd
np.random.seed(123456)
np.set_printoptions(precision=4, suppress=True)
pd.options.display.max_rows = 15
import os
os.chdir(r'{os.path.dirname(os.path.dirname(__file__))}')
"""
html_context = {
"redirects": {old: new for old, new in moved_api_pages},
"header": header,
}
# If false, no module index is generated.
html_use_modindex = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = "pandas"
# -- Options for nbsphinx ------------------------------------------------
nbsphinx_allow_errors = True
# -- Options for LaTeX output --------------------------------------------
latex_elements = {}
# The paper size ('letter' or 'a4').
# latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
# latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples (source start
# file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
(
"index",
"pandas.tex",
"pandas: powerful Python data analysis toolkit",
"Wes McKinney and the Pandas Development Team",
"manual",
)
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# Additional stuff for the LaTeX preamble.
# latex_preamble = ''
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_use_modindex = True
if include_api:
intersphinx_mapping = {
"dateutil": ("https://dateutil.readthedocs.io/en/latest/", None),
"matplotlib": ("https://matplotlib.org/stable/", None),
"numpy": ("https://numpy.org/doc/stable/", None),
"pandas-gbq": ("https://pandas-gbq.readthedocs.io/en/latest/", None),
"py": ("https://pylib.readthedocs.io/en/latest/", None),
"python": ("https://docs.python.org/3/", None),
"scipy": ("https://docs.scipy.org/doc/scipy/reference/", None),
"statsmodels": ("https://www.statsmodels.org/devel/", None),
"pyarrow": ("https://arrow.apache.org/docs/", None),
}
# extlinks alias
extlinks = {
"issue": ("https://github.com/pandas-dev/pandas/issues/%s", "GH"),
"wiki": ("https://github.com/pandas-dev/pandas/wiki/%s", "wiki "),
}
ipython_warning_is_error = False
ipython_execlines = [
"import numpy as np",
"import pandas as pd",
    # This ensures correct rendering on systems with a console encoding != utf8
    # (Windows). It forces pandas to encode its output reprs using utf8
# wherever the docs are built. The docs' target is the browser, not
# the console, so this is fine.
'pd.options.display.encoding="utf8"',
]
# Add custom Documenter to handle attributes/methods of an AccessorProperty
# eg pandas.Series.str and pandas.Series.dt (see GH9322)
import sphinx # isort:skip
from sphinx.ext.autodoc import ( # isort:skip
AttributeDocumenter,
Documenter,
MethodDocumenter,
)
from sphinx.ext.autosummary import Autosummary # isort:skip
class AccessorDocumenter(MethodDocumenter):
"""
Specialized Documenter subclass for accessors.
"""
objtype = "accessor"
directivetype = "method"
# lower than MethodDocumenter so this is not chosen for normal methods
priority = 0.6
def format_signature(self):
# this method gives an error/warning for the accessors, therefore
# overriding it (accessor has no arguments)
return ""
class AccessorLevelDocumenter(Documenter):
"""
Specialized Documenter subclass for objects on accessor level (methods,
attributes).
"""
# This is the simple straightforward version
    # modname is None, base is the last element (eg 'hour')
    # and path is the part before (eg 'Series.dt')
# def resolve_name(self, modname, parents, path, base):
# modname = 'pandas'
# mod_cls = path.rstrip('.')
# mod_cls = mod_cls.split('.')
#
# return modname, mod_cls + [base]
def resolve_name(self, modname, parents, path, base):
if modname is None:
if path:
mod_cls = path.rstrip(".")
else:
mod_cls = None
# if documenting a class-level object without path,
# there must be a current class, either from a parent
# auto directive ...
mod_cls = self.env.temp_data.get("autodoc:class")
# ... or from a class directive
if mod_cls is None:
mod_cls = self.env.temp_data.get("py:class")
# ... if still None, there's no way to know
if mod_cls is None:
return None, []
# HACK: this is added in comparison to ClassLevelDocumenter
            # mod_cls still consists of class.accessor, so an extra
# rpartition is needed
modname, _, accessor = mod_cls.rpartition(".")
modname, _, cls = modname.rpartition(".")
parents = [cls, accessor]
# if the module name is still missing, get it like above
if not modname:
modname = self.env.temp_data.get("autodoc:module")
if not modname:
if sphinx.__version__ > "1.3":
modname = self.env.ref_context.get("py:module")
else:
modname = self.env.temp_data.get("py:module")
# ... else, it stays None, which means invalid
return modname, parents + [base]
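# Illustrative walkthrough (not executed code): documenting ``Series.str.upper``
# under ``.. currentmodule:: pandas`` reaches resolve_name with path "Series.str."
# and base "upper"; mod_cls becomes "Series.str", the two rpartition calls yield
# parents ["Series", "str"], and modname falls back to the current module
# ("pandas"), so the object resolves to pandas.Series.str.upper.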
class AccessorAttributeDocumenter(AccessorLevelDocumenter, AttributeDocumenter):
objtype = "accessorattribute"
directivetype = "attribute"
# lower than AttributeDocumenter so this is not chosen for normal
# attributes
priority = 0.6
class AccessorMethodDocumenter(AccessorLevelDocumenter, MethodDocumenter):
objtype = "accessormethod"
directivetype = "method"
# lower than MethodDocumenter so this is not chosen for normal methods
priority = 0.6
class AccessorCallableDocumenter(AccessorLevelDocumenter, MethodDocumenter):
"""
    This documenter lets us remove .__call__ from the method signature for
callable accessors like Series.plot
"""
objtype = "accessorcallable"
directivetype = "method"
# lower than MethodDocumenter; otherwise the doc build prints warnings
priority = 0.5
    def format_name(self):
        # slice off the ".__call__" suffix; rstrip would strip a character set
        name = MethodDocumenter.format_name(self)
        return name[: -len(".__call__")] if name.endswith(".__call__") else name
class PandasAutosummary(Autosummary):
"""
This alternative autosummary class lets us override the table summary for
Series.plot and DataFrame.plot in the API docs.
"""
def _replace_pandas_items(self, display_name, sig, summary, real_name):
        # this is a hack: ideally we should extract the signature from the
# .__call__ method instead of hard coding this
if display_name == "DataFrame.plot":
sig = "([x, y, kind, ax, ....])"
summary = "DataFrame plotting accessor and method"
elif display_name == "Series.plot":
sig = "([kind, ax, figsize, ....])"
summary = "Series plotting accessor and method"
return (display_name, sig, summary, real_name)
@staticmethod
def _is_deprecated(real_name):
try:
obj, parent, modname = _import_by_name(real_name)
except ImportError:
return False
doc = NumpyDocString(obj.__doc__ or "")
summary = "".join(doc["Summary"] + doc["Extended Summary"])
return ".. deprecated::" in summary
def _add_deprecation_prefixes(self, items):
for item in items:
display_name, sig, summary, real_name = item
if self._is_deprecated(real_name):
summary = f"(DEPRECATED) {summary}"
yield display_name, sig, summary, real_name
def get_items(self, names):
items = Autosummary.get_items(self, names)
items = [self._replace_pandas_items(*item) for item in items]
items = list(self._add_deprecation_prefixes(items))
return items
# based on numpy doc/source/conf.py
def linkcode_resolve(domain, info):
"""
    Determine the URL corresponding to a Python object
"""
if domain != "py":
return None
modname = info["module"]
fullname = info["fullname"]
submod = sys.modules.get(modname)
if submod is None:
return None
obj = submod
for part in fullname.split("."):
try:
obj = getattr(obj, part)
except AttributeError:
return None
try:
fn = inspect.getsourcefile(inspect.unwrap(obj))
except TypeError:
fn = None
if not fn:
return None
try:
source, lineno = inspect.getsourcelines(obj)
except OSError:
lineno = None
if lineno:
linespec = f"#L{lineno}-L{lineno + len(source) - 1}"
else:
linespec = ""
fn = os.path.relpath(fn, start=os.path.dirname(pandas.__file__))
if "+" in pandas.__version__:
return f"https://github.com/pandas-dev/pandas/blob/master/pandas/{fn}{linespec}"
else:
return (
f"https://github.com/pandas-dev/pandas/blob/"
f"v{pandas.__version__}/pandas/{fn}{linespec}"
)
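# A minimal sketch (hypothetical inputs; never called during the build) of how
# the sphinx.ext.linkcode extension invokes the resolver above and the kind of
# URL it is expected to return:
def _linkcode_resolve_example():
    info = {"module": "pandas", "fullname": "DataFrame.head"}
    # e.g. "https://github.com/pandas-dev/pandas/blob/<master or vX.Y.Z>/pandas/core/frame.py#L<start>-L<end>"
    return linkcode_resolve("py", info)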
# remove the docstring of the flags attribute (inherited from numpy ndarray)
# because these give doc build errors (see GH issue 5331)
def remove_flags_docstring(app, what, name, obj, options, lines):
if what == "attribute" and name.endswith(".flags"):
del lines[:]
def process_class_docstrings(app, what, name, obj, options, lines):
"""
For those classes for which we use ::
:template: autosummary/class_without_autosummary.rst
the documented attributes/methods have to be listed in the class
docstring. However, if one of those lists is empty, we use 'None',
which then generates warnings in sphinx / ugly html output.
This "autodoc-process-docstring" event connector removes that part
from the processed docstring.
"""
if what == "class":
joined = "\n".join(lines)
templates = [
""".. rubric:: Attributes
.. autosummary::
:toctree:
None
""",
""".. rubric:: Methods
.. autosummary::
:toctree:
None
""",
]
for template in templates:
if template in joined:
joined = joined.replace(template, "")
lines[:] = joined.split("\n")
_BUSINED_ALIASES = [
"pandas.tseries.offsets." + name
for name in [
"BDay",
"CDay",
"BMonthEnd",
"BMonthBegin",
"CBMonthEnd",
"CBMonthBegin",
]
]
def process_business_alias_docstrings(app, what, name, obj, options, lines):
"""
Starting with sphinx 3.4, the "autodoc-process-docstring" event also
gets called for alias classes. This results in numpydoc adding the
methods/attributes to the docstring, which we don't want (+ this
causes warnings with sphinx).
"""
if name in _BUSINED_ALIASES:
lines[:] = []
suppress_warnings = [
# We "overwrite" autosummary with our PandasAutosummary, but
# still want the regular autosummary setup to run. So we just
# suppress this warning.
"app.add_directive"
]
if pattern:
    # When building a single document, we don't want to warn because references
    # to other documents are expected to be unknown
suppress_warnings.append("ref.ref")
def rstjinja(app, docname, source):
"""
Render our pages as a jinja template for fancy templating goodness.
"""
# https://www.ericholscher.com/blog/2016/jul/25/integrating-jinja-rst-sphinx/
# Make sure we're outputting HTML
if app.builder.format != "html":
return
src = source[0]
rendered = app.builder.templates.render_string(src, app.config.html_context)
source[0] = rendered
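# For illustration (a hypothetical .rst page, not a file in this repo), the
# source-read hook above lets a document interpolate values from html_context,
# e.g. the shared ipython setup block defined earlier:
#
#   {{ header }}
#
#   Working with missing data
#   =========================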
def setup(app):
app.connect("source-read", rstjinja)
app.connect("autodoc-process-docstring", remove_flags_docstring)
app.connect("autodoc-process-docstring", process_class_docstrings)
app.connect("autodoc-process-docstring", process_business_alias_docstrings)
app.add_autodocumenter(AccessorDocumenter)
app.add_autodocumenter(AccessorAttributeDocumenter)
app.add_autodocumenter(AccessorMethodDocumenter)
app.add_autodocumenter(AccessorCallableDocumenter)
app.add_directive("autosummary", PandasAutosummary)
|
|
import IPython, graphviz, re
from io import StringIO
from IPython.display import Image
import numpy as np
import pandas as pd
import math
from sklearn import tree
from sklearn.datasets import load_boston, load_iris
from collections import defaultdict
import string
import re
YELLOW = "#fefecd" # "#fbfbd0" # "#FBFEB0"
BLUE = "#D9E6F5"
GREEN = "#cfe2d4"
color_blind_friendly_colors = {
'redorange': '#f46d43',
'orange': '#fdae61', 'yellow': '#fee090', 'sky': '#e0f3f8',
'babyblue': '#abd9e9', 'lightblue': '#74add1', 'blue': '#4575b4'
}
color_blind_friendly_colors = [
None, # 0 classes
None, # 1 class
[YELLOW,BLUE], # 2 classes
[YELLOW,BLUE,GREEN], # 3 classes
[YELLOW,BLUE,GREEN,'#a1dab4'], # 4
[YELLOW,BLUE,GREEN,'#a1dab4','#41b6c4'], # 5
[YELLOW,'#c7e9b4','#7fcdbb','#41b6c4','#2c7fb8','#253494'], # 6
[YELLOW,'#c7e9b4','#7fcdbb','#41b6c4','#1d91c0','#225ea8','#0c2c84'], # 7
[YELLOW,'#edf8b1','#c7e9b4','#7fcdbb','#41b6c4','#1d91c0','#225ea8','#0c2c84'], # 8
[YELLOW,'#ece7f2','#d0d1e6','#a6bddb','#74a9cf','#3690c0','#0570b0','#045a8d','#023858'], # 9
[YELLOW,'#e0f3f8','#313695','#fee090','#4575b4','#fdae61','#abd9e9','#74add1','#d73027','#f46d43'] # 10
]
for x in color_blind_friendly_colors[2:]:
print(x)
max_class_colors = len(color_blind_friendly_colors)-1
def tree_traverse(n_nodes, children_left, children_right):
"""
    Derived from http://scikit-learn.org/stable/auto_examples/tree/plot_unveil_tree_structure.html
    to walk the tree structure.
    Computes various properties such as the depth of each node and
    whether or not it is a leaf.
Input -
n_nodes: number of nodes in the tree
children_left: array of length n_nodes. left children node indexes
children_right: array of length n_nodes. right children node indexes
:return:
is_leaf: array of length n_nodes with boolean whether node i is leaf or not,
node_depth: depth of each node from root to node. root is depth 0
"""
node_depth = np.zeros(shape=n_nodes, dtype=np.int64)
is_leaf = np.zeros(shape=n_nodes, dtype=bool)
stack = [(0, -1)] # seed is the root node id and its parent depth
while len(stack) > 0:
node_id, parent_depth = stack.pop() # (0,-1)
node_depth[node_id] = parent_depth + 1
# If we have a non-leaf node
if children_left[node_id] != children_right[node_id]:
stack.append((children_left[node_id], parent_depth + 1))
stack.append((children_right[node_id], parent_depth + 1))
else:
is_leaf[node_id] = True
return is_leaf, node_depth
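# A small usage sketch (assumes a fitted scikit-learn tree; this helper is not
# called anywhere in the script below and exists only as an example):
def _tree_traverse_demo():
    iris_data = load_iris()
    clf = tree.DecisionTreeClassifier(max_depth=2, random_state=0)
    clf = clf.fit(iris_data.data, iris_data.target)
    t = clf.tree_
    is_leaf, node_depth = tree_traverse(t.node_count, t.children_left, t.children_right)
    # node_depth[0] == 0 for the root; is_leaf[i] is True for terminal nodes
    return is_leaf, node_depth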
# def dectree_max_depth(tree):
# n_nodes = tree.node_count
# children_left = tree.children_left
# children_right = tree.children_right
#
# def walk(node_id):
# if (children_left[node_id] != children_right[node_id]):
# left_max = 1 + walk(children_left[node_id])
# right_max = 1 + walk(children_right[node_id])
# # if node_id<100: print(f"node {node_id}: {left_max}, {right_max}")
# return max(left_max, right_max)
# else: # leaf
# return 1
#
# root_node_id = 0
# return walk(root_node_id)
def dtreeviz(tree, X, y, precision=1, classnames=None, orientation="LR"):
def get_feature(i):
name = X.columns[feature[i]]
node_name = ''.join(c for c in name if c not in string.punctuation)+str(i)
node_name = re.sub("["+string.punctuation+string.whitespace+"]", '_', node_name)
return name, node_name
def round(v,ndigits=precision):
return format(v, '.' + str(ndigits) + 'f')
def dec_node_box(name, node_name, split):
html = """<table BORDER="0" CELLPADDING="0" CELLBORDER="0" CELLSPACING="0">
<tr>
<td colspan="3" align="center" cellspacing="0" cellpadding="0" bgcolor="#fefecd" border="1" sides="b"><font face="Helvetica" color="#444443" point-size="12">{name}</font></td>
</tr>
<tr>
<td colspan="3" cellpadding="1" border="0" bgcolor="#fefecd"></td>
</tr>
<tr>
<td cellspacing="0" cellpadding="0" bgcolor="#fefecd" border="1" sides="r" align="right"><font face="Helvetica" color="#444443" point-size="11">split</font></td>
<td cellspacing="0" cellpadding="0" border="0"></td>
<td cellspacing="0" cellpadding="0" bgcolor="#fefecd" align="left"><font face="Helvetica" color="#444443" point-size="11">{split}</font></td>
</tr>
</table>""".format(name=name, split=split)
return '{node_name} [shape=box label=<{label}>]\n'.format(label=html, node_name=node_name)
def dec_node(name, node_name, split):
html = """<font face="Helvetica" color="#444443" point-size="12">{name}<br/>@{split}</font>""".format(name=name, split=split)
return '{node_name} [shape=none label=<{label}>]\n'.format(label=html, node_name=node_name)
def prop_size(n):
# map to 0.03 to .35
margin_range = (0.03, 0.35)
if sample_count_range>0:
zero_to_one = (n - min_samples) / sample_count_range
return zero_to_one * (margin_range[1] - margin_range[0]) + margin_range[0]
else:
return margin_range[0]
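    # Worked example (illustrative numbers): with min_samples=5 and
    # max_samples=105, sample_count_range is 100, so a leaf holding 55 samples
    # maps to 0.03 + (50 / 100) * (0.35 - 0.03) = 0.19.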
# parsing the tree structure
n_nodes = tree.node_count # total nodes in the tree
children_left = tree.children_left # left children node index
children_right = tree.children_right # right children node index
feature = tree.feature # feature index at splits (-2 means leaf)
threshold = tree.threshold # split threshold values at given feature
is_leaf, node_depth = tree_traverse(n_nodes, children_left, children_right)
ranksep = ".22"
if orientation=="TD":
ranksep = ".35"
st = '\ndigraph G {splines=line;\n \
nodesep=0.1;\n \
ranksep=%s;\n \
rankdir=%s;\n \
node [margin="0.03" penwidth="0.5" width=.1, height=.1];\n \
edge [arrowsize=.4 penwidth="0.5"]\n' % (ranksep,orientation)
# Define decision nodes (non leaf nodes) as feature names
for i in range(n_nodes):
if not is_leaf[i]: # non leaf nodes
name, node_name = get_feature(i)
# st += dec_node_box(name, node_name, split=round(threshold[i]))
st += dec_node(name, node_name, split=round(threshold[i]))
# non leaf edges with > and <=
for i in range(n_nodes):
if not is_leaf[i]:
name, node_name = get_feature(i)
left, left_node_name = get_feature(children_left[i])
if is_leaf[children_left[i]]:
left = left_node_name ='leaf%d' % children_left[i]
right_name, right_node_name = get_feature(children_right[i])
if is_leaf[children_right[i]]:
right = right_node_name ='leaf%d' % children_right[i]
split = round(threshold[i])
left_html = '<font face="Helvetica" color="#444443" point-size="11"><</font>'
right_html = '<font face="Helvetica" color="#444443" point-size="11">≥</font>'
if orientation=="TD":
ldistance = ".9"
rdistance = ".9"
langle = "-28"
rangle = "28"
else:
ldistance = "1.3" # not used in LR mode; just label not taillable.
rdistance = "1.3"
langle = "-90"
rangle = "90"
blankedge = 'label=<<font face="Helvetica" color="#444443" point-size="1"> </font>>'
st += '{name} -> {left} [{blankedge} labelangle="{angle}" labeldistance="{ldistance}" {tail}label=<{label}>]\n'\
.format(label="",#left_html,
angle=langle,
ldistance=ldistance,
name=node_name,
blankedge = "",#blankedge,
tail="tail",#""tail" if orientation=="TD" else "",
left=left_node_name)
st += '{name} -> {right} [{blankedge} labelangle="{angle}" labeldistance="{rdistance}" {tail}label=<{label}>]\n' \
.format(label="",#right_html,
angle=rangle,
rdistance=rdistance,
name=node_name,
blankedge="",#blankedge,
tail="tail",# "tail" if orientation == "TD" else "",
right=right_node_name)
# find range of leaf sample count
leaf_sample_counts = [tree.n_node_samples[i] for i in range(n_nodes) if is_leaf[i]]
min_samples = min(leaf_sample_counts)
max_samples = max(leaf_sample_counts)
sample_count_range = max_samples - min_samples
print(leaf_sample_counts)
print("range is ", sample_count_range)
# is_classifier = hasattr(tree, 'n_classes')
    is_classifier = tree.n_classes[0] > 1  # regressor trees have n_classes == [1]
    n_classes = tree.n_classes[0]
    color_values = color_blind_friendly_colors[n_classes]
# color_values = [c+"EF" for c in color_values] # add alpha
# Define leaf nodes (after edges so >= edges shown properly)
for i in range(n_nodes):
if is_leaf[i]:
node_samples = tree.n_node_samples[i]
impurity = tree.impurity
if is_classifier:
counts = np.array(tree.value[i][0])
predicted_class = np.argmax(counts)
predicted = predicted_class
if classnames:
predicted = classnames[predicted_class]
ratios = counts / node_samples # convert counts to ratios totalling 1.0
ratios = [round(r,3) for r in ratios]
color_spec = ["{c};{r}".format(c=color_values[i],r=r) for i,r in enumerate(ratios)]
color_spec = ':'.join(color_spec)
if n_classes > max_class_colors:
color_spec = YELLOW
html = """<font face="Helvetica" color="black" point-size="12">{predicted}<br/> </font>""".format(predicted=predicted)
margin = prop_size(node_samples)
st += 'leaf{i} [height=0 width="0.4" margin="{margin}" style={style} fillcolor="{colors}" shape=circle label=<{label}>]\n' \
.format(i=i, label=html, name=node_name, colors=color_spec, margin=margin,
style='wedged' if n_classes<=max_class_colors else 'filled')
else:
value = tree.value[i][0]
html = """<font face="Helvetica" color="#444443" point-size="11">"""+round(value[0])+"""</font>"""
margin = prop_size(node_samples)
st += 'leaf{i} [height=0 width="0.4" margin="{margin}" style=filled fillcolor="{color}" shape=circle label=<{label}>]\n'\
.format(i=i, label=html, name=node_name, color=YELLOW, margin=margin)
# end of string
st = st+'}'
return st
def boston():
regr = tree.DecisionTreeRegressor(max_depth=4, random_state=666)
boston = load_boston()
print(boston.data.shape, boston.target.shape)
data = pd.DataFrame(boston.data)
    data.columns = boston.feature_names
regr = regr.fit(data, boston.target)
# st = dectreeviz(regr.tree_, data, boston.target)
st = dtreeviz(regr.tree_, data, boston.target, orientation="TD")
with open("/tmp/t3.dot", "w") as f:
f.write(st)
return st
def iris():
clf = tree.DecisionTreeClassifier(max_depth=4, random_state=666)
iris = load_iris()
print(iris.data.shape, iris.target.shape)
data = pd.DataFrame(iris.data)
data.columns = iris.feature_names
clf = clf.fit(data, iris.target)
# st = dectreeviz(clf.tree_, data, boston.target)
st = dtreeviz(clf.tree_, data, iris.target, orientation="TD"
, classnames=["setosa", "versicolor", "virginica"]
)
with open("/tmp/t3.dot", "w") as f:
f.write(st)
print(clf.tree_.value)
return st
# st = iris()
st = boston()
print(st)
graphviz.Source(st).view()
|
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This is a complete example where you have to push an order to Fulfil.IO. The
steps are:
1. Fetch inventory for the products that have been sold
2. Create new customer, address
3. Process the order.
"""
from datetime import date
from decimal import Decimal
from fulfil_client import Client
client = Client('<subdomain>', '<api_key>')
def get_warehouses():
"""
Return the warehouses in the system
"""
StockLocation = client.model('stock.location')
return StockLocation.find(
[('type', '=', 'warehouse')], # filter just warehouses
fields=['code', 'name'] # Get the code and name fields
)
def get_product_inventory(product_id, warehouse_ids):
"""
Return the product inventory in each location. The returned response
will look like::
{
12: { // Product ID
4: { // Location ID
'quantity_on_hand': 12.0,
'quantity_available': 8.0
},
5: { // Location ID
'quantity_on_hand': 8.0,
'quantity_available': 8.0
},
},
126: { // Product ID
4: { // Location ID
'quantity_on_hand': 16.0,
'quantity_available': 15.0
},
5: { // Location ID
'quantity_on_hand': 9.0,
'quantity_available': 8.0
},
}
}
Read more:
http://docs.fulfiliorestapi.apiary.io/#reference/product/product-inventory
"""
Product = client.model('product.product')
return Product.get_product_inventory(
[product_id], warehouse_ids
)[product_id]
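# A short helper sketch (illustrative only; create_order below uses its own
# inline loop) showing how the nested response documented above can be walked
# to pick the first warehouse with enough available stock:
def first_warehouse_with_stock(inventory, quantity):
    """Return the location id whose available quantity covers ``quantity``."""
    for location_id, quantities in inventory.items():
        if quantities['quantity_available'] >= quantity:
            return location_id
    return None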
def get_customer(code):
"""
Fetch a customer with the code.
Returns None if the customer is not found.
"""
Party = client.model('party.party')
results = Party.find([('code', '=', code)])
if results:
return results[0]['id']
def get_address(customer_id, data):
"""
    It is easier to fetch the addresses of the customer and then check them
    one by one. You could get fancier with a validation mechanism too.
"""
Address = client.model('party.address')
addresses = Address.find(
[('party', '=', customer_id)],
fields=[
'name', 'street', 'street_bis', 'city', 'zip',
'subdivision.code', 'country.code'
]
)
for address in addresses:
if (
address['name'] == data['name'] and
address['street'] == data['street'] and
address['street_bis'] == data['street_bis'] and
address['city'] == data['city'] and
address['zip'] == data['zip'] and
address['subdivision.code'].endswith(data['state']) and
address['country.code'] == data['country']):
return address['id']
def create_address(customer_id, data):
"""
Create an address and return the id
"""
Address = client.model('party.address')
Country = client.model('country.country')
Subdivision = client.model('country.subdivision')
country, = Country.find([('code', '=', data['country'])])
state, = Subdivision.find([
('code', 'ilike', '%-' + data['state']), # state codes are US-CA, IN-KL
('country', '=', country['id'])
])
address, = Address.create([{
'party': customer_id,
'name': data['name'],
'street': data['street'],
'street_bis': data['street_bis'],
'city': data['city'],
'zip': data['zip'],
'country': country['id'],
'subdivision': state['id'],
}])
return address['id']
def create_customer(name, email, phone):
"""
Create a customer with the name.
Then attach the email and phone as contact methods
"""
Party = client.model('party.party')
ContactMechanism = client.model('party.contact_mechanism')
party, = Party.create([{'name': name}])
# Bulk create the email and phone
ContactMechanism.create([
{'type': 'email', 'value': email, 'party': party},
{'type': 'phone', 'value': phone, 'party': party},
])
return party
def get_product(code):
"""
    Given a product code/sku, return the matching product record
"""
Product = client.model('product.product')
return Product.find(
[('code', '=', code)], # Filter
fields=['code', 'variant_name', 'cost_price']
)[0]
def create_order(order):
"""
Create an order on fulfil from order_details.
See the calling function below for an example of the order_details
"""
SaleOrder = client.model('sale.sale')
SaleOrderLine = client.model('sale.line')
# Check if customer exists, if not create one
customer_id = get_customer(order['customer']['code'])
if not customer_id:
customer_id = create_customer(
order['customer']['name'],
order['customer']['email'],
order['customer']['phone'],
)
    # Now check if there is a matching address
invoice_address = get_address(
customer_id,
order['invoice_address']
)
if not invoice_address:
invoice_address = create_address(
customer_id,
order['invoice_address']
)
# See if the shipping address exists, if not create it
shipment_address = get_address(
customer_id,
order['shipment_address']
)
if not shipment_address:
shipment_address = create_address(
customer_id,
order['shipment_address']
)
sale_order_id, = SaleOrder.create([{
'reference': order['number'],
'sale_date': order['date'],
'party': customer_id,
'invoice_address': invoice_address,
'shipment_address': shipment_address,
}])
# fetch inventory of all the products before we create lines
warehouses = get_warehouses()
warehouse_ids = [warehouse['id'] for warehouse in warehouses]
lines = []
for item in order['items']:
        # get the product id. We assume the product already exists.
        product = get_product(item['product'])['id']
# find the first location that has inventory
product_inventory = get_product_inventory(product, warehouse_ids)
for location, quantities in product_inventory.items():
if quantities['quantity_available'] >= item['quantity']:
break
lines.append({
'sale': sale_order_id,
'product': product,
'quantity': item['quantity'],
'unit_price': item['unit_price'],
'warehouse': location,
})
SaleOrderLine.create(lines)
SaleOrder.quote([sale_order_id])
SaleOrder.confirm([sale_order_id])
if __name__ == '__main__':
create_order({
'customer': {
'code': 'A1234',
'name': 'Sharoon Thomas',
'email': '[email protected]',
'phone': '650-999-9999',
},
'number': 'SO-12345', # an order number
'date': date.today(), # An order date
'invoice_address': {
'name': 'Sharoon Thomas',
'street': '444 Castro St.',
            'street_bis': 'STE 1200',
'city': 'Mountain View',
'zip': '94040',
'state': 'CA',
'country': 'US',
},
'shipment_address': {
'name': 'Office Manager',
'street': '444 Castro St.',
            'street_bis': 'STE 1200',
'city': 'Mountain View',
'zip': '94040',
'state': 'CA',
'country': 'US',
},
'items': [
{
'product': 'P123',
'quantity': 2,
'unit_price': Decimal('99'),
'description': 'P123 is a fabulous product',
},
{
'product': 'P456',
'quantity': 1,
'unit_price': Decimal('100'),
'description': 'Yet another amazing product',
},
]
})
|
|
# Copyright 2014 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from os_win import exceptions as os_win_exc
from oslo_config import cfg
from nova.objects import migrate_data as migrate_data_obj
from nova.tests.unit import fake_instance
from nova.tests.unit.virt.hyperv import test_base
from nova.virt.hyperv import livemigrationops
from nova.virt.hyperv import serialconsoleops
CONF = cfg.CONF
class LiveMigrationOpsTestCase(test_base.HyperVBaseTestCase):
"""Unit tests for the Hyper-V LiveMigrationOps class."""
def setUp(self):
super(LiveMigrationOpsTestCase, self).setUp()
self.context = 'fake_context'
self._livemigrops = livemigrationops.LiveMigrationOps()
self._livemigrops._livemigrutils = mock.MagicMock()
self._livemigrops._pathutils = mock.MagicMock()
self._livemigrops._block_dev_man = mock.MagicMock()
self._pathutils = self._livemigrops._pathutils
@mock.patch.object(serialconsoleops.SerialConsoleOps,
'stop_console_handler')
@mock.patch('nova.virt.hyperv.vmops.VMOps.copy_vm_dvd_disks')
def _test_live_migration(self, mock_copy_dvd_disk,
mock_stop_console_handler,
side_effect=None,
shared_storage=False,
migrate_data_received=True,
migrate_data_version='1.1'):
mock_instance = fake_instance.fake_instance_obj(self.context)
mock_post = mock.MagicMock()
mock_recover = mock.MagicMock()
mock_copy_logs = self._livemigrops._pathutils.copy_vm_console_logs
fake_dest = mock.sentinel.DESTINATION
mock_check_shared_inst_dir = (
self._pathutils.check_remote_instances_dir_shared)
mock_check_shared_inst_dir.return_value = shared_storage
self._livemigrops._livemigrutils.live_migrate_vm.side_effect = [
side_effect]
if migrate_data_received:
migrate_data = migrate_data_obj.HyperVLiveMigrateData()
if migrate_data_version != '1.0':
migrate_data.is_shared_instance_path = shared_storage
else:
migrate_data = None
if side_effect is os_win_exc.HyperVException:
self.assertRaises(os_win_exc.HyperVException,
self._livemigrops.live_migration,
self.context, mock_instance, fake_dest,
mock_post, mock_recover,
mock.sentinel.block_migr,
migrate_data)
mock_recover.assert_called_once_with(self.context, mock_instance,
fake_dest,
migrate_data)
else:
self._livemigrops.live_migration(context=self.context,
instance_ref=mock_instance,
dest=fake_dest,
post_method=mock_post,
recover_method=mock_recover,
block_migration=(
mock.sentinel.block_migr),
migrate_data=migrate_data)
post_call_args = mock_post.call_args_list
self.assertEqual(1, len(post_call_args))
post_call_args_list = post_call_args[0][0]
self.assertEqual((self.context, mock_instance,
fake_dest, mock.sentinel.block_migr),
post_call_args_list[:-1])
# The last argument, the migrate_data object, should be created
# by the callee if not received.
migrate_data_arg = post_call_args_list[-1]
self.assertIsInstance(
migrate_data_arg,
migrate_data_obj.HyperVLiveMigrateData)
self.assertEqual(shared_storage,
migrate_data_arg.is_shared_instance_path)
if not migrate_data_received or migrate_data_version == '1.0':
mock_check_shared_inst_dir.assert_called_once_with(fake_dest)
else:
self.assertFalse(mock_check_shared_inst_dir.called)
mock_stop_console_handler.assert_called_once_with(mock_instance.name)
if not shared_storage:
mock_copy_logs.assert_called_once_with(mock_instance.name,
fake_dest)
mock_copy_dvd_disk.assert_called_once_with(mock_instance.name,
fake_dest)
else:
self.assertFalse(mock_copy_logs.called)
self.assertFalse(mock_copy_dvd_disk.called)
mock_live_migr = self._livemigrops._livemigrutils.live_migrate_vm
mock_live_migr.assert_called_once_with(mock_instance.name,
fake_dest)
def test_live_migration(self):
self._test_live_migration(migrate_data_received=False)
def test_live_migration_old_migrate_data_version(self):
self._test_live_migration(migrate_data_version='1.0')
def test_live_migration_exception(self):
self._test_live_migration(side_effect=os_win_exc.HyperVException)
def test_live_migration_shared_storage(self):
self._test_live_migration(shared_storage=True)
@mock.patch('nova.virt.hyperv.volumeops.VolumeOps.get_disk_path_mapping')
@mock.patch('nova.virt.hyperv.imagecache.ImageCache.get_cached_image')
@mock.patch('nova.virt.hyperv.volumeops.VolumeOps.connect_volumes')
def _test_pre_live_migration(self, mock_initialize_connection,
mock_get_cached_image,
mock_get_disk_path_mapping,
phys_disks_attached=True):
mock_instance = fake_instance.fake_instance_obj(self.context)
mock_instance.image_ref = "fake_image_ref"
mock_get_disk_path_mapping.return_value = (
mock.sentinel.disk_path_mapping if phys_disks_attached
else None)
bdman = self._livemigrops._block_dev_man
mock_is_boot_from_vol = bdman.is_boot_from_volume
mock_is_boot_from_vol.return_value = None
CONF.set_override('use_cow_images', True)
self._livemigrops.pre_live_migration(
self.context, mock_instance,
block_device_info=mock.sentinel.BLOCK_INFO,
network_info=mock.sentinel.NET_INFO)
check_config = (
self._livemigrops._livemigrutils.check_live_migration_config)
check_config.assert_called_once_with()
mock_is_boot_from_vol.assert_called_once_with(
mock.sentinel.BLOCK_INFO)
mock_get_cached_image.assert_called_once_with(self.context,
mock_instance)
mock_initialize_connection.assert_called_once_with(
mock.sentinel.BLOCK_INFO)
mock_get_disk_path_mapping.assert_called_once_with(
mock.sentinel.BLOCK_INFO)
if phys_disks_attached:
livemigrutils = self._livemigrops._livemigrutils
livemigrutils.create_planned_vm.assert_called_once_with(
mock_instance.name,
mock_instance.host,
mock.sentinel.disk_path_mapping)
def test_pre_live_migration(self):
self._test_pre_live_migration()
def test_pre_live_migration_invalid_disk_mapping(self):
self._test_pre_live_migration(phys_disks_attached=False)
@mock.patch('nova.virt.hyperv.volumeops.VolumeOps.disconnect_volumes')
def _test_post_live_migration(self, mock_disconnect_volumes,
shared_storage=False):
migrate_data = migrate_data_obj.HyperVLiveMigrateData(
is_shared_instance_path=shared_storage)
self._livemigrops.post_live_migration(
self.context, mock.sentinel.instance,
mock.sentinel.block_device_info,
migrate_data)
mock_disconnect_volumes.assert_called_once_with(
mock.sentinel.block_device_info)
mock_get_inst_dir = self._pathutils.get_instance_dir
if not shared_storage:
mock_get_inst_dir.assert_called_once_with(
mock.sentinel.instance.name,
create_dir=False, remove_dir=True)
else:
self.assertFalse(mock_get_inst_dir.called)
def test_post_block_migration(self):
self._test_post_live_migration()
def test_post_live_migration_shared_storage(self):
self._test_post_live_migration(shared_storage=True)
@mock.patch.object(migrate_data_obj, 'HyperVLiveMigrateData')
def test_check_can_live_migrate_destination(self, mock_migr_data_cls):
mock_instance = fake_instance.fake_instance_obj(self.context)
migr_data = self._livemigrops.check_can_live_migrate_destination(
mock.sentinel.context, mock_instance, mock.sentinel.src_comp_info,
mock.sentinel.dest_comp_info)
mock_check_shared_inst_dir = (
self._pathutils.check_remote_instances_dir_shared)
mock_check_shared_inst_dir.assert_called_once_with(mock_instance.host)
self.assertEqual(mock_migr_data_cls.return_value, migr_data)
self.assertEqual(mock_check_shared_inst_dir.return_value,
migr_data.is_shared_instance_path)
@mock.patch('nova.virt.hyperv.vmops.VMOps.plug_vifs')
def test_post_live_migration_at_destination(self, mock_plug_vifs):
self._livemigrops.post_live_migration_at_destination(
self.context, mock.sentinel.instance,
network_info=mock.sentinel.NET_INFO,
block_migration=mock.sentinel.BLOCK_INFO)
mock_plug_vifs.assert_called_once_with(mock.sentinel.instance,
mock.sentinel.NET_INFO)
|
|
import itertools
import logging.config
import os
import unittest
from botocore.exceptions import ClientError
from docker.errors import APIError
from mock import MagicMock, Mock, call, patch
from requests.exceptions import ConnectionError, SSLError
from testfixtures import LogCapture
from captain import exceptions
from captain.config import Config
from captain.docker_controller import DockerController, DockerControllerException
from captain.tests.util_mock import ClientMock
minimal_config = {
'SLUG_RUNNER_COMMAND': 'foo',
'SLUG_RUNNER_IMAGE': 'foo',
'AWS_DOCKER_HOST_TAG_VALUE': 'foo',
'DOCKER_PROXY_USERNAME': 'foo',
'DOCKER_PROXY_PASSWORD': 'foo'
}
class TestDockerController(unittest.TestCase):
def setUp(self):
with patch.dict(os.environ, minimal_config):
self.config = Config()
self.config.docker_nodes = ["http://node-1/", "http://node-2/", "http://node-3/"]
self.config.slug_runner_command = "runner command"
self.config.slug_runner_image = "runner/image"
self.config.slug_runner_version = "0.0.73"
self.config.docker_gc_grace_period = 86400
self.config.slots_per_node = 10
self.config.slot_memory_mb = 128
self.config.default_slots_per_instance = 2
self.config.aws_docker_host_tag_name = None
self.config.aws_docker_host_tag_value = None
self.docker_proxy_username = None
self.docker_proxy_password = None
self.docker_node_resolver = MagicMock()
self.docker_node_resolver.get_docker_nodes = MagicMock(return_value=["http://node-1/", "http://node-2/", "http://node-3/"])
@patch('docker.Client')
def test_returns_summary_of_instances(self, docker_client):
# given
(docker_conn1, docker_conn2, docker_conn3) = ClientMock().mock_two_docker_nodes(docker_client)
# when
docker_controller = DockerController(self.config, self.docker_node_resolver)
summary = docker_controller.get_instance_summary()
# then
self.assertEqual(3, summary['total_instances'])
self.assertEqual(1, summary['apps']['ers-checking-frontend-27'])
self.assertEqual(2, summary['apps']['paye'])
@patch('docker.Client')
def test_logs_exception_when_docker_nodes_config_is_bad(self, docker_client):
"""
        With three nodes configured, one of which is bad, an error should be logged and only 2 nodes should be returned
"""
# given
(docker_conn1, docker_conn2, docker_conn3) = ClientMock().mock_two_docker_nodes(docker_client)
expected_nodes = ['node-1', 'node-2']
# when
# Note the very subtle ']' which results in the URL being considered as an invalid IPv6 URL.
self.config.docker_nodes = ["http://node-1/", "http://node-2/", "http://node-3]"]
self.docker_node_resolver.get_docker_nodes = MagicMock(return_value=self.config.docker_nodes)
# given
with LogCapture(names='captain.docker_controller', level=logging.ERROR) as l:
docker_controller = DockerController(self.config, self.docker_node_resolver)
l.check(
('captain.docker_controller', 'ERROR', "{'message': 'Could not obtain connection to docker node: http://node-3]. Exception: Invalid IPv6 URL'}")
)
nodes = docker_controller.get_nodes()
self.assertTrue(len(nodes) == 2)
for node in nodes:
self.assertIn(node['id'], expected_nodes)
@patch('docker.Client')
def test_returns_all_instances_with_ports(self, docker_client):
# given
(docker_conn1, docker_conn2, docker_conn3) = ClientMock().mock_two_docker_nodes(docker_client)
# when
docker_controller = DockerController(self.config, self.docker_node_resolver)
# get_instances is async and order isn't guaranteed, sort it for the tests
instances = sorted(docker_controller.get_instances(), key=lambda i: i["id"])
# then
self.assertEqual(3, instances.__len__())
instance1 = instances[0]
self.assertEqual("656ca7c307d178", instance1["id"])
self.assertEqual("ers-checking-frontend-27", instance1["app"])
self.assertEqual("node-1", instance1["node"])
self.assertEqual(9225, instance1["port"])
self.assertEqual("https://host/ers-checking-frontend_27.tgz", instance1["slug_uri"])
self.assertEqual(2, instance1["environment"].__len__())
self.assertEqual("-Dapplication.secret=H7dVw$PlJiD)^U,oa4TA1pa]pT:4ETLqbL&2P=n6T~p,A*}^.Y46@PQOV~9(B09Hc]t7-hsf~&@w=zH -Dapplication.log=INFO -Dlogger.resource=/application-json-logger.xml -Dhttp.port=8080 -Dgovuk-tax.Prod.google-analytics.token=UA-00000000-0 -Drun.mode=Prod -Dsession.secure=true -Dsession.httpOnly=true -Dcookie.encryption.key=fqpLDZ4smuDsekHkrEBlCA==", instance1["environment"]["HMRC_CONFIG"])
self.assertEqual("-Xmx256m -Xms256m", instance1["environment"]["JAVA_OPTS"])
instance2 = instances[2]
self.assertEqual("eba8bea2600029", instance2["id"])
self.assertEqual("paye", instance2["app"])
self.assertEqual("node-1", instance2["node"])
self.assertEqual(9317, instance2["port"])
self.assertEqual("https://host/paye_216.tgz", instance2["slug_uri"])
self.assertEqual(2, instance2["environment"].__len__())
self.assertEqual("-Dapplication.log=INFO -Drun.mode=Prod -Dlogger.resource=/application-json-logger.xml -Dhttp.port=8080", instance2["environment"]["HMRC_CONFIG"])
self.assertEqual("-Xmx256m -Xms256m", instance2["environment"]["JAVA_OPTS"])
instance3 = instances[1]
self.assertEqual("80be2a9e62ba00", instance3["id"])
self.assertEqual("paye", instance3["app"])
self.assertEqual("node-2", instance3["node"])
self.assertEqual(9317, instance3["port"])
self.assertEqual(2, instance3["environment"].__len__())
self.assertEqual("-Dapplication.log=INFO -Drun.mode=Prod -Dlogger.resource=/application-json-logger.xml -Dhttp.port=8080", instance3["environment"]["HMRC_CONFIG"])
self.assertEqual("-Xmx256m -Xms256m", instance3["environment"]["JAVA_OPTS"])
# One container stopped
docker_conn1.remove_container.assert_has_calls([call("381587e2978216")])
# One container with FinishedAt time of 0 removed
docker_conn1.remove_container.assert_has_calls([call("3815178hgdasf6")])
self.assertEqual(docker_conn1.remove_container.call_count, 2)
self.assertEqual(docker_conn2.remove_container.call_count, 1)
# jh23899fg00029 doesn't have captain ports defined and should be ignored.
self.assertFalse([i for i in instances if i["id"] == "jh23899fg00029"])
self.assertRaises(ConnectionError, docker_conn3.containers)
@patch('docker.Client')
def test_starts_instance_throws_correct_exception(self, docker_client):
# given
mock_client_node1 = ClientMock().mock_one_docker_node(docker_client)
mock_client_node1.create_container.side_effect = APIError("docker-error", Mock(content="bar"))
docker_controller = DockerController(self.config, self.docker_node_resolver)
# then
with self.assertRaises(DockerControllerException):
# when
docker_controller.start_instance("foo", "foo", "node-1", None, {}, None)
try:
docker_controller.start_instance("foo", "foo", "node-1", None, {}, None)
except DockerControllerException as e:
self.assertEqual("docker-error", e.message)
@patch('docker.Client')
@patch('uuid.uuid4')
def test_starts_instance(self, uuid_mock, docker_client):
# given
(mock_client_node1, mock_client_node2, mock_client_node3) = ClientMock().mock_two_docker_nodes(docker_client)
uuid_mock.return_value = 'SOME-UUID'
# when
docker_controller = DockerController(self.config, self.docker_node_resolver)
started_instance = docker_controller.start_instance(
"paye", "https://host/paye_216.tgz", "node-1", None,
{'HMRC_CONFIG': "-Dapplication.log=INFO -Drun.mode=Prod -Dlogger.resource=/application-json-logger.xml -Dhttp.port=8080",
'JAVA_OPTS': "-Xmx256m -Xms256m"}, 2)
# then
self.assertEqual("eba8bea2600029", started_instance["id"])
self.assertEqual("paye", started_instance["app"])
self.assertEqual("node-1", started_instance["node"])
self.assertEqual(9317, started_instance["port"])
self.assertEqual("https://host/paye_216.tgz", started_instance["slug_uri"])
self.assertEqual(2, started_instance["environment"].__len__())
self.assertEqual("-Dapplication.log=INFO -Drun.mode=Prod -Dlogger.resource=/application-json-logger.xml -Dhttp.port=8080", started_instance["environment"]["HMRC_CONFIG"])
self.assertEqual("-Xmx256m -Xms256m", started_instance["environment"]["JAVA_OPTS"])
self.assertEqual(2, started_instance["slots"])
mock_client_node1.create_container.assert_called_with(
image="{}:{}".format(self.config.slug_runner_image, str(self.config.slug_runner_version)),
command=self.config.slug_runner_command,
ports=[8080],
environment={
'PORT': '8080',
'SLUG_URL': 'https://host/paye_216.tgz',
'HMRC_CONFIG': '-Dapplication.log=INFO -Drun.mode=Prod -Dlogger.resource=/application-json-logger.xml -Dhttp.port=8080',
'JAVA_OPTS': '-Xmx256m -Xms256m'
},
detach=True,
name="paye_SOME-UUID",
cpu_shares=2,
hostname=None,
mem_limit=256 * 1024 * 1024)
docker_controller.start_instance(
"paye", "http://host/paye-216-slug.tgz", "node-1", None,
{'HMRC_CONFIG': "-Dapplication.log=INFO -Drun.mode=Prod -Dlogger.resource=/application-json-logger.xml -Dhttp.port=8080",
'JAVA_OPTS': "-Xmx256m -Xms256m"})
mock_client_node1.create_container.assert_called_with(
image="{}:{}".format(self.config.slug_runner_image, str(self.config.slug_runner_version)),
command=self.config.slug_runner_command,
ports=[8080],
environment={
'PORT': '8080',
'SLUG_URL': 'http://host/paye-216-slug.tgz',
'HMRC_CONFIG': '-Dapplication.log=INFO -Drun.mode=Prod -Dlogger.resource=/application-json-logger.xml -Dhttp.port=8080',
'JAVA_OPTS': '-Xmx256m -Xms256m'
},
detach=True,
name="paye_SOME-UUID",
cpu_shares=2,
mem_limit=256 * 1024 * 1024,
hostname=None)
mock_client_node1.start.assert_called_with("eba8bea2600029", port_bindings={8080: None})
self.assertFalse(mock_client_node2.create_container.called)
self.assertFalse(mock_client_node2.start.called)
@patch('docker.Client')
@patch('uuid.uuid4')
def test_starts_instance_on_specific_slug_runner_version(self, uuid_mock, docker_client):
# given
(mock_client_node1, mock_client_node2, mock_client_node3) = ClientMock().mock_two_docker_nodes(docker_client)
uuid_mock.return_value = 'SOME-OTHER-UUID'
# when
docker_controller = DockerController(self.config, self.docker_node_resolver)
docker_controller.start_instance(
"paye", "https://host/paye_216.tgz", "node-1", None,
{'HMRC_CONFIG': "-Dapplication.log=INFO -Drun.mode=Prod -Dlogger.resource=/application-json-logger.xml -Dhttp.port=8080",
'JAVA_OPTS': "-Xmx256m -Xms256m"}, 2, None, "0.0.99")
# then
mock_client_node1.create_container.assert_called_with(
image="{}:{}".format(self.config.slug_runner_image, "0.0.99"),
command=self.config.slug_runner_command,
ports=[8080],
environment={
'PORT': '8080',
'SLUG_URL': 'https://host/paye_216.tgz',
'HMRC_CONFIG': '-Dapplication.log=INFO -Drun.mode=Prod -Dlogger.resource=/application-json-logger.xml -Dhttp.port=8080',
'JAVA_OPTS': '-Xmx256m -Xms256m'
},
detach=True,
name="paye_SOME-OTHER-UUID",
cpu_shares=2,
hostname=None,
mem_limit=256 * 1024 * 1024)
@patch('docker.Client')
def test_stops_instance(self, docker_client):
# given
(mock_client_node1, mock_client_node2, mock_client_node3) = ClientMock().mock_two_docker_nodes(docker_client)
# when
docker_controller = DockerController(self.config, self.docker_node_resolver)
result = docker_controller.stop_instance("80be2a9e62ba00")
# then
self.assertTrue(result)
self.assertFalse(mock_client_node1.stop.called)
        self.assertNotIn(call("80be2a9e62ba00"), mock_client_node1.remove_container.call_args_list)
mock_client_node2.stop.assert_called_with('80be2a9e62ba00')
mock_client_node2.remove_container.assert_called_with('80be2a9e62ba00', force=True)
@patch('docker.Client')
def test_stops_instance_even_if_remove_container_fails(self, docker_client):
# given
(mock_client_node1, mock_client_node2, mock_client_node3) = ClientMock().mock_two_docker_nodes(docker_client)
# when
docker_controller = DockerController(self.config, self.docker_node_resolver)
result = docker_controller.stop_instance("80be2a9e62ba00")
# then
self.assertTrue(result)
self.assertFalse(mock_client_node1.stop.called)
        self.assertNotIn(call('80be2a9e62ba00'), mock_client_node1.remove_container.call_args_list)
mock_client_node2.stop.assert_called_with('80be2a9e62ba00')
mock_client_node2.remove_container.assert_called_with('80be2a9e62ba00', force=True)
@patch('docker.Client')
def test_returns_false_when_trying_to_stop_nonexisting_instance(self, docker_client):
# given
(mock_client_node1, mock_client_node2, mock_client_node3) = ClientMock().mock_two_docker_nodes(docker_client)
# when
docker_controller = DockerController(self.config, self.docker_node_resolver)
result = docker_controller.stop_instance("nonexisting-instance")
# then
self.assertFalse(result)
self.assertFalse(mock_client_node1.stop.called)
        self.assertNotIn(call('nonexisting-instance'), mock_client_node1.remove_container.call_args_list)
self.assertFalse(mock_client_node2.stop.called)
        self.assertNotIn(call('nonexisting-instance'), mock_client_node2.remove_container.call_args_list)
@patch('docker.Client')
def test_over_capacity(self, docker_client):
# given
(mock_client_node1, mock_client_node2, mock_client_node3) = ClientMock().mock_two_docker_nodes(docker_client)
# when
docker_controller = DockerController(self.config, self.docker_node_resolver)
# Force an over capacity error
current_slot_count = sum([i["slots"] for i in docker_controller.get_instances() if i['node'] == 'node-1'])
self.assertTrue(current_slot_count != self.config.slots_per_node)
# then
self.assertRaises(exceptions.NodeOutOfCapacityException,
docker_controller.start_instance, "paye", "http://host/paye-216-slug.tgz", "node-1", None,
{'HMRC_CONFIG': "-Dapplication.log=INFO -Drun.mode=Prod -Dlogger.resource=/application-json-logger.xml -Dhttp.port=8080",
'JAVA_OPTS': "-Xmx256m -Xms256m"}, self.config.slots_per_node - current_slot_count + 1)
@patch('docker.Client')
def test_get_node_details(self, docker_client):
(mock_client_node1, mock_client_node2, mock_client_node3) = ClientMock().mock_two_docker_nodes(docker_client)
docker_controller = DockerController(self.config, self.docker_node_resolver)
self.assertRaises(exceptions.NoSuchNodeException, docker_controller.get_node, "bum-node-1")
node_details = docker_controller.get_node("node-1")
self.assertDictEqual(
{"id": "node-1",
"slots": {"free": 6, "used": 4, "total": 10}, "state": "healthy"},
node_details
)
@patch('docker.Client')
def test_aws_exceptions_are_not_bubbled_up_when_refreshing_docker_nodes(self, docker_client):
"""
Directly tests that the private method '__refresh_docker_node_connections' will catch
ClientErrors that it encounters and will not re-throw them.
"""
# Given
ClientMock().mock_one_docker_node(docker_client)
docker_controller = DockerController(self.config, self.docker_node_resolver)
# When
self.docker_node_resolver.get_docker_nodes = Mock(side_effect=ClientError({}, 'Request limit exceeded.'))
# Absence of an exception being raised confirms the desired functionality here.
# Then
with LogCapture(names='captain.docker_controller', level=logging.WARNING) as l:
docker_controller._DockerController__refresh_docker_node_connections()
l.check(
('captain.docker_controller', 'WARNING',
"{'message': 'Unable to refresh docker nodes. Exception: An error occurred (Unknown) when calling the Request limit exceeded. operation: Unknown'}")
)
@patch('docker.Client')
def test_node_connections_are_unaffected_by_aws_exceptions(self, docker_client):
"""
        We've focused on get_node here, but what we're really testing is that
        the stateful node_connections list is unaffected when '__refresh_docker_node_connections'
        encounters an exception. This is important as these errors
        were previously being bubbled up to Flask/Gunicorn and were causing threads to die.
"""
ClientMock().mock_one_docker_node(docker_client)
# Initially populate the internal docker_nodes list.
docker_controller = DockerController(self.config, self.docker_node_resolver)
node1_details = {"id": "node-1", "slots": {"free": 6, "used": 4, "total": 10}, "state": "healthy"}
# Prove that the internal node_connections list was populated as we expected.
returned_node_details = docker_controller.get_node("node-1")
self.assertDictEqual(node1_details, returned_node_details)
# Simulate an AWS failure.
self.docker_node_resolver.get_docker_nodes = Mock(side_effect=ClientError({}, 'Request limit exceeded.'))
# Assert that we still get the originally stored node back
# i.e. the AWS failure didn't affect the stores node_connections list.
returned_node_details = docker_controller.get_node("node-1")
self.assertDictEqual(node1_details, returned_node_details)
@patch('docker.Client')
def test_node_ping_retry_is_eventually_successful(self, docker_client):
# Given
mock_client_node1 = ClientMock().mock_one_docker_node(docker_client)
docker_controller = DockerController(self.config, self.docker_node_resolver)
# When
mock_client_node1.ping = Mock(side_effect=[ConnectionError("node-1 is unhealthy"),
ConnectionError("node-1 is unhealthy"),
None])
node_details = docker_controller.get_node('node-1')
# Then
self.assertDictEqual(
{"id": "node-1",
"slots": {"free": 6, "used": 4, "total": 10}, "state": "healthy"},
node_details
)
@patch('docker.Client')
def test_node_ping_retry_exhaustion(self, docker_client):
# Given
mock_client_node1 = ClientMock().mock_one_docker_node(docker_client)
docker_controller = DockerController(self.config, self.docker_node_resolver)
# When
mock_client_node1.ping = Mock(side_effect=[ConnectionError("node-1 is unhealthy"),
ConnectionError("node-1 is unhealthy"),
ConnectionError("node-1 is unhealthy")])
node_details = docker_controller.get_node('node-1')
# Then
self.assertDictEqual(
{"id": "node-1",
"slots": {"free": 0, "used": 0, "total": 0}, "state": "ConnectionError('node-1 is unhealthy',)"},
node_details
)
@patch('docker.Client')
def test_get_logs(self, docker_client):
(mock_client_node1, mock_client_node2, mock_client_node3) = ClientMock().mock_two_docker_nodes(docker_client)
docker_controller = DockerController(self.config, self.docker_node_resolver)
self.assertRaises(exceptions.NoSuchInstanceException, docker_controller.get_logs, "non-existant")
instance_logs = docker_controller.get_logs("80be2a9e62ba00")
self.assertEqual(
({"msg": "this is line 1\n"}, {"msg": "this is line 2\n"}),
tuple(itertools.islice(instance_logs, 2)))
instance_logs = docker_controller.get_logs("eba8bea2600029", follow=True)
self.assertEqual(
({"msg": "this is line 1"}, {"msg": "this is line 2"}, {"msg": "this is line 3"}),
tuple(itertools.islice(instance_logs, 3)))
@patch('docker.Client')
def test_get_nodes_when_exceptions_are_raised(self, docker_client):
with LogCapture(names='captain.docker_controller', level=logging.ERROR) as l:
# Given
ClientMock().mock_one_docker_node(docker_client)
self.config.docker_nodes = ["http://node-1/"]
self.docker_node_resolver.get_docker_nodes = MagicMock(return_value=["http://node-1/"])
docker_controller = DockerController(self.config, self.docker_node_resolver)
docker_controller.get_node = Mock(side_effect=SSLError('BOOM!'))
# When
nodes = docker_controller.get_nodes()
# Then
self.assertTrue(len(nodes) == 0)
l.check(
('captain.docker_controller', 'ERROR', '{\'message\': "Getting details for node-1 raised an exception of type \'SSLError\': BOOM!"}')
)
@patch('docker.Client')
def test_get_nodes(self, docker_client):
(mock_client_node1, mock_client_node2, mock_client_node3) = ClientMock().mock_two_docker_nodes(docker_client)
docker_controller = DockerController(self.config, self.docker_node_resolver)
nodes = docker_controller.get_nodes()
self.assertTrue(len(nodes) == 3)
self.assertIn(
{"id": "node-1",
"slots": {"free": 6, "used": 4, "total": 10}, "state": "healthy"},
nodes
)
@patch('docker.Client')
def test_gc(self, docker_client):
# given
(docker_conn1, docker_conn2, docker_conn3) = ClientMock().mock_two_docker_nodes(docker_client)
# when
docker_controller = DockerController(self.config, self.docker_node_resolver)
# trigger gc
docker_controller.get_instances()
# then
# 61c2695fd82a is a freshly created but not yet started container and so shouldn't be gc'd
self.assertNotIn(call("61c2695fd82a"), docker_conn2.start.mock_calls)
# 61c2695fd82b is an old container with epoch start and exit times and should be gc'd
docker_conn2.remove_container.assert_has_calls([call("61c2695fd82b")])
@patch('docker.Client')
def test_change_in_number_of_configured_nodes(self, docker_client):
"""
When the number of configured nodes is changed this should be reflected in the call to get nodes
"""
docker_controller = DockerController(self.config, self.docker_node_resolver)
# Given 3 nodes from the default mock in Setup
nodes = docker_controller.get_nodes()
self.assertTrue(len(nodes) == 3)
# Override the default mock in Setup to return 2 different nodes
self.docker_node_resolver.get_docker_nodes = MagicMock(
return_value=["http://node-3/", "http://node-4/"])
nodes = docker_controller.get_nodes()
self.assertEqual(len(nodes), 2, "Expected number of nodes weren't returned")
self.assertListEqual(sorted([item["id"] for item in nodes]), ["node-3", "node-4"], "Expected node names weren't returned")
|
|
#!/usr/bin/env python
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
tf.logging.set_verbosity(tf.logging.INFO)
CSV_COLUMNS = "fare_amount,dayofweek,hourofday,pickuplon,pickuplat,dropofflon,dropofflat,passengers".split(",")
LABEL_COLUMN = "fare_amount"
DEFAULTS = [[0.0], ["Sun"], [0], [-74.0], [40.0], [-74.0], [40.7], [1.0]]
# These are the raw input columns, and will be provided for prediction also
INPUT_COLUMNS = [
# Define features
tf.feature_column.categorical_column_with_vocabulary_list(
key="dayofweek",
vocabulary_list=["Sun", "Mon", "Tues", "Wed", "Thu", "Fri", "Sat"]),
tf.feature_column.categorical_column_with_identity(key="hourofday", num_buckets=24),
# Numeric columns
tf.feature_column.numeric_column(key="pickuplat"),
tf.feature_column.numeric_column(key="pickuplon"),
tf.feature_column.numeric_column(key="dropofflat"),
tf.feature_column.numeric_column(key="dropofflon"),
tf.feature_column.numeric_column(key="passengers"),
# Engineered features that are created in the input_fn
tf.feature_column.numeric_column(key="latdiff"),
tf.feature_column.numeric_column(key="londiff"),
tf.feature_column.numeric_column(key="euclidean")
]
# Build the estimator
def build_estimator(model_dir, nbuckets, hidden_units):
"""
Build an estimator starting from INPUT COLUMNS.
These include feature transformations and synthetic features.
The model is a wide-and-deep model.
"""
# Input columns
(dayofweek, hourofday, plat, plon, dlat, dlon, pcount, latdiff, londiff, euclidean) = INPUT_COLUMNS
# Bucketize the lats & lons
latbuckets = np.linspace(38.0, 42.0, nbuckets).tolist()
lonbuckets = np.linspace(-76.0, -72.0, nbuckets).tolist()
b_plat = tf.feature_column.bucketized_column(plat, latbuckets)
b_dlat = tf.feature_column.bucketized_column(dlat, latbuckets)
b_plon = tf.feature_column.bucketized_column(plon, lonbuckets)
b_dlon = tf.feature_column.bucketized_column(dlon, lonbuckets)
# Feature cross
ploc = tf.feature_column.crossed_column([b_plat, b_plon], nbuckets * nbuckets)
dloc = tf.feature_column.crossed_column([b_dlat, b_dlon], nbuckets * nbuckets)
pd_pair = tf.feature_column.crossed_column([ploc, dloc], nbuckets**4)
day_hr = tf.feature_column.crossed_column([dayofweek, hourofday], 24 * 7)
# Wide columns and deep columns.
wide_columns = [
# Feature crosses
dloc, ploc, pd_pair,
day_hr,
# Sparse columns
dayofweek, hourofday,
# Anything with a linear relationship
pcount
]
deep_columns = [
# Embedding_column to "group" together
tf.feature_column.embedding_column(pd_pair, 10),
tf.feature_column.embedding_column(day_hr, 10),
# Numeric columns
plat, plon, dlat, dlon,
latdiff, londiff, euclidean
]
# Setting the checkpoint interval to be much lower for this task
run_config = tf.estimator.RunConfig(
save_checkpoints_secs=30, keep_checkpoint_max=3)
estimator = tf.estimator.DNNLinearCombinedRegressor(
model_dir=model_dir,
linear_feature_columns=wide_columns,
dnn_feature_columns=deep_columns,
dnn_hidden_units=hidden_units,
config=run_config)
# Add extra evaluation metric for hyperparameter tuning
estimator = tf.contrib.estimator.add_metrics(estimator, add_eval_metrics)
return estimator
# Create feature engineering function that will be used in the input and serving input functions
def add_engineered(features):
# This is how you can do feature engineering in TensorFlow
lat1 = features["pickuplat"]
lat2 = features["dropofflat"]
lon1 = features["pickuplon"]
lon2 = features["dropofflon"]
latdiff = (lat1 - lat2)
londiff = (lon1 - lon2)
# Set features for distance with sign that indicates direction
features["latdiff"] = latdiff
features["londiff"] = londiff
dist = tf.sqrt(latdiff * latdiff + londiff * londiff)
features["euclidean"] = dist
return features
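# Worked example (illustrative, not in the original file): for pickuplat=40.75,
# dropofflat=40.70, pickuplon=-73.99 and dropofflon=-73.95, add_engineered() adds
#   latdiff = 0.05, londiff = -0.04, euclidean = sqrt(0.05**2 + 0.04**2) ~= 0.064
# so the three engineered INPUT_COLUMNS are populated before the model sees the batch.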
def add_eval_metrics(labels, predictions):
pred_values = predictions["predictions"]
return {
"rmse": tf.metrics.root_mean_squared_error(labels, pred_values)
}
# Create serving input function to be able to serve predictions
def serving_input_fn():
feature_placeholders = {
# All the real-valued columns
column.name: tf.placeholder(dtype=tf.float32, shape=[None]) for column in INPUT_COLUMNS[2:7]
}
feature_placeholders["dayofweek"] = tf.placeholder(dtype=tf.string, shape=[None])
feature_placeholders["hourofday"] = tf.placeholder(dtype=tf.int32, shape=[None])
features = add_engineered(feature_placeholders.copy())
return tf.estimator.export.ServingInputReceiver(features, feature_placeholders)
# Create input function to load data into datasets
def read_dataset(filename, mode, batch_size=512):
def _input_fn():
def decode_csv(value_column):
columns = tf.decode_csv(value_column, record_defaults=DEFAULTS)
features = dict(zip(CSV_COLUMNS, columns))
label = features.pop(LABEL_COLUMN)
return add_engineered(features), label
# Create list of files that match pattern
file_list = tf.gfile.Glob(filename)
# Create dataset from file list
dataset = tf.data.TextLineDataset(file_list).map(decode_csv)
if mode == tf.estimator.ModeKeys.TRAIN:
num_epochs = None # indefinitely
dataset = dataset.shuffle(buffer_size=10 * batch_size)
else:
num_epochs = 1 # end-of-input after this
dataset = dataset.repeat(num_epochs).batch(batch_size)
batch_features, batch_labels = dataset.make_one_shot_iterator().get_next()
return batch_features, batch_labels
return _input_fn
# Create estimator train and evaluate function
def train_and_evaluate(args):
tf.summary.FileWriterCache.clear() # ensure filewriter cache is clear for TensorBoard events file
estimator = build_estimator(args["output_dir"], args["nbuckets"], args["hidden_units"])
train_spec = tf.estimator.TrainSpec(
input_fn=read_dataset(
filename=args["train_data_paths"],
mode=tf.estimator.ModeKeys.TRAIN,
batch_size=args["train_batch_size"]),
max_steps=args["train_steps"])
exporter = tf.estimator.LatestExporter("exporter", serving_input_fn)
eval_spec = tf.estimator.EvalSpec(
input_fn=read_dataset(
filename=args["eval_data_paths"],
mode=tf.estimator.ModeKeys.EVAL,
batch_size=args["eval_batch_size"]),
steps=100,
exporters=exporter)
tf.estimator.train_and_evaluate(estimator, train_spec, eval_spec)
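# Usage sketch (assumed, not part of the original module): this file is normally
# imported by a task.py that parses command-line flags into a dict and calls
# train_and_evaluate(). The keys below are the ones the function reads; the bucket
# name, paths and hyperparameter values are placeholders only.
#
#   import model  # assuming this file is packaged as model.py
#
#   model.train_and_evaluate({
#       "output_dir": "gs://my-bucket/taxi_trained",
#       "nbuckets": 10,
#       "hidden_units": [64, 32],
#       "train_data_paths": "gs://my-bucket/taxi-train*.csv",
#       "eval_data_paths": "gs://my-bucket/taxi-valid*.csv",
#       "train_batch_size": 512,
#       "eval_batch_size": 512,
#       "train_steps": 5000,
#   })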
|
|
# SVD module
from lxml import etree as et
arights = ('read-only', 'read-write', 'write-only', 'writeOnce', 'read-writeOnce')
__version__ = '1.0.1'
default_xml = ('<device><name>NEW_DEVICE</name>'
'<version>1.0</version>'
'<description>Default CMSIS device</description>'
'<addressUnitBits>8</addressUnitBits>'
'<width>32</width>'
'<size>0x20</size>'
'<access>read-write</access>'
'<resetValue>0x00000000</resetValue>'
'<resetMask>0xFFFFFFFF</resetMask>'
'<peripherals><peripheral>'
'<name>NEW_PERIPHERAL</name>'
'<groupName>DEVICE PERIPHERALS</groupName>'
'<baseAddress>0xDEADBEEF</baseAddress>'
'<addressBlock><offset>0x00</offset><size>0x400</size><usage>registers</usage></addressBlock>'
'<interrupt><name>NEW_INTERRUPT</name><description>Default interrupt</description><value>1</value></interrupt>'
'<registers><register>'
'<name>NEW_REGISTER</name><displayName>NEW_REGISTER</displayName>'
'<description>Default register</description>'
'<addressOffset>0x00</addressOffset>'
'<fields><field><name>NEW_BITFIELD</name><description>Default bitfield</description>'
'<bitOffset>0</bitOffset><bitWidth>1</bitWidth></field></fields>'
'</register></registers>'
'</peripheral></peripherals></device>'
)
def str_cleanup(s):
    try:
        # Normalise to ASCII and collapse whitespace; the decode() keeps this
        # working on Python 3, where encode() returns bytes.
        s = s.encode('ascii', errors='ignore').decode('ascii')
        return ' '.join(s.split())
    except (AttributeError, UnicodeError):
        return None
def toInt(val, fault=None):
    try:
        return int(val, 0)
    except (TypeError, ValueError):
        return fault
def get_from_xml(node, attr):
    try:
        # node.find() returns None when the tag is missing
        return node.find(attr).text
    except AttributeError:
        return None
class basedata(object):
def __init__(self, parent=None):
self.parent = parent
self._name = 'new'
self._desc = None
self._rsize = None
self.rvalue = None
self._access = None
@property
def name(self):
return self._name
    @name.setter
    def name(self, val):
        try:
            s = val.encode('ascii', errors='ignore').decode('ascii')
            self._name = '_'.join(s.split())
        except (AttributeError, UnicodeError):
            pass
@property
def desc(self):
return self._desc
@desc.setter
def desc(self, val):
self._desc = str_cleanup(val)
@property
def access(self):
return self._access
@access.setter
def access(self, val):
self._access = val if val in arights else None
@property
def rsize(self):
return '0x{0:02X}'.format(self._rsize) if self._rsize else None
@rsize.setter
def rsize(self, val):
self._rsize = toInt(val)
@property
def vsize(self):
if self._rsize:
return self.rsize
else:
if self.parent:
return self.parent.vsize
else:
return 0
@property
def vvalue(self):
if self.rvalue:
return self.rvalue
else:
if self.parent:
return self.parent.vvalue
else:
return None
@property
def vaccess(self):
if self.access:
return self.access
else:
if self.parent:
return self.parent.vaccess
else:
return 'undefined'
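# Note on the v* properties above (added commentary): vsize, vvalue and vaccess
# resolve the *effective* size, reset value and access rights by falling back to
# the parent object, mirroring how CMSIS-SVD lets a register inherit defaults from
# its peripheral and a peripheral inherit them from the device.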
class field(basedata):
def __init__(self, parent, xml=None):
basedata.__init__(self, parent)
self._bitw = 1
self._bito = 0
if xml is not None:
self.fromXML(xml)
@property
def bitw(self):
return str(self._bitw)
@bitw.setter
def bitw(self, val):
self._bitw = toInt(val, self._bitw)
@property
def bito(self):
return str(self._bito)
@bito.setter
def bito(self, val):
self._bito = toInt(val, self._bito)
@property
def valid(self):
if self.name and self.desc and self.bito and self.bitw:
if (self._bito + self._bitw) <= int(self.vsize, 0):
return True
return False
def fromXML(self, node):
self.name = get_from_xml(node, 'name')
self.desc = get_from_xml(node, 'description')
self.bitw = get_from_xml(node, 'bitWidth')
self.bito = get_from_xml(node, 'bitOffset')
self.access = get_from_xml(node, 'access')
def toXML(self, node=None):
if node is None:
node = et.Element('field')
et.SubElement(node, 'name').text = self.name
if self.desc:
et.SubElement(node, 'description').text = self.desc
et.SubElement(node, 'bitOffset').text = self.bito
et.SubElement(node, 'bitWidth').text = self.bitw
if self.access:
et.SubElement(node, 'access').text = self.access
return node
class register(basedata):
def __init__(self, parent, xml=None):
basedata.__init__(self, parent)
self._dispname = None
self._offset = 0
self.fields = []
if xml is not None:
self.fromXML(xml)
@property
def dispname(self):
return self._dispname
@dispname.setter
def dispname(self, val):
self._dispname = str_cleanup(val)
@property
def offset(self):
return '0x{0:04X}'.format(self._offset)
@offset.setter
def offset(self, val):
self._offset = toInt(val, self._offset)
@property
def valid(self):
return (self.name and self.desc)
def fromXML(self, node):
del self.fields[:]
self.name = get_from_xml(node, 'name')
self.dispname = get_from_xml(node, 'displayName')
self.desc = get_from_xml(node, 'description')
self.offset = get_from_xml(node, 'addressOffset')
self.rsize = get_from_xml(node, 'size')
self.rvalue = get_from_xml(node, 'resetValue')
self.access = get_from_xml(node, 'access')
for x in node.findall('./fields/field'):
self.fields.append(field(self, x))
self.sortField()
def toXML(self, node=None):
if node is None:
node = et.Element('register')
et.SubElement(node, 'name').text = self.name
if self.dispname:
et.SubElement(node, 'displayName').text = self.dispname
if self.desc:
et.SubElement(node, 'description').text = self.desc
et.SubElement(node, 'addressOffset').text = self.offset
if self.rsize:
et.SubElement(node, 'size').text = self.rsize
if self.access:
et.SubElement(node, 'access').text = self.access
if self.rvalue:
et.SubElement(node, 'resetValue').text = self.rvalue
if self.fields:
f = et.SubElement(node, 'fields')
for x in self.fields:
x.toXML(et.SubElement(f, 'field'))
return node
def newField(self, name=''):
r = 0
for x in sorted(self.fields, key=lambda x: x._bito, reverse=False):
if r < x._bito:
break
r = x._bito + x._bitw
if r < int(self.vsize, 0):
f = field(self)
f._bito = r
if name:
f.name = name
return f
else:
return None
def addField(self, field):
field.parent = self
self.fields.append(field)
self.sortField()
def sortField(self):
self.fields.sort(key=lambda x: x._bito, reverse=True)
def delField(self, item):
self.fields.remove(item)
def validate(self, callback):
names = []
cap = int(self.vsize, 0)
ofs = 0
for x in sorted(self.fields, key=lambda x: x._bito):
if x.name in names:
if callback('Duplicated bitfield name %s in %s' % (x.name, self.name)):
return True
elif x._bito + x._bitw > cap:
if callback('Bitfield %s is out of bounds in %s' % (x.name, self.name)):
return True
elif ofs > x._bito:
                if callback('Bitfields %s and %s overlap in %s' % (x.name, names[-1], self.name)):
return True
elif x.vaccess == 'undefined':
if callback('Undefined access level for %s in %s' % (x.name, self.name)):
return True
else:
names.append(x.name)
ofs = x._bito + x._bitw
return False
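    # Contract of validate() (added commentary): the callback receives a readable
    # message for each problem and returns True to abort the check; validate()
    # returns True as soon as a callback asks to stop, otherwise False.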
class interrupt(basedata):
def __init__(self, parent, xml=None):
basedata.__init__(self, parent)
self._value = 0
if xml is not None:
self.fromXML(xml)
@property
def value(self):
return str(self._value)
@value.setter
def value(self, val):
self._value = toInt(val, self._value)
@property
def valid(self):
return (self.name and self.desc)
def fromXML(self, node):
self.name = get_from_xml(node, 'name')
self.desc = get_from_xml(node, 'description')
self.value = get_from_xml(node, 'value')
def toXML(self, node=None):
if node is None:
node = et.Element('interrupt')
et.SubElement(node, 'name').text = self.name
if self.desc:
et.SubElement(node, 'description').text = self.desc
et.SubElement(node, 'value').text = self.value
return node
class peripheral(basedata):
def __init__(self, parent, xml=None):
basedata.__init__(self, parent)
self.ref = None
self.group = None
self._address = 0
self._aoffset = 0
self._asize = 0x400
self.interrupts = []
self.registers = []
if xml is not None:
self.fromXML(xml)
@property
def asize(self):
return '0x{0:04X}'.format(self._asize)
@asize.setter
def asize(self, val):
self._asize = toInt(val, self._asize)
@property
def aoffset(self):
return '0x{0:08X}'.format(self._aoffset)
@aoffset.setter
def aoffset(self, val):
self._aoffset = toInt(val, self._aoffset)
@property
def address(self):
return '0x{0:08X}'.format(self._address)
@address.setter
def address(self, val):
self._address = toInt(val, self._address)
def fromXML(self, node):
del self.interrupts[:]
del self.registers[:]
self.name = get_from_xml(node, 'name')
if 'derivedFrom' in node.attrib:
ref = node.attrib['derivedFrom']
for x in self.parent.peripherals:
if x.name == ref:
self.ref = x
break
else:
self.ref = None
self.desc = get_from_xml(node, 'description')
self.group = get_from_xml(node, 'groupName')
self.address = get_from_xml(node, 'baseAddress')
self.aoffset = get_from_xml(node, './addressBlock/offset')
self.asize = get_from_xml(node, './addressBlock/size')
for x in node.findall('./interrupt'):
self.interrupts.append(interrupt(self, x))
for x in node.findall('./registers/register'):
self.registers.append(register(self, x))
self.registers.sort(key=lambda x: x._offset, reverse=False)
def toXML(self, node=None):
if node is None:
node = et.Element('peripheral')
if self.ref:
node.set('derivedFrom', self.ref.name)
et.SubElement(node, 'name').text = self.name
if self.group:
et.SubElement(node, 'groupName').text = self.group
if self.desc:
et.SubElement(node, 'description').text = self.desc
et.SubElement(node, 'baseAddress').text = self.address
a = et.SubElement(node, 'addressBlock')
et.SubElement(a, 'offset').text = self.aoffset
et.SubElement(a, 'size').text = self.asize
et.SubElement(a, 'usage').text = 'registers'
for x in self.interrupts:
x.toXML(et.SubElement(node, 'interrupt'))
if self.registers:
r = et.SubElement(node, 'registers')
for x in self.registers:
x.toXML(et.SubElement(r, 'register'))
return node
def newRegister(self, name=''):
o = 0
        sz = int(self.vsize, 0) // 8  # register width in bytes
for x in sorted(self.registers, key=lambda x: x._offset, reverse=False):
if o < x._offset:
break
o = x._offset + sz
if o < self._asize:
r = register(self)
r._offset = o
if name:
r.name = name
return r
else:
return None
def setRef(self, ref):
if ref:
for x in self.parent.peripherals:
if x == self:
return False
if x.name == ref:
self.ref = x
return True
return False
else:
self.ref = None
return True
def addRegister(self, item):
item.parent = self
self.registers.append(item)
self.registers.sort(key=lambda x: x._offset, reverse=False)
def delRegister(self, item):
self.registers.remove(item)
def newInterrupt(self, name=''):
ni = interrupt(self)
if name:
ni.name = name
return ni
def addInterrupt(self, reg):
if not next((i for i in self.interrupts if i.value == reg.value), None):
self.interrupts.append(reg)
def delInterrupt(self, item):
self.interrupts.remove(item)
def validate(self, callback):
names = []
ofs = 0
for x in sorted(self.registers, key=lambda x: x._offset, reverse=False):
            rsize = int(x.vsize, 0) // 8
if x.name in names:
if callback('Duplicated register name %s in %s' % (x.name, self.name)):
return True
elif x._offset < ofs:
                if callback('Registers %s and %s overlap in %s' % (x.name, names[-1], self.name)):
return True
elif x._offset + rsize > self._asize:
if callback('Register %s is out of bounds in %s' % (x.name, self.name)):
return True
elif x.vaccess == 'undefined':
if callback('Undefined access level for %s in %s' % (x.name, self.name)):
return True
else:
if x.validate(callback):
return True
names.append(x.name)
ofs = x._offset + rsize
return False
class device(basedata):
def __init__(self, xml=None):
basedata.__init__(self, None)
self.vendor = None
self.width = '32'
self.rsize = '0x20'
self.rvalue = '0x00000000'
self.rmask = '0xFFFFFFFF'
self.access = 'read-write'
self.peripherals = []
if xml is not None:
self.fromXML(xml)
    def fromString(self, text):
        xml = et.fromstring(text)
        self.fromXML(xml)
def fromXML(self, node):
del self.peripherals[:]
self.vendor = get_from_xml(node, 'vendor')
self.name = get_from_xml(node, 'name')
self.desc = get_from_xml(node, 'description')
self.width = get_from_xml(node, 'width')
self.rsize = get_from_xml(node, 'size')
self.access = get_from_xml(node, 'access')
self.rvalue = get_from_xml(node, 'resetValue')
self.rmask = get_from_xml(node, 'resetMask')
for x in node.findall('./peripherals/peripheral'):
self.peripherals.append(peripheral(self, x))
def toXML(self, node=None):
if node is None:
node = et.Element('export_device')
if self.vendor:
et.SubElement(node, 'vendor').text = self.vendor
et.SubElement(node, 'name').text = self.name
et.SubElement(node, 'version').text = '1.0'
et.SubElement(node, 'description').text = self.desc
et.SubElement(node, 'addressUnitBits').text = '8'
et.SubElement(node, 'width').text = self.width
et.SubElement(node, 'size').text = self.rsize
et.SubElement(node, 'access').text = self.access
et.SubElement(node, 'resetValue').text = self.rvalue
et.SubElement(node, 'resetMask').text = self.rmask
p = et.SubElement(node, 'peripherals')
for per in self.peripherals:
per.toXML(et.SubElement(p, 'peripheral'))
return node
def newPeripheral(self, name=''):
p = peripheral(self)
p.name = name
return p
def delPeripheral(self, item):
self.peripherals.remove(item)
def addPeripheral(self, item):
item.parent = self
self.peripherals.append(item)
def movePeripheral(self, dest, item):
uindex = 1 + self.peripherals.index(dest)
iindex = self.peripherals.index(item)
if iindex != uindex:
self.peripherals.insert(uindex, self.peripherals.pop(iindex))
def validate(self, callback):
names = []
vectors = []
ofs = 0
for x in sorted(self.peripherals, key=lambda x: x._address + x._aoffset):
if x.name in names:
if callback('Duplicated peripheral name %s' % (x.name)):
return True
if ofs > x._address + x._aoffset:
                if callback('Peripherals %s and %s overlap' % (x.name, names[-1])):
return True
if x.validate(callback):
return True
names.append(x.name)
ofs = x._address + x._aoffset + x._asize
for i in x.interrupts:
if i.value in vectors:
if callback('Duplicated interrupt vector %s' % (i.name)):
return True
vectors.append(i.value)
return False
def load(self, name):
xml = et.parse(name)
self.fromXML(xml)
def save(self, name):
xs = 'http://www.w3.org/2001/XMLSchema-instance'
xml = et.Element('device', schemaVersion='1.1',
nsmap={'xs': xs},
attrib={'{' + xs + '}noNamespaceSchemaLocation': 'CMSIS-SVD_Schema_1_1.xsd'})
xml.addprevious(et.Comment('generated by SVD editor ' + __version__))
self.toXML(xml)
tree = et.ElementTree(xml)
tree.write(name, encoding='utf-8', xml_declaration=True, standalone=True, pretty_print=True)
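# Minimal usage sketch (added; not part of the original module). It builds a device
# from the bundled default_xml template, validates it, and writes it back out as an
# SVD file. The output filename is an arbitrary example.
if __name__ == '__main__':
    def _report(msg):
        print(msg)
        return False  # keep validating after the first problem
    dev = device()
    dev.fromString(default_xml)
    dev.validate(_report)
    dev.save('new_device.svd')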
|
|
# -*- coding:utf-8 -*-
import numpy as np
def quick_sort(a):
quick_sort_call(a, 0, len(a) - 1)
def quick_sort_call(a, sp, ep):
if sp >= ep:
return
pivot = partition(a, sp, ep)
quick_sort_call(a, sp, pivot - 1)
quick_sort_call(a, pivot + 1, ep)
def partition(a, sp, ep):
i = j = sp
pivot_val = a[ep]
while j < ep:
if a[j] < pivot_val:
a[i], a[j] = a[j], a[i]
i += 1
j += 1
a[ep], a[i] = a[i], a[ep]
return i
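# Illustrative trace (added): partition() uses the last element as the pivot
# (Lomuto scheme) and returns the pivot's final index, e.g.
#   a = [3, 8, 2, 5, 4]; partition(a, 0, 4) -> 2 and a becomes [3, 2, 4, 5, 8]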
def merge_sort(a):
merge_sort_call(a, 0, len(a) - 1)
def merge_sort_call(a, sp, ep):
if sp >= ep:
return
mp = sp + (ep - sp) // 2
merge_sort_call(a, sp, mp)
merge_sort_call(a, mp + 1, ep)
merge(a, sp, mp, ep)
def merge(a, sp, mp, ep):
i = sp
j = mp + 1
tmp = []
while i <= mp and j <= ep:
if a[i] < a[j]:
tmp.append(a[i])
i += 1
else:
tmp.append(a[j])
j += 1
s, e = i, mp
if j <= ep:
s, e = j, ep
while s <= e:
tmp.append(a[s])
s += 1
for i in range(0, ep - sp + 1):
a[i + sp] = tmp[i]
def selective_sort(a):
for i in range(len(a)):
min_pos = i
min_val = a[i]
for j in range(i + 1, len(a)):
if a[j] < min_val:
min_pos = j
min_val = a[j]
a[min_pos], a[i] = a[i], a[min_pos]
def inserting_sort(a):
for i in range(1, len(a)):
val = a[i]
pos = i
for j in range(i, 0, -1):
if a[j - 1] > val:
a[j] = a[j - 1]
pos = j - 1
else:
break
a[pos] = val
def bubble_sort(a):
for i in range(len(a)):
is_sort = True
for j in range(len(a) - i - 1):
if a[j] > a[j + 1]:
a[j], a[j + 1] = a[j + 1], a[j]
is_sort = False
if is_sort:
break
def simple_binary_search(a, val):
low = 0
high = len(a) - 1
while low <= high:
mid = low + (high - low) // 2
if a[mid] == val:
return mid
elif a[mid] < val:
low = mid + 1
else:
high = mid - 1
return -1
def recursive_binary_search(a, low, high, val):
mid = low + (high - low ) // 2
if low > high:
return -1
if a[mid] == val:
return mid
elif a[mid] < val:
return recursive_binary_search(a, mid + 1, high, val)
else:
return recursive_binary_search(a, low, mid - 1, val)
def find_sqrt_by_binary_search(val):
    # Binary search on the real axis to a 1e-6 tolerance. For val < 4 the root is
    # larger than val / 2, so widen the upper bound to cover that case too.
    low = 0
    high = val / 2.0 if val >= 4 else max(val, 1.0)
    sqrt = low
    while low <= high:
        sqrt = low + (high - low) / 2
        p2 = sqrt ** 2
        if (val - 1e-6) <= p2 <= (val + 1e-6):
            return sqrt
        elif p2 < val:
            low = sqrt + 1e-6
        else:
            high = sqrt - 1e-6
    return sqrt
def find_first_by_binary_search(a, val):
low = 0
high = len(a) - 1
while low <= high:
        mid = low + ((high - low) >> 1)
if a[mid] == val:
while mid > 0:
if a[mid - 1] == val:
mid -= 1
else:
return mid
return mid
elif a[mid] < val:
low = mid + 1
else:
high = mid - 1
return -1
def find_first_by_binary_search_v2(a, val):
low, mid, high = 0, 0, len(a) - 1
while low <= high:
        mid = low + ((high - low) >> 1)
if a[mid] > val:
high = mid - 1
elif a[mid] < val:
low = mid + 1
else:
if mid == 0 or a[mid - 1] != val:
return mid
else:
high = mid - 1
return -1
def find_last_by_binary_search(a, val):
low = 0
high = len(a) - 1
while low <= high:
        mid = low + ((high - low) >> 1)
if a[mid] == val:
while mid < (len(a) - 1):
if a[mid + 1] == val:
mid = mid + 1
else:
return mid
return mid
elif a[mid] < val:
low = mid + 1
else:
high = mid - 1
return -1
def find_last_by_binary_search_v2(a, val):
low, mid, high = 0, 0, len(a) - 1
while low <= high:
        mid = low + ((high - low) >> 1)
if a[mid] > val:
high = mid - 1
elif a[mid] < val:
low = mid + 1
else:
if (mid == len(a) - 1) or a[mid + 1] != val:
return mid
else:
low = mid + 1
return -1
def find_first_geq_by_binary_search(a, val):
low, mid, high = 0, 0, len(a) - 1
while low <= high:
        mid = low + ((high - low) >> 1)
if a[mid] == val:
while mid < (len(a) -1):
if a[mid + 1] != val:
return mid + 1
mid += 1
return mid
elif a[mid] < val:
low = mid + 1
else:
high = mid - 1
    # When val itself is absent, the loop leaves low at the insertion point,
    # i.e. the index of the first element >= val.
    if low < len(a):
        return low
    return -1
def find_first_geq_by_binary_search_v2(a, val):
low, mid, high = 0, 0, len(a) - 1
while low <= high:
        mid = low + ((high - low) >> 1)
if a[mid] >= val:
if mid == 0 or a[mid - 1] < val:
return mid
high = mid - 1
else:
low = mid + 1
return -1
def find_last_leq_by_binary_search(a, val):
low, mid, high = 0, 0, len(a) - 1
while low <= high:
        mid = low + ((high - low) >> 1)
if a[mid] == val:
while mid > 0:
if a[mid - 1] != val:
return mid - 1
mid -= 1
return mid
elif a[mid] < val:
low = mid + 1
else:
            high = mid - 1
    return -1
def find_last_leq_by_binary_search_v2(a, val):
low, mid, high = 0, 0, len(a) - 1
while low <= high:
        mid = low + ((high - low) >> 1)
if a[mid] <= val:
if mid == len(a) -1 or a[mid + 1] > val:
return mid
low = mid + 1
else:
high = mid - 1
return -1
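# Worked example for the *_v2 variants (added, illustrative):
#   a = [1, 2, 3, 3, 3, 9]
#   find_first_by_binary_search_v2(a, 3)     -> 2   (leftmost 3)
#   find_last_by_binary_search_v2(a, 3)      -> 4   (rightmost 3)
#   find_first_geq_by_binary_search_v2(a, 4) -> 5   (first element >= 4 is 9)
#   find_last_leq_by_binary_search_v2(a, 0)  -> -1  (no element <= 0)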
if __name__ == '__main__':
#data = np.random.randint(0, 40, 50)
#print(simple_binary_search(data, 10))
#print(recursive_binary_search(data,0, len(data) - 1, 10))
#print(find_sqrt_by_binary_search(9999999))
print('-' * 20 + 'normal test' + '-' * 20)
data = [1,2, 32, 34, 12, 1, 42, 38, 9, 8, 34, 12 ,34, 19]
print(data)
bubble_sort(data)
print(data)
print('find 34: ')
print("The first occurence position: %d" %find_first_by_binary_search_v2(data, 34))
print("The last occurence position: %d" %find_last_by_binary_search_v2(data, 34))
print("The first larger occurence position: %d" %find_first_geq_by_binary_search_v2(data, 34))
print("The last smaller occurence position: %d" %find_last_leq_by_binary_search_v2(data, 34))
print('find 25: ')
print("The first occurence position: %d" %find_first_by_binary_search_v2(data,25 ))
print("The last occurence position: %d" %find_last_by_binary_search_v2(data, 25))
print("The first larger occurence position: %d" %find_first_geq_by_binary_search_v2(data, 25))
print("The last smaller occurence position: %d" %find_last_leq_by_binary_search_v2(data, 25))
print('find 50: ')
print("The first occurence position: %d" %find_first_by_binary_search_v2(data,50 ))
print("The last occurence position: %d" %find_last_by_binary_search_v2(data, 50))
print("The first larger occurence position: %d" %find_first_geq_by_binary_search_v2(data, 50))
print("The last smaller occurence position: %d" %find_last_leq_by_binary_search_v2(data, 50))
print('-' * 20 + 'boundary test' + '-' * 20)
print('high boundary test')
data = [1,2, 32, 34, 12, 1, 12, 20, 9, 8, 34, 12 ,34, 19]
print(data)
bubble_sort(data)
print(data)
print("The first occurence position: %d" %find_first_by_binary_search_v2(data, 34))
print("The last occurence position: %d" %find_last_by_binary_search_v2(data, 34))
print("The first larger occurence position: %d" %find_first_geq_by_binary_search_v2(data, 34))
print("The last smaller occurence position: %d" %find_last_leq_by_binary_search_v2(data, 34))
print('low boundary test')
data = [1,2, 32, 34, 12, 1, 12, 20, 9, 8, 34, 12 ,34, 19]
print(data)
bubble_sort(data)
print(data)
print("The first occurence position: %d" %find_first_by_binary_search_v2(data, 1))
print("The last occurence position: %d" %find_last_by_binary_search_v2(data, 1))
print("The first larger occurence position: %d" %find_first_geq_by_binary_search_v2(data, 1))
print("The last smaller occurence position: %d" %find_last_leq_by_binary_search_v2(data,1 ))
|
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Bidirectional wrapper."""
# pylint: disable=g-direct-tensorflow-import
import copy
from absl.testing import parameterized
import keras
from keras.engine import base_layer_utils
from keras.layers import core
from keras.layers.rnn.cell_wrappers import ResidualWrapper
from keras.testing_infra import test_combinations
from keras.testing_infra import test_utils
from keras.utils import generic_utils
import numpy as np
import tensorflow.compat.v2 as tf
from tensorflow.python.framework import test_util as tf_test_util
from tensorflow.python.training.tracking import util as trackable_util
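# For orientation (added commentary, not part of the original tests): the layer under
# test is typically used as
#   keras.layers.Bidirectional(keras.layers.LSTM(8), merge_mode='concat')
# which runs the wrapped RNN over the time axis forwards and backwards and merges the
# two outputs according to merge_mode ('concat' is the default).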
class _RNNCellWithConstants(keras.layers.Layer):
def __init__(self, units, constant_size, **kwargs):
self.units = units
self.state_size = units
self.constant_size = constant_size
super(_RNNCellWithConstants, self).__init__(**kwargs)
def build(self, input_shape):
self.input_kernel = self.add_weight(
shape=(input_shape[-1], self.units),
initializer='uniform',
name='kernel')
self.recurrent_kernel = self.add_weight(
shape=(self.units, self.units),
initializer='uniform',
name='recurrent_kernel')
self.constant_kernel = self.add_weight(
shape=(self.constant_size, self.units),
initializer='uniform',
name='constant_kernel')
self.built = True
def call(self, inputs, states, constants):
[prev_output] = states
[constant] = constants
h_input = keras.backend.dot(inputs, self.input_kernel)
h_state = keras.backend.dot(prev_output, self.recurrent_kernel)
h_const = keras.backend.dot(constant, self.constant_kernel)
output = h_input + h_state + h_const
return output, [output]
def get_config(self):
config = {'units': self.units, 'constant_size': self.constant_size}
base_config = super(_RNNCellWithConstants, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
class _ResidualLSTMCell(keras.layers.LSTMCell):
def call(self, inputs, states, training=None):
output, states = super(_ResidualLSTMCell, self).call(inputs, states)
return output + inputs, states
class _AddOneCell(keras.layers.AbstractRNNCell):
"""Increments inputs and state by one on each call."""
@property
def state_size(self):
return 1
@property
def output_size(self):
return 1
def call(self, inputs, state):
inputs = tf.reduce_mean(inputs, axis=1, keepdims=True)
outputs = inputs + 1.0
state = tf.nest.map_structure(lambda t: t + 1.0, state)
return outputs, state
@test_combinations.generate(test_combinations.combine(mode=['graph', 'eager']))
class BidirectionalTest(tf.test.TestCase, parameterized.TestCase):
@parameterized.parameters(['sum', 'concat', 'ave', 'mul'])
def test_bidirectional(self, mode):
rnn = keras.layers.SimpleRNN
samples = 2
dim = 2
timesteps = 2
output_dim = 2
with self.cached_session():
x = np.random.random((samples, timesteps, dim))
target_dim = 2 * output_dim if mode == 'concat' else output_dim
y = np.random.random((samples, target_dim))
# test with Sequential model
model = keras.models.Sequential()
model.add(
keras.layers.Bidirectional(
rnn(output_dim), merge_mode=mode, input_shape=(timesteps, dim)))
model.compile(optimizer='rmsprop', loss='mse')
model.fit(x, y, epochs=1, batch_size=1)
# check whether the model variables are present in the
# trackable list of objects
checkpointed_object_ids = {
id(o) for o in trackable_util.list_objects(model)
}
for v in model.variables:
self.assertIn(id(v), checkpointed_object_ids)
# test compute output shape
ref_shape = model.layers[-1].output.shape
shape = model.layers[-1].compute_output_shape(
(None, timesteps, dim))
self.assertListEqual(shape.as_list(), ref_shape.as_list())
# test config
model.get_config()
model = keras.models.model_from_json(model.to_json())
model.summary()
def test_bidirectional_invalid_init(self):
x = tf.constant(np.zeros((1, 1)).astype('float32'))
with self.assertRaisesRegex(
ValueError,
'Please initialize `Bidirectional` layer with a '
'`tf.keras.layers.Layer` instance.'):
keras.layers.Bidirectional(x)
def test_bidirectional_weight_loading(self):
rnn = keras.layers.SimpleRNN
samples = 2
dim = 2
timesteps = 2
output_dim = 2
with self.cached_session():
x = np.random.random((samples, timesteps, dim))
model = keras.models.Sequential()
model.add(
keras.layers.Bidirectional(
rnn(output_dim), input_shape=(timesteps, dim)))
y_ref = model.predict(x)
weights = model.layers[-1].get_weights()
model.layers[-1].set_weights(weights)
y = model.predict(x)
self.assertAllClose(y, y_ref)
def test_bidirectional_stacked(self):
# test stacked bidirectional layers
rnn = keras.layers.SimpleRNN
samples = 2
dim = 2
timesteps = 2
output_dim = 2
mode = 'sum'
with self.cached_session():
x = np.random.random((samples, timesteps, dim))
target_dim = 2 * output_dim if mode == 'concat' else output_dim
y = np.random.random((samples, target_dim))
model = keras.models.Sequential()
model.add(
keras.layers.Bidirectional(
rnn(output_dim, return_sequences=True),
merge_mode=mode,
input_shape=(timesteps, dim)))
model.add(keras.layers.Bidirectional(rnn(output_dim), merge_mode=mode))
model.compile(loss='mse', optimizer='sgd')
model.fit(x, y, epochs=1, batch_size=1)
# test with functional API
inputs = keras.layers.Input((timesteps, dim))
output = keras.layers.Bidirectional(
rnn(output_dim), merge_mode=mode)(inputs)
model = keras.models.Model(inputs, output)
model.compile(loss='mse', optimizer='sgd')
model.fit(x, y, epochs=1, batch_size=1)
def test_bidirectional_statefulness(self):
# Bidirectional and stateful
def run_test():
rnn = keras.layers.SimpleRNN
samples = 2
dim = 2
timesteps = 2
output_dim = 2
mode = 'sum'
with self.cached_session():
x = np.random.random((samples, timesteps, dim))
target_dim = 2 * output_dim if mode == 'concat' else output_dim
y = np.random.random((samples, target_dim))
inputs = keras.layers.Input(batch_shape=(1, timesteps, dim))
bidi_rnn = keras.layers.Bidirectional(
rnn(output_dim, stateful=True), merge_mode=mode)
self.assertTrue(bidi_rnn.stateful)
output = bidi_rnn(inputs)
model = keras.models.Model(inputs, output)
y_1 = model.predict(x, batch_size=1)
model.reset_states()
y_2 = model.predict(x, batch_size=1)
self.assertAllClose(y_1, y_2)
model.compile(loss='mse', optimizer='sgd')
model.fit(x, y, epochs=1, batch_size=1)
if tf.executing_eagerly():
run_test()
else:
tf_test_util.enable_output_all_intermediates(run_test)()
@parameterized.parameters(['sum', 'mul', 'ave', 'concat', None])
def test_Bidirectional_merged_value(self, merge_mode):
rnn = keras.layers.LSTM
samples = 2
dim = 5
timesteps = 3
units = 3
x = [np.random.rand(samples, timesteps, dim)]
with self.cached_session():
if merge_mode == 'sum':
merge_func = lambda y, y_rev: y + y_rev
elif merge_mode == 'mul':
merge_func = lambda y, y_rev: y * y_rev
elif merge_mode == 'ave':
merge_func = lambda y, y_rev: (y + y_rev) / 2
elif merge_mode == 'concat':
merge_func = lambda y, y_rev: np.concatenate((y, y_rev), axis=-1)
else:
merge_func = lambda y, y_rev: [y, y_rev]
# basic case
inputs = keras.Input((timesteps, dim))
layer = keras.layers.Bidirectional(
rnn(units, return_sequences=True), merge_mode=merge_mode)
f_merged = keras.backend.function([inputs], _to_list(layer(inputs)))
f_forward = keras.backend.function([inputs],
[layer.forward_layer(inputs)])
f_backward = keras.backend.function(
[inputs],
[keras.backend.reverse(layer.backward_layer(inputs), 1)])
y_merged = f_merged(x)
y_expected = _to_list(merge_func(f_forward(x)[0], f_backward(x)[0]))
assert len(y_merged) == len(y_expected)
for x1, x2 in zip(y_merged, y_expected):
self.assertAllClose(x1, x2, atol=1e-5)
# test return_state
inputs = keras.Input((timesteps, dim))
layer = keras.layers.Bidirectional(
rnn(units, return_state=True), merge_mode=merge_mode)
f_merged = keras.backend.function([inputs], layer(inputs))
f_forward = keras.backend.function([inputs],
layer.forward_layer(inputs))
f_backward = keras.backend.function([inputs],
layer.backward_layer(inputs))
n_states = len(layer.layer.states)
y_merged = f_merged(x)
y_forward = f_forward(x)
y_backward = f_backward(x)
y_expected = _to_list(merge_func(y_forward[0], y_backward[0]))
assert len(y_merged) == len(y_expected) + n_states * 2
for x1, x2 in zip(y_merged, y_expected):
self.assertAllClose(x1, x2, atol=1e-5)
y_merged = y_merged[-n_states * 2:]
y_forward = y_forward[-n_states:]
y_backward = y_backward[-n_states:]
for state_birnn, state_inner in zip(y_merged, y_forward + y_backward):
self.assertAllClose(state_birnn, state_inner, atol=1e-5)
@parameterized.parameters([True, False])
def test_Bidirectional_with_time_major_input(self, time_major):
batch_size, time, input_dim = 2, 3, 1
inputs = tf.zeros((batch_size, time, input_dim))
    # length is [1 2]. Within the batch, the first element has 1 step, and the
    # second element has 2 steps.
lengths = tf.range(1, 1 + batch_size)
mask = tf.sequence_mask(lengths, maxlen=time, dtype=tf.float32)
forward_cell = _AddOneCell(name='forward')
backward_cell = _AddOneCell(name='backward')
layer = keras.layers.Bidirectional(
layer=keras.layers.RNN(
forward_cell, time_major=time_major, return_sequences=True),
backward_layer=keras.layers.RNN(
backward_cell, time_major=time_major, return_sequences=True,
go_backwards=True))
# Switch to time-major.
if time_major:
inputs = tf.transpose(inputs, [1, 0, 2])
mask = tf.transpose(mask, [1, 0])
keras_outputs = layer(inputs, mask=mask)
if time_major:
keras_outputs = tf.transpose(keras_outputs, [1, 0, 2])
# expect the first element in batch has 1 step and second element in batch
# has 2 steps.
expected_result = np.array([[[1., 1.], [0., 0.], [0., 0.]],
[[1., 1.], [1., 1.], [0., 0.]]])
self.assertAllClose(expected_result, keras_outputs)
def test_Bidirectional_dropout(self):
rnn = keras.layers.LSTM
samples = 2
dim = 5
timesteps = 3
units = 3
merge_mode = 'sum'
x = [np.random.rand(samples, timesteps, dim)]
with self.cached_session():
inputs = keras.Input((timesteps, dim))
wrapped = keras.layers.Bidirectional(
rnn(units, dropout=0.2, recurrent_dropout=0.2), merge_mode=merge_mode)
outputs = _to_list(wrapped(inputs, training=True))
inputs = keras.Input((timesteps, dim))
wrapped = keras.layers.Bidirectional(
rnn(units, dropout=0.2, return_state=True), merge_mode=merge_mode)
outputs = _to_list(wrapped(inputs))
model = keras.Model(inputs, outputs)
y1 = _to_list(model.predict(x))
y2 = _to_list(model.predict(x))
for x1, x2 in zip(y1, y2):
self.assertAllClose(x1, x2, atol=1e-5)
def test_Bidirectional_state_reuse(self):
rnn = keras.layers.LSTM
samples = 2
dim = 5
timesteps = 3
units = 3
with self.cached_session():
input1 = keras.layers.Input((timesteps, dim))
layer = keras.layers.Bidirectional(
rnn(units, return_state=True, return_sequences=True))
state = layer(input1)[1:]
# test passing invalid initial_state: passing a tensor
input2 = keras.layers.Input((timesteps, dim))
with self.assertRaises(ValueError):
keras.layers.Bidirectional(rnn(units))(input2, initial_state=state[0])
# test valid usage: passing a list
output = keras.layers.Bidirectional(rnn(units))(input2,
initial_state=state)
model = keras.models.Model([input1, input2], output)
assert len(model.layers) == 4
assert isinstance(model.layers[-1].input, list)
inputs = [np.random.rand(samples, timesteps, dim),
np.random.rand(samples, timesteps, dim)]
model.predict(inputs)
def test_Bidirectional_state_reuse_with_np_input(self):
# See https://github.com/tensorflow/tensorflow/issues/28761 for more detail.
rnn = keras.layers.LSTM
samples = 2
dim = 5
timesteps = 3
units = 3
with self.cached_session():
input1 = np.random.rand(samples, timesteps, dim).astype(np.float32)
layer = keras.layers.Bidirectional(
rnn(units, return_state=True, return_sequences=True))
state = layer(input1)[1:]
input2 = np.random.rand(samples, timesteps, dim).astype(np.float32)
keras.layers.Bidirectional(rnn(units))(input2, initial_state=state)
def test_Bidirectional_trainable(self):
# test layers that need learning_phase to be set
with self.cached_session():
x = keras.layers.Input(shape=(3, 2))
layer = keras.layers.Bidirectional(keras.layers.SimpleRNN(3))
_ = layer(x)
assert len(layer.trainable_weights) == 6
layer.trainable = False
assert not layer.trainable_weights
layer.trainable = True
assert len(layer.trainable_weights) == 6
def test_Bidirectional_updates(self):
if tf.executing_eagerly():
self.skipTest('layer.updates is only available in graph mode.')
with self.cached_session():
x = keras.layers.Input(shape=(3, 2))
x_reachable_update = x * x
layer = keras.layers.Bidirectional(keras.layers.SimpleRNN(3))
_ = layer(x)
assert not layer.updates
# TODO(b/128684069): Remove when Wrapper sublayers are __call__'d.
with base_layer_utils.call_context().enter(layer, x, True, None):
layer.forward_layer.add_update(x_reachable_update)
layer.forward_layer.add_update(1)
layer.backward_layer.add_update(x_reachable_update)
layer.backward_layer.add_update(1)
assert len(layer.updates) == 4
def test_Bidirectional_losses(self):
x = keras.layers.Input(shape=(3, 2))
layer = keras.layers.Bidirectional(
keras.layers.SimpleRNN(
3,
kernel_regularizer='l1',
bias_regularizer='l1',
activity_regularizer='l1'))
_ = layer(x)
assert len(layer.losses) == 6
loss = x * x
layer.forward_layer.add_loss(loss)
layer.backward_layer.add_loss(loss)
assert len(layer.losses) == 8
def test_Bidirectional_with_constants(self):
with self.cached_session():
# Test basic case.
x = keras.Input((5, 5))
c = keras.Input((3,))
cell = _RNNCellWithConstants(32, 3)
custom_objects = {'_RNNCellWithConstants': _RNNCellWithConstants}
with generic_utils.CustomObjectScope(custom_objects):
layer = keras.layers.Bidirectional(keras.layers.RNN(cell))
y = layer(x, constants=c)
model = keras.Model([x, c], y)
model.compile(optimizer='rmsprop', loss='mse')
model.train_on_batch(
[np.zeros((6, 5, 5)), np.zeros((6, 3))],
np.zeros((6, 64))
)
# Test basic case serialization.
x_np = np.random.random((6, 5, 5))
c_np = np.random.random((6, 3))
y_np = model.predict([x_np, c_np])
weights = model.get_weights()
config = layer.get_config()
with generic_utils.CustomObjectScope(custom_objects):
layer = keras.layers.Bidirectional.from_config(copy.deepcopy(config))
y = layer(x, constants=c)
model = keras.Model([x, c], y)
model.set_weights(weights)
y_np_2 = model.predict([x_np, c_np])
self.assertAllClose(y_np, y_np_2, atol=1e-4)
# Test flat list inputs
with generic_utils.CustomObjectScope(custom_objects):
layer = keras.layers.Bidirectional.from_config(copy.deepcopy(config))
y = layer([x, c])
model = keras.Model([x, c], y)
model.set_weights(weights)
y_np_3 = model.predict([x_np, c_np])
self.assertAllClose(y_np, y_np_3, atol=1e-4)
def test_Bidirectional_with_constants_layer_passing_initial_state(self):
with self.cached_session():
# Test basic case.
x = keras.Input((5, 5))
c = keras.Input((3,))
s_for = keras.Input((32,))
s_bac = keras.Input((32,))
cell = _RNNCellWithConstants(32, 3)
custom_objects = {'_RNNCellWithConstants': _RNNCellWithConstants}
with generic_utils.CustomObjectScope(custom_objects):
layer = keras.layers.Bidirectional(keras.layers.RNN(cell))
y = layer(x, initial_state=[s_for, s_bac], constants=c)
model = keras.Model([x, s_for, s_bac, c], y)
model.compile(optimizer='rmsprop', loss='mse')
model.train_on_batch(
[np.zeros((6, 5, 5)),
np.zeros((6, 32)),
np.zeros((6, 32)),
np.zeros((6, 3))],
np.zeros((6, 64))
)
# Test basic case serialization.
x_np = np.random.random((6, 5, 5))
s_fw_np = np.random.random((6, 32))
s_bk_np = np.random.random((6, 32))
c_np = np.random.random((6, 3))
y_np = model.predict([x_np, s_fw_np, s_bk_np, c_np])
weights = model.get_weights()
config = layer.get_config()
with generic_utils.CustomObjectScope(custom_objects):
layer = keras.layers.Bidirectional.from_config(copy.deepcopy(config))
y = layer(x, initial_state=[s_for, s_bac], constants=c)
model = keras.Model([x, s_for, s_bac, c], y)
model.set_weights(weights)
y_np_2 = model.predict([x_np, s_fw_np, s_bk_np, c_np])
self.assertAllClose(y_np, y_np_2, atol=1e-4)
# Verify that state is used
y_np_2_different_s = model.predict(
[x_np, s_fw_np + 10., s_bk_np + 10., c_np])
assert np.mean(y_np - y_np_2_different_s) != 0
# Test flat list inputs
with generic_utils.CustomObjectScope(custom_objects):
layer = keras.layers.Bidirectional.from_config(copy.deepcopy(config))
y = layer([x, s_for, s_bac, c])
model = keras.Model([x, s_for, s_bac, c], y)
model.set_weights(weights)
y_np_3 = model.predict([x_np, s_fw_np, s_bk_np, c_np])
self.assertAllClose(y_np, y_np_3, atol=1e-4)
@parameterized.parameters([keras.layers.LSTM, keras.layers.GRU])
def test_Bidirectional_output_shape(self, rnn):
input_shape = [None, 2, 1]
num_state = 4 if rnn == keras.layers.LSTM else 2
wrapper = keras.layers.Bidirectional(rnn(3))
output_shape = wrapper.compute_output_shape(input_shape)
self.assertEqual(output_shape.as_list(), [None, 6])
wrapper = keras.layers.Bidirectional(rnn(3, return_state=True))
output_shape = wrapper.compute_output_shape(input_shape)
# 1 for output and the rest for forward and backward states
self.assertLen(output_shape, 1 + num_state)
self.assertEqual(output_shape[0].as_list(), [None, 6])
for shape in output_shape[1:]:
self.assertEqual(shape.as_list(), [None, 3])
wrapper = keras.layers.Bidirectional(rnn(3, return_state=True),
merge_mode=None)
output_shape = wrapper.compute_output_shape(input_shape)
# 1 for forward output and 1 for backward output, and the rest for states
self.assertLen(output_shape, 2 + num_state)
for shape in output_shape:
self.assertEqual(shape.as_list(), [None, 3])
def test_Bidirectional_output_shape_return_types(self):
class TestLayer(keras.layers.SimpleRNN):
def call(self, inputs):
return tf.concat([inputs, inputs], axis=-1)
def compute_output_shape(self, input_shape):
output_shape = tf.TensorShape(input_shape).as_list()
output_shape[-1] = output_shape[-1] * 2
return tf.TensorShape(output_shape)
class TestListLayer(TestLayer):
def compute_output_shape(self, input_shape):
shape = super(TestListLayer, self).compute_output_shape(input_shape)
return shape.as_list()
class TestTupleLayer(TestLayer):
def compute_output_shape(self, input_shape):
shape = super(TestTupleLayer, self).compute_output_shape(input_shape)
return tuple(shape.as_list())
# Layers can specify output shape as list/tuple/TensorShape
test_layers = [TestLayer, TestListLayer, TestTupleLayer]
for layer in test_layers:
input_layer = keras.layers.Bidirectional(layer(1))
inputs = keras.backend.placeholder(shape=(None, 2, 4))
output = input_layer(inputs)
self.assertEqual(output.shape.as_list(), [None, 2, 16])
self.assertEqual(
input_layer.compute_output_shape([None, 2, 4]).as_list(),
[None, 2, 16])
@tf.test.disable_with_predicate(
pred=tf.test.is_built_with_rocm,
skip_message='Skipping as ROCm MIOpen does not support padded input yet.')
def test_Bidirectional_last_output_with_masking(self):
rnn = keras.layers.LSTM
samples = 2
dim = 5
timesteps = 3
units = 3
merge_mode = 'concat'
x = np.random.rand(samples, timesteps, dim)
# clear the first record's timestep 2. Last output should be same as state,
# not zeroed.
x[0, 2] = 0
with self.cached_session():
inputs = keras.Input((timesteps, dim))
masked_inputs = keras.layers.Masking()(inputs)
wrapped = keras.layers.Bidirectional(
rnn(units, return_state=True), merge_mode=merge_mode)
outputs = _to_list(wrapped(masked_inputs, training=True))
self.assertLen(outputs, 5)
self.assertEqual(outputs[0].shape.as_list(), [None, units * 2])
model = keras.Model(inputs, outputs)
y = _to_list(model.predict(x))
self.assertLen(y, 5)
self.assertAllClose(y[0], np.concatenate([y[1], y[3]], axis=1))
@parameterized.parameters([keras.layers.LSTM, keras.layers.GRU])
@tf.test.disable_with_predicate(
pred=tf.test.is_built_with_rocm,
skip_message='Skipping as ROCm MIOpen does not support padded input yet.')
def test_Bidirectional_sequence_output_with_masking(self, rnn):
samples = 2
dim = 5
timesteps = 3
units = 3
merge_mode = 'concat'
x = np.random.rand(samples, timesteps, dim)
# clear the first record's timestep 2, and expect the output of timestep 2
# is also 0s.
x[0, 2] = 0
with self.cached_session():
inputs = keras.Input((timesteps, dim))
masked_inputs = keras.layers.Masking()(inputs)
wrapped = keras.layers.Bidirectional(
rnn(units, return_sequences=True),
merge_mode=merge_mode)
outputs = _to_list(wrapped(masked_inputs, training=True))
self.assertLen(outputs, 1)
self.assertEqual(outputs[0].shape.as_list(), [None, timesteps, units * 2])
model = keras.Model(inputs, outputs)
y = _to_list(model.predict(x))
self.assertLen(y, 1)
self.assertAllClose(y[0][0, 2], np.zeros(units * 2))
@parameterized.parameters(['sum', 'concat'])
def test_custom_backward_layer(self, mode):
rnn = keras.layers.SimpleRNN
samples = 2
dim = 2
timesteps = 2
output_dim = 2
x = np.random.random((samples, timesteps, dim))
target_dim = 2 * output_dim if mode == 'concat' else output_dim
y = np.random.random((samples, target_dim))
forward_layer = rnn(output_dim)
backward_layer = rnn(output_dim, go_backwards=True)
# test with Sequential model
model = keras.models.Sequential()
model.add(
keras.layers.Bidirectional(
forward_layer,
merge_mode=mode,
backward_layer=backward_layer,
input_shape=(timesteps, dim)))
model.compile(optimizer='rmsprop', loss='mse')
model.fit(x, y, epochs=1, batch_size=1)
# check whether the model variables are present in the
# trackable list of objects
checkpointed_object_ids = {
id(o) for o in trackable_util.list_objects(model)
}
for v in model.variables:
self.assertIn(id(v), checkpointed_object_ids)
# test compute output shape
ref_shape = model.layers[-1].output.shape
shape = model.layers[-1].compute_output_shape((None, timesteps, dim))
self.assertListEqual(shape.as_list(), ref_shape.as_list())
# test config
model.get_config()
model = keras.models.model_from_json(model.to_json())
model.summary()
def test_custom_backward_layer_error_check(self):
rnn = keras.layers.LSTM
units = 2
forward_layer = rnn(units)
backward_layer = rnn(units)
with self.assertRaisesRegex(ValueError,
'should have different `go_backwards` value.'):
keras.layers.Bidirectional(
forward_layer, merge_mode='concat', backward_layer=backward_layer)
for attr in ('stateful', 'return_sequences', 'return_state'):
kwargs = {attr: True}
backward_layer = rnn(units, go_backwards=True, **kwargs)
with self.assertRaisesRegex(
ValueError, 'expected to have the same value for attribute "' + attr):
keras.layers.Bidirectional(
forward_layer, merge_mode='concat', backward_layer=backward_layer)
def test_custom_backward_layer_serialization(self):
rnn = keras.layers.LSTM
units = 2
forward_layer = rnn(units)
backward_layer = rnn(units, go_backwards=True)
layer = keras.layers.Bidirectional(
forward_layer, merge_mode='concat', backward_layer=backward_layer)
config = layer.get_config()
layer_from_config = keras.layers.Bidirectional.from_config(config)
new_config = layer_from_config.get_config()
self.assertDictEqual(config, new_config)
def test_rnn_layer_name(self):
rnn = keras.layers.LSTM
units = 2
layer = keras.layers.Bidirectional(rnn(units, name='rnn'))
config = layer.get_config()
self.assertEqual(config['layer']['config']['name'], 'rnn')
layer_from_config = keras.layers.Bidirectional.from_config(config)
self.assertEqual(layer_from_config.forward_layer.name, 'forward_rnn')
self.assertEqual(layer_from_config.backward_layer.name, 'backward_rnn')
def test_custom_backward_rnn_layer_name(self):
rnn = keras.layers.LSTM
units = 2
forward_layer = rnn(units)
backward_layer = rnn(units, go_backwards=True)
layer = keras.layers.Bidirectional(
forward_layer, merge_mode='concat', backward_layer=backward_layer)
config = layer.get_config()
self.assertEqual(config['layer']['config']['name'], 'lstm')
self.assertEqual(config['backward_layer']['config']['name'], 'lstm_1')
layer_from_config = keras.layers.Bidirectional.from_config(config)
self.assertEqual(layer_from_config.forward_layer.name, 'forward_lstm')
self.assertEqual(layer_from_config.backward_layer.name, 'backward_lstm_1')
def test_rnn_with_customized_cell(self):
batch = 20
dim = 5
timesteps = 3
units = 5
merge_mode = 'sum'
cell = _ResidualLSTMCell(units)
forward_layer = keras.layers.RNN(cell)
inputs = keras.Input((timesteps, dim))
bidirectional_rnn = keras.layers.Bidirectional(
forward_layer, merge_mode=merge_mode)
outputs = _to_list(bidirectional_rnn(inputs))
model = keras.Model(inputs, outputs)
model.compile(optimizer='rmsprop', loss='mse')
model.fit(
np.random.random((batch, timesteps, dim)),
np.random.random((batch, units)),
epochs=1,
batch_size=10)
def test_rnn_with_customized_cell_stacking(self):
batch = 20
dim = 5
timesteps = 3
units = 5
merge_mode = 'sum'
cell = [_ResidualLSTMCell(units), _ResidualLSTMCell(units)]
forward_layer = keras.layers.RNN(cell)
inputs = keras.Input((timesteps, dim))
bidirectional_rnn = keras.layers.Bidirectional(
forward_layer, merge_mode=merge_mode)
outputs = _to_list(bidirectional_rnn(inputs))
model = keras.Model(inputs, outputs)
model.compile(optimizer='rmsprop', loss='mse')
model.fit(
np.random.random((batch, timesteps, dim)),
np.random.random((batch, units)),
epochs=1,
batch_size=10)
@test_utils.run_v2_only
def test_wrapped_rnn_cell(self):
# See https://github.com/tensorflow/tensorflow/issues/26581.
batch = 20
dim = 5
timesteps = 3
units = 5
merge_mode = 'sum'
cell = keras.layers.LSTMCell(units)
cell = ResidualWrapper(cell)
rnn = keras.layers.RNN(cell)
inputs = keras.Input((timesteps, dim))
wrapped = keras.layers.Bidirectional(rnn, merge_mode=merge_mode)
outputs = _to_list(wrapped(inputs))
model = keras.Model(inputs, outputs)
model.compile(optimizer='rmsprop', loss='mse')
model.fit(
np.random.random((batch, timesteps, dim)),
np.random.random((batch, units)),
epochs=1,
batch_size=10)
@parameterized.parameters(['ave', 'concat', 'mul'])
@tf.test.disable_with_predicate(
pred=tf.test.is_built_with_rocm,
skip_message='Skipping as ROCm RNN does not support ragged tensors yet.')
def test_Bidirectional_ragged_input(self, merge_mode):
np.random.seed(100)
rnn = keras.layers.LSTM
units = 3
x = tf.ragged.constant(
[[[1, 1, 1], [1, 1, 1]], [[1, 1, 1]],
[[1, 1, 1], [1, 1, 1], [1, 1, 1], [1, 1, 1]],
[[1, 1, 1], [1, 1, 1], [1, 1, 1]]],
ragged_rank=1)
x = tf.cast(x, 'float32')
# pylint: disable=g-long-lambda
with self.cached_session():
if merge_mode == 'ave':
merge_func = lambda y, y_rev: (y + y_rev) / 2
elif merge_mode == 'concat':
merge_func = lambda y, y_rev: tf.concat(
(y, y_rev), axis=-1)
elif merge_mode == 'mul':
merge_func = lambda y, y_rev: (y * y_rev)
# pylint: enable=g-long-lambda
inputs = keras.Input(
shape=(None, 3), batch_size=4, dtype='float32', ragged=True)
layer = keras.layers.Bidirectional(
rnn(units, return_sequences=True), merge_mode=merge_mode)
f_merged = keras.backend.function([inputs], layer(inputs))
f_forward = keras.backend.function([inputs],
layer.forward_layer(inputs))
# TODO(kaftan): after KerasTensor refactor TF op layers should work
# with many composite tensors, and this shouldn't need to be a lambda
# layer.
reverse_layer = core.Lambda(tf.reverse, arguments=dict(axis=[1]))
f_backward = keras.backend.function(
[inputs],
reverse_layer(layer.backward_layer(inputs)))
y_merged = f_merged(x)
y_expected = merge_func(
convert_ragged_tensor_value(f_forward(x)),
convert_ragged_tensor_value(f_backward(x)))
y_merged = convert_ragged_tensor_value(y_merged)
self.assertAllClose(y_merged.flat_values, y_expected.flat_values)
def test_Bidirectional_nested_state_reuse(self):
if not tf.executing_eagerly():
self.skipTest('Only test eager mode.')
x = tf.random.normal([4, 8, 16])
layer = keras.layers.Bidirectional(
keras.layers.RNN([keras.layers.LSTMCell(5),
keras.layers.LSTMCell(5)],
return_sequences=True,
return_state=True))
y = layer(x)
self.assertAllClose(layer([x] + y[1:]), layer(x, initial_state=y[1:]))
def test_full_input_spec(self):
# See https://github.com/tensorflow/tensorflow/issues/38403
inputs = keras.layers.Input(batch_shape=(1, 1, 1))
fw_state = keras.layers.Input(batch_shape=(1, 1))
bw_state = keras.layers.Input(batch_shape=(1, 1))
states = [fw_state, bw_state]
bidirectional_rnn = keras.layers.Bidirectional(
keras.layers.SimpleRNN(1, stateful=True))
rnn_output = bidirectional_rnn(inputs, initial_state=states)
model = keras.Model([inputs, fw_state, bw_state], rnn_output)
output1 = model.predict(
[np.ones((1, 1, 1)), np.ones((1, 1)), np.ones((1, 1))])
output2 = model.predict(
[np.ones((1, 1, 1)), np.ones((1, 1)), np.ones((1, 1))])
model.reset_states()
output3 = model.predict(
[np.ones((1, 1, 1)), np.ones((1, 1)), np.ones((1, 1))])
self.assertAllClose(output1, output3)
self.assertNotAllClose(output1, output2)
def _to_list(ls):
if isinstance(ls, list):
return ls
else:
return [ls]
def convert_ragged_tensor_value(inputs):
if isinstance(inputs, tf.compat.v1.ragged.RaggedTensorValue):
flat_values = tf.convert_to_tensor(
value=inputs.flat_values,
name='flat_values')
return tf.RaggedTensor.from_nested_row_splits(
flat_values, inputs.nested_row_splits, validate=False)
return inputs
if __name__ == '__main__':
tf.test.main()
|
|
#!/usr/bin/env python
import vtk
def main():
colors = vtk.vtkNamedColors()
# Set the background color.
colors.SetColor("BkgColor", [51, 77, 102, 255])
titles = list()
textMappers = list()
textActors = list()
uGrids = list()
mappers = list()
actors = list()
renderers = list()
uGrids.append(MakeHexagonalPrism())
titles.append('Hexagonal Prism')
uGrids.append(MakeHexahedron())
titles.append('Hexahedron')
uGrids.append(MakePentagonalPrism())
titles.append('Pentagonal Prism')
uGrids.append(MakePolyhedron())
titles.append('Polyhedron')
uGrids.append(MakePyramid())
titles.append('Pyramid')
uGrids.append(MakeTetrahedron())
titles.append('Tetrahedron')
uGrids.append(MakeVoxel())
titles.append('Voxel')
uGrids.append(MakeWedge())
titles.append('Wedge')
renWin = vtk.vtkRenderWindow()
renWin.SetWindowName('Cell3D Demonstration')
iRen = vtk.vtkRenderWindowInteractor()
iRen.SetRenderWindow(renWin)
# Create one text property for all
textProperty = vtk.vtkTextProperty()
textProperty.SetFontSize(16)
textProperty.SetJustificationToCentered()
# Create and link the mappers actors and renderers together.
for i in range(0, len(uGrids)):
textMappers.append(vtk.vtkTextMapper())
textActors.append(vtk.vtkActor2D())
mappers.append(vtk.vtkDataSetMapper())
actors.append(vtk.vtkActor())
renderers.append(vtk.vtkRenderer())
mappers[i].SetInputData(uGrids[i])
actors[i].SetMapper(mappers[i])
actors[i].GetProperty().SetColor(
colors.GetColor3d("Seashell"))
renderers[i].AddViewProp(actors[i])
textMappers[i].SetInput(titles[i])
textMappers[i].SetTextProperty(textProperty)
textActors[i].SetMapper(textMappers[i])
textActors[i].SetPosition(120, 16)
renderers[i].AddViewProp(textActors[i])
renWin.AddRenderer(renderers[i])
gridDimensions = 3
rendererSize = 300
renWin.SetSize(rendererSize * gridDimensions,
rendererSize * gridDimensions)
for row in range(0, gridDimensions):
for col in range(0, gridDimensions):
index = row * gridDimensions + col
# (xmin, ymin, xmax, ymax)
viewport = [
float(col) * rendererSize /
(gridDimensions * rendererSize),
float(gridDimensions - (row + 1)) * rendererSize /
(gridDimensions * rendererSize),
float(col + 1) * rendererSize /
(gridDimensions * rendererSize),
float(gridDimensions - row) * rendererSize /
(gridDimensions * rendererSize)]
if index > len(actors) - 1:
# Add a renderer even if there is no actor.
# This makes the render window background all the same color.
ren = vtk.vtkRenderer()
ren.SetBackground(colors.GetColor3d("BkgColor"))
ren.SetViewport(viewport)
renWin.AddRenderer(ren)
continue
renderers[index].SetViewport(viewport)
renderers[index].SetBackground(colors.GetColor3d("BkgColor"))
renderers[index].ResetCamera()
renderers[index].GetActiveCamera().Azimuth(30)
renderers[index].GetActiveCamera().Elevation(-30)
renderers[index].GetActiveCamera().Zoom(0.85)
renderers[index].ResetCameraClippingRange()
iRen.Initialize()
renWin.Render()
iRen.Start()
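# Note on the viewport arithmetic above: every term divides by
# (gridDimensions * rendererSize), so the rendererSize factor cancels and
# the viewport for grid cell (row, col) reduces to
#   [col/3, (3 - row - 1)/3, (col + 1)/3, (3 - row)/3]
# in normalized window coordinates. For example, row=0, col=1 gives
# [1/3, 2/3, 2/3, 1.0], i.e. the top-middle renderer.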
def MakeHexagonalPrism():
"""
3D: hexagonal prism: a wedge with a hexagonal base.
Be careful: the base face ordering is different from that of a wedge.
"""
numberOfVertices = 12
points = vtk.vtkPoints()
points.InsertNextPoint(0.0, 0.0, 1.0)
points.InsertNextPoint(1.0, 0.0, 1.0)
points.InsertNextPoint(1.5, 0.5, 1.0)
points.InsertNextPoint(1.0, 1.0, 1.0)
points.InsertNextPoint(0.0, 1.0, 1.0)
points.InsertNextPoint(-0.5, 0.5, 1.0)
points.InsertNextPoint(0.0, 0.0, 0.0)
points.InsertNextPoint(1.0, 0.0, 0.0)
points.InsertNextPoint(1.5, 0.5, 0.0)
points.InsertNextPoint(1.0, 1.0, 0.0)
points.InsertNextPoint(0.0, 1.0, 0.0)
points.InsertNextPoint(-0.5, 0.5, 0.0)
hexagonalPrism = vtk.vtkHexagonalPrism()
for i in range(0, numberOfVertices):
hexagonalPrism.GetPointIds().SetId(i, i)
ug = vtk.vtkUnstructuredGrid()
ug.InsertNextCell(hexagonalPrism.GetCellType(),
hexagonalPrism.GetPointIds())
ug.SetPoints(points)
return ug
def MakeHexahedron():
"""
A regular hexahedron (a cube) with all faces square and three squares
around each vertex is created below.
Set up the coordinates of eight points
(the two faces must be in counter-clockwise
order as viewed from the outside).
As an exercise you can modify the coordinates of the points to create
seven topologically distinct convex hexahedra.
"""
numberOfVertices = 8
# Create the points
points = vtk.vtkPoints()
points.InsertNextPoint(0.0, 0.0, 0.0)
points.InsertNextPoint(1.0, 0.0, 0.0)
points.InsertNextPoint(1.0, 1.0, 0.0)
points.InsertNextPoint(0.0, 1.0, 0.0)
points.InsertNextPoint(0.0, 0.0, 1.0)
points.InsertNextPoint(1.0, 0.0, 1.0)
points.InsertNextPoint(1.0, 1.0, 1.0)
points.InsertNextPoint(0.0, 1.0, 1.0)
# Create a hexahedron from the points
hex_ = vtk.vtkHexahedron()
for i in range(0, numberOfVertices):
hex_.GetPointIds().SetId(i, i)
# Add the points and hexahedron to an unstructured grid
uGrid = vtk.vtkUnstructuredGrid()
uGrid.SetPoints(points)
uGrid.InsertNextCell(hex_.GetCellType(), hex_.GetPointIds())
return uGrid
def MakePentagonalPrism():
numberOfVertices = 10
# Create the points
points = vtk.vtkPoints()
points.InsertNextPoint(11, 10, 10)
points.InsertNextPoint(13, 10, 10)
points.InsertNextPoint(14, 12, 10)
points.InsertNextPoint(12, 14, 10)
points.InsertNextPoint(10, 12, 10)
points.InsertNextPoint(11, 10, 14)
points.InsertNextPoint(13, 10, 14)
points.InsertNextPoint(14, 12, 14)
points.InsertNextPoint(12, 14, 14)
points.InsertNextPoint(10, 12, 14)
# Pentagonal Prism
pentagonalPrism = vtk.vtkPentagonalPrism()
for i in range(0, numberOfVertices):
pentagonalPrism.GetPointIds().SetId(i, i)
# Add the points and pentagonal prism to an unstructured grid
uGrid = vtk.vtkUnstructuredGrid()
uGrid.SetPoints(points)
uGrid.InsertNextCell(pentagonalPrism.GetCellType(),
pentagonalPrism.GetPointIds())
return uGrid
def MakePolyhedron():
"""
Make a regular dodecahedron. It consists of twelve regular pentagonal
faces with three faces meeting at each vertex.
"""
# numberOfVertices = 20
numberOfFaces = 12
# numberOfFaceVertices = 5
points = vtk.vtkPoints()
points.InsertNextPoint(1.21412, 0, 1.58931)
points.InsertNextPoint(0.375185, 1.1547, 1.58931)
points.InsertNextPoint(-0.982247, 0.713644, 1.58931)
points.InsertNextPoint(-0.982247, -0.713644, 1.58931)
points.InsertNextPoint(0.375185, -1.1547, 1.58931)
points.InsertNextPoint(1.96449, 0, 0.375185)
points.InsertNextPoint(0.607062, 1.86835, 0.375185)
points.InsertNextPoint(-1.58931, 1.1547, 0.375185)
points.InsertNextPoint(-1.58931, -1.1547, 0.375185)
points.InsertNextPoint(0.607062, -1.86835, 0.375185)
points.InsertNextPoint(1.58931, 1.1547, -0.375185)
points.InsertNextPoint(-0.607062, 1.86835, -0.375185)
points.InsertNextPoint(-1.96449, 0, -0.375185)
points.InsertNextPoint(-0.607062, -1.86835, -0.375185)
points.InsertNextPoint(1.58931, -1.1547, -0.375185)
points.InsertNextPoint(0.982247, 0.713644, -1.58931)
points.InsertNextPoint(-0.375185, 1.1547, -1.58931)
points.InsertNextPoint(-1.21412, 0, -1.58931)
points.InsertNextPoint(-0.375185, -1.1547, -1.58931)
points.InsertNextPoint(0.982247, -0.713644, -1.58931)
# Dimensions are [numberOfFaces][numberOfFaceVertices]
dodechedronFace = [
[0, 1, 2, 3, 4],
[0, 5, 10, 6, 1],
[1, 6, 11, 7, 2],
[2, 7, 12, 8, 3],
[3, 8, 13, 9, 4],
[4, 9, 14, 5, 0],
[15, 10, 5, 14, 19],
[16, 11, 6, 10, 15],
[17, 12, 7, 11, 16],
[18, 13, 8, 12, 17],
[19, 14, 9, 13, 18],
[19, 18, 17, 16, 15]
]
dodechedronFacesIdList = vtk.vtkIdList()
# Number of faces that make up the cell.
dodechedronFacesIdList.InsertNextId(numberOfFaces)
for face in dodechedronFace:
# Number of points in the face == numberOfFaceVertices
dodechedronFacesIdList.InsertNextId(len(face))
# Insert the pointIds for that face.
[dodechedronFacesIdList.InsertNextId(i) for i in face]
uGrid = vtk.vtkUnstructuredGrid()
uGrid.InsertNextCell(vtk.VTK_POLYHEDRON, dodechedronFacesIdList)
uGrid.SetPoints(points)
return uGrid
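# For reference, the id list built above is VTK's flat polyhedron "face
# stream": the number of faces first, then for each face its vertex count
# followed by its point ids. For the first two dodecahedron faces the
# stream therefore begins
#   12, 5, 0, 1, 2, 3, 4, 5, 0, 5, 10, 6, 1, ...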
def MakePyramid():
"""
Make a regular square pyramid.
"""
numberOfVertices = 5
points = vtk.vtkPoints()
p = [
[1.0, 1.0, 0.0],
[-1.0, 1.0, 0.0],
[-1.0, -1.0, 0.0],
[1.0, -1.0, 0.0],
[0.0, 0.0, 1.0]
]
for pt in p:
points.InsertNextPoint(pt)
pyramid = vtk.vtkPyramid()
for i in range(0, numberOfVertices):
pyramid.GetPointIds().SetId(i, i)
ug = vtk.vtkUnstructuredGrid()
ug.SetPoints(points)
ug.InsertNextCell(pyramid.GetCellType(), pyramid.GetPointIds())
return ug
def MakeTetrahedron():
"""
Make a tetrahedron.
"""
numberOfVertices = 4
points = vtk.vtkPoints()
points.InsertNextPoint(0, 0, 0)
points.InsertNextPoint(1, 0, 0)
points.InsertNextPoint(1, 1, 0)
points.InsertNextPoint(0, 1, 1)
tetra = vtk.vtkTetra()
for i in range(0, numberOfVertices):
tetra.GetPointIds().SetId(i, i)
cellArray = vtk.vtkCellArray()
cellArray.InsertNextCell(tetra)
unstructuredGrid = vtk.vtkUnstructuredGrid()
unstructuredGrid.SetPoints(points)
unstructuredGrid.SetCells(vtk.VTK_TETRA, cellArray)
return unstructuredGrid
def MakeVoxel():
"""
A voxel is a representation of a regular grid in 3-D space.
"""
numberOfVertices = 8
points = vtk.vtkPoints()
points.InsertNextPoint(0, 0, 0)
points.InsertNextPoint(1, 0, 0)
points.InsertNextPoint(0, 1, 0)
points.InsertNextPoint(1, 1, 0)
points.InsertNextPoint(0, 0, 1)
points.InsertNextPoint(1, 0, 1)
points.InsertNextPoint(0, 1, 1)
points.InsertNextPoint(1, 1, 1)
voxel = vtk.vtkVoxel()
for i in range(0, numberOfVertices):
voxel.GetPointIds().SetId(i, i)
ug = vtk.vtkUnstructuredGrid()
ug.SetPoints(points)
ug.InsertNextCell(voxel.GetCellType(), voxel.GetPointIds())
return ug
def MakeWedge():
"""
A wedge consists of two triangular ends and three rectangular faces.
"""
numberOfVertices = 6
points = vtk.vtkPoints()
points.InsertNextPoint(0, 1, 0)
points.InsertNextPoint(0, 0, 0)
points.InsertNextPoint(0, .5, .5)
points.InsertNextPoint(1, 1, 0)
points.InsertNextPoint(1, 0.0, 0.0)
points.InsertNextPoint(1, .5, .5)
wedge = vtk.vtkWedge()
for i in range(0, numberOfVertices):
wedge.GetPointIds().SetId(i, i)
ug = vtk.vtkUnstructuredGrid()
ug.SetPoints(points)
ug.InsertNextCell(wedge.GetCellType(), wedge.GetPointIds())
return ug
def WritePNG(renWin, fn, magnification=1):
"""
Screenshot
Write out a png corresponding to the render window.
:param: renWin - the render window.
:param: fn - the file name.
:param: magnification - the magnification.
"""
windowToImageFilter = vtk.vtkWindowToImageFilter()
windowToImageFilter.SetInput(renWin)
windowToImageFilter.SetMagnification(magnification)
# Record the alpha (transparency) channel
# windowToImageFilter.SetInputBufferTypeToRGBA()
windowToImageFilter.SetInputBufferTypeToRGB()
# Read from the back buffer
windowToImageFilter.ReadFrontBufferOff()
windowToImageFilter.Update()
writer = vtk.vtkPNGWriter()
writer.SetFileName(fn)
writer.SetInputConnection(windowToImageFilter.GetOutputPort())
writer.Write()
if __name__ == '__main__':
main()
|
|
from PresentationObject import PresentationObject
from img import img
from font import font
from qt import QColor
import math # ceil()
import os
import time
true = success = 1
false = failure = 0
class equation( img ):
"""
<equation> uses LaTeX code to insert math characters. The
following programs must be in your $PATH: latex, dvips, gs, pnmgamma,
pnmcrop, pnmpad, ppmchange, pnmscale, and pnmtopng.
<p>
Only LaTeX code will be rendered to the screen... if any tags appear
within <equation>, they won't be rendered. If the LaTeX code
contains errors, it may appear that the Presenter has frozen (if
nothing is displayed to the screen after a minute or two). If this
occurs, quit the Presenter and type "exit" at the shell to exit from
LaTeX.
<p>
Example:
<pre>
<equation>
\int_0^\infty \frac{1}{x^2} \; \mbox{d}x
</equation>
</pre>
<p>
<b>Properties:</b>
<ul>
<li>
<i>color</i>/<i>colour</i>: color of the text, defined by name
(e.g., "red") or in hexadecimal (e.g., "#ff0000").
</li>
<li>
<i>size</i>: height of the text, defined in pixels, point size,
or in relation to the height of the current slide. For example:
<ul>
<li>"12" -> 12 points</li>
<li>"12pt" -> 12 points</li>
<li>"20px" -> 20 pixels</li>
<li>"1/42" -> 1/42 the height of the slide window</li>
<li>"0.05%" -> 0.05% the height of the slide window</li>
</ul>
</li>
</ul>
"""
def __init__( self, *args ):
"""
Initiate the container, contents, and properties.
-*args, arguments for the PresentationObject constructor.
"""
apply( img.__init__, (self,) + args )
def render( self, app, x, y ):
"""
Draw the equation to the app's QCanvas.
-app, SlideApplication object
-x, x coordinate to start drawing at
-y, y coordinate to start drawing at
Returns x, y coordinates where the rendering left off.
"""
if not app.getConfig().latexEnabled:
return x, y
emptyPixels = self.setSize( app.getHeight() )
#
# Align the image in the middle of the current line.
#
x, y = img.render( self, app, x, y )
self.move( self.x(), self.y() + emptyPixels/2 )
return x, y
def setSize( self, displayHeight=600 ):
"""
"""
image = self.getImage()["QImageOriginal"]
height, emptyPixels = self.getHeightInfo( displayHeight )
width = float( height ) / image.height()
width = int( width * image.width() )
self.setProperty( "width", width )
self.setProperty( "height", height )
return emptyPixels
def getHeightInfo( self, displayHeight ):
"""
Get the information needed to compute the dimensions and
alignment of the equation image.
-displayHeight, height of the renderer display
Returns a tuple with the first arguments being the height
to show the image and the second argument being the number
of empty vertical pixels above & below the image on the
current font line(s).
"""
image = self.getImage()["QImageOriginal"]
fontHeight = font( self ).getActualFontHeight( displayHeight )
if fontHeight == 0:
fontHeight = self.getSlide().getFontHeight()
#
# If the outputted LaTeX is multiple lines, try to calculate
# how many lines are in the image.
#
scale = self.getSlideshow().getConfig().latexScale
pixelsPerLine = self.pixelsPerLine( scale )
numLines = math.ceil( float(image.height()) / pixelsPerLine )
height = int( float(image.height()) / \
(pixelsPerLine * numLines) * \
(fontHeight * numLines) )
return height, fontHeight * numLines - height
def pixelsPerLine( self, scale ):
"""
Find the maximum number of pixels per text line, so the number
of lines in a LaTeX image can be computed.
-scale, integer for the image size scale being used.
Returns an integer for pixels.
"""
#
# The pixels-per-line value was computed from the image height of
# "(", which is the tallest glyph in the fonts, or so it
# seems...
#
if scale <= 2:
return 45
elif scale == 3:
return 62
elif scale == 4:
return 78
elif scale == 5:
return 96
elif scale == 6:
return 113
elif scale == 7:
return 130
elif scale == 8:
return 147
elif scale == 9:
return 165
elif scale == 10:
return 182
elif scale == 11:
return 199
elif scale == 12:
return 216
elif scale == 13:
return 233
elif scale == 14:
return 251
elif scale == 15:
return 267
else:
return 286
def getHexColors( self ):
"""
Get the fore/background colors in hexadecimal #rrggbb.
Returns a tuple with the first argument the foreground/text
color and the second the background color.
"""
color = self.getProperty( "color" )
bgColor = self.getProperty( "bgcolor" )
return str(QColor(color).name()), str(QColor(bgColor).name())
def createEquationImage( self, dirName ):
"""
Use the equation to attempt to take the Tex code and
translate it to a PNG file. If any of the following programs
exit with a non-zero return code, a PNG image will not be
created.
latex, dvips, gs, pnmgamma, pnmcrop, pnmpad, ppmchange,
pnmscale, pnmtopng
Returns a string with the path to the created image file. If no
file was created, None is returned.
"""
import re
fileName = self.getNewImageName()
stub = "%s/%s" % ( dirName, fileName )
#
# Since latex won't allow me to define a destination directory
# of the created .dvi file, they'll be created in the current
# working directory, so don't include the xml directory which
# is included in stub.
#
dviFile = "%s.dvi" % fileName
auxFile = "%s.aux" % fileName
logFile = "%s.log" % fileName
#
# ... and these files are created in the directory the .xml
# file is located in. Marvel at the wonder.
#
texFile = "%s.tex" % stub
psFile = "%s.ps" % stub
ppmFile = "%s.ppm" % stub
pngFile = "%s.png" % stub
outFile = "%s.stdout" % stub # redirect stdout
errFile = "%s.stderr" % stub # redirect stderr
gsFile = "%s_gs.input" % stub
filesToRemove = []
regex = re.compile( r"%%BoundingBox: " \
"(?P<llx>\d*) (?P<lly>\d*) " \
"(?P<urx>\d*) (?P<ury>\d*)" )
texfp = open( texFile, "w" )
texfp.write( "\\documentclass{amsart}\n" \
"\\pagestyle{empty}\n" \
"\\thispagestyle{empty}\n" \
"\\begin{document}\n" \
"\\huge\n" \
"{\n" \
"$" )
texfp.write( self.getEquationString() )
texfp.write( "$\n" \
"}\n" \
"\\end{document}\n" )
texfp.close()
#
# Call LaTeX to take the .tex file and create a .dvi file.
#
filesToRemove.append( outFile )
filesToRemove.append( errFile )
filesToRemove.append( texFile )
filesToRemove.append( dviFile )
filesToRemove.append( auxFile )
filesToRemove.append( logFile )
exitCode = os.system( 'echo q | latex "%s" >"%s" 2>"%s"' \
% (texFile, outFile, errFile) )
if exitCode:
self.removeFiles( filesToRemove )
return None
#
# Call DVIPS to take the .dvi file and create a .ps file.
#
filesToRemove.append( dviFile )
exitCode = os.system( 'dvips -E -o "%s" "%s" >"%s" 2>"%s"' \
% (psFile, dviFile, outFile, errFile) )
if exitCode:
self.removeFiles( filesToRemove )
return None
#
# Attempt to match the above BoundingBox regular expression in
# the .ps file produced by dvips. Strip out the four parameters if it's matched.
#
dviArgsSet = false
dvifp = open( psFile );
for line in dvifp:
match = regex.match( line );
if match:
llx, lly, urx, ury = match.group( "llx",
"lly",
"urx",
"ury" )
dviArgsSet = true
break
dvifp.close()
#
# Give up if the BoundingBox regexp wasn't matched.
#
if not dviArgsSet:
self.removeFiles( filesToRemove )
return None
#
# Get arguments for Ghostscript to create the .ppm file.
#
# 4.0
scaleFactor = self.getSlideshow().getConfig().latexScale
scaleFactor = float( scaleFactor )
gsArgs = {}
gsArgs["ppmFile"] = ppmFile
gsArgs["width"] = int( scaleFactor * (int(urx)-int(llx)) )
gsArgs["height"] = int( scaleFactor * (int(ury)-int(lly)) )
gsArgs["resolv"] = "%dx%d" % ( scaleFactor*72, scaleFactor*72 )
gsArgs["psFile"] = psFile
gsArgs["gsFile"] = gsFile
gsArgs["outFile"] = outFile
gsArgs["errFile"] = errFile
gsfp = open( gsFile, "w" )
gsfp.write( "%s neg %s neg translate" % (llx, lly) )
gsfp.close()
#
# Call Ghostscript to take the .ps file and create a .ppm file.
#
filesToRemove.append( gsFile )
exitCode = os.system( 'gs -sDEVICE=ppmraw ' \
'-sOutputFile="%(ppmFile)s" ' \
'-g%(width)sx%(height)s ' \
'-r%(resolv)s - "%(psFile)s" ' \
'< "%(gsFile)s" ' \
'> "%(outFile)s" ' \
'2> "%(errFile)s"' % gsArgs )
if exitCode:
self.removeFiles( filesToRemove )
return None
#
# Pass the .ppm through different programs to output a resized
# .png file. Substitute white (.ppm background) with the
# background color of the current slide and black (text) with
# the slide font color.
#
color, bgColor = self.getHexColors()
color = "rgb:%s/%s/%s" % ( color[1:3], color[3:5], color[5:] )
bgColor = "rgb:%s/%s/%s" \
% ( bgColor[1:3], bgColor[3:5], bgColor[5:] )
tempColor = "rgb:83/95/67"
# reductionFactor = 1 / scaleFactor;'pnmscale %s | ' <BALEETED
#
# I had to change black (text) to #839567 which is then changed
# to the requested font color because if you choose a black
# background and white text, it all comes out transparent
# with the two ppmchange calls. I think it's unlikely #839567
# will be used...
#
filesToRemove.append( psFile )
filesToRemove.append( ppmFile )
exitCode = os.system( 'pnmgamma 1.0 "%s" | ' \
'pnmcrop | ' \
'pnmpad -white -l5 -r5 -t5 -b5 | ' \
'ppmchange rgb:00/00/00 %s | ' \
'ppmchange rgb:ff/ff/ff %s | ' \
'ppmchange %s %s | ' \
'pnmtopng -interlace -transparent %s ' \
'> "%s"' % \
(ppmFile, tempColor, bgColor,
tempColor, color, bgColor, pngFile) )
if exitCode:
self.removeFiles( filesToRemove )
return None
#
# Remove temporary files used to create the .png.
#
self.removeFiles( filesToRemove )
#
# Add entry in the cache index file if caching is enabled.
#
if self.getSlideshow().getConfig().latexCacheEnabled:
self.addEquationImage( "%s.png" % fileName )
#
# Return the path to the created .png.
#
return pngFile
def getEquationString( self ):
"""
Get the text to be processed through LaTeX.
Returns an "ISO 8858-1" encoded string.
"""
equation = ""
for content in self.getContents():
if not isinstance(content, PresentationObject):
equation = "%s%s" % ( equation, content )
try:
equation = equation.encode( "ISO 8859-1" )
except AttributeError:
pass
return equation
def removeFiles( self, fileList=None ):
"""
"""
for file in fileList:
try:
os.remove( file )
except OSError:
pass
def getNewImageName( self ):
"""
Get a new image name based on the current time. If another
.png image was created at the same time down to the second, a
counter is used to avoid overwriting files. Up to 1000 files
can be created in a second without overwriting...
Returns a filename string without the ".png" extension.
"""
if self.getSlideshow().getConfig().latexCacheEnabled:
dirName = self.getSlideshow().getConfig().latexCacheDir
else:
dirName = self.getSlideshow().getDirName()
name = stub = time.strftime( "%y%m%d%H%M%S", time.localtime() )
num = 0
while 1:
try:
os.stat( "%s/%s.png" % (dirName, name) )
except OSError:
#
# Another file doesn't exist so return the
# available file name. Don't include the
# ".png" extension.
#
return name
name = "%s_%03d" % ( stub, num )
num = num + 1
if num > 999:
return "%s_1000" % stub
return name
def addEquationImage( self, imgName, equation=None ):
"""
"""
cacheDir = self.getSlideshow().getConfig().latexCacheDir
indexPath = "%s/index.dat" % cacheDir
if equation == None:
equation = self.getEquationString()
try:
fp = open( indexPath, "a" )
except IOError:
#
# If the file couldn't be opened, it may be because the
# cache directory hasn't been created yet, so create it and then
# attempt to open the cache index file again.
#
# If it still doesn't open, obviously nothing is saved
# in an index file, although the .png file will still
# sit in the directory.
#
try:
os.makedirs( cacheDir )
except OSError, e:
#
# errno 17 means the directory already existed.
# If this exception was thrown for another
# error, don't try to add the image file to
# the cache.
#
if e.errno != 17:
return
#
# Try to open the file again
#
try:
fp = open( indexPath, "a" )
except IOError:
return
color, bgColor = self.getHexColors()
fp.write( "\n[%s color=%s bgcolor=%s scale=%d]\n" \
% (imgName, color, bgColor, \
self.getSlideshow().getConfig().latexScale) )
fp.write( equation )
fp.write( "\n[/%s]\n" % imgName )
fp.close()
def findEquationImage( self, equation=None ):
"""
"""
import re
imgRegex = re.compile( r"^\[(?P<name>(.)+) " \
"color=(?P<color>" \
"#([A-Za-z0-9]){6}) " \
"bgcolor=(?P<bgcolor>" \
"#([A-Za-z0-9]){6}) " \
"scale=(?P<scale>[0-9]+)\]$" )
cacheDir = self.getSlideshow().getConfig().latexCacheDir
if equation == None:
equation = self.getEquationString()
try:
fp = open( "%s/index.dat" % cacheDir, "r" )
except IOError:
#
# Cache index hasn't been built, meaning no matching
# image exists...
#
return None
imgName = None
line = fp.readline()
while len(line):
match = imgRegex.match( line );
if match:
imgName, color, bgColor, scale = \
match.group( "name",
"color",
"bgcolor",
"scale" )
endRegex = re.compile( "^\[/%s\]$" % imgName )
#
# Make sure the color, bgcolor, and scale
# match the current settings.
#
currColor, currBgColor = self.getHexColors()
currScale = \
self.getSlideshow().getConfig().latexScale
if color != currColor or \
bgColor != currBgColor or \
int(scale) != currScale:
line = fp.readline()
while not endRegex.match(line):
line = fp.readline()
line = fp.readline()
continue
#
# Read in the equation from the cache file.
#
imgEq = ""
line = fp.readline()
while len(line) and not endRegex.match(line):
imgEq = "%s%s" % ( imgEq, line )
line = fp.readline()
#
# Find if the read equation matches.
#
if len(imgEq) and imgEq[-1] == "\n":
imgEq = imgEq[:-1]
if imgEq == equation:
fp.close()
return "%s/%s" % ( cacheDir, imgName )
line = fp.readline()
fp.close()
return None
def cacheObjects( self ):
"""
"""
#
# Convert the LaTeX equation to a .png file.
#
config = self.getSlideshow().getConfig()
if config.latexCacheEnabled:
imgFile = self.findEquationImage()
if imgFile == None:
dirName = config.latexCacheDir
imgFile = self.createEquationImage( dirName )
else:
dirName = self.getSlideshow().getDirName()
imgFile = self.createEquationImage( dirName )
if imgFile != None and len(imgFile):
imgFile = os.path.abspath( imgFile )
self.setProperty( "src", imgFile )
else:
self.setProperty( "src", "images/broken_equation.png" )
#
# Load .png file into memory.
#
img.cacheObjects( self )
#
# If the LaTeX cache is disabled, queue the .png file to be
# deleted later since the equation has been loaded into memory.
#
if not config.latexCacheEnabled:
self.getSlideshow().addFileToRemove( imgFile )
def getHtml( self ):
"""
Get the HTML associated with this object.
Returns a list of html strings, with each entry being a line
in a html file.
"""
#
# Set the proper size to the equation image.
#
self.setSize()
return img.getHtml( self )
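#
# For reference, the cache index file written by addEquationImage() and
# scanned by findEquationImage() stores one block per cached image; an
# index.dat entry looks like the following (illustrative values only):
#
# [040812143059.png color=#000000 bgcolor=#ffffff scale=4]
# \int_0^\infty \frac{1}{x^2} \; \mbox{d}x
# [/040812143059.png]
#
# An image is only reused when its color, bgcolor, and scale attributes all
# match the current settings and the stored equation text is identical.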
|
|
import asynchat
import errno
import socket
import struct
import RPCProto
import string
import sys
from xdrlib import Packer, Unpacker
from traceback import print_exc
from SocketServer import ThreadingTCPServer
# FIXME: get rid of these...
VERSION = RPCProto.RPC_VERSION
CALL = RPCProto.CALL
REPLY = RPCProto.REPLY
AUTH_NONE = RPCProto.AUTH_NONE
AUTH_SYS = RPCProto.AUTH_SYS
AUTH_SHORT = RPCProto.AUTH_SHORT
MSG_ACCEPTED = RPCProto.MSG_ACCEPTED
MSG_DENIED = RPCProto.MSG_DENIED
RPC_MISMATCH = RPCProto.RPC_MISMATCH # RPC version number != 2
AUTH_ERROR = RPCProto.AUTH_ERROR # remote can't authenticate caller
SUCCESS = RPCProto.SUCCESS # RPC executed successfully
PROG_UNAVAIL = RPCProto.PROG_UNAVAIL # remote hasn't exported program
PROG_MISMATCH = RPCProto.PROG_MISMATCH # remote can't support version #
PROC_UNAVAIL = RPCProto.PROC_UNAVAIL # program can't support procedure
GARBAGE_ARGS = RPCProto.GARBAGE_ARGS # procedure can't decode params
SYSTEM_ERR = RPCProto.SYSTEM_ERR # errors like memory allocation
AUTH_OK = RPCProto.AUTH_OK # success
AUTH_BADCRED = RPCProto.AUTH_BADCRED # bad credential (seal broken)
AUTH_REJECTEDCRED = RPCProto.AUTH_REJECTEDCRED # client must begin new session
AUTH_BADVERF = RPCProto.AUTH_BADVERF # bad verifier (seal broken)
AUTH_REJECTEDVERF = RPCProto.AUTH_REJECTEDVERF # verifier expired or replayed
AUTH_TOOWEAK = RPCProto.AUTH_TOOWEAK # rejected for security reasons
AUTH_INVALIDRESP = RPCProto.AUTH_INVALIDRESP # bogus response verifier
AUTH_FAILED = RPCProto.AUTH_FAILED # reason unknown
PROC_NULL = 0
NULL_AUTH = RPCProto.opaque_auth()
NULL_AUTH.flavor = RPCProto.AUTH_NONE
NULL_AUTH.body = ''
def parse_frag_len(data):
if len(data) < 4:
raise EOFError, "no fraglen"
fraglen = struct.unpack('>L', data[:4])[0]
lastfrag = fraglen & 0x80000000
fraglen = fraglen & 0x7fffffff
return (fraglen, lastfrag)
def writefrags(message, write):
"""Fragments message and writes the fragments using write.
This procedure consumes message, so caller should
save a copy if needed.
"""
# TODO: use StringIO
while message:
frag = message[0:0x7fffffff]
fraglen = len(frag)
message = message[fraglen:]
if not message:
fraglen = fraglen | 0x80000000
fraglen = struct.pack('>L', fraglen)
write(fraglen)
write(frag)
def readfrags(read):
"""Reads fragments using read and returns the assembled message.
Raises EOFError if unable to read the whole message.
"""
# TODO: use StringIO
message = ''
while 1:
fraglen = read(4)
(fraglen, lastfrag) = parse_frag_len(fraglen)
frag = read(fraglen)
if len(frag) < fraglen:
raise EOFError, "frag too short"
message += frag
if lastfrag:
return message
raise AssertionError, "should not get here"
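# A quick worked example of the record-marking format handled by
# parse_frag_len(), writefrags(), and readfrags(): each fragment is
# preceded by a 4-byte big-endian header whose high bit flags the last
# fragment and whose low 31 bits give the fragment length. A 3-byte
# message 'abc' sent as a single (last) fragment is therefore framed as
#   struct.pack('>L', 0x80000000 | 3) + 'abc'   # '\x80\x00\x00\x03abc'
# and parse_frag_len() decodes that header back to (3, 0x80000000).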
class UnpackException(Exception):
pass
class ReplyException(Exception):
"""An exception that carries a reply packet"""
def __init__(self, message, reply):
Exception.__init__(self)
self.message = message
self.reply = reply
def __str__(self):
return self.message
def pack_reply(xid, *args):
"""Packs an RPC reply from a variable-length arg list (args):
MSG_ACCEPTED, verf, (SUCCESS | PROG_MISMATCH, low, hi | PROG_UNAVAIL
| PROC_UNAVAIL | GARBAGE_ARGS | SYSTEM_ERR)
MSG_DENIED, (RPC_MISMATCH, hi, low | AUTH_ERROR, auth_stat)
verf is an auth of the form (flavor, value)
Returns an xdrlib.Packer that the caller can use to add data,
such as the results of a SUCCESSful call.
"""
arg = list(args) # need a mutable list for pop()
msg = RPCProto.rpc_msg()
msg.xid = xid
msg.body = RPCProto.body_t()
msg.body.mtype = RPCProto.REPLY
msg.body.rbody = reply = RPCProto.reply_body()
reply.stat = reply_stat = arg.pop(0)
if reply_stat == MSG_ACCEPTED:
reply.areply = RPCProto.accepted_reply()
reply.areply.verf = verf = arg.pop(0)
reply.areply.reply_data = RPCProto.reply_data_t()
reply.areply.reply_data.stat = accept_stat = arg.pop(0)
if accept_stat == PROG_MISMATCH:
reply.areply.reply_data.mismatch_info = RPCProto.mismatch_info_t()
reply.areply.reply_data.mismatch_info.low = arg.pop(0)
reply.areply.reply_data.mismatch_info.high = arg.pop(0)
elif (accept_stat == SUCCESS):
reply.areply.reply_data.results = '' # FIXME?
elif (accept_stat == PROG_UNAVAIL or
accept_stat == PROC_UNAVAIL or
accept_stat == GARBAGE_ARGS or
accept_stat == SYSTEM_ERR):
pass
else:
raise ValueError("unknown accept_stat: %u" % accept_stat)
elif reply_stat == MSG_DENIED:
reply.rreply = RPCProto.rejected_reply()
reply.rreply.stat = reject_stat = arg.pop(0)
if reject_stat == RPC_MISMATCH:
reply.rreply.mismatch_info = RPCProto.mismatch_info_t()
reply.rreply.mismatch_info.low = arg.pop(0)
reply.rreply.mismatch_info.high = arg.pop(0)
elif reject_stat == AUTH_ERROR:
reply.rreply.astat = arg.pop(0)
else:
raise ValueError("unknown reject_stat: %u" % reject_stat)
else:
raise ValueError("unknown reply_stat: %u" % reply_stat)
p = Packer()
RPCProto.pack_rpc_msg(p, msg)
return p
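# Illustrative use of pack_reply() (the names below are placeholders): a
# successful reply is packed by passing the accept path and then appending
# the procedure's results to the returned Packer, e.g.
#   p = pack_reply(xid, MSG_ACCEPTED, NULL_AUTH, SUCCESS)
#   p.pack_uint(result_value)
#   message = p.get_buffer()
# while a version-mismatch reply would be
#   pack_reply(xid, MSG_ACCEPTED, NULL_AUTH, PROG_MISMATCH, low_vers, high_vers)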
def check(expected, actual, name, replyf=None):
"""If expected is not None, checks whether expected equals actual,
and if not, raises an exception (see code for the message).
If replyf is None, the exception is an UnpackException.
Otherwise, replyf must be a function that takes no arguments
and returns an RPC reply (a string), and the exception is a
ReplyException containing the output of the function.
"""
if expected is not None and expected != actual:
if replyf:
raise ReplyException("Expected %s %s, but got %s"
% (name, expected, actual), replyf())
else:
raise UnpackException("Expected %s %s, but got %s"
% (name, expected, actual))
def unpack_reply(response, myxid=None, myreply_stat=MSG_ACCEPTED,
myverf=NULL_AUTH, myaccept_stat=SUCCESS,
myreject_stat=None, myauth_stat=None):
"""Unpacks an RPC reply and returns a variable-length arg list
of the same form as the argument to pack_reply, but for SUCCESS also
returns an xdrlib.Unpacker as the final element of the list
that the caller can use to unpack the results of the call.
If values are given for any myXXX arguments, checks that those
values match the unpacked XXX values. Default myXXX values assume
success with no authentication.
Raises UnpackException on any errors or mismatches.
"""
u = Unpacker(response)
msg = RPCProto.unpack_rpc_msg(u)
check(myxid, msg.xid, "xid")
if msg.body.mtype == RPCProto.CALL:
raise UnpackException("Expected reply, but got call")
reply = msg.body.rbody
check(myreply_stat, reply.stat, "reply_stat")
retval = [msg.xid, reply.stat]
if reply.stat == RPCProto.MSG_ACCEPTED:
check(myverf, reply.areply.verf, "verf")
retval.append(reply.areply.verf)
accept_stat = reply.areply.reply_data.stat
check(myaccept_stat, accept_stat, "accept_stat")
retval.append(accept_stat)
if accept_stat == RPCProto.SUCCESS:
retval.append(u)
elif accept_stat == RPCProto.PROG_MISMATCH:
retval.append(reply.areply.reply_data.mismatch_info.low)
retval.append(reply.areply.reply_data.mismatch_info.high)
elif (accept_stat == RPCProto.PROG_UNAVAIL or
accept_stat == RPCProto.PROC_UNAVAIL or
accept_stat == RPCProto.GARBAGE_ARGS or
accept_stat == RPCProto.SYSTEM_ERR):
pass
else:
raise UnpackException("unknown accept_stat: %u" % accept_stat)
elif reply.stat == RPCProto.MSG_DENIED:
reject_stat = reply.rreply.stat
check(myreject_stat, reject_stat, "reject_stat")
retval.append(reject_stat)
if reject_stat == RPCProto.RPC_MISMATCH:
retval.append(reply.rreply.mismatch_info.low)
retval.append(reply.rreply.mismatch_info.high)
elif reject_stat == RPCProto.AUTH_ERROR:
check(myauth_stat, reply.rreply.astat, "auth_stat")
retval.append(reply.rreply.astat)
else:
raise UnpackException("unknown reject_stat: %u" % reject_stat)
else:
raise UnpackException("unknown reply_stat: %u" % reply.stat)
return retval
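# For a successful call unpack_reply() therefore returns
#   [xid, MSG_ACCEPTED, verf, SUCCESS, unpacker]
# and a caller typically finishes with something along the lines of
#   u = unpack_reply(response)[-1]
#   result = proc.unpack_res(u)   # proc being the relevant Procedure object
# as SClient.__call__ below does.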
def pack_call(xid, prog, vers, proc,
cred=NULL_AUTH, verf=NULL_AUTH):
"""Packs an RPC call message; returns an xdrlib.Packer that
the caller can use to add more data, e.g., the call arguments.
"""
msg = RPCProto.rpc_msg()
msg.xid = xid
msg.body = RPCProto.body_t()
msg.body.mtype = RPCProto.CALL
msg.body.cbody = RPCProto.call_body()
msg.body.cbody.rpcvers = RPCProto.RPC_VERSION
msg.body.cbody.prog = prog
msg.body.cbody.vers = vers
msg.body.cbody.proc = proc
msg.body.cbody.cred = cred
msg.body.cbody.verf = verf
p = Packer()
RPCProto.pack_rpc_msg(p, msg)
return p
def unpack_call(request, myprog=None, myvers=None,
mycred=NULL_AUTH, myverf=NULL_AUTH):
"""Unpacks an RPC call message from request.
Returns (xid, prog, vers, proc, cred, verf, u) if okay,
where u is an xdrlib.Unpacker.
otherwise raises either UnpackException or ReplyException.
If myXXX is not None, checks that XXX == myXXX.
Assumes AUTH_NONE for cred and verf; override with mycred and myverf.
"""
if len(request) < 24:
raise UnpackException("Packet too short (%d bytes)" % len(request))
u = Unpacker(request)
msg = RPCProto.unpack_rpc_msg(u)
if msg.body.mtype == RPCProto.REPLY:
raise UnpackException("Expected call, but got reply")
call = msg.body.cbody
check(RPCProto.RPC_VERSION, call.rpcvers, "RPC version",
lambda: pack_reply(msg.xid,
RPCProto.MSG_DENIED,
RPCProto.RPC_MISMATCH,
RPCProto.RPC_VERSION,
RPCProto.RPC_VERSION).get_buffer())
check(myprog, call.prog, "program",
lambda: pack_reply(msg.xid,
RPCProto.MSG_ACCEPTED,
NULL_AUTH,
RPCProto.PROG_UNAVAIL).get_buffer())
check(myvers, call.vers, "version",
lambda: pack_reply(msg.xid,
RPCProto.MSG_ACCEPTED,
NULL_AUTH,
RPCProto.PROG_MISMATCH,
myvers,
myvers).get_buffer())
check(mycred, call.cred, "cred",
lambda: pack_reply(msg.xid,
RPCProto.MSG_DENIED,
RPCProto.AUTH_ERROR,
RPCProto.AUTH_BADCRED).get_buffer())
check(myverf, call.verf, "verf",
lambda: pack_reply(msg.xid,
RPCProto.MSG_DENIED,
RPCProto.AUTH_ERROR,
RPCProto.AUTH_BADVERF).get_buffer())
return (msg.xid, call.prog, call.vers,
call.proc, call.cred, call.verf, u)
class ReuseTCPServer(ThreadingTCPServer):
def __init__(self, addr, handler):
self.allow_reuse_address = 1
ThreadingTCPServer.__init__(self, addr, handler)
class Server:
def __init__(self, module, PROG, VERS, port, handlers, name=None):
"""If name is not None, Server prints debug messages
prefixed by name."""
assert module is not None
assert 'programs' in dir(module)
assert PROG in module.programs
assert VERS in module.programs[PROG]
for proc in handlers:
assert proc in module.programs[PROG][VERS]
import SocketServer
class StreamHandler(SocketServer.StreamRequestHandler):
def dispatch(self, request):
xid, prog, vers, proc, cred, verf, u = unpack_call(
request, myprog=PROG, myvers=VERS)
if proc in handlers:
if name:
print name + ": Got proc", proc
arg = module.programs[PROG][VERS][proc].unpack_arg(u)
res = handlers[proc](xid, cred, verf, arg)
p = pack_reply(xid, MSG_ACCEPTED, NULL_AUTH, SUCCESS)
module.programs[PROG][VERS][proc].pack_res(p, res)
return p.get_buffer()
else:
# no such procedure
if name:
print name + ": Got unknown proc", proc
return pack_reply(xid, MSG_ACCEPTED, NULL_AUTH,
PROC_UNAVAIL).get_buffer()
def handle(self):
rfile = self.request.makefile('rb', -1)
wfile = self.request.makefile('wb', -1)
if name:
print name + ": Got connection from", self.client_address
while 1:
try:
request = readfrags(rfile.read)
reply = self.dispatch(request)
writefrags(reply, wfile.write)
wfile.flush()
except EOFError:
return
except UnpackException, e:
if name:
print name + ":", e
return
except ReplyException, e:
if name:
print name + ":", e
writefrags(e.reply, wfile.write)
wfile.flush()
except:
if name:
print name + ": Unexpected error:"
print_exc()
return # avoid killing the server
self.handler = StreamHandler
self.port = port
def run(self):
server = ReuseTCPServer(('', self.port), self.handler)
server.serve_forever() # support UDP?
class XidGenerator:
# FIXME: should this use locks?
def __init__(self):
import random, sys
self.xid = random.randint(0, sys.maxint/2)
# FIXME: should this randomize xids?
def next(self):
self.xid += 1
return self.xid
class ClientBase:
def __init__(self, module, PROG, VERS, host, port):
assert module is not None
assert 'programs' in dir(module)
assert PROG in module.programs
assert VERS in module.programs[PROG]
self.PROG = PROG
self.VERS = VERS
self.module = module
self.xidgen = XidGenerator()
self.host = host
self.port = port
def start_connect(self, host, port, cb = None):
raise AssertionError, "This method must be overridden."
def __call__(self, pnum, arg, cb = None):
"""Call proc number pnum with arg.
If answer is immediately available, it will be returned.
Otherwise, None is returned, and cb will be called later."""
raise AssertionError, "This method must be overridden."
class SClient(ClientBase):
def start_connect(self, cb = None):
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.connect((self.host, self.port))
self.rfile = self.sock.makefile('rb', -1)
self.wfile = self.sock.makefile('wb', -1)
return self.sock
# TODO: use exec to define methods on this object for each proc
# in programs[PROG][VERS]. This requires that each generated
# Procedure() object include the proc name as a string.
# Probably a good idea, and for the Program and Version objects,
# too. Also, rename all these to RPCProcedure, RPCProgram,
# RPCVersion.
def write_request(self, request):
writefrags(request, self.wfile.write)
self.wfile.flush()
def read_reply(self):
return readfrags(self.rfile.read)
def __call__(self, pnum, arg, cb = None):
proc = self.module.programs[self.PROG][self.VERS][pnum]
p = pack_call(self.xidgen.next(), self.PROG, self.VERS, pnum)
proc.pack_arg(p, arg)
request = p.get_buffer()
self.write_request(request)
reply = self.read_reply()
u = unpack_reply(reply)[-1]
return proc.unpack_res(u)
class strbuf:
"""Unlike stringio, always append to the end of string, read from front."""
def __init__(self): self.s = ''
def write(self, s): self.s += s
def read(self, n):
# Slicing past the end of string returns '', working out well.
v = self.s[:n]
self.s = self.s[n:]
return v
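# Minimal illustration of strbuf: writes append, reads consume from the
# front, and reading past the end simply returns whatever is left.
#   b = strbuf()
#   b.write('abc'); b.write('def')
#   b.read(4)    # -> 'abcd'
#   b.read(10)   # -> 'ef'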
class AClient(ClientBase,asynchat.async_chat):
def __init__(self, module, PROG, VERS, host, port):
# A table of callbacks to be called, keyed by xid.
self.xidcbmap = {}
self.inbuffer = ''
self.fragments = []
self.bytesleft = 0 # until end of current fragment.
asynchat.async_chat.__init__(self)
ClientBase.__init__(self, module, PROG, VERS, host, port)
def handle_connect(self):
err = self.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR)
if err == errno.ECONNREFUSED:
self.connect_cb(None)
else:
self.connect_cb(self)
def start_connect(self, cb = None):
if cb is None:
raise TypeError, "Must pass cb to async client"
self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
self.connect_cb = cb
self.connect((self.host, self.port))
self.set_terminator(None)
return None
def handle_reply(self):
reply = ''.join(self.fragments)
self.fragments = []
u = unpack_reply(reply)
# print "Reply for xid %x" % u[0]
try:
(cb, proc) = self.xidcbmap[u[0]]
del self.xidcbmap[u[0]]
except KeyError:
sys.stderr.write("Reply for unknown xid %x received: %s" %
(u[0], str(u[1:])))
return
if not cb:
return
# XXX should really return some useful info to cb if error case
# either if denied, or if some weird bug like PROG_UNAVAIL.
if u[1] == RPCProto.MSG_ACCEPTED:
res = proc.unpack_res(u[-1])
cb(res)
else:
cb(None)
def collect_incoming_data(self, data):
if len(self.inbuffer) > 0:
data = self.inbuffer + data
(fraglen, lastfrag) = parse_frag_len(data)
# print "a", fraglen, lastfrag, len(data)
while 4 + fraglen <= len(data):
frag = data[4:4+fraglen]
self.fragments.append(frag)
if lastfrag:
self.handle_reply()
data = data[4+fraglen:]
if len(data) > 0:
(fraglen, lastfrag) = parse_frag_len(data)
# print "b", fraglen, lastfrag, len(data)
# else:
# print "c"
self.inbuffer = data
def found_terminator(self):
raise AssertionError, "We don't use terminators."
def __call__(self, pnum, arg, cb = None):
proc = self.module.programs[self.PROG][self.VERS][pnum]
xid = self.xidgen.next()
# print "Call for xid %x" % xid
p = pack_call(xid, self.PROG, self.VERS, pnum)
proc.pack_arg(p, arg)
request = p.get_buffer()
val = strbuf()
writefrags(request, val.write)
self.push(val.s)
self.xidcbmap[xid] = (cb, proc)
return None
|
|
#!/usr/bin/env python
# coding:utf-8
import platform
import env_info
import urlparse
import json
import os
import re
import subprocess
import cgi
import urllib2
import sys
import datetime
import locale
import time
import hashlib
from xlog import getLogger
xlog = getLogger("gae_proxy")
from config import config
from appids_manager import appid_manager
from google_ip import google_ip
from google_ip_range import ip_range
from connect_manager import https_manager
from scan_ip_log import scan_ip_log
import ConfigParser
import connect_control
import ip_utils
import check_local_network
import check_ip
import cert_util
import simple_http_server
import test_appid
from http_dispatcher import http_dispatch
import openssl_wrap
os.environ['HTTPS_PROXY'] = ''
current_path = os.path.dirname(os.path.abspath(__file__))
root_path = os.path.abspath(os.path.join(current_path, os.pardir, os.pardir))
top_path = os.path.abspath(os.path.join(root_path, os.pardir, os.pardir))
web_ui_path = os.path.join(current_path, os.path.pardir, "web_ui")
import yaml
class User_special(object):
def __init__(self):
self.appid = ''
self.password = ''
self.proxy_enable = "0"
self.proxy_type = "HTTP"
self.proxy_host = ""
self.proxy_port = ""
self.proxy_user = ""
self.proxy_passwd = ""
self.host_appengine_mode = "gae"
self.auto_adjust_scan_ip_thread_num = 1
self.scan_ip_thread_num = 0
self.use_ipv6 = 0
class User_config(object):
user_special = User_special()
def __init__(self):
self.load()
def load(self):
ConfigParser.RawConfigParser.OPTCRE = re.compile(r'(?P<option>[^=\s][^=]*)\s*(?P<vi>[=])\s*(?P<value>.*)$')
self.DEFAULT_CONFIG = ConfigParser.ConfigParser()
DEFAULT_CONFIG_FILENAME = os.path.abspath( os.path.join(current_path, 'proxy.ini'))
self.USER_CONFIG = ConfigParser.ConfigParser()
CONFIG_USER_FILENAME = os.path.abspath( os.path.join(top_path, 'data', 'gae_proxy', 'config.ini'))
try:
if os.path.isfile(DEFAULT_CONFIG_FILENAME):
self.DEFAULT_CONFIG.read(DEFAULT_CONFIG_FILENAME)
self.user_special.scan_ip_thread_num = self.DEFAULT_CONFIG.getint('google_ip', 'max_scan_ip_thread_num')
else:
return
if os.path.isfile(CONFIG_USER_FILENAME):
self.USER_CONFIG.read(CONFIG_USER_FILENAME)
else:
return
try:
self.user_special.appid = self.USER_CONFIG.get('gae', 'appid')
self.user_special.password = self.USER_CONFIG.get('gae', 'password')
except:
pass
try:
self.user_special.host_appengine_mode = self.USER_CONFIG.get('hosts', 'appengine.google.com')
except:
pass
try:
self.user_special.scan_ip_thread_num = config.CONFIG.getint('google_ip', 'max_scan_ip_thread_num')
except:
self.user_special.scan_ip_thread_num = self.DEFAULT_CONFIG.getint('google_ip', 'max_scan_ip_thread_num')
try:
self.user_special.auto_adjust_scan_ip_thread_num = config.CONFIG.getint('google_ip', 'auto_adjust_scan_ip_thread_num')
except:
pass
try:
self.user_special.use_ipv6 = config.CONFIG.getint('google_ip', 'use_ipv6')
except:
pass
self.user_special.proxy_enable = self.USER_CONFIG.get('proxy', 'enable')
self.user_special.proxy_type = self.USER_CONFIG.get('proxy', 'type')
self.user_special.proxy_host = self.USER_CONFIG.get('proxy', 'host')
self.user_special.proxy_port = self.USER_CONFIG.get('proxy', 'port')
self.user_special.proxy_user = self.USER_CONFIG.get('proxy', 'user')
self.user_special.proxy_passwd = self.USER_CONFIG.get('proxy', 'passwd')
except Exception as e:
xlog.warn("User_config.load except:%s", e)
def save(self):
CONFIG_USER_FILENAME = os.path.abspath( os.path.join(top_path, 'data', 'gae_proxy', 'config.ini'))
try:
f = open(CONFIG_USER_FILENAME, 'w')
if self.user_special.appid != "":
f.write("[gae]\n")
f.write("appid = %s\n" % self.user_special.appid)
f.write("password = %s\n\n" % self.user_special.password)
f.write("[proxy]\n")
f.write("enable = %s\n" % self.user_special.proxy_enable)
f.write("type = %s\n" % self.user_special.proxy_type)
f.write("host = %s\n" % self.user_special.proxy_host)
f.write("port = %s\n" % self.user_special.proxy_port)
f.write("user = %s\n" % self.user_special.proxy_user)
f.write("passwd = %s\n\n" % self.user_special.proxy_passwd)
"""
if self.user_special.host_appengine_mode != "gae":
f.write("[hosts]\n")
f.write("appengine.google.com = %s\n" % self.user_special.host_appengine_mode)
f.write("www.google.com = %s\n\n" % self.user_special.host_appengine_mode)
"""
f.write("[google_ip]\n")
if int(self.user_special.auto_adjust_scan_ip_thread_num) != self.DEFAULT_CONFIG.getint('google_ip', 'auto_adjust_scan_ip_thread_num'):
f.write("auto_adjust_scan_ip_thread_num = %d\n\n" % int(self.user_special.auto_adjust_scan_ip_thread_num))
if int(self.user_special.scan_ip_thread_num) != self.DEFAULT_CONFIG.getint('google_ip', 'max_scan_ip_thread_num'):
f.write("max_scan_ip_thread_num = %d\n\n" % int(self.user_special.scan_ip_thread_num))
if int(self.user_special.use_ipv6) != self.DEFAULT_CONFIG.getint('google_ip', 'use_ipv6'):
f.write("use_ipv6 = %d\n\n" % int(self.user_special.use_ipv6))
f.close()
except:
xlog.warn("launcher.config save user config fail:%s", CONFIG_USER_FILENAME)
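# For reference, User_config.save() emits a plain INI file along these
# lines (all values below are placeholders); the [google_ip] options are
# only written when they differ from the defaults in proxy.ini:
#   [gae]
#   appid = your-appid
#   password = your-password
#
#   [proxy]
#   enable = 0
#   type = HTTP
#   host =
#   port = 0
#   user =
#   passwd =
#
#   [google_ip]
#   max_scan_ip_thread_num = 20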
user_config = User_config()
def get_openssl_version():
return "%s %s h2:%s" % (openssl_wrap.openssl_version,
openssl_wrap.ssl_version,
openssl_wrap.support_alpn_npn)
def http_request(url, method="GET"):
proxy_handler = urllib2.ProxyHandler({})
opener = urllib2.build_opener(proxy_handler)
try:
req = opener.open(url)
except Exception as e:
xlog.exception("web_control http_request:%s fail:%s", url, e)
return
deploy_proc = None
class ControlHandler(simple_http_server.HttpServerHandler):
def __init__(self, client_address, headers, command, path, rfile, wfile):
self.client_address = client_address
self.headers = headers
self.command = command
self.path = path
self.rfile = rfile
self.wfile = wfile
def do_CONNECT(self):
self.wfile.write(b'HTTP/1.1 403\r\nConnection: close\r\n\r\n')
def do_GET(self):
path = urlparse.urlparse(self.path).path
if path == "/log":
return self.req_log_handler()
elif path == "/status":
return self.req_status_handler()
else:
xlog.debug('GAEProxy Web_control %s %s %s ', self.address_string(), self.command, self.path)
if path == '/deploy':
return self.req_deploy_handler()
elif path == "/config":
return self.req_config_handler()
elif path == "/ip_list":
return self.req_ip_list_handler()
elif path == "/scan_ip":
return self.req_scan_ip_handler()
elif path == "/ssl_pool":
return self.req_ssl_pool_handler()
elif path == "/workers":
return self.req_workers_handler()
elif path == "/download_cert":
return self.req_download_cert_handler()
elif path == "/is_ready":
return self.req_is_ready_handler()
elif path == "/test_ip":
return self.req_test_ip_handler()
elif path == "/check_ip":
return self.req_check_ip_handler()
elif path == "/quit":
connect_control.keep_running = False
data = "Quit"
self.wfile.write(('HTTP/1.1 200\r\nContent-Type: %s\r\nContent-Length: %s\r\n\r\n' % ('text/plain', len(data))).encode())
self.wfile.write(data)
#sys.exit(0)
#quit()
#os._exit(0)
return
elif path.startswith("/wizard/"):
file_path = os.path.abspath(os.path.join(web_ui_path, '/'.join(path.split('/')[1:])))
if not os.path.isfile(file_path):
self.wfile.write(b'HTTP/1.1 404 Not Found\r\n\r\n')
xlog.warn('%s %s %s wizard file %s not found', self.address_string(), self.command, self.path, file_path)
return
if file_path.endswith('.html'):
mimetype = 'text/html'
elif file_path.endswith('.png'):
mimetype = 'image/png'
elif file_path.endswith('.jpg') or file_path.endswith('.jpeg'):
mimetype = 'image/jpeg'
else:
mimetype = 'application/octet-stream'
self.send_file(file_path, mimetype)
return
else:
xlog.warn('Control Req %s %s %s ', self.address_string(), self.command, self.path)
# check for '..', which will leak file
if re.search(r'(\.{2})', self.path) is not None:
self.wfile.write(b'HTTP/1.1 404\r\n\r\n')
xlog.warn('%s %s %s hacking attempt', self.address_string(), self.command, self.path )
return
filename = os.path.normpath('./' + path)
if self.path.startswith(('http://', 'https://')):
data = b'HTTP/1.1 200\r\nCache-Control: max-age=86400\r\nExpires:Oct, 01 Aug 2100 00:00:00 GMT\r\nConnection: close\r\n'
data += b'\r\n'
self.wfile.write(data)
xlog.info('%s "%s %s HTTP/1.1" 200 -', self.address_string(), self.command, self.path)
elif os.path.isfile(filename):
if filename.endswith('.pac'):
mimetype = 'text/plain'
else:
mimetype = 'application/octet-stream'
#self.send_file(filename, mimetype)
else:
self.wfile.write(b'HTTP/1.1 404\r\nContent-Type: text/plain\r\nConnection: close\r\n\r\n404 Not Found')
xlog.info('%s "%s %s HTTP/1.1" 404 -', self.address_string(), self.command, self.path)
def do_POST(self):
try:
refer = self.headers.getheader('Referer')
netloc = urlparse.urlparse(refer).netloc
if not netloc.startswith("127.0.0.1") and not netloc.startswitch("localhost"):
xlog.warn("web control ref:%s refuse", netloc)
return
except:
pass
xlog.debug ('GAEProxy web_control %s %s %s ', self.address_string(), self.command, self.path)
try:
ctype, pdict = cgi.parse_header(self.headers.getheader('content-type'))
if ctype == 'multipart/form-data':
self.postvars = cgi.parse_multipart(self.rfile, pdict)
elif ctype == 'application/x-www-form-urlencoded':
length = int(self.headers.getheader('content-length'))
self.postvars = urlparse.parse_qs(self.rfile.read(length), keep_blank_values=1)
else:
self.postvars = {}
except:
self.postvars = {}
path = urlparse.urlparse(self.path).path
if path == '/deploy':
return self.req_deploy_handler()
elif path == "/config":
return self.req_config_handler()
elif path == "/scan_ip":
return self.req_scan_ip_handler()
elif path.startswith("/importip"):
return self.req_importip_handler()
else:
self.wfile.write(b'HTTP/1.1 404\r\nContent-Type: text/plain\r\nConnection: close\r\n\r\n404 Not Found')
xlog.info('%s "%s %s HTTP/1.1" 404 -', self.address_string(), self.command, self.path)
def req_log_handler(self):
req = urlparse.urlparse(self.path).query
reqs = urlparse.parse_qs(req, keep_blank_values=True)
data = ''
cmd = "get_last"
if reqs["cmd"]:
cmd = reqs["cmd"][0]
if cmd == "set_buffer_size" :
if not reqs["buffer_size"]:
data = '{"res":"fail", "reason":"size not set"}'
mimetype = 'text/plain'
self.send_response_nc(mimetype, data)
return
buffer_size = reqs["buffer_size"][0]
xlog.set_buffer_size(buffer_size)
elif cmd == "get_last":
max_line = int(reqs["max_line"][0])
data = xlog.get_last_lines(max_line)
elif cmd == "get_new":
last_no = int(reqs["last_no"][0])
data = xlog.get_new_lines(last_no)
else:
xlog.error('PAC %s %s %s ', self.address_string(), self.command, self.path)
mimetype = 'text/plain'
self.send_response_nc(mimetype, data)
def get_launcher_version(self):
data_path = os.path.abspath( os.path.join(top_path, 'data', 'launcher', 'config.yaml'))
try:
config = yaml.load(file(data_path, 'r'))
return config["modules"]["launcher"]["current_version"]
#print yaml.dump(config)
except yaml.YAMLError, exc:
print "Error in configuration file:", exc
return "unknown"
@staticmethod
def xxnet_version():
version_file = os.path.join(root_path, "version.txt")
try:
with open(version_file, "r") as fd:
version = fd.read()
return version
except Exception as e:
xlog.exception("xxnet_version fail")
return "get_version_fail"
def get_os_language(self):
if hasattr(self, "lang_code"):
return self.lang_code
try:
lang_code, code_page = locale.getdefaultlocale()
#('en_GB', 'cp1252'), en_US,
self.lang_code = lang_code
return lang_code
except:
#Mac fail to run this
pass
if sys.platform == "darwin":
try:
oot = os.pipe()
p = subprocess.Popen(["/usr/bin/defaults", 'read', 'NSGlobalDomain', 'AppleLanguages'],stdout=oot[1])
p.communicate()
lang_code = os.read(oot[0],10000)
self.lang_code = lang_code
return lang_code
except:
pass
lang_code = 'Unknown'
return lang_code
def req_status_handler(self):
if "user-agent" in self.headers.dict:
user_agent = self.headers.dict["user-agent"]
else:
user_agent = ""
good_ip_num = google_ip.good_ip_num
if good_ip_num > len(google_ip.gws_ip_list):
good_ip_num = len(google_ip.gws_ip_list)
res_arr = {
"sys_platform": "%s, %s" % (platform.machine(), platform.platform()),
"os_system": platform.system(),
"os_version": platform.version(),
"os_release": platform.release(),
"architecture": platform.architecture(),
"os_detail": env_info.os_detail(),
"language": self.get_os_language(),
"browser": user_agent,
"xxnet_version": self.xxnet_version(),
"python_version": platform.python_version(),
"openssl_version": get_openssl_version(),
"proxy_listen": config.LISTEN_IP + ":" + str(config.LISTEN_PORT),
"pac_url": config.pac_url,
"use_ipv6": config.CONFIG.getint("google_ip", "use_ipv6"),
"gae_appid": "|".join(config.GAE_APPIDS),
"working_appid": "|".join(appid_manager.working_appid_list),
"out_of_quota_appids": "|".join(appid_manager.out_of_quota_appids),
"not_exist_appids": "|".join(appid_manager.not_exist_appids),
"network_state": check_local_network.network_stat,
"ip_num": len(google_ip.gws_ip_list),
"good_ip_num": good_ip_num,
"connected_link_new": len(https_manager.new_conn_pool.pool),
"connected_link_used": len(https_manager.gae_conn_pool.pool),
"worker_h1": http_dispatch.h1_num,
"worker_h2": http_dispatch.h2_num,
"is_idle": int(http_dispatch.is_idle()),
"scan_ip_thread_num": google_ip.scan_thread_count,
"ip_quality": google_ip.ip_quality(),
"block_stat": connect_control.block_stat(),
"high_prior_connecting_num": connect_control.high_prior_connecting_num,
"low_prior_connecting_num": connect_control.low_prior_connecting_num,
"high_prior_lock": len(connect_control.high_prior_lock),
"low_prior_lock": len(connect_control.low_prior_lock),
}
data = json.dumps(res_arr, indent=0, sort_keys=True)
self.send_response_nc('text/html', data)
def req_config_handler(self):
req = urlparse.urlparse(self.path).query
reqs = urlparse.parse_qs(req, keep_blank_values=True)
data = ''
appid_updated = False
try:
if reqs['cmd'] == ['get_config']:
data = json.dumps(user_config.user_special, default=lambda o: o.__dict__)
elif reqs['cmd'] == ['set_config']:
appids = self.postvars['appid'][0]
if appids != user_config.user_special.appid:
if appids and google_ip.good_ip_num:
fail_appid_list = test_appid.test_appids(appids)
if len(fail_appid_list):
fail_appid = "|".join(fail_appid_list)
return self.send_response_nc('text/html', '{"res":"fail", "reason":"appid fail:%s"}' % fail_appid)
appid_updated = True
user_config.user_special.appid = appids
user_config.user_special.proxy_enable = self.postvars['proxy_enable'][0]
user_config.user_special.proxy_type = self.postvars['proxy_type'][0]
user_config.user_special.proxy_host = self.postvars['proxy_host'][0]
user_config.user_special.proxy_port = self.postvars['proxy_port'][0]
try:
user_config.user_special.proxy_port = int(user_config.user_special.proxy_port)
except:
user_config.user_special.proxy_port = 0
user_config.user_special.proxy_user = self.postvars['proxy_user'][0]
user_config.user_special.proxy_passwd = self.postvars['proxy_passwd'][0]
user_config.user_special.host_appengine_mode = self.postvars['host_appengine_mode'][0]
use_ipv6 = int(self.postvars['use_ipv6'][0])
if user_config.user_special.use_ipv6 != use_ipv6:
if use_ipv6:
if not check_local_network.check_ipv6():
xlog.warn("IPv6 was enabled, but check failed.")
return self.send_response_nc('text/html', '{"res":"fail", "reason":"IPv6 fail"}')
user_config.user_special.use_ipv6 = use_ipv6
user_config.save()
config.load()
appid_manager.reset_appid()
import connect_manager
connect_manager.load_proxy_config()
connect_manager.https_manager.load_config()
if appid_updated:
http_dispatch.close_all_worker()
google_ip.reset()
check_ip.load_proxy_config()
data = '{"res":"success"}'
self.send_response_nc('text/html', data)
#http_request("http://127.0.0.1:8085/init_module?module=gae_proxy&cmd=restart")
return
except Exception as e:
xlog.exception("req_config_handler except:%s", e)
data = '{"res":"fail", "except":"%s"}' % e
self.send_response_nc('text/html', data)
def req_deploy_handler(self):
global deploy_proc
req = urlparse.urlparse(self.path).query
reqs = urlparse.parse_qs(req, keep_blank_values=True)
data = ''
log_path = os.path.abspath(os.path.join(current_path, os.pardir, "server", 'upload.log'))
time_now = datetime.datetime.today().strftime('%H:%M:%S-%a/%d/%b/%Y')
if reqs['cmd'] == ['deploy']:
appid = self.postvars['appid'][0]
debug = int(self.postvars['debug'][0])
if deploy_proc and deploy_proc.poll() == None:
xlog.warn("deploy is running, request denied.")
data = '{"res":"deploy is running", "time":"%s"}' % time_now
else:
try:
if os.path.isfile(log_path):
os.remove(log_path)
script_path = os.path.abspath(os.path.join(current_path, os.pardir, "server", 'uploader.py'))
args = [sys.executable, script_path, appid]
if debug:
args.append("-debug")
deploy_proc = subprocess.Popen(args)
xlog.info("deploy begin.")
data = '{"res":"success", "time":"%s"}' % time_now
except Exception as e:
data = '{"res":"%s", "time":"%s"}' % (e, time_now)
elif reqs['cmd'] == ['cancel']:
if deploy_proc and deploy_proc.poll() == None:
deploy_proc.kill()
data = '{"res":"deploy is killed", "time":"%s"}' % time_now
else:
data = '{"res":"deploy is not running", "time":"%s"}' % time_now
elif reqs['cmd'] == ['get_log']:
if deploy_proc and os.path.isfile(log_path):
with open(log_path, "r") as f:
content = f.read()
else:
content = ""
status = 'init'
if deploy_proc:
if deploy_proc.poll() == None:
status = 'running'
else:
status = 'finished'
data = json.dumps({'status': status, 'log': content, 'time': time_now})
self.send_response_nc('text/html', data)
def req_importip_handler(self):
req = urlparse.urlparse(self.path).query
reqs = urlparse.parse_qs(req, keep_blank_values=True)
data = ''
if reqs['cmd'] == ['importip']:
count = 0
ip_list = self.postvars['ipList'][0]
addresses = ip_list.split('|')
for ip in addresses:
if not ip_utils.check_ip_valid(ip):
continue
if google_ip.add_ip(ip, 100, "google.com", "gws"):
count += 1
data = '{"res":"%s"}' % count
google_ip.save_ip_list(force=True)
elif reqs['cmd'] == ['exportip']:
data = '{"res":"'
for ip in google_ip.gws_ip_list:
if google_ip.ip_dict[ip]['fail_times'] > 0:
continue
data += "%s|" % ip
data = data[0: len(data) - 1]
data += '"}'
self.send_response_nc('text/html', data)
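    # Illustrative request shapes (assumed, derived from the handler above): a POST with
    # "cmd=importip" and "ipList=1.2.3.4|5.6.7.8" adds each valid address to the pool,
    # while "cmd=exportip" returns the currently fail-free IPs joined by '|'.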
def req_test_ip_handler(self):
req = urlparse.urlparse(self.path).query
reqs = urlparse.parse_qs(req, keep_blank_values=True)
ip = reqs['ip'][0]
result = check_ip.test_gae_ip2(ip)
if not result or not result.support_gae:
            data = '{"res":"fail"}'
        else:
            data = json.dumps({"ip": ip, "handshake": result.handshake_time,
                               "server": result.server_type, "domain": result.domain})
self.send_response_nc('text/html', data)
def req_ip_list_handler(self):
time_now = time.time()
data = "<html><body><div style='float: left; white-space:nowrap;font-family: monospace;'>"
data += "time:%d pointer:%d<br>\r\n" % (time_now, google_ip.gws_ip_pointer)
data += "<table><tr><th>N</th><th>IP</th><th>HS</th><th>Fails</th>"
data += "<th>down_fail</th><th>links</th>"
data += "<th>get_time</th><th>success_time</th><th>fail_time</th><th>down_fail_time</th>"
        data += "<th>data_active</th>"
data += "<th>history</th></tr>\n"
i = 1
for ip in google_ip.gws_ip_list:
handshake_time = google_ip.ip_dict[ip]["handshake_time"]
fail_times = google_ip.ip_dict[ip]["fail_times"]
down_fail = google_ip.ip_dict[ip]["down_fail"]
links = google_ip.ip_dict[ip]["links"]
get_time = google_ip.ip_dict[ip]["get_time"]
if get_time:
get_time = time_now - get_time
success_time = google_ip.ip_dict[ip]["success_time"]
if success_time:
success_time = time_now - success_time
fail_time = google_ip.ip_dict[ip]["fail_time"]
if fail_time:
fail_time = time_now - fail_time
down_fail_time = google_ip.ip_dict[ip]["down_fail_time"]
if down_fail_time:
down_fail_time = time_now - down_fail_time
data_active = google_ip.ip_dict[ip]["data_active"]
if data_active:
active_time = time_now - data_active
else:
active_time = 0
history = google_ip.ip_dict[ip]["history"]
t0 = 0
str_out = ''
for item in history:
t = item[0]
v = item[1]
if t0 == 0:
t0 = t
time_per = int((t - t0) * 1000)
t0 = t
str_out += "%d(%s) " % (time_per, v)
data += "<tr><td>%d</td><td>%s</td><td>%d</td><td>%d</td><td>%d</td><td>%d</td><td>%d</td><td>%d</td><td>%d</td>" \
"<td>%d</td><td>%d</td><td>%s</td></tr>\n" % \
(i, ip, handshake_time, fail_times, down_fail, links, get_time, success_time, fail_time, down_fail_time, \
active_time, str_out)
i += 1
data += "</table></div></body></html>"
mimetype = 'text/html'
self.send_response_nc(mimetype, data)
def req_scan_ip_handler(self):
req = urlparse.urlparse(self.path).query
reqs = urlparse.parse_qs(req, keep_blank_values=True)
data = ""
if reqs['cmd'] == ['get_range']:
data = ip_range.load_range_content()
elif reqs['cmd'] == ['update']:
#update ip_range if needed
content = self.postvars['ip_range'][0]
#check ip_range checksums, update if needed
default_digest = hashlib.md5(ip_range.load_range_content(default=True)).hexdigest()
old_digest = hashlib.md5(ip_range.load_range_content()).hexdigest()
new_digest = hashlib.md5(content).hexdigest()
if new_digest == default_digest:
ip_range.remove_user_range()
else:
if old_digest != new_digest:
ip_range.update_range_content(content)
if old_digest != new_digest:
ip_range.load_ip_range()
#update auto_adjust_scan_ip and scan_ip_thread_num
should_auto_adjust_scan_ip = int(self.postvars['auto_adjust_scan_ip_thread_num'][0])
thread_num_for_scan_ip = int(self.postvars['scan_ip_thread_num'][0])
#update user config settings
user_config.user_special.auto_adjust_scan_ip_thread_num = should_auto_adjust_scan_ip
user_config.user_special.scan_ip_thread_num = thread_num_for_scan_ip
user_config.save()
#update google_ip settings
google_ip.auto_adjust_scan_ip_thread_num = should_auto_adjust_scan_ip
if google_ip.max_scan_ip_thread_num != thread_num_for_scan_ip:
google_ip.adjust_scan_thread_num(thread_num_for_scan_ip)
            # response
            data = '{"res":"success"}'
elif reqs['cmd'] == ['get_scan_ip_log']:
data = scan_ip_log.get_log_content()
mimetype = 'text/plain'
self.send_response_nc(mimetype, data)
def req_ssl_pool_handler(self):
data = "New conn:\n"
data += https_manager.new_conn_pool.to_string()
data += "\nGAE conn:\n"
data += https_manager.gae_conn_pool.to_string()
for host in https_manager.host_conn_pool:
data += "\nHost:%s\n" % host
data += https_manager.host_conn_pool[host].to_string()
mimetype = 'text/plain'
self.send_response_nc(mimetype, data)
def req_workers_handler(self):
data = http_dispatch.to_string()
mimetype = 'text/plain'
self.send_response_nc(mimetype, data)
def req_download_cert_handler(self):
filename = cert_util.CertUtil.ca_keyfile
with open(filename, 'rb') as fp:
data = fp.read()
mimetype = 'application/x-x509-ca-cert'
self.wfile.write(('HTTP/1.1 200\r\nContent-Disposition: inline; filename=CA.crt\r\nContent-Type: %s\r\nContent-Length: %s\r\n\r\n' % (mimetype, len(data))).encode())
self.wfile.write(data)
def req_is_ready_handler(self):
data = "%s" % config.cert_import_ready
mimetype = 'text/plain'
self.send_response_nc(mimetype, data)
def req_check_ip_handler(self):
req = urlparse.urlparse(self.path).query
reqs = urlparse.parse_qs(req, keep_blank_values=True)
data = ""
if reqs['cmd'] == ['get_process']:
all_ip_num = len(google_ip.ip_dict)
left_num = google_ip.scan_exist_ip_queue.qsize()
good_num = google_ip.good_ip_num
data = json.dumps(dict(all_ip_num=all_ip_num, left_num=left_num, good_num=good_num))
self.send_response_nc('text/plain', data)
elif reqs['cmd'] == ['start']:
left_num = google_ip.scan_exist_ip_queue.qsize()
if left_num:
self.send_response_nc('text/plain', '{"res":"fail", "reason":"running"}')
else:
google_ip.start_scan_all_exist_ip()
self.send_response_nc('text/plain', '{"res":"success"}')
elif reqs['cmd'] == ['stop']:
left_num = google_ip.scan_exist_ip_queue.qsize()
if not left_num:
self.send_response_nc('text/plain', '{"res":"fail", "reason":"not running"}')
else:
google_ip.stop_scan_all_exist_ip()
self.send_response_nc('text/plain', '{"res":"success"}')
else:
return self.send_not_exist()
|
|
#!/usr/bin/env python
"""An unofficial Python API for http://kickass.to/
Supports searching and getting popular torrents from the home page.
Search results can be made more precise by using Categories and can
be sorted according to file size, seeders etc.
@author Stephan McLean
@email [email protected]
The MIT License (MIT)
Copyright (c) [2013] [Stephan McLean]
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE."""
import bs4
import requests
import ConfigParser
def _get_soup(page):
"""Return BeautifulSoup object for given page"""
request = requests.get(page)
data = request.text
return bs4.BeautifulSoup(data)
def set_base_url(url):
Search._set_base_url(_format_url(url))
def _format_url(url):
# Make sure base url given is formatted correctly.
if not url.startswith("http://"):
url = "http://" + url
if url.endswith("/"):
# Remove trailing /
url = url[:-1]
return url
class Categories:
ALL = "all"
MOVIES = "movies"
TV = "tv"
ANIME = "anime"
MUSIC = "music"
BOOKS = "books"
APPS = "applications"
GAMES = "games"
XXX = "xxx"
class Sorting:
SIZE = "size"
FILES = "files_count"
AGE = "time_add"
SEED = "seeders"
LEECH = "leechers"
class Order:
ASC = "asc"
DESC = "desc"
class Torrent(object):
"""Represents a torrent as found in KAT search results"""
def __init__(self, title, category, link, size, seed, leech, magnet,
download, files, age, isVerified):
self.title = title
self.category = category
self.page = Search.base_url + link
self.size = size
self.seeders = seed
self.leechers = leech
self._magnet = magnet
self._download = download
self.files = files
self.age = age
self._data = None # bs4 html for getting download & magnet
self.isVerified = isVerified
def print_details(self):
"""Print torrent details"""
print("Title:", self.title)
print("Category:", self.category)
print("Page: ", self.page)
print("Size: ", self.size)
print("Files: ", self.files)
print("Age: ", self.age)
print("Seeds:", self.seeders)
print("Leechers: ", self.leechers)
print("Magnet: ", self.magnet)
print("Download: ", self.download)
print("Verified:", self.isVerified)
@property
def download(self):
if self._download:
return self._download
if self._data:
self._download = self._data.find("a", class_="siteButton giantButton verifTorrentButton").get("href")
return self._download
# No data. Parse torrent page
soup = _get_soup(self.page)
self._download = soup.find("a", class_="siteButton giantButton verifTorrentButton").get("href")
self._data = soup # Store for later
return self._download
@property
def magnet(self):
if self._magnet:
return self._magnet
if self._data:
self._magnet = self._data.find("a", class_="siteButton giantIcon magnetlinkButton").get("href")
return self._magnet
soup = _get_soup(self.page)
self._magnet = soup.find("a", class_="siteButton giantIcon magnetlinkButton").get("href")
self._data = soup
return self._magnet
class Search(object):
"""This class will search for torrents given a search term or by
returning popular torrents from the home page. The results are
of type Torrent and can be iterated over."""
base_url = "http://kat.cr"
search_url = base_url + "/usearch/"
latest_url = base_url+"/new"
def __init__(self):
self.torrents = list()
self._current_page = 1
self.term = None
self.category = None
self.order = None
self.sort = None
self.current_url = None
def search(self, term=None, category=None, pages=1, url=search_url,
sort=None, order=None):
"""Search a given URL for torrent results."""
if not self.current_url:
self.current_url = url
if self.current_url == Search.base_url:
# Searching home page so no formatting
results = self._get_results(self.current_url)
self._add_results(results)
else:
search = self._format_search(term, category)
sorting = self._format_sort(sort, order)
# Now get the results.
for i in range(pages):
results = self._get_results(search + "/" + str(self._current_page) +
"/" + sorting)
self._add_results(results)
self._current_page += 1
self._current_page -= 1
def popular(self, category, sortOption="title"):
self.search(url=Search.base_url)
if category:
self._categorize(category)
self.torrents.sort(key = lambda t: t.__getattribute__(sortOption))
def recent(self, category, pages, sort, order):
self.search(pages=pages, url=Search.latest_url, sort=sort, order=order)
if category:
self._categorize(category)
def _categorize(self, category):
"""Remove torrents with unwanted category from self.torrents"""
self.torrents = [result for result in self.torrents
if result.category == category]
def _format_sort(self, sort, order):
sorting = ""
if sort:
self.sort = sort
sorting = "?field=" + self.sort
if order:
self.order = order
else:
self.order = Sorting.Order.DESC
sorting = sorting + "&sorder=" + self.order
return sorting
def _format_search(self, term, category):
search = self.current_url
if term:
self.term = term
search = self.current_url + term
if category:
self.category = category
search = search + " category:" + category
return search
def page(self, i):
"""Get page i of search results"""
# Need to clear previous results.
self.torrents = list()
self._current_page = i
self.search(term=self.term, category=self.category,
sort=self.sort, order=self.order)
def next_page(self):
"""Get next page of search results."""
self.page(self._current_page + 1)
def _get_results(self, page):
"""Find every div tag containing torrent details on given page,
then parse the results into a list of Torrents and return them"""
soup = _get_soup(page)
details = soup.find_all("tr", class_="odd")
even = soup.find_all("tr", class_="even")
# Join the results
for i in range(len(even)):
details.insert((i * 2)+1, even[i])
return self._parse_details(details)
def _parse_details(self, tag_list):
"""Given a list of tags from either a search page or the
KAT home page parse the details and return a list of
Torrents"""
result = list()
for i, item in enumerate(tag_list):
title = item.find("a", class_="cellMainLink")
title_text = title.text
link = title.get("href")
tds = item.find_all("td", class_="center") # Better name here.
size = tds[0].text
files = tds[1].text
age = tds[2].text
seed = tds[3].text
leech = tds[4].text
magnet = item.find("a", class_="imagnet icon16")
download = item.find("a", class_="idownload icon16")
isVerified = item.find("a", class_="iverify icon16") != None
# Home page doesn't have magnet or download links
if magnet:
magnet = magnet.get("href")
if download:
download = download.get("href")
# Get category changes depending on if we're parsing
# the home page or a search page.
if self.current_url == self.base_url:
category = self._get_torrent_category(item, result=i)
else:
category = self._get_torrent_category(item)
result.append(Torrent(title_text, category, link, size, seed,
leech, magnet, download,files, age, isVerified))
return result
def _get_torrent_category(self, tag, result=None):
"""Given a tag containing torrent details try to find category
of torrent. In search pages the category is found in links of
the form <a href='/tv/'>TV</a> with TV replaced with movies, books
etc. For the home page I will use the result number to
decide the category"""
hrefs = ["/movies/", "/tv/", "/music/", "/games/", "/applications/", "/anime/",
"/books/", "/xxx/"]
category = None
if not result is None: # if result: 0 returns false.
# Searching home page, get category from result number
category = hrefs[result / 10].strip("/")
return category
for item in hrefs:
if tag.select("a[href=" + item + "]"):
category = item.strip("/")
return category
def _add_results(self, results):
for item in results:
self.torrents.append(item)
@staticmethod
def _set_base_url(url):
Search.base_url = url
Search.search_url = url + "/usearch/"
Search.latest_url = url + "/new"
@property
def current_page(self):
return self._current_page
def __iter__(self):
return iter(self.torrents)
def __len__(self):
return len(self.torrents)
def __getitem__(self, k):
return self.torrents[k]
# Functions to be called by user -----------------------------------------
def search(term, category=Categories.ALL, pages=1, sort=None, order=None):
"""Return a search result for term in category. Can also be
sorted and span multiple pages."""
s = Search()
s.search(term=term, category=category, pages=pages, sort=sort, order=order)
return s
def popular(category=None, sortOption = "title"):
"""Return a search result containing torrents appearing
on the KAT home page. Can be categorized. Cannot be
sorted or contain multiple pages"""
s = Search()
s.popular(category, sortOption)
return s
def recent(category=None, pages=1, sort=None, order=None):
"""Return most recently added torrents. Can be sorted and categorized
and contain multiple pages."""
s = Search()
s.recent(category, pages, sort, order)
return s
# -----------------------------------------------------------------------
#module init
if __name__ != '__main__':
config = ConfigParser.ConfigParser()
config.read('conf.cfg')
Search.base_url = config.get('url', 'base_url')
Search.search_url = Search.base_url + "/usearch/"
Search.latest_url = Search.base_url+"/new"
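# A minimal usage sketch (not part of the original module); it assumes the module is
# importable as "kat" and that a reachable mirror is configured in conf.cfg or via
# set_base_url():
#
#   import kat
#   results = kat.search("ubuntu", category=kat.Categories.APPS,
#                        sort=kat.Sorting.SEED, order=kat.Sorting.Order.DESC)
#   for torrent in results:
#       torrent.print_details()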
|
|
import scipy.stats as stats
import matplotlib.pyplot as plt
import MySQLdb
from wsd.database import MySQLDatabase
import matplotlib.cm as cm
from matplotlib.colors import LogNorm, Normalize, BoundaryNorm, PowerNorm
from conf import *
from collections import defaultdict
import cPickle as pickle
import pandas as pd
import numpy as np
import pylab
from scipy.stats.stats import pearsonr,spearmanr,kendalltau
import math
import cPickle as pickle
import powerlaw
import matplotlib.mlab as mlab
import random
from collections import Counter
from operator import itemgetter
from matplotlib import style
style.use('acm-3col-bmh')
import pylab
params = {
'font.family' : 'serif',
'font.serif' : ['Times New Roman'],
'font.size' : 7
}
pylab.rcParams.update(params)
def read_pickle(fpath):
with open(fpath, 'rb') as infile:
obj = pickle.load(infile)
return obj
def write_pickle(fpath, obj):
with open(fpath, 'wb') as outfile:
pickle.dump(obj, outfile, -1)
def plot_counts_category_distributions_ccdf():
category_distributions = read_pickle(HOME+'output/category_counts_distribution.obj')
for i in category_distributions.values():
print len(i)
colors= {'lead':'r','infobox':'b', 'body':'g', 'left-body':'m','navbox':'c', 'counts':'k'}
fig = plt.figure()
ax = fig.add_subplot(111)
for category in ['lead', 'infobox', 'body', 'left-body', 'navbox', 'counts']:
data = category_distributions[category]
data = [x[0] for x in data]
powerlaw.plot_ccdf(data, ax, label=category,color=colors[category])
# further plotting
ax.set_xlabel("Number of clicks n")
ax.set_ylabel("Pr(X>=n)")
plt.legend(fancybox=True, loc=3, ncol=2, prop={'size':4})
#leg = plt.gca().get_legend()
#ltext = leg.get_texts() # all the text.Text instance in the legend
#llines = leg.get_lines()
#plt.setp(ltext, fontsize='small') # the legend text fontsize
#plt.setp(llines, linewidth=1)
plt.tight_layout()
plt.savefig('output/category_counts_distributions.pdf')
data = category_distributions['counts']
data = [int(x[0]) for x in data]
hist, bin_edges = np.histogram(data, 100, density=True)
fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot( bin_edges[:-1],hist, marker='o')
ax.set_xlabel('#Counts')
ax.set_ylabel('#Pages')
ax.set_yscale('log')
ax.set_xscale('log')
plt.legend(fancybox=True, loc=3, prop={'size':4})
plt.tight_layout()
plt.savefig('output/counts_distribution.pdf')
def plot_aggregated_counts_distributions_ccdf():
#agg_distributions = read_pickle(HOME+'output/aggregated_counts_distribution.obj')
#for i in agg_distributions.values():
# print len(i)
colors= {'source_article':'r','target_article':'b'}
labels = {'source_article': 'source article', 'target_article':'target article'}
fig = plt.figure()
ax = fig.add_subplot(111)
#for category in ['source_article', 'target_article']:
# data = agg_distributions[category]
# data = [int(x[0]) for x in data]
# powerlaw.plot_ccdf(data, ax, label=labels[category],color=colors[category])
category_distributions = read_pickle(HOME+'output/category_counts_distribution.obj')
data = category_distributions['counts']
data = [int(x[0]) for x in data]
    # to account for edges that have zero transitions, we subtract the number of transitions from the number of edges in Wikipedia and pad with zeros
number_of_edges = 339463340
listofzeros = [0] * (number_of_edges - len(data))
print len(data)
print len(listofzeros)
zeros = np.zeros((number_of_edges - len(data)))
data = np.append(zeros, data)
#data = data.extend(listofzeros)
print data
#hist, bin_edges = np.histogram(data, bins=100, normed=True)
#ones = np.ones(100)
#ccdf = ones - np.cumsum(data)
#cdf = np.cumsum(hist)
#print cdf
#print ccdf
bins, CDF = powerlaw.cdf(data, survival=True)
plt.plot(bins, CDF)
plt.xscale('symlog')
#powerlaw.plot_cdf(data, ax, label='transitions', color='r')
# further plotting
#ax.set_xlabel(r'Number of transitions $n$')
#ax.set_ylabel(r'$P(X \geq n)$')
plt.legend(fancybox=True, loc='lower left', ncol=1, prop={'size':5})
#leg = plt.gca().get_legend()
#ltext = leg.get_texts() # all the text.Text instance in the legend
#plt.setp(ltext, fontsize='small') # the legend text fontsize
plt.tight_layout()
plt.savefig('output/agg_counts_distributions.pdf', bbox_inches='tight')
# data = agg_distributions['target_article']
# data = [int(x[0]) for x in data]
#
# hist, bin_edges = np.histogram(data, 100, density=True)
# print len(hist)
# print len(hist[:-1])
# print len(bin_edges)
# fig = plt.figure()
# ax = fig.add_subplot(111)
# ax.plot(bin_edges[:-1],hist, marker='o')
# ax.set_xlabel('#Counts')
# ax.set_ylabel('#Pages')
# ax.set_yscale('log')
# ax.set_xscale('log')
# plt.legend(fancybox=True, loc=3, prop={'size':4})
# plt.tight_layout()
# plt.savefig('output/agg_counts_distributions_target.pdf')
#
# data = agg_distributions['source_article']
# data = [int(x[0]) for x in data]
#
# hist, bin_edges = np.histogram(data, 100, density=True)
# fig = plt.figure()
# ax = fig.add_subplot(111)
# ax.plot( bin_edges[:-1],hist, marker='o')
# ax.set_xlabel('#Counts')
# ax.set_ylabel('#Pages')
# ax.set_yscale('log')
# ax.set_xscale('log')
# plt.legend(fancybox=True, loc=3, prop={'size':4})
# plt.tight_layout()
# plt.savefig('output/agg_counts_distributions_source.pdf')
def plot_counts_frequency():
fig = plt.figure()
ax = fig.add_subplot(111)
category_distributions = read_pickle(HOME+'output/category_counts_distribution.obj')
data = category_distributions['counts']
data = [int(x[0]) for x in data]
    # to account for edges that have zero transitions, we subtract the number of transitions from the number of edges in Wikipedia and pad with zeros
#number_of_edges = 339463340
#zeros = np.zeros((number_of_edges - len(data)))
#data = np.append(zeros, data)
#bins = [0,11]
#bins.extend(np.linspace(100,10000))
#data = data.extend(listofzeros)
#print data
hist, bin_edges = np.histogram(data, bins=10000)
#print len(hist)
#print len(bin_edges)
print hist, bin_edges
ax.set_yscale('log')
ax.set_xscale('log')
ax.plot(bin_edges[:-1], hist, marker='o', markersize=3, markeredgecolor='none', color='#D65F5F')
#ax.set_ylim([10**0, 10**6])
#ax.set_xlim([10**0, 10**6])
ax.set_xlabel('Number of transitions')
ax.set_ylabel('Frequency')
fig.tight_layout()
fig.savefig( 'output/agg_counts_distributions.pdf', bbox_inches='tight')
def pickle_category_counts_distribution():
results = {}
db = MySQLDatabase(DATABASE_HOST, DATABASE_USER, DATABASE_PASSWORD, DATABASE_NAME)
db_worker_view = db.get_work_view()
cursor = db_worker_view._cursor
for category in ['lead', 'infobox', 'body', 'left-body', 'navbox']:
try:
cursor.execute('select counts from link_features where counts is not null and visual_region=%s;', (category,))
result = cursor.fetchall()
results[category] = result
except MySQLdb.Error, e:
print e
try:
cursor.execute('select counts from clickstream_derived_internal_links;')
result = cursor.fetchall()
results['counts'] = result
except MySQLdb.Error, e:
print e
write_pickle(HOME+'output/category_counts_distribution.obj', results)
def pickle_aggregated_counts_distribution():
db = MySQLDatabase(DATABASE_HOST, DATABASE_USER, DATABASE_PASSWORD, DATABASE_NAME)
db_worker_view = db.get_work_view()
cursor = db_worker_view._cursor
results = {}
try:
cursor.execute('select sum(counts) from clickstream_derived_internal_links group by prev_id;')
result = cursor.fetchall()
results['source_article']=result
except MySQLdb.Error, e:
print e
try:
cursor.execute('select sum(counts) from clickstream_derived_internal_links group by curr_id;')
result = cursor.fetchall()
results['target_article']=result
except MySQLdb.Error, e:
print e
write_pickle(HOME+'output/aggregated_counts_distribution.obj', results)
def trim_to_range(data, xmin=None, xmax=None, **kwargs):
"""
    Removes elements of the data that are below xmin or above xmax (if present)
"""
from numpy import asarray
data = asarray(data)
if xmin:
data = data[data>=xmin]
if xmax:
data = data[data<=xmax]
return data
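# Illustrative example (not in the original script): trim_to_range keeps only the
# values inside the inclusive [xmin, xmax] window, e.g.
#   trim_to_range([1, 5, 10, 50], xmin=5, xmax=20)  ->  array([ 5, 10])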
if __name__ == '__main__':
#pickle_category_counts_distribution()
#pickle_aggregated_counts_distribution()
#plot_counts_category_distributions_ccdf()
#plot_aggregated_counts_distributions_ccdf()
plot_counts_frequency()
|
|
#!/usr/bin/env python3
# Copyright 2013-present Barefoot Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Runs the BMv2 behavioral model simulator with input from an stf file
from subprocess import Popen
from threading import Thread
from glob import glob
import json
import sys
import re
import os
import stat
import tempfile
import shutil
import difflib
import subprocess
import signal
import time
import random
import errno
import socket
from collections import OrderedDict
try:
from scapy.layers.all import *
from scapy.utils import *
except ImportError:
pass
SUCCESS = 0
FAILURE = 1
class TimeoutException(Exception): pass
def signal_handler(signum, frame):
raise TimeoutException("Timed out!")
signal.signal(signal.SIGALRM, signal_handler)
class Options(object):
def __init__(self):
self.binary = None
self.verbose = False
self.preserveTmp = False
self.observationLog = None
self.usePsa = False
def nextWord(text, sep = None):
# Split a text at the indicated separator.
# Note that the separator can be a string.
# Separator is discarded.
spl = text.split(sep, 1)
if len(spl) == 0:
return '', ''
elif len(spl) == 1:
return spl[0].strip(), ''
else:
return spl[0].strip(), spl[1].strip()
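# For example (illustrative only):
#   nextWord("add table1 key:1")  -> ("add", "table1 key:1")
#   nextWord("key:0x10", ":")     -> ("key", "0x10")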
def ByteToHex(byteStr):
return ''.join( [ "%02X " % ord( x ) for x in byteStr ] ).strip()
def convert_packet_bin2hexstr(pkt_bin):
return pkt_bin.convert_to(Raw).load.hex().upper()
def convert_packet_stf2hexstr(pkt_stf_text):
return ''.join(pkt_stf_text.split()).upper()
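# For example, convert_packet_stf2hexstr("ab cd ef") returns "ABCDEF"; the bin2hexstr
# variant produces the same canonical uppercase hex form from a scapy packet, so the
# two sides of an STF "expect" can be compared as plain strings.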
def reportError(*message):
print("***", *message)
class Local(object):
    # object to hold local vars accessible to nested functions
pass
def FindExe(dirname, exe):
dir = os.getcwd()
while len(dir) > 1:
if os.path.isdir(os.path.join(dir, dirname)):
rv = None
rv_time = 0
for dName, sdName, fList in os.walk(os.path.join(dir, dirname)):
if exe in fList:
n=os.path.join(dName, exe)
if os.path.isfile(n) and os.access(n, os.X_OK):
n_time = os.path.getmtime(n)
if n_time > rv_time:
rv = n
rv_time = n_time
if rv is not None:
return rv
dir = os.path.dirname(dir)
return exe
def run_timeout(verbose, args, timeout, stderr):
if verbose:
print("Executing ", " ".join(args))
local = Local()
local.process = None
def target():
procstderr = None
if stderr is not None:
procstderr = open(stderr, "w")
local.process = Popen(args, stderr=procstderr)
local.process.wait()
thread = Thread(target=target)
thread.start()
thread.join(timeout)
if thread.is_alive():
print("Timeout ", " ".join(args), file=sys.stderr)
local.process.terminate()
thread.join()
if local.process is None:
# never even started
reportError("Process failed to start")
return -1
if verbose:
print("Exit code ", local.process.returncode)
return local.process.returncode
timeout = 10 * 60
class ConcurrentInteger(object):
# Generates exclusive integers in a range 0-max
# in a way which is safe across multiple processes.
# It uses a simple form of locking using folder names.
# This is necessary because this script may be invoked
# concurrently many times by make, and we need the many simulator instances
# to use different port numbers.
def __init__(self, folder, max):
self.folder = folder
self.max = max
def lockName(self, value):
return "lock_" + str(value)
def release(self, value):
os.rmdir(self.lockName(value))
def generate(self):
# try 10 times
for i in range(0, 10):
index = random.randint(0, self.max)
file = self.lockName(index)
try:
os.makedirs(file)
return index
except:
time.sleep(1)
continue
return None
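# Illustrative usage (mirrors how run() uses it further below): each concurrent
# invocation grabs a distinct integer to derive a free Thrift port and releases it
# when done.
#
#   concurrent = ConcurrentInteger(os.getcwd(), 1000)
#   rand = concurrent.generate()        # None if no slot could be locked
#   thriftPort = str(9090 + rand)
#   ...
#   concurrent.release(rand)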
class BMV2ActionArg(object):
def __init__(self, name, width):
# assert isinstance(name, str)
# assert isinstance(width, int)
self.name = name
self.width = width
class TableKey(object):
def __init__(self):
self.fields = OrderedDict()
def append(self, name, type):
self.fields[name] = type
def __str__(self):
result = ""
for f in self.fields.keys():
if result != "":
result += " "
result += f + ":" + self.fields[f]
return result
class TableKeyInstance(object):
def __init__(self, tableKey):
assert isinstance(tableKey, TableKey)
self.values = {}
self.key = tableKey
for f,t in tableKey.fields.items():
if t == "ternary":
self.values[f] = "0&&&0"
elif t == "lpm":
self.values[f] = "0/0"
elif t == "exact":
self.values[f] = "0"
elif t == "valid":
self.values[f] = "0"
else:
raise Exception("Unexpected key type " + t)
def set(self, key, value):
        array = re.compile(r"(.*)\$([0-9]+)(.*)")
m = array.match(key)
if m:
key = m.group(1) + "[" + m.group(2) + "]" + m.group(3)
found = False
if key in self.key.fields:
found = True
elif key + '$' in self.key.fields:
key = key + '$'
found = True
elif key + '.$valid$' in self.key.fields:
key = key + '.$valid$'
found = True
elif key.endswith(".valid"):
alt = key[:-5] + "$valid$"
if alt in self.key.fields:
key = alt
found = True
if not found:
for i in self.key.fields:
if i.endswith("." + key) or i.endswith("." + key + "$"):
key = i
found = True
elif key == "valid" and i.endswith(".$valid$"):
key = i
found = True
if not found and key == "valid" and "$valid$" in self.key.fields:
key = "$valid$"
found = True
if not found:
print(self.key.fields)
raise Exception("Unexpected key field " + key)
if self.key.fields[key] == "ternary":
self.values[key] = self.makeMask(value)
elif self.key.fields[key] == "lpm":
self.values[key] = self.makeLpm(value)
else:
self.values[key] = value
def makeMask(self, value):
# TODO -- we really need to know the size of the key to make the mask properly,
# but to find that, we need to parse the headers and header_types from the json
if value.startswith("0x"):
mask = "F"
value = value[2:]
prefix = "0x"
elif value.startswith("0b"):
mask = "1"
value = value[2:]
prefix = "0b"
elif value.startswith("0o"):
mask = "7"
value = value[2:]
prefix = "0o"
else:
raise Exception("Decimal value "+value+" not supported for ternary key")
return value
values = "0123456789abcdefABCDEF*"
replacements = (mask * 22) + "0"
trans = str.maketrans(values, replacements)
m = value.translate(trans)
return prefix + value.replace("*", "0") + "&&&" + prefix + m
def makeLpm(self, value):
if value.find('/') >= 0:
return value
if value.startswith("0x"):
bits_per_digit = 4
elif value.startswith("0b"):
bits_per_digit = 1
elif value.startswith("0o"):
bits_per_digit = 3
else:
value = "0x" + hex(int(value))
bits_per_digit = 4
digits = len(value) - 2 - value.count('*')
return value.replace('*', '0') + "/" + str(digits*bits_per_digit)
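    # Illustrative examples (assuming hex-prefixed STF values):
    #   makeMask("0x0a*") -> "0x0a0&&&0xFF0"   (each '*' digit becomes a wildcard)
    #   makeLpm("0x0a*")  -> "0x0a0/8"         (prefix length = 2 hex digits * 4 bits)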
def __str__(self):
result = ""
for f in self.key.fields:
if result != "":
result += " "
result += self.values[f]
return result
class BMV2ActionArguments(object):
def __init__(self, action):
assert isinstance(action, BMV2Action)
self.action = action
self.values = {}
def set(self, key, value):
found = False
for i in self.action.args:
if key == i.name:
found = True
if not found:
raise Exception("Unexpected action arg " + key)
self.values[key] = value
def __str__(self):
result = ""
for f in self.action.args:
if result != "":
result += " "
result += self.values[f.name]
return result
def size(self):
return len(self.action.args)
class BMV2Action(object):
def __init__(self, jsonAction):
self.name = jsonAction["name"]
self.args = []
for a in jsonAction["runtime_data"]:
arg = BMV2ActionArg(a["name"], a["bitwidth"])
self.args.append(arg)
def __str__(self):
return self.name
def makeArgsInstance(self):
return BMV2ActionArguments(self)
class BMV2Table(object):
def __init__(self, jsonTable):
self.match_type = jsonTable["match_type"]
self.name = jsonTable["name"]
self.key = TableKey()
self.actions = {}
for k in jsonTable["key"]:
name = k["name"]
if name is None:
name = k["target"]
if isinstance(name, list):
name = ""
for t in k["target"]:
if name != "":
name += "."
name += t
self.key.append(name, k["match_type"])
actions = jsonTable["actions"]
action_ids = jsonTable["action_ids"]
for i in range(0, len(actions)):
actionName = actions[i]
actionId = action_ids[i]
self.actions[actionName] = actionId
def __str__(self):
return self.name
def makeKeyInstance(self):
return TableKeyInstance(self.key)
# Represents enough about the program executed to be
# able to invoke the BMV2 simulator, create a CLI file
# and test packets in pcap files.
class RunBMV2(object):
def __init__(self, folder, options, jsonfile):
self.clifile = folder + "/cli.txt"
self.jsonfile = jsonfile
self.stffile = None
self.folder = folder
self.pcapPrefix = "pcap"
self.interfaces = {}
self.expected = {} # for each interface number of packets expected
self.expectedAny = [] # interface on which any number of packets is fine
self.packetDelay = 0
self.options = options
self.json = None
self.tables = []
self.actions = []
self.switchLogFile = "switch.log" # .txt is added by BMv2
self.readJson()
self.cmd_line_args = getattr(options, 'switchOptions', ())
self.target_specific_cmd_line_args = getattr(options, 'switchTargetSpecificOptions', ())
def readJson(self):
with open(self.jsonfile) as jf:
self.json = json.load(jf)
for a in self.json["actions"]:
self.actions.append(BMV2Action(a))
for t in self.json["pipelines"][0]["tables"]:
self.tables.append(BMV2Table(t))
for t in self.json["pipelines"][1]["tables"]:
self.tables.append(BMV2Table(t))
def filename(self, interface, direction):
return self.folder + "/" + self.pcapPrefix + str(interface) + "_" + direction + ".pcap"
def interface_of_filename(self, f):
return int(os.path.basename(f).rstrip('.pcap').lstrip(self.pcapPrefix).rsplit('_', 1)[0])
def do_cli_command(self, cmd):
if self.options.verbose:
print(cmd)
self.cli_stdin.write(bytes(cmd + "\n", encoding='utf8'))
self.cli_stdin.flush()
self.packetDelay = 1
def do_command(self, cmd):
if self.options.verbose and cmd != "":
print("STF Command:", cmd)
first, cmd = nextWord(cmd)
if first == "":
pass
elif first == "add":
self.do_cli_command(self.parse_table_add(cmd))
elif first == "setdefault":
self.do_cli_command(self.parse_table_set_default(cmd))
elif first == "mirroring_add":
# Pass through mirroring_add commands unchanged, with same
# arguments as expected by simple_switch_CLI
self.do_cli_command(first + " " + cmd)
elif first == "mc_mgrp_create" or first == "mc_node_create" or first == "mc_node_associate":
# Pass through multicast group commands unchanged, with
# same arguments as expected by simple_switch_CLI
self.do_cli_command(first + " " + cmd)
elif first == "counter_read" or first == "counter_write":
            # Pass through counter commands unchanged, with
            # same arguments as expected by simple_switch_CLI
self.do_cli_command(first + " " + cmd)
elif first == "register_read" or first == "register_write" or first == "register_reset":
            # Pass through register commands unchanged, with
            # same arguments as expected by simple_switch_CLI
self.do_cli_command(first + " " + cmd)
elif first == "packet":
interface, data = nextWord(cmd)
interface = int(interface)
data = ''.join(data.split())
time.sleep(self.packetDelay)
try:
self.interfaces[interface]._write_packet(bytes.fromhex(data))
except ValueError:
reportError("Invalid packet data", data)
return FAILURE
self.interfaces[interface].flush()
self.packetDelay = 0
elif first == "expect":
interface, data = nextWord(cmd)
interface = int(interface)
data = ''.join(data.split())
if data != '':
self.expected.setdefault(interface, []).append(data)
else:
self.expectedAny.append(interface)
else:
if self.options.verbose:
print("ignoring stf command:", first, cmd)
def parse_table_set_default(self, cmd):
tableName, cmd = nextWord(cmd)
table = self.tableByName(tableName)
actionName, cmd = nextWord(cmd, "(")
action = self.actionByName(table, actionName)
actionArgs = action.makeArgsInstance()
cmd = cmd.strip(")")
while cmd != "":
word, cmd = nextWord(cmd, ",")
k, v = nextWord(word, ":")
actionArgs.set(k, v)
command = "table_set_default " + tableName + " " + actionName
if actionArgs.size():
command += " => " + str(actionArgs)
return command
def parse_table_add(self, cmd):
tableName, cmd = nextWord(cmd)
table = self.tableByName(tableName)
key = table.makeKeyInstance()
actionArgs = None
actionName = None
prio, cmd = nextWord(cmd)
number = re.compile("[0-9]+")
if not number.match(prio):
# not a priority; push back
cmd = prio + " " + cmd
prio = ""
while cmd != "":
if actionName != None:
# parsing action arguments
word, cmd = nextWord(cmd, ",")
k, v = nextWord(word, ":")
actionArgs.set(k, v)
else:
# parsing table key
word, cmd = nextWord(cmd)
if cmd.find("=") >= 0:
# This command retrieves a handle for the key
# This feature is currently not supported, so we just ignore the handle part
cmd = cmd.split("=")[0]
if word.find("(") >= 0:
# found action
actionName, arg = nextWord(word, "(")
action = self.actionByName(table, actionName)
actionArgs = action.makeArgsInstance()
cmd = arg + cmd
cmd = cmd.strip("()")
else:
k, v = nextWord(word, ":")
key.set(k, v)
if prio != "":
# Priorities in BMV2 seem to be reversed with respect to the stf file
# Hopefully 10000 is large enough
prio = str(10000 - int(prio))
command = "table_add " + table.name + " " + action.name + " " + str(key) + " => " + str(actionArgs)
if table.match_type == "ternary":
command += " " + prio
return command
def actionByName(self, table, actionName):
for name, id in table.actions.items():
action = self.actions[id]
if action.name == actionName:
return action
# Try again with suffixes
candidate = None
for name, id in table.actions.items():
action = self.actions[id]
if action.name.endswith(actionName):
if candidate is None:
candidate = action
else:
raise Exception("Ambiguous action name " + actionName + " in " + table.name)
if candidate is not None:
return candidate
raise Exception("No action", actionName, "in table", table)
def tableByName(self, tableName):
originalName = tableName
for t in self.tables:
if t.name == tableName:
return t
# If we can't find that try to match the tableName with a table suffix
candidate = None
for t in self.tables:
if t.name.endswith(tableName):
if candidate == None:
candidate = t
else:
raise Exception("Table name " + tableName + " is ambiguous between " +
candidate.name + " and " + t.name)
if candidate is not None:
return candidate
raise Exception("Could not find table " + tableName)
def interfaceArgs(self):
# return list of interface names suitable for bmv2
result = []
for interface in sorted(self.interfaces):
result.append("-i " + str(interface) + "@" + self.pcapPrefix + str(interface))
return result
def generate_model_inputs(self, stffile):
self.stffile = stffile
with open(stffile) as i:
for line in i:
line, comment = nextWord(line, "#")
first, cmd = nextWord(line)
if first == "packet" or first == "expect":
interface, cmd = nextWord(cmd)
interface = int(interface)
if not interface in self.interfaces:
# Can't open the interfaces yet, as that would block
ifname = self.interfaces[interface] = self.filename(interface, "in")
os.mkfifo(ifname)
return SUCCESS
def check_switch_server_ready(self, proc, thriftPort):
"""While the process is running, we check if the Thrift server has been
started. If the Thrift server is ready, we assume that the switch was
started successfully. This is only reliable if the Thrift server is
started at the end of the init process"""
while True:
if proc.poll() is not None:
return False
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.settimeout(0.5)
result = sock.connect_ex(("localhost", thriftPort))
if result == 0:
return True
def run(self):
if self.options.verbose:
print("Running model")
wait = 0 # Time to wait before model starts running
if self.options.usePsa:
switch = "psa_switch"
switch_cli = "psa_switch_CLI"
else:
switch = "simple_switch"
switch_cli = "simple_switch_CLI"
concurrent = ConcurrentInteger(os.getcwd(), 1000)
rand = concurrent.generate()
if rand is None:
reportError("Could not find a free port for Thrift")
return FAILURE
thriftPort = str(9090 + rand)
rv = SUCCESS
try:
os.remove("/tmp/bmv2-%d-notifications.ipc" % rand)
except OSError:
pass
try:
runswitch = [FindExe("behavioral-model", switch),
"--log-file", self.switchLogFile, "--log-flush",
"--use-files", str(wait), "--thrift-port", thriftPort,
"--device-id", str(rand)] + self.interfaceArgs() + ["../" + self.jsonfile]
if self.cmd_line_args:
runswitch += self.cmd_line_args
if self.target_specific_cmd_line_args:
runswitch += ['--',] + self.target_specific_cmd_line_args
if self.options.verbose:
print("Running", " ".join(runswitch))
sw = subprocess.Popen(runswitch, cwd=self.folder)
def openInterface(ifname):
fp = self.interfaces[interface] = RawPcapWriter(ifname, linktype=0)
fp._write_header(None)
# Try to open input interfaces. Each time, we set a 2 second
# timeout. If the timeout expires we check if the bmv2 process is
# not running anymore. If it is, we check if we have exceeded the
# one minute timeout (exceeding this timeout is very unlikely and
# could mean the system is very slow for some reason). If one of the
# 2 conditions above is met, the test is considered a FAILURE.
start = time.time()
sw_timeout = 60
# open input interfaces
# DANGER -- it is critical that we open these fifos in the same
# order as bmv2, as otherwise we'll deadlock. Would be nice if we
# could open nonblocking.
for interface in sorted(self.interfaces):
ifname = self.interfaces[interface]
while True:
try:
signal.alarm(2)
openInterface(ifname)
signal.alarm(0)
except TimeoutException:
if time.time() - start > sw_timeout:
return FAILURE
if sw.poll() is not None:
return FAILURE
else:
break
# at this point we wait until the Thrift server is ready
# also useful if there are no interfaces
try:
signal.alarm(int(sw_timeout + start - time.time()))
self.check_switch_server_ready(sw, int(thriftPort))
signal.alarm(0)
except TimeoutException:
return FAILURE
time.sleep(0.1)
runcli = [FindExe("behavioral-model", switch_cli), "--thrift-port", thriftPort]
if self.options.verbose:
print("Running", " ".join(runcli))
try:
cli = subprocess.Popen(runcli, cwd=self.folder, stdin=subprocess.PIPE)
self.cli_stdin = cli.stdin
with open(self.stffile) as i:
for line in i:
line, comment = nextWord(line, "#")
self.do_command(line)
cli.stdin.close()
for interface, fp in self.interfaces.items():
fp.close()
# Give time to the model to execute
time.sleep(2)
cli.terminate()
sw.terminate()
sw.wait()
except Exception as e:
cli.terminate()
sw.terminate()
sw.wait()
raise e
# This only works on Unix: negative returncode is
# minus the signal number that killed the process.
if sw.returncode != 0 and sw.returncode != -15: # 15 is SIGTERM
reportError(switch, "died with return code", sw.returncode);
rv = FAILURE
elif self.options.verbose:
print(switch, "exit code", sw.returncode)
cli.wait()
if cli.returncode != 0 and cli.returncode != -15:
reportError("CLI process failed with exit code", cli.returncode)
rv = FAILURE
finally:
try:
os.remove("/tmp/bmv2-%d-notifications.ipc" % rand)
except OSError:
pass
concurrent.release(rand)
if self.options.verbose:
print("Execution completed")
return rv
def comparePacket(self, expected, received):
received = convert_packet_bin2hexstr(received)
expected = convert_packet_stf2hexstr(expected)
strict_length_check = False
if expected[-1] == '$':
strict_length_check = True
expected = expected[:-1]
if len(received) < len(expected):
reportError("Received packet too short", len(received), "vs",
len(expected), "(in units of hex digits)")
reportError("Full expected packet is ", expected)
reportError("Full received packet is ", received)
return FAILURE
for i in range(0, len(expected)):
if expected[i] == "*":
continue;
if expected[i] != received[i]:
reportError("Received packet ", received)
reportError("Packet different at position", i, ": expected", expected[i], ", received", received[i])
reportError("Full expected packet is ", expected)
reportError("Full received packet is ", received)
return FAILURE
if strict_length_check and len(received) > len(expected):
reportError("Received packet too long", len(received), "vs",
len(expected), "(in units of hex digits)")
reportError("Full expected packet is ", expected)
reportError("Full received packet is ", received)
return FAILURE
return SUCCESS
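    # Illustrative matching rules (values are made up): expected "AB**EF" accepts a
    # received "ABCDEF01" because '*' matches any hex digit and extra trailing bytes
    # are allowed, while "AB**EF$" rejects it because the trailing '$' demands an
    # exact length match.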
def showLog(self):
with open(self.folder + "/" + self.switchLogFile + ".txt") as a:
log = a.read()
print("Log file:")
print(log)
def checkOutputs(self):
if self.options.verbose:
print("Comparing outputs")
direction = "out"
for file in glob(self.filename('*', direction)):
interface = self.interface_of_filename(file)
if os.stat(file).st_size == 0:
packets = []
else:
try:
packets = rdpcap(file)
except:
reportError("Corrupt pcap file", file)
self.showLog()
return FAILURE
# Log packets.
if self.options.observationLog:
observationLog = open(self.options.observationLog, 'w')
for pkt in packets:
observationLog.write('%d %s\n' % (
interface,
convert_packet_bin2hexstr(pkt)))
observationLog.close()
# Check for expected packets.
if interface in self.expectedAny:
if interface in self.expected:
                    reportError("Interface " + str(interface) + " has both expected with packets and without")
continue
if interface not in self.expected:
expected = []
else:
expected = self.expected[interface]
if len(expected) != len(packets):
reportError("Expected", len(expected), "packets on port", str(interface),
"got", len(packets))
reportError("Full list of %d expected packets on port %d:"
"" % (len(expected), interface))
for i in range(len(expected)):
reportError(" packet #%2d: %s"
"" % (i+1,
convert_packet_stf2hexstr(expected[i])))
reportError("Full list of %d received packets on port %d:"
"" % (len(packets), interface))
for i in range(len(packets)):
reportError(" packet #%2d: %s"
"" % (i+1,
convert_packet_bin2hexstr(packets[i])))
self.showLog()
return FAILURE
for i in range(0, len(expected)):
cmp = self.comparePacket(expected[i], packets[i])
if cmp != SUCCESS:
reportError("Packet", i, "on port", str(interface), "differs")
return FAILURE
# remove successfully checked interfaces
if interface in self.expected:
del self.expected[interface]
if len(self.expected) != 0:
# didn't find all the expects we were expecting
reportError("Expected packets on ports",
list(self.expected.keys()), "not received")
return FAILURE
else:
return SUCCESS
def run_model(options, tmpdir, jsonfile, testfile):
bmv2 = RunBMV2(tmpdir, options, jsonfile)
result = bmv2.generate_model_inputs(testfile)
if result != SUCCESS:
return result
result = bmv2.run()
if result != SUCCESS:
return result
result = bmv2.checkOutputs()
return result
######################### main
def usage(options):
print("usage:", options.binary, "[-v] [-p] [-observation-log <file>] <json file> <stf file>");
def main(argv):
options = Options()
options.binary = argv[0]
argv = argv[1:]
while len(argv) > 0 and argv[0][0] == '-':
if argv[0] == "-b":
options.preserveTmp = True
elif argv[0] == "-v":
options.verbose = True
elif argv[0] == "-p":
options.usePsa = True
elif argv[0] == '-observation-log':
if len(argv) == 1:
reportError("Missing argument", argv[0])
usage(options)
sys.exit(1)
options.observationLog = argv[1]
argv = argv[1:]
else:
reportError("Unknown option ", argv[0])
usage(options)
argv = argv[1:]
if len(argv) < 2:
usage(options)
return FAILURE
if not os.path.isfile(argv[0]) or not os.path.isfile(argv[1]):
usage(options)
return FAILURE
tmpdir = tempfile.mkdtemp(dir=".")
result = run_model(options, tmpdir, argv[0], argv[1])
if options.preserveTmp:
print("preserving", tmpdir)
else:
shutil.rmtree(tmpdir)
if options.verbose:
if result == SUCCESS:
print("SUCCESS")
else:
print("FAILURE", result)
return result
if __name__ == "__main__":
sys.exit(main(sys.argv))
|
|
'''
Name: Tim Molleman
Purpose: This script is used to convert MigrantsData(1999-2015)
into a JSON file
'''
import csv
import json
codes_data = [
["af", "AFG", "Afghanistan"],
["ax", "ALA", "Aland Islands"],
["al", "ALB", "Albania"],
["dz", "DZA", "Algeria"],
["as", "ASM", "American Samoa"],
["ad", "AND", "Andorra"],
["ao", "AGO", "Angola"],
["ai", "AIA", "Anguilla"],
["aq", "ATA", "Antarctica"],
["ag", "ATG", "Antigua and Barbuda"],
["ar", "ARG", "Argentina"],
["am", "ARM", "Armenia"],
["aw", "ABW", "Aruba"],
["au", "AUS", "Australia"],
["at", "AUT", "Austria"],
["az", "AZE", "Azerbaijan"],
["bs", "BHS", "Bahamas"],
["bh", "BHR", "Bahrain"],
["bd", "BGD", "Bangladesh"],
["bb", "BRB", "Barbados"],
["by", "BLR", "Belarus"],
["be", "BEL", "Belgium"],
["bz", "BLZ", "Belize"],
["bj", "BEN", "Benin"],
["bm", "BMU", "Bermuda"],
["bt", "BTN", "Bhutan"],
["bo", "BOL", "Bolivia"],
["bq", "BES", "Bonaire, Sint Eustatius and Saba"],
["ba", "BIH", "Bosnia and Herzegovina"],
["bw", "BWA", "Botswana"],
["bv", "BVT", "Bouvet Island"],
["br", "BRA", "Brazil"],
["io", "IOT", "British Indian Ocean Territory"],
["bn", "BRN", "Brunei Darussalam"],
["bg", "BGR", "Bulgaria"],
["bf", "BFA", "Burkina Faso"],
["bi", "BDI", "Burundi"],
["kh", "KHM", "Cambodia"],
["cm", "CMR", "Cameroon"],
["ca", "CAN", "Canada"],
["ci", "CIV", "Ivory Coast"],
["cv", "CPV", "Cape Verde"],
["ky", "CYM", "Cayman Islands"],
["cf", "CAF", "Central African Republic"],
["td", "TCD", "Chad"],
["cl", "CHL", "Chile"],
["cn", "CHN", "China"],
["cx", "CXR", "Christmas Island"],
["cc", "CCK", "Cocos (Keeling) Islands"],
["co", "COL", "Colombia"],
["km", "COM", "Comoros"],
["cg", "COG", "Congo"],
["cd", "COD", "Congo"],
["ck", "COK", "Cook Islands"],
["cr", "CRI", "Costa Rica"],
["ci", "CIV", "Cote d'Ivoire"],
["hr", "HRV", "Croatia"],
["cu", "CUB", "Cuba"],
["cw", "CUW", "Curacao"],
["cy", "CYP", "Cyprus"],
["cz", "CZE", "Czech Republic"],
["dk", "DNK", "Denmark"],
["dj", "DJI", "Djibouti"],
["dm", "DMA", "Dominica"],
["do", "DOM", "Dominican Republic"],
["ec", "ECU", "Ecuador"],
["eg", "EGY", "Egypt"],
["sv", "SLV", "El Salvador"],
["gq", "GNQ", "Equatorial Guinea"],
["er", "ERI", "Eritrea"],
["ee", "EST", "Estonia"],
["et", "ETH", "Ethiopia"],
["fk", "FLK", "Falkland Islands (Malvinas)"],
["fo", "FRO", "Faroe Islands"],
["fj", "FJI", "Fiji"],
["fi", "FIN", "Finland"],
["fr", "FRA", "France"],
["gf", "GUF", "French Guiana"],
["pf", "PYF", "French Polynesia"],
["tf", "ATF", "French Southern Territories"],
["ga", "GAB", "Gabon"],
["gm", "GMB", "Gambia"],
["ge", "GEO", "Georgia"],
["de", "DEU", "Germany"],
["gh", "GHA", "Ghana"],
["gi", "GIB", "Gibraltar"],
["gr", "GRC", "Greece"],
["gl", "GRL", "Greenland"],
["gd", "GRD", "Grenada"],
["gp", "GLP", "Guadeloupe"],
["gu", "GUM", "Guam"],
["gt", "GTM", "Guatemala"],
["gg", "GGY", "Guernsey"],
["gn", "GIN", "Guinea"],
["gw", "GNB", "Guinea-Bissau"],
["gy", "GUY", "Guyana"],
["ht", "HTI", "Haiti"],
["hm", "HMD", "Heard Island and McDonald Islands"],
["va", "VAT", "Holy See (Vatican City State)"],
["hn", "HND", "Honduras"],
["hk", "HKG", "Hong Kong"],
["hu", "HUN", "Hungary"],
["is", "ISL", "Iceland"],
["in", "IND", "India"],
["id", "IDN", "Indonesia"],
["ir", "IRN", "Iran"],
["iq", "IRQ", "Iraq"],
["ie", "IRL", "Ireland"],
["im", "IMN", "Isle of Man"],
["il", "ISR", "Israel"],
["it", "ITA", "Italy"],
["jm", "JAM", "Jamaica"],
["jp", "JPN", "Japan"],
["je", "JEY", "Jersey"],
["jo", "JOR", "Jordan"],
["kz", "KAZ", "Kazakhstan"],
["ke", "KEN", "Kenya"],
["ki", "KIR", "Kiribati"],
["kp", "PRK", "North Korea"],
["kr", "KOR", "South Korea"],
["kw", "KWT", "Kuwait"],
["kg", "KGZ", "Kyrgyzstan"],
["la", "LAO", "Laos"],
["lv", "LVA", "Latvia"],
["lb", "LBN", "Lebanon"],
["ls", "LSO", "Lesotho"],
["lr", "LBR", "Liberia"],
["ly", "LBY", "Libya"],
["li", "LIE", "Liechtenstein"],
["lt", "LTU", "Lithuania"],
["lu", "LUX", "Luxembourg"],
["mo", "MAC", "Macao"],
["mk", "MKD", "Macedonia"],
["mg", "MDG", "Madagascar"],
["mw", "MWI", "Malawi"],
["my", "MYS", "Malaysia"],
["mv", "MDV", "Maldives"],
["ml", "MLI", "Mali"],
["mt", "MLT", "Malta"],
["mh", "MHL", "Marshall Islands"],
["mq", "MTQ", "Martinique"],
["mr", "MRT", "Mauritania"],
["mu", "MUS", "Mauritius"],
["yt", "MYT", "Mayotte"],
["mx", "MEX", "Mexico"],
["fm", "FSM", "Micronesia, Federated States of"],
["md", "MDA", "Moldova"],
["mc", "MCO", "Monaco"],
["mn", "MNG", "Mongolia"],
["me", "MNE", "Montenegro"],
["ms", "MSR", "Montserrat"],
["ma", "MAR", "Morocco"],
["mz", "MOZ", "Mozambique"],
["mm", "MMR", "Myanmar"],
["na", "NAM", "Namibia"],
["nr", "NRU", "Nauru"],
["np", "NPL", "Nepal"],
["nl", "NLD", "Netherlands"],
["nc", "NCL", "New Caledonia"],
["nz", "NZL", "New Zealand"],
["ni", "NIC", "Nicaragua"],
["ne", "NER", "Niger"],
["ng", "NGA", "Nigeria"],
["nu", "NIU", "Niue"],
["nf", "NFK", "Norfolk Island"],
["mp", "MNP", "Northern Mariana Islands"],
["no", "NOR", "Norway"],
["om", "OMN", "Oman"],
["pk", "PAK", "Pakistan"],
["pw", "PLW", "Palau"],
["ps", "PSE", "Palestine, State of"],
["pa", "PAN", "Panama"],
["pg", "PNG", "Papua New Guinea"],
["py", "PRY", "Paraguay"],
["pe", "PER", "Peru"],
["ph", "PHL", "Philippines"],
["pn", "PCN", "Pitcairn"],
["pl", "POL", "Poland"],
["pt", "PRT", "Portugal"],
["pr", "PRI", "Puerto Rico"],
["qa", "QAT", "Qatar"],
["re", "REU", "Reunion"],
["ro", "ROU", "Romania"],
["ru", "RUS", "Russia"],
["rw", "RWA", "Rwanda"],
["bl", "BLM", "Saint Barthelemy"],
["sh", "SHN", "Saint Helena, Ascension and Tristan da Cunha"],
["kn", "KNA", "Saint Kitts and Nevis"],
["lc", "LCA", "Saint Lucia"],
["mf", "MAF", "Saint Martin (French part)"],
["pm", "SPM", "Saint Pierre and Miquelon"],
["vc", "VCT", "Saint Vincent and the Grenadines"],
["ws", "WSM", "Samoa"],
["sm", "SMR", "San Marino"],
["st", "STP", "Sao Tome and Principe"],
["sa", "SAU", "Saudi Arabia"],
["sn", "SEN", "Senegal"],
["rs", "SRB", "Serbia"],
["sc", "SYC", "Seychelles"],
["sl", "SLE", "Sierra Leone"],
["sg", "SGP", "Singapore"],
["sx", "SXM", "Sint Maarten (Dutch part)"],
["sk", "SVK", "Slovakia"],
["si", "SVN", "Slovenia"],
["sb", "SLB", "Solomon Islands"],
["so", "SOM", "Somalia"],
["za", "ZAF", "South Africa"],
["gs", "SGS", "South Georgia and the South Sandwich Islands"],
["ss", "SSD", "South Sudan"],
["es", "ESP", "Spain"],
["lk", "LKA", "Sri Lanka"],
["sd", "SDN", "Sudan"],
["sr", "SUR", "Suriname"],
["sj", "SJM", "Svalbard and Jan Mayen"],
["sz", "SWZ", "Swaziland"],
["se", "SWE", "Sweden"],
["ch", "CHE", "Switzerland"],
["sy", "SYR", "Syria"],
["tw", "TWN", "Taiwan"],
["tj", "TJK", "Tajikistan"],
["tz", "TZA", "Tanzania"],
["th", "THA", "Thailand"],
["tl", "TLS", "Timor-Leste"],
["tg", "TGO", "Togo"],
["tk", "TKL", "Tokelau"],
["to", "TON", "Tonga"],
["tt", "TTO", "Trinidad and Tobago"],
["tn", "TUN", "Tunisia"],
["tr", "TUR", "Turkey"],
["tm", "TKM", "Turkmenistan"],
["tc", "TCA", "Turks and Caicos Islands"],
["tv", "TUV", "Tuvalu"],
["ug", "UGA", "Uganda"],
["ua", "UKR", "Ukraine"],
["ae", "ARE", "United Arab Emirates"],
["gb", "GBR", "United Kingdom"],
["us", "USA", "United States"],
["um", "UMI", "United States Minor Outlying Islands"],
["uy", "URY", "Uruguay"],
["uz", "UZB", "Uzbekistan"],
["vu", "VUT", "Vanuatu"],
["ve", "VEN", "Venezuela"],
["vn", "VNM", "Vietnam"],
["vg", "VGB", "Virgin Islands, British"],
["vi", "VIR", "Virgin Islands, U.S."],
["wf", "WLF", "Wallis and Futuna"],
["eh", "ESH", "Western Sahara"],
["ye", "YEM", "Yemen"],
["zm", "ZMB", "Zambia"],
["zw", "ZWE", "Zimbabwe"] ]
# List that will hold one object per year
data = []
# Open the output JSON file
jsonfile = open('migrants.json', 'w')
'''
Change names of origins if a condition is met
'''
def checkOrigin(origin):
if origin == "Serbia and Kosovo: S/RES/1244 (1999)":
origin = "Kosovo"
if origin == "Russian Federation":
origin = "Russia"
if origin == "Viet Nam":
origin = "Vietnam"
if origin == "Syrian Arab Rep.":
origin = "Syria"
if origin == "Iran (Islamic Rep. of)":
origin = "Iran"
if origin == "The former Yugoslav Rep. of Macedonia":
origin = "Macedonia"
if origin == "Dem. Rep. of the Congo":
origin = "Congo"
if origin == "Venezuela (Bolivarian Republic of)":
origin = "Venezuela"
if origin == "Various/unknown":
origin = "Unknown"
return origin
'''
Fill the data array with objects. Every object has a "year" key and a "countries" key.
"countries" is filled with objects that look like this:
{"country": host_country, "code": alpha3_code, "refugees": []}.
"refugees" is filled with objects that look like this:
{"origin": country_of_origin, "number": number_of_asylum_seekers}.
'''
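# A concrete (hypothetical) example of the resulting structure:
# [
#     {'year': '2006', 'countries': [
#         {'country': 'Austria', 'code': 'AUT',
#          'refugees': [{'origin': 'Afghanistan', 'number': '2045'}]}
#     ]}
# ]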
def createMigrants(year, country, origin, number):
if country == 'Czech Rep.':
country = 'Czech Republic'
if country == "United Kingdom of Great Britain and Northern Ireland":
country = "United Kingdom"
# call checkOrigin function
origin = checkOrigin(origin)
# append year to the dataset if it is not yet present in format {'year': 2006, 'countries': []}
if not any(d['year'] == year for d in data):
data.append({'year': year, 'countries': []})
for i in data:
# check for the right object in data and go on with this object (as 'i')
if year == i['year']:
# if EU country is not yet included in data add it to countries
if not any(d['country'] == country for d in i['countries']):
# first check if there is a corresponding country code in codes_data, and if yes:
for a in codes_data:
if a[2] == country:
                        # append country to the countries array of the object with the right year, in the format:
                        # {'country': 'Austria', 'code': 'AUT', 'refugees': [{'origin': 'Afghanistan', 'number': '2045'}]}
i['countries'].append({'country': country, 'code': a[1], 'refugees': [{'origin': origin, 'number': number}]})
# if the country is already included do the following:
else:
# check to find country in the dataset
for j in i['countries']:
# if found, append new 'refugees' object to the array as {'origin': Syria, 'number': '1976'}
if j['country'] == country:
if not any(p['origin'] == origin for p in j['refugees']):
j['refugees'].append({'origin': origin, 'number': number})
else:
for x in j['refugees']:
if x['origin'] == origin:
x['number'] = str(int(x['number']) + int(number))
# read the csv file with universal newline support
with open('MigrantsData(1999-2015).csv', 'rU') as file:
    reader = csv.reader(file)
    # skip the header row
    next(reader)
# loop over every line in csv and call createMigrants on lines
for row in reader:
if int(row[2]) >= 2006:
year = row[2]
country = row[0]
origin = row[1]
number = row[4]
if number != '*':
createMigrants(year, country, origin, number)
# write the 'data' list to the json file and close it
json.dump(data, jsonfile)
jsonfile.close()
|
|
_input_connector_schema = {
'type': 'object',
'properties': {
'connector_type': {'type': 'string'},
'connector_access': {'type': 'object'}
},
'required': ['connector_type', 'connector_access'],
'additionalProperties': False
}
_result_connector_schema = {
'type': 'object',
'properties': {
'local_result_file': {'type': 'string'},
'connector_type': {'type': 'string'},
'connector_access': {'type': 'object'},
'add_meta_data': {'type': 'boolean'}
},
'required': ['connector_type', 'connector_access', 'local_result_file'],
'additionalProperties': False
}
_tracing_connector_schema = {
'type': 'object',
'properties': {
'connector_type': {'type': 'string'},
'connector_access': {'type': 'object'},
'add_meta_data': {'type': 'boolean'}
},
'required': ['connector_type', 'connector_access'],
'additionalProperties': False
}
_auth = {
'type': 'object',
'properties': {
'auth_type': {'enum': ['basic', 'digest']},
'username': {'type': 'string'},
'password': {'type': 'string'}
},
'required': ['auth_type', 'username', 'password'],
'additionalProperties': False
}
_notification_connector_schema = {
'type': 'object',
'properties': {
'connector_access': {
'type': 'object',
'properties': {
'url': {'type': 'string'},
'json_data': {'type': 'object'},
'ssl_verify': {'type': 'boolean'},
'auth': _auth
},
'required': ['url'],
'additionalProperties': False
},
'add_meta_data': {'type': 'boolean'}
},
'required': ['connector_access'],
'additionalProperties': False
}
_tracing_schema = {
'type': 'object',
'properties': {
'enabled': {
'type': 'boolean'
},
'file_access': {
'enum': ['none', 'short', 'full']
},
'syscall': {
'enum': ['none', 'short', 'full']
},
'tracing_file': {
'anyOf': [
_tracing_connector_schema,
{'type': 'null'}
]
}
},
'required': ['enabled'],
'additionalProperties': False
}
_syscall_filter_condition_one_parameter_schema = {
'type': 'object',
'properties': {
'argument': {'type': 'integer', 'minimum': 0, 'maximum': 6},
'operator': {'enum': ['==', '!=', '<=', '<', '>=', '>']},
'datum_a': {'type': 'integer'}
},
'required': ['argument', 'operator', 'datum_a']
}
_syscall_filter_condition_two_parameter_schema = {
'type': 'object',
'properties': {
'argument': {'type': 'integer', 'minimum': 0, 'maximum': 6},
'operator': {'enum': ['&=']},
'datum_a': {'type': 'integer'},
'datum_b': {'type': 'integer'}
},
'required': ['argument', 'operator', 'datum_a', 'datum_b']
}
_syscall_seccomp_filter_schema = {
'type': 'object',
'properties': {
'syscall': {'type': ['string', 'integer']},
'conditions': {
'type': 'array',
'minItems': 0,
'maxItems': 6,
'items': {
'anyOf': [
_syscall_filter_condition_one_parameter_schema,
_syscall_filter_condition_two_parameter_schema
]
},
},
},
'required': ['syscall'],
'additionalProperties': False
}
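# Illustrative example of a filter item accepted by _syscall_seccomp_filter_schema:
# {'syscall': 'open', 'conditions': [{'argument': 0, 'operator': '==', 'datum_a': 1}]}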
_sandbox_limits_schema = {
'type': 'object',
'properties': {
'cpu_usage': {'type': 'number', 'minimum': 0, 'maximum': 1},
'create_file_size': {'type': 'integer', 'minimum': 0},
'num_open_files': {'type': 'integer', 'minimum': 0},
'heap_size': {'type': 'integer', 'minimum': 0},
'stack_size': {'type': 'integer', 'minimum': 0},
'rss_size': {'type': 'integer', 'minimum': 0},
'child_processes': {'type': 'integer', 'minimum': 0}
},
'additionalProperties': False
}
_sandbox_seccomp_schema = {
'type': 'object',
'properties': {
'mode': {
'enum': ['disabled', 'whitelist', 'blacklist']
},
'filter_items': {
'type': 'array',
'items': _syscall_seccomp_filter_schema
}
},
'required': ['mode'],
'additionalProperties': False
}
_sandbox_schema = {
'type': 'object',
'properties': {
'limits': _sandbox_limits_schema,
'seccomp': _sandbox_seccomp_schema
},
'additionalProperties': False
}
_task_schema = {
'type': 'object',
'properties': {
'tags': {
'type': 'array',
'items': {'type': 'string'}
},
'no_cache': {'type': 'boolean'},
'application_container_description': {
'type': 'object',
'properties': {
'image': {'type': 'string'},
'entry_point': {'type': 'string'},
'registry_auth': {
'anyOf': [{
'type': 'object',
'properties': {
'username': {'type': 'string'},
'password': {'type': 'string'}
},
'required': ['username', 'password'],
'additionalProperties': False
}, {
'type': 'null'
}],
},
'container_ram': {'type': 'number'},
'tracing': _tracing_schema,
'sandbox': _sandbox_schema,
'parameters': {
'anyOf': [
{'type': 'object'},
{'type': 'array'}
]
}
},
'required': ['image', 'container_ram'],
'additionalProperties': False
},
'input_files': {
'type': 'array',
'items': _input_connector_schema
},
'result_files': {
'type': 'array',
'items': {
'anyOf': [
_result_connector_schema,
{'type': 'null'}
]
}
},
'notifications': {
'type': 'array',
'items': _notification_connector_schema
}
},
'required': [
'application_container_description',
'input_files',
'result_files'
],
'additionalProperties': False
}
_tasks_schema = {
'type': 'object',
'properties': {
'tasks': {
'type': 'array',
'items': _task_schema
}
},
'required': ['tasks'],
'additionalProperties': False
}
tasks_schema = {
'anyOf': [
_task_schema,
_tasks_schema
]
}
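# Minimal validation sketch (assumes the third-party "jsonschema" package; the
# task document below is a made-up example that satisfies _task_schema):
#
#     import jsonschema
#     task = {
#         'application_container_description': {'image': 'example/image', 'container_ram': 1024},
#         'input_files': [],
#         'result_files': []
#     }
#     jsonschema.validate(task, tasks_schema)  # raises ValidationError on bad input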
nodes_schema = {
'type': 'object',
'properties': {
'nodes': {
'type': 'array',
'items': {
'anyOf': [{
'type': 'object',
'properties': {
'cluster_node': {'type': 'string'}
},
'required': ['cluster_node'],
'additionalProperties': False
}]
}
}
},
'required': ['nodes'],
'additionalProperties': False
}
tasks_cancel_schema = {
'type': 'object',
'oneOf': [{
'type': 'object',
'properties': {
'_id': {'type': 'string'}
},
'required': ['_id'],
'additionalProperties': False
}, {
'type': 'object',
'properties': {
'tasks': {
'type': 'array',
'items': {
'type': 'object',
'properties': {
'_id': {'type': 'string'}
},
'required': ['_id'],
'additionalProperties': False
}
}
},
'required': ['tasks'],
'additionalProperties': False
}]
}
query_schema = {
'type': 'object',
'properties': {
'aggregate': {
'type': 'array',
'items': {
'anyOf': [{
'type': 'object',
'properties': {
'$match': {}
},
'required': ['$match'],
'additionalProperties': False
}, {
'type': 'object',
'properties': {
'$project': {}
},
'required': ['$project'],
'additionalProperties': False
}, {
'type': 'object',
'properties': {
'$limit': {}
},
'required': ['$limit'],
'additionalProperties': False
}, {
'type': 'object',
'properties': {
'$skip': {}
},
'required': ['$skip'],
'additionalProperties': False
}, {
'type': 'object',
'properties': {
'$count': {}
},
'required': ['$count'],
'additionalProperties': False
}, {
'type': 'object',
'properties': {
'$sort': {}
},
'required': ['$sort'],
'additionalProperties': False
}, {
'type': 'object',
'properties': {
'$unwind': {}
},
'required': ['$unwind'],
'additionalProperties': False
}, {
'type': 'object',
'properties': {
'$group': {}
},
'required': ['$group'],
'additionalProperties': False
}, {
'type': 'object',
'properties': {
'$sample': {}
},
'required': ['$sample'],
'additionalProperties': False
}, {
'type': 'object',
'properties': {
'$replaceRoot': {}
},
'required': ['$replaceRoot'],
'additionalProperties': False
}, {
'type': 'object',
'properties': {
'$addFields': {}
},
'required': ['$addFields'],
'additionalProperties': False
}]
}
}
},
'required': ['aggregate'],
'additionalProperties': False
}
_file_size = {
'type': 'object',
'properties': {
'local_file_path': {'type': 'string'},
'file_size': {'type': 'integer'}
},
'required': ['local_file_path', 'file_size'],
'additionalProperties': False
}
callback_schema = {
'type': 'object',
'properties': {
'callback_key': {'type': 'string'},
'callback_type': {'type': 'number'},
'container_id': {'type': 'string'},
'content': {
'type': 'object',
'properties': {
'state': {'type': 'integer'},
'description': {'type': 'string'},
'exception': {'type': ['string', 'null']},
'telemetry': {
'type': ['object', 'null'],
'properties': {
'max_vms_memory': {'type': 'number'},
'max_rss_memory': {'type': 'number'},
'input_file_sizes': {
'type': 'array',
'items': {
'anyOf': [
{'type': 'null'},
_file_size
]
}
},
'result_file_sizes': {
'type': 'object',
'patternProperties': {
'^[a-zA-Z0-9_-]+$': {
'anyOf': [
{'type': 'null'},
_file_size
],
}
},
'additionalProperties': False
},
'wall_time': {'type': 'number'},
'std_out': {'type': 'string'},
'std_err': {'type': 'string'},
'return_code': {'type': 'integer'}
},
'additionalProperties': False
}
},
'required': ['state', 'description'],
'additionalProperties': False
}
},
'required': [
'callback_key',
'callback_type',
'container_id',
'content'
],
'additionalProperties': False
}
cc_server_config_schema = {
'type': 'object',
'properties': {
'server_web': {
'type': 'object',
'properties': {
'external_url': {'type': 'string'},
'bind_host': {'type': 'string'},
'bind_port': {'type': 'integer'},
'num_workers': {'type': 'integer'}
},
'required': ['external_url', 'bind_host', 'bind_port'],
'additionalProperties': False
},
'server_master': {
'type': 'object',
'properties': {
'external_url': {'type': 'string'},
'bind_host': {'type': 'string'},
'bind_port': {'type': 'integer'},
'scheduling_interval_seconds': {'type': 'integer'}
},
'required': ['external_url', 'bind_host', 'bind_port'],
'additionalProperties': False
},
'server_log': {
'type': 'object',
'properties': {
'external_url': {'type': 'string'},
'bind_host': {'type': 'string'},
'bind_port': {'type': 'integer'},
'log_dir': {'type': 'string'},
'suppress_stdout': {'type': 'boolean'}
},
'required': ['external_url', 'bind_host', 'bind_port'],
'additionalProperties': False
},
'server_files': {
'type': 'object',
'properties': {
'external_url': {'type': 'string'},
'bind_host': {'type': 'string'},
'bind_port': {'type': 'integer'},
'input_files_dir': {'type': 'string'},
'result_files_dir': {'type': 'string'},
'num_workers': {'type': 'integer'}
},
'required': ['external_url', 'bind_host', 'bind_port', 'input_files_dir', 'result_files_dir'],
'additionalProperties': False
},
'mongo': {
'type': 'object',
'properties': {
'username': {'type': 'string'},
'password': {'type': 'string'},
'host': {'type': 'string'},
'port': {'type': 'integer'},
'db': {'type': 'string'}
},
'required': ['username', 'password', 'host', 'port', 'db'],
'additionalProperties': False
},
'docker': {
'type': 'object',
'properties': {
'thread_limit': {'type': 'integer'},
'api_timeout': {'type': 'integer'},
'net': {'type': 'string'},
'docker_machine_dir': {'type': 'string'},
'nodes': {
'type': 'object',
'patternProperties': {
'^[a-zA-Z0-9_-]+$': {
'type': 'object',
'properties': {
'base_url': {'type': 'string'},
'tls': {
'type': 'object',
'properties': {
'verify': {'type': 'string'},
'client_cert': {
'type': 'array',
'items': {'type': 'string'}
},
'assert_hostname': {'type': 'boolean'}
},
'additionalProperties': True
}
},
'required': ['base_url'],
'additionalProperties': False
}
}
}
},
'required': ['thread_limit'],
'additionalProperties': False
},
'defaults': {
'type': 'object',
'properties': {
'application_container_description': {
'type': 'object',
'properties': {
'entry_point': {'type': 'string'}
},
'required': ['entry_point'],
'additionalProperties': False
},
'data_container_description': {
'type': 'object',
'properties': {
'image': {'type': 'string'},
'entry_point': {'type': 'string'},
'container_ram': {'type': 'integer'},
'num_workers': {'type': 'integer'},
'registry_auth': {
'type': 'object',
'properties': {
'username': {'type': 'string'},
'password': {'type': 'string'}
},
'required': ['username', 'password'],
'additionalProperties': False
}
},
'required': ['image', 'entry_point', 'container_ram'],
'additionalProperties': False
},
'inspection_container_description': {
'type': 'object',
'properties': {
'image': {'type': 'string'},
'entry_point': {'type': 'string'},
'registry_auth': {
'type': 'object',
'properties': {
'username': {'type': 'string'},
'password': {'type': 'string'}
},
'required': ['username', 'password'],
'additionalProperties': False
}
},
'required': ['image', 'entry_point'],
'additionalProperties': False
},
'scheduling_strategies': {
'type': 'object',
'properties': {
'container_allocation': {'enum': ['spread', 'binpack']}
},
'required': ['container_allocation'],
'additionalProperties': False
},
'error_handling': {
'type': 'object',
'properties': {
'max_task_trials': {'type': 'integer'},
'node_offline_notification': {
'type': 'object',
'properties': {
'url': {'type': 'string'},
'auth': _auth
},
'required': ['url'],
'additionalProperties': False
}
},
'required': ['max_task_trials'],
'additionalProperties': False
},
'authorization': {
'type': 'object',
'properties': {
'num_login_attempts': {'type': 'integer'},
'block_for_seconds': {'type': 'integer'},
'tokens_valid_for_seconds': {'type': 'integer'}
},
'required': ['num_login_attempts', 'block_for_seconds', 'tokens_valid_for_seconds']
}
},
'required': [
'application_container_description',
'data_container_description',
'inspection_container_description',
'scheduling_strategies',
'error_handling',
'authorization'
],
'additionalProperties': False
}
},
'required': ['server_web', 'server_master', 'server_log', 'mongo', 'docker', 'defaults'],
'additionalProperties': False
}
|
|
# Copyright (C) 2010 Google Inc. All rights reserved.
# Copyright (C) 2010 Gabor Rapcsanyi ([email protected]), University of Szeged
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import logging
import re
import signal
import time
from webkitpy.layout_tests.models import test_expectations
from webkitpy.layout_tests.models import test_failures
_log = logging.getLogger(__name__)
OK_EXIT_STATUS = 0
# This matches what the shell does on POSIX.
INTERRUPTED_EXIT_STATUS = signal.SIGINT + 128
# POSIX limits status codes to 0-255. Normally run-webkit-tests returns the number
# of tests that failed. These indicate exceptional conditions triggered by the
# script itself, so we count backwards from 255 (aka -1) to enumerate them.
#
# FIXME: crbug.com/357866. We really shouldn't return the number of failures
# in the exit code at all.
EARLY_EXIT_STATUS = 251
SYS_DEPS_EXIT_STATUS = 252
NO_TESTS_EXIT_STATUS = 253
NO_DEVICES_EXIT_STATUS = 254
UNEXPECTED_ERROR_EXIT_STATUS = 255
ERROR_CODES = (
INTERRUPTED_EXIT_STATUS,
EARLY_EXIT_STATUS,
SYS_DEPS_EXIT_STATUS,
NO_TESTS_EXIT_STATUS,
NO_DEVICES_EXIT_STATUS,
UNEXPECTED_ERROR_EXIT_STATUS,
)
# In order to avoid colliding with the above codes, we put a ceiling on
# the value returned by num_regressions
MAX_FAILURES_EXIT_STATUS = 101
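# For example, a run with 150 regressions would presumably be reported as
# min(150, MAX_FAILURES_EXIT_STATUS) == 101, keeping the exit code clear of the
# reserved error codes above.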
class TestRunException(Exception):
def __init__(self, code, msg):
self.code = code
self.msg = msg
class TestRunResults(object):
def __init__(self, expectations, num_tests):
self.total = num_tests
self.remaining = self.total
self.expectations = expectations
self.expected = 0
self.expected_failures = 0
self.unexpected = 0
self.unexpected_failures = 0
self.unexpected_crashes = 0
self.unexpected_timeouts = 0
self.tests_by_expectation = {}
self.tests_by_timeline = {}
self.results_by_name = {} # Map of test name to the last result for the test.
self.all_results = [] # All results from a run, including every iteration of every test.
self.unexpected_results_by_name = {}
self.failures_by_name = {}
self.total_failures = 0
self.expected_skips = 0
for expectation in test_expectations.TestExpectations.EXPECTATIONS.values():
self.tests_by_expectation[expectation] = set()
for timeline in test_expectations.TestExpectations.TIMELINES.values():
self.tests_by_timeline[timeline] = expectations.get_tests_with_timeline(timeline)
self.slow_tests = set()
self.interrupted = False
self.keyboard_interrupted = False
self.run_time = 0 # The wall clock time spent running the tests (layout_test_runner.run()).
def add(self, test_result, expected, test_is_slow):
result_type_for_stats = test_result.type
if test_expectations.WONTFIX in self.expectations.model().get_expectations(test_result.test_name):
result_type_for_stats = test_expectations.WONTFIX
self.tests_by_expectation[result_type_for_stats].add(test_result.test_name)
self.results_by_name[test_result.test_name] = test_result
if test_result.type != test_expectations.SKIP:
self.all_results.append(test_result)
self.remaining -= 1
if len(test_result.failures):
self.total_failures += 1
self.failures_by_name[test_result.test_name] = test_result.failures
if expected:
self.expected += 1
if test_result.type == test_expectations.SKIP:
self.expected_skips += 1
elif test_result.type != test_expectations.PASS:
self.expected_failures += 1
else:
self.unexpected_results_by_name[test_result.test_name] = test_result
self.unexpected += 1
if len(test_result.failures):
self.unexpected_failures += 1
if test_result.type == test_expectations.CRASH:
self.unexpected_crashes += 1
elif test_result.type == test_expectations.TIMEOUT:
self.unexpected_timeouts += 1
if test_is_slow:
self.slow_tests.add(test_result.test_name)
class RunDetails(object):
def __init__(self, exit_code, summarized_full_results=None,
summarized_failing_results=None, initial_results=None,
all_retry_results=None, enabled_pixel_tests_in_retry=False):
self.exit_code = exit_code
self.summarized_full_results = summarized_full_results
self.summarized_failing_results = summarized_failing_results
self.initial_results = initial_results
self.all_retry_results = all_retry_results or []
self.enabled_pixel_tests_in_retry = enabled_pixel_tests_in_retry
def _interpret_test_failures(failures):
test_dict = {}
failure_types = [type(failure) for failure in failures]
# FIXME: get rid of all this is_* values once there is a 1:1 map between
# TestFailure type and test_expectations.EXPECTATION.
if test_failures.FailureMissingAudio in failure_types:
test_dict['is_missing_audio'] = True
if test_failures.FailureMissingResult in failure_types:
test_dict['is_missing_text'] = True
if test_failures.FailureMissingImage in failure_types or test_failures.FailureMissingImageHash in failure_types:
test_dict['is_missing_image'] = True
if test_failures.FailureTestHarnessAssertion in failure_types:
test_dict['is_testharness_test'] = True
return test_dict
def summarize_results(port_obj, expectations, initial_results,
all_retry_results, enabled_pixel_tests_in_retry,
only_include_failing=False):
"""Returns a dictionary containing a summary of the test runs, with the following fields:
'version': a version indicator
'fixable': The number of fixable tests (NOW - PASS)
'skipped': The number of skipped tests (NOW & SKIPPED)
'num_regressions': The number of non-flaky failures
'num_flaky': The number of flaky failures
'num_passes': The number of expected and unexpected passes
'tests': a dict of tests -> {'expected': '...', 'actual': '...'}
"""
results = {}
results['version'] = 3
all_retry_results = all_retry_results or []
tbe = initial_results.tests_by_expectation
tbt = initial_results.tests_by_timeline
results['fixable'] = len(tbt[test_expectations.NOW] - tbe[test_expectations.PASS])
# FIXME: Remove this. It is redundant with results['num_failures_by_type'].
results['skipped'] = len(tbt[test_expectations.NOW] & tbe[test_expectations.SKIP])
num_passes = 0
num_flaky = 0
num_regressions = 0
keywords = {}
for expectation_string, expectation_enum in test_expectations.TestExpectations.EXPECTATIONS.iteritems():
keywords[expectation_enum] = expectation_string.upper()
num_failures_by_type = {}
for expectation in initial_results.tests_by_expectation:
tests = initial_results.tests_by_expectation[expectation]
if expectation != test_expectations.WONTFIX:
tests &= tbt[test_expectations.NOW]
num_failures_by_type[keywords[expectation]] = len(tests)
# The number of failures by type.
results['num_failures_by_type'] = num_failures_by_type
tests = {}
for test_name, result in initial_results.results_by_name.iteritems():
expected = expectations.get_expectations_string(test_name)
actual = [keywords[result.type]]
actual_types = [result.type]
if only_include_failing and result.type == test_expectations.SKIP:
continue
if result.type == test_expectations.PASS:
num_passes += 1
if not result.has_stderr and only_include_failing:
continue
elif (result.type != test_expectations.SKIP and
test_name in initial_results.unexpected_results_by_name):
# Loop through retry results to collate results and determine
# whether this is a regression, unexpected pass, or flaky test.
is_flaky = False
has_unexpected_pass = False
for retry_attempt_results in all_retry_results:
# If a test passes on one of the retries, it won't be in the subsequent retries.
if test_name not in retry_attempt_results.results_by_name:
break
retry_result_type = retry_attempt_results.results_by_name[test_name].type
actual.append(keywords[retry_result_type])
actual_types.append(retry_result_type)
if test_name in retry_attempt_results.unexpected_results_by_name:
if retry_result_type == test_expectations.PASS:
# The test failed unexpectedly at first, then passed
# unexpectedly on a subsequent run -> unexpected pass.
has_unexpected_pass = True
else:
# The test failed unexpectedly at first but then ran as
# expected on a subsequent run -> flaky.
is_flaky = True
if len(set(actual)) == 1:
actual = [actual[0]]
actual_types = [actual_types[0]]
if is_flaky:
num_flaky += 1
elif has_unexpected_pass:
num_passes += 1
if not result.has_stderr and only_include_failing:
continue
else:
# Either no retries or all retries failed unexpectedly.
num_regressions += 1
test_dict = {}
rounded_run_time = round(result.test_run_time, 1)
if rounded_run_time:
test_dict['time'] = rounded_run_time
if result.has_stderr:
test_dict['has_stderr'] = True
bugs = expectations.model().get_expectation_line(test_name).bugs
if bugs:
test_dict['bugs'] = bugs
if result.reftest_type:
test_dict.update(reftest_type=list(result.reftest_type))
test_dict['expected'] = expected
test_dict['actual'] = " ".join(actual)
def is_expected(actual_result):
return expectations.matches_an_expected_result(test_name, actual_result,
port_obj.get_option('pixel_tests') or result.reftest_type,
port_obj.get_option('enable_sanitizer'))
# To avoid bloating the output results json too much, only add an entry for whether the failure is unexpected.
if not any(is_expected(actual_result) for actual_result in actual_types):
test_dict['is_unexpected'] = True
test_dict.update(_interpret_test_failures(result.failures))
for retry_attempt_results in all_retry_results:
retry_result = retry_attempt_results.unexpected_results_by_name.get(test_name)
if retry_result:
test_dict.update(_interpret_test_failures(retry_result.failures))
        if result.has_repaint_overlay:
test_dict['has_repaint_overlay'] = True
# Store test hierarchically by directory. e.g.
# foo/bar/baz.html: test_dict
# foo/bar/baz1.html: test_dict
#
# becomes
# foo: {
# bar: {
# baz.html: test_dict,
# baz1.html: test_dict
# }
# }
parts = test_name.split('/')
current_map = tests
for i, part in enumerate(parts):
if i == (len(parts) - 1):
current_map[part] = test_dict
break
if part not in current_map:
current_map[part] = {}
current_map = current_map[part]
results['tests'] = tests
# FIXME: Remove this. It is redundant with results['num_failures_by_type'].
results['num_passes'] = num_passes
results['num_flaky'] = num_flaky
# FIXME: Remove this. It is redundant with results['num_failures_by_type'].
results['num_regressions'] = num_regressions
# Does results.html have enough information to compute this itself? (by
# checking total number of results vs. total number of tests?)
results['interrupted'] = initial_results.interrupted
results['layout_tests_dir'] = port_obj.layout_tests_dir()
results['has_wdiff'] = port_obj.wdiff_available()
results['has_pretty_patch'] = port_obj.pretty_patch_available()
results['pixel_tests_enabled'] = port_obj.get_option('pixel_tests')
results['seconds_since_epoch'] = int(time.time())
results['build_number'] = port_obj.get_option('build_number')
results['builder_name'] = port_obj.get_option('builder_name')
# Don't do this by default since it takes >100ms.
# It's only used for rebaselining and uploading data to the flakiness dashboard.
results['chromium_revision'] = ''
if port_obj.get_option('builder_name'):
path = port_obj.repository_path()
scm = port_obj.host.scm_for_path(path)
if scm:
results['chromium_revision'] = str(scm.commit_position(path))
else:
_log.warn('Failed to determine chromium commit position for %s, '
'leaving "chromium_revision" key blank in full_results.json.'
% path)
return results
|
|
# coding=utf-8
# Copyright 2019 The Interval Bound Propagation Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Trains a verifiable model on Mnist or CIFAR-10."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from absl import app
from absl import flags
from absl import logging
import interval_bound_propagation as ibp
import tensorflow.compat.v1 as tf
FLAGS = flags.FLAGS
flags.DEFINE_enum('dataset', 'mnist', ['mnist', 'cifar10'],
'Dataset (either "mnist" or "cifar10").')
flags.DEFINE_enum('model', 'tiny', ['tiny', 'small', 'medium', 'large'],
'Model size.')
flags.DEFINE_string('output_dir', '/tmp/ibp_model', 'Output directory.')
# Options.
flags.DEFINE_integer('steps', 60001, 'Number of steps in total.')
flags.DEFINE_integer('test_every_n', 2000,
'Number of steps between testing iterations.')
flags.DEFINE_integer('warmup_steps', 2000, 'Number of warm-up steps.')
flags.DEFINE_integer('rampup_steps', 10000, 'Number of ramp-up steps.')
flags.DEFINE_integer('batch_size', 200, 'Batch size.')
flags.DEFINE_float('epsilon', .3, 'Target epsilon.')
flags.DEFINE_float('epsilon_train', .33, 'Train epsilon.')
flags.DEFINE_string('learning_rate', '1e-3,1e-4@15000,1e-5@25000',
'Learning rate schedule of the form: '
                    'initial_learning_rate[,learning_rate@step]*. E.g., "1e-3" or '
'"1e-3,1e-4@15000,1e-5@25000".')
flags.DEFINE_float('nominal_xent_init', 1.,
'Initial weight for the nominal cross-entropy.')
flags.DEFINE_float('nominal_xent_final', .5,
'Final weight for the nominal cross-entropy.')
flags.DEFINE_float('verified_xent_init', 0.,
'Initial weight for the verified cross-entropy.')
flags.DEFINE_float('verified_xent_final', .5,
'Final weight for the verified cross-entropy.')
flags.DEFINE_float('crown_bound_init', 0.,
'Initial weight for mixing the CROWN bound with the IBP '
'bound in the verified cross-entropy.')
flags.DEFINE_float('crown_bound_final', 0.,
'Final weight for mixing the CROWN bound with the IBP '
'bound in the verified cross-entropy.')
flags.DEFINE_float('attack_xent_init', 0.,
'Initial weight for the attack cross-entropy.')
flags.DEFINE_float('attack_xent_final', 0.,
                   'Final weight for the attack cross-entropy.')
def show_metrics(step_value, metric_values, loss_value=None):
print('{}: {}nominal accuracy = {:.2f}%, '
'verified = {:.2f}%, attack = {:.2f}%'.format(
step_value,
'loss = {}, '.format(loss_value) if loss_value is not None else '',
metric_values.nominal_accuracy * 100.,
metric_values.verified_accuracy * 100.,
metric_values.attack_accuracy * 100.))
def layers(model_size):
"""Returns the layer specification for a given model name."""
if model_size == 'tiny':
return (
('linear', 100),
('activation', 'relu'))
elif model_size == 'small':
return (
('conv2d', (4, 4), 16, 'VALID', 2),
('activation', 'relu'),
('conv2d', (4, 4), 32, 'VALID', 1),
('activation', 'relu'),
('linear', 100),
('activation', 'relu'))
elif model_size == 'medium':
return (
('conv2d', (3, 3), 32, 'VALID', 1),
('activation', 'relu'),
('conv2d', (4, 4), 32, 'VALID', 2),
('activation', 'relu'),
('conv2d', (3, 3), 64, 'VALID', 1),
('activation', 'relu'),
('conv2d', (4, 4), 64, 'VALID', 2),
('activation', 'relu'),
('linear', 512),
('activation', 'relu'),
('linear', 512),
('activation', 'relu'))
elif model_size == 'large':
return (
('conv2d', (3, 3), 64, 'SAME', 1),
('activation', 'relu'),
('conv2d', (3, 3), 64, 'SAME', 1),
('activation', 'relu'),
('conv2d', (3, 3), 128, 'SAME', 2),
('activation', 'relu'),
('conv2d', (3, 3), 128, 'SAME', 1),
('activation', 'relu'),
('conv2d', (3, 3), 128, 'SAME', 1),
('activation', 'relu'),
('linear', 512),
('activation', 'relu'))
else:
raise ValueError('Unknown model: "{}"'.format(model_size))
def main(unused_args):
logging.info('Training IBP on %s...', FLAGS.dataset.upper())
step = tf.train.get_or_create_global_step()
# Learning rate.
learning_rate = ibp.parse_learning_rate(step, FLAGS.learning_rate)
# Dataset.
input_bounds = (0., 1.)
num_classes = 10
if FLAGS.dataset == 'mnist':
data_train, data_test = tf.keras.datasets.mnist.load_data()
else:
assert FLAGS.dataset == 'cifar10', (
'Unknown dataset "{}"'.format(FLAGS.dataset))
data_train, data_test = tf.keras.datasets.cifar10.load_data()
data_train = (data_train[0], data_train[1].flatten())
data_test = (data_test[0], data_test[1].flatten())
data = ibp.build_dataset(data_train, batch_size=FLAGS.batch_size,
sequential=False)
if FLAGS.dataset == 'cifar10':
data = data._replace(image=ibp.randomize(
data.image, (32, 32, 3), expand_shape=(40, 40, 3),
crop_shape=(32, 32, 3), vertical_flip=True))
# Base predictor network.
original_predictor = ibp.DNN(num_classes, layers(FLAGS.model))
predictor = original_predictor
if FLAGS.dataset == 'cifar10':
mean = (0.4914, 0.4822, 0.4465)
std = (0.2023, 0.1994, 0.2010)
predictor = ibp.add_image_normalization(original_predictor, mean, std)
if FLAGS.crown_bound_init > 0 or FLAGS.crown_bound_final > 0:
logging.info('Using CROWN-IBP loss.')
model_wrapper = ibp.crown.VerifiableModelWrapper
loss_helper = ibp.crown.create_classification_losses
else:
model_wrapper = ibp.VerifiableModelWrapper
loss_helper = ibp.create_classification_losses
predictor = model_wrapper(predictor)
# Training.
train_losses, train_loss, _ = loss_helper(
step,
data.image,
data.label,
predictor,
FLAGS.epsilon_train,
loss_weights={
'nominal': {
'init': FLAGS.nominal_xent_init,
'final': FLAGS.nominal_xent_final,
'warmup': FLAGS.verified_xent_init + FLAGS.nominal_xent_init
},
'attack': {
'init': FLAGS.attack_xent_init,
'final': FLAGS.attack_xent_final
},
'verified': {
'init': FLAGS.verified_xent_init,
'final': FLAGS.verified_xent_final,
'warmup': 0.
},
'crown_bound': {
'init': FLAGS.crown_bound_init,
'final': FLAGS.crown_bound_final,
'warmup': 0.
},
},
warmup_steps=FLAGS.warmup_steps,
rampup_steps=FLAGS.rampup_steps,
input_bounds=input_bounds)
saver = tf.train.Saver(original_predictor.get_variables())
optimizer = tf.train.AdamOptimizer(learning_rate)
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
with tf.control_dependencies(update_ops):
train_op = optimizer.minimize(train_loss, step)
# Test using while loop.
def get_test_metrics(batch_size, attack_builder=ibp.UntargetedPGDAttack):
"""Returns the test metrics."""
num_test_batches = len(data_test[0]) // batch_size
assert len(data_test[0]) % batch_size == 0, (
'Test data is not a multiple of batch size.')
def cond(i, *unused_args):
return i < num_test_batches
def body(i, metrics):
"""Compute the sum of all metrics."""
test_data = ibp.build_dataset(data_test, batch_size=batch_size,
sequential=True)
predictor(test_data.image, override=True, is_training=False)
input_interval_bounds = ibp.IntervalBounds(
tf.maximum(test_data.image - FLAGS.epsilon, input_bounds[0]),
tf.minimum(test_data.image + FLAGS.epsilon, input_bounds[1]))
predictor.propagate_bounds(input_interval_bounds)
test_specification = ibp.ClassificationSpecification(
test_data.label, num_classes)
test_attack = attack_builder(predictor, test_specification, FLAGS.epsilon,
input_bounds=input_bounds,
optimizer_builder=ibp.UnrolledAdam)
test_losses = ibp.Losses(predictor, test_specification, test_attack)
test_losses(test_data.label)
new_metrics = []
for m, n in zip(metrics, test_losses.scalar_metrics):
new_metrics.append(m + n)
return i + 1, new_metrics
total_count = tf.constant(0, dtype=tf.int32)
total_metrics = [tf.constant(0, dtype=tf.float32)
for _ in range(len(ibp.ScalarMetrics._fields))]
total_count, total_metrics = tf.while_loop(
cond,
body,
loop_vars=[total_count, total_metrics],
back_prop=False,
parallel_iterations=1)
total_count = tf.cast(total_count, tf.float32)
test_metrics = []
for m in total_metrics:
test_metrics.append(m / total_count)
return ibp.ScalarMetrics(*test_metrics)
test_metrics = get_test_metrics(
FLAGS.batch_size, ibp.UntargetedPGDAttack)
summaries = []
for f in test_metrics._fields:
summaries.append(
tf.summary.scalar(f, getattr(test_metrics, f)))
test_summaries = tf.summary.merge(summaries)
test_writer = tf.summary.FileWriter(os.path.join(FLAGS.output_dir, 'test'))
# Run everything.
tf_config = tf.ConfigProto()
tf_config.gpu_options.allow_growth = True
with tf.train.SingularMonitoredSession(config=tf_config) as sess:
for _ in range(FLAGS.steps):
iteration, loss_value, _ = sess.run(
[step, train_losses.scalar_losses.nominal_cross_entropy, train_op])
if iteration % FLAGS.test_every_n == 0:
metric_values, summary = sess.run([test_metrics, test_summaries])
test_writer.add_summary(summary, iteration)
show_metrics(iteration, metric_values, loss_value=loss_value)
saver.save(sess._tf_sess(), # pylint: disable=protected-access
os.path.join(FLAGS.output_dir, 'model'),
global_step=FLAGS.steps - 1)
if __name__ == '__main__':
app.run(main)
|
|
#!/usr/bin/env python2
# Copyright (c) 2016 The Zcash developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import BitcoinTestFramework
from test_framework.authproxy import JSONRPCException
from test_framework.mininode import COIN
from test_framework.util import assert_equal, initialize_chain_clean, \
    start_nodes, connect_nodes_bi, wait_and_assert_operationid_status, \
wait_and_assert_operationid_status_result, get_coinbase_address
import sys
import time
import timeit
from decimal import Decimal
def check_value_pool(node, name, total):
value_pools = node.getblockchaininfo()['valuePools']
found = False
for pool in value_pools:
if pool['id'] == name:
found = True
assert_equal(pool['monitored'], True)
assert_equal(pool['chainValue'], total)
assert_equal(pool['chainValueZat'], total * COIN)
assert(found)
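# Each entry of getblockchaininfo()['valuePools'] looks roughly like this
# (values are illustrative):
#   {'id': 'sprout', 'monitored': True, 'chainValue': Decimal('19.9999'),
#    'chainValueZat': 1999990000}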
class WalletProtectCoinbaseTest (BitcoinTestFramework):
def setup_chain(self):
print("Initializing test directory "+self.options.tmpdir)
initialize_chain_clean(self.options.tmpdir, 4)
# Start nodes with -regtestprotectcoinbase to set fCoinbaseMustBeProtected to true.
def setup_network(self, split=False):
self.nodes = start_nodes(4, self.options.tmpdir, extra_args=[['-regtestprotectcoinbase', '-debug=zrpcunsafe']] * 4 )
connect_nodes_bi(self.nodes,0,1)
connect_nodes_bi(self.nodes,1,2)
connect_nodes_bi(self.nodes,0,2)
connect_nodes_bi(self.nodes,0,3)
self.is_network_split=False
self.sync_all()
def run_test (self):
print "Mining blocks..."
self.nodes[0].generate(4)
walletinfo = self.nodes[0].getwalletinfo()
assert_equal(walletinfo['immature_balance'], 40)
assert_equal(walletinfo['balance'], 0)
self.sync_all()
self.nodes[1].generate(101)
self.sync_all()
assert_equal(self.nodes[0].getbalance(), 40)
assert_equal(self.nodes[1].getbalance(), 10)
assert_equal(self.nodes[2].getbalance(), 0)
assert_equal(self.nodes[3].getbalance(), 0)
check_value_pool(self.nodes[0], 'sprout', 0)
check_value_pool(self.nodes[1], 'sprout', 0)
check_value_pool(self.nodes[2], 'sprout', 0)
check_value_pool(self.nodes[3], 'sprout', 0)
# Send will fail because we are enforcing the consensus rule that
# coinbase utxos can only be sent to a zaddr.
errorString = ""
try:
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 1)
except JSONRPCException,e:
errorString = e.error['message']
assert_equal("Coinbase funds can only be sent to a zaddr" in errorString, True)
# Prepare to send taddr->zaddr
mytaddr = self.nodes[0].getnewaddress()
myzaddr = self.nodes[0].z_getnewaddress('sprout')
# Node 3 will test that watch only address utxos are not selected
self.nodes[3].importaddress(mytaddr)
recipients= [{"address":myzaddr, "amount": Decimal('1')}]
myopid = self.nodes[3].z_sendmany(mytaddr, recipients)
wait_and_assert_operationid_status(self.nodes[3], myopid, "failed", "Insufficient funds, no UTXOs found for taddr from address.", 10)
# This send will fail because our wallet does not allow any change when protecting a coinbase utxo,
# as it's currently not possible to specify a change address in z_sendmany.
recipients = []
recipients.append({"address":myzaddr, "amount":Decimal('1.23456789')})
myopid = self.nodes[0].z_sendmany(mytaddr, recipients)
error_result = wait_and_assert_operationid_status_result(self.nodes[0], myopid, "failed", ("Change 8.76533211 not allowed. "
"When shielding coinbase funds, the wallet does not allow any change "
"as there is currently no way to specify a change address in z_sendmany."), 10)
# Test that the returned status object contains a params field with the operation's input parameters
assert_equal(error_result["method"], "z_sendmany")
params = error_result["params"]
assert_equal(params["fee"], Decimal('0.0001')) # default
assert_equal(params["minconf"], Decimal('1')) # default
assert_equal(params["fromaddress"], mytaddr)
assert_equal(params["amounts"][0]["address"], myzaddr)
assert_equal(params["amounts"][0]["amount"], Decimal('1.23456789'))
# Add viewing key for myzaddr to Node 3
myviewingkey = self.nodes[0].z_exportviewingkey(myzaddr)
self.nodes[3].z_importviewingkey(myviewingkey, "no")
# This send will succeed. We send two coinbase utxos totalling 20.0 less a fee of 0.00010000, with no change.
shieldvalue = Decimal('20.0') - Decimal('0.0001')
recipients = []
recipients.append({"address":myzaddr, "amount": shieldvalue})
myopid = self.nodes[0].z_sendmany(mytaddr, recipients)
mytxid = wait_and_assert_operationid_status(self.nodes[0], myopid)
self.sync_all()
# Verify that z_listunspent can return a note that has zero confirmations
results = self.nodes[0].z_listunspent()
assert(len(results) == 0)
results = self.nodes[0].z_listunspent(0) # set minconf to zero
assert(len(results) == 1)
assert_equal(results[0]["address"], myzaddr)
assert_equal(results[0]["amount"], shieldvalue)
assert_equal(results[0]["confirmations"], 0)
# Mine the tx
self.nodes[1].generate(1)
self.sync_all()
# Verify that z_listunspent returns one note which has been confirmed
results = self.nodes[0].z_listunspent()
assert(len(results) == 1)
assert_equal(results[0]["address"], myzaddr)
assert_equal(results[0]["amount"], shieldvalue)
assert_equal(results[0]["confirmations"], 1)
assert_equal(results[0]["spendable"], True)
# Verify that z_listunspent returns note for watchonly address on node 3.
results = self.nodes[3].z_listunspent(1, 999, True)
assert(len(results) == 1)
assert_equal(results[0]["address"], myzaddr)
assert_equal(results[0]["amount"], shieldvalue)
assert_equal(results[0]["confirmations"], 1)
assert_equal(results[0]["spendable"], False)
# Verify that z_listunspent returns error when address spending key from node 0 is not available in wallet of node 1.
try:
results = self.nodes[1].z_listunspent(1, 999, False, [myzaddr])
except JSONRPCException as e:
errorString = e.error['message']
assert_equal("Invalid parameter, spending key for address does not belong to wallet" in errorString, True)
# Verify that debug=zrpcunsafe logs params, and that full txid is associated with opid
logpath = self.options.tmpdir+"/node0/regtest/debug.log"
logcounter = 0
with open(logpath, "r") as myfile:
logdata = myfile.readlines()
for logline in logdata:
if myopid + ": z_sendmany initialized" in logline and mytaddr in logline and myzaddr in logline:
assert_equal(logcounter, 0) # verify order of log messages
logcounter = logcounter + 1
if myopid + ": z_sendmany finished" in logline and mytxid in logline:
assert_equal(logcounter, 1)
logcounter = logcounter + 1
assert_equal(logcounter, 2)
# check balances (the z_sendmany consumes 3 coinbase utxos)
resp = self.nodes[0].z_gettotalbalance()
assert_equal(Decimal(resp["transparent"]), Decimal('20.0'))
assert_equal(Decimal(resp["private"]), Decimal('19.9999'))
assert_equal(Decimal(resp["total"]), Decimal('39.9999'))
# The Sprout value pool should reflect the send
sproutvalue = shieldvalue
check_value_pool(self.nodes[0], 'sprout', sproutvalue)
# A custom fee of 0 is okay. Here the node will send the note value back to itself.
recipients = []
recipients.append({"address":myzaddr, "amount": Decimal('19.9999')})
myopid = self.nodes[0].z_sendmany(myzaddr, recipients, 1, Decimal('0.0'))
mytxid = wait_and_assert_operationid_status(self.nodes[0], myopid)
self.sync_all()
self.nodes[1].generate(1)
self.sync_all()
resp = self.nodes[0].z_gettotalbalance()
assert_equal(Decimal(resp["transparent"]), Decimal('20.0'))
assert_equal(Decimal(resp["private"]), Decimal('19.9999'))
assert_equal(Decimal(resp["total"]), Decimal('39.9999'))
# The Sprout value pool should be unchanged
check_value_pool(self.nodes[0], 'sprout', sproutvalue)
# convert note to transparent funds
unshieldvalue = Decimal('10.0')
recipients = []
recipients.append({"address":mytaddr, "amount": unshieldvalue})
myopid = self.nodes[0].z_sendmany(myzaddr, recipients)
mytxid = wait_and_assert_operationid_status(self.nodes[0], myopid)
assert(mytxid is not None)
self.sync_all()
# check that priority of the tx sending from a zaddr is not 0
mempool = self.nodes[0].getrawmempool(True)
assert(Decimal(mempool[mytxid]['startingpriority']) >= Decimal('1000000000000'))
self.nodes[1].generate(1)
self.sync_all()
# check balances
sproutvalue -= unshieldvalue + Decimal('0.0001')
resp = self.nodes[0].z_gettotalbalance()
assert_equal(Decimal(resp["transparent"]), Decimal('30.0'))
assert_equal(Decimal(resp["private"]), Decimal('9.9998'))
assert_equal(Decimal(resp["total"]), Decimal('39.9998'))
check_value_pool(self.nodes[0], 'sprout', sproutvalue)
# z_sendmany will return an error if there is transparent change output considered dust.
# UTXO selection in z_sendmany sorts in ascending order, so smallest utxos are consumed first.
# At this point in time, unspent notes all have a value of 10.0 and standard z_sendmany fee is 0.0001.
recipients = []
amount = Decimal('10.0') - Decimal('0.00010000') - Decimal('0.00000001') # this leaves change at 1 zatoshi less than dust threshold
recipients.append({"address":self.nodes[0].getnewaddress(), "amount":amount })
myopid = self.nodes[0].z_sendmany(mytaddr, recipients)
wait_and_assert_operationid_status(self.nodes[0], myopid, "failed", "Insufficient transparent funds, have 10.00, need 0.00000053 more to avoid creating invalid change output 0.00000001 (dust threshold is 0.00000054)")
# Send will fail because send amount is too big, even when including coinbase utxos
errorString = ""
try:
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 99999)
except JSONRPCException,e:
errorString = e.error['message']
assert_equal("Insufficient funds" in errorString, True)
# z_sendmany will fail because of insufficient funds
recipients = []
recipients.append({"address":self.nodes[1].getnewaddress(), "amount":Decimal('10000.0')})
myopid = self.nodes[0].z_sendmany(mytaddr, recipients)
wait_and_assert_operationid_status(self.nodes[0], myopid, "failed", "Insufficient transparent funds, have 10.00, need 10000.0001")
myopid = self.nodes[0].z_sendmany(myzaddr, recipients)
wait_and_assert_operationid_status(self.nodes[0], myopid, "failed", "Insufficient shielded funds, have 9.9998, need 10000.0001")
# Send will fail because of insufficient funds unless sender uses coinbase utxos
try:
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 21)
except JSONRPCException,e:
errorString = e.error['message']
assert_equal("Insufficient funds, coinbase funds can only be spent after they have been sent to a zaddr" in errorString, True)
# Verify that mempools accept tx with joinsplits which have at least the default z_sendmany fee.
# If this test passes, it confirms that issue #1851 has been resolved, where sending from
# a zaddr to 1385 taddr recipients fails because the default fee was considered too low
# given the tx size, resulting in mempool rejection.
errorString = ''
recipients = []
num_t_recipients = 2500
amount_per_recipient = Decimal('0.00000546') # dust threshold
# Note that regtest chainparams does not require standard tx, so setting the amount to be
# less than the dust threshold, e.g. 0.00000001 will not result in mempool rejection.
start_time = timeit.default_timer()
for i in xrange(0,num_t_recipients):
newtaddr = self.nodes[2].getnewaddress()
recipients.append({"address":newtaddr, "amount":amount_per_recipient})
elapsed = timeit.default_timer() - start_time
print("...invoked getnewaddress() {} times in {} seconds".format(num_t_recipients, elapsed))
# Issue #2263 Workaround START
# HTTP connection to node 0 may fall into a state, during the few minutes it takes to process
# loop above to create new addresses, that when z_sendmany is called with a large amount of
# rpc data in recipients, the connection fails with a 'broken pipe' error. Making a RPC call
# to node 0 before calling z_sendmany appears to fix this issue, perhaps putting the HTTP
# connection into a good state to handle a large amount of data in recipients.
self.nodes[0].getinfo()
# Issue #2263 Workaround END
myopid = self.nodes[0].z_sendmany(myzaddr, recipients)
try:
wait_and_assert_operationid_status(self.nodes[0], myopid)
except JSONRPCException as e:
print("JSONRPC error: "+e.error['message'])
assert(False)
except Exception as e:
print("Unexpected exception caught during testing: "+str(sys.exc_info()[0]))
assert(False)
self.sync_all()
self.nodes[1].generate(1)
self.sync_all()
# check balance
node2balance = amount_per_recipient * num_t_recipients
sproutvalue -= node2balance + Decimal('0.0001')
assert_equal(self.nodes[2].getbalance(), node2balance)
check_value_pool(self.nodes[0], 'sprout', sproutvalue)
# Send will fail because fee is negative
try:
self.nodes[0].z_sendmany(myzaddr, recipients, 1, -1)
except JSONRPCException,e:
errorString = e.error['message']
assert_equal("Amount out of range" in errorString, True)
# Send will fail because fee is larger than MAX_MONEY
try:
self.nodes[0].z_sendmany(myzaddr, recipients, 1, Decimal('21000000.00000001'))
except JSONRPCException,e:
errorString = e.error['message']
assert_equal("Amount out of range" in errorString, True)
# Send will fail because fee is larger than sum of outputs
try:
self.nodes[0].z_sendmany(myzaddr, recipients, 1, (amount_per_recipient * num_t_recipients) + Decimal('0.00000001'))
except JSONRPCException,e:
errorString = e.error['message']
assert_equal("is greater than the sum of outputs" in errorString, True)
# Send will succeed because the balance of non-coinbase utxos is 10.0
try:
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 9)
except JSONRPCException:
assert(False)
self.sync_all()
self.nodes[1].generate(1)
self.sync_all()
# check balance
node2balance = node2balance + 9
assert_equal(self.nodes[2].getbalance(), node2balance)
# Check that chained joinsplits in a single tx are created successfully.
recipients = []
num_recipients = 3
amount_per_recipient = Decimal('0.002')
minconf = 1
send_amount = num_recipients * amount_per_recipient
custom_fee = Decimal('0.00012345')
zbalance = self.nodes[0].z_getbalance(myzaddr)
for i in xrange(0,num_recipients):
newzaddr = self.nodes[2].z_getnewaddress('sprout')
recipients.append({"address":newzaddr, "amount":amount_per_recipient})
myopid = self.nodes[0].z_sendmany(myzaddr, recipients, minconf, custom_fee)
wait_and_assert_operationid_status(self.nodes[0], myopid)
self.sync_all()
self.nodes[1].generate(1)
self.sync_all()
# check balances and unspent notes
resp = self.nodes[2].z_gettotalbalance()
assert_equal(Decimal(resp["private"]), send_amount)
notes = self.nodes[2].z_listunspent()
sum_of_notes = sum([note["amount"] for note in notes])
assert_equal(Decimal(resp["private"]), sum_of_notes)
resp = self.nodes[0].z_getbalance(myzaddr)
assert_equal(Decimal(resp), zbalance - custom_fee - send_amount)
sproutvalue -= custom_fee
check_value_pool(self.nodes[0], 'sprout', sproutvalue)
notes = self.nodes[0].z_listunspent(1, 99999, False, [myzaddr])
sum_of_notes = sum([note["amount"] for note in notes])
assert_equal(Decimal(resp), sum_of_notes)
if __name__ == '__main__':
WalletProtectCoinbaseTest().main()
|
|
'''
List View
===========
.. versionadded:: 1.5
.. warning::
This code is still experimental, and its API is subject to change in a
future version.
The :class:`~kivy.uix.listview.ListView` implements an
:class:`~kivy.uix.abstractview.AbstractView` as
a vertical, scrollable, pannable list clipped to the scrollview's bounding box
and contains list item view instances.
The :class:`AbstractView` has one property: :class:`~kivy.adapters.adapter`.
The adapter can be one of the following: a
:class:`~kivy.adapters.simplelistadapter.SimpleListAdapter`, a
:class:`~kivy.adapters.listadapter.ListAdapter` or a
:class:`~kivy.adapters.dictadapter.DictAdapter`. The :class:`Adapter` can make
use of :mod:`~kivy.adapters.args_converters` to prepare your data for passing
into the constructor for each item view instantiation.
For an overview of how all these components fit together, please see the
:mod:`~kivy.adapters` module documentation.
Introduction
------------
Lists are central parts of many software projects. Kivy's approach to lists
includes providing solutions for simple lists, along with a substantial
framework for building lists of moderate to advanced complexity. For a new
user, it can be difficult to ramp up from simple to advanced. For
this reason, Kivy provides an extensive set of examples (with the Kivy package)
that you may wish to run first, to get a taste of the range of functionality
offered. You can tell from the names of the examples that they illustrate the
"ramping up" from simple to advanced:
* `kivy/examples/widgets/lists/list_simple.py <https://github.com/\
kivy/kivy/tree/master/examples/widgets/lists/list_simple.py>`_
* `kivy/examples/widgets/lists/list_simple_in_kv.py <https://github.com/\
kivy/kivy/tree/master/examples/widgets/lists/list_simple_in_kv.py>`_
* `kivy/examples/widgets/lists/list_simple_in_kv_2.py <https://github.com/\
kivy/kivy/tree/master/examples/widgets/lists/list_simple_in_kv_2.py>`_
* `kivy/examples/widgets/lists/list_master_detail.py <https://github.com/\
kivy/kivy/tree/master/examples/widgets/lists/list_master_detail.py>`_
* `kivy/examples/widgets/lists/list_two_up.py <https://github.com/\
kivy/kivy/tree/master/examples/widgets/lists/list_two_up.py>`_
* `kivy/examples/widgets/lists/list_kv.py <https://github.com/\
kivy/kivy/tree/master/examples/widgets/lists/list_kv.py>`_
* `kivy/examples/widgets/lists/list_composite.py <https://github.com/\
kivy/kivy/tree/master/examples/widgets/lists/list_composite.py>`_
* `kivy/examples/widgets/lists/list_cascade.py <https://github.com/\
kivy/kivy/tree/master/examples/widgets/lists/list_cascade.py>`_
* `kivy/examples/widgets/lists/list_cascade_dict.py <https://github.com/\
kivy/kivy/tree/master/examples/widgets/lists/list_cascade_dict.py>`_
* `kivy/examples/widgets/lists/list_cascade_images.py <https://github.com/\
kivy/kivy/tree/master/examples/widgets/lists/list_cascade_images.py>`_
* `kivy/examples/widgets/lists/list_ops.py <https://github.com/\
kivy/kivy/tree/master/examples/widgets/lists/list_ops.py>`_
Many of the examples feature selection, some restricting selection to single
selection, where only one item at a time can be selected, and others allowing
multiple item selection. Many of the examples illustrate how selection in one
list can be connected to actions and selections in another view or another list.
Find your own way of reading the documentation here, examining the source code
for the example apps and running the examples. Some may prefer to read the
documentation through first, others may want to run the examples and view their
code. No matter what you do, going back and forth will likely be needed.
Basic Example
-------------
In its simplest form, we make a listview with 100 items::
from kivy.uix.listview import ListView
from kivy.base import runTouchApp
class MainView(ListView):
def __init__(self, **kwargs):
super(MainView, self).__init__(
item_strings=[str(index) for index in range(100)])
if __name__ == '__main__':
runTouchApp(MainView())
Or, we could declare the listview using the kv language::
from kivy.uix.boxlayout import BoxLayout
from kivy.lang import Builder
from kivy.base import runTouchApp
Builder.load_string("""
<MyListView>:
ListView:
item_strings: [str(index) for index in range(100)]
""")
class MyListView(BoxLayout):
pass
if __name__ == '__main__':
runTouchApp(MyListView())
Using an Adapter
-------------------
Behind the scenes, the basic example above uses the
:class:`~kivy.adapters.simplelistadapter.SimpleListAdapter`. When the
constructor for the :class:`~kivy.uix.listview.ListView` sees that only a list
of strings is provided as an argument (called item_strings), it creates a
:class:`~kivy.adapters.simplelistadapter.SimpleListAdapter` using the
list of strings.
"Simple" in :class:`~kivy.adapters.simplelistadapter.SimpleListAdapter` means
*without selection support*. It is a scrollable list of items that does not
respond to touch events.
To use a :class:`SimpleListAdapter` explicitly when creating a ListView instance,
do::
simple_list_adapter = SimpleListAdapter(
data=["Item #{0}".format(i) for i in range(100)],
cls=Label)
list_view = ListView(adapter=simple_list_adapter)
The instance of :class:`~kivy.adapters.simplelistadapter.SimpleListAdapter` has
a required data argument which contains data items to use for instantiating
:class:`~kivy.uix.label.Label` views for the list view (note the cls=Label
argument). The data items are strings. Each item string is set by the
:class:`~kivy.adapters.simplelistadapter.SimpleListAdapter` as the *text*
argument for each Label instantiation.
You can declare a ListView with an adapter in a kv file with special attention
given to the way longer python blocks are indented::
from kivy.uix.boxlayout import BoxLayout
from kivy.base import runTouchApp
from kivy.lang import Builder
# Note the special nature of indentation in the adapter declaration, where
# the adapter: is on one line, then the value side must be given at one
# level of indentation.
Builder.load_string("""
#:import label kivy.uix.label
#:import sla kivy.adapters.simplelistadapter
<MyListView>:
ListView:
adapter:
sla.SimpleListAdapter(
data=["Item #{0}".format(i) for i in range(100)],
cls=label.Label)
""")
class MyListView(BoxLayout):
pass
if __name__ == '__main__':
runTouchApp(MyListView())
ListAdapter and DictAdapter
---------------------------
For most use cases, your data is more complex than a simple list of strings.
Selection functionality is also often needed.
The :class:`~kivy.adapters.listadapter.ListAdapter` and
:class:`~kivy.adapters.dictadapter.DictAdapter` cover these more elaborate
needs.
The :class:`~kivy.adapters.listadapter.ListAdapter` is the base class for
:class:`~kivy.adapters.dictadapter.DictAdapter`, so we can start with it.
Refer to the :class:`~kivy.adapters.listadapter.ListAdapter` docs for details,
but here is a synopsis of its arguments:
* :attr:`~kivy.adapters.adapter.Adapter.data`:
strings, class instances, dicts, etc. that form the base data
for instantiating views.
* :attr:`~kivy.adapters.adapter.Adapter.cls`:
a Kivy view that is to be instantiated for each list item. There
are several built-in types available, including ListItemLabel and
ListItemButton, or you can make your own class that mixes in the
required :class:`~kivy.uix.listview.SelectableView`.
* :attr:`~kivy.adapters.adapter.Adapter.template`:
the name of a Kivy language (kv) template that defines the
Kivy view for each list item.
.. note::
Pick only one, cls or template, to provide as an argument.
* :attr:`~kivy.adapters.args_converters`: a function that takes a data item
object as input and uses it to build and return an args dict, ready to be
used in a call to instantiate item views using the item view cls or
template. In the case of cls, the args dict becomes a kwargs constructor
argument. For a template, it is treated as a context (ctx) but is
essentially similar in form to the kwargs usage.
* :attr:`~kivy.adapters.listadapter.ListAdapter.selection_mode`:
a string with the value 'single',
'multiple' or other.
* :attr:`~kivy.adapters.listadapter.ListAdapter.allow_empty_selection`:
a boolean, which if False (the default), forces
there to always be a selection if there is data
available. If True, selection happens only as a
result of user action.
In narrative, we can summarize as follows:
A listview's adapter takes data items and uses an args_converter
function to transform them into arguments for creating list item view
instances, using either a cls or a kv template.
In a graphic, the relationship between a listview and its
components can be summarized as follows:
.. image:: images/adapters.png
Please refer to the :mod:`~kivy.adapters` documentation for more details.
A :class:`~kivy.adapters.dictadapter.DictAdapter` has the same arguments and
requirements as a :class:`~kivy.adapters.listadapter.ListAdapter` except for two
things:
1) There is an additional argument, sorted_keys, which must meet the
requirements of normal python dictionary keys.
2) The data argument is, as you would expect, a dict. Keys in the dict
must include the keys in the sorted_keys argument, but they may form a
superset of the keys in sorted_keys. Values may be strings, class
instances, dicts, etc. (The args_converter uses it accordingly).
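As a short sketch of that relationship (the data values below are purely
illustrative), note that sorted_keys may list only a subset of the keys
present in data::

    from kivy.adapters.dictadapter import DictAdapter
    from kivy.uix.listview import ListItemButton, ListView

    data = {'cat': {'text': 'cat', 'is_selected': False},
            'dog': {'text': 'dog', 'is_selected': False},
            'frog': {'text': 'frog', 'is_selected': False}}

    # sorted_keys is a subset of the keys in data, which is allowed.
    dict_adapter = DictAdapter(sorted_keys=['cat', 'dog'],
                               data=data,
                               args_converter=lambda index, rec: {
                                   'text': rec['text'],
                                   'size_hint_y': None,
                                   'height': 25},
                               cls=ListItemButton)

    list_view = ListView(adapter=dict_adapter)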
Using an Args Converter
-----------------------
A :class:`~kivy.uix.listview.ListView` allows use of built-in list item views,
such as :class:`~kivy.uix.listview.ListItemButton`, your own custom item view
class or a custom kv template. Whichever type of list item view is used, an
:doc:`args_converter <api-kivy.adapters.args_converters>` function is needed to
prepare, per list data item, kwargs for the cls or the ctx for the template.
.. note::
Only the ListItemLabel, ListItemButton or custom classes like them (and
not the simple Label or Button classes) are to be used in the listview
system.
.. warning::
ListItemButton inherits the `background_normal` and `background_down`
properties from the Button widget, so the `selected_color` and
`deselected_color` are not represented faithfully by default.
Here is an args_converter for use with the built-in
:class:`~kivy.uix.listview.ListItemButton` specified as a normal Python
function::
def args_converter(row_index, an_obj):
return {'text': an_obj.text,
'size_hint_y': None,
'height': 25}
and as a lambda::
args_converter = lambda row_index, an_obj: {'text': an_obj.text,
'size_hint_y': None,
'height': 25}
In the args converter example above, the data item is assumed to be an object
(class instance), hence the reference an_obj.text.
Here is an example of an args converter that works with list data items that
are dicts::
args_converter = lambda row_index, obj: {'text': obj['text'],
'size_hint_y': None,
'height': 25}
So, it is the responsibility of the developer to code the args_converter
according to the data at hand. The row_index argument can be useful in some
cases, such as when custom labels are needed.
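As a small sketch of that idea (assuming dict data items with a 'text' key, as
in the example above), an args_converter can fold row_index into the label::

    args_converter = lambda row_index, rec: {
        'text': "Row {0}: {1}".format(row_index, rec['text']),
        'size_hint_y': None,
        'height': 25}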
An Example ListView
-------------------
Now, to some example code::
from kivy.adapters.listadapter import ListAdapter
from kivy.uix.listview import ListItemButton, ListView
data = [{'text': str(i), 'is_selected': False} for i in range(100)]
args_converter = lambda row_index, rec: {'text': rec['text'],
'size_hint_y': None,
'height': 25}
list_adapter = ListAdapter(data=data,
args_converter=args_converter,
cls=ListItemButton,
selection_mode='single',
allow_empty_selection=False)
list_view = ListView(adapter=list_adapter)
This listview will show 100 buttons with text from 0 to 99. The args_converter
function converts the dict items in the data and instantiates ListItemButton
views by passing these converted items into its constructor. The
listview will only allow single selection and the first item will already be
selected as allow_empty_selection is False. For a complete discussion on these
arguments, please see the :class:`~kivy.adapters.listadapter.ListAdapter`
documentation.
The :class:`~kivy.uix.listview.ListItemLabel` works in much the same way as the
:class:`~kivy.uix.listview.ListItemButton`.
Using a Custom Item View Class
------------------------------
The data used in an adapter can be any of the normal Python types or custom
classes, as shown below. It is up to the programmer to assure that the
args_converter performs the appropriate conversions.
Here we make a simple DataItem class that has the required text and
is_selected properties::
from kivy.uix.listview import ListItemButton
from kivy.adapters.listadapter import ListAdapter
class DataItem(object):
def __init__(self, text='', is_selected=False):
self.text = text
self.is_selected = is_selected
data_items = [DataItem(text='cat'),
DataItem(text='dog'),
DataItem(text='frog')]
list_item_args_converter = lambda row_index, obj: {'text': obj.text,
'size_hint_y': None,
'height': 25}
list_adapter = ListAdapter(data=data_items,
args_converter=list_item_args_converter,
propagate_selection_to_data=True,
cls=ListItemButton)
list_view = ListView(adapter=list_adapter)
The data is passed to the :class:`~kivy.adapters.listadapter.ListAdapter` along
with an args_converter function. The propagation setting means that
the is_selected property for each data item will be set and kept in sync with
the list item views. This setting should be set to True if you wish to
initialize the view with item views already selected.
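Because the data items are kept in sync, the current selection can also be
read back from the data itself. A minimal sketch, reusing the list_adapter
defined above::

    def selection_changed(adapter, *args):
        # With propagate_selection_to_data=True, the is_selected flags on
        # the data items mirror the selected item views.
        selected = [item.text for item in adapter.data if item.is_selected]
        print(selected)

    list_adapter.bind(on_selection_change=selection_changed)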
You may also use the provided :class:`~kivy.adapters.models.SelectableDataItem`
mixin to make a custom class. Instead of the "manually-constructed" DataItem
class above, we could do::
from kivy.adapters.models import SelectableDataItem
class DataItem(SelectableDataItem):
# Add properties here.
pass
:class:`~kivy.adapters.models.SelectableDataItem` is a simple mixin class that
has an is_selected property.
Using an Item View Template
---------------------------
:class:`~kivy.uix.listview.SelectableView` is another simple mixin class that
has required properties for a list item: text and is_selected. To make your
own template, mix it in as follows::
from kivy.lang import Builder
Builder.load_string("""
[CustomListItem@SelectableView+BoxLayout]:
size_hint_y: ctx.size_hint_y
height: ctx.height
ListItemButton:
text: ctx.text
is_selected: ctx.is_selected
""")
A class called CustomListItem can then be instantiated for each list item. Note
that it subclasses a :class:`~kivy.uix.boxlayout.BoxLayout` and is thus a type
of :mod:`~kivy.uix.layout`. It contains a
:class:`~kivy.uix.listview.ListItemButton` instance.
Using the power of the Kivy language (kv), you can easily build composite list
items: in addition to ListItemButton, you could have a ListItemLabel or a
custom class you have defined and registered via the
:class:`~kivy.factory.Factory`.
An args_converter needs to be constructed that goes along with such a kv
template. For example, to use the kv template above::
list_item_args_converter = \\
lambda row_index, rec: {'text': rec['text'],
'is_selected': rec['is_selected'],
'size_hint_y': None,
'height': 25}
integers_dict = \\
{ str(i): {'text': str(i), 'is_selected': False} for i in range(100)}
dict_adapter = DictAdapter(sorted_keys=[str(i) for i in range(100)],
data=integers_dict,
args_converter=list_item_args_converter,
template='CustomListItem')
list_view = ListView(adapter=dict_adapter)
A dict adapter is created with the integer strings 0 through 99 as sorted_keys, and an
integers_dict as data. integers_dict has the integer strings as keys and dicts
with text and is_selected properties. The CustomListItem defined above in the
Builder.load_string() call is set as the kv template for the list item views.
The list_item_args_converter lambda function will take each dict in
integers_dict and will return an args dict, ready for passing as the context
(ctx) for the template.
Using CompositeListItem
-----------------------
The class :class:`~kivy.uix.listview.CompositeListItem` is another option for
building advanced composite list items. The kv language approach has its
advantages, but here we build a composite list view using plain Python::
args_converter = lambda row_index, rec: \\
{'text': rec['text'],
'size_hint_y': None,
'height': 25,
'cls_dicts': [{'cls': ListItemButton,
'kwargs': {'text': rec['text']}},
{'cls': ListItemLabel,
'kwargs': {'text': "Middle-{0}".format(rec['text']),
'is_representing_cls': True}},
{'cls': ListItemButton,
'kwargs': {'text': rec['text']}}]}
item_strings = ["{0}".format(index) for index in range(100)]
integers_dict = \\
{str(i): {'text': str(i), 'is_selected': False} for i in range(100)}
dict_adapter = DictAdapter(sorted_keys=item_strings,
data=integers_dict,
args_converter=args_converter,
selection_mode='single',
allow_empty_selection=False,
cls=CompositeListItem)
list_view = ListView(adapter=dict_adapter)
The args_converter is somewhat complicated, so we should go through the
details. Observe in the :class:`~kivy.adapters.dictadapter.DictAdapter`
instantiation that the :class:`~kivy.uix.listview.CompositeListItem` class is
set as the cls to be instantiated for each list item. The
args_converter will
make args dicts for this cls. In the args_converter, the first three items,
text, size_hint_y, and height, are arguments for the CompositeListItem itself.
After that you see a cls_dicts list that contains argument sets for each of the
member widgets for this composite: 2
:class:`ListItemButtons <kivy.uix.listview.ListItemButton>` and a
:class:`~kivy.uix.listview.ListItemLabel`. This is a similar approach to
using a kv template described above.
For details on how :class:`~kivy.uix.listview.CompositeListItem` works,
examine the code, looking for how parsing of the cls_dicts list and kwargs
processing is done.
Uses for Selection
------------------
What can we do with selection? Combining selection with the system of bindings
in Kivy, we can build a wide range of user interface designs.
We could make data items that contain the names of dog breeds, and connect
the selection of dog breed to the display of details in another view, which
would update automatically on selection. This is done via a binding to the
:attr:`~kivy.adapters.listadapter.ListAdapter.on_selection_change` event::
list_adapter.bind(on_selection_change=callback_function)
where callback_function() gets passed the adapter as an argument and does
whatever is needed for the update. See the
example called list_master_detail.py, and imagine that the list on the left
could be a list of dog breeds, and the detail view on the right could show
details for a selected dog breed.
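A hedged sketch of such a callback (detail_view and its update() method are
hypothetical; adapter.selection is assumed to hold the currently selected
item views)::

    def callback_function(adapter, *args):
        if adapter.selection:
            selected_view = adapter.selection[0]
            # detail_view is a hypothetical widget that knows how to show
            # details for the selected item's text.
            detail_view.update(selected_view.text)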
In another example, we could set the selection_mode of a listview to
'multiple', and load it with a list of answers to a multiple-choice question.
The question could have several correct answers. A color swatch view could be
bound to selection change, as above, so that it turns green as soon as the
correct choices are made, unless the number of touches exceeds a limit, then the
answer session could be terminated. See the examples that feature thumbnail
images to get some ideas, e.g., list_cascade_dict.py.
In a more involved example, we could chain together three listviews, where
selection in the first controls the items shown in the second, and selection in
the second controls the items shown in the third. If allow_empty_selection were
set to False for these listviews, a dynamic system of selection "cascading"
from one list to the next would result.
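A hedged sketch of that cascading idea (group_adapter, breed_adapter and
breeds_by_group are illustrative names; both adapters are assumed to already
be attached to their own listviews)::

    def update_breed_list(group_adapter, *args):
        if group_adapter.selection:
            group = group_adapter.selection[0].text
            # Replacing the data should cause the second listview to
            # repopulate with the breeds for the selected group.
            breed_adapter.data = breeds_by_group[group]

    group_adapter.bind(on_selection_change=update_breed_list)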
There are so many ways that listviews and Kivy bindings functionality can be
used, that we have only scratched the surface here. For on-disk examples, see::
kivy/examples/widgets/lists/list_*.py
Several examples show the "cascading" behavior described above. Others
demonstrate the use of kv templates and composite list views.
'''
__all__ = ('SelectableView', 'ListItemButton', 'ListItemLabel',
'CompositeListItem', 'ListView', )
from kivy.event import EventDispatcher
from kivy.clock import Clock
from kivy.uix.widget import Widget
from kivy.uix.button import Button
from kivy.uix.label import Label
from kivy.uix.boxlayout import BoxLayout
from kivy.adapters.simplelistadapter import SimpleListAdapter
from kivy.uix.abstractview import AbstractView
from kivy.properties import ObjectProperty, DictProperty, \
NumericProperty, ListProperty, BooleanProperty
from kivy.lang import Builder
from math import ceil, floor
class SelectableView(object):
'''The :class:`~kivy.uix.listview.SelectableView` mixin is used to design
list items and other classes that are to be instantiated by an adapter for
use in a listview. The :class:`~kivy.adapters.listadapter.ListAdapter`
and :class:`~kivy.adapters.dictadapter.DictAdapter` adapters are
selection-enabled. select() and deselect() are to be overridden with
display code to mark items as selected or not, if desired.
'''
index = NumericProperty(-1)
'''The index into the underlying data list of the data item this view
represents.
:attr:`index` is a :class:`~kivy.properties.NumericProperty`, default
to -1.
'''
is_selected = BooleanProperty(False)
'''A SelectableView instance carries this property, which should be kept
in sync with the equivalent property in the data item it represents.
:attr:`is_selected` is a :class:`~kivy.properties.BooleanProperty`, default
to False.
'''
def __init__(self, **kwargs):
super(SelectableView, self).__init__(**kwargs)
def select(self, *args):
'''The list item is responsible for updating the display for
being selected, if desired.
'''
self.is_selected = True
def deselect(self, *args):
'''The list item is responsible for updating the display for
being unselected, if desired.
'''
self.is_selected = False
class ListItemButton(SelectableView, Button):
''':class:`~kivy.uix.listview.ListItemButton` mixes
:class:`~kivy.uix.listview.SelectableView` with
:class:`~kivy.uix.button.Button` to produce a button suitable for use in
:class:`~kivy.uix.listview.ListView`.
'''
selected_color = ListProperty([1., 0., 0., 1])
'''
:attr:`selected_color` is a :class:`~kivy.properties.ListProperty` and
defaults to [1., 0., 0., 1].
'''
deselected_color = ListProperty([0., 1., 0., 1])
'''
:attr:`deselected_color` is a :class:`~kivy.properties.ListProperty` and
defaults to [0., 1., 0., 1].
'''
def __init__(self, **kwargs):
super(ListItemButton, self).__init__(**kwargs)
# Set Button bg color to be deselected_color.
self.background_color = self.deselected_color
def select(self, *args):
self.background_color = self.selected_color
if isinstance(self.parent, CompositeListItem):
self.parent.select_from_child(self, *args)
def deselect(self, *args):
self.background_color = self.deselected_color
if isinstance(self.parent, CompositeListItem):
self.parent.deselect_from_child(self, *args)
def select_from_composite(self, *args):
self.background_color = self.selected_color
def deselect_from_composite(self, *args):
self.background_color = self.deselected_color
def __repr__(self):
return '<%s text=%s>' % (self.__class__.__name__, self.text)
# [TODO] Why does this mix in SelectableView -- that makes it work like a
# button, which is redundant.
class ListItemLabel(SelectableView, Label):
''':class:`~kivy.uix.listview.ListItemLabel` mixes
:class:`~kivy.uix.listview.SelectableView` with
:class:`~kivy.uix.label.Label` to produce a label suitable for use in
:class:`~kivy.uix.listview.ListView`.
'''
def __init__(self, **kwargs):
super(ListItemLabel, self).__init__(**kwargs)
def select(self, *args):
self.bold = True
if isinstance(self.parent, CompositeListItem):
self.parent.select_from_child(self, *args)
def deselect(self, *args):
self.bold = False
if isinstance(self.parent, CompositeListItem):
self.parent.deselect_from_child(self, *args)
def select_from_composite(self, *args):
self.bold = True
def deselect_from_composite(self, *args):
self.bold = False
def __repr__(self):
return '<%s text=%s>' % (self.__class__.__name__, self.text)
class CompositeListItem(SelectableView, BoxLayout):
''':class:`~kivy.uix.listview.CompositeListItem` mixes
:class:`~kivy.uix.listview.SelectableView` with :class:`BoxLayout` for a
generic container-style list item, to be used in
:class:`~kivy.uix.listview.ListView`.
'''
background_color = ListProperty([1, 1, 1, 1])
'''ListItem subclasses Button, which has background_color, but
for a composite list item, we must add this property.
:attr:`background_color` is a :class:`~kivy.properties.ListProperty` and
defaults to [1, 1, 1, 1].
'''
selected_color = ListProperty([1., 0., 0., 1])
'''
:attr:`selected_color` is a :class:`~kivy.properties.ListProperty` and
defaults to [1., 0., 0., 1].
'''
deselected_color = ListProperty([.33, .33, .33, 1])
'''
:attr:`deselected_color` is a :class:`~kivy.properties.ListProperty` and
defaults to [.33, .33, .33, 1].
'''
representing_cls = ObjectProperty(None)
'''Which component view class, if any, should represent the composite
list item in __repr__()?
:attr:`representing_cls` is an :class:`~kivy.properties.ObjectProperty` and
defaults to None.
'''
def __init__(self, **kwargs):
super(CompositeListItem, self).__init__(**kwargs)
# Example data:
#
# 'cls_dicts': [{'cls': ListItemButton,
#                'kwargs': {'text': "Left"}},
#               {'cls': ListItemLabel,
#                'kwargs': {'text': "Middle",
#                           'is_representing_cls': True}},
#               {'cls': ListItemButton,
#                'kwargs': {'text': "Right"}}]
# There is an index to the data item this composite list item view
# represents. Get it from kwargs and pass it along to children in the
# loop below.
index = kwargs['index']
for cls_dict in kwargs['cls_dicts']:
cls = cls_dict['cls']
cls_kwargs = cls_dict.get('kwargs', None)
if cls_kwargs:
cls_kwargs['index'] = index
if 'selection_target' not in cls_kwargs:
cls_kwargs['selection_target'] = self
if 'text' not in cls_kwargs:
cls_kwargs['text'] = kwargs['text']
if 'is_representing_cls' in cls_kwargs:
self.representing_cls = cls
self.add_widget(cls(**cls_kwargs))
else:
cls_kwargs = {}
cls_kwargs['index'] = index
if 'text' in kwargs:
cls_kwargs['text'] = kwargs['text']
self.add_widget(cls(**cls_kwargs))
def select(self, *args):
self.background_color = self.selected_color
def deselect(self, *args):
self.background_color = self.deselected_color
def select_from_child(self, child, *args):
for c in self.children:
if c is not child:
c.select_from_composite(*args)
def deselect_from_child(self, child, *args):
for c in self.children:
if c is not child:
c.deselect_from_composite(*args)
def __repr__(self):
if self.representing_cls is not None:
return '<%r>, representing <%s>' % (
self.representing_cls, self.__class__.__name__)
else:
return '<%s>' % (self.__class__.__name__)
Builder.load_string('''
<ListView>:
container: container
ScrollView:
pos: root.pos
on_scroll_y: root._scroll(args[1])
do_scroll_x: False
GridLayout:
cols: 1
id: container
size_hint_y: None
''')
class ListView(AbstractView, EventDispatcher):
''':class:`~kivy.uix.listview.ListView` is a primary high-level widget,
handling the common task of presenting items in a scrolling list.
Flexibility is afforded by use of a variety of adapters to interface with
data.
The adapter property comes via the mixed in
:class:`~kivy.uix.abstractview.AbstractView` class.
:class:`~kivy.uix.listview.ListView` also subclasses
:class:`EventDispatcher` for scrolling. The event *on_scroll_complete* is
used in refreshing the main view.
For a simple list of string items, without selection, use
:class:`~kivy.adapters.simplelistadapter.SimpleListAdapter`. For list items
that respond to selection, ranging from simple items to advanced
composites, use :class:`~kivy.adapters.listadapter.ListAdapter`. For an
alternate powerful adapter, use
:class:`~kivy.adapters.dictadapter.DictAdapter`, rounding out the choice
for designing highly interactive lists.
:Events:
`on_scroll_complete`: (boolean, )
Fired when scrolling completes.
'''
divider = ObjectProperty(None)
'''[TODO] Not used.
'''
divider_height = NumericProperty(2)
'''[TODO] Not used.
'''
container = ObjectProperty(None)
'''The container is a :class:`~kivy.uix.gridlayout.GridLayout` widget held
within a :class:`~kivy.uix.scrollview.ScrollView` widget. (See the
associated kv block in the Builder.load_string() setup). Item view
instances managed and provided by the adapter are added to this container.
The container is cleared with a call to clear_widgets() when the list is
rebuilt by the populate() method. A padding
:class:`~kivy.uix.widget.Widget` instance is also added as needed,
depending on the row height calculations.
:attr:`container` is an :class:`~kivy.properties.ObjectProperty` and
defaults to None.
'''
row_height = NumericProperty(None)
'''The row_height property is calculated on the basis of the height of the
container and the count of items.
:attr:`row_height` is a :class:`~kivy.properties.NumericProperty` and
defaults to None.
'''
item_strings = ListProperty([])
'''If item_strings is provided, create an instance of
:class:`~kivy.adapters.simplelistadapter.SimpleListAdapter` with this list
of strings, and use it to manage a no-selection list.
:attr:`item_strings` is a :class:`~kivy.properties.ListProperty` and
defaults to [].
'''
scrolling = BooleanProperty(False)
'''If the scroll_to() method is called while scrolling operations are
happening, a call recursion error can occur. scroll_to() checks to see that
scrolling is False before calling populate(). scroll_to() dispatches an
on_scroll_complete event, which sets scrolling back to False.
:attr:`scrolling` is a :class:`~kivy.properties.BooleanProperty` and
defaults to False.
'''
_index = NumericProperty(0)
_sizes = DictProperty({})
_count = NumericProperty(0)
_wstart = NumericProperty(0)
_wend = NumericProperty(-1)
__events__ = ('on_scroll_complete', )
def __init__(self, **kwargs):
# Check for an adapter argument. If it doesn't exist, we
# check for item_strings in use with SimpleListAdapter
# to make a simple list.
if 'adapter' not in kwargs:
if 'item_strings' not in kwargs:
# Could be missing, or it could be that the ListView is
# declared in a kv file. If kv is in use, and item_strings is
# declared there, then item_strings will not be set until after
# __init__(). So, the data=[] set will temporarily serve for
# SimpleListAdapter instantiation, with the binding to
# item_strings_changed() handling the eventual set of the
# item_strings property from the application of kv rules.
list_adapter = SimpleListAdapter(data=[],
cls=Label)
else:
list_adapter = SimpleListAdapter(data=kwargs['item_strings'],
cls=Label)
kwargs['adapter'] = list_adapter
super(ListView, self).__init__(**kwargs)
self._trigger_populate = Clock.create_trigger(self._spopulate, -1)
self._trigger_reset_populate = \
Clock.create_trigger(self._reset_spopulate, -1)
self.bind(size=self._trigger_populate,
pos=self._trigger_populate,
item_strings=self.item_strings_changed,
adapter=self._trigger_populate)
# The bindings setup above sets self._trigger_populate() to fire
# when the adapter changes, but we also need this binding for when
# adapter.data and other possible triggers change for view updating.
# We don't know that these are, so we ask the adapter to set up the
# bindings back to the view updating function here.
self.adapter.bind_triggers_to_view(self._trigger_reset_populate)
# Added to set data when item_strings is set in a kv template, but it is
# also useful if item_strings is reset generally.
def item_strings_changed(self, *args):
self.adapter.data = self.item_strings
def _scroll(self, scroll_y):
if self.row_height is None:
return
self._scroll_y = scroll_y
scroll_y = 1 - min(1, max(scroll_y, 0))
container = self.container
mstart = (container.height - self.height) * scroll_y
mend = mstart + self.height
# convert distance to index
rh = self.row_height
istart = int(ceil(mstart / rh))
iend = int(floor(mend / rh))
istart = max(0, istart - 1)
iend = max(0, iend - 1)
if istart < self._wstart:
rstart = max(0, istart - 10)
self.populate(rstart, iend)
self._wstart = rstart
self._wend = iend
elif iend > self._wend:
self.populate(istart, iend + 10)
self._wstart = istart
self._wend = iend + 10
def _spopulate(self, *args):
self.populate()
def _reset_spopulate(self, *args):
self._wend = -1
self.populate()
# simulate the scroll again, only if we already scrolled before
# the position might not be the same, mostly because we don't know the
# size of the new item.
if hasattr(self, '_scroll_y'):
self._scroll(self._scroll_y)
def populate(self, istart=None, iend=None):
container = self.container
sizes = self._sizes
rh = self.row_height
# ensure we know what we want to show
if istart is None:
istart = self._wstart
iend = self._wend
# clear the view
container.clear_widgets()
# guess only ?
if iend is not None and iend != -1:
# fill with a "padding"
fh = 0
for x in range(istart):
fh += sizes[x] if x in sizes else rh
container.add_widget(Widget(size_hint_y=None, height=fh))
# now fill with real item_view
index = istart
while index <= iend:
item_view = self.adapter.get_view(index)
index += 1
if item_view is None:
continue
sizes[index] = item_view.height
container.add_widget(item_view)
else:
available_height = self.height
real_height = 0
index = self._index
count = 0
while available_height > 0:
item_view = self.adapter.get_view(index)
if item_view is None:
break
sizes[index] = item_view.height
index += 1
count += 1
container.add_widget(item_view)
available_height -= item_view.height
real_height += item_view.height
self._count = count
# extrapolate the full size of the container from the size
# of view instances in the adapter
if count:
container.height = \
real_height / count * self.adapter.get_count()
if self.row_height is None:
self.row_height = real_height / count
def scroll_to(self, index=0):
if not self.scrolling:
self.scrolling = True
self._index = index
self.populate()
self.dispatch('on_scroll_complete')
def on_scroll_complete(self, *args):
self.scrolling = False
|
|
from __future__ import unicode_literals
from django.conf import settings
from django.core.exceptions import ValidationError
from django.core.urlresolvers import reverse
from django.db import models
from django.db.models.signals import post_init, post_save
from django.utils import timezone
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import User
from symposion.conference.models import Conference
from symposion.sponsorship.managers import SponsorManager
# The benefits we track as individual fields on sponsors
# Names are the names in the database as defined by organizers.
# Field names are the benefit names, lowercased, with
# spaces changed to _, and with "_benefit" appended.
# Column titles are arbitrary.
# "really just care about the ones we have today: print logo, web logo, print description, web description and the ad."
BENEFITS = [
{
'name': 'Web logo',
'field_name': 'web_logo_benefit',
'column_title': _(u"Web Logo"),
}, {
'name': 'Print logo',
'field_name': 'print_logo_benefit',
'column_title': _(u"Print Logo"),
}, {
'name': 'Company Description',
'field_name': 'company_description_benefit',
'column_title': _(u"Web Desc"),
}, {
'name': 'Print Description',
'field_name': 'print_description_benefit',
'column_title': _(u"Print Desc"),
}
]
@python_2_unicode_compatible
class SponsorLevel(models.Model):
conference = models.ForeignKey(Conference, verbose_name=_("Conference"))
name = models.CharField(_("Name"), max_length=100)
order = models.IntegerField(_("Order"), default=0)
cost = models.PositiveIntegerField(_("Cost"))
description = models.TextField(_("Description"), blank=True, help_text=_("This is private."))
class Meta:
ordering = ["conference", "order"]
verbose_name = _("Sponsor level")
verbose_name_plural = _("Sponsor levels")
def __str__(self):
return "%s %s" % (self.conference, self.name)
def sponsors(self):
return self.sponsor_set.filter(active=True).order_by("added")
@python_2_unicode_compatible
class Sponsor(models.Model):
applicant = models.ForeignKey(User, related_name="sponsorships", verbose_name=_("Applicant"),
null=True)
name = models.CharField(_("Sponsor Name"), max_length=100)
display_url = models.URLField(_("display URL"), blank=True)
external_url = models.URLField(_("External URL"))
annotation = models.TextField(_("Annotation"), blank=True)
contact_name = models.CharField(_("Contact Name"), max_length=100)
contact_email = models.EmailField(_("Contact Email"))
level = models.ForeignKey(SponsorLevel, verbose_name=_("level"))
added = models.DateTimeField(_("added"), default=timezone.now)
active = models.BooleanField(_("active"), default=False)
# Denormalization (this assumes only one logo)
sponsor_logo = models.ForeignKey("SponsorBenefit", related_name="+", null=True, blank=True,
editable=False, verbose_name=_("Sponsor logo"))
# Whether things are complete
# True = complete, False = incomplete, Null = n/a for this sponsor level
web_logo_benefit = models.NullBooleanField(_("Web logo benefit"), help_text=_(u"Web logo benefit is complete"))
print_logo_benefit = models.NullBooleanField(_("Print logo benefit"), help_text=_(u"Print logo benefit is complete"))
print_description_benefit = models.NullBooleanField(_("Print description benefit"), help_text=_(u"Print description benefit is complete"))
company_description_benefit = models.NullBooleanField(_("Company description benefit"), help_text=_(u"Company description benefit is complete"))
objects = SponsorManager()
def __str__(self):
return self.name
class Meta:
verbose_name = _("Sponsor")
verbose_name_plural = _("Sponsors")
ordering = ['name']
def save(self, *args, **kwargs):
# Set fields related to benefits being complete
for benefit in BENEFITS:
field_name = benefit['field_name']
benefit_name = benefit['name']
setattr(self, field_name, self.benefit_is_complete(benefit_name))
super(Sponsor, self).save(*args, **kwargs)
def get_absolute_url(self):
if self.active:
return reverse("sponsor_detail", kwargs={"pk": self.pk})
return reverse("sponsor_list")
def get_display_url(self):
if self.display_url:
return self.display_url
else:
return self.external_url
@property
def website_logo(self):
if self.sponsor_logo is None:
benefits = self.sponsor_benefits.filter(
benefit__type="weblogo", upload__isnull=False)[:1]
if benefits.count():
if benefits[0].upload:
self.sponsor_logo = benefits[0]
self.save()
if self.sponsor_logo:
return self.sponsor_logo.upload
@property
def listing_text(self):
if not hasattr(self, "_listing_text"):
self._listing_text = ""
# @@@ better than hard-coding a pk but still not good
benefits = self.sponsor_benefits.filter(benefit__name="Sponsor Description")
if benefits.count():
self._listing_text = benefits[0].text
return self._listing_text
def reset_benefits(self):
"""
Reset all benefits for this sponsor to the defaults for their
sponsorship level.
"""
level = None
try:
level = self.level
except SponsorLevel.DoesNotExist:
pass
allowed_benefits = []
if level:
for benefit_level in level.benefit_levels.all():
# Create all needed benefits if they don't exist already
sponsor_benefit, created = SponsorBenefit.objects.get_or_create(
sponsor=self, benefit=benefit_level.benefit)
# and set to default limits for this level.
sponsor_benefit.max_words = benefit_level.max_words
sponsor_benefit.other_limits = benefit_level.other_limits
# and set to active
sponsor_benefit.active = True
# @@@ We don't call sponsor_benefit.clean here. This means
# that if the sponsorship level for a sponsor is adjusted
# downwards, an existing too-long text entry can remain,
# and won't raise a validation error until it's next
# edited.
sponsor_benefit.save()
allowed_benefits.append(sponsor_benefit.pk)
# Any remaining sponsor benefits that don't normally belong to
# this level are set to inactive
self.sponsor_benefits.exclude(pk__in=allowed_benefits)\
.update(active=False, max_words=None, other_limits="")
def send_coordinator_emails(self):
pass # @@@ should this just be done centrally?
def benefit_is_complete(self, name):
"""Return True - benefit is complete, False - benefit is not complete,
or None - benefit not applicable for this sponsor's level """
if BenefitLevel.objects.filter(level=self.level, benefit__name=name).exists():
try:
benefit = self.sponsor_benefits.get(benefit__name=name)
except SponsorBenefit.DoesNotExist:
return False
else:
return benefit.is_complete
else:
return None # Not an applicable benefit for this sponsor's level
def _store_initial_level(sender, instance, **kwargs):
if instance:
instance._initial_level_id = instance.level_id
post_init.connect(_store_initial_level, sender=Sponsor)
def _check_level_change(sender, instance, created, **kwargs):
if instance and (created or instance.level_id != instance._initial_level_id):
instance.reset_benefits()
post_save.connect(_check_level_change, sender=Sponsor)
BENEFIT_TYPE_CHOICES = [
("text", _("Text")),
("file", _("File")),
("richtext", _("Rich Text")),
("weblogo", _("Web Logo")),
("simple", _("Simple")),
("option", _("Option"))
]
CONTENT_TYPE_CHOICES = [
("simple", "Simple"),
] + [
("listing_text_%s" % lang, "Listing Text (%s)" % label) for lang, label in settings.LANGUAGES
]
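# For example, with settings.LANGUAGES = [("en", "English"), ("de", "German")]
# (illustrative values), the comprehension above produces:
# [("simple", "Simple"),
#  ("listing_text_en", "Listing Text (English)"),
#  ("listing_text_de", "Listing Text (German)")]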
@python_2_unicode_compatible
class Benefit(models.Model):
name = models.CharField(_("Name"), max_length=100)
description = models.TextField(_("Description"), blank=True)
type = models.CharField(_("Type"), choices=BENEFIT_TYPE_CHOICES, max_length=10,
default="simple")
content_type = models.CharField(_("content type"), choices=CONTENT_TYPE_CHOICES,
max_length=20, default="simple")
def __str__(self):
return self.name
@python_2_unicode_compatible
class BenefitLevel(models.Model):
benefit = models.ForeignKey(Benefit, related_name="benefit_levels", verbose_name=_("Benefit"))
level = models.ForeignKey(SponsorLevel, related_name="benefit_levels", verbose_name=_("Level"))
# default limits for this benefit at given level
max_words = models.PositiveIntegerField(_("Max words"), blank=True, null=True)
other_limits = models.CharField(_("Other limits"), max_length=200, blank=True)
class Meta:
ordering = ["level"]
verbose_name = _("Benefit level")
verbose_name_plural = _("Benefit levels")
def __str__(self):
return "%s - %s" % (self.level, self.benefit)
@python_2_unicode_compatible
class SponsorBenefit(models.Model):
sponsor = models.ForeignKey(Sponsor, related_name="sponsor_benefits", verbose_name=_("Sponsor"))
benefit = models.ForeignKey(Benefit, related_name="sponsor_benefits", verbose_name=_("Benefit"))
active = models.BooleanField(default=True, verbose_name=_("Active"))
# Limits: will initially be set to defaults from corresponding BenefitLevel
max_words = models.PositiveIntegerField(_("Max words"), blank=True, null=True)
other_limits = models.CharField(_("Other limits"), max_length=200, blank=True)
# Data: zero or one of these fields will be used, depending on the
# type of the Benefit (text, file, or simple)
text = models.TextField(_("Text"), blank=True)
upload = models.FileField(_("File"), blank=True, upload_to="sponsor_files")
# Whether any assets required from the sponsor have been provided
# (e.g. a logo file for a Web logo benefit).
is_complete = models.NullBooleanField(_("Complete?"), help_text=_(u"True - benefit complete; False - benefit incomplete; Null - n/a"))
class Meta:
ordering = ["-active"]
verbose_name = _("Sponsor benefit")
verbose_name_plural = _("Sponsor benefits")
def __str__(self):
return "%s - %s (%s)" % (self.sponsor, self.benefit, self.benefit.type)
def save(self, *args, **kwargs):
# Validate - save() doesn't clean your model by default, so call
# it explicitly before saving
self.full_clean()
self.is_complete = self._is_complete()
super(SponsorBenefit, self).save(*args, **kwargs)
def clean(self):
num_words = len(self.text.split())
if self.max_words and num_words > self.max_words:
raise ValidationError(
_("Sponsorship level only allows for %(word)s words, you provided %(num)d.") % {
"word": self.max_words, "num": num_words})
def data_fields(self):
"""
Return list of data field names which should be editable for
this ``SponsorBenefit``, depending on its ``Benefit`` type.
"""
if self.benefit.type == "file" or self.benefit.type == "weblogo":
return ["upload"]
elif self.benefit.type in ("text", "richtext", "simple", "option"):
return ["text"]
return []
def _is_text_benefit(self):
return self.benefit.type in ["text", "richtext", "simple"] and bool(self.text)
def _is_upload_benefit(self):
return self.benefit.type in ["file", "weblogo"] and bool(self.upload)
def _is_complete(self):
return self.active and (self._is_text_benefit() or self._is_upload_benefit())
def _denorm_weblogo(sender, instance, created, **kwargs):
if instance:
if instance.benefit.type == "weblogo" and instance.upload:
sponsor = instance.sponsor
sponsor.sponsor_logo = instance
sponsor.save()
post_save.connect(_denorm_weblogo, sender=SponsorBenefit)
|
|
import torch
import p3b7 as bmk
import candle
import numpy as np
import pandas as pd
from pathlib import Path
import torch.nn as nn
from torch.utils.data import DataLoader
from data import P3B3, Egress
from mtcnn import MTCNN, Hparams
from util import to_device
from meters import AccuracyMeter
from metrics import F1Meter
from prune import (
negative_prune, min_max_prune,
create_prune_masks, remove_prune_masks
)
TASKS = {
'subsite': 15,
'laterality': 3,
'behavior': 3,
'grade': 3,
}
TRAIN_F1_MICRO = F1Meter(TASKS, 'micro')
VALID_F1_MICRO = F1Meter(TASKS, 'micro')
TRAIN_F1_MACRO = F1Meter(TASKS, 'macro')
VALID_F1_MACRO = F1Meter(TASKS, 'macro')
def initialize_parameters():
"""Initialize the parameters for the P3B7 benchmark """
p3b7_bench = bmk.BenchmarkP3B7(
bmk.file_path,
"default_model.txt",
"pytorch",
prog="p3b7",
desc="Network pruning",
)
gParameters = candle.finalize_parameters(p3b7_bench)
return gParameters
def fetch_data(gParameters):
"""Download and unpack data
Args:
gParameters: parameters from candle
Returns:
path to where the data is located
"""
path = gParameters.data_url
fpath = candle.fetch_file(
path + gParameters.train_data, 'Pilot3', unpack=True
)
return fpath
def get_synthetic_data(args):
"""Initialize data loaders
Args:
args: parameters from candle, used to fetch and locate the data
Returns:
train and valid data
"""
datapath = fetch_data(args)
train_data = P3B3(datapath, 'train')
valid_data = P3B3(datapath, 'test')
return train_data, valid_data
def get_egress_data(tasks):
"""Initialize egress tokenized data loaders
Args:
tasks: dictionary of the number of classes for each task
Returns:
train and valid data
"""
train_data = Egress('./data', 'train')
valid_data = Egress('./data', 'valid')
return train_data, valid_data
def train(model, loader, optimizer, device, epoch):
model.train()  # ensure training mode; evaluate() switches the model to eval mode
accmeter = AccuracyMeter(TASKS, loader)
total_loss = 0
for idx, (data, target) in enumerate(loader):
optimizer.zero_grad()
data, target = data.to(device), to_device(target, device)
logits = model(data)
_ = TRAIN_F1_MICRO.f1(to_device(logits, 'cpu'), to_device(target, 'cpu'))
_ = TRAIN_F1_MACRO.f1(to_device(logits, 'cpu'), to_device(target, 'cpu'))
loss = model.loss_value(logits, target, reduce="mean")
loss.backward()
optimizer.step()
total_loss += loss.item()
accmeter.update(logits, target)
avg_loss = total_loss / len(loader.dataset)
accmeter.update_accuracy()
print(f'\nEpoch {epoch} Training Accuracy:')
accmeter.print_task_accuracies()
accmeter.reset()
return avg_loss
def evaluate(model, loader, device):
accmeter = AccuracyMeter(TASKS, loader)
loss = 0
model.eval()
with torch.no_grad():
for idx, (data, target) in enumerate(loader):
data, target = data.to(device), to_device(target, device)
logits = model(data)
_ = VALID_F1_MICRO.f1(to_device(logits, 'cpu'), to_device(target, 'cpu'))
_ = VALID_F1_MACRO.f1(to_device(logits, 'cpu'), to_device(target, 'cpu'))
loss += model.loss_value(logits, target, reduce="mean").item()
accmeter.update(logits, target)
accmeter.update_accuracy()
print('Validation accuracy:')
accmeter.print_task_accuracies()
loss /= len(loader.dataset)
return loss
def save_dataframe(metrics, filename):
"""Save F1 metrics"""
df = pd.DataFrame(metrics, index=[0])
path = Path(ARGS.savepath).joinpath(f'f1/{filename}.csv')
df.to_csv(path, index=False)
def run(args):
args = candle.ArgumentStruct(**args)
args.cuda = torch.cuda.is_available()
args.device = torch.device("cuda" if args.cuda else "cpu")
if args.use_synthetic_data:
train_data, valid_data = get_synthetic_data(args)
hparams = Hparams(
kernel1=args.kernel1,
kernel2=args.kernel2,
kernel3=args.kernel3,
embed_dim=args.embed_dim,
n_filters=args.n_filters,
)
else:
train_data, valid_data = get_egress_data(TASKS)
hparams = Hparams(
kernel1=args.kernel1,
kernel2=args.kernel2,
kernel3=args.kernel3,
embed_dim=args.embed_dim,
n_filters=args.n_filters,
vocab_size=len(train_data.vocab)
)
train_loader = DataLoader(train_data, batch_size=args.batch_size)
valid_loader = DataLoader(valid_data, batch_size=args.batch_size)
model = MTCNN(TASKS, hparams).to(args.device)
model = create_prune_masks(model)
optimizer = torch.optim.Adam(
model.parameters(), lr=args.learning_rate, eps=args.eps
)
train_epoch_loss = []
valid_epoch_loss = []
for epoch in range(args.epochs):
train_loss = train(model, train_loader, optimizer, args.device, epoch)
valid_loss = evaluate(model, valid_loader, args.device)
train_epoch_loss.append(train_loss)
valid_epoch_loss.append(valid_loss)
model = remove_prune_masks(model)
def main():
params = initialize_parameters()
run(params)
if __name__ == "__main__":
main()
|
|
"""
This file contains a class that holds the TimeSeries class. This class is used
to "manage" the time series within PASTAS. It has methods to change a time
series in frequency and extend the time series, without losing the original
data.
August 2017, R.A. Collenteur
"""
from logging import getLogger
import pandas as pd
from pandas.tseries.frequencies import to_offset
from .utils import get_stress_dt, get_dt, get_time_offset, \
timestep_weighted_resample
logger = getLogger(__name__)
class TimeSeries:
"""Class that deals with all user-provided Time Series.
Parameters
----------
series: pandas.Series or pastas.TimeSeries
Pandas Series with time indices and values or a Pastas.TimeSeries
instance. If the latter is provided, a new TimeSeries is created by
copying the series and its settings.
name: str, optional
String with the name of the time series, if None is provided,
pastas will try to derive the name from the series.
settings: str or dict, optional
String with the name of one of the predefined settings (obtained
through ps.TimeSeries._predefined_settings) or a dictionary with the
settings to be applied. This does not have to include all the
settings arguments.
metadata: dict, optional
Dictionary with metadata of the time series.
freq_original: str, optional
**kwargs: optional
Any keyword arguments that are provided but are not listed will be
passed as additional settings.
Notes
-----
For the individual options for the different settings please refer to
the docstring from the TimeSeries.update_series() method.
To obtain the predefined TimeSeries settings, you can run the following
line of code: 'ps.TimeSeries._predefined_settings'
See Also
--------
ps.timeseries.TimeSeries.update_series
Returns
-------
series: pastas.TimeSeries
Returns a pastas.TimeSeries object.
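Examples
--------
A minimal sketch, assuming pastas is imported as ``ps`` and ``obs`` is a
pandas Series with a DatetimeIndex::

    ts = ps.TimeSeries(obs, name="head", settings="oseries")
    ts.update_series(freq="D", tmin="2000-01-01")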
"""
_predefined_settings = {
"oseries": {"fill_nan": "drop", "sample_down": "drop"},
"prec": {"sample_up": "bfill", "sample_down": "mean",
"fill_nan": 0.0, "fill_before": "mean", "fill_after": "mean"},
"evap": {"sample_up": "bfill", "sample_down": "mean",
"fill_before": "mean", "fill_after": "mean",
"fill_nan": "interpolate"},
"well": {"sample_up": "bfill", "sample_down": "mean",
"fill_nan": 0.0, "fill_before": 0.0, "fill_after": 0.0,
"to_daily_unit": "divide"},
"waterlevel": {"sample_up": "interpolate", "sample_down": "mean",
"fill_before": "mean", "fill_after": "mean",
"fill_nan": "interpolate"},
"level": {"sample_up": "interpolate", "sample_down": "mean",
"fill_before": "mean", "fill_after": "mean",
"fill_nan": "interpolate"},
"flux": {"sample_up": "bfill", "sample_down": "mean",
"fill_before": "mean", "fill_after": "mean",
"fill_nan": 0.0},
"quantity": {"sample_up": "divide", "sample_down": "sum",
"fill_before": "mean", "fill_after": "mean",
"fill_nan": 0.0},
}
def __init__(self, series, name=None, settings=None, metadata=None,
freq_original=None, **kwargs):
if isinstance(series, TimeSeries):
# Copy all the series
self._series_original = series.series_original.copy()
self._series_validated = series.series_validated.copy()
self._series = series.series.copy()
# Copy all the properties
self.freq_original = series.freq_original
self.settings = series.settings.copy()
self.metadata = series.metadata.copy()
validate = False
update = False
if settings is None:
settings = self.settings.copy()
else:
# Make sure we have a Pandas Series and not a 1D-DataFrame
if isinstance(series, pd.DataFrame):
if len(series.columns) == 1:
series = series.iloc[:, 0]
elif not isinstance(series, pd.Series):
msg = "Expected a Pandas Series, got {}".format(type(series))
raise TypeError(msg)
validate = True
update = True
# Store a copy of the original series
self._series_original = series.copy()
self.freq_original = freq_original
self.settings = {
"to_daily_unit": None,
"freq": None,
"sample_up": None,
"sample_down": None,
"fill_nan": "interpolate",
"fill_before": None,
"fill_after": None,
"tmin": None,
"tmax": None,
"norm": None,
"time_offset": pd.Timedelta(0)
}
self.metadata = {
"x": 0.0,
"y": 0.0,
"z": 0.0,
"projection": None
}
# Use user provided name or set from series
if name is None:
name = series.name
self.name = name
self._series_original.name = name
if metadata is not None:
self.metadata.update(metadata)
# Update the settings with user-provided values, if any.
if settings:
if isinstance(settings, str):
if settings in self._predefined_settings.keys():
settings = self._predefined_settings[settings]
else:
error = "Settings shortcut code '{}' is not in the " \
"predefined settings options. Please choose " \
"from {}".format(settings,
self._predefined_settings.keys())
raise KeyError(error)
if self.update_settings(**settings):
update = True
if kwargs:
if self.update_settings(**kwargs):
update = True
# Create a validated series for computations and update
if validate:
self._series_validated = self.validate_series(
self._series_original)
if update:
self.update_series(force_update=True, **self.settings)
def __repr__(self):
"""Prints a simple string representation of the time series.
"""
template = ('{cls}(name={name}, freq={freq}, tmin={tmin}, '
'tmax={tmax})')
return template.format(cls=self.__class__.__name__,
name=self.name,
freq=self.settings["freq"],
tmin=self.settings["tmin"],
tmax=self.settings["tmax"])
@property
def series_original(self):
return self._series_original
@series_original.setter
def series_original(self, series):
"""Sets a new freq_original for the TimeSeries"""
if not isinstance(series, pd.Series):
raise TypeError("Expected a Pandas Series, got {}".format(
type(series)))
else:
self._series_original = series
# make sure that tmin and tmax and freq_original are set in validate_series
self.settings["tmin"] = None
self.settings["tmax"] = None
freq_original = self.freq_original # remember what it was
self.freq_original = None
self._series_validated = self.validate_series(
self._series_original)
if self.freq_original is None:
self.freq_original = freq_original
self.update_series(force_update=True, **self.settings)
@property
def series(self):
return self._series
@series.setter
def series(self, value):
raise AttributeError("You cannot set series by yourself, as it is "
"calculated from series_original. Please set "
"series_original to update the series.")
@property
def series_validated(self):
return self._series_validated
@series_validated.setter
def series_validated(self, value):
raise AttributeError("You cannot set series_validated by yourself,as"
"it is calculated from series_original. Please "
"set series_original to update the series.")
def validate_series(self, series):
""" This method performs some PASTAS specific tests for the TimeSeries.
Parameters
----------
series: pd.Series
Pandas series object containing the series time series.
Returns
-------
series: pandas.Series
The validated series as pd.Series
Notes
-----
The Series are validated for the following cases:
1. Series is an actual pandas Series;
2. Nan-values from begin and end are removed;
3. Nan-values between observations are removed;
4. Indices are in Timestamps (standard throughout PASTAS), making the index a pandas DatetimeIndex.
5. Duplicate indices are removed (by averaging).
"""
# 2. Make sure the indices are Timestamps and sorted
series = series.astype(float)
series.index = pd.to_datetime(series.index)
series = series.sort_index()
series.index.name = ""
# 3. Drop nan-values at the beginning and end of the time series
series = series.loc[series.first_valid_index():series.last_valid_index(
)].copy(deep=True)
# 4. Find the frequency of the original series
if self.freq_original:
pass
elif pd.infer_freq(series.index):
self.freq_original = pd.infer_freq(series.index)
msg = "Inferred frequency from time series {}: freq={} " \
.format(self.name, self.freq_original)
logger.info(msg)
else:
self.freq_original = self.settings["freq"]
if self.freq_original is None:
msg = "Cannot determine frequency of series " \
"{}".format(self.name)
logger.info(msg)
elif self.settings["fill_nan"] and self.settings["fill_nan"] != \
"drop":
msg = "User-provided frequency is applied when validating " \
"the Time Series {}. Make sure the provided frequency" \
" is close to the real frequency of the original " \
"series.".format(self.name)
logger.warning(msg)
# 5. Handle duplicate indices
if not series.index.is_unique:
msg = "duplicate time-indexes were found in the Time Series {}." \
"Values were averaged.".format(self.name)
logger.warning(msg)
grouped = series.groupby(level=0)
series = grouped.mean()
# 6. drop nan-values
if series.hasnans:
series = self.fill_nan(series)
if self.settings["tmin"] is None:
self.settings["tmin"] = series.index.min()
if self.settings["tmax"] is None:
self.settings["tmax"] = series.index.max()
return series
def update_settings(self, **kwargs):
"""Internal method that check if an update is actually necessary.
TODO still some bug in here when comparing timestamps. causing uneccesary updates..
"""
update = False
for key, value in kwargs.items():
if key in ["tmin", "tmax"]:
if value is None:
pass
else:
value = pd.Timestamp(value)
if value != self.settings[key]:
self.settings[key] = value
update = True
return update
def update_series(self, force_update=False, **kwargs):
"""Method to update the series with new options, but most likely
only a change in the frequency before solving a PASTAS model.
Parameters
----------
force_update: bool, optional
argument that is used to force an update, even when no changes
are found. Internally used by the __init__ method. Default is
False.
freq: str, optional
String representing the desired frequency of the time series.
sample_up: str or float, optional
String with the method to use when the frequency is increased (
e.g. Weekly to daily). Possible values are: "backfill", "bfill",
"pad", "ffill", "mean", "interpolate", "divide" or a float value
to fill the gaps.
sample_down: str, optional
String with the method to use when the frequency decreases
(e.g. from daily to weekly values). Possible values are: "mean",
"drop", "sum", "min", "max".
fill_nan: str or float, optional
Method to use when there are nan-values in the time series.
Possible values are: "mean", "drop", "interpolate" (default) or a
float value.
fill_before: str or float, optional
Method used to extend a time series before any measurements are
available. Possible values are: "mean" or a float value.
fill_after: str or float, optional
Method used to extend a time series after any measurements are
available. Possible values are: "mean" or a float value.
tmin: str or pandas.TimeStamp, optional
String that can be converted to, or a Pandas TimeStamp with the
minimum time of the series.
tmax: str or pandas.TimeStamp, optional
String that can be converted to, or a Pandas TimeStamp with the
maximum time of the series.
norm: str or float, optional
String with the method to normalize the time series with.
Possible values are: "mean" or "median", "min", "max" or a float
value.
Notes
-----
        The method will validate whether any of the settings have changed to
        determine if the series needs to be updated.
"""
if self.update_settings(**kwargs) or force_update:
# Get the validated series to start with
series = self.series_validated.copy(deep=True)
# Update the series with the new settings
series = self.to_daily_unit(series)
series = self.change_frequency(series)
series = self.fill_before(series)
series = self.fill_after(series)
series = self.normalize(series)
self._series = series
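    # Illustrative usage sketch (hypothetical, assuming a TimeSeries instance
    # `ts` and the settings documented above; the values are examples only):
    #
    #   ts.update_series(freq="7D", sample_down="mean", fill_nan="interpolate",
    #                    tmin="2000-01-01", tmax="2010-01-01")
    #
    # update_settings() detects which settings changed, after which the
    # validated series is re-processed with the new frequency, fill and
    # normalization options.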
def change_frequency(self, series):
"""Method to change the frequency of the time series.
"""
freq = self.settings["freq"]
        # 1. If no freq string is present or provided (e.g. Oseries)
if not freq:
return series
# 2. If original frequency could not be determined
elif not self.freq_original:
series = self.sample_weighted(series)
else:
dt_new = get_dt(freq)
dt_org = get_stress_dt(self.freq_original)
# 3. If new and original frequency are not a multiple of each other
eps = 1e-10
if not ((dt_new % dt_org) < eps or (dt_org % dt_new) < eps):
series = self.sample_weighted(series)
            # 4. If the new frequency is higher than the original
            # (smaller time step): sample up
elif dt_new < dt_org:
series = self.sample_up(series)
            # 5. If the new frequency is lower than the original
            # (larger time step): sample down
elif dt_new > dt_org:
series = self.sample_down(series)
# 6. If new frequency is equal to its original
elif dt_new == dt_org:
# shouldn't we do this before changing frequency?
series = self.fill_nan(series)
# Drop nan-values at the beginning and end of the time series
series = series.loc[
series.first_valid_index():series.last_valid_index()]
return series
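    # Example of the frequency logic above, assuming get_dt and get_stress_dt
    # return the time step in days (e.g. get_dt("D") == 1, get_dt("7D") == 7):
    #   original "D",  new "7D" -> 7 is a multiple of 1        -> sample_down
    #   original "7D", new "D"  -> 1 divides 7                 -> sample_up
    #   original "7D", new "10D"-> not multiples of each other -> sample_weighted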
def to_daily_unit(self, series):
method = self.settings["to_daily_unit"]
if method is not None:
if method is True or method == "divide":
dt = series.index.to_series().diff() / pd.Timedelta(1, 'd')
dt = dt.fillna(1)
if not (dt == 1.0).all():
series = series / dt
msg = ("Time Series {}: values of stress were transformed "
"to daily values (frequency not altered) with: {}")
logger.info(msg.format(self.name, method))
else:
msg = ("Time Series {}: User-defined option for to_daily_unit "
"{} is not supported")
logger.warning(msg.format(self.name, method))
return series
def sample_up(self, series):
"""Resample the time series when the frequency increases (e.g. from
weekly to daily values).
"""
method = self.settings["sample_up"]
freq = self.settings["freq"]
# adjust the first timestep, so that the output will have the
# correct frequency
t0_new = series.index[0].ceil(freq)
if t0_new > series.index[0]:
            new_index = series.index.to_list()
            new_index[0] = t0_new
            series.index = pd.DatetimeIndex(new_index)
n = series.isnull().values.sum()
if method in ["backfill", "bfill", "pad", "ffill"]:
series = series.asfreq(freq, method=method)
elif method is None:
pass
else:
if method == "mean": # when would you ever want this?
series = series.asfreq(freq)
series.fillna(series.mean(), inplace=True) # Default option
elif method == "interpolate":
series = series.asfreq(freq)
series.interpolate(method="time", inplace=True)
elif method == "divide":
dt = series.index.to_series().diff() / to_offset(freq).delta
series = series / dt
series = series.asfreq(freq, method="bfill")
elif isinstance(method, float):
series = series.asfreq(freq)
series.fillna(method, inplace=True)
else:
msg = "Time Series {}: User-defined option for sample_up {} " \
"is not supported".format(self.name, method)
logger.warning(msg)
if n > 0:
msg = "Time Series {}: {} nan-value(s) was/were found and filled" \
" with: {}".format(self.name, n, method)
logger.info(msg)
return series
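    # Sketch of the sample_up options (hypothetical example): a weekly series
    # resampled to freq="D" with method="bfill" repeats the next observed value
    # for the intermediate days, "interpolate" fills them linearly in time, and
    # a float fills the introduced gaps with that constant value.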
def sample_down(self, series):
"""Resample the time series when the frequency decreases (e.g. from
daily to weekly values).
Notes
-----
make sure the labels are still at the end of each period, and
data at the right side of the bucket is included (see
http://pandas.pydata.org/pandas-docs/stable/generated/pandas.Series.resample.html)
"""
method = self.settings["sample_down"]
freq = self.settings["freq"]
# when a multiple freq is used (like '7D') make sure the first record
# has a rounded index
series = series[series.index[0].ceil(freq):]
# Shift time series by offset, as resample time_offset doesn't do it.
series = series.shift(1, freq=self.settings["time_offset"])
# Provide some standard pandas arguments for all options
kwargs = {"label": "right", "closed": "right",
"loffset": self.settings["time_offset"]}
if method == "mean":
series = series.resample(freq, **kwargs).mean()
elif method == "drop": # does this work?
series = series.resample(freq, **kwargs).mean().dropna()
elif method == "sum":
series = series.resample(freq, **kwargs).sum()
elif method == "min":
series = series.resample(freq, **kwargs).min()
elif method == "max":
series = series.resample(freq, **kwargs).max()
else:
msg = "Time Series {}: User-defined option for sample_down {} is" \
"not supported".format(self.name, method)
logger.warning(msg)
logger.info("Time Series {} was sampled down to freq {} with method "
"{}".format(self.name, freq, method))
return series
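    # Note on the resample arguments above: label="right" stamps each bucket
    # with its right bin edge and closed="right" includes the observation at
    # that right edge in the bucket, so values aggregated from e.g. daily to
    # "7D" keep their labels at the end of each period.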
def sample_weighted(self, series):
freq = self.settings["freq"]
tindex = pd.date_range(series.index[0].ceil(freq), series.index[-1],
freq=freq)
series = timestep_weighted_resample(series, tindex)
msg = "Time Series {} was sampled down to freq {} with method " \
"{}".format(self.name, freq, "timestep_weighted_resample")
logger.info(msg)
return series
def fill_nan(self, series):
"""Fill up the nan-values when present and a constant frequency is
required.
"""
method = self.settings["fill_nan"]
freq = self.freq_original
if freq:
series = series.asfreq(freq)
n = series.isnull().values.sum()
if method == "drop":
series.dropna(inplace=True)
elif method == "mean":
series.fillna(series.mean(), inplace=True) # Default option
elif method == "interpolate":
series.interpolate(method="time", inplace=True)
elif isinstance(method, float):
series.fillna(method, inplace=True)
else:
msg = "Time Series {}: User-defined option for fill_nan {} " \
"is not supported".format(self.name, method)
logger.warning(msg)
else:
method = "drop"
n = series.isnull().values.sum()
series.dropna(inplace=True)
if n > 0:
logger.info("Time Series {}: {} nan-value(s) was/were found and "
"filled with: {}".format(self.name, n, method))
return series
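    # Sketch of fill_nan (hypothetical example): with freq_original="D" and one
    # missing day, asfreq("D") makes the gap explicit as a NaN; "interpolate"
    # then fills it linearly in time, "mean" fills it with the series mean, a
    # float fills it with that constant, and "drop" removes it again.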
def fill_before(self, series):
"""Method to add a period in front of the available time series.
"""
freq = self.settings["freq"]
method = self.settings["fill_before"]
tmin = self.settings["tmin"]
if tmin is None:
pass
elif method is None:
pass
elif pd.Timestamp(tmin) >= series.index.min():
series = series.loc[pd.Timestamp(tmin):]
else:
tmin = pd.Timestamp(tmin)
# When time offsets are not equal
time_offset = get_time_offset(tmin, freq)
tmin = tmin - time_offset
index_extend = pd.date_range(start=tmin, end=series.index.min(),
freq=freq)
index = series.index.union(index_extend[:-1])
series = series.reindex(index)
if method == "mean":
series.fillna(series.mean(), inplace=True) # Default option
elif isinstance(method, float):
series.fillna(method, inplace=True)
else:
msg = "Time Series {}: User-defined option for fill_before " \
"{} is not supported".format(self.name, method)
logger.warning(msg)
return series
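    # Sketch of fill_before (hypothetical example): with tmin="1990-01-01",
    # freq="D" and a series starting in 2000, the index is extended back to
    # tmin with a daily date_range and the newly added steps are filled with
    # the series mean (or with the user-provided float).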
def fill_after(self, series):
"""Method to add a period in front of the available time series.
"""
freq = self.settings["freq"]
method = self.settings["fill_after"]
tmax = self.settings["tmax"]
if tmax is None:
pass
elif method is None:
pass
elif pd.Timestamp(tmax) <= series.index.max():
series = series.loc[:pd.Timestamp(tmax)]
else:
# When time offsets are not equal
            tmax = pd.Timestamp(tmax)
            time_offset = get_time_offset(tmax, freq)
            tmax = tmax - time_offset
index_extend = pd.date_range(start=series.index.max(), end=tmax,
freq=freq)
index = series.index.union(index_extend)
series = series.reindex(index)
if method == "mean":
series.fillna(series.mean(), inplace=True) # Default option
elif isinstance(method, float):
series.fillna(method, inplace=True)
else:
msg = "Time Series {}: User-defined option for fill_after {}" \
" is not supported".format(self.name, method)
logger.warning(msg)
return series
def normalize(self, series):
"""Method to normalize the time series.
TODO: can we also choose to normalize by the fill_before-value?
"""
method = self.settings["norm"]
if method is None:
pass
elif method == "mean":
series = series.subtract(series.mean())
elif method == "median":
series = series.subtract(series.median())
elif method == "min":
series = series.subtract(series.min())
elif method == "max":
series = series.subtract(series.max())
elif isinstance(method, float):
series = series.subtract(method)
else:
msg = "Time Series {}: Selected method {} to normalize the time " \
"series is not supported".format(self.name, method)
logger.info(msg)
return series
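    # Example of normalize: with norm="mean", a series with values
    # [1.0, 2.0, 3.0] becomes [-1.0, 0.0, 1.0]; a float value simply
    # subtracts that value from the series.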
def multiply(self, other):
self._series = self.series.multiply(other)
self._series_original = self.series_original.multiply(other)
self.update_series(force_update=True)
def to_dict(self, series=True):
"""Method to export the Time Series to a json format.
Parameters
----------
        series: bool or str, optional
            True or "original" to export the original time series, "modified"
            to export the modified series, False to only export the
            TimeSeries object's name.
Returns
-------
data: dict
dictionary with the necessary information to recreate the
TimeSeries object completely.
"""
data = {}
if series is True or series == "original":
data["series"] = self.series_original
elif series == "modified":
data["series"] = self
data["name"] = self.name
data["settings"] = self.settings
data["metadata"] = self.metadata
data["freq_original"] = self.freq_original
return data
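    # Illustrative shape of the returned dictionary (hypothetical values,
    # "series" is only included when series=True, "original" or "modified"):
    #   {"series": <pandas.Series>, "name": "prec", "settings": {...},
    #    "metadata": {...}, "freq_original": "D"}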
def plot(self, original=False, **kwargs):
"""Method to plot the TimeSeries object. Plots the edited series by
default.
Parameters
----------
        original: bool, optional
            Plot the original series instead of the edited series.
        kwargs
            Keyword arguments passed on to the plot method of the series.
        Returns
        -------
        ax: matplotlib.axes.Axes
            The axes the series was plotted on.
        """
        if original:
            ax = self.series_original.plot(**kwargs)
else:
ax = self.series.plot(**kwargs)
return ax
|
|
# -*- coding: utf-8 -*-
#
# xarray documentation build configuration file, created by
# sphinx-quickstart on Thu Feb 6 18:57:54 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
import sys
import warnings
import os
print("python exec:", sys.executable)
print("sys.path:", sys.path)
try:
import numpy
print("numpy: %s, %s" % (numpy.__version__, numpy.__file__))
except ImportError:
print("no numpy")
try:
import scipy
print("scipy: %s, %s" % (scipy.__version__, scipy.__file__))
except ImportError:
print("no scipy")
try:
import pandas
print("pandas: %s, %s" % (pandas.__version__, pandas.__file__))
except ImportError:
print("no pandas")
try:
import matplotlib
matplotlib.use('Agg')
print("matplotlib: %s, %s" % (matplotlib.__version__, matplotlib.__file__))
except ImportError:
print("no matplotlib")
try:
import dask
print("dask: %s, %s" % (dask.__version__, dask.__file__))
except ImportError:
print("no dask")
try:
import IPython
print("ipython: %s, %s" % (IPython.__version__, IPython.__file__))
except ImportError:
print("no ipython")
try:
with warnings.catch_warnings():
# https://github.com/mwaskom/seaborn/issues/892
warnings.simplefilter("ignore")
import seaborn
print("seaborn: %s, %s" % (seaborn.__version__, seaborn.__file__))
except ImportError:
print("no seaborn")
try:
import cartopy
print("cartopy: %s, %s" % (cartopy.__version__, cartopy.__file__))
except ImportError:
print("no cartopy")
try:
import netCDF4
print("netCDF4: %s, %s" % (netCDF4.__version__, netCDF4.__file__))
except ImportError:
print("no netCDF4")
try:
import rasterio
print("rasterio: %s, %s" % (rasterio.__version__, rasterio.__file__))
except ImportError:
print("no rasterio")
import xarray
print("xarray: %s, %s" % (xarray.__version__, xarray.__file__))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.autosummary',
'sphinx.ext.intersphinx',
'sphinx.ext.extlinks',
'sphinx.ext.mathjax',
'numpydoc',
'IPython.sphinxext.ipython_directive',
'IPython.sphinxext.ipython_console_highlighting',
'sphinx_gallery.gen_gallery',
]
extlinks = {'issue': ('https://github.com/pydata/xarray/issues/%s', 'GH'),
'pull': ('https://github.com/pydata/xarray/pull/%s', 'PR'),
}
sphinx_gallery_conf = {'examples_dirs': 'gallery',
'gallery_dirs': 'auto_gallery',
'backreferences_dir': False
}
autosummary_generate = True
numpydoc_class_members_toctree = True
numpydoc_show_class_members = False
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'xarray'
copyright = '2014-2016, xarray Developers'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = xarray.version.short_version
# The full version, including alpha/beta/rc tags.
release = xarray.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
# on_rtd is whether we are on readthedocs.org, this line of code grabbed from
# docs.readthedocs.org
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# otherwise, readthedocs.org uses their theme by default, so no need to specify it
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Sometimes the savefig directory doesn't exist and needs to be created
# https://github.com/ipython/ipython/issues/8733
# becomes obsolete when ipython 5.2 is out
ipython_savefig_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'_build','html','_static')
if not os.path.exists(ipython_savefig_dir):
os.makedirs(ipython_savefig_dir)
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'xarraydoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'xarray.tex', 'xarray Documentation',
'xarray Developers', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'xarray', 'xarray Documentation',
['xarray Developers'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'xarray', 'xarray Documentation',
'xarray Developers', 'xarray', 'N-D labeled arrays and datasets in Python.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
'python': ('https://docs.python.org/3.5/', None),
'pandas': ('http://pandas.pydata.org/pandas-docs/stable/', None),
'iris': ('http://scitools.org.uk/iris/docs/latest/', None),
'numpy': ('http://docs.scipy.org/doc/numpy/', None),
}
|
|
"""Test the Z-Wave JS config flow."""
import asyncio
from unittest.mock import patch
import pytest
from zwave_js_server.version import VersionInfo
from homeassistant import config_entries, setup
from homeassistant.components.hassio.handler import HassioAPIError
from homeassistant.components.zwave_js.config_flow import TITLE
from homeassistant.components.zwave_js.const import DOMAIN
from tests.common import MockConfigEntry
ADDON_DISCOVERY_INFO = {
"addon": "Z-Wave JS",
"host": "host1",
"port": 3001,
}
@pytest.fixture(name="supervisor")
def mock_supervisor_fixture():
"""Mock Supervisor."""
with patch("homeassistant.components.hassio.is_hassio", return_value=True):
yield
@pytest.fixture(name="addon_info_side_effect")
def addon_info_side_effect_fixture():
"""Return the add-on info side effect."""
return None
@pytest.fixture(name="addon_info")
def mock_addon_info(addon_info_side_effect):
"""Mock Supervisor add-on info."""
with patch(
"homeassistant.components.hassio.async_get_addon_info",
side_effect=addon_info_side_effect,
) as addon_info:
addon_info.return_value = {}
yield addon_info
@pytest.fixture(name="addon_running")
def mock_addon_running(addon_info):
"""Mock add-on already running."""
addon_info.return_value["state"] = "started"
return addon_info
@pytest.fixture(name="addon_installed")
def mock_addon_installed(addon_info):
"""Mock add-on already installed but not running."""
addon_info.return_value["state"] = "stopped"
addon_info.return_value["version"] = "1.0"
return addon_info
@pytest.fixture(name="addon_options")
def mock_addon_options(addon_info):
"""Mock add-on options."""
addon_info.return_value["options"] = {}
return addon_info.return_value["options"]
@pytest.fixture(name="set_addon_options_side_effect")
def set_addon_options_side_effect_fixture():
"""Return the set add-on options side effect."""
return None
@pytest.fixture(name="set_addon_options")
def mock_set_addon_options(set_addon_options_side_effect):
"""Mock set add-on options."""
with patch(
"homeassistant.components.hassio.async_set_addon_options",
side_effect=set_addon_options_side_effect,
) as set_options:
yield set_options
@pytest.fixture(name="install_addon")
def mock_install_addon():
"""Mock install add-on."""
with patch("homeassistant.components.hassio.async_install_addon") as install_addon:
yield install_addon
@pytest.fixture(name="start_addon_side_effect")
def start_addon_side_effect_fixture():
"""Return the set add-on options side effect."""
return None
@pytest.fixture(name="start_addon")
def mock_start_addon(start_addon_side_effect):
"""Mock start add-on."""
with patch(
"homeassistant.components.hassio.async_start_addon",
side_effect=start_addon_side_effect,
) as start_addon:
yield start_addon
@pytest.fixture(name="server_version_side_effect")
def server_version_side_effect_fixture():
"""Return the server version side effect."""
return None
@pytest.fixture(name="get_server_version", autouse=True)
def mock_get_server_version(server_version_side_effect):
"""Mock server version."""
version_info = VersionInfo(
driver_version="mock-driver-version",
server_version="mock-server-version",
home_id=1234,
)
with patch(
"homeassistant.components.zwave_js.config_flow.get_server_version",
side_effect=server_version_side_effect,
return_value=version_info,
) as mock_version:
yield mock_version
@pytest.fixture(name="addon_setup_time", autouse=True)
def mock_addon_setup_time():
"""Mock add-on setup sleep time."""
with patch(
"homeassistant.components.zwave_js.config_flow.ADDON_SETUP_TIME", new=0
) as addon_setup_time:
yield addon_setup_time
async def test_manual(hass):
"""Test we create an entry with manual step."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
with patch(
"homeassistant.components.zwave_js.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.zwave_js.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
"url": "ws://localhost:3000",
},
)
await hass.async_block_till_done()
assert result2["type"] == "create_entry"
assert result2["title"] == "Z-Wave JS"
assert result2["data"] == {
"url": "ws://localhost:3000",
"usb_path": None,
"network_key": None,
"use_addon": False,
"integration_created_addon": False,
}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
assert result2["result"].unique_id == 1234
@pytest.mark.parametrize(
"url, server_version_side_effect, error",
[
(
"not-ws-url",
None,
"invalid_ws_url",
),
(
"ws://localhost:3000",
asyncio.TimeoutError,
"cannot_connect",
),
(
"ws://localhost:3000",
Exception("Boom"),
"unknown",
),
],
)
async def test_manual_errors(
hass,
url,
error,
):
"""Test all errors with a manual set up."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "manual"
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
"url": url,
},
)
assert result["type"] == "form"
assert result["step_id"] == "manual"
assert result["errors"] == {"base": error}
async def test_manual_already_configured(hass):
"""Test that only one unique instance is allowed."""
entry = MockConfigEntry(domain=DOMAIN, data={}, title=TITLE, unique_id=1234)
entry.add_to_hass(hass)
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "manual"
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
"url": "ws://localhost:3000",
},
)
assert result["type"] == "abort"
assert result["reason"] == "already_configured"
@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}])
async def test_supervisor_discovery(
hass, supervisor, addon_running, addon_options, get_addon_discovery_info
):
"""Test flow started from Supervisor discovery."""
await setup.async_setup_component(hass, "persistent_notification", {})
addon_options["device"] = "/test"
addon_options["network_key"] = "abc123"
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_HASSIO},
data=ADDON_DISCOVERY_INFO,
)
with patch(
"homeassistant.components.zwave_js.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.zwave_js.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
await hass.async_block_till_done()
assert result["type"] == "create_entry"
assert result["title"] == TITLE
assert result["data"] == {
"url": "ws://host1:3001",
"usb_path": "/test",
"network_key": "abc123",
"use_addon": True,
"integration_created_addon": False,
}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
@pytest.mark.parametrize(
"discovery_info, server_version_side_effect",
[({"config": ADDON_DISCOVERY_INFO}, asyncio.TimeoutError())],
)
async def test_supervisor_discovery_cannot_connect(
hass, supervisor, get_addon_discovery_info
):
"""Test Supervisor discovery and cannot connect."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_HASSIO},
data=ADDON_DISCOVERY_INFO,
)
assert result["type"] == "abort"
assert result["reason"] == "cannot_connect"
@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}])
async def test_clean_discovery_on_user_create(
hass, supervisor, addon_running, addon_options, get_addon_discovery_info
):
"""Test discovery flow is cleaned up when a user flow is finished."""
await setup.async_setup_component(hass, "persistent_notification", {})
addon_options["device"] = "/test"
addon_options["network_key"] = "abc123"
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_HASSIO},
data=ADDON_DISCOVERY_INFO,
)
assert result["type"] == "form"
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "on_supervisor"
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {"use_addon": False}
)
assert result["type"] == "form"
assert result["step_id"] == "manual"
with patch(
"homeassistant.components.zwave_js.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.zwave_js.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
"url": "ws://localhost:3000",
},
)
await hass.async_block_till_done()
assert len(hass.config_entries.flow.async_progress()) == 0
assert result["type"] == "create_entry"
assert result["title"] == TITLE
assert result["data"] == {
"url": "ws://localhost:3000",
"usb_path": None,
"network_key": None,
"use_addon": False,
"integration_created_addon": False,
}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_abort_discovery_with_existing_entry(
hass, supervisor, addon_running, addon_options
):
"""Test discovery flow is aborted if an entry already exists."""
await setup.async_setup_component(hass, "persistent_notification", {})
entry = MockConfigEntry(
domain=DOMAIN, data={"url": "ws://localhost:3000"}, title=TITLE, unique_id=1234
)
entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_HASSIO},
data=ADDON_DISCOVERY_INFO,
)
assert result["type"] == "abort"
assert result["reason"] == "already_configured"
# Assert that the entry data is updated with discovery info.
assert entry.data["url"] == "ws://host1:3001"
async def test_discovery_addon_not_running(
hass, supervisor, addon_installed, addon_options, set_addon_options, start_addon
):
"""Test discovery with add-on already installed but not running."""
addon_options["device"] = None
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_HASSIO},
data=ADDON_DISCOVERY_INFO,
)
assert result["step_id"] == "hassio_confirm"
assert result["type"] == "form"
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert result["step_id"] == "start_addon"
assert result["type"] == "form"
async def test_discovery_addon_not_installed(
hass, supervisor, addon_installed, install_addon, addon_options
):
"""Test discovery with add-on not installed."""
addon_installed.return_value["version"] = None
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_HASSIO},
data=ADDON_DISCOVERY_INFO,
)
assert result["step_id"] == "hassio_confirm"
assert result["type"] == "form"
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert result["step_id"] == "install_addon"
assert result["type"] == "progress"
await hass.async_block_till_done()
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result["type"] == "form"
assert result["step_id"] == "start_addon"
async def test_not_addon(hass, supervisor):
"""Test opting out of add-on on Supervisor."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "on_supervisor"
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {"use_addon": False}
)
assert result["type"] == "form"
assert result["step_id"] == "manual"
with patch(
"homeassistant.components.zwave_js.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.zwave_js.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
"url": "ws://localhost:3000",
},
)
await hass.async_block_till_done()
assert result["type"] == "create_entry"
assert result["title"] == TITLE
assert result["data"] == {
"url": "ws://localhost:3000",
"usb_path": None,
"network_key": None,
"use_addon": False,
"integration_created_addon": False,
}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}])
async def test_addon_running(
hass,
supervisor,
addon_running,
addon_options,
get_addon_discovery_info,
):
"""Test add-on already running on Supervisor."""
addon_options["device"] = "/test"
addon_options["network_key"] = "abc123"
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "on_supervisor"
with patch(
"homeassistant.components.zwave_js.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.zwave_js.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {"use_addon": True}
)
await hass.async_block_till_done()
assert result["type"] == "create_entry"
assert result["title"] == TITLE
assert result["data"] == {
"url": "ws://host1:3001",
"usb_path": "/test",
"network_key": "abc123",
"use_addon": True,
"integration_created_addon": False,
}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
@pytest.mark.parametrize(
"discovery_info, discovery_info_side_effect, server_version_side_effect, "
"addon_info_side_effect, abort_reason",
[
(
{"config": ADDON_DISCOVERY_INFO},
HassioAPIError(),
None,
None,
"addon_get_discovery_info_failed",
),
(
{"config": ADDON_DISCOVERY_INFO},
None,
asyncio.TimeoutError,
None,
"cannot_connect",
),
(
None,
None,
None,
None,
"addon_missing_discovery_info",
),
(
{"config": ADDON_DISCOVERY_INFO},
None,
None,
HassioAPIError(),
"addon_info_failed",
),
],
)
async def test_addon_running_failures(
hass,
supervisor,
addon_running,
get_addon_discovery_info,
abort_reason,
):
"""Test all failures when add-on is running."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "on_supervisor"
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {"use_addon": True}
)
assert result["type"] == "abort"
assert result["reason"] == abort_reason
@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}])
async def test_addon_running_already_configured(
hass, supervisor, addon_running, get_addon_discovery_info
):
"""Test that only one unique instance is allowed when add-on is running."""
entry = MockConfigEntry(domain=DOMAIN, data={}, title=TITLE, unique_id=1234)
entry.add_to_hass(hass)
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "on_supervisor"
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {"use_addon": True}
)
assert result["type"] == "abort"
assert result["reason"] == "already_configured"
@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}])
async def test_addon_installed(
hass,
supervisor,
addon_installed,
addon_options,
set_addon_options,
start_addon,
get_addon_discovery_info,
):
"""Test add-on already installed but not running on Supervisor."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "on_supervisor"
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {"use_addon": True}
)
assert result["type"] == "form"
assert result["step_id"] == "start_addon"
with patch(
"homeassistant.components.zwave_js.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.zwave_js.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {"usb_path": "/test", "network_key": "abc123"}
)
await hass.async_block_till_done()
assert result["type"] == "create_entry"
assert result["title"] == TITLE
assert result["data"] == {
"url": "ws://host1:3001",
"usb_path": "/test",
"network_key": "abc123",
"use_addon": True,
"integration_created_addon": False,
}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
@pytest.mark.parametrize(
"discovery_info, start_addon_side_effect",
[({"config": ADDON_DISCOVERY_INFO}, HassioAPIError())],
)
async def test_addon_installed_start_failure(
hass,
supervisor,
addon_installed,
addon_options,
set_addon_options,
start_addon,
get_addon_discovery_info,
):
"""Test add-on start failure when add-on is installed."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "on_supervisor"
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {"use_addon": True}
)
assert result["type"] == "form"
assert result["step_id"] == "start_addon"
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {"usb_path": "/test", "network_key": "abc123"}
)
assert result["type"] == "form"
assert result["errors"] == {"base": "addon_start_failed"}
@pytest.mark.parametrize(
"set_addon_options_side_effect, start_addon_side_effect, discovery_info, "
"server_version_side_effect, abort_reason",
[
(
HassioAPIError(),
None,
{"config": ADDON_DISCOVERY_INFO},
None,
"addon_set_config_failed",
),
(
None,
None,
{"config": ADDON_DISCOVERY_INFO},
asyncio.TimeoutError,
"cannot_connect",
),
(
None,
None,
None,
None,
"addon_missing_discovery_info",
),
],
)
async def test_addon_installed_failures(
hass,
supervisor,
addon_installed,
addon_options,
set_addon_options,
start_addon,
get_addon_discovery_info,
abort_reason,
):
"""Test all failures when add-on is installed."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "on_supervisor"
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {"use_addon": True}
)
assert result["type"] == "form"
assert result["step_id"] == "start_addon"
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {"usb_path": "/test", "network_key": "abc123"}
)
assert result["type"] == "abort"
assert result["reason"] == abort_reason
@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}])
async def test_addon_installed_already_configured(
hass,
supervisor,
addon_installed,
addon_options,
set_addon_options,
start_addon,
get_addon_discovery_info,
):
"""Test that only one unique instance is allowed when add-on is installed."""
entry = MockConfigEntry(domain=DOMAIN, data={}, title=TITLE, unique_id=1234)
entry.add_to_hass(hass)
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "on_supervisor"
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {"use_addon": True}
)
assert result["type"] == "form"
assert result["step_id"] == "start_addon"
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {"usb_path": "/test", "network_key": "abc123"}
)
assert result["type"] == "abort"
assert result["reason"] == "already_configured"
@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}])
async def test_addon_not_installed(
hass,
supervisor,
addon_installed,
install_addon,
addon_options,
set_addon_options,
start_addon,
get_addon_discovery_info,
):
"""Test add-on not installed."""
addon_installed.return_value["version"] = None
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "on_supervisor"
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {"use_addon": True}
)
assert result["type"] == "progress"
# Make sure the flow continues when the progress task is done.
await hass.async_block_till_done()
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result["type"] == "form"
assert result["step_id"] == "start_addon"
with patch(
"homeassistant.components.zwave_js.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.zwave_js.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {"usb_path": "/test", "network_key": "abc123"}
)
await hass.async_block_till_done()
assert result["type"] == "create_entry"
assert result["title"] == TITLE
assert result["data"] == {
"url": "ws://host1:3001",
"usb_path": "/test",
"network_key": "abc123",
"use_addon": True,
"integration_created_addon": True,
}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
async def test_install_addon_failure(hass, supervisor, addon_installed, install_addon):
"""Test add-on install failure."""
addon_installed.return_value["version"] = None
install_addon.side_effect = HassioAPIError()
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "on_supervisor"
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {"use_addon": True}
)
assert result["type"] == "progress"
# Make sure the flow continues when the progress task is done.
await hass.async_block_till_done()
result = await hass.config_entries.flow.async_configure(result["flow_id"])
assert result["type"] == "abort"
assert result["reason"] == "addon_install_failed"
|
|
"""SmartFactory code generator base.
Base of code generator for SmartFactory that provides SmartSchema object in
accordance with given internal model.
"""
# pylint: disable=W0402
# pylint: disable=C0302
import codecs
import os
import string
import uuid
import re
from model.enum import Enum
from model.enum_element import EnumElement
from model.function import Function
class GenerateError(Exception):
"""Generate error.
    This exception is raised when the generator is unable to create
    output from the given model.
"""
pass
def to_camel_case(snake_str):
components = snake_str.split('_')
return components[0].lower() + "".join(x.title() for x in components[1:])
def to_snake_case(camel_str):
s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', camel_str)
return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1)
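# Examples of the helpers above:
#   to_camel_case("vehicle_data")    -> "vehicleData"
#   to_snake_case("VehicleDataType") -> "Vehicle_Data_Type"
# Note that to_snake_case keeps the original casing; callers such as the
# Parameter naming conversion apply .upper() themselves.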
class CodeGenerator(object):
"""Base PolicyTypes generator.
    This class provides a service that generates *.h files with the types
    required for Policy.
"""
def __init__(self):
"""Construct new object."""
self._generated_structs = []
self._structs_add_code = u""
def generate_vehicledatatype_names(self, enum):
template = string.Template('''static std::set<std::string> ${enum_name}_enum_names = { \n${values}\n};\n''')
enum_element_name = lambda enum_element: enum_element.internal_name if enum_element.internal_name else enum_element.name
values = ", \n".join([" \"{}\"".format(enum_element_name(enum_element)) for enum_element in enum.elements.values()])
return template.substitute(enum_name=enum.name, values = values)
def generate_all_enum_values(self, enum):
template = string.Template('''static std::set<mobile_apis::${enum_name}::eType> ${enum_name}_all_enum_values = { \n${values}\n};\n''')
enum_element_name = lambda enum_element: enum_element.internal_name if enum_element.internal_name else enum_element.name
values = ", \n".join([" mobile_apis::{}::{}".format(enum.name, enum_element_name(enum_element)) for enum_element in enum.elements.values()])
return template.substitute(enum_name=enum.name, values = values)
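    # Illustrative shape of the generated C++ for generate_all_enum_values
    # (enum and element names are hypothetical; the exact formatting comes
    # from the template string above):
    #   static std::set<mobile_apis::HMILevel::eType> HMILevel_all_enum_values = {
    #     mobile_apis::HMILevel::HMI_FULL,
    #     mobile_apis::HMILevel::HMI_NONE
    #   };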
def generate_enums_schema_factory_content(self, enums):
return "\n".join([self._enum_factory_element_template.substitute(enum_name = enum.name) for enum in enums])
def generate_enums_schema_factory_impl(self, enums):
enums_values = ""
for enum in enums:
enums_values += "\n".join([self.generate_all_enum_values(enum)])
if enum.name == "VehicleDataType":
enums_values += "\n".join([self.generate_vehicledatatype_names(enum)])
return self._enum_factory_template.substitute(enums_values = enums_values,
enum_factory_content = self.generate_enums_schema_factory_content(enums))
def generate_POD_types_schema_factory_impl(self, enums):
pod_types = {
"Integer" : "int32_t",
"Float" : "double",
}
'''if (enum_name == "DriverDistractionState") {
return Get<mobile_apis::DriverDistractionState::eType>(DriverDistractionState_all_enum_values);
}'''
enums_values = "\n".join([self.generate_all_enum_values(enum) for enum in enums])
return self._enum_factory_template.substitute(enums_values = enums_values,
enum_factory_content = self.generate_enums_schema_factory_content(enums))
def generate(self, interface, filename, namespace, destination_dir):
""" Generate SmartFactory source files.
Generates source code files at destination directory in
accordance with given model in specified namespace.
Keyword arguments:
interface -- model of the interface to generate source code for.
filename -- name of initial XML file.
namespace -- name of destination namespace.
destination_dir -- directory to create source files.
"""
if interface is None:
raise GenerateError("Given interface is None.")
params_set = set()
for func in interface.functions.values():
for param in func.params:
params_set.add(param)
parameter_enum = Enum('Parameter')
for item in params_set:
parameter_enum.elements[item.upper()] = EnumElement(item)
required_enums_for_policy = [
"HMILevel",
"FunctionID",
"HybridAppPreference",
"AppHMIType",
"RequestType",
"ModuleType",
"Common_AppPriority",
"Parameter"
]
self.required_empty_value = [
"RequestType",
"ModuleType",
"Parameter"
]
self.enum_items_naming_conversion_ = {
"HMILevel" : lambda item_name : "HL_" + item_name.replace("HMI_", ""),
"AppHMIType" : lambda item_name : "AHT_" + item_name,
"FunctionID" : lambda item_name : item_name,
"HybridAppPreference" : lambda item_name : item_name,
"RequestType" : lambda item_name : "RT_" + item_name,
"ModuleType" : lambda item_name : "MT_" + item_name,
"Common_AppPriority" : lambda item_name : "P_" + item_name if not item_name == "VOICE_COMMUNICATION" else "P_VOICECOM",
"Parameter" : lambda item_name : "P_" + to_snake_case(item_name).upper()
}
self.enum_items_string_naming_conversion_ = {
"HMILevel" : lambda item_name : item_name,
"AppHMIType" : lambda item_name : item_name,
"FunctionID" : lambda item_name : item_name[:item_name.rfind("ID")],
"HybridAppPreference" : lambda item_name : item_name,
"RequestType" : lambda item_name : item_name,
"ModuleType" : lambda item_name : item_name,
"Common_AppPriority" : lambda item_name : item_name if not item_name == "VOICE_COMMUNICATION" else "VOICECOM",
"Parameter" : lambda item_name : item_name
}
self.enum_naming_conversion_ = {
"HMILevel" : "HmiLevel",
"AppHMIType" : "AppHMIType",
"FunctionID" : "FunctionID",
"HybridAppPreference" : "HybridAppPreference",
"RequestType" : "RequestType",
"ModuleType" : "ModuleType",
"Common_AppPriority" : "Priority",
"Parameter" : "Parameter"
}
# In case if "." is in FunctionID name this is HMI_API function ID and should not be included in Policy enums
required_enum_values = list(filter(lambda e: e.name in required_enums_for_policy
and "." not in list(e.elements.values())[0].name, list(interface.enums.values())))
if filename == "MOBILE_API":
self._write_cc_with_enum_schema_factory(filename, namespace, destination_dir, interface.enums.values())
# Params should be generated as enum for MOBILE_API to validate RPCSpec parameters
required_enum_values.append(parameter_enum)
self._write_header_with_enums(filename, namespace, destination_dir, required_enum_values)
self._write_cc_with_enums(filename, namespace, destination_dir, required_enum_values)
def _write_cc_with_enum_schema_factory(self,filename, namespace, destination_dir, required_enum_values):
class_name = u"generated_{0}_enum_schema_factory".format(filename)
header_file_name = '''policy/policy_table/policy_enum_schema_factory.h'''
cc_file_name = u"".join("{0}.cc".format(class_name))
if not os.path.exists(destination_dir):
os.makedirs(destination_dir)
print(os.path.join(destination_dir, cc_file_name))
with codecs.open(os.path.join(destination_dir, cc_file_name),
encoding="utf-8",
mode="w") as f_cc:
guard = u"_{0}_{1}_CC__".format( class_name.upper(),
uuid.uuid1().hex.capitalize())
namespace_open, namespace_close = self._namespaces_strings(namespace)
includes = '''#include <set>\n'''\
'''#include "interfaces/MOBILE_API.h"\n'''\
'''#include "smart_objects/enum_schema_item.h"\n'''
f_cc.write(self._cc_file_template.substitute(
class_name=class_name,
header_file=header_file_name,
includes = includes,
namespace_open=namespace_open,
enums_content=self.generate_enums_schema_factory_impl(required_enum_values),
namespace_close=namespace_close))
def _write_header_with_enums(self,filename, namespace, destination_dir, required_enum_values):
class_name = u"generated_{0}_policy_types".format(filename)
header_file_name = u"".join("{0}.h".format(class_name))
if not os.path.exists(destination_dir):
os.makedirs(destination_dir)
with codecs.open(os.path.join(destination_dir, header_file_name),
encoding="utf-8",
mode="w") as f_h:
guard = u"_{0}_{1}_H__".format( class_name.upper(),
uuid.uuid1().hex.capitalize())
namespace_open, namespace_close = self._namespaces_strings(namespace)
f_h.write(self._h_file_template.substitute(
class_name=class_name,
guard=guard,
includes="",
namespace_open=namespace_open,
enums_content=self._gen_enums(required_enum_values
),
namespace_close=namespace_close))
def _write_cc_with_enums(self,filename, namespace, destination_dir, required_enum_values):
class_name = u"generated_{0}_policy_types".format(filename)
header_file_name = u"".join("{0}.h".format(class_name))
cc_file_name = u"".join("{0}.cc".format(class_name))
if not os.path.exists(destination_dir):
os.makedirs(destination_dir)
with codecs.open(os.path.join(destination_dir, cc_file_name),
encoding="utf-8",
mode="w") as f_cc:
guard = u"_{0}_{1}_CC__".format( class_name.upper(),
uuid.uuid1().hex.capitalize())
namespace_open, namespace_close = self._namespaces_strings(namespace)
f_cc.write(self._cc_file_template.substitute(
class_name=class_name,
header_file=header_file_name,
includes="",
namespace_open=namespace_open,
enums_content=self.gen_enums_processing(required_enum_values
),
namespace_close=namespace_close))
def _namespaces_strings(self, namespace):
""" Generate open and close namespaces strings
Generates source code for opening and close namespaces
Keyword arguments:
namespace -- name of destination namespace.
Returns:
Tuple with namespace open string and namespace close string
"""
namespace_open = u""
namespace_close = u""
if namespace:
parts = namespace.split(u"::")
for part in parts:
namespace_open = u"".join(
[namespace_open,
self._namespace_open_template.substitute(name=part)])
namespace_close = u"".join(
[namespace_close,
"}} // {0}\n".format(part)])
return namespace_open, namespace_close
def _gen_enums(self, enums):
"""Generate enums for header file.
Generates declaration of enumerations for the header file.
Keyword arguments:
enums -- list of enums to generate.
Returns:
String with enums declaration source code.
"""
if enums is None:
raise GenerateError("Enums is None")
return u"\n".join([self._gen_enum(x) for x in enums])
def _gen_enum(self, enum):
"""Generate enum for header file.
Generates declaration of enumeration for the header file.
Keyword arguments:
enum -- enum to generate.
Returns:
String with enum declaration source code.
"""
enum_elements = enum.elements.values()
return self._enum_template.substitute(
comment=self._gen_comment(enum),
name=self.enum_naming_conversion_[enum.name],
enum_items=self._indent_code(self._gen_enum_elements(
enum_elements, enum.name), 1))
def _gen_enum_elements(self, enum_elements, enum_name):
"""Generate enum elements for header file.
Generates declaration of enumeration elements for the header file.
Keyword arguments:
enum_elements -- list of enum elements to generate.
Returns:
String with enum elements declaration source code.
"""
enum_elements = [self._gen_enum_element(x, enum_name)
for x in enum_elements]
if (enum_name in self.required_empty_value):
enum_elements.append(self._gen_empty_enum_element(enum_name))
return u",\n\n".join(enum_elements)
    def _gen_empty_enum_element(self, enum_name):
return self._enum_element_with_value_template.substitute(
comment="",
name=self.enum_items_naming_conversion_[enum_name]("EMPTY"),
value=-1)
def _gen_enum_element(self, enum_element, enum_name):
"""Generate enum element for header file.
Generates declaration of enumeration element for the header file.
Keyword arguments:
enum_element -- enum element to generate.
Returns:
String with enum element declaration source code.
"""
if enum_element.value is not None:
return self._enum_element_with_value_template.substitute(
comment=self._gen_comment(enum_element),
name=self.enum_items_naming_conversion_[enum_name](enum_element.primary_name),
value=enum_element.value)
else:
return self._enum_element_with_no_value_template.substitute(
comment=self._gen_comment(enum_element),
name=self.enum_items_naming_conversion_[enum_name](enum_element.primary_name))
def gen_enums_processing(self, enums):
validation = "\n".join([self._gen_enum_validation(enum) for enum in enums])
to_json = "\n".join([self._gen_enum_to_json(enum) for enum in enums])
from_json = "\n".join([self._gen_enum_from_json(enum) for enum in enums])
return "\n".join([validation, to_json, from_json])
def _gen_enum_validation(self, enum):
return self._valiation_enum_template.substitute(
name = self.enum_naming_conversion_[enum.name],
enum_items = "\n".join([self._gen_enum_item_validation(enum_item.name, enum.name) for enum_item in enum.elements.values()])
)
def _gen_enum_item_validation(self, item_name, enum_name):
return self._valiation_enum_item_template.substitute(
name = self.enum_items_naming_conversion_[enum_name](item_name))
def _gen_enum_to_json(self, enum):
name = self.enum_naming_conversion_[enum.name]
enum_items = "\n".join([self._gen_enum_item_to_json(enum_item.name, enum.name) for enum_item in enum.elements.values()])
if (enum.name in self.required_empty_value):
enum_items+= "\n" + self._gen_enum_item_to_json("EMPTY", enum.name)
return self._enum_to_json_template.substitute(
name = name,
enum_items = enum_items
)
def _gen_enum_item_to_json(self, item_name, enum_name):
return self._enum_to_json_item_template.substitute(
name = self.enum_items_naming_conversion_[enum_name](item_name),
string_name = self.enum_items_string_naming_conversion_[enum_name](item_name)
)
def _gen_enum_from_json(self, enum):
name = self.enum_naming_conversion_[enum.name]
enum_items = "\n".join([self._gen_enum_item_from_json(enum_item.name, enum.name) for enum_item in enum.elements.values()])
if (enum.name in self.required_empty_value):
enum_items += "\n" + self._gen_enum_item_from_json("EMPTY", enum.name)
return self._enum_from_json_template.substitute(
name = name,
enum_items = enum_items
)
def _gen_enum_item_from_json(self, item_name, enum_name):
return self._enum_from_json_item_template.substitute(
name = self.enum_items_naming_conversion_[enum_name](item_name),
string_name = self.enum_items_string_naming_conversion_[enum_name](item_name)
)
def _gen_comment(self, interface_item_base, use_doxygen=True):
"""Generate doxygen comment for iterface_item_base for header file.
Generates doxygen comment for any iterface_item_base for the header
file.
Keyword arguments:
interface_item_base -- object to generate doxygen comment for.
use_doxygen -- flag that indicates whether the comment uses doxygen style or not.
Returns:
String with generated doxygen comment.
"""
brief_type_title = None
interface_item_base_classname = interface_item_base.__class__.__name__
if interface_item_base_classname in self._model_types_briefs:
brief_type_title = \
self._model_types_briefs[interface_item_base_classname]
else:
raise GenerateError("Unable to create comment for unknown type " +
interface_item_base_classname)
name = interface_item_base.primary_name if \
type(interface_item_base) is EnumElement else \
interface_item_base.name
brief_description = (u" * @brief {0}{1}.\n" if use_doxygen is
True else u"// {0}{1}.\n").format(
brief_type_title,
name)
description = u"".join([(u" * {0}\n" if use_doxygen
is True else u"// {0}\n").format(x)
for x in self._normalize_multiline_comments(
interface_item_base.description)])
if description != u"":
description = u"".join([u" *\n" if use_doxygen
is True else u"//\n", description])
design_description = u"".join([(u" * {0}\n" if use_doxygen is
True else u"// {0}\n").format(x)
for x in
self._normalize_multiline_comments(
interface_item_base.
design_description)])
if design_description != u"":
design_description = u"".join([u" *\n" if use_doxygen is
True else "//\n",
design_description])
issues = u"".join([(u" * @note {0}\n" if use_doxygen is
True else u"// Note: {0}\n").format(x)
for x in self._normalize_multiline_comments(
[x.value for x in interface_item_base.issues])])
if issues != u"":
issues = u"".join([u" *\n" if use_doxygen is
True else u"//\n", issues])
todos = u"".join([(u" * @todo {0}\n" if use_doxygen is
True else u"// ToDo: {0}\n").format(x)
for x in self._normalize_multiline_comments(
interface_item_base.todos)])
if todos != u"":
todos = u"".join([u" *\n" if use_doxygen is
True else u"//\n", todos])
returns = u""
if type(interface_item_base) is Function:
returns = u"".join([u" *\n", self._function_return_comment])
template = self._comment_doxygen_template if use_doxygen is \
True else self._comment_cc_template
return template.substitute(
brief_description=brief_description,
description=description,
design_description=design_description,
issues=issues,
todos=todos,
returns=returns)
def _indent_code(self, code, indent_level):
"""Indent given source code.
Indents given source code right by given indentation level.
Keyword arguments:
code -- given source code.
indent_level -- desired indentation level.
Returns:
String with processed code.
"""
code_lines = code.split("\n")
return u"".join(
[u"{0}{1}\n".format(
self._indent_template * indent_level,
x) if x != u"" else u"\n" for x in code_lines])
@staticmethod
def _normalize_multiline_comments(initial_strings):
"""Normalize multiline comments.
Splits strings that contain line breaks so that the resulting comment
consists only of single-line strings.
Keyword arguments:
initial_strings -- initial list of strings to process.
Returns:
New list of strings (which contains no strings with line breaks).
"""
result = []
for initial_string in initial_strings:
result = result + initial_string.splitlines()
return result
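# Illustrative sketch: _normalize_multiline_comments(["line one\nline two",
# "line three"]) returns ["line one", "line two", "line three"], so every
# resulting string is free of line breaks.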
_model_types_briefs = dict(
{u"EnumElement": u"",
u"Enum": u"Enumeration ",
u"Function": u"Method that generates schema for function ",
u"Struct": u"Method that generates schema item for structure ",
u"Param": u"Struct member ",
u"FunctionParam": u"Function parameter "})
_cc_file_template = string.Template(
u'''/**\n'''
u''' * @file ${class_name}.h\n'''
u''' * @brief Generated class ${class_name} source file.\n'''
u''' *\n'''
u'''*/\n'''
u'''#include "${header_file}"\n'''
u'''${includes}'''
u'''\n\n'''
u'''$namespace_open'''
u'''\n'''
u'''$enums_content'''
u'''\n'''
u'''$namespace_close'''
u'''\n\n''')
_h_file_template = string.Template(
u'''/**\n'''
u''' * @file ${class_name}.h\n'''
u''' * @brief Generated class ${class_name} header file.\n'''
u''' *\n'''
u'''* Copyright (c) 2017, Ford Motor Company\n'''
u'''* All rights reserved.\n'''
u'''*\n'''
u'''* Redistribution and use in source and binary forms, with or without\n'''
u'''* modification, are permitted provided that the following conditions are met:\n'''
u'''*\n'''
u'''* Redistributions of source code must retain the above copyright notice, this\n'''
u'''* list of conditions and the following disclaimer.\n'''
u'''*\n'''
u'''* Redistributions in binary form must reproduce the above copyright notice,\n'''
u'''* this list of conditions and the following\n'''
u'''* disclaimer in the documentation and/or other materials provided with the\n'''
u'''* distribution.\n'''
u'''*\n'''
u'''* Neither the name of the Ford Motor Company nor the names of its contributors\n'''
u'''* may be used to endorse or promote products derived from this software\n'''
u'''* without specific prior written permission.\n'''
u'''*\n'''
u'''* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"\n'''
u'''* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n'''
u'''* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE\n'''
u'''* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE\n'''
u'''* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR\n'''
u'''* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF\n'''
u'''* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS\n'''
u'''* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN\n'''
u'''* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)\n'''
u'''* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE\n'''
u'''* POSSIBILITY OF SUCH DAMAGE.\n'''
u'''*/\n'''
u'''\n'''
u'''#ifndef $guard\n'''
u'''#define $guard\n'''
u'''#include <string>\n'''
u'''${includes}'''
u'''\n'''
u'''$namespace_open'''
u'''$enums_content'''
u'''$namespace_close'''
u'''#endif //$guard\n'''
u'''\n\n''')
_namespace_open_template = string.Template(
u'''namespace $name {\n''')
_comment_doxygen_template = string.Template(
u'''/**\n'''
u'''$brief_description'''
u'''$description'''
u'''$design_description'''
u'''$issues'''
u'''$todos'''
u'''$returns */''')
_enum_template = string.Template(
u'''$comment\n'''
u'''enum $name {\n'''
u'''$enum_items};\n'''
u'''bool IsValidEnum($name val);\n'''
u'''const char* EnumToJsonString($name val);\n'''
u'''bool EnumFromJsonString(const std::string& literal, $name* result);\n'''
)
_valiation_enum_template = string.Template(
u'''bool IsValidEnum($name val) {\n'''
u''' return !(std::string(EnumToJsonString(val)).empty());\n'''
u'''};\n''')
_valiation_enum_item_template = string.Template(
u''' case $name: return true;''')
_enum_to_json_template = string.Template(
u'''const char* EnumToJsonString($name val) {\n'''
u''' switch (val) {\n'''
u'''$enum_items\n'''
u''' default: return "";\n'''
u''' }\n'''
u'''};\n''')
_enum_to_json_item_template = string.Template(
u''' case $name: return "$string_name";''')
_enum_from_json_template = string.Template(
u'''bool EnumFromJsonString(const std::string& literal, $name* result) {\n'''
u'''$enum_items'''
u''' return false;\n'''
u'''};\n''')
_enum_from_json_item_template = string.Template(
u''' if ("$string_name" == literal) {\n'''
u''' *result = $name;\n'''
u''' return true;\n'''
u''' }\n''')
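# Illustrative sketch of the C++ produced by _enum_from_json_template for an
# enum "Result" with a single item; the item name RESULT_SUCCESS and the
# string "SUCCESS" are assumptions that depend on the naming conversions:
#
#   bool EnumFromJsonString(const std::string& literal, Result* result) {
#     if ("SUCCESS" == literal) {
#       *result = RESULT_SUCCESS;
#       return true;
#     }
#     return false;
#   };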
_enum_element_with_value_template = string.Template(
u'''$comment\n'''
u'''$name = $value''')
_enum_element_with_no_value_template = string.Template(
u'''$comment\n'''
u'''$name''')
_enum_factory_template = string.Template(
'''${enums_values}\n'''
'''template<typename EnumType>\n'''
'''std::shared_ptr<smart_objects::ISchemaItem> Create(const std::set<EnumType>& all_enum_values) {\n'''
'''using namespace smart_objects;\n'''
''' static auto schemaItem = TEnumSchemaItem<EnumType>::create(all_enum_values, TSchemaItemParameter<EnumType>());\n'''
''' return schemaItem;\n'''
'''}\n\n'''
'''std::shared_ptr<smart_objects::ISchemaItem> EnumSchemaItemFactory::Get(const std::string & enum_name) {\n'''
'''${enum_factory_content}\n'''
''' return std::shared_ptr<smart_objects::ISchemaItem>();\n'''
'''}\n\n'''
'''bool EnumSchemaItemFactory::IsRPCSpecVehicleDataType(const std::string& vd_name) {\n'''
''' for(auto& item_name : VehicleDataType_enum_names) {\n'''
''' if (vd_name == item_name) {\n'''
''' return true;\n'''
''' }\n'''
''' }\n'''
''' return false;\n'''
'''}\n''')
_enum_factory_element_template = string.Template(
''' if (enum_name == "${enum_name}") {\n'''
''' return Create<mobile_apis::${enum_name}::eType>(${enum_name}_all_enum_values);\n'''
''' }''')
_POD_type_factory_element_template = string.Template(
''' if (typename == "${type_name}") {\n'''
''' return TNumberSchemaItem<${cpp_type}>::create(TSchemaItemParameter<double>(${min_val}), TSchemaItemParameter<double>(${max_val}), TSchemaItemParameter<${cpp_type}>());'''
''' }''')
_indent_template = u" "
|
|
# Copyright (c) 2011 - 2017, Intel Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""``IxLoadTclAPI.py``
`IxLoad Tcl API wrapper module`
"""
import json
import os
import time
# from ..custom_exceptions import IxiaException
from . import ixia_helpers
from ..loggers import ClassLogger
from ..read_csv import ReadCsv
# To log tcl commands without executing them, set SIMULATE to True.
SIMULATE = False
class IxLoadTclAPI(object):
"""IxLoad Tcl API base wrapper class.
"""
class_logger = ClassLogger()
def __init__(self, ipaddr, user):
"""Initializes connection to IxLoad.
Args:
ipaddr(str): IxLoad host IP address.
user(str): IxLoad windows user.
"""
import tkinter
self.tcl_interpret = tkinter.Tcl()
def tcl_puts(*args):
if len(args) >= 2:
stream = args[0]
if stream == "stdout":
self.class_logger.debug(" ".join(args[1:]))
elif stream == "stderr":
self.class_logger.error(" ".join(args[1:]))
else:
self.class_logger.debug("stream <%s>: %s" % (args[0], " ".join(args[1:])))
elif len(args) == 1:
self.class_logger.debug(args[0])
else:
self.class_logger.error("Called puts without arguments.")
return None
self.tcl_interpret.createcommand("tcl_puts", tcl_puts)
self.class_logger.debug("Insert tcl script to catch puts output.")
ixia_helpers.tcl_puts_replace(self.tcl_interpret)
self.ixload_ip = ipaddr
self.ixload_user = user
# Temp path for sending rxf files to IxLoad
self.ixload_tmppath = "C:\\Users\\{0}\\AppData\\Local\\Temp\\".format(self.ixload_user)
# Path for storing test csv reports. It can be overridden in the child IxLoadHL class.
self.ixload_respath = self.ixload_tmppath
self.__log_name = "IxLoadTclAPI-{0}-{1}-{2}".format(os.uname()[1], os.getuid(), int(time.time()))
self.ixload_logpath = None
ixia_helpers.ixtclhal_import(self.tcl_interpret)
ixia_helpers.ixload_import(self.tcl_interpret)
self.test_controller = None
self.tst = None
def tcl(self, cmd):
"""Tcl wrapper.
"""
self.class_logger.debug("Run tcl command: %s", cmd)
if not SIMULATE:
return self.tcl_interpret.eval(cmd)
else:
return ""
def connect(self):
"""Logs in to IXIA and takes ports ownership.
Returns:
None
"""
# Set simple config
# self.tcl("namespace eval ::IxLoadPrivate {};" +
# "namespace eval ::IxLoadPrivate::SimpleSettings {};" +
# "variable ::IxLoadPrivate::SimpleSettings::remoteServer {0};".format(self.ixload_ip) +
# "::IxLoad connect $::IxLoadPrivate::SimpleSettings::remoteServer")
self.tcl("::IxLoad connect {0}".format(self.ixload_ip))
# Set up logger
self.ixload_logpath = (self.ixload_respath + "\\" + self.__log_name).replace("\\", "\\\\")
self.logger_setup()
# Define test controller.
self.test_controller = IxLoadTestController(self.tcl, self.tcl_interpret, self.ixload_respath)
# self.tcl("set testController [::IxLoad new ixTestController -outputDir 1];")
self.tcl("global ixAppPluginManager")
self.class_logger.info("IxLoad startup complete.")
def disconnect(self):
"""Logs out from IXIA and clears ports ownership.
Returns:
None
"""
self.tcl("::IxLoad disconnect")
def logger_setup(self):
self.class_logger.info("Setting up IxLoad logger...")
self.tcl("set logtag \"IxLoad-api\";" +
"set logName \"{0}\";".format(self.__log_name) +
"set logger [::IxLoad new ixLogger $logtag 1];" +
"set logEngine [$logger getEngine];" +
"$logEngine setLevels $::ixLogger(kLevelDebug) $::ixLogger(kLevelInfo);" +
"$logEngine setFile {0} 2 1024 1".format(self.ixload_logpath))
# "$logEngine setFile $logName 2 256 1")
def logger_delete(self):
self.tcl("::IxLoad delete $logger;" +
"::IxLoad delete $logEngine")
def load_repo(self, repo=None):
"""Loading rxf repo file or create new one.
"""
if repo is None:
self.tcl("set repository [::IxLoad new ixRepository]")
else:
self.class_logger.info("Loading repo: {0}".format(repo))
_repo_name = os.path.basename(repo)
ixload_repo = self.ixload_tmppath + _repo_name
self.copy_local_file(repo, ixload_repo)
self.tcl("set repository [::IxLoad new ixRepository -name \"{0}\"]".format(ixload_repo).replace("\\", "\\\\"))
self.repo_file = repo # pylint: disable=attribute-defined-outside-init
# self.tst = IxLoadTests(self.tcl)
self.tst = IxLoadTests(self.tcl, self.test_controller,
"{0}{1}".format(self.ixload_respath, self.__log_name))
self.class_logger.debug("Discovered tests list: {0}".format(self.tst.tc_list))
def copy_local_file(self, local_path, remote_path):
"""Copy local file to IxLoad host.
"""
self.tcl("::IxLoad sendFileCopy \"{0}\" \"{1}\"".format(local_path, remote_path).replace("\\", "\\\\"))
def copy_remote_file(self, remote_path, local_path):
"""Copy remote file from IxLoad host to local host.
"""
self.tcl("::IxLoad retrieveFileCopy \"{0}\" \"{1}\"".format(remote_path, local_path).replace("\\", "\\\\"))
def retrieve_results(self, dst_path):
"""Retrieve result csv files from IxLoad host to local dst_path.
"""
self.tcl("::IxLoad retrieveResults \"{0}\"".format(dst_path).replace("\\", "\\\\"))
def load_plugin(self, plugin):
self.tcl("$ixAppPluginManager load \"{0}\"".format(plugin))
def update_stats(self, stype="file", stat_name=None):
def s2i_safe(val):
try:
return int(val.replace("kInt ", "").replace("timestamp ", ""))
except Exception:
try:
return float(val.replace("kInt ", "").replace("timestamp ", ""))
except Exception:
return val
if stype == "file":
if self.test_controller.test_result_path is None:
raise Exception("Any test is started or csv result path isn't set.")
tmp_path = os.path.join("/tmp", "taf_ixload_file_stats.{0}".format(os.getpid()))
if stat_name:
stat_names = [stat_name, ]
else:
stat_names = [sn[0] for sn in self.test_controller.stats_list]
for stat_name in stat_names:
self.copy_remote_file(self.test_controller.test_result_path + "\\" + stat_name.replace(" ", "_") + ".csv", tmp_path)
csv = ReadCsv(tmp_path)
# 15 is the minimal acceptable length of a csv report. Use a bigger number to be safe.
if len(csv.content) < 18:
self.class_logger.warning("IxLoad {0} csv file is empty yet.".format(stat_name))
return False
# Remove unnecessary lines from IxLoad csv
# -1 because the last line may not be fully filled.
for i in [-1, 12, 11, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0]:
csv.content.pop(i)
self.test_controller.file_stats[stat_name] = IxLoadStats()
self.test_controller.file_stats[stat_name].extend([list(map(s2i_safe, x)) for x in csv.content[1:]])
self.test_controller.file_stats[stat_name].add_header(csv.content[0])
elif stype == "runtime":
for stat_item in self.test_controller.stats_list:
stat_name = stat_item[0]
_h = ["Elapsed Time", ]
_h.extend(stat_item[1])
self.test_controller.runtime_stats[stat_name] = IxLoadStats()
self.test_controller.runtime_stats[stat_name].add_header(_h)
for stat in self.test_controller.stats[:]:
time_stamp = s2i_safe(stat[1])
# Convert time_stamp to seconds
time_stamp = time_stamp / 1000 if isinstance(time_stamp, int) else time_stamp
stat_values = list(map(s2i_safe, stat[2]))
last_stat_item = 0
for stat_item in self.test_controller.stats_list:
stat_name = stat_item[0]
stat_num = len(stat_item[1])
_l = stat_values[last_stat_item:stat_num + last_stat_item]
_l.insert(0, time_stamp)
last_stat_item += stat_num
self.test_controller.runtime_stats[stat_name].append(_l)
else:
raise Exception("Unknown stats type: {0}".format(stype))
return True
def get_stats(self, stype="file"):
if stype == "file":
return self.test_controller.file_stats
elif stype == "runtime":
return self.test_controller.runtime_stats
else:
raise Exception("Incorrect stats type: {0}".format(stype))
class IxLoadTests(object):
"""Class for managing IxLoad Tests.
"""
def __init__(self, tcl, test_controller, res_path=""):
self.tcl = tcl
self.tc_list = []
self.load_tclist()
self.res_path = res_path
self.test_controller = test_controller
def load_tclist(self):
"""Loading list of IxLoad Tests.
"""
_tlist = []
num_tests = self.tcl("$repository testList.indexCount")
for _i in range(int(num_tests)):
tc_name = self.tcl("$repository testList({0}).cget -name".format(_i))
_tlist.append(tc_name)
# Test list is read. Clean up the previous one and store the new list.
self.tc_list = _tlist
def start(self, t_name):
"""Start ixLoad test without waiting for result.
"""
self.tcl("puts {0}".format(t_name))
def run(self, t_name, res_path=None):
"""Run ixLoad test until completion.
"""
# Set result dir.
res_path = res_path if res_path is not None else self.res_path
res_path = "{0}\\{1}".format(res_path, t_name)
self.tcl("${0} setResultDir \"{1}\"".format(self.test_controller.name, res_path).replace("\\", "\\\\"))
# Execute the test.
self.tcl("set test [$repository testList.getItem {}];".format(t_name) +
"${0} run $test".format(self.test_controller.name))
self.tcl("vwait ::ixTestControllerMonitor; puts $::ixTestControllerMonitor")
return res_path
def cleanup(self):
self.tcl("${0} releaseConfigWaitFinish;".format(self.test_controller.name) +
"if {[lsearch [info vars] test] >= 0} {$test clearDUTList; ::IxLoad delete $test}")
def report(self, pdf=False):
"""Enable/Disable report options.
"""
self.tcl("ixNet setAttribute [ixNet getRoot]/testConfiguration -enableGenerateReportAfterRun {0}".format(pdf))
class IxLoadStats(list):
"""Custom list class to support columns header names.
"""
headers = None
def add_header(self, header):
self.headers = dict((hi, header.index(hi)) for hi in header)
def _get(self, row_num, col_names):
if col_names is None:
# Return whole row
return self[row_num]
elif isinstance(col_names, str):
return self[row_num][self.headers[col_names]]
elif isinstance(col_names, list):
indexes = [self.headers[x] for x in col_names]
return [self[row_num][x] for x in indexes]
else:
raise TypeError("Incorrect col_names type: {0}. str/list of str/None are allowed.".format(type(col_names)))
def get(self, row_num=-1, col_names=None):
if isinstance(row_num, int):
return self._get(row_num, col_names)
elif isinstance(row_num, list):
return [self._get(x, col_names) for x in row_num]
else:
raise TypeError("Incorrect row_num type: {0}. int and list are allowed.".format(type(row_num)))
class IxLoadGenericObjectMixin(object):
IXLOAD_CLASS = None
TCL_VARS = ""
def __init__(self, tcl):
self.tcl = tcl
self.name = self.get_free_name()
if self.IXLOAD_CLASS is not None:
self.tcl("set {0} [::IxLoad new {1}]".format(self.name, self.IXLOAD_CLASS))
def __del__(self):
self.tcl("::IxLoad delete ${0}".format(self.name))
self.tcl("unset {0}".format(self.name))
def get_free_name(self):
def is_int(num):
try:
return int(num)
except ValueError:
return None
if not SIMULATE:
_tcl_vars = self.tcl("info vars")
else:
self.tcl("")
_tcl_vars = IxLoadGenericObjectMixin.TCL_VARS
# Return unused number for class variable.
_id = max([is_int(_v.replace(self.NAME, "")) for _v in _tcl_vars.split(" ") if _v.startswith(self.NAME)] or [0, ]) or 0
if SIMULATE:
IxLoadGenericObjectMixin.TCL_VARS += " " + self.NAME + str(_id + 1)
return self.NAME + str(_id + 1)
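# Illustrative sketch: if the Tcl interpreter already holds variables
# "testController1" and "testController2", the next object with
# NAME == "testController" is named "testController3"; when no matching
# variable exists the first instance gets "<NAME>1".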
def _gen_params(self, **kw):
def arg_repr(arg):
if isinstance(arg, IxLoadGenericObjectMixin):
return "${0}".format(arg.name)
elif isinstance(arg, str):
return '"{0}"'.format(arg)
else:
return arg
return " ".join(["-{0} {1}".format(k, arg_repr(kw[k])) for k in kw])
def config(self, **kwargs):
params = self._gen_params(**kwargs)
self.tcl("${0} config {1}".format(self.name, params))
def clear(self, target):
self.tcl("${0} {1}.clear".format(self.name, target))
class IxLoadTestController(IxLoadGenericObjectMixin):
NAME = "testController"
class_logger = ClassLogger()
def __init__(self, tcl, tcl_interpret, root_path="", res_path=None):
super(IxLoadTestController, self).__init__(tcl)
self.tcl("set {0} [::IxLoad new ixTestController -outputDir True]".format(self.name))
self.res_path = res_path or "TAF-{0}-{1}-{2}".format(os.uname()[1], os.getuid(), int(time.time()))
self.root_path = root_path
self.test_result_path = None
self.statcollector = None
self.testserverhandle = None
# Stats collector for runtime statistics collecting.
self.stats = []
self.runtime_stats = {}
self.file_stats = {}
self.stats_list = []
self.statmap = {}
def collect_stats(*args):
try:
# Try to parse stat args.
a1, a2 = args
time_stamp, stat_args = a2.split(" stats ")
time_stamp = time_stamp.replace("timestamp ", "")
stat_args = stat_args.lstrip("{{").rstrip("}}").split("} {")
self.stats.append((a1, time_stamp, stat_args))
except Exception as err:
# Append stat args as is.
self.class_logger.warning("Failed to parse stat args. Err: {0}".format(err))
self.stats.append(args)
tcl_interpret.createcommand("collect_stats", collect_stats)
self.set_statcollector()
self.get_testserverhandle()
self.statcollector.init_tsh(self.testserverhandle)
def __del__(self):
self.cleanup()
super(IxLoadTestController, self).__del__()
def cleanup(self):
tcs = self.status()
if tcs == "1":
self.stop(force=True)
else:
self.tcl("${0} releaseConfigWaitFinish".format(self.name))
def clear_stats(self):
self.runtime_stats = {}
self.file_stats = {}
# self.stats_list = []
self.statcollector.clear()
def set_resultdir(self, root_path=None, res_dir=None):
root_path = root_path or self.root_path
_result_path = "{0}{1}\\{2}".format(root_path, self.res_path, res_dir)
self.tcl("${0} setResultDir {1}".format(self.name, _result_path).replace("\\", "\\\\"))
return _result_path
def apply(self, test):
self.tcl("${0} applyConfig ${1}".format(self.name, test))
def run_test(self, test, test_name=None, stats=None):
self.test_result_path = self.set_resultdir(self.root_path, test_name or test.name)
statmap = {}
if stats:
self.stats_list = stats
_stat_list = ""
_stat_types_file = os.path.join(os.path.dirname(__file__), "ixload_stat_types")
_stat_types = json.loads(open(_stat_types_file).read().encode("ascii"), encoding="latin-1")
for stat in stats:
self.runtime_stats[stat[0]] = IxLoadStats()
_stat_list_name = stat[0].replace(" ", "_") + "_StatList"
_stat_items = ""
statmap[stat[0]] = {}
for stat_item in stat[1]:
_stat_items += " {\"%s\" \"%s\" \"%s\"}" % (stat[0], stat_item,
_stat_types[stat[0]][stat_item])
statmap[stat[0]][stat_item] = len(statmap[stat[0]])
self.tcl("set %s { %s }" % (_stat_list_name, _stat_items))
_stat_list += " $" + _stat_list_name
self.statmap = statmap
self.tcl("${0} clearGridStats".format(test.name))
self.tcl("set statList [concat {0}]".format(_stat_list))
self.tcl("set count 1; " +
"foreach stat $statList { " +
" set caption [format \"Watch_Stat_%s\" $count]; " +
" set statSourceType [lindex $stat 0]; " +
" set statName [lindex $stat 1]; " +
" set aggregationType [lindex $stat 2]; " +
" ${%s}::AddStat " % (self.statcollector.name, ) +
" -caption $caption " +
" -statSourceType $statSourceType " +
" -statName $statName " +
" -aggregationType $aggregationType " +
" -filterList {}; "
" incr count}")
self.statcollector.start()
self.tcl("set ::ixTestControllerMonitor \"\"; ${0} run ${1}".format(self.name, test.name))
return self.test_result_path
def stop(self, force=False):
if force:
self.tcl("${0} stopRun".format(self.name))
else:
self.tcl("${0} stopRunGraceful".format(self.name))
def status(self):
return self.tcl("${0} isBusy".format(self.name))
def wait_test(self):
self.tcl("vwait ::ixTestControllerMonitor; puts $::ixTestControllerMonitor")
if self.statcollector is not None:
self.statcollector.stop()
def check_testexecution(self):
rc = self.tcl("if { $::ixTestControllerMonitor == \"\" } {return \"RUN\"}")
if rc == "RUN":
return True
else:
return False
def get_testserverhandle(self):
self.testserverhandle = IxLoadTestServerHandle(self.tcl)
self.tcl("set {0} [${1} getTestServerHandle]".format(self.testserverhandle.name, self.name))
return self.testserverhandle
def set_statcollector(self):
self.statcollector = IxLoadstatCollectorUtils(self.tcl)
class IxLoadChassisChain(IxLoadGenericObjectMixin):
NAME = "chassisChain"
IXLOAD_CLASS = "ixChassisChain"
def __init__(self, tcl, ipaddr=None):
super(IxLoadChassisChain, self).__init__(tcl)
if ipaddr is None:
ipaddr = ["127.0.0.1", ]
self.__ipaddr = ipaddr
for _ipaddr in self.__ipaddr:
self.tcl("${0} addChassis {1}".format(self.name, _ipaddr))
class IxLoadixEventHandlerSettings(IxLoadGenericObjectMixin):
NAME = "ixEventHandlerSettings"
IXLOAD_CLASS = "ixEventHandlerSettings"
def __init__(self, tcl):
super(IxLoadixEventHandlerSettings, self).__init__(tcl)
self.tcl("${0} config".format(self.name))
class IxLoadixViewOptions(IxLoadGenericObjectMixin):
NAME = "ixViewOptions"
IXLOAD_CLASS = "ixViewOptions"
def __init__(self, tcl):
super(IxLoadixViewOptions, self).__init__(tcl)
self.tcl("${0} config".format(self.name))
class IxLoadixTest(IxLoadGenericObjectMixin):
NAME = "ixTest"
IXLOAD_CLASS = "ixTest"
def __init__(self, tcl):
super(IxLoadixTest, self).__init__(tcl)
self.clear("scenarioList")
self.scenarioelement = None
self.scenariofactory = None
self.scenario = None
self.eventhandlersettings = None
self.viewoptions = None
self.sessionspecificdata = {}
self.profiledir = None
def __del__(self):
self.tcl("${0} clearDUTList".format(self.name))
super(IxLoadixTest, self).__del__()
def get_scenarioelementfactory(self):
self.scenarioelement = IxLoadScenarioElementFactory(self.tcl)
self.tcl("set {0} [${1} getScenarioElementFactory]".format(self.scenarioelement.name, self.name))
return self.scenarioelement
def get_scenariofactory(self):
self.scenariofactory = IxLoadScenarioFactory(self.tcl)
self.tcl("set {0} [${1} getScenarioFactory]".format(self.scenariofactory.name, self.name))
return self.scenariofactory
def get_scenario(self):
if self.scenariofactory is None:
self.get_scenariofactory()
self.scenario = self.scenariofactory.create_scenario()
return self.scenario
def get_sessionspecificdata(self, _type):
ssd = IxLoadixTestSessionSpecificData(self.tcl)
self.tcl("set {0} [${1} getSessionSpecificData \"{2}\"]".format(ssd.name, self.name, _type))
self.sessionspecificdata[ssd.name] = ssd
return ssd
def get_profiledir(self):
self.profiledir = IxLoadixTestProfileDirectory(self.tcl)
self.tcl("set {0} [${1} cget -profileDirectory]".format(self.profiledir.name, self.name))
return self.profiledir
class IxLoadScenarioElementFactory(IxLoadGenericObjectMixin):
NAME = "scenarioElementFactory"
def create_nettraffic(self):
nettraffic = IxLoadSETkNetTraffic(self.tcl)
self.tcl("set {0} [${1} create $::ixScenarioElementType(kNetTraffic)]".format(nettraffic.name, self.name))
return nettraffic
def create_dut(self):
dut = IxLoadSETkDutBasic(self.tcl)
self.tcl("set {0} [${1} create $::ixScenarioElementType(kDutBasic)]".format(dut.name, self.name))
return dut
class IxLoadScenarioFactory(IxLoadGenericObjectMixin):
NAME = "scenarioFactory"
# IXLOAD_CLASS = "getScenarioFactory"
def create_scenario(self):
scenario = IxLoadScenario(self.tcl)
self.tcl("set {0} [${1} create \"Scenario\"]".format(scenario.name, self.name))
scenario.clear("columnList")
scenario.clear("links")
return scenario
class IxLoadScenario(IxLoadGenericObjectMixin):
NAME = "Scenario"
def __init__(self, tcl):
super(IxLoadScenario, self).__init__(tcl)
self.columnlist = []
def append_columnlist(self, column):
self.tcl("${0} columnList.appendItem -object ${1}".format(self.name, column.name))
self.columnlist.append(column)
def new_traffic_column(self):
return IxLoadixTrafficColumn(self.tcl)
class IxLoadixTrafficColumn(IxLoadGenericObjectMixin):
NAME = "ixTrafficColumn"
IXLOAD_CLASS = "ixTrafficColumn"
def __init__(self, tcl):
super(IxLoadixTrafficColumn, self).__init__(tcl)
self.clear("elementList")
self.elementlist = []
def append_elementlist(self, element):
self.tcl("${0} elementList.appendItem -object ${1}".format(self.name, element.name))
self.elementlist.append(element)
class IxLoadixNetIxLoadSettingsPlugin(IxLoadGenericObjectMixin):
NAME = "Settings"
IXLOAD_CLASS = "ixNetIxLoadSettingsPlugin"
class IxLoadixNetFilterPlugin(IxLoadGenericObjectMixin):
NAME = "Filter"
IXLOAD_CLASS = "ixNetFilterPlugin"
class IxLoadixNetGratArpPlugin(IxLoadGenericObjectMixin):
NAME = "GratARP"
IXLOAD_CLASS = "ixNetGratArpPlugin"
class IxLoadixNetTCPPlugin(IxLoadGenericObjectMixin):
NAME = "TCP"
IXLOAD_CLASS = "ixNetTCPPlugin"
class IxLoadixNetDnsPlugin(IxLoadGenericObjectMixin):
NAME = "DNS"
IXLOAD_CLASS = "ixNetDnsPlugin"
def __init__(self, tcl):
super(IxLoadixNetDnsPlugin, self).__init__(tcl)
self.clear("hostList")
self.clear("searchList")
self.clear("nameServerList")
ixNetIxLoadPlugins = {"Settings": IxLoadixNetIxLoadSettingsPlugin,
"Filter": IxLoadixNetFilterPlugin,
"GratARP": IxLoadixNetGratArpPlugin,
"TCP": IxLoadixNetTCPPlugin,
"DNS": IxLoadixNetDnsPlugin, }
class IxLoadixNetEthernetELMPlugin(IxLoadGenericObjectMixin):
NAME = "ixNetEthernetELMPlugin"
IXLOAD_CLASS = "ixNetEthernetELMPlugin"
class IxLoadixNetDualPhyPlugin(IxLoadGenericObjectMixin):
NAME = "ixNetDualPhyPlugin"
IXLOAD_CLASS = "ixNetDualPhyPlugin"
class IxLoadIPRMacRange(IxLoadGenericObjectMixin):
NAME = "MAC_R"
class IxLoadIPRVlanIdRange(IxLoadGenericObjectMixin):
NAME = "VLAN_R"
class IxLoadixNetIpV4V6Range(IxLoadGenericObjectMixin):
NAME = "IP_R"
IXLOAD_CLASS = "ixNetIpV4V6Range"
def __init__(self, tcl):
super(IxLoadixNetIpV4V6Range, self).__init__(tcl)
self.macrange = None
self.vlanidrange = None
def get_macrange(self):
self.macrange = IxLoadIPRMacRange(self.tcl)
self.tcl("set {0} [${1} getLowerRelatedRange \"MacRange\"]".format(self.macrange.name, self.name))
return self.macrange
def get_vlanidrange(self):
self.vlanidrange = IxLoadIPRVlanIdRange(self.tcl)
self.tcl("set {0} [${1} getLowerRelatedRange \"VlanIdRange\"]".format(self.vlanidrange.name, self.name))
return self.vlanidrange
class IxLoadixNetRangeGroup(IxLoadGenericObjectMixin):
NAME = "DistGroup"
IXLOAD_CLASS = "ixNetRangeGroup"
def __init__(self, tcl):
super(IxLoadixNetRangeGroup, self).__init__(tcl)
self.clear("rangeList")
self.ranges = []
def append_range(self, iprange):
self.tcl("${0} rangeList.appendItem -object ${1}".format(self.name, iprange.name))
self.ranges.append(iprange)
class IxLoadixNetIpV4V6Plugin(IxLoadGenericObjectMixin):
NAME = "IP"
IXLOAD_CLASS = "ixNetIpV4V6Plugin"
def __init__(self, tcl):
super(IxLoadixNetIpV4V6Plugin, self).__init__(tcl)
self.clear("childrenList")
self.clear("extensionList")
self.clear("rangeList")
self.clear("rangeGroups")
self.ranges = []
self.distgroup = None
def new_range(self):
_range = IxLoadixNetIpV4V6Range(self.tcl)
self.tcl("${0} rangeList.appendItem -object ${1}".format(self.name, _range.name))
self.ranges.append(_range)
return _range
def new_distgroup(self):
self.distgroup = IxLoadixNetRangeGroup(self.tcl)
self.tcl("${0} rangeGroups.appendItem -object ${1}".format(self.name, self.distgroup.name))
return self.distgroup
def append_iprange(self, ip_range):
if self.distgroup is None:
self.new_distgroup()
self.tcl("${0} rangeList.appendItem -object ${1}".format(self.distgroup.name, ip_range.name))
self.distgroup.config(distribType=0, _Stale=False, name=self.distgroup.name)
class IxLoadixNetL2EthernetPlugin(IxLoadGenericObjectMixin):
NAME = "MAC_VLAN"
IXLOAD_CLASS = "ixNetL2EthernetPlugin"
def __init__(self, tcl):
super(IxLoadixNetL2EthernetPlugin, self).__init__(tcl)
self.clear("childrenList")
self.clear("extensionList")
self.ipplugin = None
def new_ipplugin(self):
self.ipplugin = IxLoadixNetIpV4V6Plugin(self.tcl)
self.tcl("${0} childrenList.appendItem -object ${1}".format(self.name, self.ipplugin.name))
self.ipplugin.config(_Stale=False)
return self.ipplugin
class IxLoadNetworkL1Plugin(IxLoadGenericObjectMixin):
NAME = "Ethernet"
def __init__(self, tcl):
super(IxLoadNetworkL1Plugin, self).__init__(tcl)
self.elm = None
self.phy = None
self.l2plugin = []
def init(self):
self.clear("childrenList")
self.clear("extensionList")
def new_l2plugin(self):
l2plugin = IxLoadixNetL2EthernetPlugin(self.tcl)
self.tcl("${0} childrenList.appendItem -object ${1}".format(self.name, l2plugin.name))
l2plugin.config(_Stale=False)
self.l2plugin.append(l2plugin)
return l2plugin
class IxLoadixLinearTimeSegment(IxLoadGenericObjectMixin):
NAME = "Linear_Segment"
IXLOAD_CLASS = "ixLinearTimeSegment"
class IxLoadixAdvancedIteration(IxLoadGenericObjectMixin):
NAME = "ixAdvancedIteration"
IXLOAD_CLASS = "ixAdvancedIteration"
def __init__(self, tcl):
super(IxLoadixAdvancedIteration, self).__init__(tcl)
self.clear("segmentList")
self.segmentlist = []
def append_segmentlist(self, segment):
self.tcl("${0} segmentList.appendItem -object ${1}".format(self.name, segment.name))
self.segmentlist.append(segment)
def add_segment(self, **kwargs):
segment = IxLoadixLinearTimeSegment(self.tcl)
segment.config(**kwargs)
self.append_segmentlist(segment)
class IxLoadixTimeline(IxLoadGenericObjectMixin):
NAME = "Timeline"
IXLOAD_CLASS = "ixTimeline"
def __init__(self, tcl):
super(IxLoadixTimeline, self).__init__(tcl)
self.iteration = None
def new_iteration(self):
self.iteration = IxLoadixAdvancedIteration(self.tcl)
return self.iteration
class IxLoadixMatchLongestTimeline(IxLoadGenericObjectMixin):
NAME = "Timeline_Match_Longest"
IXLOAD_CLASS = "ixMatchLongestTimeline"
class IxLoadixHttpCommand(IxLoadGenericObjectMixin):
NAME = "ixHttpCommand"
IXLOAD_CLASS = "ixHttpCommand"
class IxLoadixHttpHeaderString(IxLoadGenericObjectMixin):
NAME = "ixHttpHeaderString"
IXLOAD_CLASS = "ixHttpHeaderString"
class IxLoadResponseHeader(IxLoadGenericObjectMixin):
NAME = "RespondHeader"
IXLOAD_CLASS = "ResponseHeader"
def __init__(self, tcl):
super(IxLoadResponseHeader, self).__init__(tcl)
self.clear("responseList")
class IxLoadPageObject(IxLoadGenericObjectMixin):
NAME = "PageObject"
IXLOAD_CLASS = "PageObject"
response = None
class IxLoadCookieObject(IxLoadGenericObjectMixin):
NAME = "CookieObject"
IXLOAD_CLASS = "CookieObject"
def __init__(self, tcl):
super(IxLoadCookieObject, self).__init__(tcl)
self.clear("cookieContentList")
self.cookiecontentlist = []
def append_cookiecontent(self, cookiecontent):
self.tcl("${0} cookieContentList.appendItem -object ${1}".format(self.name, cookiecontent.name))
self.cookiecontentlist.append(cookiecontent)
class IxLoadixCookieContent(IxLoadGenericObjectMixin):
NAME = "ixCookieContent"
IXLOAD_CLASS = "ixCookieContent"
class IxLoadCustomPayloadObject(IxLoadGenericObjectMixin):
NAME = "CustomPayloadObject"
IXLOAD_CLASS = "CustomPayloadObject"
class IxLoadHTTPClient(IxLoadGenericObjectMixin):
NAME = "HTTPClient"
def __init__(self, tcl):
super(IxLoadHTTPClient, self).__init__(tcl)
def init(self):
self.clear("agent.actionList")
self.clear("agent.cmdPercentagePool.percentageCommandList")
self.agent_actionlist = [] # pylint: disable=attribute-defined-outside-init
self.agent_headerlist = [] # pylint: disable=attribute-defined-outside-init
self.timeline = None # pylint: disable=attribute-defined-outside-init
def append_agent_actionlist(self, agent_action):
self.tcl("${0} agent.actionList.appendItem -object ${1}".format(self.name, agent_action.name))
self.agent_actionlist.append(agent_action)
def append_agent_headerlist(self, agent_header):
self.tcl("${0} agent.headerList.appendItem -object ${1}".format(self.name, agent_header.name))
self.agent_headerlist.append(agent_header)
def new_timeline(self):
self.timeline = IxLoadixTimeline(self.tcl) # pylint: disable=attribute-defined-outside-init
return self.timeline
def config_percentagecmdlist(self, **kwargs):
params = self._gen_params(**kwargs) if kwargs else ""
self.tcl("${0} agent.cmdPercentagePool.percentageCommandList.clear".format(self.name))
self.tcl("${0} agent.cmdPercentagePool.config {1}".format(self.name, params))
def config_agent(self, **kwargs):
params = self._gen_params(**kwargs) if kwargs else ""
self.tcl("${0} agent.config {1}".format(self.name, params))
def modify_objectivevalue(self, value):
return self.tcl("$%s config -objectiveValue %s; " % (self.name, value) +
"set canSetObjectiveValue [$%s canSetObjectiveValue]; " % (self.name, ) +
"puts \"Can set objective value? - $canSetObjectiveValue\"; " +
"if { $canSetObjectiveValue } { $%s applyObjectiveValues } " % (self.name, ) +
"{puts \"Failed to set objectiveValue for %s.\"}; " % (self.name, ) +
"return $canSetObjectiveValue")
def config_timeline(self, **kwargs):
self.new_timeline()
segments = kwargs.pop("segments") if "segments" in kwargs else []
if segments:
iteration = self.timeline.new_iteration()
for segment in segments:
iteration.add_segment(**segment)
kwargs.update({"advancedIteration": iteration})
self.timeline.config(**kwargs)
return self.timeline
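# Illustrative usage sketch; the segment keys below are hypothetical and must
# match whatever ixLinearTimeSegment parameters the loaded IxLoad version
# accepts:
#
#   client.config_timeline(name="Timeline1",
#                          segments=[{"duration": 60, "endObjectiveValue": 100},
#                                    {"duration": 120, "endObjectiveValue": 500}])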
def add_command(self, **kwargs):
httpcmd = IxLoadixHttpCommand(self.tcl)
httpcmd.config(**kwargs)
self.append_agent_actionlist(httpcmd)
def add_header(self, data):
header = IxLoadixHttpHeaderString(self.tcl)
header.config(data=data)
self.append_agent_headerlist(header)
class IxLoadHTTPServer(IxLoadGenericObjectMixin):
NAME = "HTTPServer"
def __init__(self, tcl):
super(IxLoadHTTPServer, self).__init__(tcl)
self.timeline = None
self.pagelist = []
self.cookielist = []
self.payloadlist = []
self.responseheaderlist = []
def init(self):
self.clear("agent.cookieList")
self.clear("agent.webPageList")
self.clear("agent.customPayloadList")
self.clear("agent.responseHeaderList")
def new_timeline(self):
self.timeline = IxLoadixMatchLongestTimeline(self.tcl)
return self.timeline
def append_pageobject(self, page):
self.tcl("${0} agent.webPageList.appendItem -object ${1}".format(self.name, page.name))
self.pagelist.append(page)
def new_response(self, code="200"):
response = IxLoadResponseHeader(self.tcl)
_t1 = time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime())
_t2 = time.strftime("%Y/%m/%d %H:%M:%S", time.gmtime(time.time() + 2592000))
if code == "200":
name = "200_OK"
elif code == "404":
name = "404_PageNotFound"
else:
name = "Custom"
response.config(code=code, name=name, description=name,
lastModifiedDateTimeValue=_t1, dateTimeValue=_t1,
expirationDateTimeValue=_t2)
return response
def add_pageobject(self, response="200", **kwargs):
page = IxLoadPageObject(self.tcl)
page.response = self.new_response(response)
page.config(response=page.response, **kwargs)
self.append_pageobject(page)
def append_cookielist(self, cookie):
self.tcl("${0} agent.cookieList.appendItem -object ${1}".format(self.name, cookie.name))
self.cookielist.append(cookie)
def add_cookie(self, cookiecontentlist):
_cookie = IxLoadCookieObject(self.tcl)
for cookie_params in cookiecontentlist:
_cookiecontent = IxLoadixCookieContent(self.tcl)
_cookiecontent.config(**cookie_params)
_cookie.append_cookiecontent(_cookiecontent)
self.append_cookielist(_cookie)
def append_payloadlist(self, payload):
self.tcl("${0} agent.customPayloadList.appendItem -object ${1}".format(self.name, payload.name))
self.payloadlist.append(payload)
def add_payload(self, **kwargs):
payload = IxLoadCustomPayloadObject(self.tcl)
payload.config(**kwargs)
self.append_payloadlist(payload)
def append_responseheaderlist(self, responseheader):
self.tcl("${0} agent.responseHeaderList.appendItem -object ${1}".format(self.name, responseheader.name))
self.responseheaderlist.append(responseheader)
def config_agent(self, **kwargs):
params = self._gen_params(**kwargs) if kwargs else ""
self.tcl("${0} agent.config {1}".format(self.name, params))
def config_timeline(self, **kwargs):
self.new_timeline()
self.config(name=self.name, timeline=self.timeline)
activityListItems = {"HTTP Client": IxLoadHTTPClient,
"HTTP Server": IxLoadHTTPServer, }
class IxLoadNetTrafficNetwork(IxLoadGenericObjectMixin):
NAME = "NetTrafficNetwork"
def __init__(self, tcl):
super(IxLoadNetTrafficNetwork, self).__init__(tcl)
self.plugins = {}
self.l1plugin = None
self.activities = {}
def append_portlist(self, port):
chass, card, pid = port.split("/")
self.tcl("${0} portList.appendItem -chassisId {1} -cardId {2} -portId {3}".
format(self.name, chass, card, pid))
def clear_plugins(self):
# self.clear("childrenList")
self.clear("globalPlugins")
def new_plugin(self, plugin):
_plugin = ixNetIxLoadPlugins[plugin](self.tcl)
self.tcl("${0} globalPlugins.appendItem -object ${1}".format(self.name, _plugin.name))
if _plugin.NAME not in self.plugins:
self.plugins[_plugin.NAME] = {}
self.plugins[_plugin.NAME][_plugin.name] = _plugin
return _plugin
def new_l1plugin(self):
self.l1plugin = IxLoadNetworkL1Plugin(self.tcl)
self.tcl("set {0} [${1} getL1Plugin]".format(self.l1plugin.name, self.name))
self.l1plugin.init()
return self.l1plugin
class IxLoadSETkNetTraffic(IxLoadGenericObjectMixin):
# NAME = "NetworkTraffic"
NAME = "Traffic_Network"
PORTOPERMODE = {"ThroughputAcceleration": "kOperationModeThroughputAcceleration"}
def __init__(self, tcl):
super(IxLoadSETkNetTraffic, self).__init__(tcl)
self.activities = {}
self.activitydst = None
def new_network(self):
network = IxLoadNetTrafficNetwork(self.tcl)
self.tcl("set {0} [${1} cget -network]".format(network.name, self.name))
network.clear_plugins()
return network
def set_portopermode(self, mode):
self.tcl("${0} setPortOperationModeAllowed $::ixPort(kOperationModeThroughputAcceleration) {1}".format(self.name, mode))
def set_tcpaccel(self, mode):
self.tcl("${0} setTcpAccelerationAllowed $::ixAgent(kTcpAcceleration) {1}".format(self.name, mode))
def config_traffic(self):
self.tcl("${0} traffic.config".format(self.name))
def new_activity(self, activity):
_activity = activityListItems[activity](self.tcl)
self.tcl("set {0} [${1} activityList.appendItem -protocolAndType \"{2}\"]".
format(_activity.name, self.name, activity))
_activity.init()
if activity not in self.activities:
self.activities[activity] = []
# self.activities[activity][_activity.name] = _activity
self.activities[activity].append(_activity)
return _activity
def get_activitydst(self, *args):
self.activitydst = IxLoadDestinationForActivity(self.tcl)
params = str(args)[1:-1].replace("'", '"').replace(",", "")
self.tcl("set {0} [${1} getDestinationForActivity {2}]".format(self.activitydst.name, self.name, params))
return self.activitydst
def set_activityendpoint(self, iprange, activity, activity_type, enable):
self.tcl("${0} setActivityEndPointAvailableForSmRange ${1} \"{2}\" \"{3}\" {4}".
format(self.name, iprange.name, activity.name, activity_type, enable))
class IxLoadSETkDutBasic(IxLoadGenericObjectMixin):
NAME = "DUT"
def __init__(self, tcl):
super(IxLoadSETkDutBasic, self).__init__(tcl)
self.cfg_packetswitch = None
self.cfg_vip = None
def new_cfg_packetswitch(self):
self.cfg_packetswitch = IxLoadixDutConfigPacketSwitch(self.tcl)
return self.cfg_packetswitch
def new_cfg_vip(self):
self.cfg_vip = IxLoadixDutConfigVip(self.tcl)
return self.cfg_vip
class IxLoadixDutConfigPacketSwitch(IxLoadGenericObjectMixin):
NAME = "ixDutConfigPacketSwitch"
IXLOAD_CLASS = "ixDutConfigPacketSwitch"
def __init__(self, tcl):
super(IxLoadixDutConfigPacketSwitch, self).__init__(tcl)
self.clear("originateProtocolPortRangeList")
self.clear("terminateProtocolPortRangeList")
self.clear("terminateNetworkRangeList")
self.clear("originateNetworkRangeList")
self.originatenetworkrangelist = []
self.terminatenetworkrangelist = []
def append_networkrangelist(self, dst, networkrange):
self.tcl("${0} {1}NetworkRangeList.appendItem -object ${2}".format(self.name, dst, networkrange.name))
getattr(self, "{0}networkrangelist".format(dst)).append(networkrange)
def add_networkrange(self, dst, **kwargs):
networkrange = IxLoadixDutNetworkRange(self.tcl)
name = "DUT NetworkRange{0} {1}+{2}".format(len(getattr(self, "{0}networkrangelist".format(dst))),
kwargs['firstIp'], kwargs['ipCount'])
networkrange.config(name=name, **kwargs)
self.append_networkrangelist(dst, networkrange)
class IxLoadixDutNetworkRange(IxLoadGenericObjectMixin):
NAME = "ixDutNetworkRange"
IXLOAD_CLASS = "ixDutNetworkRange"
class IxLoadDestinationForActivity(IxLoadGenericObjectMixin):
NAME = "DestinationForActivity"
class IxLoadixTestSessionSpecificData(IxLoadGenericObjectMixin):
NAME = "SessionSpecificData"
class IxLoadixTestProfileDirectory(IxLoadGenericObjectMixin):
NAME = "profileDirectory"
class IxLoadixDutConfigVip(IxLoadGenericObjectMixin):
NAME = "ixDutConfigVip"
IXLOAD_CLASS = "ixDutConfigVip"
class IxLoadstatCollectorUtils(IxLoadGenericObjectMixin):
NAME = "NS"
def __init__(self, tcl):
super(IxLoadstatCollectorUtils, self).__init__(tcl)
self.tcl("set {0} statCollectorUtils".format(self.name))
self.status = False
# self.tcl("proc ::my_stat_collector_command {args} { " +
# "puts \"=====================================\"; " +
# "puts \"INCOMING STAT RECORD >>> $args\"; " +
# "puts \"=====================================\";" +
# "}")
def init_tsh(self, tsh):
self.tcl("${%s}::Initialize -testServerHandle $%s;" % (self.name, tsh.name) +
"${%s}::ClearStats" % (self.name, ))
def start(self):
# self.tcl("${%s}::StartCollector -command ::my_stat_collector_command -interval 2" % (self.name, ))
# self.tcl("${%s}::StartCollector -command collect_stats -interval 2" % (self.name, ))
self.tcl("${%s}::StartCollector -command collect_stats -interval 4" % (self.name, ))
self.status = True
def stop(self):
if self.status:
self.tcl("${%s}::StopCollector" % (self.name, ))
self.status = False
def clear(self):
self.tcl("${%s}::ClearStats" % (self.name, ))
class IxLoadTestServerHandle(IxLoadGenericObjectMixin):
NAME = "TestServerHandle"
|
|
# test_codecs.py from CPython 2.7, modified for Jython
from test import test_support
import unittest
import codecs
import locale
import sys, StringIO
if not test_support.is_jython:
import _testcapi
class Queue(object):
"""
queue: write bytes at one end, read bytes from the other end
"""
def __init__(self):
self._buffer = ""
def write(self, chars):
self._buffer += chars
def read(self, size=-1):
if size<0:
s = self._buffer
self._buffer = ""
return s
else:
s = self._buffer[:size]
self._buffer = self._buffer[size:]
return s
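# Illustrative sketch: the Queue behaves as a FIFO byte buffer, e.g.
#
#   q = Queue()
#   q.write("spam")
#   q.read(2)   # -> "sp"
#   q.read()    # -> "am"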
class ReadTest(unittest.TestCase):
def check_partial(self, input, partialresults):
# get a StreamReader for the encoding and feed the bytestring version
# of input to the reader byte by byte. Read everything available from
# the StreamReader and check that the results equal the appropriate
# entries from partialresults.
q = Queue()
r = codecs.getreader(self.encoding)(q)
result = u""
for (c, partialresult) in zip(input.encode(self.encoding), partialresults):
q.write(c)
result += r.read()
self.assertEqual(result, partialresult)
# check that there's nothing left in the buffers
self.assertEqual(r.read(), u"")
self.assertEqual(r.bytebuffer, "")
self.assertEqual(r.charbuffer, u"")
# do the check again, this time using a incremental decoder
d = codecs.getincrementaldecoder(self.encoding)()
result = u""
for (c, partialresult) in zip(input.encode(self.encoding), partialresults):
result += d.decode(c)
self.assertEqual(result, partialresult)
# check that there's nothing left in the buffers
self.assertEqual(d.decode("", True), u"")
self.assertEqual(d.buffer, "")
# Check whether the reset method works properly
d.reset()
result = u""
for (c, partialresult) in zip(input.encode(self.encoding), partialresults):
result += d.decode(c)
self.assertEqual(result, partialresult)
# check that there's nothing left in the buffers
self.assertEqual(d.decode("", True), u"")
self.assertEqual(d.buffer, "")
# check iterdecode()
encoded = input.encode(self.encoding)
self.assertEqual(
input,
u"".join(codecs.iterdecode(encoded, self.encoding))
)
def test_readline(self):
def getreader(input):
stream = StringIO.StringIO(input.encode(self.encoding))
return codecs.getreader(self.encoding)(stream)
def readalllines(input, keepends=True, size=None):
reader = getreader(input)
lines = []
while True:
line = reader.readline(size=size, keepends=keepends)
if not line:
break
lines.append(line)
return "|".join(lines)
s = u"foo\nbar\r\nbaz\rspam\u2028eggs"
sexpected = u"foo\n|bar\r\n|baz\r|spam\u2028|eggs"
sexpectednoends = u"foo|bar|baz|spam|eggs"
self.assertEqual(readalllines(s, True), sexpected)
self.assertEqual(readalllines(s, False), sexpectednoends)
self.assertEqual(readalllines(s, True, 10), sexpected)
self.assertEqual(readalllines(s, False, 10), sexpectednoends)
# Test long lines (multiple calls to read() in readline())
vw = []
vwo = []
for (i, lineend) in enumerate(u"\n \r\n \r \u2028".split()):
vw.append((i*200)*u"\u3042" + lineend)
vwo.append((i*200)*u"\u3042")
self.assertEqual(readalllines("".join(vw), True), "".join(vw))
self.assertEqual(readalllines("".join(vw), False),"".join(vwo))
# Test lines where the first read might end with \r, so the
# reader has to look ahead whether this is a lone \r or a \r\n
for size in xrange(80):
for lineend in u"\n \r\n \r \u2028".split():
s = 10*(size*u"a" + lineend + u"xxx\n")
reader = getreader(s)
for i in xrange(10):
self.assertEqual(
reader.readline(keepends=True),
size*u"a" + lineend,
)
reader = getreader(s)
for i in xrange(10):
self.assertEqual(
reader.readline(keepends=False),
size*u"a",
)
def test_bug1175396(self):
s = [
'<%!--===================================================\r\n',
' BLOG index page: show recent articles,\r\n',
' today\'s articles, or articles of a specific date.\r\n',
'========================================================--%>\r\n',
'<%@inputencoding="ISO-8859-1"%>\r\n',
'<%@pagetemplate=TEMPLATE.y%>\r\n',
'<%@import=import frog.util, frog%>\r\n',
'<%@import=import frog.objects%>\r\n',
'<%@import=from frog.storageerrors import StorageError%>\r\n',
'<%\r\n',
'\r\n',
'import logging\r\n',
'log=logging.getLogger("Snakelets.logger")\r\n',
'\r\n',
'\r\n',
'user=self.SessionCtx.user\r\n',
'storageEngine=self.SessionCtx.storageEngine\r\n',
'\r\n',
'\r\n',
'def readArticlesFromDate(date, count=None):\r\n',
' entryids=storageEngine.listBlogEntries(date)\r\n',
' entryids.reverse() # descending\r\n',
' if count:\r\n',
' entryids=entryids[:count]\r\n',
' try:\r\n',
' return [ frog.objects.BlogEntry.load(storageEngine, date, Id) for Id in entryids ]\r\n',
' except StorageError,x:\r\n',
' log.error("Error loading articles: "+str(x))\r\n',
' self.abort("cannot load articles")\r\n',
'\r\n',
'showdate=None\r\n',
'\r\n',
'arg=self.Request.getArg()\r\n',
'if arg=="today":\r\n',
' #-------------------- TODAY\'S ARTICLES\r\n',
' self.write("<h2>Today\'s articles</h2>")\r\n',
' showdate = frog.util.isodatestr() \r\n',
' entries = readArticlesFromDate(showdate)\r\n',
'elif arg=="active":\r\n',
' #-------------------- ACTIVE ARTICLES redirect\r\n',
' self.Yredirect("active.y")\r\n',
'elif arg=="login":\r\n',
' #-------------------- LOGIN PAGE redirect\r\n',
' self.Yredirect("login.y")\r\n',
'elif arg=="date":\r\n',
' #-------------------- ARTICLES OF A SPECIFIC DATE\r\n',
' showdate = self.Request.getParameter("date")\r\n',
' self.write("<h2>Articles written on %s</h2>"% frog.util.mediumdatestr(showdate))\r\n',
' entries = readArticlesFromDate(showdate)\r\n',
'else:\r\n',
' #-------------------- RECENT ARTICLES\r\n',
' self.write("<h2>Recent articles</h2>")\r\n',
' dates=storageEngine.listBlogEntryDates()\r\n',
' if dates:\r\n',
' entries=[]\r\n',
' SHOWAMOUNT=10\r\n',
' for showdate in dates:\r\n',
' entries.extend( readArticlesFromDate(showdate, SHOWAMOUNT-len(entries)) )\r\n',
' if len(entries)>=SHOWAMOUNT:\r\n',
' break\r\n',
' \r\n',
]
stream = StringIO.StringIO("".join(s).encode(self.encoding))
reader = codecs.getreader(self.encoding)(stream)
for (i, line) in enumerate(reader):
self.assertEqual(line, s[i])
def test_readlinequeue(self):
q = Queue()
writer = codecs.getwriter(self.encoding)(q)
reader = codecs.getreader(self.encoding)(q)
# No lineends
writer.write(u"foo\r")
self.assertEqual(reader.readline(keepends=False), u"foo")
writer.write(u"\nbar\r")
self.assertEqual(reader.readline(keepends=False), u"")
self.assertEqual(reader.readline(keepends=False), u"bar")
writer.write(u"baz")
self.assertEqual(reader.readline(keepends=False), u"baz")
self.assertEqual(reader.readline(keepends=False), u"")
# Lineends
writer.write(u"foo\r")
self.assertEqual(reader.readline(keepends=True), u"foo\r")
writer.write(u"\nbar\r")
self.assertEqual(reader.readline(keepends=True), u"\n")
self.assertEqual(reader.readline(keepends=True), u"bar\r")
writer.write(u"baz")
self.assertEqual(reader.readline(keepends=True), u"baz")
self.assertEqual(reader.readline(keepends=True), u"")
writer.write(u"foo\r\n")
self.assertEqual(reader.readline(keepends=True), u"foo\r\n")
def test_bug1098990_a(self):
s1 = u"xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy\r\n"
s2 = u"offending line: ladfj askldfj klasdj fskla dfzaskdj fasklfj laskd fjasklfzzzzaa%whereisthis!!!\r\n"
s3 = u"next line.\r\n"
s = (s1+s2+s3).encode(self.encoding)
stream = StringIO.StringIO(s)
reader = codecs.getreader(self.encoding)(stream)
self.assertEqual(reader.readline(), s1)
self.assertEqual(reader.readline(), s2)
self.assertEqual(reader.readline(), s3)
self.assertEqual(reader.readline(), u"")
def test_bug1098990_b(self):
s1 = u"aaaaaaaaaaaaaaaaaaaaaaaa\r\n"
s2 = u"bbbbbbbbbbbbbbbbbbbbbbbb\r\n"
s3 = u"stillokay:bbbbxx\r\n"
s4 = u"broken!!!!badbad\r\n"
s5 = u"againokay.\r\n"
s = (s1+s2+s3+s4+s5).encode(self.encoding)
stream = StringIO.StringIO(s)
reader = codecs.getreader(self.encoding)(stream)
self.assertEqual(reader.readline(), s1)
self.assertEqual(reader.readline(), s2)
self.assertEqual(reader.readline(), s3)
self.assertEqual(reader.readline(), s4)
self.assertEqual(reader.readline(), s5)
self.assertEqual(reader.readline(), u"")
class UTF32Test(ReadTest):
encoding = "utf-32"
spamle = ('\xff\xfe\x00\x00'
's\x00\x00\x00p\x00\x00\x00a\x00\x00\x00m\x00\x00\x00'
's\x00\x00\x00p\x00\x00\x00a\x00\x00\x00m\x00\x00\x00')
spambe = ('\x00\x00\xfe\xff'
'\x00\x00\x00s\x00\x00\x00p\x00\x00\x00a\x00\x00\x00m'
'\x00\x00\x00s\x00\x00\x00p\x00\x00\x00a\x00\x00\x00m')
def test_only_one_bom(self):
_,_,reader,writer = codecs.lookup(self.encoding)
# encode some stream
s = StringIO.StringIO()
f = writer(s)
f.write(u"spam")
f.write(u"spam")
d = s.getvalue()
# check whether there is exactly one BOM in it
self.assertTrue(d == self.spamle or d == self.spambe)
# try to read it back
s = StringIO.StringIO(d)
f = reader(s)
self.assertEqual(f.read(), u"spamspam")
def test_badbom(self):
s = StringIO.StringIO(4*"\xff")
f = codecs.getreader(self.encoding)(s)
self.assertRaises(UnicodeError, f.read)
s = StringIO.StringIO(8*"\xff")
f = codecs.getreader(self.encoding)(s)
self.assertRaises(UnicodeError, f.read)
def test_partial(self):
self.check_partial(
u"\x00\xff\u0100\uffff",
[
u"", # first byte of BOM read
u"", # second byte of BOM read
u"", # third byte of BOM read
u"", # fourth byte of BOM read => byteorder known
u"",
u"",
u"",
u"\x00",
u"\x00",
u"\x00",
u"\x00",
u"\x00\xff",
u"\x00\xff",
u"\x00\xff",
u"\x00\xff",
u"\x00\xff\u0100",
u"\x00\xff\u0100",
u"\x00\xff\u0100",
u"\x00\xff\u0100",
u"\x00\xff\u0100\uffff",
]
)
def test_handlers(self):
self.assertEqual((u'\ufffd', 1),
codecs.utf_32_decode('\x01', 'replace', True))
self.assertEqual((u'', 1),
codecs.utf_32_decode('\x01', 'ignore', True))
def test_errors(self):
self.assertRaises(UnicodeDecodeError, codecs.utf_32_decode,
"\xff", "strict", True)
def test_issue8941(self):
# Issue #8941: insufficient result allocation when decoding into
# surrogate pairs on UCS-2 builds.
encoded_le = '\xff\xfe\x00\x00' + '\x00\x00\x01\x00' * 1024
self.assertEqual(u'\U00010000' * 1024,
codecs.utf_32_decode(encoded_le)[0])
encoded_be = '\x00\x00\xfe\xff' + '\x00\x01\x00\x00' * 1024
self.assertEqual(u'\U00010000' * 1024,
codecs.utf_32_decode(encoded_be)[0])
class UTF32LETest(ReadTest):
encoding = "utf-32-le"
def test_partial(self):
self.check_partial(
u"\x00\xff\u0100\uffff",
[
u"",
u"",
u"",
u"\x00",
u"\x00",
u"\x00",
u"\x00",
u"\x00\xff",
u"\x00\xff",
u"\x00\xff",
u"\x00\xff",
u"\x00\xff\u0100",
u"\x00\xff\u0100",
u"\x00\xff\u0100",
u"\x00\xff\u0100",
u"\x00\xff\u0100\uffff",
]
)
def test_simple(self):
self.assertEqual(u"\U00010203".encode(self.encoding), "\x03\x02\x01\x00")
def test_errors(self):
self.assertRaises(UnicodeDecodeError, codecs.utf_32_le_decode,
"\xff", "strict", True)
def test_issue8941(self):
# Issue #8941: insufficient result allocation when decoding into
# surrogate pairs on UCS-2 builds.
encoded = '\x00\x00\x01\x00' * 1024
self.assertEqual(u'\U00010000' * 1024,
codecs.utf_32_le_decode(encoded)[0])
class UTF32BETest(ReadTest):
encoding = "utf-32-be"
def test_partial(self):
self.check_partial(
u"\x00\xff\u0100\uffff",
[
u"",
u"",
u"",
u"\x00",
u"\x00",
u"\x00",
u"\x00",
u"\x00\xff",
u"\x00\xff",
u"\x00\xff",
u"\x00\xff",
u"\x00\xff\u0100",
u"\x00\xff\u0100",
u"\x00\xff\u0100",
u"\x00\xff\u0100",
u"\x00\xff\u0100\uffff",
]
)
def test_simple(self):
self.assertEqual(u"\U00010203".encode(self.encoding), "\x00\x01\x02\x03")
def test_errors(self):
self.assertRaises(UnicodeDecodeError, codecs.utf_32_be_decode,
"\xff", "strict", True)
def test_issue8941(self):
# Issue #8941: insufficient result allocation when decoding into
# surrogate pairs on UCS-2 builds.
encoded = '\x00\x01\x00\x00' * 1024
self.assertEqual(u'\U00010000' * 1024,
codecs.utf_32_be_decode(encoded)[0])
class UTF16Test(ReadTest):
encoding = "utf-16"
spamle = '\xff\xfes\x00p\x00a\x00m\x00s\x00p\x00a\x00m\x00'
spambe = '\xfe\xff\x00s\x00p\x00a\x00m\x00s\x00p\x00a\x00m'
def test_only_one_bom(self):
_,_,reader,writer = codecs.lookup(self.encoding)
# encode some stream
s = StringIO.StringIO()
f = writer(s)
f.write(u"spam")
f.write(u"spam")
d = s.getvalue()
# check whether there is exactly one BOM in it
self.assertTrue(d == self.spamle or d == self.spambe)
# try to read it back
s = StringIO.StringIO(d)
f = reader(s)
self.assertEqual(f.read(), u"spamspam")
def test_badbom(self):
s = StringIO.StringIO("\xff\xff")
f = codecs.getreader(self.encoding)(s)
self.assertRaises(UnicodeError, f.read)
s = StringIO.StringIO("\xff\xff\xff\xff")
f = codecs.getreader(self.encoding)(s)
self.assertRaises(UnicodeError, f.read)
def test_partial(self):
self.check_partial(
u"\x00\xff\u0100\uffff",
[
u"", # first byte of BOM read
u"", # second byte of BOM read => byteorder known
u"",
u"\x00",
u"\x00",
u"\x00\xff",
u"\x00\xff",
u"\x00\xff\u0100",
u"\x00\xff\u0100",
u"\x00\xff\u0100\uffff",
]
)
def test_handlers(self):
self.assertEqual((u'\ufffd', 1),
codecs.utf_16_decode('\x01', 'replace', True))
self.assertEqual((u'', 1),
codecs.utf_16_decode('\x01', 'ignore', True))
def test_errors(self):
self.assertRaises(UnicodeDecodeError, codecs.utf_16_decode, "\xff", "strict", True)
def test_bug691291(self):
# Files are always opened in binary mode, even if no binary mode was
# specified. This means that no automatic conversion of '\n' is done
# on reading and writing.
s1 = u'Hello\r\nworld\r\n'
s = s1.encode(self.encoding)
self.addCleanup(test_support.unlink, test_support.TESTFN)
with open(test_support.TESTFN, 'wb') as fp:
fp.write(s)
with codecs.open(test_support.TESTFN, 'U', encoding=self.encoding) as reader:
self.assertEqual(reader.read(), s1)
class UTF16LETest(ReadTest):
encoding = "utf-16-le"
def test_partial(self):
self.check_partial(
u"\x00\xff\u0100\uffff",
[
u"",
u"\x00",
u"\x00",
u"\x00\xff",
u"\x00\xff",
u"\x00\xff\u0100",
u"\x00\xff\u0100",
u"\x00\xff\u0100\uffff",
]
)
def test_errors(self):
self.assertRaises(UnicodeDecodeError, codecs.utf_16_le_decode, "\xff", "strict", True)
class UTF16BETest(ReadTest):
encoding = "utf-16-be"
def test_partial(self):
self.check_partial(
u"\x00\xff\u0100\uffff",
[
u"",
u"\x00",
u"\x00",
u"\x00\xff",
u"\x00\xff",
u"\x00\xff\u0100",
u"\x00\xff\u0100",
u"\x00\xff\u0100\uffff",
]
)
def test_errors(self):
self.assertRaises(UnicodeDecodeError, codecs.utf_16_be_decode, "\xff", "strict", True)
class UTF8Test(ReadTest):
encoding = "utf-8"
def test_partial(self):
self.check_partial(
u"\x00\xff\u07ff\u0800\uffff",
[
u"\x00",
u"\x00",
u"\x00\xff",
u"\x00\xff",
u"\x00\xff\u07ff",
u"\x00\xff\u07ff",
u"\x00\xff\u07ff",
u"\x00\xff\u07ff\u0800",
u"\x00\xff\u07ff\u0800",
u"\x00\xff\u07ff\u0800",
u"\x00\xff\u07ff\u0800\uffff",
]
)
class UTF7Test(ReadTest):
encoding = "utf-7"
def test_partial(self):
self.check_partial(
u"a+-b",
[
u"a",
u"a",
u"a+",
u"a+-",
u"a+-b",
]
)
# Jython extra (test supplementary characters)
@unittest.skipIf(not test_support.is_jython, "supplementary character (surrogate pair) handling requires Jython")
def test_partial_supp(self):
# Check the encoding is what we think it is
ustr = u"x\U00023456.\u0177\U00023456\u017az"
bstr = b'x+2E3cVg.+AXfYTdxWAXo-z'
self.assertEqual(ustr.encode(self.encoding), bstr)
self.check_partial(
ustr,
[
u"x",
u"x", # '+' added: begins Base64
u"x",
u"x",
u"x",
u"x",
u"x",
u"x",
u"x\U00023456.", # '.' added: ends Base64
u"x\U00023456.", # '+' added: begins Base64
u"x\U00023456.",
u"x\U00023456.",
u"x\U00023456.",
u"x\U00023456.",
u"x\U00023456.",
u"x\U00023456.",
u"x\U00023456.",
u"x\U00023456.",
u"x\U00023456.",
u"x\U00023456.",
u"x\U00023456.",
u"x\U00023456.\u0177\U00023456\u017a", # '-' added: ends Base64
u"x\U00023456.\u0177\U00023456\u017az",
]
)
class UTF16ExTest(unittest.TestCase):
def test_errors(self):
self.assertRaises(UnicodeDecodeError, codecs.utf_16_ex_decode, "\xff", "strict", 0, True)
def test_bad_args(self):
self.assertRaises(TypeError, codecs.utf_16_ex_decode)
@unittest.skipIf(test_support.is_jython, "FIXME Jython has no _codecs.readbuffer_encode method")
class ReadBufferTest(unittest.TestCase):
def test_array(self):
import array
self.assertEqual(
codecs.readbuffer_encode(array.array("c", "spam")),
("spam", 4)
)
def test_empty(self):
self.assertEqual(codecs.readbuffer_encode(""), ("", 0))
def test_bad_args(self):
self.assertRaises(TypeError, codecs.readbuffer_encode)
self.assertRaises(TypeError, codecs.readbuffer_encode, 42)
@unittest.skipIf(test_support.is_jython, "FIXME Jython has no _codecs.charbuffer_encode method")
class CharBufferTest(unittest.TestCase):
def test_string(self):
self.assertEqual(codecs.charbuffer_encode("spam"), ("spam", 4))
def test_empty(self):
self.assertEqual(codecs.charbuffer_encode(""), ("", 0))
def test_bad_args(self):
self.assertRaises(TypeError, codecs.charbuffer_encode)
self.assertRaises(TypeError, codecs.charbuffer_encode, 42)
class UTF8SigTest(ReadTest):
encoding = "utf-8-sig"
def test_partial(self):
self.check_partial(
u"\ufeff\x00\xff\u07ff\u0800\uffff",
[
u"",
u"",
u"", # First BOM has been read and skipped
u"",
u"",
u"\ufeff", # Second BOM has been read and emitted
u"\ufeff\x00", # "\x00" read and emitted
u"\ufeff\x00", # First byte of encoded u"\xff" read
u"\ufeff\x00\xff", # Second byte of encoded u"\xff" read
u"\ufeff\x00\xff", # First byte of encoded u"\u07ff" read
u"\ufeff\x00\xff\u07ff", # Second byte of encoded u"\u07ff" read
u"\ufeff\x00\xff\u07ff",
u"\ufeff\x00\xff\u07ff",
u"\ufeff\x00\xff\u07ff\u0800",
u"\ufeff\x00\xff\u07ff\u0800",
u"\ufeff\x00\xff\u07ff\u0800",
u"\ufeff\x00\xff\u07ff\u0800\uffff",
]
)
def test_bug1601501(self):
# SF bug #1601501: check that the codec works with a buffer
unicode("\xef\xbb\xbf", "utf-8-sig")
def test_bom(self):
d = codecs.getincrementaldecoder("utf-8-sig")()
s = u"spam"
self.assertEqual(d.decode(s.encode("utf-8-sig")), s)
def test_stream_bom(self):
unistring = u"ABC\u00A1\u2200XYZ"
bytestring = codecs.BOM_UTF8 + "ABC\xC2\xA1\xE2\x88\x80XYZ"
reader = codecs.getreader("utf-8-sig")
for sizehint in [None] + range(1, 11) + \
[64, 128, 256, 512, 1024]:
istream = reader(StringIO.StringIO(bytestring))
ostream = StringIO.StringIO()
while 1:
if sizehint is not None:
data = istream.read(sizehint)
else:
data = istream.read()
if not data:
break
ostream.write(data)
got = ostream.getvalue()
self.assertEqual(got, unistring)
def test_stream_bare(self):
unistring = u"ABC\u00A1\u2200XYZ"
bytestring = "ABC\xC2\xA1\xE2\x88\x80XYZ"
reader = codecs.getreader("utf-8-sig")
for sizehint in [None] + range(1, 11) + \
[64, 128, 256, 512, 1024]:
istream = reader(StringIO.StringIO(bytestring))
ostream = StringIO.StringIO()
while 1:
if sizehint is not None:
data = istream.read(sizehint)
else:
data = istream.read()
if not data:
break
ostream.write(data)
got = ostream.getvalue()
self.assertEqual(got, unistring)
class EscapeDecodeTest(unittest.TestCase):
def test_empty(self):
self.assertEqual(codecs.escape_decode(""), ("", 0))
class RecodingTest(unittest.TestCase):
def test_recoding(self):
f = StringIO.StringIO()
f2 = codecs.EncodedFile(f, "unicode_internal", "utf-8")
# f2.write(u"a")
# Must be bytes in Jython (and probably should have been in CPython)
f2.write(b"\x00\x00\x00\x61")
f2.close()
# Python used to crash on this at exit because of a refcount
# bug in _codecsmodule.c
# From RFC 3492
punycode_testcases = [
# A Arabic (Egyptian):
(u"\u0644\u064A\u0647\u0645\u0627\u0628\u062A\u0643\u0644"
u"\u0645\u0648\u0634\u0639\u0631\u0628\u064A\u061F",
"egbpdaj6bu4bxfgehfvwxn"),
# B Chinese (simplified):
(u"\u4ED6\u4EEC\u4E3A\u4EC0\u4E48\u4E0D\u8BF4\u4E2D\u6587",
"ihqwcrb4cv8a8dqg056pqjye"),
# C Chinese (traditional):
(u"\u4ED6\u5011\u7232\u4EC0\u9EBD\u4E0D\u8AAA\u4E2D\u6587",
"ihqwctvzc91f659drss3x8bo0yb"),
# D Czech: Pro<ccaron>prost<ecaron>nemluv<iacute><ccaron>esky
(u"\u0050\u0072\u006F\u010D\u0070\u0072\u006F\u0073\u0074"
u"\u011B\u006E\u0065\u006D\u006C\u0075\u0076\u00ED\u010D"
u"\u0065\u0073\u006B\u0079",
"Proprostnemluvesky-uyb24dma41a"),
# E Hebrew:
(u"\u05DC\u05DE\u05D4\u05D4\u05DD\u05E4\u05E9\u05D5\u05D8"
u"\u05DC\u05D0\u05DE\u05D3\u05D1\u05E8\u05D9\u05DD\u05E2"
u"\u05D1\u05E8\u05D9\u05EA",
"4dbcagdahymbxekheh6e0a7fei0b"),
# F Hindi (Devanagari):
(u"\u092F\u0939\u0932\u094B\u0917\u0939\u093F\u0928\u094D"
u"\u0926\u0940\u0915\u094D\u092F\u094B\u0902\u0928\u0939"
u"\u0940\u0902\u092C\u094B\u0932\u0938\u0915\u0924\u0947"
u"\u0939\u0948\u0902",
"i1baa7eci9glrd9b2ae1bj0hfcgg6iyaf8o0a1dig0cd"),
#(G) Japanese (kanji and hiragana):
(u"\u306A\u305C\u307F\u3093\u306A\u65E5\u672C\u8A9E\u3092"
u"\u8A71\u3057\u3066\u304F\u308C\u306A\u3044\u306E\u304B",
"n8jok5ay5dzabd5bym9f0cm5685rrjetr6pdxa"),
# (H) Korean (Hangul syllables):
(u"\uC138\uACC4\uC758\uBAA8\uB4E0\uC0AC\uB78C\uB4E4\uC774"
u"\uD55C\uAD6D\uC5B4\uB97C\uC774\uD574\uD55C\uB2E4\uBA74"
u"\uC5BC\uB9C8\uB098\uC88B\uC744\uAE4C",
"989aomsvi5e83db1d2a355cv1e0vak1dwrv93d5xbh15a0dt30a5j"
"psd879ccm6fea98c"),
# (I) Russian (Cyrillic):
(u"\u043F\u043E\u0447\u0435\u043C\u0443\u0436\u0435\u043E"
u"\u043D\u0438\u043D\u0435\u0433\u043E\u0432\u043E\u0440"
u"\u044F\u0442\u043F\u043E\u0440\u0443\u0441\u0441\u043A"
u"\u0438",
"b1abfaaepdrnnbgefbaDotcwatmq2g4l"),
# (J) Spanish: Porqu<eacute>nopuedensimplementehablarenEspa<ntilde>ol
(u"\u0050\u006F\u0072\u0071\u0075\u00E9\u006E\u006F\u0070"
u"\u0075\u0065\u0064\u0065\u006E\u0073\u0069\u006D\u0070"
u"\u006C\u0065\u006D\u0065\u006E\u0074\u0065\u0068\u0061"
u"\u0062\u006C\u0061\u0072\u0065\u006E\u0045\u0073\u0070"
u"\u0061\u00F1\u006F\u006C",
"PorqunopuedensimplementehablarenEspaol-fmd56a"),
# (K) Vietnamese:
# T<adotbelow>isaoh<odotbelow>kh<ocirc>ngth<ecirchookabove>ch\
# <ihookabove>n<oacute>iti<ecircacute>ngVi<ecircdotbelow>t
(u"\u0054\u1EA1\u0069\u0073\u0061\u006F\u0068\u1ECD\u006B"
u"\u0068\u00F4\u006E\u0067\u0074\u0068\u1EC3\u0063\u0068"
u"\u1EC9\u006E\u00F3\u0069\u0074\u0069\u1EBF\u006E\u0067"
u"\u0056\u0069\u1EC7\u0074",
"TisaohkhngthchnitingVit-kjcr8268qyxafd2f1b9g"),
#(L) 3<nen>B<gumi><kinpachi><sensei>
(u"\u0033\u5E74\u0042\u7D44\u91D1\u516B\u5148\u751F",
"3B-ww4c5e180e575a65lsy2b"),
# (M) <amuro><namie>-with-SUPER-MONKEYS
(u"\u5B89\u5BA4\u5948\u7F8E\u6075\u002D\u0077\u0069\u0074"
u"\u0068\u002D\u0053\u0055\u0050\u0045\u0052\u002D\u004D"
u"\u004F\u004E\u004B\u0045\u0059\u0053",
"-with-SUPER-MONKEYS-pc58ag80a8qai00g7n9n"),
# (N) Hello-Another-Way-<sorezore><no><basho>
(u"\u0048\u0065\u006C\u006C\u006F\u002D\u0041\u006E\u006F"
u"\u0074\u0068\u0065\u0072\u002D\u0057\u0061\u0079\u002D"
u"\u305D\u308C\u305E\u308C\u306E\u5834\u6240",
"Hello-Another-Way--fc4qua05auwb3674vfr0b"),
# (O) <hitotsu><yane><no><shita>2
(u"\u3072\u3068\u3064\u5C4B\u6839\u306E\u4E0B\u0032",
"2-u9tlzr9756bt3uc0v"),
# (P) Maji<de>Koi<suru>5<byou><mae>
(u"\u004D\u0061\u006A\u0069\u3067\u004B\u006F\u0069\u3059"
u"\u308B\u0035\u79D2\u524D",
"MajiKoi5-783gue6qz075azm5e"),
# (Q) <pafii>de<runba>
(u"\u30D1\u30D5\u30A3\u30FC\u0064\u0065\u30EB\u30F3\u30D0",
"de-jg4avhby1noc0d"),
# (R) <sono><supiido><de>
(u"\u305D\u306E\u30B9\u30D4\u30FC\u30C9\u3067",
"d9juau41awczczp"),
# (S) -> $1.00 <-
(u"\u002D\u003E\u0020\u0024\u0031\u002E\u0030\u0030\u0020"
u"\u003C\u002D",
"-> $1.00 <--")
]
for i in punycode_testcases:
if len(i)!=2:
print repr(i)
class PunycodeTest(unittest.TestCase):
def test_encode(self):
for uni, puny in punycode_testcases:
# Need to convert both strings to lower case, since
# some of the extended encodings use upper case, but our
# code produces only lower case. Converting just puny to
# lower is also insufficient, since some of the input characters
# are upper case.
self.assertEqual(uni.encode("punycode").lower(), puny.lower())
def test_decode(self):
for uni, puny in punycode_testcases:
self.assertEqual(uni, puny.decode("punycode"))
class UnicodeInternalTest(unittest.TestCase):
def test_bug1251300(self):
# Decoding with unicode_internal used to not correctly handle "code
# points" above 0x10ffff on UCS-4 builds.
if sys.maxunicode > 0xffff:
ok = [
("\x00\x10\xff\xff", u"\U0010ffff"),
("\x00\x00\x01\x01", u"\U00000101"),
("", u""),
]
not_ok = [
"\x7f\xff\xff\xff",
"\x80\x00\x00\x00",
"\x81\x00\x00\x00",
"\x00",
"\x00\x00\x00\x00\x00",
]
for internal, uni in ok:
if sys.byteorder == "little":
internal = "".join(reversed(internal))
self.assertEqual(uni, internal.decode("unicode_internal"))
for internal in not_ok:
if sys.byteorder == "little":
internal = "".join(reversed(internal))
self.assertRaises(UnicodeDecodeError, internal.decode,
"unicode_internal")
def test_decode_error_attributes(self):
if sys.maxunicode > 0xffff:
try:
"\x00\x00\x00\x00\x00\x11\x11\x00".decode("unicode_internal")
except UnicodeDecodeError, ex:
if test_support.is_jython:
# Jython delegates internally to utf-32be and it shows here
self.assertEqual("utf-32", ex.encoding)
else:
self.assertEqual("unicode_internal", ex.encoding)
self.assertEqual("\x00\x00\x00\x00\x00\x11\x11\x00", ex.object)
self.assertEqual(4, ex.start)
self.assertEqual(8, ex.end)
else:
self.fail("UnicodeDecodeError not raised")
def test_decode_callback(self):
if sys.maxunicode > 0xffff:
codecs.register_error("UnicodeInternalTest", codecs.ignore_errors)
decoder = codecs.getdecoder("unicode_internal")
ab = u"ab".encode("unicode_internal")
ignored = decoder("%s\x22\x22\x22\x22%s" % (ab[:4], ab[4:]),
"UnicodeInternalTest")
self.assertEqual((u"ab", 12), ignored)
def test_encode_length(self):
# Issue 3739
encoder = codecs.getencoder("unicode_internal")
self.assertEqual(encoder(u"a")[1], 1)
self.assertEqual(encoder(u"\xe9\u0142")[1], 2)
encoder = codecs.getencoder("string-escape")
self.assertEqual(encoder(r'\x00')[1], 4)
# From http://www.gnu.org/software/libidn/draft-josefsson-idn-test-vectors.html
nameprep_tests = [
# 3.1 Map to nothing.
('foo\xc2\xad\xcd\x8f\xe1\xa0\x86\xe1\xa0\x8bbar'
'\xe2\x80\x8b\xe2\x81\xa0baz\xef\xb8\x80\xef\xb8\x88\xef'
'\xb8\x8f\xef\xbb\xbf',
'foobarbaz'),
# 3.2 Case folding ASCII U+0043 U+0041 U+0046 U+0045.
('CAFE',
'cafe'),
# 3.3 Case folding 8bit U+00DF (german sharp s).
# The original test case is bogus; it says \xc3\xdf
('\xc3\x9f',
'ss'),
# 3.4 Case folding U+0130 (turkish capital I with dot).
('\xc4\xb0',
'i\xcc\x87'),
# 3.5 Case folding multibyte U+0143 U+037A.
('\xc5\x83\xcd\xba',
'\xc5\x84 \xce\xb9'),
# 3.6 Case folding U+2121 U+33C6 U+1D7BB.
# XXX: skip this as it fails in UCS-2 mode
#('\xe2\x84\xa1\xe3\x8f\x86\xf0\x9d\x9e\xbb',
# 'telc\xe2\x88\x95kg\xcf\x83'),
(None, None),
# 3.7 Normalization of U+006a U+030c U+00A0 U+00AA.
('j\xcc\x8c\xc2\xa0\xc2\xaa',
'\xc7\xb0 a'),
# 3.8 Case folding U+1FB7 and normalization.
('\xe1\xbe\xb7',
'\xe1\xbe\xb6\xce\xb9'),
# 3.9 Self-reverting case folding U+01F0 and normalization.
# The original test case is bogus, it says `\xc7\xf0'
('\xc7\xb0',
'\xc7\xb0'),
# 3.10 Self-reverting case folding U+0390 and normalization.
('\xce\x90',
'\xce\x90'),
# 3.11 Self-reverting case folding U+03B0 and normalization.
('\xce\xb0',
'\xce\xb0'),
# 3.12 Self-reverting case folding U+1E96 and normalization.
('\xe1\xba\x96',
'\xe1\xba\x96'),
# 3.13 Self-reverting case folding U+1F56 and normalization.
('\xe1\xbd\x96',
'\xe1\xbd\x96'),
# 3.14 ASCII space character U+0020.
(' ',
' '),
# 3.15 Non-ASCII 8bit space character U+00A0.
('\xc2\xa0',
' '),
# 3.16 Non-ASCII multibyte space character U+1680.
('\xe1\x9a\x80',
None),
# 3.17 Non-ASCII multibyte space character U+2000.
('\xe2\x80\x80',
' '),
# 3.18 Zero Width Space U+200b.
('\xe2\x80\x8b',
''),
# 3.19 Non-ASCII multibyte space character U+3000.
('\xe3\x80\x80',
' '),
# 3.20 ASCII control characters U+0010 U+007F.
('\x10\x7f',
'\x10\x7f'),
# 3.21 Non-ASCII 8bit control character U+0085.
('\xc2\x85',
None),
# 3.22 Non-ASCII multibyte control character U+180E.
('\xe1\xa0\x8e',
None),
# 3.23 Zero Width No-Break Space U+FEFF.
('\xef\xbb\xbf',
''),
# 3.24 Non-ASCII control character U+1D175.
('\xf0\x9d\x85\xb5',
None),
# 3.25 Plane 0 private use character U+F123.
('\xef\x84\xa3',
None),
# 3.26 Plane 15 private use character U+F1234.
('\xf3\xb1\x88\xb4',
None),
# 3.27 Plane 16 private use character U+10F234.
('\xf4\x8f\x88\xb4',
None),
# 3.28 Non-character code point U+8FFFE.
('\xf2\x8f\xbf\xbe',
None),
# 3.29 Non-character code point U+10FFFF.
('\xf4\x8f\xbf\xbf',
None),
# 3.30 Surrogate code U+DF42.
# THIS IS NOT LEGAL IN JYTHON so omitting
# ('\xed\xbd\x82',
# None),
# 3.31 Non-plain text character U+FFFD.
('\xef\xbf\xbd',
None),
# 3.32 Ideographic description character U+2FF5.
('\xe2\xbf\xb5',
None),
# 3.33 Display property character U+0341.
('\xcd\x81',
'\xcc\x81'),
# 3.34 Left-to-right mark U+200E.
('\xe2\x80\x8e',
None),
# 3.35 Deprecated U+202A.
('\xe2\x80\xaa',
None),
# 3.36 Language tagging character U+E0001.
('\xf3\xa0\x80\x81',
None),
# 3.37 Language tagging character U+E0042.
('\xf3\xa0\x81\x82',
None),
# 3.38 Bidi: RandALCat character U+05BE and LCat characters.
('foo\xd6\xbebar',
None),
# 3.39 Bidi: RandALCat character U+FD50 and LCat characters.
('foo\xef\xb5\x90bar',
None),
# 3.40 Bidi: RandALCat character U+FB38 and LCat characters.
('foo\xef\xb9\xb6bar',
'foo \xd9\x8ebar'),
# 3.41 Bidi: RandALCat without trailing RandALCat U+0627 U+0031.
('\xd8\xa71',
None),
# 3.42 Bidi: RandALCat character U+0627 U+0031 U+0628.
('\xd8\xa71\xd8\xa8',
'\xd8\xa71\xd8\xa8'),
# 3.43 Unassigned code point U+E0002.
# Skip this test as we allow unassigned
#('\xf3\xa0\x80\x82',
# None),
(None, None),
# 3.44 Larger test (shrinking).
# Original test case reads \xc3\xdf
('X\xc2\xad\xc3\x9f\xc4\xb0\xe2\x84\xa1j\xcc\x8c\xc2\xa0\xc2'
'\xaa\xce\xb0\xe2\x80\x80',
'xssi\xcc\x87tel\xc7\xb0 a\xce\xb0 '),
# 3.45 Larger test (expanding).
# Original test case reads \xc3\x9f
('X\xc3\x9f\xe3\x8c\x96\xc4\xb0\xe2\x84\xa1\xe2\x92\x9f\xe3\x8c'
'\x80',
'xss\xe3\x82\xad\xe3\x83\xad\xe3\x83\xa1\xe3\x83\xbc\xe3'
'\x83\x88\xe3\x83\xabi\xcc\x87tel\x28d\x29\xe3\x82'
'\xa2\xe3\x83\x91\xe3\x83\xbc\xe3\x83\x88')
]
class NameprepTest(unittest.TestCase):
def test_nameprep(self):
from encodings.idna import nameprep
for pos, (orig, prepped) in enumerate(nameprep_tests):
if orig is None:
# Skipped
continue
# The Unicode strings are given in UTF-8
orig = unicode(orig, "utf-8")
if prepped is None:
# Input contains prohibited characters
self.assertRaises(UnicodeError, nameprep, orig)
else:
prepped = unicode(prepped, "utf-8")
try:
self.assertEqual(nameprep(orig), prepped)
except Exception,e:
raise test_support.TestFailed("Test 3.%d: %s" % (pos+1, str(e)))
class IDNACodecTest(unittest.TestCase):
def test_builtin_decode(self):
self.assertEqual(unicode("python.org", "idna"), u"python.org")
self.assertEqual(unicode("python.org.", "idna"), u"python.org.")
self.assertEqual(unicode("xn--pythn-mua.org", "idna"), u"pyth\xf6n.org")
self.assertEqual(unicode("xn--pythn-mua.org.", "idna"), u"pyth\xf6n.org.")
def test_builtin_encode(self):
self.assertEqual(u"python.org".encode("idna"), "python.org")
self.assertEqual("python.org.".encode("idna"), "python.org.")
self.assertEqual(u"pyth\xf6n.org".encode("idna"), "xn--pythn-mua.org")
self.assertEqual(u"pyth\xf6n.org.".encode("idna"), "xn--pythn-mua.org.")
def test_stream(self):
import StringIO
r = codecs.getreader("idna")(StringIO.StringIO("abc"))
r.read(3)
self.assertEqual(r.read(), u"")
def test_incremental_decode(self):
self.assertEqual(
"".join(codecs.iterdecode("python.org", "idna")),
u"python.org"
)
self.assertEqual(
"".join(codecs.iterdecode("python.org.", "idna")),
u"python.org."
)
self.assertEqual(
"".join(codecs.iterdecode("xn--pythn-mua.org.", "idna")),
u"pyth\xf6n.org."
)
self.assertEqual(
"".join(codecs.iterdecode("xn--pythn-mua.org.", "idna")),
u"pyth\xf6n.org."
)
decoder = codecs.getincrementaldecoder("idna")()
self.assertEqual(decoder.decode("xn--xam", ), u"")
self.assertEqual(decoder.decode("ple-9ta.o", ), u"\xe4xample.")
self.assertEqual(decoder.decode(u"rg"), u"")
self.assertEqual(decoder.decode(u"", True), u"org")
decoder.reset()
self.assertEqual(decoder.decode("xn--xam", ), u"")
self.assertEqual(decoder.decode("ple-9ta.o", ), u"\xe4xample.")
self.assertEqual(decoder.decode("rg."), u"org.")
self.assertEqual(decoder.decode("", True), u"")
def test_incremental_encode(self):
self.assertEqual(
"".join(codecs.iterencode(u"python.org", "idna")),
"python.org"
)
self.assertEqual(
"".join(codecs.iterencode(u"python.org.", "idna")),
"python.org."
)
self.assertEqual(
"".join(codecs.iterencode(u"pyth\xf6n.org.", "idna")),
"xn--pythn-mua.org."
)
self.assertEqual(
"".join(codecs.iterencode(u"pyth\xf6n.org.", "idna")),
"xn--pythn-mua.org."
)
encoder = codecs.getincrementalencoder("idna")()
self.assertEqual(encoder.encode(u"\xe4x"), "")
self.assertEqual(encoder.encode(u"ample.org"), "xn--xample-9ta.")
self.assertEqual(encoder.encode(u"", True), "org")
encoder.reset()
self.assertEqual(encoder.encode(u"\xe4x"), "")
self.assertEqual(encoder.encode(u"ample.org."), "xn--xample-9ta.org.")
self.assertEqual(encoder.encode(u"", True), "")
class CodecsModuleTest(unittest.TestCase):
def test_decode(self):
self.assertEqual(codecs.decode('\xe4\xf6\xfc', 'latin-1'),
u'\xe4\xf6\xfc')
self.assertRaises(TypeError, codecs.decode)
self.assertEqual(codecs.decode('abc'), u'abc')
self.assertRaises(UnicodeDecodeError, codecs.decode, '\xff', 'ascii')
def test_encode(self):
self.assertEqual(codecs.encode(u'\xe4\xf6\xfc', 'latin-1'),
'\xe4\xf6\xfc')
self.assertRaises(TypeError, codecs.encode)
self.assertRaises(LookupError, codecs.encode, u"foo", "__spam__")
self.assertEqual(codecs.encode(u'abc'), 'abc')
self.assertRaises(UnicodeEncodeError, codecs.encode, u'\xffff', 'ascii')
def test_register(self):
self.assertRaises(TypeError, codecs.register)
self.assertRaises(TypeError, codecs.register, 42)
def test_lookup(self):
self.assertRaises(TypeError, codecs.lookup)
self.assertRaises(LookupError, codecs.lookup, "__spam__")
self.assertRaises(LookupError, codecs.lookup, " ")
def test_getencoder(self):
self.assertRaises(TypeError, codecs.getencoder)
self.assertRaises(LookupError, codecs.getencoder, "__spam__")
def test_getdecoder(self):
self.assertRaises(TypeError, codecs.getdecoder)
self.assertRaises(LookupError, codecs.getdecoder, "__spam__")
def test_getreader(self):
self.assertRaises(TypeError, codecs.getreader)
self.assertRaises(LookupError, codecs.getreader, "__spam__")
def test_getwriter(self):
self.assertRaises(TypeError, codecs.getwriter)
self.assertRaises(LookupError, codecs.getwriter, "__spam__")
def test_lookup_issue1813(self):
# Issue #1813: under Turkish locales, lookup of some codecs failed
# because 'I' is lowercased as a dotless "i"
oldlocale = locale.getlocale(locale.LC_CTYPE)
self.addCleanup(locale.setlocale, locale.LC_CTYPE, oldlocale)
try:
locale.setlocale(locale.LC_CTYPE, 'tr_TR')
except locale.Error:
# Unsupported locale on this system
self.skipTest('test needs Turkish locale')
c = codecs.lookup('ASCII')
self.assertEqual(c.name, 'ascii')
class StreamReaderTest(unittest.TestCase):
def setUp(self):
self.reader = codecs.getreader('utf-8')
self.stream = StringIO.StringIO('\xed\x95\x9c\n\xea\xb8\x80')
def test_readlines(self):
f = self.reader(self.stream)
self.assertEqual(f.readlines(), [u'\ud55c\n', u'\uae00'])
class EncodedFileTest(unittest.TestCase):
def test_basic(self):
f = StringIO.StringIO('\xed\x95\x9c\n\xea\xb8\x80')
ef = codecs.EncodedFile(f, 'utf-16-le', 'utf-8')
self.assertEqual(ef.read(), '\\\xd5\n\x00\x00\xae')
f = StringIO.StringIO()
ef = codecs.EncodedFile(f, 'utf-8', 'latin1')
ef.write('\xc3\xbc')
self.assertEqual(f.getvalue(), '\xfc')
class Str2StrTest(unittest.TestCase):
def test_read(self):
sin = "\x80".encode("base64_codec")
reader = codecs.getreader("base64_codec")(StringIO.StringIO(sin))
sout = reader.read()
self.assertEqual(sout, "\x80")
self.assertIsInstance(sout, str)
def test_readline(self):
sin = "\x80".encode("base64_codec")
reader = codecs.getreader("base64_codec")(StringIO.StringIO(sin))
sout = reader.readline()
self.assertEqual(sout, "\x80")
self.assertIsInstance(sout, str)
all_unicode_encodings = [
"ascii",
"base64_codec",
"big5",
"big5hkscs",
"charmap",
"cp037",
"cp1006",
"cp1026",
"cp1140",
"cp1250",
"cp1251",
"cp1252",
"cp1253",
"cp1254",
"cp1255",
"cp1256",
"cp1257",
"cp1258",
"cp424",
"cp437",
"cp500",
"cp720",
"cp737",
"cp775",
"cp850",
"cp852",
"cp855",
"cp856",
"cp857",
"cp858",
"cp860",
"cp861",
"cp862",
"cp863",
"cp864",
"cp865",
"cp866",
"cp869",
"cp874",
"cp875",
"cp932",
"cp949",
"cp950",
# "euc_jis_2004", # Not available on Java
# 'euc_jisx0213', # Not available on Java
'euc_jp',
'euc_kr',
'gb18030',
'gb2312',
'gbk',
"hex_codec",
"hp_roman8",
# 'hz', # Not available on Java
"idna",
'iso2022_jp',
# 'iso2022_jp_1', # Not available on Java
'iso2022_jp_2',
# 'iso2022_jp_2004', # Not available on Java
# 'iso2022_jp_3', # Not available on Java
# 'iso2022_jp_ext', # Not available on Java
'iso2022_kr',
"iso8859_1",
"iso8859_10",
"iso8859_11",
"iso8859_13",
"iso8859_14",
"iso8859_15",
"iso8859_16",
"iso8859_2",
"iso8859_3",
"iso8859_4",
"iso8859_5",
"iso8859_6",
"iso8859_7",
"iso8859_8",
"iso8859_9",
'johab',
"koi8_r",
"koi8_u",
"latin_1",
"mac_cyrillic",
"mac_greek",
"mac_iceland",
"mac_latin2",
"mac_roman",
"mac_turkish",
"palmos",
"ptcp154",
"punycode",
"raw_unicode_escape",
"rot_13",
"shift_jis",
#'shift_jis_2004', # Not available on Java
'shift_jisx0213',
"tis_620",
"unicode_escape",
"unicode_internal",
"utf_16",
"utf_16_be",
"utf_16_le",
"utf_7",
"utf_8",
]
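# The mbcs codec is only available on Windows builds, so add it
# conditionally.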
if hasattr(codecs, "mbcs_encode"):
all_unicode_encodings.append("mbcs")
# The following encodings work only with str, not unicode
all_string_encodings = [
"quopri_codec",
"string_escape",
"uu_codec",
]
# The following encoding is not tested, because it's not supposed
# to work:
# "undefined"
# The following encodings don't work in stateful mode
broken_unicode_with_streams = [
"base64_codec",
"hex_codec",
"punycode",
"unicode_internal"
]
broken_incremental_coders = broken_unicode_with_streams[:]
# The following encodings only support "strict" mode
only_strict_mode = [
"idna",
"zlib_codec",
"bz2_codec",
]
try:
import bz2
except ImportError:
pass
else:
all_unicode_encodings.append("bz2_codec")
broken_unicode_with_streams.append("bz2_codec")
try:
import zlib
except ImportError:
pass
else:
all_unicode_encodings.append("zlib_codec")
broken_unicode_with_streams.append("zlib_codec")
class BasicUnicodeTest(unittest.TestCase):
@unittest.skipIf(test_support.is_jython, "_testcapi module not present in Jython")
def test_basics(self):
s = u"abc123" # all codecs should be able to encode these
for encoding in all_unicode_encodings:
name = codecs.lookup(encoding).name
if encoding.endswith("_codec"):
name += "_codec"
elif encoding == "latin_1":
name = "latin_1"
self.assertEqual(encoding.replace("_", "-"), name.replace("_", "-"))
(bytes, size) = codecs.getencoder(encoding)(s)
self.assertEqual(size, len(s), "%r != %r (encoding=%r)" % (size, len(s), encoding))
(chars, size) = codecs.getdecoder(encoding)(bytes)
self.assertEqual(chars, s, "%r != %r (encoding=%r)" % (chars, s, encoding))
if encoding not in broken_unicode_with_streams:
# check stream reader/writer
q = Queue()
writer = codecs.getwriter(encoding)(q)
encodedresult = ""
for c in s:
writer.write(c)
encodedresult += q.read()
q = Queue()
reader = codecs.getreader(encoding)(q)
decodedresult = u""
for c in encodedresult:
q.write(c)
decodedresult += reader.read()
self.assertEqual(decodedresult, s, "%r != %r (encoding=%r)" % (decodedresult, s, encoding))
if encoding not in broken_incremental_coders:
# check incremental decoder/encoder (fetched via the Python
# and C API) and iterencode()/iterdecode()
try:
encoder = codecs.getincrementalencoder(encoding)()
cencoder = _testcapi.codec_incrementalencoder(encoding)
except LookupError: # no IncrementalEncoder
pass
else:
# check incremental decoder/encoder
encodedresult = ""
for c in s:
encodedresult += encoder.encode(c)
encodedresult += encoder.encode(u"", True)
decoder = codecs.getincrementaldecoder(encoding)()
decodedresult = u""
for c in encodedresult:
decodedresult += decoder.decode(c)
decodedresult += decoder.decode("", True)
self.assertEqual(decodedresult, s, "%r != %r (encoding=%r)" % (decodedresult, s, encoding))
# check C API
encodedresult = ""
for c in s:
encodedresult += cencoder.encode(c)
encodedresult += cencoder.encode(u"", True)
cdecoder = _testcapi.codec_incrementaldecoder(encoding)
decodedresult = u""
for c in encodedresult:
decodedresult += cdecoder.decode(c)
decodedresult += cdecoder.decode("", True)
self.assertEqual(decodedresult, s, "%r != %r (encoding=%r)" % (decodedresult, s, encoding))
# check iterencode()/iterdecode()
result = u"".join(codecs.iterdecode(codecs.iterencode(s, encoding), encoding))
self.assertEqual(result, s, "%r != %r (encoding=%r)" % (result, s, encoding))
# check iterencode()/iterdecode() with empty string
result = u"".join(codecs.iterdecode(codecs.iterencode(u"", encoding), encoding))
self.assertEqual(result, u"")
if encoding not in only_strict_mode:
# check incremental decoder/encoder with errors argument
try:
encoder = codecs.getincrementalencoder(encoding)("ignore")
cencoder = _testcapi.codec_incrementalencoder(encoding, "ignore")
except LookupError: # no IncrementalEncoder
pass
else:
encodedresult = "".join(encoder.encode(c) for c in s)
decoder = codecs.getincrementaldecoder(encoding)("ignore")
decodedresult = u"".join(decoder.decode(c) for c in encodedresult)
self.assertEqual(decodedresult, s, "%r != %r (encoding=%r)" % (decodedresult, s, encoding))
encodedresult = "".join(cencoder.encode(c) for c in s)
cdecoder = _testcapi.codec_incrementaldecoder(encoding, "ignore")
decodedresult = u"".join(cdecoder.decode(c) for c in encodedresult)
self.assertEqual(decodedresult, s, "%r != %r (encoding=%r)" % (decodedresult, s, encoding))
def test_seek(self):
# all codecs - except idna on Java - should be able to encode these
s1 = u"%s\n%s\n" % (100*u"abc123", 100*u"def456")
for encoding in all_unicode_encodings:
s = s1
if encoding in broken_unicode_with_streams:
continue
if encoding == "idna":
s = u"%s\n%s\n" % (5*u"abc123", 5*u"def456") # idna encoder rejects as being too long
reader = codecs.getreader(encoding)(StringIO.StringIO(s.encode(encoding)))
for t in xrange(5):
# Test that calling seek resets the internal codec state and buffers
reader.seek(0, 0)
line = reader.readline()
self.assertEqual(s[:len(line)], line)
def test_bad_decode_args(self):
for encoding in all_unicode_encodings:
decoder = codecs.getdecoder(encoding)
self.assertRaises(TypeError, decoder)
if encoding not in ("idna", "punycode"):
self.assertRaises(TypeError, decoder, 42)
def test_bad_encode_args(self):
for encoding in all_unicode_encodings:
encoder = codecs.getencoder(encoding)
self.assertRaises(TypeError, encoder)
def test_encoding_map_type_initialized(self):
from encodings import cp1140
# This used to crash; we are only verifying that there is no crash.
table_type = type(cp1140.encoding_table)
self.assertEqual(table_type, table_type)
class BasicStrTest(unittest.TestCase):
def test_basics(self):
s = "abc123"
for encoding in all_string_encodings:
(bytes, size) = codecs.getencoder(encoding)(s)
self.assertEqual(size, len(s))
(chars, size) = codecs.getdecoder(encoding)(bytes)
self.assertEqual(chars, s, "%r != %r (encoding=%r)" % (chars, s, encoding))
class CharmapTest(unittest.TestCase):
def test_decode_with_string_map(self):
self.assertEqual(
codecs.charmap_decode("\x00\x01\x02", "strict", u"abc"),
(u"abc", 3)
)
self.assertEqual(
codecs.charmap_decode("\x00\x01\x02", "replace", u"ab"),
(u"ab\ufffd", 3)
)
self.assertEqual(
codecs.charmap_decode("\x00\x01\x02", "replace", u"ab\ufffe"),
(u"ab\ufffd", 3)
)
self.assertEqual(
codecs.charmap_decode("\x00\x01\x02", "ignore", u"ab"),
(u"ab", 3)
)
self.assertEqual(
codecs.charmap_decode("\x00\x01\x02", "ignore", u"ab\ufffe"),
(u"ab", 3)
)
allbytes = "".join(chr(i) for i in xrange(256))
self.assertEqual(
codecs.charmap_decode(allbytes, "ignore", u""),
(u"", len(allbytes))
)
class WithStmtTest(unittest.TestCase):
def test_encodedfile(self):
f = StringIO.StringIO("\xc3\xbc")
with codecs.EncodedFile(f, "latin-1", "utf-8") as ef:
self.assertEqual(ef.read(), "\xfc")
def test_streamreaderwriter(self):
f = StringIO.StringIO("\xc3\xbc")
info = codecs.lookup("utf-8")
with codecs.StreamReaderWriter(f, info.streamreader,
info.streamwriter, 'strict') as srw:
self.assertEqual(srw.read(), u"\xfc")
class BomTest(unittest.TestCase):
def test_seek0(self):
data = u"1234567890"
tests = ("utf-16",
"utf-16-le",
"utf-16-be",
"utf-32",
"utf-32-le",
"utf-32-be",
)
self.addCleanup(test_support.unlink, test_support.TESTFN)
for encoding in tests:
# Check if the BOM is written only once
with codecs.open(test_support.TESTFN, 'w+', encoding=encoding) as f:
f.write(data)
f.write(data)
f.seek(0)
self.assertEqual(f.read(), data * 2)
f.seek(0)
self.assertEqual(f.read(), data * 2)
# Check that the BOM is written after a seek(0)
with codecs.open(test_support.TESTFN, 'w+', encoding=encoding) as f:
f.write(data[0])
self.assertNotEqual(f.tell(), 0)
f.seek(0)
f.write(data)
f.seek(0)
self.assertEqual(f.read(), data)
# (StreamWriter) Check that the BOM is written after a seek(0)
with codecs.open(test_support.TESTFN, 'w+', encoding=encoding) as f:
f.writer.write(data[0])
self.assertNotEqual(f.writer.tell(), 0)
f.writer.seek(0)
f.writer.write(data)
f.seek(0)
self.assertEqual(f.read(), data)
# Check that the BOM is not written after a seek() at a position
# different than the start
with codecs.open(test_support.TESTFN, 'w+', encoding=encoding) as f:
f.write(data)
f.seek(f.tell())
f.write(data)
f.seek(0)
self.assertEqual(f.read(), data * 2)
# (StreamWriter) Check that the BOM is not written after a seek()
# at a position different than the start
with codecs.open(test_support.TESTFN, 'w+', encoding=encoding) as f:
f.writer.write(data)
f.writer.seek(f.writer.tell())
f.writer.write(data)
f.seek(0)
self.assertEqual(f.read(), data * 2)
def test_main():
test_support.run_unittest(
UTF32Test,
UTF32LETest,
UTF32BETest,
UTF16Test,
UTF16LETest,
UTF16BETest,
UTF8Test,
UTF8SigTest,
UTF7Test,
UTF16ExTest,
ReadBufferTest,
CharBufferTest,
EscapeDecodeTest,
RecodingTest,
PunycodeTest,
UnicodeInternalTest,
NameprepTest,
IDNACodecTest,
CodecsModuleTest,
StreamReaderTest,
EncodedFileTest,
Str2StrTest,
BasicUnicodeTest,
BasicStrTest,
CharmapTest,
WithStmtTest,
BomTest,
)
if __name__ == "__main__":
test_main()
|
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
"""
Boot session from cache or build
Session bootstraps info needed by common client side activities including
permission, homepage, default variables, system defaults etc
"""
import frappe, json
from frappe import _
import frappe.utils
from frappe.utils import cint, cstr
import frappe.model.meta
import frappe.defaults
import frappe.translate
from frappe.utils.change_log import get_change_log
import redis
import os
from urllib import unquote
@frappe.whitelist()
def clear(user=None):
frappe.local.session_obj.update(force=True)
frappe.local.db.commit()
clear_cache(frappe.session.user)
clear_global_cache()
frappe.response['message'] = _("Cache Cleared")
def clear_cache(user=None):
cache = frappe.cache()
groups = ("bootinfo", "user_recent", "user_roles", "user_doc", "lang",
"defaults", "user_permissions", "roles", "home_page", "linked_with")
if user:
for name in groups:
cache.hdel(name, user)
cache.delete_keys("user:" + user)
frappe.defaults.clear_cache(user)
else:
for name in groups:
cache.delete_key(name, user)
clear_global_cache()
frappe.defaults.clear_cache()
def clear_global_cache():
frappe.model.meta.clear_cache()
frappe.cache().delete_value(["app_hooks", "installed_apps",
"app_modules", "module_app", "time_zone", "notification_config"])
frappe.setup_module_map()
def clear_sessions(user=None, keep_current=False):
if not user:
user = frappe.session.user
for sid in frappe.db.sql("""select sid from tabSessions where user=%s and device=%s""",
(user, frappe.session.data.device or "desktop")):
if keep_current and frappe.session.sid==sid[0]:
continue
else:
delete_session(sid[0])
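# delete_session removes a single session from both the cache (the
# "session" and "last_db_session_update" hashes) and the tabSessions table.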
def delete_session(sid=None, user=None):
if not user:
user = hasattr(frappe.local, "session") and frappe.session.user or "Guest"
frappe.cache().hdel("session", sid)
frappe.cache().hdel("last_db_session_update", sid)
frappe.db.sql("""delete from tabSessions where sid=%s""", sid)
frappe.db.commit()
def clear_all_sessions():
"""This effectively logs out all users"""
frappe.only_for("Administrator")
for sid in frappe.db.sql_list("select sid from `tabSessions`"):
delete_session(sid)
def clear_expired_sessions():
"""This function is meant to be called from scheduler"""
for device in ("desktop", "mobile"):
for sid in frappe.db.sql_list("""select sid from tabSessions
where TIMEDIFF(NOW(), lastupdate) > TIME(%s)
and device = %s""", (get_expiry_period(device), device)):
delete_session(sid)
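# get() returns the boot info used by the client on login/refresh. Unless
# disable_session_cache is set, the result is cached per user under the
# "bootinfo" key, and only the volatile parts (notifications, recent list)
# are refreshed on a cache hit.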
def get():
"""get session boot info"""
from frappe.desk.notifications import \
get_notification_info_for_boot, get_notifications
from frappe.boot import get_bootinfo
bootinfo = None
if not getattr(frappe.conf,'disable_session_cache', None):
# check if cache exists
bootinfo = frappe.cache().hget("bootinfo", frappe.session.user)
if bootinfo:
bootinfo['from_cache'] = 1
bootinfo["notification_info"].update(get_notifications())
bootinfo["user"]["recent"] = json.dumps(\
frappe.cache().hget("user_recent", frappe.session.user))
if not bootinfo:
# if not, create it
bootinfo = get_bootinfo()
bootinfo["notification_info"] = get_notification_info_for_boot()
frappe.cache().hset("bootinfo", frappe.session.user, bootinfo)
try:
frappe.cache().ping()
except redis.exceptions.ConnectionError:
message = _("Redis cache server not running. Please contact Administrator / Tech support")
if 'messages' in bootinfo:
bootinfo['messages'].append(message)
else:
bootinfo['messages'] = [message]
# check only when clear cache is done, and don't cache this
if frappe.local.request:
bootinfo["change_log"] = get_change_log()
bootinfo["metadata_version"] = frappe.cache().get_value("metadata_version")
if not bootinfo["metadata_version"]:
bootinfo["metadata_version"] = frappe.reset_metadata_version()
for hook in frappe.get_hooks("extend_bootinfo"):
frappe.get_attr(hook)(bootinfo=bootinfo)
bootinfo["lang"] = frappe.translate.get_user_lang()
bootinfo["dev_server"] = os.environ.get('DEV_SERVER', False)
bootinfo["disable_async"] = frappe.conf.disable_async
return bootinfo
def get_csrf_token():
if not frappe.local.session.data.csrf_token:
generate_csrf_token()
return frappe.local.session.data.csrf_token
def generate_csrf_token():
frappe.local.session.data.csrf_token = frappe.generate_hash()
frappe.local.session_obj.update(force=True)
# send sid and csrf token to the user
# handles the case when a user logs in again from another tab,
# which would otherwise lead to invalid requests in the current tab
frappe.publish_realtime(event="csrf_generated",
message={"sid": frappe.local.session.sid, "csrf_token": frappe.local.session.data.csrf_token},
user=frappe.session.user)
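# Session wraps one user session. resume() serves normal (non-login)
# requests by loading the session from the cache, falling back to the
# tabSessions table; start() creates a new sid (or the shared 'Guest' sid)
# and inserts a tabSessions row for real users; update() refreshes the
# cached copy on every call but throttles the database write to roughly
# once every ten minutes unless forced.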
class Session:
def __init__(self, user, resume=False, full_name=None, user_type=None):
self.sid = cstr(frappe.form_dict.get('sid') or
unquote(frappe.request.cookies.get('sid', 'Guest')))
self.user = user
self.device = frappe.form_dict.get("device") or "desktop"
self.user_type = user_type
self.full_name = full_name
self.data = frappe._dict({'data': frappe._dict({})})
self.time_diff = None
# set local session
frappe.local.session = self.data
if resume:
self.resume()
else:
if self.user:
self.start()
def start(self):
"""start a new session"""
# generate sid
if self.user=='Guest':
sid = 'Guest'
else:
sid = frappe.generate_hash()
self.data.user = self.user
self.data.sid = sid
self.data.data.user = self.user
self.data.data.session_ip = frappe.local.request_ip
if self.user != "Guest":
self.data.data.update({
"last_updated": frappe.utils.now(),
"session_expiry": get_expiry_period(self.device),
"full_name": self.full_name,
"user_type": self.user_type,
"device": self.device,
"session_country": get_geo_ip_country(frappe.local.request_ip) if frappe.local.request_ip else None,
})
# insert session
if self.user!="Guest":
self.insert_session_record()
# update user
frappe.db.sql("""UPDATE tabUser SET last_login = %(now)s, last_ip = %(ip)s, last_active = %(now)s
where name=%(name)s""", {
"now": frappe.utils.now(),
"ip": frappe.local.request_ip,
"name": self.data['user']
})
frappe.db.commit()
def insert_session_record(self):
frappe.db.sql("""insert into tabSessions
(sessiondata, user, lastupdate, sid, status, device)
values (%s , %s, NOW(), %s, 'Active', %s)""",
(str(self.data['data']), self.data['user'], self.data['sid'], self.device))
# also add to memcache
frappe.cache().hset("session", self.data.sid, self.data)
def resume(self):
"""non-login request: load a session"""
import frappe
data = self.get_session_record()
if data:
# set language
self.data.update({'data': data, 'user':data.user, 'sid': self.sid})
self.user = data.user
self.device = data.device
else:
self.start_as_guest()
if self.sid != "Guest":
frappe.local.user_lang = frappe.translate.get_user_lang(self.data.user)
frappe.local.lang = frappe.local.user_lang
def get_session_record(self):
"""get session record, or return the standard Guest Record"""
from frappe.auth import clear_cookies
r = self.get_session_data()
if not r:
frappe.response["session_expired"] = 1
clear_cookies()
self.sid = "Guest"
r = self.get_session_data()
return r
def get_session_data(self):
if self.sid=="Guest":
return frappe._dict({"user":"Guest"})
data = self.get_session_data_from_cache()
if not data:
data = self.get_session_data_from_db()
return data
def get_session_data_from_cache(self):
data = frappe.cache().hget("session", self.sid)
if data:
data = frappe._dict(data)
session_data = data.get("data", {})
# set user for correct timezone
self.time_diff = frappe.utils.time_diff_in_seconds(frappe.utils.now(),
session_data.get("last_updated"))
expiry = self.get_expiry_in_seconds(session_data.get("session_expiry"))
if self.time_diff > expiry:
self.delete_session()
data = None
return data and data.data
def get_session_data_from_db(self):
self.device = frappe.db.get_value("Sessions", {"sid": self.sid}, "device") or 'desktop'
rec = frappe.db.sql("""select user, sessiondata
from tabSessions where sid=%s and
TIMEDIFF(NOW(), lastupdate) < TIME(%s)""", (self.sid,
get_expiry_period(self.device)))
if rec:
data = frappe._dict(eval(rec and rec[0][1] or '{}'))
data.user = rec[0][0]
else:
self.delete_session()
data = None
return data
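# session_expiry is stored as "HH:MM:SS"; for example "06:00:00" becomes
# 6 * 3600 = 21600 seconds. A missing value falls back to one hour.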
def get_expiry_in_seconds(self, expiry):
if not expiry:
return 3600
parts = expiry.split(":")
return (cint(parts[0]) * 3600) + (cint(parts[1]) * 60) + cint(parts[2])
def delete_session(self):
delete_session(self.sid, user=self.user)
def start_as_guest(self):
"""all guests share the same 'Guest' session"""
self.user = "Guest"
self.start()
def update(self, force=False):
"""extend session expiry"""
if (frappe.session['user'] == "Guest" or frappe.form_dict.cmd=="logout"):
return
now = frappe.utils.now()
self.data['data']['last_updated'] = now
self.data['data']['lang'] = unicode(frappe.lang)
# update session in db
last_updated = frappe.cache().hget("last_db_session_update", self.sid)
time_diff = frappe.utils.time_diff_in_seconds(now, last_updated) if last_updated else None
# database persistence is secondary, don't update it too often
updated_in_db = False
if force or (time_diff==None) or (time_diff > 600):
# update sessions table
frappe.db.sql("""update tabSessions set sessiondata=%s,
lastupdate=NOW() where sid=%s""" , (str(self.data['data']),
self.data['sid']))
# update last active in user table
frappe.db.sql("""update `tabUser` set last_active=%(now)s where name=%(name)s""", {
"now": frappe.utils.now(),
"name": frappe.session.user
})
frappe.cache().hset("last_db_session_update", self.sid, now)
updated_in_db = True
# set in memcache
frappe.cache().hset("session", self.sid, self.data)
return updated_in_db
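# Expiry periods are kept as "HH:MM:SS" global defaults: mobile sessions
# default to "720:00:00" (30 days), desktop sessions to "06:00:00".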
def get_expiry_period(device="desktop"):
if device=="mobile":
key = "session_expiry_mobile"
default = "720:00:00"
else:
key = "session_expiry"
default = "06:00:00"
exp_sec = frappe.defaults.get_global_default(key) or default
# in case seconds are missing, e.g. "06:00" becomes "06:00:00"
if len(exp_sec.split(':')) == 2:
exp_sec = exp_sec + ':00'
return exp_sec
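# GeoIP lookup is best-effort: it returns None when the optional geoip
# package is not installed or when the address cannot be parsed.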
def get_geo_from_ip(ip_addr):
try:
from geoip import geolite2
return geolite2.lookup(ip_addr)
except ImportError:
return
except ValueError:
return
def get_geo_ip_country(ip_addr):
match = get_geo_from_ip(ip_addr)
if match:
return match.country
|
|
import argparse
import os
import re
import shutil
import subprocess
import tempfile
import xml.etree.cElementTree as xml
import zipfile
from os import path
SRC_ROOTS_REGEX = re.compile(r'^\s*src_roots\s*=\s*(.*)$')
BUCK_FILE_TEMPLATE = """{library_type}(
name = '{name}',
srcs = {sources},
deps = [
{deps}
],
visibility = [
'PUBLIC',
],
)
"""
ANDROID_RESOURCE_TEMPLATE = """android_resource(
name = 'res',
package = '{package}',
res = 'res',
deps = [
],
visibility = [
'PUBLIC',
],
)
"""
ANDROID_BUILD_CONFIG_TEMPLATE = """android_build_config(
name = 'build-config',
package = '{package}',
visibility = [
'PUBLIC',
],
)
"""
REMOTE_DEP_TEMPLATE = """
{prebuilt_type}(
name = '{name}',
{binary_field} = ':{name}-jar',
visibility = [
'PUBLIC',
],
)
remote_file(
name = '{name}-jar',
url = '{repo}:{coordinate}',
sha1 = '{hash}',
)
"""
INTERFACE_FILES_TEMPLATE = """INTERFACE_FILES = [
{0}
]
"""
CYCLE_PREFIX = 'BUILD FAILED: Cycle found: '
THIRD_PARTY_JAR = re.compile(r"^\s*(?:\S*ompile|provided)\s*'(\S*)'$")
MAVEN_COORDINATE = re.compile(r"([^:]+):([^:]+):([^:]+:)?([^:]+)")
CLASS_FILE = re.compile(r'\s(\S+).class$')
JAVA_IMPORT = re.compile(r'import (.*);$')
NAME_DECLARATION = re.compile(r"\s*name\s=\s'(\S*)'.*")
DEP_DECLARATION = re.compile(r"\s*'(\S*)',")
DEPS_START = re.compile(r'\s*deps\s*=\s*\[$')
PACKAGE_DECLARATION = re.compile(r"\s*package\s=\s'(\S*)'.*")
FNULL = open(os.devnull, 'w')
INTERFACE_DECLARATION = re.compile(r'public\s+@?interface\s+.*')
POSSIBLE_MAVEN_TYPES = [('aar', 'android_prebuilt_aar', 'aar'),
('jar', 'prebuilt_jar', 'binary_jar')]
INTERFACE_SUFFIX = '-interfaces'
BUCK_CONFIG_TEMPLATE = r"""[java]
; Indicates that any folder named src or test
; are folders that contain Java code.
src_roots = {src_roots}
[project]
ignore = \
.git, \
.buckd, \
.gradle, \
build, \
proguard
temp_files = \
.*\.swp$, \
^#.*#$, .*~$, \
.*___jb_bak___$, .*___jb_old___$, \
.*\.ap_$
[cache]
mode = dir
dir = buck-cache
dir_max_size = 10GB
[download]
in_build = true
[maven_repositories]
{maven_repositories}
"""
REPOSITORY_START = re.compile(r'repositories \{')
GRADLE_EXTERNAL_REPO = re.compile(
'maven\\s+\\{\\s+url\\s+["\'](.*)["\']\\s+\\}')
REPOSITORY_MAP = {
'jcenter': 'https://jcenter.bintray.com',
'mavenCentral': 'https://repo1.maven.org/maven2',
}
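# Scan a build.gradle for repositories { ... } blocks and collect the
# repository URLs, translating shorthand calls such as jcenter() and
# mavenCentral() via REPOSITORY_MAP and picking up explicit
# maven { url '...' } entries.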
def get_repositories_from_gradle_file(gradle_file_path):
in_repositories = False
result = set()
with open(gradle_file_path, 'r') as gradle_file:
for line in gradle_file.readlines():
repository_start_match = REPOSITORY_START.search(line)
if repository_start_match:
in_repositories = True
elif in_repositories:
if line.strip().startswith('}'):
in_repositories = False
else:
external_repo_match = GRADLE_EXTERNAL_REPO.search(line)
repo_function_name = line.strip().strip('()')
if external_repo_match:
result.add(external_repo_match.group(1))
elif repo_function_name in REPOSITORY_MAP:
result.add(REPOSITORY_MAP[repo_function_name])
return result
def get_source_roots(buckconfig):
src_roots = []
with open(buckconfig, 'r') as buckconfig_file:
for line in buckconfig_file.readlines():
match = SRC_ROOTS_REGEX.match(line)
if match:
src_roots = map(str.strip, match.group(1).split(','))
return src_roots
def format_deps_for_buck_file(deps):
return sorted((" '{0}',".format(dep) for dep in deps))
def is_interface_file(file):
if not args.split_interfaces:
return False
with open(file, 'r') as java_file:
for line in java_file.readlines():
if INTERFACE_DECLARATION.match(line):
return True
return False
def get_interface_files(root, files):
interface_files = set()
for file in (x for x in files if x.endswith('.java')):
if is_interface_file(path.join(root, file)):
interface_files.add(file)
return interface_files
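# Dependency inference: read the import statements of every .java file in
# the package. Imports found in third_party_map become the corresponding
# prebuilt targets; anything else is resolved against the source roots to
# the sibling package rule (with the -interfaces suffix when the imported
# file is an interface). Any android.* / com.android.* import, or a
# dependency that is itself an android library, marks the rule as needing
# android_library.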
def get_deps_for_files(root,
files,
src_roots,
rule_name,
third_party_map,
android_libraries):
deps = set()
has_android_deps = False
for file in (x for x in files if x.endswith('.java')):
with open(path.join(root, file), 'r') as java_file:
for line in java_file.readlines():
match = JAVA_IMPORT.match(line)
if match:
needed_class = match.group(1)
if (needed_class.startswith('android') or
needed_class.startswith('com.android')):
has_android_deps = True
if needed_class in third_party_map:
deps.add(third_party_map[needed_class])
else:
java_file = needed_class.replace('.', '/') + '.java'
for src_root in src_roots:
src_root = src_root.lstrip('/')
java_file_full_path = path.join(
src_root, java_file)
if path.exists(java_file_full_path):
target_basename = path.join(
src_root,
path.dirname(java_file))
# rule name of the dependency's package (distinct from this
# rule's own rule_name, which is used below)
dep_rule_name = path.basename(
path.dirname(java_file))
if is_interface_file(java_file_full_path):
dep_rule_name += INTERFACE_SUFFIX
target = '//{0}:{1}'.format(
target_basename,
dep_rule_name)
if (path.abspath(target_basename) !=
path.abspath(root)):
deps.add(target)
if target in android_libraries:
has_android_deps = True
break
if has_android_deps:
android_libraries.add(rule_name)
return deps, has_android_deps
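# Walk every source root and write one BUCK file per Java package that
# does not already have one. When the split_interfaces option is set,
# plain interface files go into a separate '<package>-interfaces' rule and
# the main rule globs the remaining sources.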
def generate_default_buck_files(buckconfig,
src_roots,
third_party_map,
android_libraries,
default_library_type):
buck_files = []
for src_root in src_roots:
src_root = src_root.lstrip('/')
path_walker = os.walk(path.join(path.dirname(buckconfig), src_root))
for root, dirs, files in path_walker:
if 'BUCK' not in files and any((x for x in files if
x.endswith('.java'))):
interface_files = get_interface_files(root, files)
with open(path.join(root, 'BUCK'), 'w') as buck_file:
if interface_files:
buck_file.write(INTERFACE_FILES_TEMPLATE.format(
', \n'.join((" '%s'" % x for x in
interface_files))
))
interface_rule = path.basename(root) + INTERFACE_SUFFIX
interface_buck_rule = '//{0}:{1}-interfaces'.format(
path.relpath(root),
path.basename(root))
interface_deps, has_android_deps = get_deps_for_files(
root,
interface_files,
src_roots,
interface_buck_rule,
third_party_map,
android_libraries)
interface_library_type = default_library_type
if has_android_deps:
interface_library_type = 'android_library'
buck_file.write(
BUCK_FILE_TEMPLATE.format(
library_type=interface_library_type,
sources='INTERFACE_FILES',
name=interface_rule,
deps='\n'.join(
format_deps_for_buck_file(interface_deps))
))
buck_files.append(interface_buck_rule)
main_buck_rule = '//{0}:{1}'.format(
path.relpath(root),
path.basename(root))
main_rule_deps, has_android_deps = get_deps_for_files(
root,
set(files).difference(
interface_files),
src_roots,
main_buck_rule,
third_party_map,
android_libraries)
main_library_type = default_library_type
if has_android_deps:
main_library_type = 'android_library'
main_rule_srcs = "glob(['*.java'])"
if interface_files:
main_rule_srcs = "glob(['*.java'], " \
"excludes=INTERFACE_FILES)"
buck_file.write(
BUCK_FILE_TEMPLATE.format(
library_type=main_library_type,
sources=main_rule_srcs,
name=path.basename(root),
deps='\n'.join(
format_deps_for_buck_file(
main_rule_deps))
))
buck_files.append(main_buck_rule)
return buck_files
def get_maven_coordinates(gradle_files, gradle_cache):
maven_coordinates = {}
for gradle_file in gradle_files:
maven_coordinates.update(
get_maven_coordinates_for_gradle_file(gradle_file, gradle_cache))
return maven_coordinates
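# Parse compile/provided lines such as
#   compile 'com.squareup.okhttp:okhttp:2.0.0'
# (coordinate shown only as an illustration) into maven coordinates, then
# resolve the artifact type and sha1 hash either from the local Android
# SDK m2repository directories or by searching the gradle cache on disk.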
def get_maven_coordinates_for_gradle_file(gradle_file_path, gradle_cache):
maven_coordinates = {}
with open(gradle_file_path, 'r') as gradle_file:
for line in gradle_file.readlines():
match = THIRD_PARTY_JAR.match(line)
if match:
coordinate_match = MAVEN_COORDINATE.match(match.group(1))
if coordinate_match:
prebuilt_type = 'prebuilt_jar'
binary_field = 'binary_jar'
group = coordinate_match.group(1)
dep_id = coordinate_match.group(2)
repo = 'mvn'
local_maven_repository = None
if coordinate_match.group(3):
dep_type = coordinate_match.group(3).rstrip(':')
else:
if group.startswith('com.google.android'):
local_maven_repository = path.join(
path.expandvars('$ANDROID_HOME'),
'extras/google/m2repository/')
elif group.startswith('com.android'):
local_maven_repository = path.join(
path.expandvars('$ANDROID_HOME'),
'extras/android/m2repository/')
else:
dep_type = 'jar'
version = coordinate_match.group(4)
dep_hash = None
if local_maven_repository:
maven_path = path.join(
local_maven_repository,
group.replace('.', '/'),
dep_id,
version
)
for possible_type in POSSIBLE_MAVEN_TYPES:
maven_sha = path.join(maven_path,
'{dep_id}-{version}.{type}'
'.sha1'
.format(
dep_id=dep_id,
version=version,
type=possible_type[0],
))
if path.exists(maven_sha):
with open(maven_sha, 'r') as maven_sha_file:
dep_type = possible_type[0]
prebuilt_type = possible_type[1]
binary_field = possible_type[2]
dep_hash = maven_sha_file.read()
else:
for possible_type in POSSIBLE_MAVEN_TYPES:
expected_file = path.join(
'{dep_id}-{version}.{type}'
.format(
dep_id=dep_id,
version=version,
type=possible_type[0],
))
walker = os.walk(gradle_cache)
for root, dirs, files in walker:
if dep_hash:
del dirs[:]
for child_file in files:
if child_file == expected_file:
dep_hash = path.basename(root)
dep_type = possible_type[0]
prebuilt_type = possible_type[1]
binary_field = possible_type[2]
del dirs[:]
if not dep_hash:
print "\tCoudn't find a hash for {0}".format(
coordinate_match.group(0))
else:
if len(dep_hash) % 2 != 0:
dep_hash = '0' + dep_hash
coordinate = "{group}:{id}:{type}:{version}".format(
group=group,
id=dep_id,
type=dep_type,
version=version,
)
maven_coordinates[coordinate] = {
'name': dep_id,
'repo': repo,
'prebuilt_type': prebuilt_type,
'binary_field': binary_field,
'coordinate': coordinate,
'hash': dep_hash
}
else:
print "Couldn't parse maven coordiante {0}".format(
match.group(1))
return maven_coordinates
def write_remote_deps(third_party_buck_file, maven_coordinates):
existing_deps = get_existing_third_party_jars()
if not os.path.exists(os.path.dirname(third_party_buck_file)):
os.makedirs(os.path.dirname(third_party_buck_file))
# append so that rules written on earlier runs are preserved
with open(third_party_buck_file, 'a') as buck_file:
for maven_coordinate in maven_coordinates.values():
if maven_coordinate['name'] not in existing_deps:
buck_file.write(REMOTE_DEP_TEMPLATE.format(**maven_coordinate))
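# An .aar is a zip archive; pull out its classes.jar into a temp dir and
# reuse get_classes_for_jar ('jar tvf') to list the classes it provides.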
def get_classes_for_aar(aar):
temp_dir = tempfile.mkdtemp()
try:
with zipfile.ZipFile(aar) as aar_file:
try:
aar_file.extract('classes.jar', temp_dir)
return get_classes_for_jar(path.join(temp_dir, 'classes.jar'))
except KeyError:
pass
finally:
shutil.rmtree(temp_dir)
return []
def get_classes_for_jar(jar):
jar_output = subprocess.check_output(['jar', 'tvf', jar])
classes = []
for line in jar_output.splitlines():
match = CLASS_FILE.search(line)
if match:
classes.append(match.group(1).replace('/', '.').replace('$', '.'))
return classes
def get_existing_third_party_jars():
all_jar_targets = subprocess.check_output(['buck',
'targets',
'--type',
'prebuilt_jar',
'android_prebuilt_aar'],
stderr=FNULL)
result = set()
for jar_target in all_jar_targets.splitlines():
result.add(jar_target.rstrip().split(':')[1])
return result
def create_third_party_map():
third_party_map = {}
android_libraries = set()
all_jar_targets = subprocess.check_output(['buck',
'targets',
'--type',
'prebuilt_jar'],
stderr=FNULL)
for jar_target in all_jar_targets.splitlines():
subprocess.check_call(['buck',
'build',
jar_target],
stderr=FNULL)
jar_location = subprocess.check_output(['buck',
'targets',
'--show_output',
jar_target],
stderr=FNULL).split(' ')[1]
jar_location = jar_location.strip()
for java_class in get_classes_for_jar(jar_location):
third_party_map[java_class] = jar_target
all_aar_targets = subprocess.check_output(['buck',
'targets',
'--type',
'android_prebuilt_aar'],
stderr=FNULL)
for aar_target in all_aar_targets.splitlines():
subprocess.check_call(['buck',
'build',
aar_target],
stderr=FNULL)
aar_location = subprocess.check_output(['buck',
'targets',
'--show_output',
aar_target],
stderr=FNULL).split(' ')[1]
aar_location = aar_location.strip()
for java_class in get_classes_for_aar(aar_location):
third_party_map[java_class] = aar_target
android_libraries.add(aar_target)
build_config_targets = subprocess.check_output(['buck',
'targets',
'--type',
'android_build_config'],
stderr=FNULL)
for build_config_target in build_config_targets.splitlines():
buck_file = build_config_target.split(':')[0].lstrip('/') + '/BUCK'
with open(buck_file, 'r') as buck_file_contents:
for line in buck_file_contents.readlines():
line = line.rstrip()
match = PACKAGE_DECLARATION.match(line)
if match:
third_party_map[match.group(1) + '.BuildConfig'] = \
build_config_target
android_libraries.add(build_config_target)
    android_resource_targets = subprocess.check_output(['buck',
                                                        'targets',
                                                        '--type',
                                                        'android_resource'],
                                                       stderr=FNULL)
    for android_resource_target in android_resource_targets.splitlines():
        buck_file = android_resource_target.split(':')[0].lstrip('/') + '/BUCK'
        with open(buck_file, 'r') as buck_file_contents:
            for line in buck_file_contents.readlines():
                line = line.rstrip()
                match = PACKAGE_DECLARATION.match(line)
                if match:
                    third_party_map[match.group(1) + '.R'] = \
                        android_resource_target
                    android_libraries.add(android_resource_target)
return third_party_map, android_libraries
def find_missing_deps_from_output(buck_rule, output):
in_try_adding = False
in_missing_deps = False
missing_deps = set()
for line in (x.strip() for x in output.splitlines()):
if line == 'Try adding the following deps:':
in_try_adding = True
elif in_try_adding:
if not line:
in_try_adding = False
else:
missing_deps.add(line)
elif line.endswith(' is missing deps:'):
in_missing_deps = True
elif in_missing_deps:
match = DEP_DECLARATION.match(line)
if match:
missing_deps.add(match.group(1))
else:
in_missing_deps = False
return {dep for dep in missing_deps if dep != buck_rule}
def add_missing_deps(buck_rules, android_libraries):
settled = False
pass_count = 1
while not settled:
print '\t*** Adding Deps: Pass {0}'.format(pass_count)
files_changed = add_missing_deps_pass(buck_rules, android_libraries)
print '\t*** Modified {0} BUCK files'.format(files_changed)
settled = files_changed == 0
pass_count += 1
def modify_buck_rule(buck_rule, new_deps_fn=None, new_rule_type=None):
existing_deps = set()
buck_file_with_new_deps = []
found_deps_open = False
found_deps_close = False
found_rule_name = False
buck_file = path.join(buck_rule.lstrip('/').split(':')[0], 'BUCK')
rule_name = buck_rule.split(':')[1]
modified_file = False
with open(buck_file, 'r') as buck_file_contents:
for line in buck_file_contents.readlines():
line = line.rstrip()
name_match = NAME_DECLARATION.match(line)
if name_match and name_match.group(1) == rule_name:
found_rule_name = True
if (new_rule_type and
not buck_file_with_new_deps[-1].startswith(
new_rule_type)):
buck_file_with_new_deps[-1] = '{0}('.format(new_rule_type)
modified_file = True
buck_file_with_new_deps.append(line)
elif (found_rule_name and
DEPS_START.match(line) and
not found_deps_close):
found_deps_open = True
elif found_deps_open and not found_deps_close:
if line.endswith('],'):
buck_file_with_new_deps.append(' deps = [')
new_deps = new_deps_fn(existing_deps)
buck_file_with_new_deps.extend(
format_deps_for_buck_file(new_deps))
buck_file_with_new_deps.append(' ],')
if new_deps != existing_deps:
modified_file = True
found_deps_close = True
else:
match = DEP_DECLARATION.match(line)
if match:
existing_deps.add(match.group(1))
else:
buck_file_with_new_deps.append(line)
if modified_file:
with open(buck_file, 'w') as buck_file_contents:
buck_file_contents.write(
'\n'.join(buck_file_with_new_deps))
return modified_file
def add_missing_deps_pass(buck_rules, android_libraries):
files_changed = 0
for rule in buck_rules:
buck = subprocess.Popen(['buck', 'build', rule],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
_, err = buck.communicate()
if buck.returncode != 0:
missing_deps = find_missing_deps_from_output(rule, err)
new_rule_type = None
if rule in android_libraries:
new_rule_type = 'android_library'
existing_deps = set()
def update_deps(x):
existing_deps.update(x)
return x.union(missing_deps)
if modify_buck_rule(rule,
new_deps_fn=update_deps,
new_rule_type=new_rule_type):
files_changed += 1
for dep in missing_deps.union(existing_deps):
if dep in android_libraries:
android_libraries.add(rule)
return files_changed
def get_files_for_rule(buck_rule):
existing_deps = set()
def empty_deps(x):
existing_deps.update(x)
return set()
modify_buck_rule(buck_rule, new_deps_fn=empty_deps)
files = subprocess.check_output(['buck',
'audit',
'input',
buck_rule],
stderr=FNULL).splitlines()
modify_buck_rule(buck_rule, new_deps_fn=existing_deps.union)
return files
def find_cycle():
process = subprocess.Popen(['buck', 'targets'],
stdout=FNULL,
stderr=subprocess.PIPE)
_, stderr = process.communicate()
retcode = process.poll()
if retcode:
for line in stderr.splitlines():
if line.startswith(CYCLE_PREFIX):
return line[len(CYCLE_PREFIX):].split(' -> ')
return []
def find_smallest_dep(cycle):
    # Best-guess completion of the original stub: score each edge of the
    # cycle by how many source files its two rules own and return the
    # smallest one as the cheapest candidate for breaking the cycle.
    small_dep = None
    result = ()
    for i in xrange(len(cycle)):
        current = cycle[i]
        next_rule = cycle[(i + 1) % len(cycle)]
        current_files = set(get_files_for_rule(current))
        next_files = set(get_files_for_rule(next_rule))
        edge_size = len(current_files) + len(next_files)
        if small_dep is None or edge_size < small_dep:
            small_dep = edge_size
            result = (current, next_rule)
    return result
def break_cycle():
cycle = find_cycle()
if cycle:
find_smallest_dep(cycle)
def create_parser():
parser = argparse.ArgumentParser(
description='Generate a skeleton buck project from a gradle project.')
parser.add_argument(
'--gradle_cache',
dest='gradle_cache',
help='Path to gradle cache',
default=path.expandvars(path.join('$HOME', '.gradle', 'caches')),
)
parser.add_argument(
'--third_party_buck',
dest='third_party_buck',
help='Path to third party code buck file',
default='libs/BUCK'
)
parser.add_argument(
'--split_interfaces',
dest='split_interfaces',
help='Whether or not to split interfaces into their own rule.',
action='store_true',
default=False
)
return parser
def main():
print "**** Creating remote_file rules for maven deps ***"
gradle_files = []
src_roots = []
android_directories = []
external_maven_repos = set()
for root, dirs, files in os.walk(os.getcwd(), followlinks=True):
if 'build.gradle' in files:
gradle_file = path.join(root, 'build.gradle')
gradle_files.append(gradle_file)
external_maven_repos = external_maven_repos.union(
get_repositories_from_gradle_file(gradle_file))
main_root = path.join(root, 'src', 'main')
java_root = path.join(main_root, 'java')
if path.exists(java_root):
src_roots.append(path.relpath(java_root))
if path.exists(path.join(main_root, 'AndroidManifest.xml')):
android_directories.append(main_root)
if not gradle_files:
raise Exception("Couldn't find any 'build.gradle' files.")
if not path.exists('.buckconfig'):
maven_repos = ['mvn{0} = {1}'.format(i, repo)
for i, repo
in enumerate(external_maven_repos)]
with open('.buckconfig', 'w') as buck_config:
buck_config.write(BUCK_CONFIG_TEMPLATE.format(
src_roots=','.join(['/' + x for x in src_roots]),
maven_repositories=' \n'.join(maven_repos)))
for android_directory in android_directories:
buck_file = path.join(android_directory, 'BUCK')
if path.exists(buck_file):
continue
with open(path.join(android_directory,
'AndroidManifest.xml'), 'r') as manifest_file:
manifest_xml = xml.parse(manifest_file)
package = manifest_xml.getroot().get('package')
with open(buck_file, 'w') as buck_file_handle:
buck_file_handle.write(ANDROID_BUILD_CONFIG_TEMPLATE.format(
package=package
))
if path.exists(path.join(android_directory, 'res')):
buck_file_handle.write(ANDROID_RESOURCE_TEMPLATE.format(
package=package
))
maven_coordinates = get_maven_coordinates(gradle_files,
args.gradle_cache)
write_remote_deps(args.third_party_buck, maven_coordinates)
third_party_map, android_libraries = create_third_party_map()
src_roots = get_source_roots('.buckconfig')
print "**** Generating Buck Files ***"
buck_rules = generate_default_buck_files(
'.buckconfig',
src_roots,
third_party_map,
android_libraries,
'java_library')
print "**** Adding missing dependencies ***"
add_missing_deps(buck_rules, android_libraries)
print "**** Checking which rules compile ***"
passing_count = 0
for buck_rule in buck_rules:
try:
subprocess.check_call(['buck', 'build', path.relpath(buck_rule)],
stdout=FNULL,
stderr=FNULL)
passing_count += 1
        except subprocess.CalledProcessError:
pass
print '{0} out of {1} rules compile!!!'.format(passing_count,
len(buck_rules))
if __name__ == '__main__':
args = create_parser().parse_args()
main()
|
|
# coding: utf-8
"""
Server API
Reference for Server API (REST/Json)
OpenAPI spec version: 2.0.6
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class SupportApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
def create_message(self, body, **kwargs):
"""
Create new message
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_message(body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param CreateMessageRequest body: Message parameters (required)
:return: SupportResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.create_message_with_http_info(body, **kwargs)
else:
(data) = self.create_message_with_http_info(body, **kwargs)
return data
def create_message_with_http_info(self, body, **kwargs):
"""
Create new message
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_message_with_http_info(body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param CreateMessageRequest body: Message parameters (required)
:return: SupportResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_message" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_message`")
collection_formats = {}
resource_path = '/support'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
self.api_client.set_default_header('Content-Type', 'application/json')
# Authentication setting
auth_settings = ['ApiClientId', 'ApiClientSecret']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SupportResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_contacts(self, **kwargs):
"""
Get contacts list
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_contacts(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int page:
:param int per_page:
:param str sort_by: Sort by this attribute (id by default)
:param str sort_direction: Sorting direction (asc by default)
:return: ContactListResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_contacts_with_http_info(**kwargs)
else:
(data) = self.get_contacts_with_http_info(**kwargs)
return data
def get_contacts_with_http_info(self, **kwargs):
"""
Get contacts list
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_contacts_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int page:
:param int per_page:
:param str sort_by: Sort by this attribute (id by default)
:param str sort_direction: Sorting direction (asc by default)
:return: ContactListResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['page', 'per_page', 'sort_by', 'sort_direction']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_contacts" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
resource_path = '/support/contacts'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'page' in params:
query_params['page'] = params['page']
if 'per_page' in params:
query_params['per_page'] = params['per_page']
if 'sort_by' in params:
query_params['sort_by'] = params['sort_by']
if 'sort_direction' in params:
query_params['sort_direction'] = params['sort_direction']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['ApiClientId', 'ApiClientSecret']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ContactListResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
|
|
"""
Functions and classes used to extend a GATK tool with Python.
GATK uses two FIFOs to communicate with Python. The "ack" FIFO is read by GATK
and written by Python code, and is used to signal that a Python command has
completed execution. The "data" FIFO is written by GATK and read by Python,
and is used to pass data to Python from Java.
Most of the functions in this module are intended to be called by GATK via
the StreamingPythonScriptExecutor Java class, and are not called by Python
code directly. The one exception is the readDataFIFO function, which can be
used to read data that had been passed to Python by GATK Java code.
"""
import sys
import os
import cProfile, pstats, io
import traceback
from gatktool import toolconstants
_ackFIFO = None
_dataFIFO = None
_GATKProfiler = None
def initializeGATK(ackFIFOName: str):
"""
Open the GATK ack FIFO and install the exception handler hook.
Called by GATK when the StreamingPythonScriptExecutor is initialized,
which is normally in onTraversalStart. Initializes the ack FIFO and
installs the exception hook. Since the exception hook uses the ack FIFO,
it can't be installed until after the FIFO is initialized.
"""
global _ackFIFO
_ackFIFO = AckFIFO(ackFIFOName)
sys.excepthook = gatkExceptionHook
def gatkExceptionHook(exceptionType, value, tracebck):
"""
GATK Handler for uncaught Python exceptions.
The is installed by initializeGATK after the ack FIFO has been
initialized. When an unhandled exception is caught, the handler
sends a nack to GATK through the FIFO, which results in a
PythonScriptExecutorException being thrown in the tool.
"""
sendNackWithMessage(' '.join(map(str, traceback.format_exception(exceptionType, value, tracebck))))
sys.__excepthook__(exceptionType, value, tracebck)
def sendAck():
"""
Send a positive acknowledgment to GATK. This should generally only
be called by python code that is embedded in Java, since the executor
keeps track of whether an ack request is outstanding.
"""
global _ackFIFO
_ackFIFO.writeAck()
def sendNack():
"""
Send a negative acknowledgment to GATK. Generally only called by the
installed exception hook. This will result in a Java exception being
thrown that unless caught by Java code, will terminate the tool.
"""
global _ackFIFO
_ackFIFO.writeNack()
def sendNackWithMessage(nckMessage: str):
"""
Send a negative acknowledgment to GATK, along with a message. Generally only
called by the installed exception hook. This will result in a Java exception being
thrown that unless caught by Java code, will terminate the tool.
"""
global _ackFIFO
_ackFIFO.writeNackWithMessage(nckMessage)
def terminateGATK():
"""
Called by GATK when no more Python commands will be executed
"""
global _ackFIFO
if _ackFIFO is None:
raise RuntimeError("ack FIFO has not been initialized")
_ackFIFO.close()
_ackFIFO = None
def initializeDataFIFO(dataFIFOName: str):
"""
Initialize the data FIFO for reading.
Once this method has been called, the FIFO may be read using the
readDataFIFO function.
"""
global _dataFIFO
_dataFIFO = DataFIFO(dataFIFOName)
def closeDataFIFO():
""""
Close the data FIFO. Afteer this method hass been called, the
data FIFO can no longer be read.
"""
global _dataFIFO
if _dataFIFO is None:
raise RuntimeError("data FIFO has not been initialized")
_dataFIFO.close()
_dataFIFO = None
def readDataFIFO() -> str:
"""
Read a line from the Data FIFO.
:return: string
"""
global _dataFIFO
return _dataFIFO.readLine()
def startProfiling():
"""
Start Python CProfile profiling.
"""
global _GATKProfiler
_GATKProfiler = cProfile.Profile()
_GATKProfiler.enable()
def endProfiling(profileName: str):
"""
End Python CProfile profiling and write results to a file. The
    startProfiling function must have been previously called. The results
    are ordered by cumulative time.
:param profileName: name of the file to which the profiling results should be written.
"""
global _GATKProfiler
_GATKProfiler.disable()
gatkProfilerDescriptor = os.open(profileName, os.O_WRONLY | os.O_CREAT)
gatkProfileStream = os.fdopen(gatkProfilerDescriptor, 'w')
gatkStats = pstats.Stats(_GATKProfiler, stream=gatkProfileStream).sort_stats('cumulative')
gatkStats.print_stats()
gatkProfileStream.close()
del gatkProfileStream
del gatkProfilerDescriptor
del gatkStats
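# Illustrative only (not part of the original module): profiling is expected
# to be bracketed as a start/stop pair, e.g.
#
#   startProfiling()
#   ...                                 # Python commands executed for the tool
#   endProfiling("python.profile.txt")  # output file name here is a placeholder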
class AckFIFO:
"""
Manage the FIFO used to notify GATK (via an ack) that a command has
completed, or failed due to an unhandled exception (via a nck).
"""
def __init__(self, ackFIFOName: str) -> None:
"""Open the ack fifo stream for writing only"""
self.ackFIFOName = ackFIFOName
writeDescriptor = os.open(self.ackFIFOName, os.O_WRONLY)
self.fileWriter = os.fdopen(writeDescriptor, 'w')
def writeAck(self):
"""
Write a positive acknowledgement to the ACK FIFO.
"""
if self.fileWriter is None:
raise RuntimeError("ack FIFO has not been initialized")
self.fileWriter.write(toolconstants._ackString)
self.fileWriter.flush()
def writeNack(self):
"""
Write a negative acknowledgement to the ACK FIFO.
Calling this method will result in an exception being thrown
in the GATK tool on whose behalf this module is running.
"""
if self.fileWriter is None:
raise RuntimeError("ack FIFO has not been initialized")
self.fileWriter.write(toolconstants._nackString)
self.fileWriter.flush()
def writeNackWithMessage(self, message: str) -> None:
"""
Write a negative acknowledgement with a supplemental message to the ACK FIFO.
The format of a nck message is:
ncknnnnssss....
where "nck" is literal, "nnnn" is a 4 byte long string of decimal digits representing the length
of the message that follows, and "ssss...." is a string of length nnnn
Calling this method will result in an exception being thrown
in the GATK tool on whose behalf this module is running.
"""
if self.fileWriter is None:
raise RuntimeError("ack FIFO has not been initialized")
self.fileWriter.write(toolconstants._nkmString)
actualMessageLength = len(message)
"""The message length must be exactly 4 bytes"""
if len(str(actualMessageLength)) <= toolconstants._nckMessageLengthSerializedSize:
self.fileWriter.write(str(actualMessageLength).zfill(toolconstants._nckMessageLengthSerializedSize))
self.fileWriter.write(message)
else:
"""Message is too long, trim to 9999 bytes"""
self.fileWriter.write(str(toolconstants._nckMaxMessageLength))
self.fileWriter.write(message[:toolconstants._nckMaxMessageLength])
self.fileWriter.flush()
def close(self):
        assert self.fileWriter is not None
self.fileWriter.close()
self.fileWriter = None
class DataFIFO:
"""
Manage the FIFO stream used for transferring data from the GATK tool to
Python code.
The FIFO is written by GATK and read by Python.
"""
def __init__(self, dataFIFOName: str) -> None:
"""Open the data stream fifo for reading"""
self.dataFIFOName = dataFIFOName
# the data fifo is always opened for read only on the python side
readDescriptor = os.open(self.dataFIFOName, os.O_RDONLY)
self.fileReader = os.fdopen(readDescriptor, 'r')
def readLine(self) -> str:
"""
Read a single line from the Data FIFO.
:return: string
"""
if self.fileReader is None:
raise RuntimeError("data FIFO reader has not been initialized")
return self.fileReader.readline()
def close(self):
if self.fileReader is None:
raise RuntimeError("data FIFO reader has not been initialized")
self.fileReader.close()
self.fileReader = None
|
|
# Copyright 2014 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import uuid
import mock
from oslo_config import cfg
from keystone import cli
from keystone.common import dependency
from keystone.i18n import _
from keystone import resource
from keystone.tests import unit as tests
from keystone.tests.unit.ksfixtures import database
CONF = cfg.CONF
class CliTestCase(tests.SQLDriverOverrides, tests.TestCase):
def config_files(self):
config_files = super(CliTestCase, self).config_files()
config_files.append(tests.dirs.tests_conf('backend_sql.conf'))
return config_files
def test_token_flush(self):
self.useFixture(database.Database())
self.load_backends()
cli.TokenFlush.main()
class CliDomainConfigAllTestCase(tests.SQLDriverOverrides, tests.TestCase):
def setUp(self):
self.useFixture(database.Database())
super(CliDomainConfigAllTestCase, self).setUp()
self.load_backends()
self.config_fixture.config(
group='identity',
domain_config_dir=tests.TESTCONF + '/domain_configs_multi_ldap')
self.domain_count = 3
self.setup_initial_domains()
def config_files(self):
self.config_fixture.register_cli_opt(cli.command_opt)
self.addCleanup(self.cleanup)
config_files = super(CliDomainConfigAllTestCase, self).config_files()
config_files.append(tests.dirs.tests_conf('backend_sql.conf'))
return config_files
def cleanup(self):
CONF.reset()
CONF.unregister_opt(cli.command_opt)
def cleanup_domains(self):
for domain in self.domains:
if domain == 'domain_default':
# Not allowed to delete the default domain, but should at least
# delete any domain-specific config for it.
self.domain_config_api.delete_config(
CONF.identity.default_domain_id)
continue
this_domain = self.domains[domain]
this_domain['enabled'] = False
self.resource_api.update_domain(this_domain['id'], this_domain)
self.resource_api.delete_domain(this_domain['id'])
self.domains = {}
def config(self, config_files):
CONF(args=['domain_config_upload', '--all'], project='keystone',
default_config_files=config_files)
def setup_initial_domains(self):
def create_domain(domain):
return self.resource_api.create_domain(domain['id'], domain)
self.domains = {}
self.addCleanup(self.cleanup_domains)
for x in range(1, self.domain_count):
domain = 'domain%s' % x
self.domains[domain] = create_domain(
{'id': uuid.uuid4().hex, 'name': domain})
self.domains['domain_default'] = create_domain(
resource.calc_default_domain())
def test_config_upload(self):
# The values below are the same as in the domain_configs_multi_ldap
# directory of test config_files.
default_config = {
'ldap': {'url': 'fake://memory',
'user': 'cn=Admin',
'password': 'password',
'suffix': 'cn=example,cn=com'},
'identity': {'driver': 'keystone.identity.backends.ldap.Identity'}
}
domain1_config = {
'ldap': {'url': 'fake://memory1',
'user': 'cn=Admin',
'password': 'password',
'suffix': 'cn=example,cn=com'},
'identity': {'driver': 'keystone.identity.backends.ldap.Identity'}
}
domain2_config = {
'ldap': {'url': 'fake://memory',
'user': 'cn=Admin',
'password': 'password',
'suffix': 'cn=myroot,cn=com',
'group_tree_dn': 'ou=UserGroups,dc=myroot,dc=org',
'user_tree_dn': 'ou=Users,dc=myroot,dc=org'},
'identity': {'driver': 'keystone.identity.backends.ldap.Identity'}
}
# Clear backend dependencies, since cli loads these manually
dependency.reset()
cli.DomainConfigUpload.main()
res = self.domain_config_api.get_config_with_sensitive_info(
CONF.identity.default_domain_id)
self.assertEqual(default_config, res)
res = self.domain_config_api.get_config_with_sensitive_info(
self.domains['domain1']['id'])
self.assertEqual(domain1_config, res)
res = self.domain_config_api.get_config_with_sensitive_info(
self.domains['domain2']['id'])
self.assertEqual(domain2_config, res)
class CliDomainConfigSingleDomainTestCase(CliDomainConfigAllTestCase):
def config(self, config_files):
CONF(args=['domain_config_upload', '--domain-name', 'Default'],
project='keystone', default_config_files=config_files)
def test_config_upload(self):
# The values below are the same as in the domain_configs_multi_ldap
# directory of test config_files.
default_config = {
'ldap': {'url': 'fake://memory',
'user': 'cn=Admin',
'password': 'password',
'suffix': 'cn=example,cn=com'},
'identity': {'driver': 'keystone.identity.backends.ldap.Identity'}
}
# Clear backend dependencies, since cli loads these manually
dependency.reset()
cli.DomainConfigUpload.main()
res = self.domain_config_api.get_config_with_sensitive_info(
CONF.identity.default_domain_id)
self.assertEqual(default_config, res)
res = self.domain_config_api.get_config_with_sensitive_info(
self.domains['domain1']['id'])
self.assertEqual({}, res)
res = self.domain_config_api.get_config_with_sensitive_info(
self.domains['domain2']['id'])
self.assertEqual({}, res)
def test_no_overwrite_config(self):
# Create a config for the default domain
default_config = {
'ldap': {'url': uuid.uuid4().hex},
'identity': {'driver': 'keystone.identity.backends.ldap.Identity'}
}
self.domain_config_api.create_config(
CONF.identity.default_domain_id, default_config)
# Now try and upload the settings in the configuration file for the
# default domain
dependency.reset()
with mock.patch('__builtin__.print') as mock_print:
self.assertRaises(SystemExit, cli.DomainConfigUpload.main)
file_name = ('keystone.%s.conf' %
resource.calc_default_domain()['name'])
error_msg = _(
'Domain: %(domain)s already has a configuration defined - '
'ignoring file: %(file)s.') % {
'domain': resource.calc_default_domain()['name'],
'file': os.path.join(CONF.identity.domain_config_dir,
file_name)}
mock_print.assert_has_calls([mock.call(error_msg)])
res = self.domain_config_api.get_config(
CONF.identity.default_domain_id)
# The initial config should not have been overwritten
self.assertEqual(default_config, res)
class CliDomainConfigNoOptionsTestCase(CliDomainConfigAllTestCase):
def config(self, config_files):
CONF(args=['domain_config_upload'],
project='keystone', default_config_files=config_files)
def test_config_upload(self):
dependency.reset()
with mock.patch('__builtin__.print') as mock_print:
self.assertRaises(SystemExit, cli.DomainConfigUpload.main)
mock_print.assert_has_calls(
[mock.call(
_('At least one option must be provided, use either '
'--all or --domain-name'))])
class CliDomainConfigTooManyOptionsTestCase(CliDomainConfigAllTestCase):
def config(self, config_files):
CONF(args=['domain_config_upload', '--all', '--domain-name',
'Default'],
project='keystone', default_config_files=config_files)
def test_config_upload(self):
dependency.reset()
with mock.patch('__builtin__.print') as mock_print:
self.assertRaises(SystemExit, cli.DomainConfigUpload.main)
mock_print.assert_has_calls(
[mock.call(_('The --all option cannot be used with '
'the --domain-name option'))])
class CliDomainConfigInvalidDomainTestCase(CliDomainConfigAllTestCase):
def config(self, config_files):
self.invalid_domain_name = uuid.uuid4().hex
CONF(args=['domain_config_upload', '--domain-name',
self.invalid_domain_name],
project='keystone', default_config_files=config_files)
def test_config_upload(self):
dependency.reset()
with mock.patch('__builtin__.print') as mock_print:
self.assertRaises(SystemExit, cli.DomainConfigUpload.main)
file_name = 'keystone.%s.conf' % self.invalid_domain_name
error_msg = (_(
'Invalid domain name: %(domain)s found in config file name: '
'%(file)s - ignoring this file.') % {
'domain': self.invalid_domain_name,
'file': os.path.join(CONF.identity.domain_config_dir,
file_name)})
mock_print.assert_has_calls([mock.call(error_msg)])
|
|
# encoding: utf-8
from collections import defaultdict
import datetime
from decimal import Decimal
import logging
from django.core.exceptions import ObjectDoesNotExist
from django.core.management import call_command
from south.db import db
from south.v2 import DataMigration
from django.db import models
from corehq.apps.accounting.models import (
FeatureType, SoftwarePlanEdition, SoftwareProductType,
SoftwarePlanVisibility,
)
logger = logging.getLogger(__name__)
class Migration(DataMigration):
def forwards(self, orm):
call_command('cchq_prbac_bootstrap')
        bootstrap_handler = BootstrapSoftwarePlans(orm)
        bootstrap_handler.bootstrap()
# Reset Subscription plan_version to the latest version for that plan
for subscription in orm.Subscription.objects.all():
software_plan = subscription.plan_version.plan
latest_version = software_plan.softwareplanversion_set.filter(
is_active=True
).latest('date_created')
if subscription.plan_version.pk != latest_version.pk:
logger.info("%s reset to newest version."
% subscription.subscriber.domain)
subscription.plan_version = latest_version
subscription.save()
# make sure that the default standard plan SMS FeatureRate
# has the monthly_limit set to 100
standard_plans = orm.DefaultProductPlan.objects.filter(
edition=SoftwarePlanEdition.STANDARD)
for std_plan in standard_plans:
feature_rate = std_plan.plan.softwareplanversion_set.filter(
is_active=True
).latest('date_created').feature_rates.filter(
feature__feature_type=FeatureType.SMS
)[0]
if feature_rate.monthly_limit != 100:
feature_rate.monthly_limit = 100
feature_rate.save()
for plan in orm.SoftwarePlan.objects.all():
default_version = plan.softwareplanversion_set.filter(
is_active=True
).latest('date_created')
for version in plan.softwareplanversion_set.all():
if version.pk != default_version.pk:
try:
version.delete()
except models.ProtectedError:
logger.info("Skipped deleting SoftwarePlanVersion "
"with id %d for plan %s because it was "
"still being used."
% (version.pk, plan.name))
for credit_line in orm.CreditLine.objects.filter(feature_rate__isnull=False).all():
latest_rate = credit_line.feature_rate.feature.get_rate()
if credit_line.feature_rate.pk != latest_rate.pk:
credit_line.feature_rate = latest_rate
credit_line.save()
for feature_rate in orm.FeatureRate.objects.all():
if feature_rate.softwareplanversion_set.count() == 0:
try:
feature_rate.delete()
except models.ProtectedError:
logger.info("Skipped deleting FeatureRate with id "
"%d because it was still being used."
% feature_rate.pk)
for credit_line in orm.CreditLine.objects.filter(product_rate__isnull=False).all():
latest_rate = credit_line.product_rate.product.get_rate()
if credit_line.product_rate.pk != latest_rate.pk:
credit_line.product_rate = latest_rate
credit_line.save()
for product_rate in orm.SoftwareProductRate.objects.all():
if product_rate.softwareplanversion_set.count() == 0:
try:
product_rate.delete()
except models.ProtectedError:
logger.info("Skipped deleting ProductRate with id "
"%d because it was still being used."
% product_rate.pk)
def backwards(self, orm):
pass
models = {
u'accounting.billingaccount': {
'Meta': {'object_name': 'BillingAccount'},
'account_type': ('django.db.models.fields.CharField', [], {'default': "'CONTRACT'", 'max_length': '25'}),
'billing_admins': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['accounting.BillingAccountAdmin']", 'null': 'True', 'symmetrical': 'False'}),
'created_by': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
'created_by_domain': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True', 'blank': 'True'}),
'currency': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.Currency']"}),
'date_confirmed_extra_charges': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_auto_invoiceable': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'db_index': 'True'}),
'salesforce_account_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '80', 'null': 'True', 'blank': 'True'})
},
u'accounting.billingaccountadmin': {
'Meta': {'object_name': 'BillingAccountAdmin'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'web_user': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80', 'db_index': 'True'})
},
u'accounting.billingcontactinfo': {
'Meta': {'object_name': 'BillingContactInfo'},
'account': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['accounting.BillingAccount']", 'unique': 'True', 'primary_key': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'company_name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'emails': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'first_line': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'phone_number': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'postal_code': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'second_line': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'state_province_region': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'accounting.billingrecord': {
'Meta': {'object_name': 'BillingRecord'},
'date_emailed': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'emailed_to': ('django.db.models.fields.CharField', [], {'max_length': '254', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invoice': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.Invoice']"}),
'pdf_data_id': ('django.db.models.fields.CharField', [], {'max_length': '48'})
},
u'accounting.creditadjustment': {
'Meta': {'object_name': 'CreditAdjustment'},
'amount': ('django.db.models.fields.DecimalField', [], {'default': "'0.0000'", 'max_digits': '10', 'decimal_places': '4'}),
'credit_line': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.CreditLine']"}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invoice': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.Invoice']", 'null': 'True'}),
'line_item': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.LineItem']", 'null': 'True'}),
'note': ('django.db.models.fields.TextField', [], {}),
'reason': ('django.db.models.fields.CharField', [], {'default': "'MANUAL'", 'max_length': '25'}),
'web_user': ('django.db.models.fields.CharField', [], {'max_length': '80', 'null': 'True'})
},
u'accounting.creditline': {
'Meta': {'object_name': 'CreditLine'},
'account': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.BillingAccount']"}),
'balance': ('django.db.models.fields.DecimalField', [], {'default': "'0.0000'", 'max_digits': '10', 'decimal_places': '4'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'feature_rate': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.FeatureRate']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product_rate': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.SoftwareProductRate']", 'null': 'True', 'blank': 'True'}),
'subscription': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.Subscription']", 'null': 'True', 'blank': 'True'})
},
u'accounting.currency': {
'Meta': {'object_name': 'Currency'},
'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '3'}),
'date_updated': ('django.db.models.fields.DateField', [], {'auto_now': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '25', 'db_index': 'True'}),
'rate_to_default': ('django.db.models.fields.DecimalField', [], {'default': "'1.0'", 'max_digits': '20', 'decimal_places': '9'}),
'symbol': ('django.db.models.fields.CharField', [], {'max_length': '10'})
},
u'accounting.defaultproductplan': {
'Meta': {'object_name': 'DefaultProductPlan'},
'edition': ('django.db.models.fields.CharField', [], {'default': "'Community'", 'max_length': '25'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.SoftwarePlan']"}),
'product_type': ('django.db.models.fields.CharField', [], {'max_length': '25'})
},
u'accounting.feature': {
'Meta': {'object_name': 'Feature'},
'feature_type': ('django.db.models.fields.CharField', [], {'max_length': '10', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40'})
},
u'accounting.featurerate': {
'Meta': {'object_name': 'FeatureRate'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'feature': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.Feature']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'monthly_fee': ('django.db.models.fields.DecimalField', [], {'default': "'0.00'", 'max_digits': '10', 'decimal_places': '2'}),
'monthly_limit': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'per_excess_fee': ('django.db.models.fields.DecimalField', [], {'default': "'0.00'", 'max_digits': '10', 'decimal_places': '2'})
},
u'accounting.invoice': {
'Meta': {'object_name': 'Invoice'},
'balance': ('django.db.models.fields.DecimalField', [], {'default': "'0.0000'", 'max_digits': '10', 'decimal_places': '4'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_due': ('django.db.models.fields.DateField', [], {'db_index': 'True'}),
'date_end': ('django.db.models.fields.DateField', [], {}),
'date_paid': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'date_received': ('django.db.models.fields.DateField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'date_start': ('django.db.models.fields.DateField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'subscription': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.Subscription']"}),
'tax_rate': ('django.db.models.fields.DecimalField', [], {'default': "'0.0000'", 'max_digits': '10', 'decimal_places': '4'})
},
u'accounting.lineitem': {
'Meta': {'object_name': 'LineItem'},
'base_cost': ('django.db.models.fields.DecimalField', [], {'default': "'0.0000'", 'max_digits': '10', 'decimal_places': '4'}),
'base_description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'feature_rate': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.FeatureRate']", 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invoice': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.Invoice']"}),
'product_rate': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.SoftwareProductRate']", 'null': 'True'}),
'quantity': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'unit_cost': ('django.db.models.fields.DecimalField', [], {'default': "'0.0000'", 'max_digits': '10', 'decimal_places': '4'}),
'unit_description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
u'accounting.softwareplan': {
'Meta': {'object_name': 'SoftwarePlan'},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'edition': ('django.db.models.fields.CharField', [], {'default': "'Enterprise'", 'max_length': '25'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'visibility': ('django.db.models.fields.CharField', [], {'default': "'INTERNAL'", 'max_length': '10'})
},
u'accounting.softwareplanversion': {
'Meta': {'object_name': 'SoftwarePlanVersion'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'feature_rates': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['accounting.FeatureRate']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.SoftwarePlan']"}),
'product_rates': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['accounting.SoftwareProductRate']", 'symmetrical': 'False', 'blank': 'True'}),
'role': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['django_prbac.Role']"})
},
u'accounting.softwareproduct': {
'Meta': {'object_name': 'SoftwareProduct'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40'}),
'product_type': ('django.db.models.fields.CharField', [], {'max_length': '25', 'db_index': 'True'})
},
u'accounting.softwareproductrate': {
'Meta': {'object_name': 'SoftwareProductRate'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'monthly_fee': ('django.db.models.fields.DecimalField', [], {'default': "'0.00'", 'max_digits': '10', 'decimal_places': '2'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.SoftwareProduct']"})
},
u'accounting.subscriber': {
'Meta': {'object_name': 'Subscriber'},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'organization': ('django.db.models.fields.CharField', [], {'max_length': '25', 'null': 'True', 'db_index': 'True'})
},
u'accounting.subscription': {
'Meta': {'object_name': 'Subscription'},
'account': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.BillingAccount']"}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_delay_invoicing': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'date_end': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'date_start': ('django.db.models.fields.DateField', [], {}),
'do_not_invoice': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'plan_version': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.SoftwarePlanVersion']"}),
'salesforce_contract_id': ('django.db.models.fields.CharField', [], {'max_length': '80', 'null': 'True', 'blank': 'True'}),
'subscriber': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.Subscriber']"})
},
u'accounting.subscriptionadjustment': {
'Meta': {'object_name': 'SubscriptionAdjustment'},
'date_created': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invoice': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.Invoice']", 'null': 'True'}),
'method': ('django.db.models.fields.CharField', [], {'default': "'INTERNAL'", 'max_length': '50'}),
'new_date_delay_invoicing': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'new_date_end': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'new_date_start': ('django.db.models.fields.DateField', [], {}),
'new_salesforce_contract_id': ('django.db.models.fields.CharField', [], {'max_length': '80', 'null': 'True', 'blank': 'True'}),
'note': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'reason': ('django.db.models.fields.CharField', [], {'default': "'CREATE'", 'max_length': '50'}),
'related_subscription': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'subscriptionadjustment_related'", 'null': 'True', 'to': u"orm['accounting.Subscription']"}),
'subscription': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounting.Subscription']"}),
'web_user': ('django.db.models.fields.CharField', [], {'max_length': '80', 'null': 'True'})
},
u'django_prbac.role': {
'Meta': {'object_name': 'Role'},
'description': ('django.db.models.fields.TextField', [], {'default': "u''", 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'parameters': ('django_prbac.fields.StringSetField', [], {'default': '[]', 'blank': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '256'})
}
}
complete_apps = ['accounting']
class BootstrapSoftwarePlans(object):
"""
This is a direct copy of the cchq_software_plan_bootstrap management command
so that orm can be used to reference the objects.
"""
def __init__(self, orm):
self.orm = orm
self.verbose = False
self.for_tests = False
def bootstrap(self):
logger.info('Bootstrapping standard plans. Enterprise plans will have to be created via the admin UIs.')
self.product_types = [p[0] for p in SoftwareProductType.CHOICES]
self.editions = [
SoftwarePlanEdition.COMMUNITY,
SoftwarePlanEdition.STANDARD,
SoftwarePlanEdition.PRO,
SoftwarePlanEdition.ADVANCED,
SoftwarePlanEdition.ENTERPRISE,
]
self.feature_types = [f[0] for f in FeatureType.CHOICES]
self.ensure_plans()
def ensure_plans(self, dry_run=False):
edition_to_features = self.ensure_features(dry_run=dry_run)
for product_type in self.product_types:
for edition in self.editions:
role_slug = self.BOOTSTRAP_EDITION_TO_ROLE[edition]
try:
role = self.orm['django_prbac.Role'].objects.get(slug=role_slug)
except ObjectDoesNotExist:
logger.info("Could not find the role '%s'. Did you forget to run cchq_prbac_bootstrap?")
logger.info("Aborting. You should figure this out.")
return
software_plan_version = self.orm.SoftwarePlanVersion(role=role)
product, product_rates = self.ensure_product_and_rate(product_type, edition, dry_run=dry_run)
feature_rates = self.ensure_feature_rates(edition_to_features[edition], edition, dry_run=dry_run)
software_plan = self.orm.SoftwarePlan(
name='%s Edition' % product.name, edition=edition, visibility=SoftwarePlanVisibility.PUBLIC
)
if dry_run:
logger.info("[DRY RUN] Creating Software Plan: %s" % software_plan.name)
else:
try:
software_plan = self.orm.SoftwarePlan.objects.get(name=software_plan.name)
if self.verbose:
logger.info("Plan '%s' already exists. Using existing plan to add version."
% software_plan.name)
except self.orm.SoftwarePlan.DoesNotExist:
software_plan.save()
if self.verbose:
logger.info("Creating Software Plan: %s" % software_plan.name)
software_plan_version.plan = software_plan
software_plan_version.save()
for product_rate in product_rates:
product_rate.save()
software_plan_version.product_rates.add(product_rate)
for feature_rate in feature_rates:
feature_rate.save()
software_plan_version.feature_rates.add(feature_rate)
software_plan_version.save()
default_product_plan = self.orm.DefaultProductPlan(product_type=product.product_type, edition=edition)
if dry_run:
logger.info("[DRY RUN] Setting plan as default for product '%s' and edition '%s'." %
(product.product_type, default_product_plan.edition))
else:
try:
default_product_plan = self.orm.DefaultProductPlan.objects.get(
product_type=product.product_type, edition=edition
)
if self.verbose:
logger.info("Default for product '%s' and edition "
"'%s' already exists." % (
product.product_type, default_product_plan.edition
))
except ObjectDoesNotExist:
default_product_plan.plan = software_plan
default_product_plan.save()
if self.verbose:
logger.info("Setting plan as default for product '%s' and edition '%s'." %
(product.product_type,
default_product_plan.edition))
def ensure_product_and_rate(self, product_type, edition, dry_run=False):
"""
Ensures that all the necessary SoftwareProducts and SoftwareProductRates are created for the plan.
"""
if self.verbose:
logger.info('Ensuring Products and Product Rates')
product = self.orm.SoftwareProduct(name='%s %s' % (product_type, edition), product_type=product_type)
if edition == SoftwarePlanEdition.ENTERPRISE:
product.name = "Dimagi Only %s" % product.name
product_rates = []
BOOTSTRAP_PRODUCT_RATES = {
SoftwarePlanEdition.COMMUNITY: [
self.orm.SoftwareProductRate(), # use all the defaults
],
SoftwarePlanEdition.STANDARD: [
self.orm.SoftwareProductRate(monthly_fee=Decimal('100.00')),
],
SoftwarePlanEdition.PRO: [
self.orm.SoftwareProductRate(monthly_fee=Decimal('500.00')),
],
SoftwarePlanEdition.ADVANCED: [
self.orm.SoftwareProductRate(monthly_fee=Decimal('1000.00')),
],
SoftwarePlanEdition.ENTERPRISE: [
self.orm.SoftwareProductRate(monthly_fee=Decimal('0.00')),
],
}
for product_rate in BOOTSTRAP_PRODUCT_RATES[edition]:
if dry_run:
logger.info("[DRY RUN] Creating Product: %s" % product)
logger.info("[DRY RUN] Corresponding product rate of $%d created." % product_rate.monthly_fee)
else:
try:
product = self.orm.SoftwareProduct.objects.get(name=product.name)
if self.verbose:
logger.info("Product '%s' already exists. Using "
"existing product to add rate."
% product.name)
except self.orm.SoftwareProduct.DoesNotExist:
product.save()
if self.verbose:
logger.info("Creating Product: %s" % product)
if self.verbose:
logger.info("Corresponding product rate of $%d created."
% product_rate.monthly_fee)
product_rate.product = product
product_rates.append(product_rate)
return product, product_rates
def ensure_features(self, dry_run=False):
"""
Ensures that all the Features necessary for the plans are created.
"""
if self.verbose:
logger.info('Ensuring Features')
edition_to_features = defaultdict(list)
for edition in self.editions:
for feature_type in self.feature_types:
feature = self.orm.Feature(name='%s %s' % (feature_type, edition), feature_type=feature_type)
if edition == SoftwarePlanEdition.ENTERPRISE:
feature.name = "Dimagi Only %s" % feature.name
if dry_run:
logger.info("[DRY RUN] Creating Feature: %s" % feature)
else:
try:
feature = self.orm.Feature.objects.get(name=feature.name)
if self.verbose:
logger.info("Feature '%s' already exists. Using "
"existing feature to add rate."
% feature.name)
except ObjectDoesNotExist:
feature.save()
if self.verbose:
logger.info("Creating Feature: %s" % feature)
edition_to_features[edition].append(feature)
return edition_to_features
def ensure_feature_rates(self, features, edition, dry_run=False):
"""
Ensures that all the FeatureRates necessary for the plans are created.
"""
if self.verbose:
logger.info('Ensuring Feature Rates')
feature_rates = []
BOOTSTRAP_FEATURE_RATES = {
SoftwarePlanEdition.COMMUNITY: {
FeatureType.USER: self.orm.FeatureRate(monthly_limit=2 if self.for_tests else 50,
per_excess_fee=Decimal('1.00')),
FeatureType.SMS: self.orm.FeatureRate(monthly_limit=0), # use defaults here
},
SoftwarePlanEdition.STANDARD: {
FeatureType.USER: self.orm.FeatureRate(monthly_limit=4 if self.for_tests else 100,
per_excess_fee=Decimal('1.00')),
FeatureType.SMS: self.orm.FeatureRate(monthly_limit=3 if self.for_tests else 100),
},
SoftwarePlanEdition.PRO: {
FeatureType.USER: self.orm.FeatureRate(monthly_limit=6 if self.for_tests else 500,
per_excess_fee=Decimal('1.00')),
FeatureType.SMS: self.orm.FeatureRate(monthly_limit=5 if self.for_tests else 500),
},
SoftwarePlanEdition.ADVANCED: {
FeatureType.USER: self.orm.FeatureRate(monthly_limit=8 if self.for_tests else 1000,
per_excess_fee=Decimal('1.00')),
FeatureType.SMS: self.orm.FeatureRate(monthly_limit=7 if self.for_tests else 1000),
},
SoftwarePlanEdition.ENTERPRISE: {
FeatureType.USER: self.orm.FeatureRate(monthly_limit=-1, per_excess_fee=Decimal('0.00')),
FeatureType.SMS: self.orm.FeatureRate(monthly_limit=-1),
},
}
for feature in features:
feature_rate = BOOTSTRAP_FEATURE_RATES[edition][feature.feature_type]
feature_rate.feature = feature
if dry_run:
logger.info("[DRY RUN] Creating rate for feature '%s': %s" % (feature.name, feature_rate))
elif self.verbose:
logger.info("Creating rate for feature '%s': %s" % (feature.name, feature_rate))
feature_rates.append(feature_rate)
return feature_rates
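# Maps each edition to the slug of the default role used by its bootstrapped plan version.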
BOOTSTRAP_EDITION_TO_ROLE = {
SoftwarePlanEdition.COMMUNITY: 'community_plan_v0',
SoftwarePlanEdition.STANDARD: 'standard_plan_v0',
SoftwarePlanEdition.PRO: 'pro_plan_v0',
SoftwarePlanEdition.ADVANCED: 'advanced_plan_v0',
SoftwarePlanEdition.ENTERPRISE: 'enterprise_plan_v0',
}
|
|
import tempfile
import uuid
from django.core.management import call_command
from django.db import DataError
from django.test import TestCase
from django.test import TransactionTestCase
from le_utils.constants import content_kinds
from mock import call
from mock import patch
from .sqlalchemytesting import django_connection_engine
from kolibri.core.content.models import ChannelMetadata
from kolibri.core.content.models import CONTENT_SCHEMA_VERSION
from kolibri.core.content.models import ContentNode
from kolibri.core.content.models import File
from kolibri.core.content.models import Language
from kolibri.core.content.models import LocalFile
from kolibri.core.content.utils.annotation import calculate_included_languages
from kolibri.core.content.utils.annotation import calculate_published_size
from kolibri.core.content.utils.annotation import calculate_total_resource_count
from kolibri.core.content.utils.annotation import fix_multiple_trees_with_id_one
from kolibri.core.content.utils.annotation import mark_local_files_as_available
from kolibri.core.content.utils.annotation import mark_local_files_as_unavailable
from kolibri.core.content.utils.annotation import recurse_annotation_up_tree
from kolibri.core.content.utils.annotation import (
set_leaf_node_availability_from_local_file_availability,
)
from kolibri.core.content.utils.annotation import set_local_file_availability_from_disk
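# Substituted for kolibri.core.content.utils.sqlalchemybridge.get_engine via the @patch
# decorators below, so the SQLAlchemy bridge reuses the Django test database connection.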
def get_engine(connection_string):
return django_connection_engine()
test_channel_id = "6199dde695db4ee4ab392222d5af1e5c"
@patch("kolibri.core.content.utils.sqlalchemybridge.get_engine", new=get_engine)
class AnnotationFromLocalFileAvailability(TransactionTestCase):
fixtures = ["content_test.json"]
def test_all_local_files_available(self):
LocalFile.objects.all().update(available=True)
set_leaf_node_availability_from_local_file_availability(test_channel_id)
self.assertTrue(all(File.objects.all().values_list("available", flat=True)))
self.assertTrue(
all(
ContentNode.objects.exclude(kind=content_kinds.TOPIC)
.exclude(files=None)
.values_list("available", flat=True)
)
)
def test_no_local_files_available(self):
LocalFile.objects.all().update(available=False)
set_leaf_node_availability_from_local_file_availability(test_channel_id)
self.assertEqual(File.objects.filter(available=True).count(), 0)
self.assertEqual(
ContentNode.objects.exclude(kind=content_kinds.TOPIC)
.filter(available=True)
.count(),
0,
)
def test_one_local_file_available(self):
LocalFile.objects.all().update(available=False)
LocalFile.objects.filter(id="6bdfea4a01830fdd4a585181c0b8068c").update(
available=True
)
set_leaf_node_availability_from_local_file_availability(test_channel_id)
self.assertTrue(
ContentNode.objects.get(id="32a941fb77c2576e8f6b294cde4c3b0c").available
)
self.assertFalse(
all(
ContentNode.objects.exclude(kind=content_kinds.TOPIC)
.exclude(id="32a941fb77c2576e8f6b294cde4c3b0c")
.values_list("available", flat=True)
)
)
def test_other_channel_node_still_available(self):
test = ContentNode.objects.filter(kind=content_kinds.VIDEO).first()
test.id = uuid.uuid4().hex
test.channel_id = uuid.uuid4().hex
test.available = True
test.parent = None
test.save()
set_leaf_node_availability_from_local_file_availability(test_channel_id)
test.refresh_from_db()
self.assertTrue(test.available)
def tearDown(self):
call_command("flush", interactive=False)
super(AnnotationFromLocalFileAvailability, self).tearDown()
@patch("kolibri.core.content.utils.sqlalchemybridge.get_engine", new=get_engine)
class AnnotationTreeRecursion(TransactionTestCase):
fixtures = ["content_test.json"]
def setUp(self):
super(AnnotationTreeRecursion, self).setUp()
ContentNode.objects.all().update(available=False)
def test_all_content_nodes_available(self):
ContentNode.objects.exclude(kind=content_kinds.TOPIC).update(available=True)
recurse_annotation_up_tree(channel_id="6199dde695db4ee4ab392222d5af1e5c")
self.assertTrue(
ContentNode.objects.get(id="da7ecc42e62553eebc8121242746e88a").available
)
self.assertTrue(
ContentNode.objects.get(id="2e8bac07947855369fe2d77642dfc870").available
)
def test_no_content_nodes_available(self):
ContentNode.objects.filter(kind=content_kinds.TOPIC).update(available=True)
recurse_annotation_up_tree(channel_id="6199dde695db4ee4ab392222d5af1e5c")
# Expect 0: although the fixture contains three childless topics, such nodes cannot exist in real databases,
# and topic availability for a channel is reset to False before availability is propagated up the tree.
self.assertEqual(
ContentNode.objects.filter(kind=content_kinds.TOPIC)
.filter(available=True)
.count(),
0,
)
def test_one_content_node_available(self):
ContentNode.objects.filter(id="32a941fb77c2576e8f6b294cde4c3b0c").update(
available=True
)
recurse_annotation_up_tree(channel_id="6199dde695db4ee4ab392222d5af1e5c")
# Check parent is available
self.assertTrue(
ContentNode.objects.get(id="da7ecc42e62553eebc8121242746e88a").available
)
def test_all_content_nodes_available_coach_content(self):
ContentNode.objects.exclude(kind=content_kinds.TOPIC).update(
available=True, coach_content=True
)
recurse_annotation_up_tree(channel_id="6199dde695db4ee4ab392222d5af1e5c")
self.assertTrue(
ContentNode.objects.get(id="da7ecc42e62553eebc8121242746e88a").coach_content
)
self.assertTrue(
ContentNode.objects.get(id="2e8bac07947855369fe2d77642dfc870").coach_content
)
def test_no_content_nodes_coach_content(self):
ContentNode.objects.all().update(available=True)
ContentNode.objects.all().update(coach_content=False)
recurse_annotation_up_tree(channel_id="6199dde695db4ee4ab392222d5af1e5c")
self.assertEqual(ContentNode.objects.filter(coach_content=True).count(), 0)
root = ChannelMetadata.objects.get(id=test_channel_id).root
self.assertEqual(root.num_coach_contents, 0)
def test_all_root_content_nodes_coach_content(self):
ContentNode.objects.all().update(available=True, coach_content=False)
root_node = ContentNode.objects.get(parent__isnull=True)
ContentNode.objects.filter(parent=root_node).exclude(
kind=content_kinds.TOPIC
).update(coach_content=True)
recurse_annotation_up_tree(channel_id="6199dde695db4ee4ab392222d5af1e5c")
root_node.refresh_from_db()
self.assertFalse(root_node.coach_content)
self.assertEqual(root_node.num_coach_contents, 2)
def test_one_root_content_node_coach_content(self):
ContentNode.objects.all().update(available=True, coach_content=False)
root_node = ContentNode.objects.get(parent__isnull=True)
node = (
ContentNode.objects.filter(parent=root_node)
.exclude(kind=content_kinds.TOPIC)
.first()
)
node.coach_content = True
node.save()
recurse_annotation_up_tree(channel_id="6199dde695db4ee4ab392222d5af1e5c")
root_node.refresh_from_db()
self.assertFalse(root_node.coach_content)
self.assertEqual(root_node.num_coach_contents, 1)
def test_one_root_topic_node_coach_content(self):
ContentNode.objects.all().update(available=True, coach_content=False)
root_node = ContentNode.objects.get(parent__isnull=True)
node = ContentNode.objects.filter(
parent=root_node, kind=content_kinds.TOPIC
).first()
node.coach_content = True
node.save()
recurse_annotation_up_tree(channel_id="6199dde695db4ee4ab392222d5af1e5c")
root_node.refresh_from_db()
self.assertFalse(root_node.coach_content)
self.assertEqual(root_node.num_coach_contents, 0)
def test_one_child_node_coach_content(self):
ContentNode.objects.all().update(available=True, coach_content=False)
root_node = ContentNode.objects.get(parent__isnull=True)
node = ContentNode.objects.filter(
parent=root_node, kind=content_kinds.TOPIC
).first()
ContentNode.objects.create(
title="test1",
id=uuid.uuid4().hex,
content_id=uuid.uuid4().hex,
channel_id=root_node.channel_id,
parent=node,
kind=content_kinds.VIDEO,
available=True,
coach_content=True,
)
recurse_annotation_up_tree(channel_id="6199dde695db4ee4ab392222d5af1e5c")
root_node.refresh_from_db()
node.refresh_from_db()
self.assertFalse(root_node.coach_content)
self.assertEqual(root_node.num_coach_contents, 1)
self.assertFalse(node.coach_content)
self.assertEqual(node.num_coach_contents, 1)
def test_one_child_coach_content_parent_no_siblings(self):
ContentNode.objects.all().update(available=True, coach_content=False)
root_node = ContentNode.objects.get(parent__isnull=True)
topic_node = ContentNode.objects.filter(
parent=root_node, kind=content_kinds.TOPIC
).first()
parent_node = ContentNode.objects.create(
title="test1",
id=uuid.uuid4().hex,
content_id=uuid.uuid4().hex,
channel_id=root_node.channel_id,
parent=topic_node,
kind=content_kinds.TOPIC,
available=True,
coach_content=False,
)
ContentNode.objects.create(
title="test2",
id=uuid.uuid4().hex,
content_id=uuid.uuid4().hex,
channel_id=root_node.channel_id,
parent=parent_node,
kind=content_kinds.VIDEO,
available=True,
coach_content=True,
)
ContentNode.objects.create(
title="test3",
id=uuid.uuid4().hex,
content_id=uuid.uuid4().hex,
channel_id=root_node.channel_id,
parent=parent_node,
kind=content_kinds.VIDEO,
available=True,
coach_content=False,
)
recurse_annotation_up_tree(channel_id="6199dde695db4ee4ab392222d5af1e5c")
parent_node.refresh_from_db()
self.assertFalse(parent_node.coach_content)
self.assertEqual(parent_node.num_coach_contents, 1)
def test_one_content_node_many_siblings_coach_content(self):
ContentNode.objects.filter(kind=content_kinds.TOPIC).update(available=True)
ContentNode.objects.filter(id="32a941fb77c2576e8f6b294cde4c3b0c").update(
coach_content=True
)
recurse_annotation_up_tree(channel_id="6199dde695db4ee4ab392222d5af1e5c")
# Check that the parent is not marked coach_content, because the coach content node has non-coach_content siblings
self.assertFalse(
ContentNode.objects.get(id="da7ecc42e62553eebc8121242746e88a").coach_content
)
def test_two_channels_no_annotation_collision_child_false(self):
root_node = ContentNode.objects.create(
title="test",
id=uuid.uuid4().hex,
content_id=uuid.uuid4().hex,
channel_id=uuid.uuid4().hex,
kind=content_kinds.TOPIC,
available=True,
coach_content=True,
)
ContentNode.objects.create(
title="test1",
id=uuid.uuid4().hex,
content_id=uuid.uuid4().hex,
channel_id=root_node.channel_id,
parent=root_node,
kind=content_kinds.VIDEO,
available=False,
coach_content=False,
)
recurse_annotation_up_tree(channel_id="6199dde695db4ee4ab392222d5af1e5c")
root_node.refresh_from_db()
self.assertTrue(root_node.available)
self.assertTrue(root_node.coach_content)
def test_two_channels_no_annotation_collision_child_true(self):
root_node = ContentNode.objects.create(
title="test",
id=uuid.uuid4().hex,
content_id=uuid.uuid4().hex,
channel_id=uuid.uuid4().hex,
kind=content_kinds.TOPIC,
available=False,
coach_content=False,
)
ContentNode.objects.create(
title="test1",
id=uuid.uuid4().hex,
content_id=uuid.uuid4().hex,
channel_id=root_node.channel_id,
parent=root_node,
kind=content_kinds.VIDEO,
available=True,
coach_content=True,
)
recurse_annotation_up_tree(channel_id="6199dde695db4ee4ab392222d5af1e5c")
root_node.refresh_from_db()
self.assertFalse(root_node.available)
self.assertFalse(root_node.coach_content)
def tearDown(self):
call_command("flush", interactive=False)
super(AnnotationTreeRecursion, self).tearDown()
@patch("kolibri.core.content.utils.sqlalchemybridge.get_engine", new=get_engine)
class LocalFileAvailableByChecksum(TransactionTestCase):
fixtures = ["content_test.json"]
def setUp(self):
super(LocalFileAvailableByChecksum, self).setUp()
LocalFile.objects.all().update(available=False)
def test_set_one_file(self):
file_id = "6bdfea4a01830fdd4a585181c0b8068c"
mark_local_files_as_available([file_id])
self.assertEqual(LocalFile.objects.filter(available=True).count(), 1)
self.assertTrue(LocalFile.objects.get(id=file_id).available)
def test_set_two_files(self):
file_id_1 = "6bdfea4a01830fdd4a585181c0b8068c"
file_id_2 = "e00699f859624e0f875ac6fe1e13d648"
mark_local_files_as_available([file_id_1, file_id_2])
self.assertEqual(LocalFile.objects.filter(available=True).count(), 2)
self.assertTrue(LocalFile.objects.get(id=file_id_1).available)
self.assertTrue(LocalFile.objects.get(id=file_id_2).available)
def tearDown(self):
call_command("flush", interactive=False)
super(LocalFileAvailableByChecksum, self).tearDown()
@patch("kolibri.core.content.utils.sqlalchemybridge.get_engine", new=get_engine)
class LocalFileUnAvailableByChecksum(TransactionTestCase):
fixtures = ["content_test.json"]
def setUp(self):
super(LocalFileUnAvailableByChecksum, self).setUp()
LocalFile.objects.all().update(available=True)
def test_set_one_file(self):
file_id = "6bdfea4a01830fdd4a585181c0b8068c"
mark_local_files_as_unavailable([file_id])
self.assertEqual(LocalFile.objects.filter(available=False).count(), 1)
self.assertFalse(LocalFile.objects.get(id=file_id).available)
def test_set_two_files(self):
file_id_1 = "6bdfea4a01830fdd4a585181c0b8068c"
file_id_2 = "e00699f859624e0f875ac6fe1e13d648"
mark_local_files_as_unavailable([file_id_1, file_id_2])
self.assertEqual(LocalFile.objects.filter(available=False).count(), 2)
self.assertFalse(LocalFile.objects.get(id=file_id_1).available)
self.assertFalse(LocalFile.objects.get(id=file_id_2).available)
def tearDown(self):
call_command("flush", interactive=False)
super(LocalFileUnAvailableByChecksum, self).tearDown()
mock_content_file = tempfile.mkstemp()
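# mkstemp() returns (fd, path); the path (index 1) is reused below as the return value of
# the mocked get_content_storage_file_path, standing in for a file that exists on disk.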
@patch("kolibri.core.content.utils.sqlalchemybridge.get_engine", new=get_engine)
class LocalFileByDisk(TransactionTestCase):
fixtures = ["content_test.json"]
file_id_1 = "6bdfea4a01830fdd4a585181c0b8068c"
file_id_2 = "e00699f859624e0f875ac6fe1e13d648"
def setUp(self):
super(LocalFileByDisk, self).setUp()
LocalFile.objects.all().update(available=False)
@patch(
"kolibri.core.content.utils.annotation.get_content_storage_file_path",
return_value=mock_content_file[1],
)
def test_set_one_file_not_list_exists(self, path_mock):
set_local_file_availability_from_disk(checksums=self.file_id_1)
self.assertEqual(LocalFile.objects.filter(available=True).count(), 1)
self.assertTrue(LocalFile.objects.get(id=self.file_id_1).available)
@patch(
"kolibri.core.content.utils.annotation.get_content_storage_file_path",
return_value="",
)
def test_set_one_file_not_list_not_exist(self, path_mock):
set_local_file_availability_from_disk(checksums=self.file_id_1)
self.assertEqual(LocalFile.objects.filter(available=True).count(), 0)
self.assertFalse(LocalFile.objects.get(id=self.file_id_1).available)
@patch(
"kolibri.core.content.utils.annotation.get_content_storage_file_path",
return_value=mock_content_file[1],
)
def test_set_one_file_exists(self, path_mock):
set_local_file_availability_from_disk(checksums=[self.file_id_1])
self.assertEqual(LocalFile.objects.filter(available=True).count(), 1)
self.assertTrue(LocalFile.objects.get(id=self.file_id_1).available)
@patch(
"kolibri.core.content.utils.annotation.get_content_storage_file_path",
return_value="",
)
def test_set_one_file_not_exist(self, path_mock):
LocalFile.objects.filter(id=self.file_id_1).update(available=True)
set_local_file_availability_from_disk(checksums=[self.file_id_1])
self.assertEqual(LocalFile.objects.filter(available=True).count(), 0)
self.assertFalse(LocalFile.objects.get(id=self.file_id_1).available)
@patch(
"kolibri.core.content.utils.annotation.get_content_storage_file_path",
return_value=mock_content_file[1],
)
def test_set_two_files_exist(self, path_mock):
set_local_file_availability_from_disk(
checksums=[self.file_id_1, self.file_id_2]
)
self.assertEqual(LocalFile.objects.filter(available=True).count(), 2)
self.assertTrue(LocalFile.objects.get(id=self.file_id_1).available)
self.assertTrue(LocalFile.objects.get(id=self.file_id_2).available)
@patch(
"kolibri.core.content.utils.annotation.get_content_storage_file_path",
side_effect=[mock_content_file[1], ""],
)
def test_set_two_files_one_exists(self, path_mock):
LocalFile.objects.filter(id=self.file_id_2).update(available=True)
set_local_file_availability_from_disk(
checksums=[self.file_id_1, self.file_id_2]
)
self.assertEqual(LocalFile.objects.filter(available=True).count(), 1)
self.assertTrue(LocalFile.objects.get(id=self.file_id_1).available)
self.assertFalse(LocalFile.objects.get(id=self.file_id_2).available)
@patch(
"kolibri.core.content.utils.annotation.get_content_storage_file_path",
return_value="",
)
def test_set_two_files_none_exist(self, path_mock):
LocalFile.objects.filter(id__in=[self.file_id_1, self.file_id_2]).update(
available=True
)
set_local_file_availability_from_disk(
checksums=[self.file_id_1, self.file_id_2]
)
self.assertEqual(LocalFile.objects.filter(available=True).count(), 0)
self.assertFalse(LocalFile.objects.get(id=self.file_id_1).available)
self.assertFalse(LocalFile.objects.get(id=self.file_id_2).available)
@patch(
"kolibri.core.content.utils.annotation.get_content_storage_file_path",
return_value="",
)
def test_set_all_files_none_exist(self, path_mock):
LocalFile.objects.update(available=True)
set_local_file_availability_from_disk()
self.assertEqual(LocalFile.objects.filter(available=True).count(), 0)
@patch(
"kolibri.core.content.utils.annotation.get_content_storage_file_path",
return_value=mock_content_file[1],
)
def test_set_all_files_all_exist(self, path_mock):
LocalFile.objects.update(available=False)
set_local_file_availability_from_disk()
self.assertEqual(LocalFile.objects.exclude(available=True).count(), 0)
@patch(
"kolibri.core.content.utils.annotation.get_content_storage_file_path",
side_effect=[mock_content_file[1]] * 2 + [""] * 3,
)
def test_set_all_files_two_exist(self, path_mock):
set_local_file_availability_from_disk()
self.assertEqual(LocalFile.objects.filter(available=True).count(), 2)
self.assertEqual(LocalFile.objects.exclude(available=True).count(), 3)
def test_set_bad_filenames(self):
local_files = list(LocalFile.objects.all())
LocalFile.objects.all().delete()
for i, lf in enumerate(local_files):
lf.id = "bananas" + str(i)
lf.save()
set_local_file_availability_from_disk()
self.assertEqual(LocalFile.objects.filter(available=True).count(), 0)
def tearDown(self):
call_command("flush", interactive=False)
super(LocalFileByDisk, self).tearDown()
mock_content_db_file = tempfile.mkstemp()
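# A second temporary file intended as a stand-in content database; note that the patches
# below actually reuse mock_content_file[1] as the return value of get_content_database_file_path.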
@patch("kolibri.core.content.utils.channel_import.import_channel_from_local_db")
class FixMultipleTreesWithIdOneTestCase(TransactionTestCase):
fixtures = ["content_test.json"]
@patch(
"kolibri.core.content.utils.annotation.get_content_database_file_path",
return_value=mock_content_file[1],
)
def test_extra_channel_contentdb_exists(self, path_mock, import_mock):
root_node = ContentNode.objects.create(
title="test",
id=uuid.uuid4().hex,
content_id=uuid.uuid4().hex,
channel_id=uuid.uuid4().hex,
)
ChannelMetadata.objects.create(
id=root_node.channel_id,
root=root_node,
name="test",
min_schema_version=CONTENT_SCHEMA_VERSION,
)
# Do this to sidestep django-mptt's automatic tree_id assignment
ContentNode.objects.filter(parent=None).update(tree_id=1)
self.assertEqual(ContentNode.objects.filter(parent=None, tree_id=1).count(), 2)
fix_multiple_trees_with_id_one()
self.assertEqual(ContentNode.objects.filter(parent=None, tree_id=1).count(), 1)
import_mock.assert_called_with(root_node.channel_id)
@patch(
"kolibri.core.content.utils.annotation.get_content_database_file_path",
return_value=mock_content_file[1],
)
def test_two_extra_channels_contentdb_exists(self, path_mock, import_mock):
root_node_1 = ContentNode.objects.create(
title="test",
id=uuid.uuid4().hex,
content_id=uuid.uuid4().hex,
channel_id=uuid.uuid4().hex,
)
ChannelMetadata.objects.create(
id=root_node_1.channel_id,
root=root_node_1,
name="test",
min_schema_version=CONTENT_SCHEMA_VERSION,
)
root_node_2 = ContentNode.objects.create(
title="test",
id=uuid.uuid4().hex,
content_id=uuid.uuid4().hex,
channel_id=uuid.uuid4().hex,
)
# Add an additional node so that root_node_1 channel is processed first.
ContentNode.objects.create(
title="test1",
id=uuid.uuid4().hex,
content_id=uuid.uuid4().hex,
channel_id=root_node_2.channel_id,
parent=root_node_2,
)
ChannelMetadata.objects.create(
id=root_node_2.channel_id,
root=root_node_2,
name="test",
min_schema_version=CONTENT_SCHEMA_VERSION,
)
# Do this to sidestep django-mptt's automatic tree_id assignment
ContentNode.objects.filter(parent=None).update(tree_id=1)
self.assertEqual(ContentNode.objects.filter(parent=None, tree_id=1).count(), 3)
fix_multiple_trees_with_id_one()
self.assertEqual(ContentNode.objects.filter(parent=None, tree_id=1).count(), 1)
import_mock.assert_has_calls(
[call(root_node_1.channel_id), call(root_node_2.channel_id)]
)
@patch(
"kolibri.core.content.utils.annotation.get_content_database_file_path",
return_value="",
)
def test_extra_channel_no_contentdb_exists(self, path_mock, import_mock):
root_node = ContentNode.objects.create(
title="test",
id=uuid.uuid4().hex,
content_id=uuid.uuid4().hex,
channel_id=uuid.uuid4().hex,
)
ChannelMetadata.objects.create(
id=root_node.channel_id,
root=root_node,
name="test",
min_schema_version=CONTENT_SCHEMA_VERSION,
)
# Do this to sidestep django-mptt's automatic tree_id assignment
ContentNode.objects.filter(parent=None).update(tree_id=1)
self.assertEqual(ContentNode.objects.filter(parent=None, tree_id=1).count(), 2)
fix_multiple_trees_with_id_one()
self.assertEqual(ContentNode.objects.filter(parent=None, tree_id=1).count(), 2)
import_mock.assert_not_called()
@patch(
"kolibri.core.content.utils.annotation.get_content_database_file_path",
side_effect=["", mock_content_file[1]],
)
def test_two_extra_channels_one_contentdb_exists(self, path_mock, import_mock):
root_node_1 = ContentNode.objects.create(
title="test",
id=uuid.uuid4().hex,
content_id=uuid.uuid4().hex,
channel_id=uuid.uuid4().hex,
)
ChannelMetadata.objects.create(
id=root_node_1.channel_id,
root=root_node_1,
name="test",
min_schema_version=CONTENT_SCHEMA_VERSION,
)
root_node_2 = ContentNode.objects.create(
title="test",
id=uuid.uuid4().hex,
content_id=uuid.uuid4().hex,
channel_id=uuid.uuid4().hex,
)
# Add an additional node so that root_node_1 channel is processed first.
ContentNode.objects.create(
title="test1",
id=uuid.uuid4().hex,
content_id=uuid.uuid4().hex,
channel_id=root_node_2.channel_id,
parent=root_node_2,
)
ChannelMetadata.objects.create(
id=root_node_2.channel_id,
root=root_node_2,
name="test",
min_schema_version=CONTENT_SCHEMA_VERSION,
)
# Do this to sidestep django-mptt's automatic tree_id assignment
ContentNode.objects.filter(parent=None).update(tree_id=1)
self.assertEqual(ContentNode.objects.filter(parent=None, tree_id=1).count(), 3)
fix_multiple_trees_with_id_one()
self.assertEqual(ContentNode.objects.filter(parent=None, tree_id=1).count(), 2)
with self.assertRaises(AssertionError):
import_mock.assert_called_with(root_node_1.channel_id)
import_mock.assert_called_with(root_node_2.channel_id)
class CalculateChannelFieldsTestCase(TestCase):
def setUp(self):
self.node = ContentNode.objects.create(
title="test",
id=uuid.uuid4().hex,
content_id=uuid.uuid4().hex,
channel_id=uuid.uuid4().hex,
available=True,
)
self.channel = ChannelMetadata.objects.create(
id=self.node.channel_id, name="channel", root=self.node
)
Language.objects.create(id="en", lang_code="en")
def test_calculate_included_languages(self):
calculate_included_languages(self.channel)
self.assertEqual(
list(self.channel.included_languages.values_list("id", flat=True)), []
)
ContentNode.objects.update(lang_id="en")
calculate_included_languages(self.channel)
self.assertEqual(
list(self.channel.included_languages.values_list("id", flat=True)), ["en"]
)
def test_calculate_total_resources(self):
local_file = LocalFile.objects.create(
id=uuid.uuid4().hex, extension="mp4", available=True, file_size=10
)
File.objects.create(
id=uuid.uuid4().hex,
local_file=local_file,
available=True,
contentnode=self.node,
)
calculate_total_resource_count(self.channel)
self.assertEqual(self.channel.total_resource_count, 1)
def test_calculate_published_size(self):
local_file = LocalFile.objects.create(
id=uuid.uuid4().hex, extension="mp4", available=True, file_size=10
)
File.objects.create(
id=uuid.uuid4().hex,
local_file=local_file,
available=True,
contentnode=self.node,
)
calculate_published_size(self.channel)
self.assertEqual(self.channel.published_size, 10)
def test_published_size_big_integer_field(self):
self.channel.published_size = (
2150000000
) # out of range for integer field on postgres
try:
self.channel.save()
except DataError:
self.fail("DataError: integer out of range")
|
|
from mobiletrans.mtdistmap.transit_network import TransitNetwork
"""
from mobiletrans.mtdistmap import cta_conn
tn = cta_conn.load_transitnetwork()
tn.shortest_path("Red_40330", 'Red_40900')
tn.shortest_path("Red_40900", 'Red_40330')
tn.shortest_path("Red_41320", 'Red_40650')
tn.shortest_path("Red_40650", 'Red_41320')
tn.shortest_path("Brn_41290", 'Brn_40710')
tn.shortest_path('Brn_40710', 'Brn_41290')
tn.shortest_path("Red_41090", 'Brn_41440').pprint()
tn.shortest_path("Red_41090", 'Brn_41440', reverse=True).pprint()
tn.shortest_path("Blue_40890", 'Blue_40490', reverse=False).pprint()
tn.shortest_path("Blue_40890", 'Blue_40490', reverse=True).pprint()
"""
def load_transitnetwork():
tn = TransitNetwork()
wait_time = 5.0
# transfer time for blue line to library at Jackson
bluelib_wait_time = 10.0
# transfer time for red line to library at Jackson
redlib_wait_time = 10.0
# transfer time between red line and blue line at Jackson
red_blue_jackson_wait_time = 10.0
#>>> trans(['Brn', 'P'], '40460', desc="Merchandise Mart")
tn.add_station('Brn_40460', {'P_40460':wait_time, 'Brn_40730':2, 'Brn_40710':2, }, desc='Merchandise Mart') # done
tn.add_station('P_40460', {'Brn_40460':wait_time, 'P_40380':3 }, desc='Merchandise Mart') # done loop
#>>> trans(['Brn', 'P', 'Org', 'Pink'], '40730', desc="Washington/Wells")
tn.add_station('Brn_40730', {'Org_40730':wait_time, 'P_40730':wait_time, 'Pink_40730':wait_time, 'Brn_40040':1, }, desc='Washington/Wells') # done
tn.add_station('P_40730', {'Brn_40730':wait_time, 'Org_40730':wait_time, 'Pink_40730':wait_time, 'P_40460':2, }, desc='Washington/Wells') # done
tn.add_station('Org_40730', {'Brn_40730':wait_time, 'P_40730':wait_time, 'Pink_40730':wait_time, 'Org_40380':3 }, desc='Washington/Wells') # done
tn.add_station('Pink_40730', {'Brn_40730':wait_time, 'Org_40730':wait_time, 'P_40730':wait_time, 'Pink_41160':3}, desc='Washington/Wells') # done clinton estimate
#>>> trans(['Brn', 'P', 'Org', 'Pink'], '40040', desc="Quincy")
tn.add_station('Brn_40040', {'Pink_40040':wait_time, 'P_40040':wait_time, 'Org_40040':wait_time, 'Brn_40160':1, }, desc='Quincy') # done
tn.add_station('P_40040', {'Brn_40040':wait_time, 'Pink_40040':wait_time, 'Org_40040':wait_time, 'P_40730':1, }, desc='Quincy', ) # done
tn.add_station('Org_40040', {'Brn_40040':wait_time, 'Pink_40040':wait_time, 'P_40040':wait_time, 'Org_40730':1, }, desc='Quincy') # done
tn.add_station('Pink_40040', {'Brn_40040':wait_time, 'P_40040':wait_time, 'Org_40040':wait_time, 'Pink_40730':1 }, desc='Quincy') # done
#>>> trans(['Brn', 'P', 'Org', 'Pink'], '40160', desc="LaSalle")
tn.add_station('Brn_40160', {'P_40160':wait_time, 'Pink_40160':wait_time, 'Org_40160':wait_time, 'Brn_40850':1, }, desc='LaSalle') # done
tn.add_station('P_40160', {'Pink_40160':wait_time, 'Brn_40160':wait_time, 'Org_40160':wait_time, 'P_40040':1, }, desc='LaSalle') # done
tn.add_station('Org_40160', {'P_40160':wait_time, 'Pink_40160':wait_time, 'Brn_40160':wait_time, 'Org_40040':1, }, desc='LaSalle') # done
tn.add_station('Pink_40160', {'P_40160':wait_time, 'Brn_40160':wait_time, 'Org_40160':wait_time, 'Pink_40040':1 }, desc='LaSalle') # done
#>>> trans(['Brn', 'P', 'Org', 'Pink'], '40850', desc="Library")
tn.add_station('Brn_40850', {'Pink_40850':wait_time, 'P_40850':wait_time, 'Org_40850':wait_time, 'Blue_40070':bluelib_wait_time, 'Red_40560':redlib_wait_time, 'Brn_40680':2, }, desc='Library') # done
tn.add_station('P_40850', {'Brn_40850':wait_time, 'Pink_40850':wait_time, 'Org_40850':wait_time, 'Blue_40070':bluelib_wait_time, 'Red_40560':redlib_wait_time, 'P_40160':1, }, desc='Library') # done
tn.add_station('Org_40850', {'Brn_40850':wait_time, 'Pink_40850':wait_time, 'P_40850':wait_time, 'Blue_40070':bluelib_wait_time, 'Red_40560':redlib_wait_time, 'Org_40160':1, }, desc='Library') # done
tn.add_station('Pink_40850', {'Brn_40850':wait_time, 'P_40850':wait_time, 'Org_40850':wait_time, 'Blue_40070':bluelib_wait_time, 'Red_40560':redlib_wait_time, 'Pink_40160':1, }, desc='Library') # done
#>>> trans(['Brn', 'P', 'Org', 'G', 'Pink'], '40680', desc="Adams/Wabash")
tn.add_station('Brn_40680', {'P_40680':wait_time, 'G_40680':wait_time, 'Org_40680':wait_time, 'Pink_40680':wait_time, 'Brn_40640':1, }, desc='Adams/Wabash') # done
tn.add_station('P_40680', {'Brn_40680':wait_time, 'G_40680':wait_time, 'Org_40680':wait_time, 'Pink_40680':wait_time, 'P_40850':2, }, desc='Adams/Wabash') # done
tn.add_station('Org_40680', {'Brn_40680':wait_time, 'P_40680':wait_time, 'G_40680':wait_time, 'Pink_40680':wait_time, 'Org_41400':4, }, desc='Adams/Wabash') # done
tn.add_station('G_40680', {'Brn_40680':wait_time, 'P_40680':wait_time, 'Org_40680':wait_time, 'Pink_40680':wait_time, 'G_41400':3, 'G_40640':1, }, desc='Adams/Wabash') # done
tn.add_station('Pink_40680', {'Brn_40680':wait_time, 'P_40680':wait_time, 'G_40680':wait_time, 'Org_40680':wait_time, 'Pink_40850':2, }, desc='Adams/Wabash') # done
#>>> trans(['Brn', 'P', 'Org', 'G', 'Pink'], '40640', desc="Madison/Wabash")
tn.add_station('Brn_40640', {'Org_40640':wait_time, 'P_40640':wait_time, 'Pink_40640':wait_time, 'G_40640':wait_time, 'Brn_40200':1, }, desc='Madison/Wabash') # done
tn.add_station('P_40640', {'Org_40640':wait_time, 'Pink_40640':wait_time, 'Brn_40640':wait_time, 'G_40640':wait_time, 'P_40680':1, }, desc='Madison/Wabash') # done
tn.add_station('Org_40640', {'P_40640':wait_time, 'Pink_40640':wait_time, 'Brn_40640':wait_time, 'G_40640':wait_time, 'Org_40680':1, }, desc='Madison/Wabash') # done
tn.add_station('G_40640', {'Org_40640':wait_time, 'P_40640':wait_time, 'Pink_40640':wait_time, 'Brn_40640':wait_time, 'G_40680':1, 'G_40200':1, }, desc='Madison/Wabash') # done
tn.add_station('Pink_40640', {'Org_40640':wait_time, 'P_40640':wait_time, 'Brn_40640':wait_time, 'G_40640':wait_time, 'Pink_40680':1, }, desc='Madison/Wabash') # done
#>>> trans(['Brn', 'P', 'Org', 'G', 'Pink'], '40200', desc="Randolph/Wabash")
tn.add_station('Brn_40200', {'G_40200':wait_time, 'P_40200':wait_time, 'Org_40200':wait_time, 'Pink_40200':wait_time, 'Brn_40260':1, }, desc='Randolph/Wabash') # done
tn.add_station('P_40200', {'Brn_40200':wait_time, 'G_40200':wait_time, 'Org_40200':wait_time, 'Pink_40200':wait_time, 'P_40640':1, }, desc='Randolph/Wabash') # done
tn.add_station('Org_40200', {'Brn_40200':wait_time, 'G_40200':wait_time, 'P_40200':wait_time, 'Pink_40200':wait_time, 'Org_40640':1, }, desc='Randolph/Wabash') # done
tn.add_station('G_40200', {'Brn_40200':wait_time, 'P_40200':wait_time, 'Org_40200':wait_time, 'Pink_40200':wait_time, 'G_40640':1, 'G_40260':1, }, desc='Randolph/Wabash') # done
tn.add_station('Pink_40200', {'Brn_40200':wait_time, 'G_40200':wait_time, 'P_40200':wait_time, 'Org_40200':wait_time, 'Pink_40640':1 }, desc='Randolph/Wabash') # done
#>>> trans(['Brn', 'P', 'Org', 'G', 'Pink'], '40260', desc="State/Lake")
tn.add_station('Brn_40260', {'G_40260':wait_time, 'Org_40260':wait_time, 'Pink_40260':wait_time, 'P_40260':wait_time, 'Red_41660':wait_time, 'Brn_40380':1 }, desc='State/Lake') # done
tn.add_station('P_40260', {'G_40260':wait_time, 'Org_40260':wait_time, 'Pink_40260':wait_time, 'Brn_40260':wait_time, 'Red_41660':wait_time, 'P_40200':1, }, desc='State/Lake') # done
tn.add_station('Org_40260', {'G_40260':wait_time, 'Pink_40260':wait_time, 'Brn_40260':wait_time, 'P_40260':wait_time, 'Red_41660':wait_time, 'Org_40200':1 }, desc='State/Lake') # done
tn.add_station('G_40260', {'Org_40260':wait_time, 'Pink_40260':wait_time, 'Brn_40260':wait_time, 'P_40260':wait_time, 'Red_41660':wait_time, 'G_40200':1, 'G_40380':1 }, desc='State/Lake') # done
tn.add_station('Pink_40260', {'G_40260':wait_time, 'Org_40260':wait_time, 'Brn_40260':wait_time, 'P_40260':wait_time, 'Red_41660':wait_time, 'Pink_40200':1 }, desc='State/Lake') # done
#>>> trans(['Brn', 'P', 'Org', 'G', 'Blue', 'Pink'], '40380', desc="Clark/Lake")
tn.add_station('Brn_40380', {'Blue_40380':wait_time, 'Pink_40380':wait_time, 'Org_40380':wait_time, 'G_40380':wait_time, 'P_40380':wait_time, 'Brn_40460':4, }, desc='Clark/Lake') # done
tn.add_station('P_40380', {'Blue_40380':wait_time, 'Brn_40380':wait_time, 'Pink_40380':wait_time, 'Org_40380':wait_time, 'G_40380':wait_time, 'P_40260':1, }, desc='Clark/Lake') # done
tn.add_station('Org_40380', {'Blue_40380':wait_time, 'Brn_40380':wait_time, 'Pink_40380':wait_time, 'G_40380':wait_time, 'P_40380':wait_time, 'Org_40260':1, }, desc='Clark/Lake') # done
tn.add_station('G_40380', {'Blue_40380':wait_time, 'Brn_40380':wait_time, 'Pink_40380':wait_time, 'Org_40380':wait_time, 'P_40380':wait_time, 'G_40260': 1, 'G_41160':2 }, desc='Clark/Lake') # done
tn.add_station('Blue_40380', {'Brn_40380':wait_time, 'Pink_40380':wait_time, 'Org_40380':wait_time, 'G_40380':wait_time, 'P_40380':wait_time, 'Blue_40490':2, 'Blue_40370':2 }, desc='Clark/Lake') # done
tn.add_station('Pink_40380', {'Blue_40380':wait_time, 'Brn_40380':wait_time, 'Org_40380':wait_time, 'G_40380':wait_time, 'P_40380':wait_time, 'Pink_40260':1, }, desc='Clark/Lake') # done
#>>> trans(['Red', 'Org', 'G', ], '41400', desc="Roosevelt")
tn.add_station('Red_41400', {'Red_41000':4, 'Org_41400':wait_time, 'G_41400':wait_time, }, desc='Roosevelt') # done loop
tn.add_station('Org_41400', {'Org_41130':4, 'Red_41400':wait_time, 'G_41400':wait_time, 'Org_40850':3 }, desc='Roosevelt') # done loop
tn.add_station('G_41400', {'G_41120':5, 'Org_41400':wait_time, 'Red_41400':wait_time, 'G_40680':3, }, desc='Roosevelt') # done loop
#>>> trans(['Pink', 'G', ], '41160', desc="Clinton")
tn.add_station('Pink_41160', {'G_41160':wait_time, 'Pink_40380':2, 'Pink_morgan':1}, desc='Clinton') # done loop
tn.add_station('G_41160', {'Pink_41160':wait_time, 'G_40380':2, 'G_morgan':1, }, desc='Clinton') # done loop
# Harrison Red
tn.add_station('Red_41490', { 'Red_41400':1, 'Red_40560':1 }, desc='Harrison') # done
# Jackson Red
tn.add_station('Red_40560', { 'Red_41490':1, 'Red_41090':2, 'Blue_40070':red_blue_jackson_wait_time, 'Brn_40850':redlib_wait_time, 'P_40850':redlib_wait_time, 'Org_40850':redlib_wait_time, 'Pink_40850':redlib_wait_time }, desc='Jackson') # done
# Monroe Red
tn.add_station('Red_41090', { 'Red_40560':2, 'Red_41660':1 }, desc='Monroe') # done
# Lake Red
tn.add_station('Red_41660', { 'Red_41090':1, 'Red_40330':1,'Brn_40260':wait_time, 'P_40260':wait_time, 'Org_40260':wait_time, 'G_40260':wait_time, 'Pink_40260':wait_time, }, desc='Lake Red') # done
# Grand Red
tn.add_station('Red_40330', { 'Red_41660':1, 'Red_41450':1, }, desc='Grand Red') # done loop
# Grand Blue
tn.add_station('Blue_40490', { 'Blue_40380':2, 'Blue_41410':2 }, desc='Grand Blue') # done loop
# Washington Blue
tn.add_station('Blue_40370', { 'Blue_40380':2, 'Blue_40790':1 }, desc='Washington') # done
# Monroe Blue
tn.add_station('Blue_40790', { 'Blue_40070':1, 'Blue_40370':1 }, desc='Monroe') # done
# Jackson Blue
tn.add_station('Blue_40070', { 'Blue_40790':1, 'Blue_41340':1, 'Red_40560':red_blue_jackson_wait_time, 'Brn_40850':bluelib_wait_time, 'P_40850':bluelib_wait_time, 'Org_40850':bluelib_wait_time, 'Pink_40850':bluelib_wait_time}, desc='Jackson') # done
# VanBuren Blue
tn.add_station('Blue_41340', { 'Blue_40070':1, 'Blue_40430':1, }, desc='VanBuren') # done loop
### Red North Side ###########################################################
# Chicago Red
tn.add_station('Red_41450', { 'Red_40330':1, 'Red_40630':2 }, desc='Chicago Red') # done
# Clark/Division Red
tn.add_station('Red_40630', { 'Red_41450':2, 'Red_40650':3 }, desc='Clark/Division Red') # done
# North/Clybourn Red
tn.add_station('Red_40650', { 'Red_40630':3, 'Red_41220':2 }, desc='North/Clybourn Red') # done
# Fullerton
tn.add_station('Red_41220', { 'Red_40650':2, 'Red_41320':3, 'Brn_41220':wait_time, 'P_41220':wait_time, }, desc='Fullerton') # done
# Belmont
tn.add_station('Red_41320', { 'Red_41220':3, 'Red_41420':2, 'Brn_41320':wait_time, 'P_41320':wait_time, }, desc='Belmont') # done
# Addison Red
tn.add_station('Red_41420', { 'Red_41320':2, 'Red_40080':2 }, desc='Addison Red') # done
# Sheridan Red
tn.add_station('Red_40080', { 'Red_41420':2, 'Red_40540':3 }, desc='Sheridan Red') # done
# Wilson Red
tn.add_station('Red_40540', { 'Red_40080':3, 'Red_40770':1 }, desc='Wilson Red') # done
# Lawrence Red
tn.add_station('Red_40770', { 'Red_40540':1, 'Red_41200':1 }, desc='Lawrence Red') # done
# Argyle Red
tn.add_station('Red_41200', { 'Red_40770':1, 'Red_40340':1 }, desc='Argyle Red') # done
# Berwyn Red
tn.add_station('Red_40340', { 'Red_41200':1, 'Red_41380':1 }, desc='Berwyn Red') # done
# Bryn Mawr Red
tn.add_station('Red_41380', { 'Red_40340':1, 'Red_40880':2 }, desc='Bryn Mawr Red') # done
# Thorndale Red
tn.add_station('Red_40880', { 'Red_41380':2, 'Red_40760':1 }, desc='Thorndale Red') # done
# Granville Red
tn.add_station('Red_40760', { 'Red_40880':1, 'Red_41300':2 }, desc='Granville Red') # done
# Loyola Red
tn.add_station('Red_41300', { 'Red_40760':2, 'Red_40100':2 }, desc='Loyola Red') # done
# Morse Red
tn.add_station('Red_40100', { 'Red_41300':2, 'Red_41190':1 }, desc='Morse Red') # done
# Jarvis Red
tn.add_station('Red_41190', { 'Red_40100':1, 'Red_40900':2 }, desc='Jarvis Red') # done
# Howard Red
tn.add_station('Red_40900', { 'Red_41190':2, 'P_40900':wait_time, }, desc='Howard Red') # done north side
# Cermak - Chinatown Red
tn.add_station('Red_41000', {'Red_41400':4, 'Red_40190':4 }, desc='Cermak - Chinatown Red') # done south
# Sox 35th Red
tn.add_station('Red_40190', {'Red_41000':4, 'Red_41230':3 }, desc='Sox 35th Red') # done
# 47th Red
tn.add_station('Red_41230', {'Red_40190':3, 'Red_41170':3 }, desc='47th Red') # done
# Garfield Red
tn.add_station('Red_41170', {'Red_41230': 3, 'Red_40910':2 , }, desc='Garfield Red') # done
# 63rd Red
tn.add_station('Red_40910', {'Red_41170':2 , 'Red_40990':3 , }, desc='63rd Red') # done
# 69th Red
tn.add_station('Red_40990', {'Red_40910':3, 'Red_40240':4 , }, desc='69th Red') # done
# 79th Red
tn.add_station('Red_40240', {'Red_40990':4, 'Red_41430':3 , }, desc='79th Red') # done
# 87th Red
tn.add_station('Red_41430', {'Red_40240':3, 'Red_40450':2 , }, desc='87th Red') # done
# 95th Red
tn.add_station('Red_40450', {'Red_41430':2 , }, desc='95th Dan Ryan Red') # done
### Brown North Side ###########################################################
# Chicago Brown
tn.add_station('Brn_40710', { 'Brn_40460':2, 'Brn_40800':4, 'P_40710':wait_time, }, desc='Chicago Brown') # done
# Sedgwick Brown
tn.add_station('Brn_40800', { 'Brn_40710':4, 'Brn_40660':3, 'P_40800':wait_time }, desc='Sedgwick Brown') # done
# Armitage Brown
tn.add_station('Brn_40660', { 'Brn_40800':3, 'Brn_41220':2, 'P_40660':wait_time, }, desc='Armitage Brown') # done
# Fullerton Brown
tn.add_station('Brn_41220', { 'Brn_40660':2, 'Brn_40530':1, 'Red_41220':wait_time, 'P_41220':wait_time, }, desc='Fullerton Brown') # done
# Diversey Brown
tn.add_station('Brn_40530', { 'Brn_41220':1, 'Brn_41210':1, 'P_40530':wait_time, }, desc='Diversey Brown') # done
# Wellington Brown
tn.add_station('Brn_41210', { 'Brn_40530':1, 'Brn_41320':1, 'P_41210':wait_time }, desc='Wellington Brown') # done
# Belmont Brown
tn.add_station('Brn_41320', { 'Brn_41210':1, 'Brn_40360':2, 'Red_41320':wait_time, 'P_41320':wait_time }, desc='Belmont Brown') # done
# Southport Brown
tn.add_station('Brn_40360', { 'Brn_41320':2, 'Brn_41310':1, }, desc='Southport Brown') # done
# Paulina Brown
tn.add_station('Brn_41310', { 'Brn_40360':1, 'Brn_41440':2, }, desc='Paulina Brown') # done
# Addison Brown
tn.add_station('Brn_41440', { 'Brn_41310':2, 'Brn_41460':1, }, desc='Addison Brown') # done
# Irving Park Brown
tn.add_station('Brn_41460', { 'Brn_41440':1, 'Brn_41500':1, }, desc='Irving Park Brown') # done
# Montrose Brown
tn.add_station('Brn_41500', { 'Brn_41460':1, 'Brn_40090':2, }, desc='Montrose Brown') # done
# Damen Brown
tn.add_station('Brn_40090', { 'Brn_41500':2, 'Brn_41480':1, }, desc='Damen Brown') # done
# Western Brown
tn.add_station('Brn_41480', { 'Brn_40090':1, 'Brn_41010':2, }, desc='Western Brown') # done
# Rockwell Brown
tn.add_station('Brn_41010', { 'Brn_41480':2, 'Brn_40870':1, }, desc='Rockwell Brown') # done
# Francisco Brown
tn.add_station('Brn_40870', { 'Brn_41010':1, 'Brn_41180':1, }, desc='Francisco Brown') # done
# Kedzie Brown
tn.add_station('Brn_41180', { 'Brn_40870':1, 'Brn_41290':2, }, desc='Kedzie Brown') # done
# Kimball Brown
tn.add_station('Brn_41290', { 'Brn_41180':2, }, desc='Kimball Brown') # done
### Purple North Side ###########################################################
# Chicago Purple
tn.add_station('P_40710', { 'P_40460':2, 'P_40800':4, 'Brn_40710':wait_time }, desc='Chicago Purple') # done
# Sedgwick Purple
tn.add_station('P_40800', { 'P_40710':4, 'P_40660':3, 'Brn_40800':wait_time }, desc='Sedgwick Purple') # done
# Armitage Purple
tn.add_station('P_40660', { 'P_40800':3, 'P_41220':2, 'Brn_40660':wait_time, }, desc='Armitage Purple') # done
# Fullerton Purple
tn.add_station('P_41220', { 'P_40660':2, 'P_40530':1, 'Brn_41220':wait_time, 'Red_41220':wait_time }, desc='Fullerton Purple') # done
# Diversey Purple
tn.add_station('P_40530', { 'P_41220':1, 'P_41210':1, 'Brn_40530':wait_time,}, desc='Diversey Purple') # done
# Wellington Purple
tn.add_station('P_41210', { 'P_40530':1, 'P_41320':1, 'Brn_41210':wait_time,}, desc='Wellington Purple') # done
# Belmont Purple
tn.add_station('P_41320', { 'P_41210':1, 'P_40900':14, 'Brn_41320':wait_time, 'Red_41320':wait_time, }, desc='Belmont Purple') # done
# Howard Purple
tn.add_station('P_40900', { 'P_41320':14, 'Red_40900':wait_time }, desc='Howard Purple') # done with north side, needs purple north
### Blue West Side ###########################################################
# O'Hare Blue
tn.add_station('Blue_40890', { 'Blue_40820':5, }, desc="O'Hare Blue") # done
# Rosemont Blue
tn.add_station('Blue_40820', { 'Blue_40890':5, 'Blue_40230':2 }, desc="Rosemont Blue") # done
# Cumberland Blue
tn.add_station('Blue_40230', { 'Blue_40820':2, 'Blue_40750':3 }, desc="Cumberland Blue") # done
# Harlem Blue
tn.add_station('Blue_40750', { 'Blue_40230':3, 'Blue_41280':4 }, desc="Harlem Blue") # done
# Jefferson Park Blue
tn.add_station('Blue_41280', { 'Blue_40750':4, 'Blue_41330':2 }, desc="Jefferson Park Blue") # done
# Montrose Blue
tn.add_station('Blue_41330', { 'Blue_41280':2, 'Blue_40550':2 }, desc="Montrose Blue") # done
# Irving Park Blue
tn.add_station('Blue_40550', { 'Blue_41330':2, 'Blue_41240':2 }, desc="Irving Park Blue") # done
# Addison Blue
tn.add_station('Blue_41240', { 'Blue_40550':2, 'Blue_40060':2 }, desc="Addison Blue") # done
# Belmont Blue
tn.add_station('Blue_40060', { 'Blue_41240':2, 'Blue_41020':2 }, desc="Belmont Blue") # done
# Logan Square Blue
tn.add_station('Blue_41020', { 'Blue_40060':2, 'Blue_40570':2 }, desc="Logan Square Blue") # done
# California Blue
tn.add_station('Blue_40570', { 'Blue_41020':2, 'Blue_40670':2 }, desc="California Blue") # done
# Western Blue
tn.add_station('Blue_40670', { 'Blue_40570':2, 'Blue_40590':1 }, desc="Western Blue") # done
# Damen Blue
tn.add_station('Blue_40590', { 'Blue_40670':1, 'Blue_40320':1 }, desc="Damen Blue") # done
# Division Blue
tn.add_station('Blue_40320', { 'Blue_40590':1, 'Blue_41410':2 }, desc="Division Blue") # done
# Chicago Blue
tn.add_station('Blue_41410', { 'Blue_40320':2, 'Blue_40490':2 }, desc="Chicago Blue") # done
### Green West Side ###########################################################
# Harlem/Lake Green
tn.add_station('G_40020', { 'G_41350':1, }, desc="Harlem/Lake Green") # done
# Oak Park Green
tn.add_station('G_41350', { 'G_40020':1, 'G_40610':2 }, desc="Oak Park Green") # done
# Ridgeland Green
tn.add_station('G_40610', { 'G_41350':2, 'G_41260':1 }, desc="Ridgeland Green") # done
# Austin Green
tn.add_station('G_41260', { 'G_40610':1, 'G_40280':2 }, desc="Austin Green") # done
# Central Green
tn.add_station('G_40280', { 'G_41260':2, 'G_40700':2 }, desc="Central Green") # done
# Laramie Green
tn.add_station('G_40700', { 'G_40280':2, 'G_40480':1 }, desc="Laramie Green") # done
# Cicero Green
tn.add_station('G_40480', { 'G_40700':1, 'G_40030':2 }, desc="Cicero Green") # done
# Pulaski Green
tn.add_station('G_40030', { 'G_40480':2, 'G_41670':2 }, desc="Pulaski Green") # done
# Conservatory Green
tn.add_station('G_41670', { 'G_40030':2, 'G_41070':1 }, desc="Conservatory Green") # done
# Kedzie Green
tn.add_station('G_41070', { 'G_41670':1, 'G_41360':1 }, desc="Kedzie Green") # done
# California Green
tn.add_station('G_41360', { 'G_41070':1, 'G_40170':3 }, desc="California Green") # done
# Ashland Green
tn.add_station('G_40170', { 'G_41360':3, 'G_morgan':2, 'Pink_40170':wait_time }, desc="Ashland Green") # partial
# Morgan Green
tn.add_station('G_morgan', { 'G_40170':2, 'G_41160':1 }, desc="Morgan Green") # partial
# 35th Bronzeville Green
tn.add_station('G_41120', { 'G_41400':5, 'G_40300':2 }, desc="35th Bronzeville Green") # done
# Indiana Green
tn.add_station('G_40300', { 'G_41120':2, 'G_41270':2 }, desc="Indiana Green") # done
# 43rd Green
tn.add_station('G_41270', { 'G_40300':2, 'G_41080':1 }, desc="43rd Green") # done
# 47th Green
tn.add_station('G_41080', { 'G_41270':1, 'G_40130':2 }, desc="47th Green") # done
# 51st Green
tn.add_station('G_40130', { 'G_41080':2, 'G_40510':1 }, desc="51st Green") # done
# Garfield Green
tn.add_station('G_40510', { 'G_40130':1, 'G_40940':7, 'G_41140':4 }, desc="Garfield Green") # done
# Halsted Green
tn.add_station('G_40940', { 'G_40510':7, 'G_40290':8 }, desc="Halsted Green") # done
# Ashland Green
tn.add_station('G_40290', { 'G_40940':8, }, desc="Ashland Green") # done
# King Drive Green
tn.add_station('G_41140', { 'G_40510':4, 'G_40720':2 }, desc="King Drive Green") # done
# Cottage Grove Green
tn.add_station('G_40720', { 'G_41140':2, }, desc="Cottage Grove Green") # done
### Blue South West Side ###########################################################
# Forest Park Blue
tn.add_station('Blue_40390', { 'Blue_40980':2, }, desc="Forest Park Blue") # done
# Harlem Blue
tn.add_station('Blue_40980', { 'Blue_40390':2, 'Blue_40180':2 }, desc="Harlem Blue") # done
# Oak Park Blue
tn.add_station('Blue_40180', { 'Blue_40980':2, 'Blue_40010':2 }, desc="Oak Park Blue") # done
# Austin Blue
tn.add_station('Blue_40010', { 'Blue_40180':2, 'Blue_40970':3 }, desc="Austin Blue") # done
# Cicero Blue
tn.add_station('Blue_40970', { 'Blue_40010':3, 'Blue_40920':3 }, desc="Cicero Blue") # done
# Pulaski Blue
tn.add_station('Blue_40920', { 'Blue_40970':3, 'Blue_40250':2 }, desc="Pulaski Blue") # done
# Kedzie Blue
tn.add_station('Blue_40250', { 'Blue_40920':2, 'Blue_40220':3 }, desc="Kedzie Blue") # done
# Western Blue
tn.add_station('Blue_40220', { 'Blue_40250':3, 'Blue_40810':3 }, desc="Western Blue") # done
# Illinois Medical Center Blue
tn.add_station('Blue_40810', { 'Blue_40220':3, 'Blue_40470':3 }, desc="Illinois Medical Center Blue") # done
# Racine Blue
tn.add_station('Blue_40470', { 'Blue_40810':3, 'Blue_40350':1 }, desc="Racine Blue") # done
# UIC Blue
tn.add_station('Blue_40350', { 'Blue_40470':1, 'Blue_40430':2 }, desc="UIC Blue") # done
# Clinton Blue
tn.add_station('Blue_40430', { 'Blue_40350':2, 'Blue_41340':1 }, desc="Clinton Blue") # done
### Pink West Side ###########################################################
# 54th/Cermak Pink
tn.add_station('Pink_40580', { 'Pink_40420':1, }, desc="54th/Cermak Pink") # done
# Cicero Pink
tn.add_station('Pink_40420', { 'Pink_40580':1, 'Pink_40600':2 }, desc="Cicero Pink") # done
# Kostner Pink
tn.add_station('Pink_40600', { 'Pink_40420':2, 'Pink_40150':1 }, desc="Kostner Pink") # done
# Pulaski Pink
tn.add_station('Pink_40150', { 'Pink_40600':1, 'Pink_40780':2 }, desc="Pulaski Pink") # done
# Central Park Pink
tn.add_station('Pink_40780', { 'Pink_40150':2, 'Pink_41040':1 }, desc="Central Park Pink") # done
# Kedzie Pink
tn.add_station('Pink_41040', { 'Pink_40780':1, 'Pink_40440':2 }, desc="Kedzie Pink") # done
# California Pink
tn.add_station('Pink_40440', { 'Pink_41040':2, 'Pink_40740':2 }, desc="California Pink") # done
# Western Pink
tn.add_station('Pink_40740', { 'Pink_40440':2, 'Pink_40210':1 }, desc="Western Pink") # done
# Damen Pink
tn.add_station('Pink_40210', { 'Pink_40740':1, 'Pink_40830':2 }, desc="Damen Pink") # done
# 18th Pink
tn.add_station('Pink_40830', { 'Pink_40210':2, 'Pink_41030':2 }, desc="18th Pink") # done
# Polk Pink
tn.add_station('Pink_41030', { 'Pink_40830':2, 'Pink_40170':3 }, desc="Polk Pink") # done
# Ashland Pink
tn.add_station('Pink_40170', { 'Pink_41030':3, 'Pink_morgan':2, 'G_40170':wait_time}, desc="Ashland Pink") # done
# Morgan Pink
tn.add_station('Pink_morgan', { 'Pink_40170':2, 'Pink_41160':1, 'G_morgan':wait_time}, desc="Morgan Pink")
### Orange South Side ###########################################################
# Halsted Orange
tn.add_station('Org_41130', {'Org_41400':4, 'Org_41060':2 }, desc='Halsted Orange') # done
# Ashland Orange
tn.add_station('Org_41060', {'Org_41130':2, 'Org_40120':2, }, desc='Ashland Orange') # done
# 35th/Archer Orange
tn.add_station('Org_40120', {'Org_41060':2, 'Org_40310':4 }, desc='35th/Archer Orange') # done
# Western Orange
tn.add_station('Org_40310', {'Org_40120':4, 'Org_41150':2, }, desc='Western Orange') # done
# Kedzie Orange
tn.add_station('Org_41150', {'Org_40310':2, 'Org_40960':2 }, desc='Kedzie Orange') # done
# Pulaski Orange
tn.add_station('Org_40960', {'Org_41150':2, 'Org_40930':7 }, desc='Pulaski Orange') # done
# Midway Orange
tn.add_station('Org_40930', {'Org_40960':7 }, desc='Midway Orange') # done
return tn
|
|
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""vtlookup test class."""
import unittest
import os
from os import path
import pandas as pd
from msticpy.sectools.vtlookup import VTLookup
from msticpy.sectools.tiproviders.ti_provider_base import preprocess_observable
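# Locate the tests/testdata directory by walking from the current working directory;
# fall back to a relative path if the search does not find exactly one match.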
_test_data_folders = [
d for d, _, _ in os.walk(os.getcwd()) if d.endswith("/tests/testdata")
]
if len(_test_data_folders) == 1:
_TEST_DATA = _test_data_folders[0]
else:
_TEST_DATA = "./tests/testdata"
class TestVTLookup(unittest.TestCase):
_TEST_COLS = [
"Observable",
"IoCType",
"Status",
"ResponseCode",
"Resource",
"SourceIndex",
"VerboseMsg",
"Resource",
"ScanId",
"Permalink",
"Positives",
"MD5",
"SHA1",
"SHA256",
"ResolvedDomains",
"ResolvedIPs",
"DetectedUrls",
]
def test_ipvalidation(self):
vtlookup = VTLookup(vtkey="fake", verbosity=2)
test_ips = [
("valid", "90.156.201.27", "90.156.201.27"),
("local_ip", "10.0.0.1", None),
("mask", "255.255.20.27", None),
("loopback", "127.0.0.1", None),
("bad_format", "122.1.3", None),
]
for test_case in test_ips:
result, status = preprocess_observable(test_case[1], "ipv4")
self.assertEqual(result, test_case[2])
print(test_case[0], status)
def test_urlvalidation(self):
vtlookup = VTLookup(vtkey="fake", verbosity=2)
v1_url = "http://club-fox.ru/img/www.loginalibaba.com/alibaba/alibaba/login.alibaba.com.php?email=biuro"
v2_url = "https://www.virustotal.com/en/ip-address/90.156.201.27/information/"
test_urls = [
("valid1", v1_url, v1_url),
("valid2", "https://microsoft.com", "https://microsoft.com"),
("valid3", "https://python.org", "https://python.org"),
("valid3", v2_url, v2_url),
("local_ip", "http://10.0.0.1/foo", None),
("local_hostname", "https://myhost/path", None),
("invalid1", "http//club-fox.ru/foo.html", None),
("invalid2", "//club-fox.ru/foo.html", None),
("invalid3", "https://123:microsoft.com@user/foo.txt", None),
("invalid4", "http//10.0.0.1/foo.txt", None),
]
for test_case in test_urls:
result, status = preprocess_observable(test_case[1], "url")
print(test_case[0], status)
self.assertEqual(
result,
test_case[2],
f"Failed on test case {test_case} ({test_case[1]})",
)
def test_parse_file_results(self):
vtlookup = VTLookup(vtkey="fake", verbosity=2)
FILE_NAME = path.join(_TEST_DATA, "fileresponse.json")
with open(FILE_NAME, "r") as file_handle:
txt = file_handle.read()
vt_params = vtlookup._VT_API_TYPES["file"]
vtlookup._parse_vt_results(
vt_results=txt,
observable="7657fcb7d772448a6d8504e4b20168b8",
ioc_type="md5_hash",
vt_param=vt_params,
)
test_df = vtlookup.results[self._TEST_COLS]
self.assertEqual(len(test_df), 1)
print(test_df.T)
vtlookup = VTLookup(vtkey="fake", verbosity=2)
FILE_NAME2 = path.join(_TEST_DATA, "file-multi_pos.json")
with open(FILE_NAME2, "r") as file_handle:
txt = file_handle.read()
vt_params = vtlookup._VT_API_TYPES["file"]
vtlookup._parse_vt_results(
vt_results=txt,
observable="7657fcb7d772448a6d8504e4b20168b8",
ioc_type="md5_hash",
vt_param=vt_params,
)
test_df = vtlookup.results[self._TEST_COLS]
self.assertEqual(len(test_df), 3)
print(test_df.T)
def test_parse_url_results(self):
vtlookup = VTLookup(vtkey="fake", verbosity=2)
FILE_NAME = path.join(_TEST_DATA, "url_pos.json")
with open(FILE_NAME, "r") as file_handle:
txt = file_handle.read()
vt_params = vtlookup._VT_API_TYPES["url"]
vtlookup._parse_vt_results(
vt_results=txt,
observable="7657fcb7d772448a6d8504e4b20168b8",
ioc_type="url",
vt_param=vt_params,
)
test_df = vtlookup.results[self._TEST_COLS]
self.assertEqual(len(test_df), 1)
print(test_df.T)
vtlookup = VTLookup(vtkey="fake", verbosity=2)
FILE_NAME2 = path.join(_TEST_DATA, "url_neg.json")
with open(FILE_NAME2, "r") as file_handle:
txt = file_handle.read()
vt_params = vtlookup._VT_API_TYPES["url"]
vtlookup._parse_vt_results(
vt_results=txt,
observable="7657fcb7d772448a6d8504e4b20168b8",
ioc_type="url",
vt_param=vt_params,
)
test_df = vtlookup.results[self._TEST_COLS]
self.assertEqual(len(test_df), 1)
print(test_df.T)
def test_parse_domain_results(self):
vtlookup = VTLookup(vtkey="fake", verbosity=2)
FILE_NAME = path.join(_TEST_DATA, "domain_pos.json")
with open(FILE_NAME, "r") as file_handle:
txt = file_handle.read()
vt_params = vtlookup._VT_API_TYPES["domain"]
vtlookup._parse_vt_results(
vt_results=txt,
observable="7657fcb7d772448a6d8504e4b20168b8",
ioc_type="dns",
vt_param=vt_params,
)
test_df = vtlookup.results[self._TEST_COLS]
self.assertEqual(len(test_df), 1)
self.assertGreater(len(test_df[["ResolvedIPs"]]), 0)
self.assertGreater(len(test_df[["DetectedUrls"]].values), 0)
self.assertGreater(test_df[["Positives"]].values, 0)
print(test_df.T)
vtlookup = VTLookup(vtkey="fake", verbosity=2)
FILE_NAME2 = path.join(_TEST_DATA, "domain_neg.json")
with open(FILE_NAME2, "r") as file_handle:
txt = file_handle.read()
vt_params = vtlookup._VT_API_TYPES["domain"]
vtlookup._parse_vt_results(
vt_results=txt,
observable="7657fcb7d772448a6d8504e4b20168b8",
ioc_type="dns",
vt_param=vt_params,
)
test_df = vtlookup.results[self._TEST_COLS]
self.assertEqual(len(test_df), 1)
self.assertGreater(len(test_df[["ResolvedIPs"]].values), 0)
self.assertGreater(len(test_df[["DetectedUrls"]].values), 0)
print(test_df.T)
def test_parse_ip_results(self):
vtlookup = VTLookup(vtkey="fake", verbosity=2)
FILE_NAME = path.join(_TEST_DATA, "ip-address_pos.json")
with open(FILE_NAME, "r") as file_handle:
txt = file_handle.read()
vt_params = vtlookup._VT_API_TYPES["ip-address"]
vtlookup._parse_vt_results(
vt_results=txt,
observable="7657fcb7d772448a6d8504e4b20168b8",
ioc_type="ipv4",
vt_param=vt_params,
)
test_df = vtlookup.results[self._TEST_COLS]
self.assertEqual(len(test_df), 1)
self.assertGreater(len(test_df[["ResolvedDomains"]].values), 0)
self.assertGreater(len(test_df[["DetectedUrls"]].values), 0)
self.assertGreater(test_df[["Positives"]].values, 0)
print(test_df.T)
vtlookup = VTLookup(vtkey="fake", verbosity=2)
FILE_NAME2 = path.join(_TEST_DATA, "ip-address_neg.json")
with open(FILE_NAME2, "r") as file_handle:
txt = file_handle.read()
vt_params = vtlookup._VT_API_TYPES["ip-address"]
vtlookup._parse_vt_results(
vt_results=txt,
observable="7657fcb7d772448a6d8504e4b20168b8",
ioc_type="ipv4",
vt_param=vt_params,
)
test_df = vtlookup.results[self._TEST_COLS]
self.assertEqual(len(test_df), 1)
self.assertGreater(len(test_df[["ResolvedDomains"]].values), 0)
self.assertEqual(test_df[["Positives"]].values, 0)
print(test_df.T)
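# Note: each test above follows the same pattern -- load a canned VirusTotal
# JSON response from the test-data folder, pass it to
# VTLookup._parse_vt_results() with the matching ioc_type and API parameters,
# then inspect the populated ``results`` DataFrame. A minimal sketch of that
# pattern outside unittest (file name and observable are placeholders):
#
#     vtl = VTLookup(vtkey="fake", verbosity=2)
#     with open("ip-address_pos.json") as fh:
#         vtl._parse_vt_results(vt_results=fh.read(),
#                               observable="203.0.113.1",
#                               ioc_type="ipv4",
#                               vt_param=vtl._VT_API_TYPES["ip-address"])
#     print(vtl.results.T)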
if __name__ == "__main__":
unittest.main()
print("bye")
|
|
from django.utils.datastructures import SortedDict
from django.utils.translation import ugettext_lazy as _lazy
# The number of answers per page.
ANSWERS_PER_PAGE = 20
# The number of questions per page.
QUESTIONS_PER_PAGE = 20
# Highest ranking to show for a user
HIGHEST_RANKING = 100
# Special tag names:
ESCALATE_TAG_NAME = 'escalate'
NEEDS_INFO_TAG_NAME = 'needsinfo'
OFFTOPIC_TAG_NAME = 'offtopic'
# Escalation config
ESCALATE_EXCLUDE_PRODUCTS = ['thunderbird', 'webmaker', 'open-badges']
# How long until a question is automatically taken away from a user
TAKE_TIMEOUT = 600
# AAQ config:
products = SortedDict([
('desktop', {
'name': _lazy(u'Firefox'),
'subtitle': _lazy(u'Web browser for Windows, Mac and Linux'),
'extra_fields': ['troubleshooting', 'ff_version', 'os', 'plugins'],
'tags': ['desktop'],
'product': 'firefox',
'categories': SortedDict([
# TODO: Just use the IA topics for this.
# See bug 979397
('download-and-install', {
'name': _lazy(u'Download, install and migration'),
'topic': 'download-and-install',
'tags': ['download-and-install'],
}),
('privacy-and-security', {
'name': _lazy(u'Privacy and security settings'),
'topic': 'privacy-and-security',
'tags': ['privacy-and-security'],
}),
('customize', {
'name': _lazy(u'Customize controls, options and add-ons'),
'topic': 'customize',
'tags': ['customize'],
}),
('fix-problems', {
'name': _lazy(u'Fix slowness, crashing, error messages and '
u'other problems'),
'topic': 'fix-problems',
'tags': ['fix-problems'],
}),
('tips', {
'name': _lazy(u'Tips and tricks'),
'topic': 'tips',
'tags': ['tips'],
}),
('bookmarks', {
'name': _lazy(u'Bookmarks'),
'topic': 'bookmarks',
'tags': ['bookmarks'],
}),
('cookies', {
'name': _lazy(u'Cookies'),
'topic': 'cookies',
'tags': ['cookies'],
}),
('tabs', {
'name': _lazy(u'Tabs'),
'topic': 'tabs',
'tags': ['tabs'],
}),
('websites', {
'name': _lazy(u'Websites'),
'topic': 'websites',
'tags': ['websites'],
}),
('sync', {
'name': _lazy(u'Firefox Sync'),
'topic': 'sync',
'tags': ['sync'],
}),
('other', {
'name': _lazy(u'Other'),
'topic': 'other',
'tags': ['other'],
}),
])
}),
('mobile', {
'name': _lazy(u'Firefox for Android'),
'subtitle': _lazy(u'Web browser for Android smartphones and tablets'),
'extra_fields': ['ff_version', 'os', 'plugins'],
'tags': ['mobile'],
'product': 'mobile',
'categories': SortedDict([
# TODO: Just use the IA topics for this.
# See bug 979397
('download-and-install', {
'name': _lazy(u'Download, install and migration'),
'topic': 'download-and-install',
'tags': ['download-and-install'],
}),
('privacy-and-security', {
'name': _lazy(u'Privacy and security settings'),
'topic': 'privacy-and-security',
'tags': ['privacy-and-security'],
}),
('customize', {
'name': _lazy(u'Customize controls, options and add-ons'),
'topic': 'customize',
'tags': ['customize'],
}),
('fix-problems', {
'name': _lazy(u'Fix slowness, crashing, error messages and '
u'other problems'),
'topic': 'fix-problems',
'tags': ['fix-problems'],
}),
('tips', {
'name': _lazy(u'Tips and tricks'),
'topic': 'tips',
'tags': ['tips'],
}),
('bookmarks', {
'name': _lazy(u'Bookmarks'),
'topic': 'bookmarks',
'tags': ['bookmarks'],
}),
('cookies', {
'name': _lazy(u'Cookies'),
'topic': 'cookies',
'tags': ['cookies'],
}),
('tabs', {
'name': _lazy(u'Tabs'),
'topic': 'tabs',
'tags': ['tabs'],
}),
('websites', {
'name': _lazy(u'Websites'),
'topic': 'websites',
'tags': ['websites'],
}),
('sync', {
'name': _lazy(u'Firefox Sync'),
'topic': 'sync',
'tags': ['sync'],
}),
('other', {
'name': _lazy(u'Other'),
'topic': 'other',
'tags': ['other'],
}),
])
}),
('ios', {
'name': _lazy(u'Firefox for iOS'),
'subtitle': _lazy('Firefox for iPhone, iPad and iPod touch devices'),
'extra_fields': ['ff_version', 'os', 'plugins'],
'tags': ['ios'],
'product': 'ios',
'categories': SortedDict([
('install-and-update-firefox-ios', {
'name': _lazy(u'Install and Update'),
'topic': 'install-and-update-firefox-ios',
'tags': ['install-and-update-firefox-ios']
}),
('how-to-use-firefox-ios', {
'name': _lazy(u'How to use Firefox for iOS'),
'topic': 'how-to-use-firefox-ios',
'tags': ['how-to-use-firefox-ios']
}),
('firefox-ios-not-working-expected', {
'name': _lazy(u'Firefox for iOS is not working as expected'),
'topic': 'firefox-ios-not-working-expected',
'tags': ['firefox-ios-not-working-expected']
}),
])
}),
('focus', {
'name': _lazy(u'Focus by Firefox'),
'subtitle': _lazy('Content blocker for Safari'),
'extra_fields': [],
'tags': ['focus-firefox'],
'product': 'focus-firefox',
'categories': SortedDict([
('get-started', {
'name': _lazy(u'Get started'),
'topic': 'get-started',
'tags': []
}),
])
}),
('firefox-os', {
'name': _lazy(u'Firefox OS'),
'subtitle': _lazy('Mobile OS for smartphones'),
'extra_fields': ['device', 'os'],
'tags': [],
'product': 'firefox-os',
'categories': SortedDict([
# TODO: Just use the IA topics for this.
# See bug 979397
('download-and-install', {
'name': _lazy(u'Download and install apps'),
'topic': 'marketplace',
'tags': ['marketplace'],
}),
('customize', {
'name': _lazy(u'Customize controls, options, settings and '
u'preferences'),
'topic': 'settings',
'tags': ['settings'],
}),
('fix-problems', {
'name': _lazy(u'Fix slowness, crashing, error messages and '
u'other problems'),
'topic': 'fix-problems',
'tags': ['fix-problems'],
}),
])
}),
('webmaker', {
'name': _lazy(u'Webmaker'),
'subtitle': _lazy('Tools for creating and teaching the web'),
'extra_fields': [],
'tags': [],
'product': 'webmaker',
'categories': SortedDict([
# TODO: Just use the IA topics for this.
# See bug 979397
('popcorn-maker', {
'name': _lazy(u'Using Popcorn Maker'),
'topic': 'popcorn-maker',
'tags': ['popcorn-maker'],
}),
('thimble', {
'name': _lazy(u'Using Thimble'),
'topic': 'thimble',
'tags': ['thimble'],
}),
('x-ray-goggles', {
'name': _lazy(u'Using X-Ray Goggles'),
'topic': 'x-ray-goggles',
'tags': ['x-ray-goggles'],
}),
('get-the-most-from-webmaker', {
'name': _lazy(u'Using a feature on webmaker.org'),
'topic': 'get-the-most-from-webmaker',
'tags': ['get-the-most-from-webmaker'],
}),
('events-and-help-for-mentors', {
'name': _lazy(u'Contributing to Webmaker'),
'topic': 'events-and-help-for-mentors',
'tags': ['events-and-help-for-mentors'],
}),
])
}),
('thunderbird', {
'name': _lazy(u'Thunderbird'),
'subtitle': 'Email software for Windows, Mac and Linux',
'extra_fields': [],
'tags': [],
'product': 'thunderbird',
'categories': SortedDict([
# TODO: Just use the IA topics for this.
# See bug 979397
('download-and-install', {
'name': _lazy(u'Download, install and migration'),
'topic': 'download-install-and-migration',
'tags': ['download-and-install'],
}),
('privacy-and-security', {
'name': _lazy(u'Privacy and security settings'),
'topic': 'privacy-and-security-settings',
'tags': ['privacy-and-security'],
}),
('customize', {
'name': _lazy(u'Customize controls, options and add-ons'),
'topic': 'customize-controls-options-and-add-ons',
'tags': ['customize'],
}),
('fix-problems', {
'name': _lazy(u'Fix slowness, crashing, error messages and '
u'other problems'),
'topic': 'fix-slowness-crashing-error-messages-and-other-'
'problems',
'tags': ['fix-problems'],
}),
('calendar', {
'name': _lazy('Calendar'),
'topic': 'calendar',
'tags': ['calendar'],
}),
('other', {
'name': _lazy(u'Other'),
'topic': 'other',
'tags': ['other'],
}),
])
}),
('other', {
'name': _lazy(u'Other Mozilla products'),
'subtitle': '',
'html': 'This site only provides support for some of our products. '
'For other support, please find your product below.'
'<ul class="product-support">'
'<li><a href="http://www.seamonkey-project.org/doc/">'
'SeaMonkey support</a></li>'
'<li><a href="http://caminobrowser.org/help/">'
'Camino support</a></li>'
'<li><a '
'href="http://www.mozilla.org/projects/calendar/faq.html">'
'Lightning and Sunbird support</a></li>'
'</ul>',
'categories': SortedDict([]),
'deadend': True,
}),
])
def add_backtrack_keys(products):
"""Insert 'key' keys so we can go from product or category back to key."""
for p_k, p_v in products.iteritems():
p_v['key'] = p_k
for c_k, c_v in p_v['categories'].iteritems():
c_v['key'] = c_k
add_backtrack_keys(products)
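# Illustrative sketch of the backtrack keys added above (the dicts themselves
# are defined in this module; the import path is whatever this config module
# is registered under):
#
#     desktop = products['desktop']
#     assert desktop['key'] == 'desktop'
#     fix = desktop['categories']['fix-problems']
#     assert fix['key'] == 'fix-problems'
#
# This lets code that is handed only a product or category dict recover the
# slug it was registered under.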
|
|
from sympy.core import S
from sympy.integrals.quadrature import (gauss_legendre, gauss_laguerre,
gauss_hermite, gauss_gen_laguerre,
gauss_chebyshev_t, gauss_chebyshev_u,
gauss_jacobi)
def test_legendre():
x, w = gauss_legendre(1, 17)
assert [str(r) for r in x] == ['0']
assert [str(r) for r in w] == ['2.0000000000000000']
x, w = gauss_legendre(2, 17)
assert [str(r) for r in x] == ['-0.57735026918962576',
'0.57735026918962576']
assert [str(r) for r in w] == ['1.0000000000000000', '1.0000000000000000']
x, w = gauss_legendre(3, 17)
assert [str(r) for r in x] == ['-0.77459666924148338', '0',
'0.77459666924148338']
assert [str(r) for r in w] == ['0.55555555555555556',
'0.88888888888888889', '0.55555555555555556']
x, w = gauss_legendre(4, 17)
assert [str(r) for r in x] == ['-0.86113631159405258',
'-0.33998104358485626', '0.33998104358485626',
'0.86113631159405258']
assert [str(r) for r in w] == ['0.34785484513745386',
'0.65214515486254614', '0.65214515486254614',
'0.34785484513745386']
def test_legendre_precise():
x, w = gauss_legendre(3, 40)
assert [str(r) for r in x] == \
['-0.7745966692414833770358530799564799221666', '0',
'0.7745966692414833770358530799564799221666']
assert [str(r) for r in w] == \
['0.5555555555555555555555555555555555555556',
'0.8888888888888888888888888888888888888889',
'0.5555555555555555555555555555555555555556']
def test_laguerre():
x, w = gauss_laguerre(1, 17)
assert [str(r) for r in x] == ['1.0000000000000000']
assert [str(r) for r in w] == ['1.0000000000000000']
x, w = gauss_laguerre(2, 17)
assert [str(r) for r in x] == ['0.58578643762690495',
'3.4142135623730950']
assert [str(r) for r in w] == ['0.85355339059327376',
'0.14644660940672624']
x, w = gauss_laguerre(3, 17)
assert [str(r) for r in x] == [
'0.41577455678347908',
'2.2942803602790417',
'6.2899450829374792',
]
assert [str(r) for r in w] == [
'0.71109300992917302',
'0.27851773356924085',
'0.010389256501586136',
]
x, w = gauss_laguerre(4, 17)
assert [str(r) for r in x] == ['0.32254768961939231', '1.7457611011583466',
'4.5366202969211280', '9.3950709123011331']
assert [str(r) for r in w] == ['0.60315410434163360',
'0.35741869243779969', '0.038887908515005384',
'0.00053929470556132745']
x, w = gauss_laguerre(5, 17)
assert [str(r) for r in x] == ['0.26356031971814091', '1.4134030591065168',
'3.5964257710407221', '7.0858100058588376', '12.640800844275783']
assert [str(r) for r in w] == ['0.52175561058280865',
'0.39866681108317593', '0.075942449681707595',
'0.0036117586799220485', '2.3369972385776228e-5']
def test_laguerre_precise():
x, w = gauss_laguerre(3, 40)
assert [str(r) for r in x] == \
['0.4157745567834790833115338731282744735466',
'2.294280360279041719822050361359593868960',
'6.289945082937479196866415765512131657493']
assert [str(r) for r in w] == \
['0.7110930099291730154495901911425944313094',
'0.2785177335692408488014448884567264810349',
'0.01038925650158613574896492040067908765572']
def test_hermite():
x, w = gauss_hermite(1, 17)
assert [str(r) for r in x] == ['0']
assert [str(r) for r in w] == ['1.7724538509055160']
x, w = gauss_hermite(2, 17)
assert [str(r) for r in x] == ['-0.70710678118654752',
'0.70710678118654752']
assert [str(r) for r in w] == ['0.88622692545275801',
'0.88622692545275801']
x, w = gauss_hermite(3, 17)
assert [str(r) for r in x] == [
'-1.2247448713915890',
'0',
'1.2247448713915890']
assert [str(r) for r in w] == [
'0.29540897515091934',
'1.1816359006036774',
'0.29540897515091934']
x, w = gauss_hermite(4, 17)
assert [str(r) for r in x] == [
'-1.6506801238857846',
'-0.52464762327529032',
'0.52464762327529032',
'1.6506801238857846'
]
assert [str(r) for r in w] == [
'0.081312835447245177',
'0.80491409000551284',
'0.80491409000551284',
'0.081312835447245177'
]
x, w = gauss_hermite(5, 17)
assert [str(r) for r in x] == [
'-2.0201828704560856',
'-0.95857246461381851',
'0',
'0.95857246461381851',
'2.0201828704560856'
]
assert [str(r) for r in w] == [
'0.019953242059045913',
'0.39361932315224116',
'0.94530872048294188',
'0.39361932315224116',
'0.019953242059045913'
]
def test_hermite_precise():
x, w = gauss_hermite(3, 40)
assert [str(r) for r in x] == [
'-1.224744871391589049098642037352945695983',
'0',
'1.224744871391589049098642037352945695983'
]
assert [str(r) for r in w] == [
'0.2954089751509193378830279138901908637996',
'1.181635900603677351532111655560763455198',
'0.2954089751509193378830279138901908637996'
]
def test_gen_laguerre():
x, w = gauss_gen_laguerre(1, -S.Half, 17)
assert [str(r) for r in x] == ['0.50000000000000000']
assert [str(r) for r in w] == ['1.7724538509055160']
x, w = gauss_gen_laguerre(2, -S.Half, 17)
assert [str(r) for r in x] == ['0.27525512860841095',
'2.7247448713915890']
assert [str(r) for r in w] == ['1.6098281800110257',
'0.16262567089449035']
x, w = gauss_gen_laguerre(3, -S.Half, 17)
assert [str(r) for r in x] == ['0.19016350919348813',
'1.7844927485432516',
'5.5253437422632603']
assert [str(r) for r in w] == ['1.4492591904487850',
'0.31413464064571329',
'0.0090600198110176913']
x, w = gauss_gen_laguerre(4, -S.Half, 17)
assert [str(r) for r in x] == ['0.14530352150331709',
'1.3390972881263614',
'3.9269635013582872',
'8.5886356890120343']
assert [str(r) for r in w] ==['1.3222940251164826',
'0.41560465162978376',
'0.034155966014826951',
'0.00039920814442273524']
x, w = gauss_gen_laguerre(5, -S.Half, 17)
assert [str(r) for r in x] ==['0.11758132021177814',
'1.0745620124369040',
'3.0859374437175500',
'6.4147297336620305',
'11.807189489971737']
assert [str(r) for r in w] ==['1.2217252674706516',
'0.48027722216462937',
'0.067748788910962126',
'0.0026872914935624654',
'1.5280865710465241e-5']
x, w = gauss_gen_laguerre(1, 2, 17)
assert [str(r) for r in x] ==['3.0000000000000000']
assert [str(r) for r in w] == ['2.0000000000000000']
x, w = gauss_gen_laguerre(2, 2, 17)
assert [str(r) for r in x] == ['2.0000000000000000',
'6.0000000000000000']
assert [str(r) for r in w] ==['1.5000000000000000',
'0.50000000000000000']
x, w = gauss_gen_laguerre(3, 2, 17)
assert [str(r) for r in x] ==['1.5173870806774125',
'4.3115831337195203',
'9.1710297856030672']
assert [str(r) for r in w] ==['1.0374949614904253',
'0.90575000470306537',
'0.056755033806509347']
x, w = gauss_gen_laguerre(4, 2, 17)
assert [str(r) for r in x] ==['1.2267632635003021',
'3.4125073586969460',
'6.9026926058516134',
'12.458036771951139']
assert [str(r) for r in w] ==['0.72552499769865438',
'1.0634242919791946',
'0.20669613102835355',
'0.0043545792937974889']
x, w = gauss_gen_laguerre(5, 2, 17)
assert [str(r) for r in x] ==['1.0311091440933816',
'2.8372128239538217',
'5.6202942725987079',
'9.6829098376640271',
'15.828473921690062']
assert [str(r) for r in w] == ['0.52091739683509184',
'1.0667059331592211',
'0.38354972366693113',
'0.028564233532974658',
'0.00026271280578124935']
def test_gen_laguerre_precise():
x, w = gauss_gen_laguerre(3, -S.Half, 40)
assert [str(r) for r in x] ==['0.1901635091934881328718554276203028970878',
'1.784492748543251591186722461957367638500',
'5.525343742263260275941422110422329464413']
assert [str(r) for r in w] == ['1.449259190448785048183829411195134343108',
'0.3141346406457132878326231270167565378246',
'0.009060019811017691281714945129254301865020']
x, w = gauss_gen_laguerre(3, 2, 40)
assert [str(r) for r in x] == ['1.517387080677412495020323111016672547482',
'4.311583133719520302881184669723530562299',
'9.171029785603067202098492219259796890218']
assert [str(r) for r in w] ==['1.037494961490425285817554606541269153041',
'0.9057500047030653669269785048806009945254',
'0.05675503380650934725546688857812985243312']
def test_chebyshev_t():
x, w = gauss_chebyshev_t(1, 17)
assert [str(r) for r in x] == ['0']
assert [str(r) for r in w] == ['3.1415926535897932']
x, w = gauss_chebyshev_t(2, 17)
assert [str(r) for r in x] == ['0.70710678118654752',
'-0.70710678118654752']
assert [str(r) for r in w] == ['1.5707963267948966',
'1.5707963267948966']
x, w = gauss_chebyshev_t(3, 17)
assert [str(r) for r in x] == ['0.86602540378443865',
'0',
'-0.86602540378443865']
assert [str(r) for r in w] == ['1.0471975511965977',
'1.0471975511965977',
'1.0471975511965977']
x, w = gauss_chebyshev_t(4, 17)
assert [str(r) for r in x] == ['0.92387953251128676',
'0.38268343236508977',
'-0.38268343236508977',
'-0.92387953251128676']
assert [str(r) for r in w] == ['0.78539816339744831',
'0.78539816339744831',
'0.78539816339744831',
'0.78539816339744831']
x, w = gauss_chebyshev_t(5, 17)
assert [str(r) for r in x] == ['0.95105651629515357',
'0.58778525229247313',
'0',
'-0.58778525229247313',
'-0.95105651629515357']
assert [str(r) for r in w] == ['0.62831853071795865',
'0.62831853071795865',
'0.62831853071795865',
'0.62831853071795865',
'0.62831853071795865']
def test_chebyshev_t_precise():
x, w = gauss_chebyshev_t(3, 40)
assert [str(r) for r in x] == [
'0.8660254037844386467637231707529361834714',
'0',
'-0.8660254037844386467637231707529361834714']
assert [str(r) for r in w] == [
'1.047197551196597746154214461093167628066',
'1.047197551196597746154214461093167628066',
'1.047197551196597746154214461093167628066']
def test_chebyshev_u():
x, w = gauss_chebyshev_u(1, 17)
assert [str(r) for r in x] == ['0']
assert [str(r) for r in w] == ['1.5707963267948966']
x, w = gauss_chebyshev_u(2, 17)
assert [str(r) for r in x] == ['0.50000000000000000',
'-0.50000000000000000']
assert [str(r) for r in w] == ['0.78539816339744831',
'0.78539816339744831']
x, w = gauss_chebyshev_u(3, 17)
assert [str(r) for r in x] == ['0.70710678118654752',
'0',
'-0.70710678118654752']
assert [str(r) for r in w] == ['0.39269908169872415',
'0.78539816339744831',
'0.39269908169872415']
x, w = gauss_chebyshev_u(4, 17)
assert [str(r) for r in x] == ['0.80901699437494742',
'0.30901699437494742',
'-0.30901699437494742',
'-0.80901699437494742']
assert [str(r) for r in w] == ['0.21707871342270599',
'0.56831944997474231',
'0.56831944997474231',
'0.21707871342270599']
x, w = gauss_chebyshev_u(5, 17)
assert [str(r) for r in x] == ['0.86602540378443865',
'0.50000000000000000',
'0',
'-0.50000000000000000',
'-0.86602540378443865']
assert [str(r) for r in w] == ['0.13089969389957472',
'0.39269908169872415',
'0.52359877559829887',
'0.39269908169872415',
'0.13089969389957472']
def test_chebyshev_u_precise():
x, w = gauss_chebyshev_u(3, 40)
assert [str(r) for r in x] == [
'0.7071067811865475244008443621048490392848',
'0',
'-0.7071067811865475244008443621048490392848']
assert [str(r) for r in w] == [
'0.3926990816987241548078304229099378605246',
'0.7853981633974483096156608458198757210493',
'0.3926990816987241548078304229099378605246']
def test_jacobi():
x, w = gauss_jacobi(1, -S.Half, S.Half, 17)
assert [str(r) for r in x] == ['0.50000000000000000']
assert [str(r) for r in w] == ['3.1415926535897932']
x, w = gauss_jacobi(2, -S.Half, S.Half, 17)
assert [str(r) for r in x] == ['-0.30901699437494742',
'0.80901699437494742']
assert [str(r) for r in w] == ['0.86831485369082398',
'2.2732777998989693']
x, w = gauss_jacobi(3, -S.Half, S.Half, 17)
assert [str(r) for r in x] == ['-0.62348980185873353',
'0.22252093395631440',
'0.90096886790241913']
assert [str(r) for r in w] == ['0.33795476356635433',
'1.0973322242791115',
'1.7063056657443274']
x, w = gauss_jacobi(4, -S.Half, S.Half, 17)
assert [str(r) for r in x] == ['-0.76604444311897804',
'-0.17364817766693035',
'0.50000000000000000',
'0.93969262078590838']
assert [str(r) for r in w] == ['0.16333179083642836',
'0.57690240318269103',
'1.0471975511965977',
'1.3541609083740761']
x, w = gauss_jacobi(5, -S.Half, S.Half, 17)
assert [str(r) for r in x] == ['-0.84125353283118117',
'-0.41541501300188643',
'0.14231483827328514',
'0.65486073394528506',
'0.95949297361449739']
assert [str(r) for r in w] == ['0.090675770007435371',
'0.33391416373675607',
'0.65248870981926643',
'0.94525424081394926',
'1.1192597692123861']
x, w = gauss_jacobi(1, 2, 3, 17)
assert [str(r) for r in x] == ['0.14285714285714286']
assert [str(r) for r in w] == ['1.0666666666666667']
x, w = gauss_jacobi(2, 2, 3, 17)
assert [str(r) for r in x] == ['-0.24025307335204215',
'0.46247529557426437']
assert [str(r) for r in w] == ['0.48514624517838660',
'0.58152042148828007']
x, w = gauss_jacobi(3, 2, 3, 17)
assert [str(r) for r in x] == ['-0.46115870378089762',
'0.10438533038323902',
'0.62950064612493132']
assert [str(r) for r in w] == ['0.17937613502213266',
'0.61595640991147154',
'0.27133412173306246']
x, w = gauss_jacobi(4, 2, 3, 17)
assert [str(r) for r in x] == ['-0.59903470850824782',
'-0.14761105199952565',
'0.32554377081188859',
'0.72879429738819258']
assert [str(r) for r in w] == ['0.067809641836772187',
'0.38956404952032481',
'0.47995970868024150',
'0.12933326662932816']
x, w = gauss_jacobi(5, 2, 3, 17)
assert [str(r) for r in x] == ['-0.69045775012676106',
'-0.32651993134900065',
'0.082337849552034905',
'0.47517887061283164',
'0.79279429464422850']
assert [str(r) for r in w] ==['0.027410178066337099',
'0.21291786060364828',
'0.43908437944395081',
'0.32220656547221822',
'0.065047683080512268']
def test_jacobi_precise():
x, w = gauss_jacobi(3, -S.Half, S.Half, 40)
assert [str(r) for r in x] == [
'-0.6234898018587335305250048840042398106323',
'0.2225209339563144042889025644967947594664',
'0.9009688679024191262361023195074450511659']
assert [str(r) for r in w] == [
'0.3379547635663543330553835737094171534907',
'1.097332224279111467485302294320899710461',
'1.706305665744327437921957515249186020246']
x, w = gauss_jacobi(3, 2, 3, 40)
assert [str(r) for r in x] == [
'-0.4611587037808976179121958105554375981274',
'0.1043853303832390210914918407615869143233',
'0.6295006461249313240934312425211234110769']
assert [str(r) for r in w] == [
'0.1793761350221326596137764371503859752628',
'0.6159564099114715430909548532229749439714',
'0.2713341217330624639619353762933057474325']
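# A brief usage sketch (not part of the original tests): the nodes ``x`` and
# weights ``w`` returned by these routines approximate an integral as
# sum(w_i * f(x_i)). For Gauss-Legendre on [-1, 1] the two-point rule is
# already exact for f(t) = t**2, giving 2/3.
def _example_legendre_quadrature():
"""Approximate Integral(t**2, (t, -1, 1)) with the 2-point rule."""
x, w = gauss_legendre(2, 17)
return sum(wi * xi**2 for xi, wi in zip(x, w))  # ~ 0.66666666666666667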
|
|
"""Compute Linearly constrained minimum variance (LCMV) beamformer."""
# Authors: Alexandre Gramfort <[email protected]>
# Roman Goj <[email protected]>
# Britta Westner <[email protected]>
#
# License: BSD-3-Clause
import numpy as np
from ..rank import compute_rank
from ..io.meas_info import _simplify_info
from ..io.pick import pick_channels_cov, pick_info
from ..forward import _subject_from_forward
from ..minimum_norm.inverse import combine_xyz, _check_reference, _check_depth
from ..source_estimate import _make_stc, _get_src_type
from ..utils import logger, verbose, _check_channels_spatial_filter
from ..utils import _check_one_ch_type, _check_info_inv
from ._compute_beamformer import (
_prepare_beamformer_input, _compute_power,
_compute_beamformer, _check_src_type, Beamformer, _proj_whiten_data)
@verbose
def make_lcmv(info, forward, data_cov, reg=0.05, noise_cov=None, label=None,
pick_ori=None, rank='info',
weight_norm='unit-noise-gain-invariant',
reduce_rank=False, depth=None, inversion='matrix', verbose=None):
"""Compute LCMV spatial filter.
Parameters
----------
%(info_not_none)s
Specifies the channels to include. Bad channels (in ``info['bads']``)
are not used.
forward : instance of Forward
Forward operator.
data_cov : instance of Covariance
The data covariance.
reg : float
The regularization for the whitened data covariance.
noise_cov : instance of Covariance
The noise covariance. If provided, whitening will be done. Providing a
noise covariance is mandatory if you mix sensor types, e.g.
gradiometers with magnetometers or EEG with MEG.
label : instance of Label
Restricts the LCMV solution to a given label.
%(bf_pick_ori)s
- ``'vector'``
Keeps the currents for each direction separate
%(rank_info)s
%(weight_norm)s
Defaults to ``'unit-noise-gain-invariant'``.
%(reduce_rank)s
%(depth)s
.. versionadded:: 0.18
%(bf_inversion)s
.. versionadded:: 0.21
%(verbose)s
Returns
-------
filters : instance of Beamformer
Dictionary containing filter weights from LCMV beamformer.
Contains the following keys:
'kind' : str
The type of beamformer, in this case 'LCMV'.
'weights' : array
The filter weights of the beamformer.
'data_cov' : instance of Covariance
The data covariance matrix used to compute the beamformer.
'noise_cov' : instance of Covariance | None
The noise covariance matrix used to compute the beamformer.
'whitener' : None | ndarray, shape (n_channels, n_channels)
Whitening matrix, provided if whitening was applied to the
covariance matrix and leadfield during computation of the
beamformer weights.
'weight_norm' : str | None
Type of weight normalization used to compute the filter
weights.
'pick-ori' : None | 'max-power' | 'normal' | 'vector'
The orientation in which the beamformer filters were computed.
'ch_names' : list of str
Channels used to compute the beamformer.
'proj' : array
Projections used to compute the beamformer.
'is_ssp' : bool
If True, projections were applied prior to filter computation.
'vertices' : list
Vertices for which the filter weights were computed.
'is_free_ori' : bool
If True, the filter was computed with free source orientation.
'n_sources' : int
Number of source locations for which the filter weights were
computed.
'src_type' : str
Type of source space.
'source_nn' : ndarray, shape (n_sources, 3)
For each source location, the surface normal.
'proj' : ndarray, shape (n_channels, n_channels)
Projections used to compute the beamformer.
'subject' : str
The subject ID.
'rank' : int
The rank of the data covariance matrix used to compute the
beamformer weights.
'max-power-ori' : ndarray, shape (n_sources, 3) | None
When pick_ori='max-power', this field contains the estimated
direction of maximum power at each source location.
'inversion' : 'single' | 'matrix'
Whether the spatial filters were computed for each dipole
separately or jointly for all dipoles at each vertex using a
matrix inversion.
Notes
-----
The original reference is :footcite:`VanVeenEtAl1997`.
To obtain the Sekihara unit-noise-gain vector beamformer, you should use
``weight_norm='unit-noise-gain', pick_ori='vector'`` followed by
:meth:`vec_stc.project('pca', src) <mne.VectorSourceEstimate.project>`.
.. versionchanged:: 0.21
The computations were extensively reworked, and the default for
``weight_norm`` was set to ``'unit-noise-gain-invariant'``.
References
----------
.. footbibliography::
"""
# check number of sensor types present in the data and ensure a noise cov
info = _simplify_info(info)
noise_cov, _, allow_mismatch = _check_one_ch_type(
'lcmv', info, forward, data_cov, noise_cov)
# XXX we need this extra picking step (can't just rely on minimum norm's
# because there can be a mismatch). Should probably add an extra arg to
# _prepare_beamformer_input at some point (later)
picks = _check_info_inv(info, forward, data_cov, noise_cov)
info = pick_info(info, picks)
data_rank = compute_rank(data_cov, rank=rank, info=info)
noise_rank = compute_rank(noise_cov, rank=rank, info=info)
for key in data_rank:
if (key not in noise_rank or data_rank[key] != noise_rank[key]) and \
not allow_mismatch:
raise ValueError('%s data rank (%s) did not match the noise '
'rank (%s)'
% (key, data_rank[key],
noise_rank.get(key, None)))
del noise_rank
rank = data_rank
logger.info('Making LCMV beamformer with rank %s' % (rank,))
del data_rank
depth = _check_depth(depth, 'depth_sparse')
if inversion == 'single':
depth['combine_xyz'] = False
is_free_ori, info, proj, vertno, G, whitener, nn, orient_std = \
_prepare_beamformer_input(
info, forward, label, pick_ori, noise_cov=noise_cov, rank=rank,
pca=False, **depth)
ch_names = list(info['ch_names'])
data_cov = pick_channels_cov(data_cov, include=ch_names)
Cm = data_cov._get_square()
if 'estimator' in data_cov:
del data_cov['estimator']
rank_int = sum(rank.values())
del rank
# compute spatial filter
n_orient = 3 if is_free_ori else 1
W, max_power_ori = _compute_beamformer(
G, Cm, reg, n_orient, weight_norm, pick_ori, reduce_rank, rank_int,
inversion=inversion, nn=nn, orient_std=orient_std,
whitener=whitener)
# get src type to store with filters for _make_stc
src_type = _get_src_type(forward['src'], vertno)
# get subject to store with filters
subject_from = _subject_from_forward(forward)
# Is the computed beamformer a scalar or vector beamformer?
is_free_ori = is_free_ori if pick_ori in [None, 'vector'] else False
is_ssp = bool(info['projs'])
filters = Beamformer(
kind='LCMV', weights=W, data_cov=data_cov, noise_cov=noise_cov,
whitener=whitener, weight_norm=weight_norm, pick_ori=pick_ori,
ch_names=ch_names, proj=proj, is_ssp=is_ssp, vertices=vertno,
is_free_ori=is_free_ori, n_sources=forward['nsource'],
src_type=src_type, source_nn=forward['source_nn'].copy(),
subject=subject_from, rank=rank_int, max_power_ori=max_power_ori,
inversion=inversion)
return filters
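# A minimal usage sketch. The variables ``info``, ``forward``, ``data_cov``,
# ``noise_cov`` and ``evoked`` are assumptions (typically built with
# mne.io.read_info / mne.read_forward_solution / mne.compute_covariance) and
# are not defined in this module:
#
#     filters = make_lcmv(info, forward, data_cov, reg=0.05,
#                         noise_cov=noise_cov, pick_ori='max-power',
#                         weight_norm='unit-noise-gain')
#     stc = apply_lcmv(evoked, filters)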
def _apply_lcmv(data, filters, info, tmin, max_ori_out):
"""Apply LCMV spatial filter to data for source reconstruction."""
if max_ori_out != 'signed':
raise ValueError('max_ori_out must be "signed", got %s'
% (max_ori_out,))
if isinstance(data, np.ndarray) and data.ndim == 2:
data = [data]
return_single = True
else:
return_single = False
W = filters['weights']
for i, M in enumerate(data):
if len(M) != len(filters['ch_names']):
raise ValueError('data and picks must have the same length')
if not return_single:
logger.info("Processing epoch : %d" % (i + 1))
M = _proj_whiten_data(M, info['projs'], filters)
# project to source space using beamformer weights
vector = False
if filters['is_free_ori']:
sol = np.dot(W, M)
if filters['pick_ori'] == 'vector':
vector = True
else:
logger.info('combining the current components...')
sol = combine_xyz(sol)
else:
# Linear inverse: do computation here or delayed
if (M.shape[0] < W.shape[0] and
filters['pick_ori'] != 'max-power'):
sol = (W, M)
else:
sol = np.dot(W, M)
if filters['pick_ori'] == 'max-power' and max_ori_out == 'abs':
sol = np.abs(sol)
tstep = 1.0 / info['sfreq']
# compatibility with 0.16, add src_type as None if not present:
filters, warn_text = _check_src_type(filters)
yield _make_stc(sol, vertices=filters['vertices'], tmin=tmin,
tstep=tstep, subject=filters['subject'],
vector=vector, source_nn=filters['source_nn'],
src_type=filters['src_type'], warn_text=warn_text)
logger.info('[done]')
@verbose
def apply_lcmv(evoked, filters, max_ori_out='signed', verbose=None):
"""Apply Linearly Constrained Minimum Variance (LCMV) beamformer weights.
Apply Linearly Constrained Minimum Variance (LCMV) beamformer weights
on evoked data.
Parameters
----------
evoked : Evoked
Evoked data to invert.
filters : instance of Beamformer
LCMV spatial filter (beamformer weights).
Filter weights returned from :func:`make_lcmv`.
max_ori_out : 'signed'
Specify in case of pick_ori='max-power'.
%(verbose)s
Returns
-------
stc : SourceEstimate | VolSourceEstimate | VectorSourceEstimate
Source time courses.
See Also
--------
make_lcmv, apply_lcmv_raw, apply_lcmv_epochs, apply_lcmv_cov
Notes
-----
.. versionadded:: 0.18
"""
_check_reference(evoked)
info = evoked.info
data = evoked.data
tmin = evoked.times[0]
sel = _check_channels_spatial_filter(evoked.ch_names, filters)
data = data[sel]
stc = _apply_lcmv(data=data, filters=filters, info=info,
tmin=tmin, max_ori_out=max_ori_out)
return next(stc)
@verbose
def apply_lcmv_epochs(epochs, filters, max_ori_out='signed',
return_generator=False, verbose=None):
"""Apply Linearly Constrained Minimum Variance (LCMV) beamformer weights.
Apply Linearly Constrained Minimum Variance (LCMV) beamformer weights
on single trial data.
Parameters
----------
epochs : Epochs
Single trial epochs.
filters : instance of Beamformer
LCMV spatial filter (beamformer weights)
Filter weights returned from :func:`make_lcmv`.
max_ori_out : 'signed'
Specify in case of pick_ori='max-power'.
return_generator : bool
Return a generator object instead of a list. This allows iterating
over the stcs without having to keep them all in memory.
%(verbose)s
Returns
-------
stc: list | generator of (SourceEstimate | VolSourceEstimate)
The source estimates for all epochs.
See Also
--------
make_lcmv, apply_lcmv_raw, apply_lcmv, apply_lcmv_cov
"""
_check_reference(epochs)
info = epochs.info
tmin = epochs.times[0]
sel = _check_channels_spatial_filter(epochs.ch_names, filters)
data = epochs.get_data()[:, sel, :]
stcs = _apply_lcmv(data=data, filters=filters, info=info,
tmin=tmin, max_ori_out=max_ori_out)
if not return_generator:
stcs = [s for s in stcs]
return stcs
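# Usage sketch (``epochs`` and ``filters`` are assumed to exist and are not
# defined in this module):
#
#     stcs = apply_lcmv_epochs(epochs, filters, return_generator=True)
#     for stc in stcs:  # one source estimate per epoch, computed lazily
#         vertex, peak_time = stc.get_peak()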
@verbose
def apply_lcmv_raw(raw, filters, start=None, stop=None, max_ori_out='signed',
verbose=None):
"""Apply Linearly Constrained Minimum Variance (LCMV) beamformer weights.
Apply Linearly Constrained Minimum Variance (LCMV) beamformer weights
on raw data.
Parameters
----------
raw : mne.io.Raw
Raw data to invert.
filters : instance of Beamformer
LCMV spatial filter (beamformer weights).
Filter weights returned from :func:`make_lcmv`.
start : int
Index of first time sample (index, not time in seconds).
stop : int
Index of first time sample not to include (index, not time in seconds).
max_ori_out : 'signed'
Specify in case of pick_ori='max-power'.
%(verbose)s
Returns
-------
stc : SourceEstimate | VolSourceEstimate
Source time courses.
See Also
--------
make_lcmv, apply_lcmv_epochs, apply_lcmv, apply_lcmv_cov
"""
_check_reference(raw)
info = raw.info
sel = _check_channels_spatial_filter(raw.ch_names, filters)
data, times = raw[sel, start:stop]
tmin = times[0]
stc = _apply_lcmv(data=data, filters=filters, info=info,
tmin=tmin, max_ori_out=max_ori_out)
return next(stc)
@verbose
def apply_lcmv_cov(data_cov, filters, verbose=None):
"""Apply Linearly Constrained Minimum Variance (LCMV) beamformer weights.
Apply Linearly Constrained Minimum Variance (LCMV) beamformer weights
to a data covariance matrix to estimate source power.
Parameters
----------
data_cov : instance of Covariance
Data covariance matrix.
filters : instance of Beamformer
LCMV spatial filter (beamformer weights).
Filter weights returned from :func:`make_lcmv`.
%(verbose)s
Returns
-------
stc : SourceEstimate | VolSourceEstimate
Source power.
See Also
--------
make_lcmv, apply_lcmv, apply_lcmv_epochs, apply_lcmv_raw
"""
sel = _check_channels_spatial_filter(data_cov.ch_names, filters)
sel_names = [data_cov.ch_names[ii] for ii in sel]
data_cov = pick_channels_cov(data_cov, sel_names)
n_orient = filters['weights'].shape[0] // filters['n_sources']
# Need to project and whiten along both dimensions
data = _proj_whiten_data(data_cov['data'].T, data_cov['projs'], filters)
data = _proj_whiten_data(data.T, data_cov['projs'], filters)
del data_cov
source_power = _compute_power(data, filters['weights'], n_orient)
# compatibility with 0.16, add src_type as None if not present:
filters, warn_text = _check_src_type(filters)
return _make_stc(source_power, vertices=filters['vertices'],
src_type=filters['src_type'], tmin=0., tstep=1.,
subject=filters['subject'],
source_nn=filters['source_nn'], warn_text=warn_text)
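# Usage sketch for mapping source power from a covariance matrix
# (``epochs`` and ``filters`` are assumptions, not defined here):
#
#     data_cov = mne.compute_covariance(epochs, tmin=0.01, tmax=0.25)
#     stc_power = apply_lcmv_cov(data_cov, filters)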
|
|
#!/usr/bin/env python
"""
@package ion.agents.instrument.test.test_high_volume
@file ion/agents/instrument/test/test_high_volume.py
@author Bill French
@brief Test cases for high volume agents
"""
__author__ = 'Bill French'
import simplejson, urllib, os, unittest, gevent
from mock import patch
import sys
import time
import re
from gevent import Timeout
import numpy as np
from gevent.event import AsyncResult
from pyon.public import log, CFG
from nose.plugins.attrib import attr
import pyon.core.exception as pyex
from pyon.public import RT, PRED
from pyon.core.bootstrap import IonObject
from pyon.core.object import IonObjectSerializer
from pyon.agent.agent import ResourceAgentClient
from pyon.ion.exchange import ExchangeManager
from ion.services.dm.utility.granule_utils import time_series_domain
from pyon.util.int_test import IonIntegrationTestCase
from ion.agents.instrument.test.test_instrument_agent import InstrumentAgentTestMixin
from ion.agents.instrument.test.test_instrument_agent import start_instrument_agent_process
from ion.util.enhanced_resource_registry_client import EnhancedResourceRegistryClient
from ion.services.dm.utility.granule_utils import time_series_domain
from ion.services.dm.utility.granule_utils import RecordDictionaryTool
from interface.services.dm.idata_retriever_service import DataRetrieverServiceClient
from interface.services.dm.idataset_management_service import DatasetManagementServiceClient
from interface.services.sa.idata_product_management_service import DataProductManagementServiceClient
from interface.services.coi.iresource_registry_service import ResourceRegistryServiceClient
from interface.objects import AgentCommand, Dataset
from pyon.agent.agent import ResourceAgentState
from pyon.agent.agent import ResourceAgentEvent
# bin/nosetests -s -v --nologcapture ion/agents/instrument/test/test_high_volume.py:TestInstrumentAgentHighVolume.test_autosample
DVR_CONFIG = {
'dvr_egg' : 'http://sddevrepo.oceanobservatories.org/releases/ooici_mi_test_driver-0.0.1-py2.7.egg',
'dvr_mod' : 'mi.instrument.ooici.mi.test_driver.driver',
'dvr_cls' : 'InstrumentDriver',
'workdir' : CFG.device.sbe37.workdir,
'process_type' : None,
# These values are ignored, but must be defined
'comms_config' : {
'addr' : 'localhost',
'port' : 8080,
'cmd_port' : 8181
}
}
LAUNCH_FROM_EGG=True
if LAUNCH_FROM_EGG:
from ion.agents.instrument.test.load_test_driver_egg import load_egg
DVR_CONFIG = load_egg(DVR_CONFIG)
else:
#mi_repo = '/path/to/your/local/mi/repo'
from ion.agents.instrument.test.load_test_driver_egg import load_repo
DVR_CONFIG = load_repo(mi_repo, DVR_CONFIG)
log.info("adding repo to syspath: %s", sys.path)
# Load MI modules from the egg
from mi.core.instrument.instrument_driver import DriverEvent
from mi.core.instrument.instrument_driver import DriverProtocolState
from mi.core.instrument.instrument_driver import DriverConnectionState
#from mi.instrument.ooici.mi.test_driver.driver import ProtocolEvent
#from mi.instrument.ooici.mi.test_driver.driver import ParameterName
PAYLOAD_SIZE = 'PAYLOAD_SIZE'
SAMPLE_INTERVAL = 'SAMPLE_INTERVAL'
@attr('HARDWARE', group='mi')
@patch.dict(CFG, {'endpoint':{'receive':{'timeout': 120}}})
class TestInstrumentAgentHighVolume(IonIntegrationTestCase, InstrumentAgentTestMixin):
"""
Test cases for pumping high-volume data through the agent and ingesting it. Initially we are just testing raw data, but
eventually we will update to publish data similar to the ORB
"""
def setUp(self):
"""
Set up driver integration support.
Start container.
Start deploy services.
Define agent config, start agent.
Start agent client.
"""
log.info("DVR_CONFIG=%s", DVR_CONFIG)
self._ia_client = None
# Start container.
log.info('Starting capability container.')
self._start_container()
# Bring up services in a deploy file (no need to message)
log.info('Starting deploy services.')
self.container.start_rel_from_url('res/deploy/r2deploy.yml')
log.info('building stream configuration')
# Setup stream config.
self._build_stream_config()
# Start a resource agent client to talk with the instrument agent.
log.info('starting IA process')
self._ia_client = start_instrument_agent_process(self.container, self._stream_config, dvr_config=DVR_CONFIG)
self.addCleanup(self._verify_agent_reset)
log.info('test setup complete')
def assert_initialize(self):
state = self._ia_client.get_agent_state()
self.assertEqual(state, ResourceAgentState.UNINITIALIZED)
cmd = AgentCommand(command=ResourceAgentEvent.INITIALIZE)
retval = self._ia_client.execute_agent(cmd)
state = self._ia_client.get_agent_state()
self.assertEqual(state, ResourceAgentState.INACTIVE)
cmd = AgentCommand(command=ResourceAgentEvent.GO_ACTIVE)
retval = self._ia_client.execute_agent(cmd)
state = self._ia_client.get_agent_state()
self.assertEqual(state, ResourceAgentState.IDLE)
cmd = AgentCommand(command=ResourceAgentEvent.RUN)
retval = self._ia_client.execute_agent(cmd)
state = self._ia_client.get_agent_state()
self.assertEqual(state, ResourceAgentState.COMMAND)
def assert_start_autosample(self):
cmd = AgentCommand(command=DriverEvent.START_AUTOSAMPLE)
retval = self._ia_client.execute_resource(cmd)
def assert_stop_autosample(self):
cmd = AgentCommand(command=DriverEvent.STOP_AUTOSAMPLE)
retval = self._ia_client.execute_resource(cmd)
def assert_reset(self):
cmd = AgentCommand(command=ResourceAgentEvent.RESET)
retval = self._ia_client.execute_agent(cmd)
state = self._ia_client.get_agent_state()
self.assertEqual(state, ResourceAgentState.UNINITIALIZED)
def assert_set_parameter(self, name, value, verify=True):
'''
verify that parameters are set correctly. Assumes we are in command mode.
'''
setParams = { name : value }
getParams = [ name ]
self._ia_client.set_resource(setParams)
if(verify):
result = self._ia_client.get_resource(getParams)
self.assertEqual(result[name], value)
def _get_raw_queue(self):
queue_names = self.container.ex_manager.list_queues(self._raw_exchange_name)
log.info("Rabbit Queues: %s", queue_names)
self.assertEqual(len(queue_names), 1)
return queue_names[0]
def _monitor_queue_size(self, queue_name, size):
"""
Verify a queue doesn't exceed a specific backlog
"""
while not self._queue_size_monitor_stop:
queue_info = self.container.ex_manager.get_queue_info(queue_name)
log.info("Queue Info: %s", queue_info)
gevent.sleep(1)
def _start_queue_size_monitor(self, queue_name, size):
log.debug("Start Queue Monitor: %s", queue_name)
self._async_queue_size_monitor = AsyncResult()
self._queue_size_monitor_thread = gevent.Greenlet(self._monitor_queue_size, queue_name, size)
self._queue_size_monitor_stop = False
self._queue_size_monitor_thread.start()
def _stop_queue_size_monitor(self):
if self._queue_size_monitor_thread:
self._queue_size_monitor_stop = True
self._queue_size_monitor_thread.join(timeout=60)
self._queue_size_monitor_thread = False
def _start_raw_ingestion(self):
dpsc_cli = DataProductManagementServiceClient()
rrclient = ResourceRegistryServiceClient()
RR2 = EnhancedResourceRegistryClient(rrclient)
dp_obj = IonObject(RT.DataProduct,
name='DP1',
description='some new dp')
dp_obj.geospatial_bounds.geospatial_latitude_limit_north = 10.0
dp_obj.geospatial_bounds.geospatial_latitude_limit_south = -10.0
dp_obj.geospatial_bounds.geospatial_longitude_limit_east = 10.0
dp_obj.geospatial_bounds.geospatial_longitude_limit_west = -10.0
dp_obj.ooi_product_name = "PRODNAME"
#------------------------------------------------------------------------------------------------
# Create a set of ParameterContext objects to define the parameters in the coverage, add each to the ParameterDictionary
#------------------------------------------------------------------------------------------------
log.info("Create data product... raw stream id: %s", self._raw_stream_id)
dp_id = dpsc_cli.create_data_product(data_product=dp_obj)
dataset_id = self.create_dataset(self._raw_stream_pdict_id)
RR2.assign_stream_definition_to_data_product_with_has_stream_definition(self._raw_stream_def_id, dp_id)
RR2.assign_stream_to_data_product_with_has_stream(self._raw_stream_id, dp_id)
RR2.assign_dataset_to_data_product_with_has_dataset(dataset_id, dp_id)
self._raw_dataset_id = dataset_id
log.info("Create data product...Complete")
# Assert that the data product has an associated stream at this stage
stream_ids, _ = rrclient.find_objects(dp_id, PRED.hasStream, RT.Stream, True)
self.assertNotEquals(len(stream_ids), 0)
# Assert that the data product has an associated stream def at this stage
stream_ids, _ = rrclient.find_objects(dp_id, PRED.hasStreamDefinition, RT.StreamDefinition, True)
self.assertNotEquals(len(stream_ids), 0)
log.info("Activate data product persistence")
dpsc_cli.activate_data_product_persistence(dp_id)
log.info("Read data product")
dp_obj = dpsc_cli.read_data_product(dp_id)
self.assertIsNotNone(dp_obj)
self.assertEquals(dp_obj.geospatial_point_center.lat, 0.0)
log.debug('Created data product %s', dp_obj)
def create_dataset(self, parameter_dict_id=''):
'''
Creates a time-series dataset
'''
dataset_management = DatasetManagementServiceClient()
if not parameter_dict_id:
parameter_dict_id = dataset_management.read_parameter_dictionary_by_name('ctd_parsed_param_dict', id_only=True)
dataset = Dataset(name='test_dataset_')
dataset_id = dataset_management.create_dataset(dataset, parameter_dictionary_id=parameter_dict_id)
self.addCleanup(dataset_management.delete_dataset, dataset_id)
return dataset_id
def assert_raw_granules_ingested(self, count, payload_size):
#--------------------------------------------------------------------------------
# Test the slicing capabilities
#--------------------------------------------------------------------------------
data_retriever = DataRetrieverServiceClient()
for i in range(0, count-1):
granule = data_retriever.retrieve(dataset_id=self._raw_dataset_id, query={'tdoa':slice(i,i+1)})
rdt = RecordDictionaryTool.load_from_granule(granule)
log.info("Granule index: %d, time: %s, size: %s", i, rdt['time'][0], len(rdt['raw'][0]))
self.assertEqual(payload_size, len(rdt['raw'][0]))
def test_autosample(self):
"""
Start up the test instrument and sample at 1 Hz. Verify that we produce n-1 packets in the duration specified
and verify the payload is the size we expect.
"""
duration = 100
payload_size = 1024
sample_rate = 1
sample_count = duration / sample_rate
timeout = duration * 5
# Start data subscribers.
self._start_data_subscribers(3, sample_count)
self.addCleanup(self._stop_data_subscribers)
self._start_raw_ingestion()
# Get the raw queue name
raw_queue_name = self._get_raw_queue()
self.assertIsNotNone(raw_queue_name)
# Start queue monitor.
#self._start_queue_size_monitor(raw_queue_name, 3)
#self.addCleanup(self._stop_queue_size_monitor)
self.assert_initialize()
self.assert_set_parameter(PAYLOAD_SIZE, payload_size)
self.assert_start_autosample()
gevent.sleep(duration)
self.assert_stop_autosample()
gevent.sleep(2)
samples_rec = len(self._raw_samples_received)
self.assertLessEqual(sample_count - samples_rec, 1)
self.assert_raw_granules_ingested(len(self._raw_samples_received), payload_size)
self.assert_reset()
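# Sanity check of the arithmetic asserted in test_autosample above: with
# duration = 100 s and sample_rate = 1 Hz, sample_count = 100, so the test
# tolerates at most one lost packet (samples_rec >= 99) and then verifies
# that every ingested raw granule carries the configured 1024-byte payload.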
|
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""A handler that exports various App Engine services over HTTP.
You can export this handler in your app by adding it directly to app.yaml's
list of handlers:
handlers:
- url: /remote_api
script: $PYTHON_LIB/google/appengine/ext/remote_api/handler.py
login: admin
Then, you can use remote_api_stub to remotely access services exported by this
handler. See the documentation in remote_api_stub.py for details on how to do
this.
Using this handler without specifying "login: admin" would be extremely unwise.
So unwise that the default handler insists on checking for itself.
"""
import google
import logging
import os
import pickle
import sha
import sys
import wsgiref.handlers
import yaml
from google.appengine.api import api_base_pb
from google.appengine.api import apiproxy_stub
from google.appengine.api import apiproxy_stub_map
from google.appengine.api import users
from google.appengine.datastore import datastore_pb
from google.appengine.ext import webapp
from google.appengine.ext.remote_api import remote_api_pb
from google.appengine.ext.remote_api import remote_api_services
from google.appengine.runtime import apiproxy_errors
class RemoteDatastoreStub(apiproxy_stub.APIProxyStub):
"""Provides a stub that permits execution of stateful datastore queries.
Some operations aren't possible using the standard interface. Notably,
datastore RunQuery operations internally store a cursor that is referenced in
later Next calls, and cleaned up at the end of each request. Because every
call to ApiCallHandler takes place in its own request, this isn't possible.
To work around this, RemoteDatastoreStub provides its own implementation of
RunQuery that immediately returns the query results.
"""
def __init__(self, service='datastore_v3', _test_stub_map=None):
"""Constructor.
Args:
service: The name of the service
_test_stub_map: An APIProxyStubMap to use for testing purposes.
"""
super(RemoteDatastoreStub, self).__init__(service)
if _test_stub_map:
self.__call = _test_stub_map.MakeSyncCall
else:
self.__call = apiproxy_stub_map.MakeSyncCall
def _Dynamic_RunQuery(self, request, response):
"""Handle a RunQuery request.
We handle RunQuery by executing a Query and a Next and returning the result
of the Next request.
This method is DEPRECATED, but left in place for older clients.
"""
runquery_response = datastore_pb.QueryResult()
self.__call('datastore_v3', 'RunQuery', request, runquery_response)
if runquery_response.result_size() > 0:
response.CopyFrom(runquery_response)
return
next_request = datastore_pb.NextRequest()
next_request.mutable_cursor().CopyFrom(runquery_response.cursor())
next_request.set_count(request.limit())
self.__call('datastore_v3', 'Next', next_request, response)
def _Dynamic_Transaction(self, request, response):
"""Handle a Transaction request.
We handle transactions by accumulating Put requests on the client end, as
well as recording the key and hash of Get requests. When Commit is called,
Transaction is invoked, which verifies that all the entities in the
precondition list still exist and their hashes match, then performs a
transaction of its own to make the updates.
"""
begin_request = datastore_pb.BeginTransactionRequest()
begin_request.set_app(os.environ['APPLICATION_ID'])
tx = datastore_pb.Transaction()
self.__call('datastore_v3', 'BeginTransaction', begin_request, tx)
preconditions = request.precondition_list()
if preconditions:
get_request = datastore_pb.GetRequest()
get_request.mutable_transaction().CopyFrom(tx)
for precondition in preconditions:
key = get_request.add_key()
key.CopyFrom(precondition.key())
get_response = datastore_pb.GetResponse()
self.__call('datastore_v3', 'Get', get_request, get_response)
entities = get_response.entity_list()
assert len(entities) == request.precondition_size()
for precondition, entity in zip(preconditions, entities):
if precondition.has_hash() != entity.has_entity():
raise apiproxy_errors.ApplicationError(
datastore_pb.Error.CONCURRENT_TRANSACTION,
"Transaction precondition failed.")
elif entity.has_entity():
entity_hash = sha.new(entity.entity().Encode()).digest()
if precondition.hash() != entity_hash:
raise apiproxy_errors.ApplicationError(
datastore_pb.Error.CONCURRENT_TRANSACTION,
"Transaction precondition failed.")
if request.has_puts():
put_request = request.puts()
put_request.mutable_transaction().CopyFrom(tx)
self.__call('datastore_v3', 'Put', put_request, response)
if request.has_deletes():
delete_request = request.deletes()
delete_request.mutable_transaction().CopyFrom(tx)
self.__call('datastore_v3', 'Delete', delete_request,
api_base_pb.VoidProto())
self.__call('datastore_v3', 'Commit', tx, api_base_pb.VoidProto())
def _Dynamic_GetIDs(self, request, response):
"""Fetch unique IDs for a set of paths."""
for entity in request.entity_list():
assert entity.property_size() == 0
assert entity.raw_property_size() == 0
assert entity.entity_group().element_size() == 0
lastpart = entity.key().path().element_list()[-1]
assert lastpart.id() == 0 and not lastpart.has_name()
begin_request = datastore_pb.BeginTransactionRequest()
begin_request.set_app(os.environ['APPLICATION_ID'])
tx = datastore_pb.Transaction()
self.__call('datastore_v3', 'BeginTransaction', begin_request, tx)
self.__call('datastore_v3', 'Put', request, response)
self.__call('datastore_v3', 'Rollback', tx, api_base_pb.VoidProto())
SERVICE_PB_MAP = remote_api_services.SERVICE_PB_MAP
class ApiCallHandler(webapp.RequestHandler):
"""A webapp handler that accepts API calls over HTTP and executes them."""
LOCAL_STUBS = {
'remote_datastore': RemoteDatastoreStub('remote_datastore'),
}
def CheckIsAdmin(self):
if not users.is_current_user_admin():
self.response.set_status(401)
self.response.out.write(
"You must be logged in as an administrator to access this.")
self.response.headers['Content-Type'] = 'text/plain'
return False
elif 'X-appcfg-api-version' not in self.request.headers:
self.response.set_status(403)
self.response.out.write("This request did not contain a necessary header")
self.response.headers['Content-Type'] = 'text/plain'
return False
return True
def get(self):
"""Handle a GET. Just show an info page."""
if not self.CheckIsAdmin():
return
rtok = self.request.get('rtok', '0')
app_info = {
'app_id': os.environ['APPLICATION_ID'],
'rtok': rtok
}
self.response.headers['Content-Type'] = 'text/plain'
self.response.out.write(yaml.dump(app_info))
def post(self):
"""Handle POST requests by executing the API call."""
if not self.CheckIsAdmin():
return
self.response.headers['Content-Type'] = 'application/octet-stream'
response = remote_api_pb.Response()
try:
request = remote_api_pb.Request()
request.ParseFromString(self.request.body)
response_data = self.ExecuteRequest(request)
response.mutable_response().set_contents(response_data.Encode())
self.response.set_status(200)
except Exception, e:
logging.exception('Exception while handling %s', request)
self.response.set_status(200)
response.mutable_exception().set_contents(pickle.dumps(e))
if isinstance(e, apiproxy_errors.ApplicationError):
application_error = response.mutable_application_error()
application_error.set_code(e.application_error)
application_error.set_detail(e.error_detail)
self.response.out.write(response.Encode())
def ExecuteRequest(self, request):
"""Executes an API invocation and returns the response object."""
service = request.service_name()
method = request.method()
service_methods = SERVICE_PB_MAP.get(service, {})
request_class, response_class = service_methods.get(method, (None, None))
if not request_class:
raise apiproxy_errors.CallNotFoundError()
request_data = request_class()
request_data.ParseFromString(request.request().contents())
response_data = response_class()
if service in self.LOCAL_STUBS:
self.LOCAL_STUBS[service].MakeSyncCall(service, method, request_data,
response_data)
else:
apiproxy_stub_map.MakeSyncCall(service, method, request_data,
response_data)
return response_data
def InfoPage(self):
"""Renders an information page."""
return """
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html><head>
<title>App Engine API endpoint.</title>
</head><body>
<h1>App Engine API endpoint.</h1>
<p>This is an endpoint for the App Engine remote API interface.
Point your stubs (google.appengine.ext.remote_api.remote_api_stub) here.</p>
</body>
</html>"""
def main():
application = webapp.WSGIApplication([('.*', ApiCallHandler)])
wsgiref.handlers.CGIHandler().run(application)
if __name__ == '__main__':
main()
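# Client-side usage sketch (server name, credentials and handler path are
# placeholders; see remote_api_stub.py for the authoritative details):
#
#     from google.appengine.ext.remote_api import remote_api_stub
#
#     def auth_func():
#         return ('[email protected]', 'password')
#
#     remote_api_stub.ConfigureRemoteApi(
#         None, '/remote_api', auth_func, 'your-app.appspot.com')
#     # Datastore (and other stubbed) API calls now execute against the
#     # remote application through the ApiCallHandler defined above.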
|
|
"""
We have four main abstractions: Users, Collections, Memberships, and Roles.
Users represent people, like students in a school, teachers for a classroom, or volunteers setting up informal
installations. There are two main user types, ``FacilityUser`` and ``DeviceOwner``. A ``FacilityUser`` belongs to a
particular facility, and has permissions only with respect to other data that is associated with that facility. A
``DeviceOwner`` is not associated with a particular facility, and has global permissions for data on the local device.
``FacilityUser`` accounts (like other facility data) may be synced across multiple devices, whereas a ``DeviceOwner`` account
is specific to a single installation of Kolibri.
Collections form a hierarchy, with Collections able to belong to other Collections. Collections are subdivided
into several pre-defined levels (``Facility`` > ``Classroom`` > ``LearnerGroup``).
A ``FacilityUser`` (but not a ``DeviceOwner``) can be marked as a member of a ``Collection`` through a ``Membership``
object. Being a member of a Collection also means being a member of all the Collections above that Collection in the
hierarchy.
Another way in which a ``FacilityUser`` can be associated with a particular ``Collection`` is through a ``Role``
object, which grants the user a role with respect to the ``Collection`` and all the collections below it. A ``Role``
object also stores the "kind" of the role (currently, one of "admin" or "coach"), which affects what permissions the
user gains through the ``Role``.
"""
from __future__ import absolute_import, print_function, unicode_literals
from django.contrib.auth.models import AbstractBaseUser, AnonymousUser
from django.core import validators
from django.core.exceptions import ValidationError
from django.db import models
from django.db.models.query import F
from django.db.utils import IntegrityError
from django.utils import timezone
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from kolibri.core.errors import KolibriValidationError
from mptt.models import MPTTModel, TreeForeignKey
from six import string_types
from .constants import collection_kinds, role_kinds
from .errors import (
InvalidRoleKind, UserDoesNotHaveRoleError,
UserHasRoleOnlyIndirectlyThroughHierarchyError,
UserIsMemberOnlyIndirectlyThroughHierarchyError, UserIsNotFacilityUser,
UserIsNotMemberError
)
from .filters import HierarchyRelationsFilter
from .permissions.auth import CollectionSpecificRoleBasedPermissions
from .permissions.base import BasePermissions, RoleBasedPermissions
from .permissions.general import (
IsAdminForOwnFacility, IsFromSameFacility, IsOwn, IsSelf
)
def _has_permissions_class(obj):
return hasattr(obj, "permissions") and isinstance(obj.permissions, BasePermissions)
@python_2_unicode_compatible
class FacilityDataset(models.Model):
"""
``FacilityDataset`` stores high-level metadata and settings for a particular ``Facility``. It is also the
model that all models storing facility data (data that is associated with a particular facility, and that inherits
from ``AbstractFacilityDataModel``) foreign key onto, to indicate that they belong to this particular ``Facility``.
"""
description = models.TextField(blank=True)
location = models.CharField(max_length=200, blank=True)
allow_signups = models.BooleanField(default=True)
def __str__(self):
facilities = self.collection_set.filter(kind=collection_kinds.FACILITY)
if facilities:
return "FacilityDataset for {}".format(Facility.objects.get(id=facilities[0].id))
else:
return "FacilityDataset (no associated Facility)"
class AbstractFacilityDataModel(models.Model):
"""
Base model for Kolibri "Facility Data", which is data that is specific to a particular ``Facility``,
such as ``FacilityUsers``, ``Collections``, and other data associated with those users and collections.
"""
dataset = models.ForeignKey("FacilityDataset")
class Meta:
abstract = True
def clean_fields(self, *args, **kwargs):
# ensure that we have, or can infer, a dataset for the model instance
self.ensure_dataset()
super(AbstractFacilityDataModel, self).clean_fields(*args, **kwargs)
def save(self, *args, **kwargs):
# before saving, ensure we have a dataset, and convert any validation errors into integrity errors,
# since by this point the `clean_fields` method should already have prevented this situation from arising
try:
self.ensure_dataset()
except KolibriValidationError as e:
raise IntegrityError(str(e))
super(AbstractFacilityDataModel, self).save(*args, **kwargs)
def ensure_dataset(self):
"""
If no dataset has yet been specified, try to infer it. If a dataset has already been specified, to prevent
inconsistencies, make sure it matches the inferred dataset, otherwise raise a ``KolibriValidationError``.
If we have no dataset and it can't be inferred, we raise a ``KolibriValidationError`` exception as well.
"""
inferred_dataset = self.infer_dataset()
if self.dataset_id:
# make sure currently stored dataset matches inferred dataset, if any
if inferred_dataset and inferred_dataset != self.dataset:
raise KolibriValidationError("This model is not associated with the correct FacilityDataset.")
else:
# use the inferred dataset, if there is one, otherwise throw an error
if inferred_dataset:
self.dataset = inferred_dataset
else:
raise KolibriValidationError("FacilityDataset ('dataset') not provided, and could not be inferred.")
def infer_dataset(self):
"""
This method is used by `ensure_dataset` to "infer" which dataset should be associated with this instance.
It should be overridden in any subclass of ``AbstractFacilityDataModel``, to define a model-specific inference.
"""
raise NotImplementedError("Subclasses of AbstractFacilityDataModel must override the `infer_dataset` method.")
class KolibriAbstractBaseUser(AbstractBaseUser):
"""
Our custom user type, derived from ``AbstractBaseUser`` as described in the Django docs.
Draws liberally from ``django.contrib.auth.AbstractUser``, except we exclude some fields
we don't care about, like email.
This model is an abstract model, and is inherited by both ``FacilityUser`` and ``DeviceOwner``.
"""
class Meta:
abstract = True
USERNAME_FIELD = "username"
username = models.CharField(
_('username'),
max_length=30,
help_text=_('Required. 30 characters or fewer. Letters and digits only.'),
validators=[
validators.RegexValidator(
r'^\w+$',
_('Enter a valid username. This value may contain only letters and numbers.')
),
],
)
first_name = models.CharField(_('first name'), max_length=60, blank=True)
last_name = models.CharField(_('last name'), max_length=60, blank=True)
date_joined = models.DateTimeField(_('date joined'), default=timezone.now, editable=False)
def get_full_name(self):
return (self.first_name + " " + self.last_name).strip()
def get_short_name(self):
return self.first_name
def is_member_of(self, coll):
"""
Determine whether this user is a member of the specified ``Collection``.
:param coll: The ``Collection`` for which we are checking this user's membership.
        :return: ``True`` if this user is a member of the specified ``Collection``, otherwise ``False``.
:rtype: bool
"""
raise NotImplementedError("Subclasses of KolibriAbstractBaseUser must override the `is_member_of` method.")
def get_roles_for_user(self, user):
"""
Determine all the roles this user has in relation to the target user, and return a set containing the kinds of roles.
:param user: The target user for which this user has the roles.
:return: The kinds of roles this user has with respect to the target user.
:rtype: set of ``kolibri.auth.constants.role_kinds.*`` strings
"""
raise NotImplementedError("Subclasses of KolibriAbstractBaseUser must override the `get_roles_for_user` method.")
def get_roles_for_collection(self, coll):
"""
Determine all the roles this user has in relation to the specified ``Collection``, and return a set containing the kinds of roles.
:param coll: The target ``Collection`` for which this user has the roles.
:return: The kinds of roles this user has with respect to the specified ``Collection``.
:rtype: set of ``kolibri.auth.constants.role_kinds.*`` strings
"""
raise NotImplementedError("Subclasses of KolibriAbstractBaseUser must override the `get_roles_for_collection` method.")
def has_role_for_user(self, kinds, user):
"""
Determine whether this user has (at least one of) the specified role kind(s) in relation to the specified user.
:param user: The user that is the target of the role (for which this user has the roles).
:param kinds: The kind (or kinds) of role to check for, as a string or iterable.
:type kinds: string from ``kolibri.auth.constants.role_kinds.*``
:return: ``True`` if this user has the specified role kind with respect to the target user, otherwise ``False``.
:rtype: bool
"""
raise NotImplementedError("Subclasses of KolibriAbstractBaseUser must override the `has_role_for_user` method.")
def has_role_for_collection(self, kinds, coll):
"""
Determine whether this user has (at least one of) the specified role kind(s) in relation to the specified ``Collection``.
:param kinds: The kind (or kinds) of role to check for, as a string or iterable.
:type kinds: string from kolibri.auth.constants.role_kinds.*
:param coll: The target ``Collection`` for which this user has the roles.
:return: ``True`` if this user has the specified role kind with respect to the target ``Collection``, otherwise ``False``.
:rtype: bool
"""
raise NotImplementedError("Subclasses of KolibriAbstractBaseUser must override the `has_role_for_collection` method.")
def can_create_instance(self, obj):
"""
Checks whether this user (self) has permission to create a particular model instance (obj).
This method should be overridden by classes that inherit from ``KolibriAbstractBaseUser``.
        In general, unless an instance has already been initialized, this method should not be called directly;
        prefer calling ``can_create`` instead.
:param obj: An (unsaved) instance of a Django model, to check permissions for.
:return: ``True`` if this user should have permission to create the object, otherwise ``False``.
:rtype: bool
"""
raise NotImplementedError("Subclasses of KolibriAbstractBaseUser must override the `can_create_instance` method.")
def can_create(self, Model, data):
"""
Checks whether this user (self) has permission to create an instance of Model with the specified attributes (data).
This method defers to the ``can_create_instance`` method, and in most cases should not itself be overridden.
:param Model: A subclass of ``django.db.models.Model``
:param data: A ``dict`` of data to be used in creating an instance of the Model
:return: ``True`` if this user should have permission to create an instance of Model with the specified data, else ``False``.
:rtype: bool
"""
try:
instance = Model(**data)
instance.full_clean()
except TypeError:
return False # if the data provided does not fit the Model, don't continue checking
except ValidationError:
return False # if the data does not validate, don't continue checking
# now that we have an instance, defer to the permission-checking method that works with instances
return self.can_create_instance(instance)
def can_read(self, obj):
"""
Checks whether this user (self) has permission to read a particular model instance (obj).
This method should be overridden by classes that inherit from ``KolibriAbstractBaseUser``.
:param obj: An instance of a Django model, to check permissions for.
:return: ``True`` if this user should have permission to read the object, otherwise ``False``.
:rtype: bool
"""
raise NotImplementedError("Subclasses of KolibriAbstractBaseUser must override the `can_read` method.")
def can_update(self, obj):
"""
Checks whether this user (self) has permission to update a particular model instance (obj).
This method should be overridden by classes that inherit from KolibriAbstractBaseUser.
:param obj: An instance of a Django model, to check permissions for.
:return: ``True`` if this user should have permission to update the object, otherwise ``False``.
:rtype: bool
"""
raise NotImplementedError("Subclasses of KolibriAbstractBaseUser must override the `can_update` method.")
def can_delete(self, obj):
"""
Checks whether this user (self) has permission to delete a particular model instance (obj).
This method should be overridden by classes that inherit from KolibriAbstractBaseUser.
:param obj: An instance of a Django model, to check permissions for.
:return: ``True`` if this user should have permission to delete the object, otherwise ``False``.
:rtype: bool
"""
raise NotImplementedError("Subclasses of KolibriAbstractBaseUser must override the `can_delete` method.")
def get_roles_for(self, obj):
"""
Helper function that defers to ``get_roles_for_user`` or ``get_roles_for_collection`` based on the type of object passed in.
"""
if isinstance(obj, KolibriAbstractBaseUser):
return self.get_roles_for_user(obj)
elif isinstance(obj, Collection):
return self.get_roles_for_collection(obj)
else:
raise ValueError("The `obj` argument to `get_roles_for` must be either an instance of KolibriAbstractBaseUser or Collection.")
def has_role_for(self, kinds, obj):
"""
Helper function that defers to ``has_role_for_user`` or ``has_role_for_collection`` based on the type of object passed in.
"""
if isinstance(obj, KolibriAbstractBaseUser):
return self.has_role_for_user(kinds, obj)
elif isinstance(obj, Collection):
return self.has_role_for_collection(kinds, obj)
else:
raise ValueError("The `obj` argument to `has_role_for` must be either an instance of KolibriAbstractBaseUser or Collection.")
def filter_readable(self, queryset):
"""
Filters a queryset down to only the elements that this user should have permission to read.
:param queryset: A ``QuerySet`` instance that the filtering should be applied to.
:return: Filtered ``QuerySet`` including only elements that are readable by this user.
"""
raise NotImplementedError("Subclasses of KolibriAbstractBaseUser must override the `can_delete` method.")
class KolibriAnonymousUser(AnonymousUser, KolibriAbstractBaseUser):
"""
Custom anonymous user that also exposes the same interface as KolibriAbstractBaseUser, for consistency.
"""
class Meta:
abstract = True
def is_member_of(self, coll):
return False
def get_roles_for_user(self, user):
return set([])
def get_roles_for_collection(self, coll):
return set([])
def has_role_for_user(self, kinds, user):
return False
def has_role_for_collection(self, kinds, coll):
return False
def can_create_instance(self, obj):
# check the object permissions, if available, just in case permissions are granted to anon users
if _has_permissions_class(obj):
return obj.permissions.user_can_create_object(self, obj)
else:
return False
def can_read(self, obj):
# check the object permissions, if available, just in case permissions are granted to anon users
if _has_permissions_class(obj):
return obj.permissions.user_can_read_object(self, obj)
else:
return False
def can_update(self, obj):
# check the object permissions, if available, just in case permissions are granted to anon users
if _has_permissions_class(obj):
return obj.permissions.user_can_update_object(self, obj)
else:
return False
def can_delete(self, obj):
# check the object permissions, if available, just in case permissions are granted to anon users
if _has_permissions_class(obj):
return obj.permissions.user_can_delete_object(self, obj)
else:
return False
def filter_readable(self, queryset):
# check the object permissions, if available, just in case permissions are granted to anon users
if _has_permissions_class(queryset.model):
return queryset.model.permissions.readable_by_user_filter(self, queryset).distinct()
else:
return queryset.none()
@python_2_unicode_compatible
class FacilityUser(KolibriAbstractBaseUser, AbstractFacilityDataModel):
"""
``FacilityUser`` is the fundamental object of the auth app. These users represent the main users, and can be associated
with a hierarchy of ``Collections`` through ``Memberships`` and ``Roles``, which then serve to help determine permissions.
"""
permissions = (
IsSelf() | # FacilityUser can be read and written by itself
IsAdminForOwnFacility() | # FacilityUser can be read and written by a facility admin
RoleBasedPermissions( # FacilityUser can be read by admin or coach, and updated by admin, but not created/deleted by non-facility admin
target_field=".",
can_be_created_by=(), # we can't check creation permissions by role, as user doesn't exist yet
can_be_read_by=(role_kinds.ADMIN, role_kinds.COACH),
can_be_updated_by=(role_kinds.ADMIN,),
can_be_deleted_by=(), # don't want a classroom admin deleting a user completely, just removing them from the class
)
)
facility = models.ForeignKey("Facility")
# FacilityUsers can't access the Django admin interface
is_staff = False
is_superuser = False
class Meta:
unique_together = (("username", "facility"),)
def infer_dataset(self):
return self.facility.dataset
def is_member_of(self, coll):
if self.dataset_id != coll.dataset_id:
return False
if coll.kind == collection_kinds.FACILITY:
return True # FacilityUser is always a member of her own facility
return HierarchyRelationsFilter(FacilityUser.objects.all()).filter_by_hierarchy(
target_user=F("id"),
ancestor_collection=coll.id,
).filter(id=self.id).exists()
def get_roles_for_user(self, user):
if not hasattr(user, "dataset_id") or self.dataset_id != user.dataset_id:
return set([])
role_instances = HierarchyRelationsFilter(Role).filter_by_hierarchy(
ancestor_collection=F("collection"),
source_user=F("user"),
target_user=user,
).filter(user=self)
return set([instance["kind"] for instance in role_instances.values("kind").distinct()])
def get_roles_for_collection(self, coll):
if self.dataset_id != coll.dataset_id:
return set([])
role_instances = HierarchyRelationsFilter(Role).filter_by_hierarchy(
ancestor_collection=F("collection"),
source_user=F("user"),
descendant_collection=coll,
).filter(user=self)
return set([instance["kind"] for instance in role_instances.values("kind").distinct()])
def has_role_for_user(self, kinds, user):
if not kinds:
return False
if not hasattr(user, "dataset_id") or self.dataset_id != user.dataset_id:
return False
return HierarchyRelationsFilter(Role).filter_by_hierarchy(
ancestor_collection=F("collection"),
source_user=F("user"),
role_kind=kinds,
target_user=user,
).filter(user=self).exists()
def has_role_for_collection(self, kinds, coll):
if not kinds:
return False
if self.dataset_id != coll.dataset_id:
return False
return HierarchyRelationsFilter(Role).filter_by_hierarchy(
ancestor_collection=F("collection"),
source_user=F("user"),
role_kind=kinds,
descendant_collection=coll,
).filter(user=self).exists()
def can_create_instance(self, obj):
# a FacilityUser's permissions are determined through the object's permission class
if _has_permissions_class(obj):
return obj.permissions.user_can_create_object(self, obj)
else:
return False
def can_read(self, obj):
# a FacilityUser's permissions are determined through the object's permission class
if _has_permissions_class(obj):
return obj.permissions.user_can_read_object(self, obj)
else:
return False
def can_update(self, obj):
# a FacilityUser's permissions are determined through the object's permission class
if _has_permissions_class(obj):
return obj.permissions.user_can_update_object(self, obj)
else:
return False
def can_delete(self, obj):
# a FacilityUser's permissions are determined through the object's permission class
if _has_permissions_class(obj):
return obj.permissions.user_can_delete_object(self, obj)
else:
return False
def filter_readable(self, queryset):
if _has_permissions_class(queryset.model):
return queryset.model.permissions.readable_by_user_filter(self, queryset).distinct()
else:
return queryset.none()
def __str__(self):
return '"{user}"@"{facility}"'.format(user=self.get_full_name() or self.username, facility=self.facility)
class DeviceOwnerManager(models.Manager):
def create_superuser(self, username, password, **extra_fields):
if not username:
raise ValueError('The given username must be set')
user = DeviceOwner(username=username)
user.set_password(password)
user.save()
return user
@python_2_unicode_compatible
class DeviceOwner(KolibriAbstractBaseUser):
"""
When a user first installs Kolibri on a device, they will be prompted to create a ``DeviceOwner``, a special kind of
user which is associated with that device only, and who must give permission to make broad changes to the Kolibri
installation on that device (such as creating a ``Facility``, or changing configuration settings).
Actions not relating to user data but specifically to a device -- like upgrading Kolibri, changing whether the
device is a Classroom Server or Classroom Client, or determining manually which data should be synced -- must be
performed by a ``DeviceOwner``.
A ``DeviceOwner`` is a superuser, and has full access to do anything she wants with data on the device.
"""
objects = DeviceOwnerManager()
# DeviceOwners can access the Django admin interface
is_staff = True
is_superuser = True
def is_member_of(self, coll):
return False # a DeviceOwner is not a member of any Collection
def get_roles_for_user(self, user):
return set([role_kinds.ADMIN]) # a DeviceOwner has admin role for all users on the device
def get_roles_for_collection(self, coll):
return set([role_kinds.ADMIN]) # a DeviceOwner has admin role for all collections on the device
def has_role_for_user(self, kinds, user):
if isinstance(kinds, string_types):
kinds = [kinds]
return role_kinds.ADMIN in kinds # a DeviceOwner has admin role for all users on the device
def has_role_for_collection(self, kinds, coll):
if isinstance(kinds, string_types):
kinds = [kinds]
return role_kinds.ADMIN in kinds # a DeviceOwner has admin role for all collections on the device
def can_create_instance(self, obj):
# DeviceOwners are superusers, and can do anything
return True
def can_read(self, obj):
# DeviceOwners are superusers, and can do anything
return True
def can_update(self, obj):
# DeviceOwners are superusers, and can do anything
return True
def can_delete(self, obj):
# DeviceOwners are superusers, and can do anything
return True
def filter_readable(self, queryset):
return queryset
def __str__(self):
return self.get_full_name() or self.username
def has_perm(self, perm, obj=None):
# ensure the DeviceOwner has full access to the Django admin
return True
def has_perms(self, perm_list, obj=None):
# ensure the DeviceOwner has full access to the Django admin
return True
def has_module_perms(self, app_label):
# ensure the DeviceOwner has full access to the Django admin
return True
@python_2_unicode_compatible
class Collection(MPTTModel, AbstractFacilityDataModel):
"""
``Collections`` are hierarchical groups of ``FacilityUsers``, used for grouping users and making decisions about permissions.
``FacilityUsers`` can have roles for one or more ``Collections``, by way of obtaining ``Roles`` associated with those ``Collections``.
``Collections`` can belong to other ``Collections``, and user membership in a ``Collection`` is conferred through ``Memberships``.
``Collections`` are subdivided into several pre-defined levels.
"""
# Collection can be read by anybody from the facility; writing is only allowed by an admin for the collection.
# Furthermore, no FacilityUser can create or delete a Facility. Permission to create a collection is governed
# by roles in relation to the new collection's parent collection (see CollectionSpecificRoleBasedPermissions).
permissions = IsFromSameFacility(read_only=True) | CollectionSpecificRoleBasedPermissions()
_KIND = None # Should be overridden in subclasses to specify what "kind" they are
name = models.CharField(max_length=100)
parent = TreeForeignKey('self', null=True, blank=True, related_name='children', db_index=True)
kind = models.CharField(max_length=20, choices=collection_kinds.choices)
def clean_fields(self, *args, **kwargs):
self._ensure_kind()
super(Collection, self).clean_fields(*args, **kwargs)
def save(self, *args, **kwargs):
self._ensure_kind()
super(Collection, self).save(*args, **kwargs)
def _ensure_kind(self):
"""
Make sure the "kind" is set correctly on the model, corresponding to the appropriate subclass of ``Collection``.
"""
if self._KIND:
self.kind = self._KIND
def get_members(self):
if self.kind == collection_kinds.FACILITY:
return FacilityUser.objects.filter(dataset=self.dataset) # FacilityUser is always a member of her own facility
return HierarchyRelationsFilter(FacilityUser).filter_by_hierarchy(
target_user=F("id"),
ancestor_collection=self,
)
def add_role(self, user, role_kind):
"""
Create a ``Role`` associating the provided user with this collection, with the specified kind of role.
If the Role object already exists, just return that, without changing anything.
:param user: The ``FacilityUser`` to associate with this ``Collection``.
:param role_kind: The kind of role to give the user with respect to this ``Collection``.
:return: The ``Role`` object (possibly new) that associates the user with the ``Collection``.
"""
# ensure the specified role kind is valid
if role_kind not in (kind[0] for kind in role_kinds.choices):
raise InvalidRoleKind("'{role_kind}' is not a valid role kind.".format(role_kind=role_kind))
# ensure the provided user is a FacilityUser
if not isinstance(user, FacilityUser):
raise UserIsNotFacilityUser("You can only add roles for FacilityUsers.")
# create the necessary role, if it doesn't already exist
role, created = Role.objects.get_or_create(user=user, collection=self, kind=role_kind)
return role
def remove_role(self, user, role_kind):
"""
Remove any ``Role`` objects associating the provided user with this ``Collection``, with the specified kind of role.
:param user: The ``FacilityUser`` to dissociate from this ``Collection`` (for the specific role kind).
:param role_kind: The kind of role to remove from the user with respect to this ``Collection``.
"""
# ensure the specified role kind is valid
if role_kind not in (kind[0] for kind in role_kinds.choices):
raise InvalidRoleKind("'{role_kind}' is not a valid role kind.".format(role_kind=role_kind))
# ensure the provided user is a FacilityUser
if not isinstance(user, FacilityUser):
raise UserIsNotFacilityUser("You can only remove roles for FacilityUsers.")
# make sure the user has the role to begin with
if not user.has_role_for_collection(role_kind, self):
raise UserDoesNotHaveRoleError("User does not have this role for this collection.")
# delete the appropriate role, if it exists
results = Role.objects.filter(user=user, collection=self, kind=role_kind).delete()
# if no Roles were deleted, the user's role must have been indirect (via the collection hierarchy)
if results[0] == 0:
raise UserHasRoleOnlyIndirectlyThroughHierarchyError(
"Role cannot be removed, as user has it only indirectly, through the collection hierarchy.")
def add_member(self, user):
"""
Create a ``Membership`` associating the provided user with this ``Collection``.
If the ``Membership`` object already exists, just return that, without changing anything.
:param user: The ``FacilityUser`` to add to this ``Collection``.
:return: The ``Membership`` object (possibly new) that associates the user with the ``Collection``.
"""
# ensure the provided user is a FacilityUser
if not isinstance(user, FacilityUser):
raise UserIsNotFacilityUser("You can only add memberships for FacilityUsers.")
# create the necessary membership, if it doesn't already exist
membership, created = Membership.objects.get_or_create(user=user, collection=self)
return membership
def remove_member(self, user):
"""
Remove any ``Membership`` objects associating the provided user with this ``Collection``.
:param user: The ``FacilityUser`` to remove from this ``Collection``.
:return: ``True`` if a ``Membership`` was removed, ``False`` if there was no matching ``Membership`` to remove.
"""
# ensure the provided user is a FacilityUser
if not isinstance(user, FacilityUser):
raise UserIsNotFacilityUser("You can only remove memberships for FacilityUsers.")
if not user.is_member_of(self):
raise UserIsNotMemberError("The user is not a member of the collection, and cannot be removed.")
# delete the appropriate membership, if it exists
results = Membership.objects.filter(user=user, collection=self).delete()
# if no Memberships were deleted, the user's membership must have been indirect (via the collection hierarchy)
if results[0] == 0:
raise UserIsMemberOnlyIndirectlyThroughHierarchyError(
"Membership cannot be removed, as user is a member only indirectly, through the collection hierarchy.")
def infer_dataset(self):
if self.parent:
# subcollections inherit dataset from root of their tree
# (we can't call `get_root` directly on self, as it won't work if self hasn't yet been saved)
return self.parent.get_root().dataset
else:
return None # the root node (i.e. Facility) must be explicitly tied to a dataset
def __str__(self):
return '"{name}" ({kind})'.format(name=self.name, kind=self.kind)
@python_2_unicode_compatible
class Membership(AbstractFacilityDataModel):
"""
A ``FacilityUser`` can be marked as a member of a ``Collection`` through a ``Membership`` object. Being a member of a
``Collection`` also means being a member of all the ``Collections`` above that ``Collection`` in the tree (i.e. if you
are a member of a ``LearnerGroup``, you are also a member of the ``Classroom`` that contains that ``LearnerGroup``,
and of the ``Facility`` that contains that ``Classroom``).
"""
permissions = (
IsOwn(read_only=True) | # users can read their own Memberships
RoleBasedPermissions( # Memberships can be read and written by admins, and read by coaches, for the member user
target_field="user",
can_be_created_by=(role_kinds.ADMIN,),
can_be_read_by=(role_kinds.ADMIN, role_kinds.COACH),
can_be_updated_by=(), # Membership objects shouldn't be updated; they should be deleted and recreated as needed
can_be_deleted_by=(role_kinds.ADMIN,),
)
)
user = models.ForeignKey('FacilityUser', blank=False, null=False)
# Note: "It's recommended you use mptt.fields.TreeForeignKey wherever you have a foreign key to an MPTT model.
# https://django-mptt.github.io/django-mptt/models.html#treeforeignkey-treeonetoonefield-treemanytomanyfield
collection = TreeForeignKey("Collection")
class Meta:
unique_together = (("user", "collection"),)
def infer_dataset(self):
user_dataset = self.user.dataset
collection_dataset = self.collection.dataset
if user_dataset != collection_dataset:
raise KolibriValidationError("Collection and user for a Membership object must be in same dataset.")
return user_dataset
def __str__(self):
return "{user}'s membership in {collection}".format(user=self.user, collection=self.collection)
@python_2_unicode_compatible
class Role(AbstractFacilityDataModel):
"""
A ``FacilityUser`` can have a role for a particular ``Collection`` through a ``Role`` object, which also stores
the "kind" of the ``Role`` (currently, one of "admin" or "coach"). Having a role for a ``Collection`` also
implies having that role for all sub-collections of that ``Collection`` (i.e. all the ``Collections`` below it
in the tree).
"""
permissions = (
IsOwn(read_only=True) | # users can read their own Roles
        RoleBasedPermissions( # Roles can be read and written by admins, and read by coaches, for the role collection
target_field="collection",
can_be_created_by=(role_kinds.ADMIN,),
can_be_read_by=(role_kinds.ADMIN, role_kinds.COACH),
can_be_updated_by=(), # Role objects shouldn't be updated; they should be deleted and recreated as needed
can_be_deleted_by=(role_kinds.ADMIN,),
)
)
user = models.ForeignKey('FacilityUser', blank=False, null=False)
# Note: "It's recommended you use mptt.fields.TreeForeignKey wherever you have a foreign key to an MPTT model.
# https://django-mptt.github.io/django-mptt/models.html#treeforeignkey-treeonetoonefield-treemanytomanyfield
collection = TreeForeignKey("Collection")
kind = models.CharField(max_length=20, choices=role_kinds.choices)
class Meta:
unique_together = (("user", "collection", "kind"),)
def infer_dataset(self):
user_dataset = self.user.dataset
collection_dataset = self.collection.dataset
if user_dataset != collection_dataset:
raise KolibriValidationError("The collection and user for a Role object must be in the same dataset.")
return user_dataset
def __str__(self):
return "{user}'s {kind} role for {collection}".format(user=self.user, kind=self.kind, collection=self.collection)
class CollectionProxyManager(models.Manager):
def get_queryset(self):
return super(CollectionProxyManager, self).get_queryset().filter(kind=self.model._KIND)
@python_2_unicode_compatible
class Facility(Collection):
_KIND = collection_kinds.FACILITY
objects = CollectionProxyManager()
class Meta:
proxy = True
def save(self, *args, **kwargs):
if self.parent:
raise IntegrityError("Facility must be the root of a collection tree, and cannot have a parent.")
super(Facility, self).save(*args, **kwargs)
def infer_dataset(self):
# if we don't yet have a dataset, create a new one for this facility
if not self.dataset_id:
self.dataset = FacilityDataset.objects.create()
return self.dataset
def get_classrooms(self):
"""
Returns a QuerySet of Classrooms under this Facility.
:return: A Classroom QuerySet.
"""
return Classroom.objects.filter(parent=self)
def add_admin(self, user):
return self.add_role(user, role_kinds.ADMIN)
def add_admins(self, users):
return [self.add_admin(user) for user in users]
def remove_admin(self, user):
self.remove_role(user, role_kinds.ADMIN)
def add_coach(self, user):
return self.add_role(user, role_kinds.COACH)
def add_coaches(self, users):
return [self.add_coach(user) for user in users]
def remove_coach(self, user):
self.remove_role(user, role_kinds.COACH)
def __str__(self):
return self.name
@python_2_unicode_compatible
class Classroom(Collection):
_KIND = collection_kinds.CLASSROOM
objects = CollectionProxyManager()
class Meta:
proxy = True
def save(self, *args, **kwargs):
if not self.parent:
raise IntegrityError("Classroom cannot be the root of a collection tree, and must have a parent.")
super(Classroom, self).save(*args, **kwargs)
def get_facility(self):
"""
Gets the ``Classroom``'s parent ``Facility``.
:return: A ``Facility`` instance.
"""
return Facility.objects.get(id=self.parent_id)
def get_learner_groups(self):
"""
Returns a ``QuerySet`` of ``LearnerGroups`` associated with this ``Classroom``.
:return: A ``LearnerGroup`` ``QuerySet``.
"""
return LearnerGroup.objects.filter(parent=self)
def add_admin(self, user):
return self.add_role(user, role_kinds.ADMIN)
def add_admins(self, users):
return [self.add_admin(user) for user in users]
def remove_admin(self, user):
self.remove_role(user, role_kinds.ADMIN)
def add_coach(self, user):
return self.add_role(user, role_kinds.COACH)
def add_coaches(self, users):
return [self.add_coach(user) for user in users]
def remove_coach(self, user):
self.remove_role(user, role_kinds.COACH)
def __str__(self):
return self.name
@python_2_unicode_compatible
class LearnerGroup(Collection):
_KIND = collection_kinds.LEARNERGROUP
objects = CollectionProxyManager()
class Meta:
proxy = True
def save(self, *args, **kwargs):
if not self.parent:
raise IntegrityError("LearnerGroup cannot be the root of a collection tree, and must have a parent.")
super(LearnerGroup, self).save(*args, **kwargs)
def get_classroom(self):
"""
Gets the ``LearnerGroup``'s parent ``Classroom``.
:return: A ``Classroom`` instance.
"""
return Classroom.objects.get(id=self.parent_id)
def add_learner(self, user):
return self.add_member(user)
def add_learners(self, users):
return [self.add_learner(user) for user in users]
def remove_learner(self, user):
return self.remove_member(user)
def __str__(self):
return self.name
|
|
# -*- coding: utf-8 -*-
"""
A real simple app for using webapp2 with auth and session.
It just covers the basics: creating a user, login, logout
and a decorator for protecting certain handlers.
Routes are set up in routes.py and added in main.py
"""
# standard library imports
import logging
import json
# related third party imports
import webapp2
import httpagentparser
from webapp2_extras import security
from webapp2_extras.auth import InvalidAuthIdError, InvalidPasswordError
from webapp2_extras.i18n import gettext as _
from webapp2_extras.appengine.auth.models import Unique
from google.appengine.api import taskqueue
from google.appengine.api import users
from google.appengine.api.datastore_errors import BadValueError
from google.appengine.runtime import apiproxy_errors
from github import github
from linkedin import linkedin
from google.appengine.ext import ndb
from google.appengine.api import mail
# local application/library specific imports
import models
import forms as forms
from lib import utils, captcha, twitter
from lib.basehandler import BaseHandler
from lib.decorators import user_required
from lib.decorators import taskqueue_method
from lib import facebook
class LoginRequiredHandler(BaseHandler):
def get(self):
continue_url, = self.request.get('continue', allow_multiple=True)
self.redirect(users.create_login_url(dest_url=continue_url))
class RegisterBaseHandler(BaseHandler):
"""
Base class for handlers with registration and login forms.
"""
@webapp2.cached_property
def form(self):
return forms.RegisterForm(self)
class SendEmailHandler(BaseHandler):
"""
Core Handler for sending Emails
Use with TaskQueue
"""
@taskqueue_method
def post(self):
from google.appengine.api import mail, app_identity
to = self.request.get("to")
subject = self.request.get("subject")
body = self.request.get("body")
sender = self.request.get("sender")
        if not sender or not utils.is_email_valid(sender):
if utils.is_email_valid(self.app.config.get('contact_sender')):
sender = self.app.config.get('contact_sender')
else:
app_id = app_identity.get_application_id()
sender = "%s <no-reply@%s.appspotmail.com>" % (app_id, app_id)
if self.app.config['log_email']:
try:
logEmail = models.LogEmail(
sender=sender,
to=to,
subject=subject,
body=body,
when=utils.get_date_time("datetimeProperty")
)
logEmail.put()
except (apiproxy_errors.OverQuotaError, BadValueError):
logging.error("Error saving Email Log in datastore")
try:
message = mail.EmailMessage()
message.sender = sender
message.to = to
message.subject = subject
message.html = body
message.send()
except Exception, e:
logging.error("Error sending email: %s" % e)
class LoginHandler(BaseHandler):
"""
Handler for authentication
"""
def get(self):
""" Returns a simple HTML form for login """
if self.user:
            return self.redirect_to('home')
params = {}
return self.render_template('login.html', **params)
def post(self):
"""
username: Get the username from POST dict
password: Get the password from POST dict
"""
if not self.form.validate():
return self.get()
username = self.form.username.data.lower()
continue_url = self.request.get('continue_url').encode('ascii', 'ignore')
try:
if utils.is_email_valid(username):
user = models.User.get_by_email(username)
if user:
auth_id = user.auth_ids[0]
else:
raise InvalidAuthIdError
else:
auth_id = "own:%s" % username
user = models.User.get_by_auth_id(auth_id)
password = self.form.password.data.strip()
remember_me = True if str(self.request.POST.get('remember_me')) == 'on' else False
# Password to SHA512
password = utils.hashing(password, self.app.config.get('salt'))
# Try to login user with password
# Raises InvalidAuthIdError if user is not found
# Raises InvalidPasswordError if provided password
# doesn't match with specified user
self.auth.get_user_by_password(
auth_id, password, remember=remember_me)
# if user account is not activated, logout and redirect to home
            if not user.activated:
# logout
self.auth.unset_session()
# redirect to home with error message
resend_email_uri = self.uri_for('resend-account-activation', user_id=user.get_id(),
token=models.User.create_resend_token(user.get_id()))
message = _('Your account has not yet been activated. Please check your email to activate it or') + \
' <a href="' + resend_email_uri + '">' + _('click here') + '</a> ' + _('to resend the email.')
self.add_message(message, 'error')
return self.redirect_to('home')
# check twitter association in session
twitter_helper = twitter.TwitterAuth(self)
twitter_association_data = twitter_helper.get_association_data()
if twitter_association_data is not None:
if models.SocialUser.check_unique(user.key, 'twitter', str(twitter_association_data['id'])):
social_user = models.SocialUser(
user=user.key,
provider='twitter',
uid=str(twitter_association_data['id']),
extra_data=twitter_association_data
)
social_user.put()
# check facebook association
fb_data = None
try:
fb_data = json.loads(self.session['facebook'])
except:
pass
if fb_data is not None:
if models.SocialUser.check_unique(user.key, 'facebook', str(fb_data['id'])):
social_user = models.SocialUser(
user=user.key,
provider='facebook',
uid=str(fb_data['id']),
extra_data=fb_data
)
social_user.put()
# check linkedin association
li_data = None
try:
li_data = json.loads(self.session['linkedin'])
except:
pass
if li_data is not None:
if models.SocialUser.check_unique(user.key, 'linkedin', str(li_data['id'])):
social_user = models.SocialUser(
user=user.key,
provider='linkedin',
uid=str(li_data['id']),
extra_data=li_data
)
social_user.put()
# end linkedin
if self.app.config['log_visit']:
try:
logVisit = models.LogVisit(
user=user.key,
uastring=self.request.user_agent,
ip=self.request.remote_addr,
timestamp=utils.get_date_time()
)
logVisit.put()
except (apiproxy_errors.OverQuotaError, BadValueError):
logging.error("Error saving Visit Log in datastore")
if continue_url:
self.redirect(continue_url)
else:
self.redirect_to('home')
except (InvalidAuthIdError, InvalidPasswordError), e:
# Returns error message to self.response.write in
# the BaseHandler.dispatcher
message = _("Your username or password is incorrect. "
"Please try again (make sure your caps lock is off)")
self.add_message(message, 'error')
self.redirect_to('login', continue_url=continue_url) if continue_url else self.redirect_to('login')
@webapp2.cached_property
def form(self):
return forms.LoginForm(self)
class SocialLoginHandler(BaseHandler):
"""
Handler for Social authentication
"""
def get(self, provider_name):
provider = self.provider_info[provider_name]
if not self.app.config.get('enable_federated_login'):
message = _('Federated login is disabled.')
self.add_message(message, 'warning')
return self.redirect_to('login')
callback_url = "%s/social_login/%s/complete" % (self.request.host_url, provider_name)
if provider_name == "twitter":
twitter_helper = twitter.TwitterAuth(self, redirect_uri=callback_url)
self.redirect(twitter_helper.auth_url())
elif provider_name == "facebook":
self.session['linkedin'] = None
perms = ['email', 'publish_stream']
self.redirect(facebook.auth_url(self.app.config.get('fb_api_key'), callback_url, perms))
elif provider_name == 'linkedin':
self.session['facebook'] = None
authentication = linkedin.LinkedInAuthentication(
self.app.config.get('linkedin_api'),
self.app.config.get('linkedin_secret'),
callback_url,
[linkedin.PERMISSIONS.BASIC_PROFILE, linkedin.PERMISSIONS.EMAIL_ADDRESS])
self.redirect(authentication.authorization_url)
elif provider_name == "github":
scope = 'gist'
github_helper = github.GithubAuth(self.app.config.get('github_server'),
self.app.config.get('github_client_id'), \
self.app.config.get('github_client_secret'),
self.app.config.get('github_redirect_uri'), scope)
self.redirect(github_helper.get_authorize_url())
elif provider_name in models.SocialUser.open_id_providers():
continue_url = self.request.get('continue_url')
if continue_url:
dest_url = self.uri_for('social-login-complete', provider_name=provider_name, continue_url=continue_url)
else:
dest_url = self.uri_for('social-login-complete', provider_name=provider_name)
try:
login_url = users.create_login_url(federated_identity=provider['uri'], dest_url=dest_url)
self.redirect(login_url)
except users.NotAllowedError:
                self.add_message('You must enable Federated Login for this application.<br> '
'<a href="http://appengine.google.com" target="_blank">Google App Engine Control Panel</a> -> '
'Administration -> Application Settings -> Authentication Options', 'error')
self.redirect_to('login')
else:
message = _('%s authentication is not yet implemented.' % provider.get('label'))
self.add_message(message, 'warning')
self.redirect_to('login')
class CallbackSocialLoginHandler(BaseHandler):
"""
Callback (Save Information) for Social Authentication
"""
def get(self, provider_name):
if not self.app.config.get('enable_federated_login'):
message = _('Federated login is disabled.')
self.add_message(message, 'warning')
return self.redirect_to('login')
continue_url = self.request.get('continue_url')
if provider_name == "twitter":
oauth_token = self.request.get('oauth_token')
oauth_verifier = self.request.get('oauth_verifier')
twitter_helper = twitter.TwitterAuth(self)
user_data = twitter_helper.auth_complete(oauth_token,
oauth_verifier)
logging.info('twitter user_data: ' + str(user_data))
if self.user:
# new association with twitter
user_info = models.User.get_by_id(long(self.user_id))
if models.SocialUser.check_unique(user_info.key, 'twitter', str(user_data['user_id'])):
social_user = models.SocialUser(
user=user_info.key,
provider='twitter',
uid=str(user_data['user_id']),
extra_data=user_data
)
social_user.put()
message = _('Twitter association added.')
self.add_message(message, 'success')
else:
message = _('This Twitter account is already in use.')
self.add_message(message, 'error')
if continue_url:
self.redirect(continue_url)
else:
self.redirect_to('edit-profile')
else:
# login with twitter
social_user = models.SocialUser.get_by_provider_and_uid('twitter',
str(user_data['user_id']))
if social_user:
                    # Social user exists. Need to authenticate the related site account
user = social_user.user.get()
self.auth.set_session(self.auth.store.user_to_dict(user), remember=True)
if self.app.config['log_visit']:
try:
logVisit = models.LogVisit(
user=user.key,
uastring=self.request.user_agent,
ip=self.request.remote_addr,
timestamp=utils.get_date_time()
)
logVisit.put()
except (apiproxy_errors.OverQuotaError, BadValueError):
logging.error("Error saving Visit Log in datastore")
if continue_url:
self.redirect(continue_url)
else:
self.redirect_to('home')
else:
uid = str(user_data['user_id'])
email = str(user_data.get('email'))
self.create_account_from_social_provider(provider_name, uid, email, continue_url, user_data)
# github association
elif provider_name == "github":
# get our request code back from the social login handler above
code = self.request.get('code')
# create our github auth object
scope = 'gist'
github_helper = github.GithubAuth(self.app.config.get('github_server'),
self.app.config.get('github_client_id'), \
self.app.config.get('github_client_secret'),
self.app.config.get('github_redirect_uri'), scope)
# retrieve the access token using the code and auth object
access_token = github_helper.get_access_token(code)
user_data = github_helper.get_user_info(access_token)
logging.info('github user_data: ' + str(user_data))
if self.user:
                # user is already logged in so we set a new association with github
user_info = models.User.get_by_id(long(self.user_id))
if models.SocialUser.check_unique(user_info.key, 'github', str(user_data['login'])):
social_user = models.SocialUser(
user=user_info.key,
provider='github',
uid=str(user_data['login']),
extra_data=user_data
)
social_user.put()
message = _('Github association added.')
self.add_message(message, 'success')
else:
message = _('This Github account is already in use.')
self.add_message(message, 'error')
self.redirect_to('edit-profile')
else:
# user is not logged in, but is trying to log in via github
social_user = models.SocialUser.get_by_provider_and_uid('github', str(user_data['login']))
if social_user:
                    # Social user exists. Need to authenticate the related site account
user = social_user.user.get()
self.auth.set_session(self.auth.store.user_to_dict(user), remember=True)
if self.app.config['log_visit']:
try:
logVisit = models.LogVisit(
user=user.key,
uastring=self.request.user_agent,
ip=self.request.remote_addr,
timestamp=utils.get_date_time()
)
logVisit.put()
except (apiproxy_errors.OverQuotaError, BadValueError):
logging.error("Error saving Visit Log in datastore")
self.redirect_to('home')
else:
uid = str(user_data['id'])
email = str(user_data.get('email'))
self.create_account_from_social_provider(provider_name, uid, email, continue_url, user_data)
#end github
# facebook association
elif provider_name == "facebook":
code = self.request.get('code')
callback_url = "%s/social_login/%s/complete" % (self.request.host_url, provider_name)
token = facebook.get_access_token_from_code(code, callback_url, self.app.config.get('fb_api_key'),
self.app.config.get('fb_secret'))
access_token = token['access_token']
fb = facebook.GraphAPI(access_token)
user_data = fb.get_object('me')
logging.info('facebook user_data: ' + str(user_data))
if self.user:
# new association with facebook
user_info = models.User.get_by_id(long(self.user_id))
if models.SocialUser.check_unique(user_info.key, 'facebook', str(user_data['id'])):
social_user = models.SocialUser(
user=user_info.key,
provider='facebook',
uid=str(user_data['id']),
extra_data=user_data
)
social_user.put()
message = _('Facebook association added!')
self.add_message(message, 'success')
else:
message = _('This Facebook account is already in use!')
self.add_message(message, 'error')
if continue_url:
self.redirect(continue_url)
else:
self.redirect_to('edit-profile')
else:
# login with Facebook
social_user = models.SocialUser.get_by_provider_and_uid('facebook',
str(user_data['id']))
if social_user:
                    # Social user exists. Need to authenticate the related site account
user = social_user.user.get()
self.auth.set_session(self.auth.store.user_to_dict(user), remember=True)
if self.app.config['log_visit']:
try:
logVisit = models.LogVisit(
user=user.key,
uastring=self.request.user_agent,
ip=self.request.remote_addr,
timestamp=utils.get_date_time()
)
logVisit.put()
except (apiproxy_errors.OverQuotaError, BadValueError):
logging.error("Error saving Visit Log in datastore")
if continue_url:
self.redirect(continue_url)
else:
self.redirect_to('home')
else:
uid = str(user_data['id'])
email = str(user_data.get('email'))
self.create_account_from_social_provider(provider_name, uid, email, continue_url, user_data)
# end facebook
# association with linkedin
elif provider_name == "linkedin":
callback_url = "%s/social_login/%s/complete" % (self.request.host_url, provider_name)
authentication = linkedin.LinkedInAuthentication(
self.app.config.get('linkedin_api'),
self.app.config.get('linkedin_secret'),
callback_url,
[linkedin.PERMISSIONS.BASIC_PROFILE, linkedin.PERMISSIONS.EMAIL_ADDRESS])
authentication.authorization_code = self.request.get('code')
access_token = authentication.get_access_token()
link = linkedin.LinkedInApplication(authentication)
u_data = link.get_profile(selectors=['id', 'first-name', 'last-name', 'email-address'])
user_data = {
'first_name': u_data.get('firstName'),
'last_name': u_data.get('lastName'),
'id': u_data.get('id'),
'email': u_data.get('emailAddress')}
self.session['linkedin'] = json.dumps(user_data)
logging.info('linkedin user_data: ' + str(user_data))
if self.user:
# new association with linkedin
user_info = models.User.get_by_id(long(self.user_id))
if models.SocialUser.check_unique(user_info.key, 'linkedin', str(user_data['id'])):
social_user = models.SocialUser(
user=user_info.key,
provider='linkedin',
uid=str(user_data['id']),
extra_data=user_data
)
social_user.put()
message = _('Linkedin association added!')
self.add_message(message, 'success')
else:
message = _('This Linkedin account is already in use!')
self.add_message(message, 'error')
if continue_url:
self.redirect(continue_url)
else:
self.redirect_to('edit-profile')
else:
# login with Linkedin
social_user = models.SocialUser.get_by_provider_and_uid('linkedin',
str(user_data['id']))
if social_user:
                    # Social user exists. Need to authenticate the related site account
user = social_user.user.get()
self.auth.set_session(self.auth.store.user_to_dict(user), remember=True)
if self.app.config['log_visit']:
try:
logVisit = models.LogVisit(
user=user.key,
uastring=self.request.user_agent,
ip=self.request.remote_addr,
timestamp=utils.get_date_time()
)
logVisit.put()
except (apiproxy_errors.OverQuotaError, BadValueError):
logging.error("Error saving Visit Log in datastore")
if continue_url:
self.redirect(continue_url)
else:
self.redirect_to('home')
else:
uid = str(user_data['id'])
email = str(user_data.get('email'))
self.create_account_from_social_provider(provider_name, uid, email, continue_url, user_data)
#end linkedin
# google, myopenid, yahoo OpenID Providers
elif provider_name in models.SocialUser.open_id_providers():
provider_display_name = models.SocialUser.PROVIDERS_INFO[provider_name]['label']
# get info passed from OpenId Provider
from google.appengine.api import users
current_user = users.get_current_user()
if current_user:
if current_user.federated_identity():
uid = current_user.federated_identity()
else:
uid = current_user.user_id()
email = current_user.email()
else:
message = _('No user authentication information received from %s. '
'Please ensure you are logging in from an authorized OpenID Provider (OP).'
% provider_display_name)
self.add_message(message, 'error')
return self.redirect_to('login', continue_url=continue_url) if continue_url else self.redirect_to(
'login')
if self.user:
# add social account to user
user_info = models.User.get_by_id(long(self.user_id))
if models.SocialUser.check_unique(user_info.key, provider_name, uid):
social_user = models.SocialUser(
user=user_info.key,
provider=provider_name,
uid=uid
)
social_user.put()
message = _('%s association successfully added.' % provider_display_name)
self.add_message(message, 'success')
else:
message = _('This %s account is already in use.' % provider_display_name)
self.add_message(message, 'error')
if continue_url:
self.redirect(continue_url)
else:
self.redirect_to('edit-profile')
else:
# login with OpenId Provider
social_user = models.SocialUser.get_by_provider_and_uid(provider_name, uid)
if social_user:
# Social user found. Authenticate the user
user = social_user.user.get()
self.auth.set_session(self.auth.store.user_to_dict(user), remember=True)
if self.app.config['log_visit']:
try:
logVisit = models.LogVisit(
user=user.key,
uastring=self.request.user_agent,
ip=self.request.remote_addr,
timestamp=utils.get_date_time()
)
logVisit.put()
except (apiproxy_errors.OverQuotaError, BadValueError):
logging.error("Error saving Visit Log in datastore")
if continue_url:
self.redirect(continue_url)
else:
self.redirect_to('home')
else:
self.create_account_from_social_provider(provider_name, uid, email, continue_url)
else:
message = _('This authentication method is not yet implemented.')
self.add_message(message, 'warning')
self.redirect_to('login', continue_url=continue_url) if continue_url else self.redirect_to('login')
def create_account_from_social_provider(self, provider_name, uid, email=None, continue_url=None, user_data=None):
"""Social user does not exist yet so create it with the federated identity provided (uid)
and create prerequisite user and log the user account in
"""
provider_display_name = models.SocialUser.PROVIDERS_INFO[provider_name]['label']
if models.SocialUser.check_unique_uid(provider_name, uid):
            # create the user
            # create_user returns a tuple whose first value is a BOOL:
            # True if a new user was created, False if not.
            # Assume the provider has already verified the email address,
            # so if an email is provided, mark the account as activated.
auth_id = "%s:%s" % (provider_name, uid)
if email:
unique_properties = ['email']
user_info = self.auth.store.user_model.create_user(
auth_id, unique_properties, email=email,
activated=True
)
else:
user_info = self.auth.store.user_model.create_user(
auth_id, activated=True
)
if not user_info[0]: #user is a tuple
message = _('The account %s is already in use.' % provider_display_name)
self.add_message(message, 'error')
return self.redirect_to('register')
user = user_info[1]
# create social user and associate with user
social_user = models.SocialUser(
user=user.key,
provider=provider_name,
uid=uid,
)
if user_data:
social_user.extra_data = user_data
self.session[provider_name] = json.dumps(user_data) # TODO is this needed?
social_user.put()
# authenticate user
self.auth.set_session(self.auth.store.user_to_dict(user), remember=True)
if self.app.config['log_visit']:
try:
logVisit = models.LogVisit(
user=user.key,
uastring=self.request.user_agent,
ip=self.request.remote_addr,
timestamp=utils.get_date_time()
)
logVisit.put()
except (apiproxy_errors.OverQuotaError, BadValueError):
logging.error("Error saving Visit Log in datastore")
message = _(
'Welcome! You have been registered as a new user through %s and logged in.' % provider_display_name)
self.add_message(message, 'success')
else:
message = _('This %s account is already in use.' % provider_display_name)
self.add_message(message, 'error')
if continue_url:
self.redirect(continue_url)
else:
self.redirect_to('edit-profile')
class DeleteSocialProviderHandler(BaseHandler):
"""
Delete Social association with an account
"""
@user_required
def post(self, provider_name):
if self.user:
user_info = models.User.get_by_id(long(self.user_id))
if len(user_info.get_social_providers_info()['used']) > 1 or (user_info.password is not None):
social_user = models.SocialUser.get_by_user_and_provider(user_info.key, provider_name)
if social_user:
social_user.key.delete()
message = _('%s successfully disassociated.' % provider_name)
self.add_message(message, 'success')
else:
message = _('Social account on %s not found for this user.' % provider_name)
self.add_message(message, 'error')
else:
message = ('Social account on %s cannot be deleted for user.'
' Please create a username and password to delete social account.' % provider_name)
self.add_message(message, 'error')
self.redirect_to('edit-profile')
class LogoutHandler(BaseHandler):
"""
Destroy user session and redirect to login
"""
def get(self):
if self.user:
message = _("You've signed out successfully. Warning: Please clear all cookies and logout "
"of OpenId providers too if you logged in on a public computer.")
self.add_message(message, 'info')
self.auth.unset_session()
# User is logged out, let's try redirecting to login page
try:
self.redirect(self.auth_config['login_url'])
except (AttributeError, KeyError), e:
logging.error("Error logging out: %s" % e)
message = _("User is logged out, but there was an error on the redirection.")
self.add_message(message, 'error')
return self.redirect_to('home')
class RegisterHandler(BaseHandler):
"""
Handler for Sign Up Users
"""
def get(self):
""" Returns a simple HTML form for create a new user """
if self.user:
            return self.redirect_to('home')
params = {}
return self.render_template('register.html', **params)
def post(self):
""" Get fields from POST dict """
if not self.form.validate():
return self.get()
username = self.form.username.data.lower()
name = self.form.name.data.strip()
last_name = self.form.last_name.data.strip()
email = self.form.email.data.lower()
password = self.form.password.data.strip()
country = self.form.country.data
tz = self.form.tz.data
# Password to SHA512
password = utils.hashing(password, self.app.config.get('salt'))
        # Passing password_raw=password so the user model hashes the password again.
        # create_user returns a tuple whose first value is a bool:
        # True if a new user was created, False if the account already exists.
unique_properties = ['username', 'email']
auth_id = "own:%s" % username
user = self.auth.store.user_model.create_user(
auth_id, unique_properties, password_raw=password,
username=username, name=name, last_name=last_name, email=email,
ip=self.request.remote_addr, country=country, tz=tz
)
if not user[0]: #user is a tuple
if "username" in str(user[1]):
message = _(
'Sorry, The username <strong>{}</strong> is already registered.').format(username)
elif "email" in str(user[1]):
message = _('Sorry, The email <strong>{}</strong> is already registered.').format(email)
else:
message = _('Sorry, The user is already registered.')
self.add_message(message, 'error')
return self.redirect_to('register')
else:
# User registered successfully
            # If the user registered through the form, they must confirm their email before the account is activated.
try:
if not user[1].activated:
# send email
subject = _("%s Account Verification" % self.app.config.get('app_name'))
confirmation_url = self.uri_for("account-activation",
user_id=user[1].get_id(),
token=models.User.create_auth_token(user[1].get_id()),
_full=True)
# load email's template
template_val = {
"app_name": self.app.config.get('app_name'),
"username": username,
"confirmation_url": confirmation_url,
"support_url": self.uri_for("contact", _full=True)
}
body_path = "emails/account_activation.txt"
body = self.jinja2.render_template(body_path, **template_val)
email_url = self.uri_for('taskqueue-send-email')
taskqueue.add(url=email_url, params={
'to': str(email),
'subject': subject,
'body': body,
})
message = _('You were successfully registered. '
'Please check your email to activate your account.')
self.add_message(message, 'success')
return self.redirect_to('home')
                # The account is already activated (e.g. pre-verified), so log the user in and link any pending social associations.
db_user = self.auth.get_user_by_password(user[1].auth_ids[0], password)
# Check Twitter association in session
twitter_helper = twitter.TwitterAuth(self)
twitter_association_data = twitter_helper.get_association_data()
if twitter_association_data is not None:
if models.SocialUser.check_unique(user[1].key, 'twitter', str(twitter_association_data['id'])):
social_user = models.SocialUser(
user=user[1].key,
provider='twitter',
uid=str(twitter_association_data['id']),
extra_data=twitter_association_data
)
social_user.put()
#check Facebook association
                fb_data = json.loads(self.session['facebook']) if 'facebook' in self.session else None
                if fb_data is not None:
                    if models.SocialUser.check_unique(user[1].key, 'facebook', str(fb_data['id'])):
                        social_user = models.SocialUser(
                            user=user[1].key,
provider='facebook',
uid=str(fb_data['id']),
extra_data=fb_data
)
social_user.put()
#check LinkedIn association
                li_data = json.loads(self.session['linkedin']) if 'linkedin' in self.session else None
                if li_data is not None:
                    if models.SocialUser.check_unique(user[1].key, 'linkedin', str(li_data['id'])):
                        social_user = models.SocialUser(
                            user=user[1].key,
provider='linkedin',
uid=str(li_data['id']),
extra_data=li_data
)
social_user.put()
message = _('Welcome <strong>{}</strong>, you are now logged in.').format(username)
self.add_message(message, 'success')
return self.redirect_to('home')
except (AttributeError, KeyError), e:
logging.error('Unexpected error creating the user %s: %s' % (username, e ))
message = _('Unexpected error creating the user %s' % username)
self.add_message(message, 'error')
return self.redirect_to('home')
@webapp2.cached_property
def form(self):
f = forms.RegisterForm(self)
f.country.choices = self.countries_tuple
f.tz.choices = self.tz
return f
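# Hedged illustration (not wired into any route): a condensed sketch of the local
# registration flow used by RegisterHandler above. The raw password is pre-hashed
# with the application salt via utils.hashing, and webapp2_extras hashes it again
# because it is passed as password_raw. The helper name and arguments are
# illustrative only.
def _example_create_local_user(handler, username, raw_password, email):
    pre_hashed = utils.hashing(raw_password, handler.app.config.get('salt'))
    return handler.auth.store.user_model.create_user(
        "own:%s" % username, ['username', 'email'],
        password_raw=pre_hashed, username=username, email=email)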
class AccountActivationHandler(BaseHandler):
"""
Handler for account activation
"""
def get(self, user_id, token):
try:
if not models.User.validate_auth_token(user_id, token):
message = _('The link is invalid.')
self.add_message(message, 'error')
return self.redirect_to('home')
user = models.User.get_by_id(long(user_id))
# activate the user's account
user.activated = True
user.put()
# Login User
self.auth.get_user_by_token(int(user_id), token)
# Delete token
models.User.delete_auth_token(user_id, token)
message = _('Congratulations, Your account <strong>{}</strong> has been successfully activated.').format(
user.username)
self.add_message(message, 'success')
self.redirect_to('home')
except (AttributeError, KeyError, InvalidAuthIdError, NameError), e:
logging.error("Error activating an account: %s" % e)
message = _('Sorry, Some error occurred.')
self.add_message(message, 'error')
return self.redirect_to('home')
class ResendActivationEmailHandler(BaseHandler):
"""
Handler to resend activation email
"""
def get(self, user_id, token):
try:
if not models.User.validate_resend_token(user_id, token):
message = _('The link is invalid.')
self.add_message(message, 'error')
return self.redirect_to('home')
user = models.User.get_by_id(long(user_id))
email = user.email
if (user.activated == False):
# send email
subject = _("%s Account Verification" % self.app.config.get('app_name'))
confirmation_url = self.uri_for("account-activation",
user_id=user.get_id(),
token=models.User.create_auth_token(user.get_id()),
_full=True)
# load email's template
template_val = {
"app_name": self.app.config.get('app_name'),
"username": user.username,
"confirmation_url": confirmation_url,
"support_url": self.uri_for("contact", _full=True)
}
body_path = "emails/account_activation.txt"
body = self.jinja2.render_template(body_path, **template_val)
email_url = self.uri_for('taskqueue-send-email')
taskqueue.add(url=email_url, params={
'to': str(email),
'subject': subject,
'body': body,
})
models.User.delete_resend_token(user_id, token)
message = _('The verification email has been resent to %s. '
'Please check your email to activate your account.' % email)
self.add_message(message, 'success')
return self.redirect_to('home')
else:
message = _('Your account has been activated. Please <a href="/login/">sign in</a> to your account.')
self.add_message(message, 'warning')
return self.redirect_to('home')
except (KeyError, AttributeError), e:
logging.error("Error resending activation email: %s" % e)
message = _('Sorry, Some error occurred.')
self.add_message(message, 'error')
return self.redirect_to('home')
class ContactHandler(BaseHandler):
"""
Handler for Contact Form
"""
def get(self):
""" Returns a simple HTML for contact form """
if self.user:
user_info = models.User.get_by_id(long(self.user_id))
if user_info.name or user_info.last_name:
                self.form.name.data = " ".join(filter(None, [user_info.name, user_info.last_name]))
if user_info.email:
self.form.email.data = user_info.email
params = {
"exception": self.request.get('exception')
}
return self.render_template('contact.html', **params)
def post(self):
""" validate contact form """
if not self.form.validate():
return self.get()
remoteip = self.request.remote_addr
user_agent = self.request.user_agent
exception = self.request.POST.get('exception')
name = self.form.name.data.strip()
email = self.form.email.data.lower()
message = self.form.message.data.strip()
try:
# parsing user_agent and getting which os key to use
# windows uses 'os' while other os use 'flavor'
ua = httpagentparser.detect(user_agent)
            _os = 'flavor' if 'flavor' in ua else 'os'
operating_system = str(ua[_os]['name']) if "name" in ua[_os] else "-"
if 'version' in ua[_os]:
operating_system += ' ' + str(ua[_os]['version'])
if 'dist' in ua:
operating_system += ' ' + str(ua['dist'])
browser = str(ua['browser']['name']) if 'browser' in ua else "-"
browser_version = str(ua['browser']['version']) if 'browser' in ua else "-"
template_val = {
"name": name,
"email": email,
"browser": browser,
"browser_version": browser_version,
"operating_system": operating_system,
"ip": remoteip,
"message": message
}
except Exception as e:
logging.error("error getting user agent info: %s" % e)
try:
subject = _("Contact")
# exceptions for error pages that redirect to contact
if exception != "":
subject = subject + " (Exception error: %s)" % exception
body_path = "emails/contact.txt"
body = self.jinja2.render_template(body_path, **template_val)
email_url = self.uri_for('taskqueue-send-email')
taskqueue.add(url=email_url, params={
'to': self.app.config.get('contact_recipient'),
'subject': subject,
'body': body,
'sender': self.app.config.get('contact_sender'),
})
message = _('Your message was sent successfully.')
self.add_message(message, 'success')
return self.redirect_to('contact')
except (AttributeError, KeyError), e:
logging.error('Error sending contact form: %s' % e)
message = _('Error sending the message. Please try again later.')
self.add_message(message, 'error')
return self.redirect_to('contact')
@webapp2.cached_property
def form(self):
return forms.ContactForm(self)
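# Hedged sketch (illustrative only, mirrors ContactHandler.post above): httpagentparser.detect()
# returns a dict whose OS info usually lives under 'flavor' but under 'os' on Windows,
# so the handler picks whichever key exists before reading 'name'/'version'.
def _example_describe_user_agent(user_agent_string):
    ua = httpagentparser.detect(user_agent_string)
    os_key = 'flavor' if 'flavor' in ua else 'os'
    name = ua.get(os_key, {}).get('name', '-')
    version = ua.get(os_key, {}).get('version', '')
    return ("%s %s" % (name, version)).strip()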
class CategoriesHandler(BaseHandler):
#Handler for Category
def get(self):
        # Sends the visitor to the categories page
return self.render_template('categories.html')
class AboutHandler(BaseHandler):
#Handler for About
def get(self):
        # Sends the visitor to the about page
return self.render_template('about.html')
class PoliciesHandler(BaseHandler):
#Handler for Policies
def get(self):
        # Sends the visitor to the policy page
return self.render_template('policies.html')
class MyProfileHandler(BaseHandler):
# Handler for a User's profile
def get(self):
# Sends the user to the user profile page
if self.user:
user_info = models.User.get_by_id(long(self.user_id))
allItems = models.Item.query()
userItems = allItems.filter(models.Item.user == user_info.key)
#itemNames = userItems.fetch(projection=["title"])
if userItems.count() > 0:
return self.render_template('profile.html', uploadedItems = userItems, added = True)
return self.render_template('profile.html', uploadedItems = userItems, added = False)
self.redirect_to('register')
class AddItemHandler(BaseHandler):
# Handler for adding an item to a User's database
def post(self):
# Sends the user to the user profile page
if self.user:
item = models.Item()
item.title = self.request.get("item-name")
item.description = self.request.get("item-description")
item.price = self.request.get("item-price")
user_info = models.User.get_by_id(long(self.user_id))
item.user = user_info.key
item.username = user_info.username
item.email = user_info.email
item.put()
allItems = models.Item.query()
userItems = allItems.filter(models.Item.user == user_info.key)
return self.render_template('profile.html', uploadedItems = userItems, added = True)
self.redirect_to('home')
class EditItemIntermediaryHandler(BaseHandler):
#Intermediary Handler for editing a User's items
def get(self):
        # Sends the visitor to the edit item page
if self.user:
item_to_change = self.request.get("item-to-edit")
key = ndb.Key(urlsafe=item_to_change)
old_item = key.get()
return self.render_template('edit_item.html', item_to_edit = old_item)
self.redirect_to('home')
class EditItemHandler(BaseHandler):
#Handler for editing a User's items
def post(self):
# Edits the item's data
if self.user:
item_to_change = self.request.get("old-item-key")
key = ndb.Key(urlsafe=item_to_change)
old_item = key.get()
old_item.title = self.request.get("new-item-name")
old_item.description = self.request.get("new-item-description")
old_item.price = self.request.get("new-item-price")
old_item.put()
user_info = models.User.get_by_id(long(self.user_id))
allItems = models.Item.query()
userItems = allItems.filter(models.Item.user == user_info.key)
return self.render_template('profile.html', uploadedItems = userItems, added = True)
self.redirect_to('home')
class DeleteItemHandler(BaseHandler):
#Handler for deleting a User's items
def post(self):
        # Deletes the given item
if self.user:
item_to_delete = self.request.get('item-to-delete')
key = ndb.Key(urlsafe=item_to_delete)
old_item = key.get()
old_item.key.delete()
user_info = models.User.get_by_id(long(self.user_id))
allItems = models.Item.query()
userItems = allItems.filter(models.Item.user == user_info.key)
return self.render_template('profile.html', uploadedItems = userItems, added = True)
self.redirect_to('home')
class ViewProfileHandler(BaseHandler):
#Handler for public profiles
def get(self, username):
        # Sends the visitor to the profile page
name = username
allItems = models.Item.query()
if self.user:
user_info = models.User.get_by_id(long(self.user_id))
if user_info.username == name:
userItems = allItems.filter(models.Item.user == user_info.key)
return self.render_template('public_profile.html', user = name, items = userItems, address = user_info.email)
allUsers = models.User.query()
tempUser = allUsers.filter(models.User.username == name)
tempItems = allItems.filter(models.Item.username == name)
return self.render_template('public_profile.html', user = name, items = tempItems, address = tempUser.get().email)
class EmailUserHandler(BaseHandler):
#Handler for users emailing other users
def post(self, username):
from google.appengine.api import mail, app_identity
if self.user:
user_info = models.User.get_by_id(long(self.user_id))
to = self.request.get("email-to")
subject = self.request.get("email-subject")
body = user_info.email + " just sent you a message through AirShareBeta: " + self.request.get("email-body")
app_id = app_identity.get_application_id()
sender = "AirShareBeta <no-reply@%s.appspotmail.com>" % (app_id)
if self.app.config['log_email']:
try:
logEmail = models.LogEmail(
sender=sender,
to=to,
subject=subject,
body=body,
when=utils.get_date_time("datetimeProperty")
)
logEmail.put()
except (apiproxy_errors.OverQuotaError, BadValueError):
logging.error("Error saving Email Log in datastore")
try:
message = mail.EmailMessage()
message.sender = sender
message.to = to
message.subject = subject
message.html = body
message.send()
except Exception, e:
logging.error("Error sending email: %s" % e)
self.redirect_to('home')
class EditProfileHandler(BaseHandler):
"""
Handler for Edit User Profile
"""
@user_required
def get(self):
""" Returns a simple HTML form for edit profile """
params = {}
if self.user:
user_info = models.User.get_by_id(long(self.user_id))
self.form.username.data = user_info.username
self.form.name.data = user_info.name
self.form.last_name.data = user_info.last_name
self.form.country.data = user_info.country
self.form.tz.data = user_info.tz
providers_info = user_info.get_social_providers_info()
if not user_info.password:
params['local_account'] = False
else:
params['local_account'] = True
params['used_providers'] = providers_info['used']
params['unused_providers'] = providers_info['unused']
params['country'] = user_info.country
params['tz'] = user_info.tz
return self.render_template('edit_profile.html', **params)
def post(self):
""" Get fields from POST dict """
if not self.form.validate():
return self.get()
username = self.form.username.data.lower()
name = self.form.name.data.strip()
last_name = self.form.last_name.data.strip()
country = self.form.country.data
tz = self.form.tz.data
try:
user_info = models.User.get_by_id(long(self.user_id))
try:
message = ''
# update username if it has changed and it isn't already taken
if username != user_info.username:
user_info.unique_properties = ['username', 'email']
uniques = [
'User.username:%s' % username,
'User.auth_id:own:%s' % username,
]
# Create the unique username and auth_id.
success, existing = Unique.create_multi(uniques)
if success:
# free old uniques
Unique.delete_multi(
['User.username:%s' % user_info.username, 'User.auth_id:own:%s' % user_info.username])
# The unique values were created, so we can save the user.
user_info.username = username
user_info.auth_ids[0] = 'own:%s' % username
message += _('Your new username is <strong>{}</strong>').format(username)
else:
message += _(
'The username <strong>{}</strong> is already taken. Please choose another.').format(
username)
# At least one of the values is not unique.
self.add_message(message, 'error')
return self.get()
user_info.name = name
user_info.last_name = last_name
user_info.country = country
user_info.tz = tz
user_info.put()
message += " " + _('Thanks, your settings have been saved.')
self.add_message(message, 'success')
return self.get()
except (AttributeError, KeyError, ValueError), e:
                logging.error('Error updating profile: %s' % e)
message = _('Unable to update profile. Please try again later.')
self.add_message(message, 'error')
return self.get()
except (AttributeError, TypeError), e:
login_error_message = _('Sorry you are not logged in.')
self.add_message(login_error_message, 'error')
self.redirect_to('login')
@webapp2.cached_property
def form(self):
f = forms.EditProfileForm(self)
f.country.choices = self.countries_tuple
f.tz.choices = self.tz
return f
class EditPasswordHandler(BaseHandler):
"""
Handler for Edit User Password
"""
@user_required
def get(self):
""" Returns a simple HTML form for editing password """
params = {}
return self.render_template('edit_password.html', **params)
def post(self):
""" Get fields from POST dict """
if not self.form.validate():
return self.get()
current_password = self.form.current_password.data.strip()
password = self.form.password.data.strip()
try:
user_info = models.User.get_by_id(long(self.user_id))
auth_id = "own:%s" % user_info.username
# Password to SHA512
current_password = utils.hashing(current_password, self.app.config.get('salt'))
try:
user = models.User.get_by_auth_password(auth_id, current_password)
# Password to SHA512
password = utils.hashing(password, self.app.config.get('salt'))
user.password = security.generate_password_hash(password, length=12)
user.put()
# send email
subject = self.app.config.get('app_name') + " Account Password Changed"
# load email's template
template_val = {
"app_name": self.app.config.get('app_name'),
"first_name": user.name,
"username": user.username,
"email": user.email,
"reset_password_url": self.uri_for("password-reset", _full=True)
}
email_body_path = "emails/password_changed.txt"
email_body = self.jinja2.render_template(email_body_path, **template_val)
email_url = self.uri_for('taskqueue-send-email')
taskqueue.add(url=email_url, params={
'to': user.email,
'subject': subject,
'body': email_body,
'sender': self.app.config.get('contact_sender'),
})
#Login User
self.auth.get_user_by_password(user.auth_ids[0], password)
self.add_message(_('Password changed successfully.'), 'success')
return self.redirect_to('edit-profile')
except (InvalidAuthIdError, InvalidPasswordError), e:
# Returns error message to self.response.write in
# the BaseHandler.dispatcher
message = _("Incorrect password! Please enter your current password to change your account settings.")
self.add_message(message, 'error')
return self.redirect_to('edit-password')
except (AttributeError, TypeError), e:
login_error_message = _('Sorry you are not logged in.')
self.add_message(login_error_message, 'error')
self.redirect_to('login')
@webapp2.cached_property
def form(self):
return forms.EditPasswordForm(self)
class EditEmailHandler(BaseHandler):
"""
Handler for Edit User's Email
"""
@user_required
def get(self):
""" Returns a simple HTML form for edit email """
params = {}
if self.user:
user_info = models.User.get_by_id(long(self.user_id))
params['current_email'] = user_info.email
return self.render_template('edit_email.html', **params)
def post(self):
""" Get fields from POST dict """
if not self.form.validate():
return self.get()
new_email = self.form.new_email.data.strip()
password = self.form.password.data.strip()
try:
user_info = models.User.get_by_id(long(self.user_id))
auth_id = "own:%s" % user_info.username
# Password to SHA512
password = utils.hashing(password, self.app.config.get('salt'))
try:
# authenticate user by its password
user = models.User.get_by_auth_password(auth_id, password)
                # if the user changed his/her email address
if new_email != user.email:
# check whether the new email has been used by another user
aUser = models.User.get_by_email(new_email)
if aUser is not None:
message = _("The email %s is already registered." % new_email)
self.add_message(message, 'error')
return self.redirect_to("edit-email")
# send email
subject = _("%s Email Changed Notification" % self.app.config.get('app_name'))
user_token = models.User.create_auth_token(self.user_id)
confirmation_url = self.uri_for("email-changed-check",
user_id=user_info.get_id(),
encoded_email=utils.encode(new_email),
token=user_token,
_full=True)
# load email's template
template_val = {
"app_name": self.app.config.get('app_name'),
"first_name": user.name,
"username": user.username,
"new_email": new_email,
"confirmation_url": confirmation_url,
"support_url": self.uri_for("contact", _full=True)
}
old_body_path = "emails/email_changed_notification_old.txt"
old_body = self.jinja2.render_template(old_body_path, **template_val)
new_body_path = "emails/email_changed_notification_new.txt"
new_body = self.jinja2.render_template(new_body_path, **template_val)
email_url = self.uri_for('taskqueue-send-email')
taskqueue.add(url=email_url, params={
'to': user.email,
'subject': subject,
'body': old_body,
})
taskqueue.add(url=email_url, params={
'to': new_email,
'subject': subject,
'body': new_body,
})
# display successful message
msg = _(
"Please check your new email for confirmation. Your email will be updated after confirmation.")
self.add_message(msg, 'success')
return self.redirect_to('edit-profile')
else:
self.add_message(_("You didn't change your email."), "warning")
return self.redirect_to("edit-email")
except (InvalidAuthIdError, InvalidPasswordError), e:
# Returns error message to self.response.write in
# the BaseHandler.dispatcher
message = _("Incorrect password! Please enter your current password to change your account settings.")
self.add_message(message, 'error')
return self.redirect_to('edit-email')
except (AttributeError, TypeError), e:
login_error_message = _('Sorry you are not logged in.')
self.add_message(login_error_message, 'error')
self.redirect_to('login')
@webapp2.cached_property
def form(self):
return forms.EditEmailForm(self)
class PasswordResetHandler(BaseHandler):
"""
Password Reset Handler with Captcha
"""
def get(self):
chtml = captcha.displayhtml(
public_key=self.app.config.get('captcha_public_key'),
use_ssl=(self.request.scheme == 'https'),
error=None)
if self.app.config.get('captcha_public_key') == "PUT_YOUR_RECAPCHA_PUBLIC_KEY_HERE" or \
self.app.config.get('captcha_private_key') == "PUT_YOUR_RECAPCHA_PUBLIC_KEY_HERE":
chtml = '<div class="alert alert-error"><strong>Error</strong>: You have to ' \
'<a href="http://www.google.com/recaptcha/whyrecaptcha" target="_blank">sign up ' \
'for API keys</a> in order to use reCAPTCHA.</div>' \
'<input type="hidden" name="recaptcha_challenge_field" value="manual_challenge" />' \
'<input type="hidden" name="recaptcha_response_field" value="manual_challenge" />'
params = {
'captchahtml': chtml,
}
return self.render_template('password_reset.html', **params)
def post(self):
# check captcha
challenge = self.request.POST.get('recaptcha_challenge_field')
response = self.request.POST.get('recaptcha_response_field')
remoteip = self.request.remote_addr
cResponse = captcha.submit(
challenge,
response,
self.app.config.get('captcha_private_key'),
remoteip)
if cResponse.is_valid:
# captcha was valid... carry on..nothing to see here
pass
else:
_message = _('Wrong image verification code. Please try again.')
self.add_message(_message, 'error')
return self.redirect_to('password-reset')
#check if we got an email or username
email_or_username = str(self.request.POST.get('email_or_username')).lower().strip()
if utils.is_email_valid(email_or_username):
user = models.User.get_by_email(email_or_username)
_message = _("If the email address you entered") + " (<strong>%s</strong>) " % email_or_username
else:
auth_id = "own:%s" % email_or_username
user = models.User.get_by_auth_id(auth_id)
_message = _("If the username you entered") + " (<strong>%s</strong>) " % email_or_username
_message = _message + _("is associated with an account in our records, you will receive "
"an email from us with instructions for resetting your password. "
"<br>If you don't receive instructions within a minute or two, "
"check your email's spam and junk filters, or ") + \
'<a href="' + self.uri_for('contact') + '">' + _('contact us') + '</a> ' + _(
"for further assistance.")
if user is not None:
user_id = user.get_id()
token = models.User.create_auth_token(user_id)
email_url = self.uri_for('taskqueue-send-email')
reset_url = self.uri_for('password-reset-check', user_id=user_id, token=token, _full=True)
subject = _("%s Password Assistance" % self.app.config.get('app_name'))
# load email's template
template_val = {
"username": user.username,
"email": user.email,
"reset_password_url": reset_url,
"support_url": self.uri_for("contact", _full=True),
"app_name": self.app.config.get('app_name'),
}
body_path = "emails/reset_password.txt"
body = self.jinja2.render_template(body_path, **template_val)
taskqueue.add(url=email_url, params={
'to': user.email,
'subject': subject,
'body': body,
'sender': self.app.config.get('contact_sender'),
})
self.add_message(_message, 'warning')
return self.redirect_to('login')
class PasswordResetCompleteHandler(BaseHandler):
"""
    Handler that processes the password-reset link the user received
"""
def get(self, user_id, token):
verify = models.User.get_by_auth_token(int(user_id), token)
params = {}
if verify[0] is None:
message = _('The URL you tried to use is either incorrect or no longer valid. '
'Enter your details again below to get a new one.')
self.add_message(message, 'warning')
return self.redirect_to('password-reset')
else:
return self.render_template('password_reset_complete.html', **params)
def post(self, user_id, token):
verify = models.User.get_by_auth_token(int(user_id), token)
user = verify[0]
password = self.form.password.data.strip()
if user and self.form.validate():
# Password to SHA512
password = utils.hashing(password, self.app.config.get('salt'))
user.password = security.generate_password_hash(password, length=12)
user.put()
# Delete token
models.User.delete_auth_token(int(user_id), token)
# Login User
self.auth.get_user_by_password(user.auth_ids[0], password)
self.add_message(_('Password changed successfully.'), 'success')
return self.redirect_to('home')
else:
self.add_message(_('The two passwords must match.'), 'error')
return self.redirect_to('password-reset-check', user_id=user_id, token=token)
@webapp2.cached_property
def form(self):
return forms.PasswordResetCompleteForm(self)
class EmailChangedCompleteHandler(BaseHandler):
"""
Handler for completed email change
    Called when the user clicks the confirmation link in the email
"""
def get(self, user_id, encoded_email, token):
verify = models.User.get_by_auth_token(int(user_id), token)
email = utils.decode(encoded_email)
if verify[0] is None:
message = _('The URL you tried to use is either incorrect or no longer valid.')
self.add_message(message, 'warning')
self.redirect_to('home')
else:
# save new email
user = verify[0]
user.email = email
user.put()
# delete token
models.User.delete_auth_token(int(user_id), token)
# add successful message and redirect
message = _('Your email has been successfully updated.')
self.add_message(message, 'success')
self.redirect_to('edit-profile')
class HomeRequestHandler(RegisterBaseHandler):
"""
Handler to show the home page
"""
def get(self):
""" Returns a simple HTML form for home """
allItems = models.Item.query()
hasItems = False
if allItems.count() > 0:
hasItems = True
return self.render_template('home.html', siteItems = allItems, hasUserContent = hasItems)
|
|
#!/usr/bin/env python
from __future__ import print_function
import sys
from builtins import map
from builtins import object
import argparse
import os
from PyAnalysisTools.base.ShellUtils import make_dirs, copy
class ModuleCreator(object):
"""
    Class to set up a new analysis module
"""
def __init__(self, **kwargs):
self.name = kwargs['name']
self.path = kwargs['path']
if kwargs['short_name'] is not None:
self.short_name = kwargs['short_name']
else:
self.short_name = self.name
self.modules = ['OutputModule', 'MainSelectionModule']
self.module_path = os.path.abspath(os.path.join(self.path, self.name))
def setup_directory(self):
"""
Create package directory if not existing
:return: nothing
:rtype: None
"""
make_dirs(self.module_path)
folders = ['macros', 'Root', 'data', 'run', 'run/configs', self.name]
list(map(lambda f: make_dirs(os.path.join(self.module_path, f)), folders))
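    # Layout sketch produced by setup_directory for a package named e.g. 'MyAnalysis'
    # (illustrative name only): MyAnalysis/{macros, Root, data, run, run/configs, MyAnalysis}.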
def add_output_module(self):
with open(os.path.join(self.module_path, 'Root', 'OutputModule.cxx'), 'w') as f:
print('#include <{:s}/OutputModule.h>\n\n'.format(self.name), file=f)
print('void OutputModule::initialize (){\n', file=f)
print('\tstd::function<float()> fYourFctPtr = [=](){ return 1111.; /*enter calculation here*/}; \n', file=f)
print('\tcore()->AddTreeBranch("your_branch_name", fYourFctPtr, "your_tree_name");', file=f)
print('}', file=f)
with open(os.path.join(self.module_path, self.name, 'OutputModule.h'), 'w') as f:
print('#pragma once\n', file=f)
print('#include <ELCore/Module.h>\n', file=f)
print('class OutputModule : public Module{', file=f)
print('\tpublic:', file=f)
print('\tOutputModule(const std::string& moduleName) : Module(moduleName) {}', file=f)
print('\tvirtual ~OutputModule(){}\n', file=f)
print('\tvirtual void initialize();\n', file=f)
print('\t//configurables', file=f)
print('\tstd::string exampleKey = "kYourCollection";\n', file=f)
print('\tprivate: \n', file=f)
print('\tClassDef(OutputModule, 1);', file=f)
print('};', file=f)
def add_mainselection_module(self):
with open(os.path.join(self.module_path, 'Root', 'MainSelectionModule.cxx'), 'w') as f:
print('#include <{:s}/MainSelectionModule.h>\n\n'.format(self.name), file=f)
print('void MainSelectionModule::initialize (){\n', file=f)
            print('\tcore()->addTemplate("SOME_CUT", &MainSelectionModule::someCut, this); \n', file=f)
print('}\n', file=f)
print('void MainSelectionModule::someCut() {', file=f)
print('\t//Do stuff here, e.g. print configurable', file=f)
print('\tlog->info("Example configurable {}", someConfigurable);', file=f)
print('}\n', file=f)
with open(os.path.join(self.module_path, self.name, 'MainSelectionModule.h'), 'w') as f:
print('#pragma once\n', file=f)
print('#include <ELCore/Module.h>', file=f)
print('#include <ELCore/Exceptions.h>', file=f)
print('#include <ELCore/Container.h>\n', file=f)
print('\nnamespace CP {', file=f)
print('\tclass someCPTool;', file=f)
print('}\n', file=f)
print('class MainSelectionModule : public Module {', file=f)
print('\tpublic:', file=f)
print('\tMainSelectionModule() : Module("MainSelectionModule") {}', file=f)
print('\tvirtual ~MainSelectionModule(){}\n', file=f)
print('\tvirtual void initialize();\n', file=f)
print('\t//configurables', file=f)
print('\tstd::string someConfigurable = "I am a configurable variable to be set in module_config.yml";\n',
file=f)
print('\tprivate:', file=f)
print('// DO NOT FORGET TO DISABLE THE ROOT STREAMERS VIA //!', file=f)
print('\tstd::shared_ptr<CP::someCPTool> m_myCPTool; //!', file=f)
print('\tvoid someCut();\n', file=f)
print('\tClassDef(MainSelectionModule, 1);', file=f)
print('};', file=f)
def add_readme(self):
with open(os.path.join(self.module_path, 'README.md'), 'w') as f:
print('===========================', file=f)
print('{:s}'.format(self.name), file=f)
print('===========================\n\n', file=f)
print('Dependencies', file=f)
print('------------', file=f)
print('* ELCore', file=f)
print('* ELExtrapolator', file=f)
print('* LumiCalculator', file=f)
print('* PyAnalysisTools\n\n', file=f)
print('Setup', file=f)
print('-----', file=f)
print('For details on the ELBrain framework visit: https://elbraindocs.web.cern.ch \n\n', file=f)
print('XXX Analysis', file=f)
print('------------', file=f)
print('Specify here details on this package', file=f)
def add_run_script(self):
with open(os.path.join(self.module_path, 'run', 'run.py'), 'w') as f:
print('from ELCore import RunManager \n', file=f)
print('if __name__ == "__main__":', file=f)
print('\tparser = RunManager.get_parser(\'{:s}\')'.format(self.name), file=f)
print('\targs = parser.parse_args()', file=f)
print('\tmanager = RunManager.RunManager("{:s}", abbrev="{:s}_", **vars(args))'.format(self.name,
self.short_name),
file=f)
print('\tmanager.run()', file=f)
def add_link_def(self):
with open(os.path.join(self.module_path, 'Root', 'LinkDef.h'), 'w') as f:
for mod in self.modules:
print('#include <{:s}/{:s}.h>'.format(self.name, mod), file=f)
print('#ifdef __CINT__', file=f)
print('#pragma link off all globals;', file=f)
print('#pragma link off all classes;', file=f)
print('#pragma link off all functions;', file=f)
print('#pragma link C++ nestedclass;\n', file=f)
print('#endif\n', file=f)
print('#ifdef __CINT__', file=f)
for mod in self.modules:
print('#pragma link C++ class {:s}+;'.format(mod), file=f)
print('#endif', file=f)
def add_cmake_file(self):
with open(os.path.join(self.module_path, 'CMakeLists.txt'), 'w') as f:
print('################################################################################', file=f)
print('# Package: {:s}'.format(self.name), file=f)
print('################################################################################\n', file=f)
print('atlas_subdir({:s}) \n'.format(self.name), file=f)
print('set(CMAKE_CXX_FLAGS "-std=c++14") \n', file=f)
print('# Declare the package\'s dependencies:', file=f)
print('atlas_depends_on_subdirs(\n \t\tPUBLIC', file=f)
print('\t\t\tELCore', file=f)
print('\t\t\tELCommon', file=f)
print('\t\t\tLumiCalculator', file=f)
print('\t\t\t${extra_deps} )\n', file=f)
print('# External dependencies:', file=f)
print('find_package(Boost)', file=f)
print('find_package(ROOT', file=f)
            print('\t\tCOMPONENTS', file=f)
print('\t\tCore', file=f)
print('\t\tTree', file=f)
print('\t\tMathCore', file=f)
print('\t\tHist', file=f)
print('\t\tRIO )\n', file=f)
print('# Libraries in the package:', file=f)
print('atlas_add_root_dictionary({:s}Lib'.format(self.name), file=f)
print('\t\t{:s}LibCintDict'.format(self.name), file=f)
print('\t\tROOT_HEADERS', file=f)
print('\t\t{:s}/*.h'.format(self.name), file=f)
print('\t\tRoot/LinkDef.h', file=f)
print('\t\tEXTERNAL_PACKAGES ROOT )\n', file=f)
print('atlas_add_library({:s}Lib'.format(self.name), file=f)
print('\t{:s}/*.h Root/*.cxx ${{{:s}LibCintDict}}'.format(self.name, self.name), file=f)
print('\tPUBLIC_HEADERS {:s}'.format(self.name), file=f)
print('\tINCLUDE_DIRS ${ROOT_INCLUDE_DIRS} ${BOOST_INCLUDE_DIRS}', file=f)
            print('\tLINK_LIBRARIES ELCoreLib ELCommonLib LumiCalculatorLib ${ROOT_LIBRARIES} ${BOOST_LIBRARIES}'
                  ' ${extra_libs} )\n', file=f)
print('#Install files from the package:', file=f)
print('atlas_install_data(data/*)', file=f)
def copy_and_update_package_setup(self):
with open(os.path.join(self.path, '.setup_package.py'), 'w') as f_out:
with open(os.path.join(os.path.dirname(__file__), '.common_setup_package.py'), 'r') as f_in:
                for line in f_in:
                    line = line.replace('PACKAGE_NAME_LOWER', self.name.lower())
                    line = line.replace('PACKAGE_NAME_ABBR', self.short_name)
                    line = line.replace('PACKAGE_NAME', self.name)
                    print(line.rstrip('\n'), file=f_out)
def copy_common_files(self):
        # module_path is already absolute, so joining it onto self.path would be a no-op;
        # copy common_setup.sh into the new package as setup.sh.
        destination = os.path.join(self.module_path, 'setup.sh')
        print('COPY', os.path.join('.analysis_package_generic', 'common_setup.sh'), destination)
        copy(os.path.join(os.path.dirname(__file__), '.analysis_package_generic', 'common_setup.sh'),
             destination)
def create(self):
self.setup_directory()
self.add_link_def()
self.add_cmake_file()
self.add_readme()
self.add_run_script()
self.add_output_module()
self.add_mainselection_module()
self.copy_and_update_package_setup()
self.copy_common_files()
def main(argv):
parser = argparse.ArgumentParser(description="Setup new analysis package")
parser.add_argument('name', help="Name of new package")
parser.add_argument('--short_name', '-a', default=None, help="Abbreviation of package name")
parser.add_argument('--path', '-p', default='../../',
help="path where the package should be set up (default ELCore/../)")
args = parser.parse_args()
creator = ModuleCreator(**vars(args))
creator.create()
if __name__ == '__main__':
main(sys.argv[1:])
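# Hedged usage sketch (not executed): the same setup can be driven programmatically
# instead of through the CLI above, e.g.
#   creator = ModuleCreator(name='MyAnalysis', short_name='MyA', path='../../')
#   creator.create()
# where 'MyAnalysis' and 'MyA' are placeholder names.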
|
|
###################################### Sparse Autoencoder ############################################
## Author: Sara Regina Ferreira de Faria
## Email: [email protected]
#Needed libraries
import numpy
import matplotlib.pyplot as plt
import pandas
import math
import scipy.io as spio
import scipy.ndimage
from sklearn.metrics import mean_squared_error, roc_curve, auc
from keras import regularizers
# fix random seed for reproducibility
numpy.random.seed(7)
# load the dataset
def loadData(file, dictName):
matfile = file
matdata = spio.loadmat(matfile)
dataset = numpy.ndarray(shape=(matdata[dictName].shape[1]), dtype=type(matdata[dictName][0,0]))
for i in range(matdata[dictName].shape[1]):
dataset[i] = matdata[dictName][0, i]
return dataset
# normalize dataset
def normalizeData(data):
maxVal = numpy.amax(data)
minVal = numpy.amin(data)
normalizedData = ((data-minVal)/(maxVal-minVal))
return normalizedData
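# Illustration (assumption about intended use): min-max scaling maps the whole array
# onto [0, 1], e.g. normalizeData(numpy.array([2.0, 4.0, 6.0])) -> array([0., 0.5, 1.]).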
# based on http://machinelearningmastery.com/time-series-prediction-with-deep-learning-in-python-with-keras/
# convert an array of values into a dataset matrix
def createMatrix(dataset, look_back=1):
dataX, dataY = [], []
for i in range(len(dataset)-look_back-1):
a = dataset[i:(i+look_back)]
dataX.append(a)
return numpy.array(dataX)
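# Minimal sanity-check sketch (not called by the experiment below): demonstrates the
# sliding-window shape produced by createMatrix.
def _demo_create_matrix():
	demo = numpy.arange(10).reshape(5, 2)  # 5 time steps, 2 features
	windows = createMatrix(demo, look_back=2)  # (samples - look_back - 1) windows of length look_back
	return windows.shape  # -> (2, 2, 2)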
# based on https://blog.keras.io/building-autoencoders-in-keras.html
# based on http://machinelearningmastery.com/time-series-prediction-with-deep-learning-in-python-with-keras/
# create lstm-based autoencoder
def trainSparseAutoencoder(dataset, timesteps, input_dim, bottleneckDim, lossEvaluation, optimizer, epochs, batchSize, regularizer, verbose=False):
from keras.layers import Input, Dense, LSTM, RepeatVector
from keras.models import Model
# split noise and normal data into train and test sets
train_size = int(len(dataset) * 0.67)
test_size = len(dataset) - train_size
train, test = dataset[0:train_size,:], dataset[train_size:len(dataset),:]
# encoder
inputs = Input(shape=(timesteps, input_dim))
encoded = LSTM(int(bottleneckDim), activity_regularizer=regularizer)(inputs)
# decoder
decoded = RepeatVector(timesteps)(encoded)
decoded = LSTM(input_dim, return_sequences=True)(decoded)
# autoencoder
model = Model(inputs, decoded)
model.compile(loss=lossEvaluation, optimizer=optimizer)
model.fit(train, train, epochs=epochs, batch_size=batchSize, verbose=verbose, validation_data=(test, test))
# Estimate model performance
#trainScore = model.evaluate(train, train, verbose=0)
#print('Train Score: %.6f MSE (%.6f RMSE)' % (trainScore, math.sqrt(trainScore)))
#testScore = model.evaluate(test, test, verbose=0)
#print('Test Score: %.6f MSE (%.6f RMSE)' % (testScore, math.sqrt(testScore)))
return model
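# Shape summary of the model built above (hedged reading of the Keras calls): the encoder
# LSTM compresses each (timesteps, input_dim) window into a single bottleneckDim vector,
# RepeatVector copies that vector timesteps times, and the decoder LSTM reconstructs a
# sequence with the original length and feature width, trained against the input itself.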
# based on https://edouardfouche.com/Neural-based-Outlier-Discovery/
def calculateFprTpr (predicted, labels):
dist = numpy.zeros(len(predicted))
for i in range(len(predicted)):
dist[i] = numpy.linalg.norm(predicted[i])
fpr, tpr, thresholds = roc_curve(labels, dist)
return fpr, tpr
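# Hedged usage sketch: calculateFprTpr treats each entry of `predicted` as an anomaly
# score (via its norm) and compares it against the binary labels with sklearn's roc_curve.
def _demo_roc_from_flags():
	flags = [0, 0, 1, 1]   # per-batch fault decisions, as built in the main loop below
	labels = [0, 1, 0, 1]  # ground truth: 1 once the fault is active
	fpr, tpr = calculateFprTpr(flags, labels)
	return auc(fpr, tpr)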
#************* MAIN *****************#
# variables
best_roc_auc = 0
best_epochs = 0
best_limit = 0
best_bottleneckDim = 0
best_look_back = 0
best_regularizerIndex = 0
for epochs in range(16,17): #16
print("epochs", epochs)
for limitAux in range(11,12):
limit = limitAux/10
print("limit", limit)
for bottleneckDim in range (9,10):
print("bottleneckDim", bottleneckDim)
for look_back in range(6,7):
print("look_back", look_back)
for regularizerIndex in range (5,6):
regularizer=regularizers.l1(pow(10,-regularizerIndex))
print("regularizer", regularizerIndex)
batchSizeData = 1
#bottleneckDim = batchSizeData/2
batchSizeModel = 5
lossEvaluation = 'mean_squared_error'
optimizer = 'adam'
roc_auc = []
FPRs = []
TPRs = []
# load dataset with all fault simulation
originalDataset = loadData('DadosTodasFalhas.mat', 'Xsep')
# prepare dataset to input model training
filteredDataset = scipy.ndimage.filters.gaussian_filter(originalDataset[0][:,:], 4.0)
#filteredDataset = originalDataset[0][:,:]
normalizedDataset = normalizeData(filteredDataset)
dataset = createMatrix(normalizedDataset, look_back)
#***** Train model with normal data *****#
# Variables
timesteps = dataset.shape[1]
input_dim = dataset.shape[2]
normalPredict = []
normalError = []
j = 0
# train model
Model = trainSparseAutoencoder(dataset, timesteps, input_dim, bottleneckDim, lossEvaluation, optimizer, epochs, batchSizeModel, regularizer, verbose=False)
# get error for each batch of normal data
for k in range(0,len(dataset),batchSizeData):
dataBatch = dataset[k:k+batchSizeData]
normalPredict.append(Model.predict(dataBatch))
normalError.append(mean_squared_error(dataBatch[:,0,:], normalPredict[j][:,0,:]))
j += 1
#***** Testing if it is a fault or not *****#
for i in range(1,len(originalDataset)):
#local variables
j = 0
faults = []
trainPredict = []
faultError = []
predicted = []
# prepare dataset
filteredDataset = scipy.ndimage.filters.gaussian_filter(originalDataset[i][:,:], 4.0)
#filteredDataset = originalDataset[i][:,0]
normalizedDataset = normalizeData(filteredDataset)
dataset = createMatrix(normalizedDataset, look_back)
#dataset = numpy.reshape(dataset, (dataset.shape[0], dataset.shape[1], 22)) # reshape input to be [samples, time steps, features]
# get error for each batch of data
for k in range(0,len(dataset),batchSizeData):
dataBatch = dataset[k:k+batchSizeData]
# generate predictions using model
trainPredict.append(Model.predict(dataBatch))
predicted.append(trainPredict[j][:,0,:])
faultError.append(mean_squared_error(dataBatch[:,0,:], predicted[j]))
# check if it is a fault or not
if (faultError[j] > normalError[j]*limit):
faults.append(1)
else:
faults.append(0)
j = j + 1
#print("Dataset", i, ". IsFaultVector: ", faults)
# define labels to ROC curve
labels = []
for k in range(0,len(dataset),batchSizeData):
if (k >= 100):
labels.append(1)
if (k < 100):
labels.append(0)
# calculate AUC, fpr and tpr
fpr, tpr = calculateFprTpr(faults, labels)
FPRs.append(fpr)
TPRs.append(tpr)
roc_auc.append(auc(fpr, tpr))
sum_roc_auc = 0
for i in range(len(roc_auc)):
sum_roc_auc += roc_auc[i]
if (sum_roc_auc > best_roc_auc):
best_roc_auc = sum_roc_auc
best_epochs = epochs
best_limit = limit
best_bottleneckDim = bottleneckDim
best_look_back = look_back
best_regularizerIndex = regularizerIndex
#plot baseline and predictions
#plt.plot(normalizedDataset)
#plt.plot(numpy.concatenate( predicted, axis=0 ))
#plt.show()
#plt.plot(roc_auc)
#plt.show()
sum_selected_roc_auc = 0
for j in range(len(FPRs)):
i = j+1
if(i == 1 or i == 2 or i == 5 or i == 7 or i == 8 or i == 9 or i == 10 or i == 11 or i == 12 or i == 14 or i == 15 or i == 19):
plt.plot(FPRs[j], TPRs[j], label="AUC{0}= {1:0.2f}".format(i+1, roc_auc[j]))
sum_selected_roc_auc += roc_auc[j]
plt.xlim((0,1))
plt.ylim((0,1))
plt.plot([0, 1], [0, 1], color='navy', linestyle='--')
plt.xlabel('False Positive rate')
plt.ylabel('True Positive rate')
plt.title('ROC curve - Sparse Autoencoder')
plt.legend(loc="lower right")
plt.show()
print("bests parameters")
print("best_limit", best_limit)
print("best_epochs", best_epochs)
print("best_roc_auc", best_roc_auc)
print("best_look_back", best_look_back)
print("best_bottleneckDim", best_bottleneckDim)
print("best_regularizerIndex", best_regularizerIndex)
print("sum_selected_roc_auc", sum_selected_roc_auc)
|
|
from django.test import TestCase
from django.conf import settings
from django.core.urlresolvers import reverse
from lrs import models, views
import datetime
from django.utils.timezone import utc
from django.utils import timezone
import hashlib
import urllib
import os
import json
import base64
import ast
class ActivityStateTests(TestCase):
url = reverse(views.activity_state)
testagent = '{"name":"test","mbox":"mailto:[email protected]"}'
otheragent = '{"name":"other","mbox":"mailto:[email protected]"}'
activityId = "http://www.iana.org/domains/example/"
activityId2 = "http://www.google.com"
stateId = "the_state_id"
stateId2 = "state_id_2"
stateId3 = "third_state_id"
stateId4 = "4th.id"
registrationId = "some_sort_of_reg_id"
content_type = "application/json"
@classmethod
def setUpClass(cls):
print "\n%s" % __name__
def setUp(self):
self.username = "test"
self.email = "mailto:[email protected]"
self.password = "test"
self.auth = "Basic %s" % base64.b64encode("%s:%s" % (self.username, self.password))
form = {'username':self.username,'email': self.email,'password':self.password,'password2':self.password}
response = self.client.post(reverse(views.register),form, X_Experience_API_Version="1.0.0")
self.testparams1 = {"stateId": self.stateId, "activityId": self.activityId, "agent": self.testagent}
path = '%s?%s' % (self.url, urllib.urlencode(self.testparams1))
self.teststate1 = {"test":"put activity state 1","obj":{"agent":"test"}}
self.put1 = self.client.put(path, json.dumps(self.teststate1), content_type=self.content_type, Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.testparams2 = {"stateId": self.stateId2, "activityId": self.activityId, "agent": self.testagent}
path = '%s?%s' % (self.url, urllib.urlencode(self.testparams2))
self.teststate2 = {"test":"put activity state 2","obj":{"agent":"test"}}
self.put2 = self.client.put(path, json.dumps(self.teststate2), content_type=self.content_type, Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.testparams3 = {"stateId": self.stateId3, "activityId": self.activityId2, "agent": self.testagent}
path = '%s?%s' % (self.url, urllib.urlencode(self.testparams3))
self.teststate3 = {"test":"put activity state 3","obj":{"agent":"test"}}
self.put3 = self.client.put(path, json.dumps(self.teststate3), content_type=self.content_type, Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.testparams4 = {"stateId": self.stateId4, "activityId": self.activityId2, "agent": self.otheragent}
path = '%s?%s' % (self.url, urllib.urlencode(self.testparams4))
self.teststate4 = {"test":"put activity state 4","obj":{"agent":"other"}}
self.put4 = self.client.put(path, json.dumps(self.teststate4), content_type=self.content_type, Authorization=self.auth, X_Experience_API_Version="1.0.0")
def tearDown(self):
self.client.delete(self.url, self.testparams1, Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.client.delete(self.url, self.testparams2, Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.client.delete(self.url, self.testparams3, Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.client.delete(self.url, self.testparams4, Authorization=self.auth, X_Experience_API_Version="1.0.0")
attach_folder_path = os.path.join(settings.MEDIA_ROOT, "activity_state")
for the_file in os.listdir(attach_folder_path):
file_path = os.path.join(attach_folder_path, the_file)
try:
os.unlink(file_path)
except Exception, e:
raise e
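    # Hedged helper sketch (not used by the assertions below, which inline the same
    # expression): the state ETag is the double-quoted SHA-1 hex digest of the body.
    def _expected_etag(self, content):
        return '"%s"' % hashlib.sha1(content).hexdigest()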
def test_put(self):
self.assertEqual(self.put1.status_code, 204)
self.assertEqual(self.put1.content, '')
self.assertEqual(self.put2.status_code, 204)
self.assertEqual(self.put2.content, '')
self.assertEqual(self.put3.status_code, 204)
self.assertEqual(self.put3.content, '')
self.assertEqual(self.put4.status_code, 204)
self.assertEqual(self.put4.content, '')
def test_put_no_existing_activity(self):
testparams = {"stateId": self.stateId3, "activityId": "http://foobar", "agent": self.testagent}
path = '%s?%s' % (self.url, urllib.urlencode(testparams))
teststate = {"test":"put activity state","obj":{"agent":"test"}}
put = self.client.put(path, teststate, content_type=self.content_type, Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.assertEqual(put.status_code, 204)
self.client.delete(path, Authorization=self.auth, X_Experience_API_Version="1.0.0")
def test_put_with_registrationId(self):
testparamsregid = {"registrationId": self.registrationId, "stateId": self.stateId, "activityId": self.activityId, "agent": self.testagent}
path = '%s?%s' % (self.url, urllib.urlencode(testparamsregid))
teststateregid = {"test":"put activity state w/ registrationId","obj":{"agent":"test"}}
put1 = self.client.put(path, teststateregid, content_type=self.content_type, Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.assertEqual(put1.status_code, 204)
self.assertEqual(put1.content, '')
# also testing get w/ registration id
r = self.client.get(self.url, testparamsregid, X_Experience_API_Version="1.0.0", Authorization=self.auth)
self.assertEqual(r.status_code, 200)
robj = ast.literal_eval(r.content)
self.assertEqual(robj['test'], teststateregid['test'])
self.assertEqual(robj['obj']['agent'], teststateregid['obj']['agent'])
self.assertEqual(r['etag'], '"%s"' % hashlib.sha1(r.content).hexdigest())
# and tests delete w/ registration id
del_r = self.client.delete(self.url, testparamsregid, Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.assertEqual(del_r.status_code, 204)
def test_put_without_auth(self):
# Will return 200 if HTTP_AUTH is not enabled
testparamsregid = {"registrationId": self.registrationId, "stateId": self.stateId, "activityId": self.activityId, "agent": self.testagent}
path = '%s?%s' % (self.url, urllib.urlencode(testparamsregid))
teststateregid = {"test":"put activity state w/ registrationId","obj":{"agent":"test"}}
put1 = self.client.put(path, teststateregid, content_type=self.content_type, X_Experience_API_Version="1.0.0")
self.assertEqual(put1.status_code, 401)
def test_put_etag_conflict_if_none_match(self):
teststateetaginm = {"test":"etag conflict - if none match *","obj":{"agent":"test"}}
path = '%s?%s' % (self.url, urllib.urlencode(self.testparams1))
r = self.client.put(path, teststateetaginm, content_type=self.content_type, If_None_Match='*', Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.assertEqual(r.status_code, 412)
self.assertEqual(r.content, 'Resource detected')
r = self.client.get(self.url, self.testparams1, X_Experience_API_Version="1.0.0", Authorization=self.auth)
self.assertEqual(r.status_code, 200)
robj = ast.literal_eval(r.content)
self.assertEqual(robj['test'], self.teststate1['test'])
self.assertEqual(robj['obj']['agent'], self.teststate1['obj']['agent'])
self.assertEqual(r['etag'], '"%s"' % hashlib.sha1(r.content).hexdigest())
def test_put_etag_conflict_if_match(self):
teststateetagim = {"test":"etag conflict - if match wrong hash","obj":{"agent":"test"}}
new_etag = '"%s"' % hashlib.sha1('wrong etag value').hexdigest()
path = '%s?%s' % (self.url, urllib.urlencode(self.testparams1))
r = self.client.put(path, teststateetagim, content_type=self.content_type, If_Match=new_etag, Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.assertEqual(r.status_code, 412)
self.assertIn('No resources matched', r.content)
r = self.client.get(self.url, self.testparams1, X_Experience_API_Version="1.0.0", Authorization=self.auth)
self.assertEqual(r.status_code, 200)
robj = ast.literal_eval(r.content)
self.assertEqual(robj['test'], self.teststate1['test'])
self.assertEqual(robj['obj']['agent'], self.teststate1['obj']['agent'])
self.assertEqual(r['etag'], '"%s"' % hashlib.sha1(r.content).hexdigest())
def test_put_etag_no_conflict_if_match(self):
teststateetagim = {"test":"etag no conflict - if match good hash","obj":{"agent":"test"}}
new_etag = '"%s"' % hashlib.sha1(json.dumps(self.teststate1)).hexdigest()
path = '%s?%s' % (self.url, urllib.urlencode(self.testparams1))
r = self.client.put(path, teststateetagim, content_type=self.content_type, If_Match=new_etag, Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.assertEqual(r.status_code, 204)
self.assertEqual(r.content, '')
r = self.client.get(self.url, self.testparams1, X_Experience_API_Version="1.0.0", Authorization=self.auth)
self.assertEqual(r.status_code, 200)
robj = ast.literal_eval(r.content)
self.assertEqual(robj['test'], teststateetagim['test'])
self.assertEqual(robj['obj']['agent'], teststateetagim['obj']['agent'])
self.assertEqual(r['etag'], '"%s"' % hashlib.sha1(r.content).hexdigest())
def test_put_etag_missing_on_change(self):
teststateetagim = {'test': 'etag no conflict - if match good hash', 'obj': {'agent': 'test'}}
path = '%s?%s' % (self.url, urllib.urlencode(self.testparams1))
r = self.client.put(path, teststateetagim, content_type=self.content_type, Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.assertEqual(r.status_code, 409)
self.assertIn('If-Match and If-None-Match headers were missing', r.content)
r = self.client.get(self.url, self.testparams1, X_Experience_API_Version="1.0.0", Authorization=self.auth)
self.assertEqual(r.status_code, 200)
robj = ast.literal_eval(r.content)
self.assertEqual(robj['test'], self.teststate1['test'])
self.assertEqual(robj['obj']['agent'], self.teststate1['obj']['agent'])
self.assertEqual(r['etag'], '"%s"' % hashlib.sha1(r.content).hexdigest())
def test_put_without_activityid(self):
testparamsbad = {"stateId": "bad_state", "agent": self.testagent}
path = '%s?%s' % (self.url, urllib.urlencode(testparamsbad))
teststatebad = {"test":"put activity state BAD no activity id","obj":{"agent":"test"}}
put1 = self.client.put(path, teststatebad, content_type=self.content_type, Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.assertEqual(put1.status_code, 400)
self.assertIn('activityId parameter is missing', put1.content)
def test_put_without_agent(self):
testparamsbad = {"stateId": "bad_state", "activityId": self.activityId}
path = '%s?%s' % (self.url, urllib.urlencode(testparamsbad))
teststatebad = {"test":"put activity state BAD no agent","obj":{"agent":"none"}}
put1 = self.client.put(path, teststatebad, content_type=self.content_type, Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.assertEqual(put1.status_code, 400)
self.assertIn('agent parameter is missing', put1.content)
def test_put_without_stateid(self):
testparamsbad = {"activityId": self.activityId, "agent": self.testagent}
path = '%s?%s' % (self.url, urllib.urlencode(testparamsbad))
teststatebad = {"test":"put activity state BAD no state id","obj":{"agent":"test"}}
put1 = self.client.put(path, teststatebad, content_type=self.content_type, Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.assertEqual(put1.status_code, 400)
self.assertIn('stateId parameter is missing', put1.content)
# Also tests 403 forbidden status
def test_get(self):
username = "other"
email = "mailto:[email protected]"
password = "test"
auth = "Basic %s" % base64.b64encode("%s:%s" % (username, password))
form = {'username':username,'email': email,'password':password,'password2':password}
response = self.client.post(reverse(views.register),form, X_Experience_API_Version="1.0.0")
r = self.client.get(self.url, self.testparams1, X_Experience_API_Version="1.0.0", Authorization=self.auth)
self.assertEqual(r.status_code, 200)
robj = ast.literal_eval(r.content)
self.assertEqual(robj['test'], self.teststate1['test'])
self.assertEqual(robj['obj']['agent'], self.teststate1['obj']['agent'])
self.assertEqual(r['etag'], '"%s"' % hashlib.sha1(r.content).hexdigest())
r2 = self.client.get(self.url, self.testparams2, X_Experience_API_Version="1.0.0", Authorization=self.auth)
self.assertEqual(r2.status_code, 200)
robj2 = ast.literal_eval(r2.content)
self.assertEqual(robj2['test'], self.teststate2['test'])
self.assertEqual(robj2['obj']['agent'], self.teststate2['obj']['agent'])
self.assertEqual(r2['etag'], '"%s"' % hashlib.sha1(r2.content).hexdigest())
r3 = self.client.get(self.url, self.testparams3, X_Experience_API_Version="1.0.0", Authorization=self.auth)
self.assertEqual(r3.status_code, 200)
robj3 = ast.literal_eval(r3.content)
self.assertEqual(robj3['test'], self.teststate3['test'])
self.assertEqual(robj3['obj']['agent'], self.teststate3['obj']['agent'])
self.assertEqual(r3['etag'], '"%s"' % hashlib.sha1(r3.content).hexdigest())
r4 = self.client.get(self.url, self.testparams4, X_Experience_API_Version="1.0.0", Authorization=auth)
self.assertEqual(r4.status_code, 200)
robj4 = ast.literal_eval(r4.content)
self.assertEqual(robj4['test'], self.teststate4['test'])
self.assertEqual(robj4['obj']['agent'], self.teststate4['obj']['agent'])
self.assertEqual(r4['etag'], '"%s"' % hashlib.sha1(r4.content).hexdigest())
# r5 = self.client.get(self.url, self.testparams3, X_Experience_API_Version="1.0.0", Authorization=auth)
# self.assertEqual(r5.status_code, 403)
def test_get_no_existing_id(self):
testparams = {"stateId": "testID", "activityId": self.activityId, "agent": self.testagent}
r = self.client.get(self.url, testparams, X_Experience_API_Version="1.0.0", Authorization=self.auth)
self.assertEqual(r.status_code, 404)
def test_get_ids(self):
params = {"activityId": self.activityId, "agent": self.testagent}
r = self.client.get(self.url, params, X_Experience_API_Version="1.0.0", Authorization=self.auth)
self.assertEqual(r.status_code, 200)
self.assertIn(self.stateId, r.content)
self.assertIn(self.stateId2, r.content)
self.assertNotIn(self.stateId3, r.content)
self.assertNotIn(self.stateId4, r.content)
def test_get_with_since(self):
state_id = "old_state_test"
testparamssince = {"stateId": state_id, "activityId": self.activityId, "agent": self.testagent}
path = '%s?%s' % (self.url, urllib.urlencode(testparamssince))
teststatesince = {"test":"get w/ since","obj":{"agent":"test"}}
updated = datetime.datetime(2012, 6, 12, 12, 00).replace(tzinfo=timezone.get_default_timezone())
put1 = self.client.put(path, teststatesince, content_type=self.content_type, updated=updated.isoformat(), Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.assertEqual(put1.status_code, 204)
self.assertEqual(put1.content, '')
r = self.client.get(self.url, testparamssince, X_Experience_API_Version="1.0.0", Authorization=self.auth)
self.assertEqual(r.status_code, 200)
robj = ast.literal_eval(r.content)
self.assertEqual(robj['test'], teststatesince['test'])
self.assertEqual(robj['obj']['agent'], teststatesince['obj']['agent'])
self.assertEqual(r['etag'], '"%s"' % hashlib.sha1(r.content).hexdigest())
since = datetime.datetime(2012, 7, 1, 12, 00).replace(tzinfo=utc)
params2 = {"activityId": self.activityId, "agent": self.testagent, "since": since}
r = self.client.get(self.url, params2, X_Experience_API_Version="1.0.0", Authorization=self.auth)
self.assertEqual(r.status_code, 200)
self.assertIn(self.stateId, r.content)
self.assertIn(self.stateId2, r.content)
self.assertNotIn(state_id, r.content)
self.assertNotIn(self.stateId3, r.content)
self.assertNotIn(self.stateId4, r.content)
del_r = self.client.delete(self.url, testparamssince, Authorization=self.auth, X_Experience_API_Version="1.0.0")
def test_get_with_since_tz(self):
state_id = "old_state_test"
testparamssince = {"stateId": state_id, "activityId": self.activityId, "agent": self.testagent}
path = '%s?%s' % (self.url, urllib.urlencode(testparamssince))
teststatesince = {"test":"get w/ since","obj":{"agent":"test"}}
updated = datetime.datetime(2012, 6, 12, 12, 00).replace(tzinfo=timezone.get_default_timezone())
put1 = self.client.put(path, teststatesince, content_type=self.content_type, updated=updated.isoformat(), Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.assertEqual(put1.status_code, 204)
self.assertEqual(put1.content, '')
r = self.client.get(self.url, testparamssince, X_Experience_API_Version="1.0.0", Authorization=self.auth)
self.assertEqual(r.status_code, 200)
robj = ast.literal_eval(r.content)
self.assertEqual(robj['test'], teststatesince['test'])
self.assertEqual(robj['obj']['agent'], teststatesince['obj']['agent'])
self.assertEqual(r['etag'], '"%s"' % hashlib.sha1(r.content).hexdigest())
state_id2 = "new_tz_state_test"
testparamssince2 = {"stateId": state_id2, "activityId": self.activityId, "agent": self.testagent}
path = '%s?%s' % (self.url, urllib.urlencode(testparamssince2))
teststatesince2 = {"test":"get w/ since TZ","obj":{"agent":"test"}}
        updated_tz = "2012-07-01T13:30:00+04:00"
put2 = self.client.put(path, teststatesince2, content_type=self.content_type, updated=updated_tz, Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.assertEqual(put2.status_code, 204)
self.assertEqual(put2.content, '')
r2 = self.client.get(self.url, testparamssince2, X_Experience_API_Version="1.0.0", Authorization=self.auth)
self.assertEqual(r2.status_code, 200)
robj2 = ast.literal_eval(r2.content)
self.assertEqual(robj2['test'], teststatesince2['test'])
self.assertEqual(robj2['obj']['agent'], teststatesince2['obj']['agent'])
self.assertEqual(r2['etag'], '"%s"' % hashlib.sha1(r2.content).hexdigest())
since = datetime.datetime(2012, 7, 1, 12, 00).replace(tzinfo=utc)
params2 = {"activityId": self.activityId, "agent": self.testagent, "since": since}
r = self.client.get(self.url, params2, X_Experience_API_Version="1.0.0", Authorization=self.auth)
self.assertEqual(r.status_code, 200)
self.assertIn(self.stateId, r.content)
self.assertIn(self.stateId2, r.content)
self.assertNotIn(state_id, r.content)
self.assertNotIn(state_id2, r.content)
self.assertNotIn(self.stateId3, r.content)
self.assertNotIn(self.stateId4, r.content)
del_r = self.client.delete(self.url, testparamssince, Authorization=self.auth, X_Experience_API_Version="1.0.0")
del_r = self.client.delete(self.url, testparamssince2, Authorization=self.auth, X_Experience_API_Version="1.0.0")
def test_get_with_since_and_regid(self):
# create old state w/ no registration id
state_id = "old_state_test_no_reg"
testparamssince = {"stateId": state_id, "activityId": self.activityId, "agent": self.testagent}
path = '%s?%s' % (self.url, urllib.urlencode(testparamssince))
teststatesince = {"test":"get w/ since","obj":{"agent":"test","stateId":state_id}}
updated = datetime.datetime(2012, 6, 12, 12, 00).replace(tzinfo=utc)
put1 = self.client.put(path, teststatesince, content_type=self.content_type, updated=updated.isoformat(), Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.assertEqual(put1.status_code, 204)
self.assertEqual(put1.content, '')
r = self.client.get(self.url, testparamssince, X_Experience_API_Version="1.0.0", Authorization=self.auth)
self.assertEqual(r.status_code, 200)
robj = ast.literal_eval(r.content)
self.assertEqual(robj['test'], teststatesince['test'])
self.assertEqual(robj['obj']['agent'], teststatesince['obj']['agent'])
self.assertEqual(r['etag'], '"%s"' % hashlib.sha1(r.content).hexdigest())
# create old state w/ registration id
regid = 'test_since_w_regid'
state_id2 = "old_state_test_w_reg"
testparamssince2 = {"registrationId": regid, "activityId": self.activityId, "agent": self.testagent, "stateId":state_id2}
path = '%s?%s' % (self.url, urllib.urlencode(testparamssince2))
teststatesince2 = {"test":"get w/ since and registrationId","obj":{"agent":"test","stateId":state_id2}}
put2 = self.client.put(path, teststatesince2, content_type=self.content_type, updated=updated.isoformat(), Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.assertEqual(put2.status_code, 204)
self.assertEqual(put2.content, '')
r2 = self.client.get(self.url, testparamssince2, X_Experience_API_Version="1.0.0", Authorization=self.auth)
self.assertEqual(r2.status_code, 200)
robj2 = ast.literal_eval(r2.content)
self.assertEqual(robj2['test'], teststatesince2['test'])
self.assertEqual(robj2['obj']['agent'], teststatesince2['obj']['agent'])
self.assertEqual(r2['etag'], '"%s"' % hashlib.sha1(r2.content).hexdigest())
# create new state w/ registration id
state_id3 = "old_state_test_w_new_reg"
testparamssince3 = {"registrationId": regid, "activityId": self.activityId, "agent": self.testagent, "stateId":state_id3}
path = '%s?%s' % (self.url, urllib.urlencode(testparamssince3))
teststatesince3 = {"test":"get w/ since and registrationId","obj":{"agent":"test","stateId":state_id3}}
put3 = self.client.put(path, teststatesince3, content_type=self.content_type, Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.assertEqual(put3.status_code, 204)
self.assertEqual(put3.content, '')
r3 = self.client.get(self.url, testparamssince3, X_Experience_API_Version="1.0.0", Authorization=self.auth)
self.assertEqual(r3.status_code, 200)
robj3 = ast.literal_eval(r3.content)
self.assertEqual(robj3['test'], teststatesince3['test'])
self.assertEqual(robj3['obj']['agent'], teststatesince3['obj']['agent'])
self.assertEqual(r3['etag'], '"%s"' % hashlib.sha1(r3.content).hexdigest())
# get no reg ids set w/o old state
since1 = datetime.datetime(2012, 7, 1, 12, 00).replace(tzinfo=utc)
params = {"activityId": self.activityId, "agent": self.testagent, "since": since1}
r = self.client.get(self.url, params, X_Experience_API_Version="1.0.0", Authorization=self.auth)
self.assertEqual(r.status_code, 200)
self.assertIn(self.stateId, r.content)
self.assertIn(self.stateId2, r.content)
self.assertNotIn(state_id, r.content)
self.assertNotIn(self.stateId3, r.content)
self.assertNotIn(self.stateId4, r.content)
# get reg id set w/o old state
since2 = datetime.datetime(2012, 7, 1, 12, 00).replace(tzinfo=utc)
params2 = {"registrationId": regid, "activityId": self.activityId, "agent": self.testagent, "since": since2}
r = self.client.get(self.url, params2, X_Experience_API_Version="1.0.0", Authorization=self.auth)
self.assertEqual(r.status_code, 200)
self.assertIn(state_id3, r.content)
self.assertNotIn(state_id2, r.content)
self.assertNotIn(self.stateId, r.content)
self.assertNotIn(self.stateId2, r.content)
self.assertNotIn(self.stateId3, r.content)
self.assertNotIn(self.stateId4, r.content)
self.client.delete(self.url, testparamssince, Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.client.delete(self.url, testparamssince2, Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.client.delete(self.url, testparamssince3, Authorization=self.auth, X_Experience_API_Version="1.0.0")
def test_get_without_activityid(self):
params = {"stateId": self.stateId, "agent": self.testagent}
r = self.client.get(self.url, params, X_Experience_API_Version="1.0.0", Authorization=self.auth)
self.assertEqual(r.status_code, 400)
self.assertIn('activityId parameter is missing', r.content)
def test_get_without_agent(self):
params = {"stateId": self.stateId, "activityId": self.activityId}
r = self.client.get(self.url, params, X_Experience_API_Version="1.0.0", Authorization=self.auth)
self.assertEqual(r.status_code, 400)
self.assertIn('agent parameter is missing', r.content)
def test_delete_without_activityid(self):
testparamsregid = {"registrationId": self.registrationId, "stateId": self.stateId, "activityId": self.activityId, "agent": self.testagent}
path = '%s?%s' % (self.url, urllib.urlencode(testparamsregid))
teststateregid = {"test":"delete activity state w/o activityid","obj":{"agent":"test"}}
put1 = self.client.put(path, teststateregid, content_type=self.content_type, Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.assertEqual(put1.status_code, 204)
self.assertEqual(put1.content, '')
r = self.client.get(self.url, testparamsregid, X_Experience_API_Version="1.0.0", Authorization=self.auth)
self.assertEqual(r.status_code, 200)
robj = ast.literal_eval(r.content)
self.assertEqual(robj['test'], teststateregid['test'])
self.assertEqual(robj['obj']['agent'], teststateregid['obj']['agent'])
self.assertEqual(r['etag'], '"%s"' % hashlib.sha1(r.content).hexdigest())
f_r = self.client.delete(self.url, {"registrationId": self.registrationId, "stateId": self.stateId, "agent": self.testagent}, Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.assertEqual(f_r.status_code, 400)
self.assertIn('activityId parameter is missing', f_r.content)
del_r = self.client.delete(self.url, testparamsregid, Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.assertEqual(del_r.status_code, 204)
def test_delete_without_agent(self):
testparamsregid = {"registrationId": self.registrationId, "stateId": self.stateId, "activityId": self.activityId, "agent": self.testagent}
path = '%s?%s' % (self.url, urllib.urlencode(testparamsregid))
teststateregid = {"test":"delete activity state w/o agent","obj":{"agent":"test"}}
put1 = self.client.put(path, teststateregid, content_type=self.content_type, Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.assertEqual(put1.status_code, 204)
self.assertEqual(put1.content, '')
r = self.client.get(self.url, testparamsregid, X_Experience_API_Version="1.0.0", Authorization=self.auth)
self.assertEqual(r.status_code, 200)
robj = ast.literal_eval(r.content)
self.assertEqual(robj['test'], teststateregid['test'])
self.assertEqual(robj['obj']['agent'], teststateregid['obj']['agent'])
self.assertEqual(r['etag'], '"%s"' % hashlib.sha1(r.content).hexdigest())
f_r = self.client.delete(self.url, {"registrationId": self.registrationId, "stateId": self.stateId, "activityId": self.activityId}, Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.assertEqual(f_r.status_code, 400)
self.assertIn('agent parameter is missing', f_r.content)
del_r = self.client.delete(self.url, testparamsregid, Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.assertEqual(del_r.status_code, 204)
def test_delete_set(self):
testparamsdelset1 = {"registrationId": self.registrationId, "stateId": "del_state_set_1", "activityId": self.activityId, "agent": self.testagent}
path = '%s?%s' % (self.url, urllib.urlencode(testparamsdelset1))
teststatedelset1 = {"test":"delete set #1","obj":{"agent":"test"}}
put1 = self.client.put(path, teststatedelset1, content_type=self.content_type, Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.assertEqual(put1.status_code, 204)
self.assertEqual(put1.content, '')
r = self.client.get(self.url, testparamsdelset1, X_Experience_API_Version="1.0.0", Authorization=self.auth)
self.assertEqual(r.status_code, 200)
robj = ast.literal_eval(r.content)
self.assertEqual(robj['test'], teststatedelset1['test'])
self.assertEqual(robj['obj']['agent'], teststatedelset1['obj']['agent'])
self.assertEqual(r['etag'], '"%s"' % hashlib.sha1(r.content).hexdigest())
testparamsdelset2 = {"registrationId": self.registrationId, "stateId": "del_state_set_2", "activityId": self.activityId, "agent": self.testagent}
path = '%s?%s' % (self.url, urllib.urlencode(testparamsdelset2))
teststatedelset2 = {"test":"delete set #2","obj":{"agent":"test"}}
put1 = self.client.put(path, teststatedelset2, content_type=self.content_type, Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.assertEqual(put1.status_code, 204)
self.assertEqual(put1.content, '')
r = self.client.get(self.url, testparamsdelset2, X_Experience_API_Version="1.0.0", Authorization=self.auth)
self.assertEqual(r.status_code, 200)
robj2 = ast.literal_eval(r.content)
self.assertEqual(robj2['test'], teststatedelset2['test'])
self.assertEqual(robj2['obj']['agent'], teststatedelset2['obj']['agent'])
self.assertEqual(r['etag'], '"%s"' % hashlib.sha1(r.content).hexdigest())
f_r = self.client.delete(self.url, {"registrationId": self.registrationId, "agent": self.testagent, "activityId": self.activityId}, Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.assertEqual(f_r.status_code, 204)
r = self.client.get(self.url, testparamsdelset1, X_Experience_API_Version="1.0.0", Authorization=self.auth)
self.assertEqual(r.status_code, 404)
self.assertIn('no activity', r.content)
r = self.client.get(self.url, testparamsdelset2, X_Experience_API_Version="1.0.0", Authorization=self.auth)
self.assertEqual(r.status_code, 404)
self.assertIn('no activity', r.content)
def test_ie_cors_put_delete(self):
username = "another test"
email = "mailto:[email protected]"
password = "test"
auth = "Basic %s" % base64.b64encode("%s:%s" % (username, password))
form = {'username':username,'email': email,'password':password,'password2':password}
response = self.client.post(reverse(views.register),form, X_Experience_API_Version="1.0.0")
testagent = '{"name":"another test","mbox":"mailto:[email protected]"}'
sid = "test_ie_cors_put_delete_set_1"
sparam1 = {"stateId": sid, "activityId": self.activityId, "agent": testagent}
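        # xAPI alternate request syntax: clients that cannot issue PUT/DELETE cross-origin
        # (e.g. older IE) tunnel the verb through a POST with ?method=PUT and send the real
        # headers and parameters form-encoded in the body, as below.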
path = '%s?%s' % (self.url, urllib.urlencode({"method":"PUT"}))
content = {"test":"test_ie_cors_put_delete","obj":{"actor":"another test"}}
param = "stateId=%s&activityId=%s&agent=%s&content=%s&Content-Type=application/x-www-form-urlencoded&Authorization=%s&X-Experience-API-Version=1.0.0" % (sid, self.activityId, testagent, content, auth)
put1 = self.client.post(path, param, content_type='application/x-www-form-urlencoded')
self.assertEqual(put1.status_code, 204)
self.assertEqual(put1.content, '')
r = self.client.get(self.url, {"stateId": sid, "activityId": self.activityId, "agent": testagent}, X_Experience_API_Version="1.0.0", Authorization=auth)
self.assertEqual(r.status_code, 200)
import ast
c = ast.literal_eval(r.content)
self.assertEqual(c['test'], content['test'])
self.assertEqual(r['etag'], '"%s"' % hashlib.sha1('%s' % content).hexdigest())
dparam = "agent=%s&activityId=%s&Authorization=%s&Content-Type=application/x-www-form-urlencoded&X-Experience-API-Version=1.0.0" % (testagent,self.activityId,auth)
path = '%s?%s' % (self.url, urllib.urlencode({"method":"DELETE"}))
f_r = self.client.post(path, dparam, content_type='application/x-www-form-urlencoded')
self.assertEqual(f_r.status_code, 204)
def test_agent_is_group(self):
username = "the group"
email = "mailto:[email protected]"
password = "test"
auth = "Basic %s" % base64.b64encode("%s:%s" % (username, password))
form = {'username':username,'email': email,'password':password,'password2':password}
response = self.client.post(reverse(views.register),form, X_Experience_API_Version="1.0.0")
ot = "Group"
name = "the group"
mbox = "mailto:[email protected]"
members = [{"name":"agent1","mbox":"mailto:[email protected]"},
{"name":"agent2","mbox":"mailto:[email protected]"}]
testagent = json.dumps({"objectType":ot, "name":name, "mbox":mbox,"member":members})
testparams1 = {"stateId": "group.state.id", "activityId": self.activityId, "agent": testagent}
path = '%s?%s' % (self.url, urllib.urlencode(testparams1))
teststate1 = {"test":"put activity state using group as agent","obj":{"agent":"group of 2 agents"}}
put1 = self.client.put(path, teststate1, content_type=self.content_type, Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.assertEqual(put1.status_code, 204)
get1 = self.client.get(self.url, {"stateId":"group.state.id", "activityId": self.activityId, "agent":testagent}, X_Experience_API_Version="1.0.0", Authorization=auth)
self.assertEqual(get1.status_code, 200)
robj = ast.literal_eval(get1.content)
self.assertEqual(robj['test'], teststate1['test'])
self.assertEqual(robj['obj']['agent'], teststate1['obj']['agent'])
self.assertEqual(get1['etag'], '"%s"' % hashlib.sha1(get1.content).hexdigest())
delr = self.client.delete(self.url, testparams1, Authorization=auth, X_Experience_API_Version="1.0.0")
self.assertEqual(delr.status_code, 204)
def test_post_new_state(self):
param = {"stateId": "test:postnewstate", "activityId": "act:test/post.new.state", "agent": '{"mbox":"mailto:[email protected]"}'}
path = '%s?%s' % (self.url, urllib.urlencode(param))
state = {"post":"testing new state", "obj":{"f1":"v1","f2":"v2"}}
r = self.client.post(path, json.dumps(state), content_type=self.content_type, Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.assertEqual(r.status_code, 204)
r = self.client.get(path, Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.assertEqual(r.status_code, 200)
self.assertEqual(ast.literal_eval(r.content), state)
self.client.delete(path, Authorization=self.auth, X_Experience_API_Version="1.0.0")
def test_post_update_state(self):
param = {"stateId": "test:postupdatestate", "activityId": "act:test/post.update.state", "agent": '{"mbox":"mailto:[email protected]"}'}
path = '%s?%s' % (self.url, urllib.urlencode(param))
state = {"field1":"value1", "obj":{"ofield1":"oval1","ofield2":"oval2"}}
r = self.client.post(path, json.dumps(state), content_type=self.content_type, Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.assertEqual(r.status_code, 204)
r = self.client.get(path, Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.assertEqual(r.status_code, 200)
self.assertEqual(ast.literal_eval(r.content), state)
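        # Per the xAPI document APIs, POST merges JSON documents: new fields are added and
        # overlapping fields are overwritten, while untouched fields persist (verified below).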
state2 = {"field_xtra":"xtra val", "obj":"ha, not a obj"}
r = self.client.post(path, json.dumps(state2), content_type=self.content_type, Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.assertEqual(r.status_code, 204)
r = self.client.get(path, Authorization=self.auth, X_Experience_API_Version="1.0.0")
self.assertEqual(r.status_code, 200)
retstate = ast.literal_eval(r.content)
self.assertEqual(retstate['field1'], state['field1'])
self.assertEqual(retstate['field_xtra'], state2['field_xtra'])
self.assertEqual(retstate['obj'], state2['obj'])
self.client.delete(path, Authorization=self.auth, X_Experience_API_Version="1.0.0")
|
|
# -*- coding: utf-8 -*-
from ccxt.async.base.exchange import Exchange
import base64
import hashlib
from ccxt.base.errors import ExchangeError
class btcturk (Exchange):
def describe(self):
return self.deep_extend(super(btcturk, self).describe(), {
'id': 'btcturk',
'name': 'BTCTurk',
'countries': 'TR', # Turkey
'rateLimit': 1000,
'hasCORS': True,
'hasFetchTickers': True,
'hasFetchOHLCV': True,
'timeframes': {
'1d': '1d',
},
'urls': {
'logo': 'https://user-images.githubusercontent.com/1294454/27992709-18e15646-64a3-11e7-9fa2-b0950ec7712f.jpg',
'api': 'https://www.btcturk.com/api',
'www': 'https://www.btcturk.com',
'doc': 'https://github.com/BTCTrader/broker-api-docs',
},
'api': {
'public': {
'get': [
'ohlcdata', # ?last=COUNT
'orderbook',
'ticker',
'trades', # ?last=COUNT(max 50)
],
},
'private': {
'get': [
'balance',
'openOrders',
'userTransactions', # ?offset=0&limit=25&sort=asc
],
'post': [
'buy',
'cancelOrder',
'sell',
],
},
},
'markets': {
'BTC/TRY': {'id': 'BTCTRY', 'symbol': 'BTC/TRY', 'base': 'BTC', 'quote': 'TRY', 'maker': 0.002 * 1.18, 'taker': 0.0035 * 1.18},
'ETH/TRY': {'id': 'ETHTRY', 'symbol': 'ETH/TRY', 'base': 'ETH', 'quote': 'TRY', 'maker': 0.002 * 1.18, 'taker': 0.0035 * 1.18},
'ETH/BTC': {'id': 'ETHBTC', 'symbol': 'ETH/BTC', 'base': 'ETH', 'quote': 'BTC', 'maker': 0.002 * 1.18, 'taker': 0.0035 * 1.18},
},
})
async def fetch_balance(self, params={}):
response = await self.privateGetBalance()
result = {'info': response}
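        # The balance endpoint only reports bitcoin_*/money_* fields, so they are mapped onto
        # the base and quote currencies of the exchange's first market (BTC/TRY).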
base = {
'free': response['bitcoin_available'],
'used': response['bitcoin_reserved'],
'total': response['bitcoin_balance'],
}
quote = {
'free': response['money_available'],
'used': response['money_reserved'],
'total': response['money_balance'],
}
symbol = self.symbols[0]
market = self.markets[symbol]
result[market['base']] = base
result[market['quote']] = quote
return self.parse_balance(result)
async def fetch_order_book(self, symbol, params={}):
market = self.market(symbol)
orderbook = await self.publicGetOrderbook(self.extend({
'pairSymbol': market['id'],
}, params))
timestamp = int(orderbook['timestamp'] * 1000)
return self.parse_order_book(orderbook, timestamp)
def parse_ticker(self, ticker, market=None):
symbol = None
if market:
symbol = market['symbol']
timestamp = int(ticker['timestamp']) * 1000
return {
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': float(ticker['high']),
'low': float(ticker['low']),
'bid': float(ticker['bid']),
'ask': float(ticker['ask']),
'vwap': None,
'open': float(ticker['open']),
'close': None,
'first': None,
'last': float(ticker['last']),
'change': None,
'percentage': None,
'average': float(ticker['average']),
'baseVolume': float(ticker['volume']),
'quoteVolume': None,
'info': ticker,
}
async def fetch_tickers(self, symbols=None, params={}):
await self.load_markets()
tickers = await self.publicGetTicker(params)
result = {}
for i in range(0, len(tickers)):
ticker = tickers[i]
symbol = ticker['pair']
market = None
if symbol in self.markets_by_id:
market = self.markets_by_id[symbol]
symbol = market['symbol']
result[symbol] = self.parse_ticker(ticker, market)
return result
async def fetch_ticker(self, symbol, params={}):
await self.load_markets()
tickers = await self.fetch_tickers()
result = None
if symbol in tickers:
result = tickers[symbol]
return result
def parse_trade(self, trade, market):
timestamp = trade['date'] * 1000
return {
'id': trade['tid'],
'info': trade,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': market['symbol'],
'type': None,
'side': None,
'price': trade['price'],
'amount': trade['amount'],
}
async def fetch_trades(self, symbol, since=None, limit=None, params={}):
market = self.market(symbol)
# maxCount = 50
response = await self.publicGetTrades(self.extend({
'pairSymbol': market['id'],
}, params))
return self.parse_trades(response, market, since, limit)
def parse_ohlcv(self, ohlcv, market=None, timeframe='1d', since=None, limit=None):
timestamp = self.parse8601(ohlcv['Time'])
return [
timestamp,
ohlcv['Open'],
ohlcv['High'],
ohlcv['Low'],
ohlcv['Close'],
ohlcv['Volume'],
]
async def fetch_ohlcv(self, symbol, timeframe='1d', since=None, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
request = {}
if limit:
request['last'] = limit
response = await self.publicGetOhlcdata(self.extend(request, params))
return self.parse_ohlcvs(response, market, timeframe, since, limit)
async def create_order(self, symbol, type, side, amount, price=None, params={}):
method = 'privatePost' + self.capitalize(side)
order = {
'Type': 'BuyBtc' if (side == 'buy') else 'SelBtc',
'IsMarketOrder': 1 if (type == 'market') else 0,
}
if type == 'market':
if side == 'buy':
order['Total'] = amount
else:
order['Amount'] = amount
else:
order['Price'] = price
order['Amount'] = amount
response = await getattr(self, method)(self.extend(order, params))
return {
'info': response,
'id': response['id'],
}
async def cancel_order(self, id, symbol=None, params={}):
return await self.privatePostCancelOrder({'id': id})
def nonce(self):
return self.milliseconds()
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
if self.id == 'btctrader':
raise ExchangeError(self.id + ' is an abstract base API for BTCExchange, BTCTurk')
url = self.urls['api'] + '/' + path
if api == 'public':
if params:
url += '?' + self.urlencode(params)
else:
self.check_required_credentials()
nonce = str(self.nonce())
body = self.urlencode(params)
secret = base64.b64decode(self.secret)
auth = self.apiKey + nonce
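            # Private calls authenticate with three headers: X-PCK (the API key), X-Stamp (the
            # nonce) and X-Signature, a base64-encoded HMAC-SHA256 of apiKey + nonce keyed with
            # the base64-decoded secret.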
headers = {
'X-PCK': self.apiKey,
'X-Stamp': nonce,
'X-Signature': self.stringToBase64(self.hmac(self.encode(auth), secret, hashlib.sha256, 'binary')),
'Content-Type': 'application/x-www-form-urlencoded',
}
return {'url': url, 'method': method, 'body': body, 'headers': headers}
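# Minimal usage sketch (assumes network access and this ccxt vintage's asyncio conventions;
# public endpoints need no credentials). 'BTC/TRY' comes from the static markets above, and
# exchange.close() is assumed to be available here to release the aiohttp session.
if __name__ == '__main__':
    import asyncio

    async def _demo():
        exchange = btcturk()
        try:
            ticker = await exchange.fetch_ticker('BTC/TRY')
            print(ticker['symbol'], ticker['last'])
        finally:
            await exchange.close()  # assumed helper in this ccxt vintage to close the HTTP session

    asyncio.get_event_loop().run_until_complete(_demo())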
|
|
# -*- coding: utf-8 -*-
"""
Tests for Series timezone-related methods
"""
from datetime import datetime
import pytest
import pytz
import numpy as np
from dateutil.tz import tzoffset
import pandas.util.testing as tm
from pandas._libs.tslibs import timezones, conversion
from pandas.compat import lrange
from pandas.core.indexes.datetimes import date_range
from pandas import Series, Timestamp, DatetimeIndex, Index
class TestSeriesTimezones(object):
# -----------------------------------------------------------------
# Series.tz_localize
def test_series_tz_localize(self):
rng = date_range('1/1/2011', periods=100, freq='H')
ts = Series(1, index=rng)
result = ts.tz_localize('utc')
assert result.index.tz.zone == 'UTC'
# Can't localize if already tz-aware
rng = date_range('1/1/2011', periods=100, freq='H', tz='utc')
ts = Series(1, index=rng)
tm.assert_raises_regex(TypeError, 'Already tz-aware',
ts.tz_localize, 'US/Eastern')
def test_series_tz_localize_ambiguous_bool(self):
# make sure that we are correctly accepting bool values as ambiguous
# GH#14402
ts = Timestamp('2015-11-01 01:00:03')
expected0 = Timestamp('2015-11-01 01:00:03-0500', tz='US/Central')
expected1 = Timestamp('2015-11-01 01:00:03-0600', tz='US/Central')
ser = Series([ts])
expected0 = Series([expected0])
expected1 = Series([expected1])
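        # expected0/expected1 are the first (CDT, UTC-5) and second (CST, UTC-6) occurrences of
        # the ambiguous wall time during the US/Central DST fall-back.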
with pytest.raises(pytz.AmbiguousTimeError):
ser.dt.tz_localize('US/Central')
result = ser.dt.tz_localize('US/Central', ambiguous=True)
tm.assert_series_equal(result, expected0)
result = ser.dt.tz_localize('US/Central', ambiguous=[True])
tm.assert_series_equal(result, expected0)
result = ser.dt.tz_localize('US/Central', ambiguous=False)
tm.assert_series_equal(result, expected1)
result = ser.dt.tz_localize('US/Central', ambiguous=[False])
tm.assert_series_equal(result, expected1)
@pytest.mark.parametrize('tzstr', ['US/Eastern', 'dateutil/US/Eastern'])
def test_series_tz_localize_empty(self, tzstr):
# GH#2248
ser = Series()
ser2 = ser.tz_localize('utc')
assert ser2.index.tz == pytz.utc
ser2 = ser.tz_localize(tzstr)
timezones.tz_compare(ser2.index.tz, timezones.maybe_get_tz(tzstr))
# -----------------------------------------------------------------
# Series.tz_convert
def test_series_tz_convert(self):
rng = date_range('1/1/2011', periods=200, freq='D', tz='US/Eastern')
ts = Series(1, index=rng)
result = ts.tz_convert('Europe/Berlin')
assert result.index.tz.zone == 'Europe/Berlin'
# can't convert tz-naive
rng = date_range('1/1/2011', periods=200, freq='D')
ts = Series(1, index=rng)
tm.assert_raises_regex(TypeError, "Cannot convert tz-naive",
ts.tz_convert, 'US/Eastern')
def test_series_tz_convert_to_utc(self):
base = DatetimeIndex(['2011-01-01', '2011-01-02', '2011-01-03'],
tz='UTC')
idx1 = base.tz_convert('Asia/Tokyo')[:2]
idx2 = base.tz_convert('US/Eastern')[1:]
res = Series([1, 2], index=idx1) + Series([1, 1], index=idx2)
tm.assert_series_equal(res, Series([np.nan, 3, np.nan], index=base))
# -----------------------------------------------------------------
# Series.append
def test_series_append_aware(self):
rng1 = date_range('1/1/2011 01:00', periods=1, freq='H',
tz='US/Eastern')
rng2 = date_range('1/1/2011 02:00', periods=1, freq='H',
tz='US/Eastern')
ser1 = Series([1], index=rng1)
ser2 = Series([2], index=rng2)
ts_result = ser1.append(ser2)
exp_index = DatetimeIndex(['2011-01-01 01:00', '2011-01-01 02:00'],
tz='US/Eastern')
exp = Series([1, 2], index=exp_index)
tm.assert_series_equal(ts_result, exp)
assert ts_result.index.tz == rng1.tz
rng1 = date_range('1/1/2011 01:00', periods=1, freq='H', tz='UTC')
rng2 = date_range('1/1/2011 02:00', periods=1, freq='H', tz='UTC')
ser1 = Series([1], index=rng1)
ser2 = Series([2], index=rng2)
ts_result = ser1.append(ser2)
exp_index = DatetimeIndex(['2011-01-01 01:00', '2011-01-01 02:00'],
tz='UTC')
exp = Series([1, 2], index=exp_index)
tm.assert_series_equal(ts_result, exp)
utc = rng1.tz
assert utc == ts_result.index.tz
# GH#7795
# different tz coerces to object dtype, not UTC
rng1 = date_range('1/1/2011 01:00', periods=1, freq='H',
tz='US/Eastern')
rng2 = date_range('1/1/2011 02:00', periods=1, freq='H',
tz='US/Central')
ser1 = Series([1], index=rng1)
ser2 = Series([2], index=rng2)
ts_result = ser1.append(ser2)
exp_index = Index([Timestamp('1/1/2011 01:00', tz='US/Eastern'),
Timestamp('1/1/2011 02:00', tz='US/Central')])
exp = Series([1, 2], index=exp_index)
tm.assert_series_equal(ts_result, exp)
def test_series_append_aware_naive(self):
rng1 = date_range('1/1/2011 01:00', periods=1, freq='H')
rng2 = date_range('1/1/2011 02:00', periods=1, freq='H',
tz='US/Eastern')
ser1 = Series(np.random.randn(len(rng1)), index=rng1)
ser2 = Series(np.random.randn(len(rng2)), index=rng2)
ts_result = ser1.append(ser2)
expected = ser1.index.astype(object).append(ser2.index.astype(object))
assert ts_result.index.equals(expected)
# mixed
rng1 = date_range('1/1/2011 01:00', periods=1, freq='H')
rng2 = lrange(100)
ser1 = Series(np.random.randn(len(rng1)), index=rng1)
ser2 = Series(np.random.randn(len(rng2)), index=rng2)
ts_result = ser1.append(ser2)
expected = ser1.index.astype(object).append(ser2.index)
assert ts_result.index.equals(expected)
def test_series_append_dst(self):
rng1 = date_range('1/1/2016 01:00', periods=3, freq='H',
tz='US/Eastern')
rng2 = date_range('8/1/2016 01:00', periods=3, freq='H',
tz='US/Eastern')
ser1 = Series([1, 2, 3], index=rng1)
ser2 = Series([10, 11, 12], index=rng2)
ts_result = ser1.append(ser2)
exp_index = DatetimeIndex(['2016-01-01 01:00', '2016-01-01 02:00',
'2016-01-01 03:00', '2016-08-01 01:00',
'2016-08-01 02:00', '2016-08-01 03:00'],
tz='US/Eastern')
exp = Series([1, 2, 3, 10, 11, 12], index=exp_index)
tm.assert_series_equal(ts_result, exp)
assert ts_result.index.tz == rng1.tz
# -----------------------------------------------------------------
def test_dateutil_tzoffset_support(self):
values = [188.5, 328.25]
tzinfo = tzoffset(None, 7200)
index = [datetime(2012, 5, 11, 11, tzinfo=tzinfo),
datetime(2012, 5, 11, 12, tzinfo=tzinfo)]
series = Series(data=values, index=index)
assert series.index.tz == tzinfo
# it works! #2443
repr(series.index[0])
@pytest.mark.parametrize('tz', ['US/Eastern', 'dateutil/US/Eastern'])
def test_tz_aware_asfreq(self, tz):
dr = date_range('2011-12-01', '2012-07-20', freq='D', tz=tz)
ser = Series(np.random.randn(len(dr)), index=dr)
# it works!
ser.asfreq('T')
@pytest.mark.parametrize('tz', ['US/Eastern', 'dateutil/US/Eastern'])
def test_string_index_alias_tz_aware(self, tz):
rng = date_range('1/1/2000', periods=10, tz=tz)
ser = Series(np.random.randn(len(rng)), index=rng)
result = ser['1/3/2000']
tm.assert_almost_equal(result, ser[2])
# TODO: De-duplicate with test below
def test_series_add_tz_mismatch_converts_to_utc_duplicate(self):
rng = date_range('1/1/2011', periods=10, freq='H', tz='US/Eastern')
ser = Series(np.random.randn(len(rng)), index=rng)
ts_moscow = ser.tz_convert('Europe/Moscow')
result = ser + ts_moscow
assert result.index.tz is pytz.utc
result = ts_moscow + ser
assert result.index.tz is pytz.utc
def test_series_add_tz_mismatch_converts_to_utc(self):
rng = date_range('1/1/2011', periods=100, freq='H', tz='utc')
perm = np.random.permutation(100)[:90]
ser1 = Series(np.random.randn(90),
index=rng.take(perm).tz_convert('US/Eastern'))
perm = np.random.permutation(100)[:90]
ser2 = Series(np.random.randn(90),
index=rng.take(perm).tz_convert('Europe/Berlin'))
result = ser1 + ser2
uts1 = ser1.tz_convert('utc')
uts2 = ser2.tz_convert('utc')
expected = uts1 + uts2
assert result.index.tz == pytz.UTC
tm.assert_series_equal(result, expected)
def test_series_add_aware_naive_raises(self):
rng = date_range('1/1/2011', periods=10, freq='H')
ser = Series(np.random.randn(len(rng)), index=rng)
ser_utc = ser.tz_localize('utc')
with pytest.raises(Exception):
ser + ser_utc
with pytest.raises(Exception):
ser_utc + ser
def test_series_align_aware(self):
idx1 = date_range('2001', periods=5, freq='H', tz='US/Eastern')
ser = Series(np.random.randn(len(idx1)), index=idx1)
ser_central = ser.tz_convert('US/Central')
        # different timezones convert to UTC
new1, new2 = ser.align(ser_central)
assert new1.index.tz == pytz.UTC
assert new2.index.tz == pytz.UTC
@pytest.mark.parametrize('tzstr', ['US/Eastern', 'dateutil/US/Eastern'])
def test_localized_at_time_between_time(self, tzstr):
from datetime import time
tz = timezones.maybe_get_tz(tzstr)
rng = date_range('4/16/2012', '5/1/2012', freq='H')
ts = Series(np.random.randn(len(rng)), index=rng)
ts_local = ts.tz_localize(tzstr)
result = ts_local.at_time(time(10, 0))
expected = ts.at_time(time(10, 0)).tz_localize(tzstr)
tm.assert_series_equal(result, expected)
assert timezones.tz_compare(result.index.tz, tz)
t1, t2 = time(10, 0), time(11, 0)
result = ts_local.between_time(t1, t2)
expected = ts.between_time(t1, t2).tz_localize(tzstr)
tm.assert_series_equal(result, expected)
assert timezones.tz_compare(result.index.tz, tz)
@pytest.mark.parametrize('tzstr', ['Europe/Berlin',
'dateutil/Europe/Berlin'])
def test_getitem_pydatetime_tz(self, tzstr):
tz = timezones.maybe_get_tz(tzstr)
index = date_range(start='2012-12-24 16:00', end='2012-12-24 18:00',
freq='H', tz=tzstr)
ts = Series(index=index, data=index.hour)
time_pandas = Timestamp('2012-12-24 17:00', tz=tzstr)
dt = datetime(2012, 12, 24, 17, 0)
time_datetime = conversion.localize_pydatetime(dt, tz)
assert ts[time_pandas] == ts[time_datetime]
def test_series_truncate_datetimeindex_tz(self):
# GH 9243
idx = date_range('4/1/2005', '4/30/2005', freq='D', tz='US/Pacific')
s = Series(range(len(idx)), index=idx)
result = s.truncate(datetime(2005, 4, 2), datetime(2005, 4, 4))
expected = Series([1, 2, 3], index=idx[1:4])
tm.assert_series_equal(result, expected)
|
|
"Parses and creates Grammar objects"
import os.path
import sys
from copy import copy, deepcopy
from io import open
from .utils import bfs, eval_escaping
from .lexer import Token, TerminalDef, PatternStr, PatternRE
from .parse_tree_builder import ParseTreeBuilder
from .parser_frontends import LALR_TraditionalLexer
from .common import LexerConf, ParserConf
from .grammar import RuleOptions, Rule, Terminal, NonTerminal, Symbol
from .utils import classify, suppress, dedup_list, Str
from .exceptions import GrammarError, UnexpectedCharacters, UnexpectedToken
from .tree import Tree, SlottedTree as ST
from .visitors import Transformer, Visitor, v_args, Transformer_InPlace, Transformer_NonRecursive
inline_args = v_args(inline=True)
__path__ = os.path.dirname(__file__)
IMPORT_PATHS = [os.path.join(__path__, 'grammars')]
EXT = '.lark'
_RE_FLAGS = 'imslux'
_EMPTY = Symbol('__empty__')
_TERMINAL_NAMES = {
'.' : 'DOT',
',' : 'COMMA',
':' : 'COLON',
';' : 'SEMICOLON',
'+' : 'PLUS',
'-' : 'MINUS',
'*' : 'STAR',
'/' : 'SLASH',
'\\' : 'BACKSLASH',
'|' : 'VBAR',
'?' : 'QMARK',
'!' : 'BANG',
'@' : 'AT',
'#' : 'HASH',
'$' : 'DOLLAR',
'%' : 'PERCENT',
'^' : 'CIRCUMFLEX',
'&' : 'AMPERSAND',
'_' : 'UNDERSCORE',
'<' : 'LESSTHAN',
'>' : 'MORETHAN',
'=' : 'EQUAL',
'"' : 'DBLQUOTE',
'\'' : 'QUOTE',
'`' : 'BACKQUOTE',
'~' : 'TILDE',
'(' : 'LPAR',
')' : 'RPAR',
'{' : 'LBRACE',
'}' : 'RBRACE',
'[' : 'LSQB',
']' : 'RSQB',
'\n' : 'NEWLINE',
'\r\n' : 'CRLF',
'\t' : 'TAB',
' ' : 'SPACE',
}
# Grammar Parser
TERMINALS = {
'_LPAR': r'\(',
'_RPAR': r'\)',
'_LBRA': r'\[',
'_RBRA': r'\]',
'_LBRACE': r'\{',
'_RBRACE': r'\}',
'OP': '[+*]|[?](?![a-z])',
'_COLON': ':',
'_COMMA': ',',
'_OR': r'\|',
'_DOT': r'\.(?!\.)',
'_DOTDOT': r'\.\.',
'TILDE': '~',
'RULE': '!?[_?]?[a-z][_a-z0-9]*',
'TERMINAL': '_?[A-Z][_A-Z0-9]*',
'STRING': r'"(\\"|\\\\|[^"\n])*?"i?',
'REGEXP': r'/(?!/)(\\/|\\\\|[^/\n])*?/[%s]*' % _RE_FLAGS,
'_NL': r'(\r?\n)+\s*',
'WS': r'[ \t]+',
'COMMENT': r'\s*//[^\n]*',
'_TO': '->',
'_IGNORE': r'%ignore',
'_DECLARE': r'%declare',
'_IMPORT': r'%import',
'NUMBER': r'[+-]?\d+',
}
RULES = {
'start': ['_list'],
'_list': ['_item', '_list _item'],
'_item': ['rule', 'term', 'statement', '_NL'],
'rule': ['RULE template_params _COLON expansions _NL',
'RULE template_params _DOT NUMBER _COLON expansions _NL'],
'template_params': ['_LBRACE _template_params _RBRACE',
''],
'_template_params': ['RULE',
'_template_params _COMMA RULE'],
'expansions': ['alias',
'expansions _OR alias',
'expansions _NL _OR alias'],
'?alias': ['expansion _TO RULE', 'expansion'],
'expansion': ['_expansion'],
'_expansion': ['', '_expansion expr'],
'?expr': ['atom',
'atom OP',
'atom TILDE NUMBER',
'atom TILDE NUMBER _DOTDOT NUMBER',
],
'?atom': ['_LPAR expansions _RPAR',
'maybe',
'value'],
'value': ['terminal',
'nonterminal',
'literal',
'range',
'template_usage'],
'terminal': ['TERMINAL'],
'nonterminal': ['RULE'],
'?name': ['RULE', 'TERMINAL'],
'maybe': ['_LBRA expansions _RBRA'],
'range': ['STRING _DOTDOT STRING'],
'template_usage': ['RULE _LBRACE _template_args _RBRACE'],
'_template_args': ['value',
'_template_args _COMMA value'],
'term': ['TERMINAL _COLON expansions _NL',
'TERMINAL _DOT NUMBER _COLON expansions _NL'],
'statement': ['ignore', 'import', 'declare'],
'ignore': ['_IGNORE expansions _NL'],
'declare': ['_DECLARE _declare_args _NL'],
'import': ['_IMPORT _import_path _NL',
'_IMPORT _import_path _LPAR name_list _RPAR _NL',
'_IMPORT _import_path _TO name _NL'],
'_import_path': ['import_lib', 'import_rel'],
'import_lib': ['_import_args'],
'import_rel': ['_DOT _import_args'],
'_import_args': ['name', '_import_args _DOT name'],
'name_list': ['_name_list'],
'_name_list': ['name', '_name_list _COMMA name'],
'_declare_args': ['name', '_declare_args name'],
'literal': ['REGEXP', 'STRING'],
}
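# TERMINALS and RULES above make up the meta-grammar for .lark files; GrammarLoader (below)
# compiles them directly into a small LALR parser used to parse user grammars.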
@inline_args
class EBNF_to_BNF(Transformer_InPlace):
def __init__(self):
self.new_rules = []
self.rules_by_expr = {}
self.prefix = 'anon'
self.i = 0
self.rule_options = None
def _add_recurse_rule(self, type_, expr):
if expr in self.rules_by_expr:
return self.rules_by_expr[expr]
new_name = '__%s_%s_%d' % (self.prefix, type_, self.i)
self.i += 1
t = NonTerminal(new_name)
tree = ST('expansions', [ST('expansion', [expr]), ST('expansion', [t, expr])])
self.new_rules.append((new_name, tree, self.rule_options))
self.rules_by_expr[expr] = t
return t
def expr(self, rule, op, *args):
if op.value == '?':
empty = ST('expansion', [])
return ST('expansions', [rule, empty])
elif op.value == '+':
# a : b c+ d
# -->
# a : b _c d
# _c : _c c | c;
return self._add_recurse_rule('plus', rule)
elif op.value == '*':
# a : b c* d
# -->
# a : b _c? d
# _c : _c c | c;
new_name = self._add_recurse_rule('star', rule)
return ST('expansions', [new_name, ST('expansion', [])])
elif op.value == '~':
if len(args) == 1:
mn = mx = int(args[0])
else:
mn, mx = map(int, args)
if mx < mn or mn < 0:
raise GrammarError("Bad Range for %s (%d..%d isn't allowed)" % (rule, mn, mx))
return ST('expansions', [ST('expansion', [rule] * n) for n in range(mn, mx+1)])
assert False, op
def maybe(self, rule):
keep_all_tokens = self.rule_options and self.rule_options.keep_all_tokens
def will_not_get_removed(sym):
if isinstance(sym, NonTerminal):
return not sym.name.startswith('_')
if isinstance(sym, Terminal):
return keep_all_tokens or not sym.filter_out
assert False
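        # If the optional content would appear in the parse tree, mark the empty alternative
        # with the _EMPTY placeholder so compile() can record its position (empty_indices)
        # instead of silently dropping it; otherwise a plain empty expansion is enough.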
if any(rule.scan_values(will_not_get_removed)):
empty = _EMPTY
else:
empty = ST('expansion', [])
return ST('expansions', [rule, empty])
class SimplifyRule_Visitor(Visitor):
@staticmethod
def _flatten(tree):
while True:
to_expand = [i for i, child in enumerate(tree.children)
if isinstance(child, Tree) and child.data == tree.data]
if not to_expand:
break
tree.expand_kids_by_index(*to_expand)
def expansion(self, tree):
# rules_list unpacking
# a : b (c|d) e
# -->
# a : b c e | b d e
#
# In AST terms:
# expansion(b, expansions(c, d), e)
# -->
# expansions( expansion(b, c, e), expansion(b, d, e) )
self._flatten(tree)
for i, child in enumerate(tree.children):
if isinstance(child, Tree) and child.data == 'expansions':
tree.data = 'expansions'
tree.children = [self.visit(ST('expansion', [option if i==j else other
for j, other in enumerate(tree.children)]))
for option in dedup_list(child.children)]
self._flatten(tree)
break
def alias(self, tree):
rule, alias_name = tree.children
if rule.data == 'expansions':
aliases = []
for child in tree.children[0].children:
aliases.append(ST('alias', [child, alias_name]))
tree.data = 'expansions'
tree.children = aliases
def expansions(self, tree):
self._flatten(tree)
# Ensure all children are unique
if len(set(tree.children)) != len(tree.children):
tree.children = dedup_list(tree.children) # dedup is expensive, so try to minimize its use
class RuleTreeToText(Transformer):
def expansions(self, x):
return x
def expansion(self, symbols):
return symbols, None
def alias(self, x):
(expansion, _alias), alias = x
assert _alias is None, (alias, expansion, '-', _alias) # Double alias not allowed
return expansion, alias.value
@inline_args
class CanonizeTree(Transformer_InPlace):
def tokenmods(self, *args):
if len(args) == 1:
return list(args)
tokenmods, value = args
return tokenmods + [value]
class PrepareAnonTerminals(Transformer_InPlace):
"Create a unique list of anonymous terminals. Attempt to give meaningful names to them when we add them"
def __init__(self, terminals):
self.terminals = terminals
self.term_set = {td.name for td in self.terminals}
self.term_reverse = {td.pattern: td for td in terminals}
self.i = 0
@inline_args
def pattern(self, p):
value = p.value
if p in self.term_reverse and p.flags != self.term_reverse[p].pattern.flags:
raise GrammarError(u'Conflicting flags for the same terminal: %s' % p)
term_name = None
if isinstance(p, PatternStr):
try:
# If already defined, use the user-defined terminal name
term_name = self.term_reverse[p].name
except KeyError:
# Try to assign an indicative anon-terminal name
try:
term_name = _TERMINAL_NAMES[value]
except KeyError:
if value.isalnum() and value[0].isalpha() and value.upper() not in self.term_set:
with suppress(UnicodeEncodeError):
value.upper().encode('ascii') # Make sure we don't have unicode in our terminal names
term_name = value.upper()
if term_name in self.term_set:
term_name = None
elif isinstance(p, PatternRE):
            if p in self.term_reverse:  # Kind of a weird placement
term_name = self.term_reverse[p].name
else:
assert False, p
if term_name is None:
term_name = '__ANON_%d' % self.i
self.i += 1
if term_name not in self.term_set:
assert p not in self.term_reverse
self.term_set.add(term_name)
termdef = TerminalDef(term_name, p)
self.term_reverse[p] = termdef
self.terminals.append(termdef)
return Terminal(term_name, filter_out=isinstance(p, PatternStr))
class _ReplaceSymbols(Transformer_InPlace):
" Helper for ApplyTemplates "
def __init__(self):
self.names = {}
def value(self, c):
if len(c) == 1 and isinstance(c[0], Token) and c[0].value in self.names:
return self.names[c[0].value]
return self.__default__('value', c, None)
def template_usage(self, c):
if c[0] in self.names:
return self.__default__('template_usage', [self.names[c[0]].name] + c[1:], None)
return self.__default__('template_usage', c, None)
class ApplyTemplates(Transformer_InPlace):
" Apply the templates, creating new rules that represent the used templates "
def __init__(self, rule_defs):
self.rule_defs = rule_defs
self.replacer = _ReplaceSymbols()
self.created_templates = set()
def template_usage(self, c):
name = c[0]
args = c[1:]
result_name = "%s{%s}" % (name, ",".join(a.name for a in args))
if result_name not in self.created_templates:
self.created_templates.add(result_name)
(_n, params, tree, options) ,= (t for t in self.rule_defs if t[0] == name)
assert len(params) == len(args), args
result_tree = deepcopy(tree)
self.replacer.names = dict(zip(params, args))
self.replacer.transform(result_tree)
self.rule_defs.append((result_name, [], result_tree, deepcopy(options)))
return NonTerminal(result_name)
def _rfind(s, choices):
return max(s.rfind(c) for c in choices)
def _literal_to_pattern(literal):
v = literal.value
flag_start = _rfind(v, '/"')+1
assert flag_start > 0
flags = v[flag_start:]
assert all(f in _RE_FLAGS for f in flags), flags
v = v[:flag_start]
assert v[0] == v[-1] and v[0] in '"/'
x = v[1:-1]
s = eval_escaping(x)
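    # e.g. '"abc"i' -> PatternStr('abc', flags='i');  '/a+/im' -> PatternRE('a+', flags='im')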
if literal.type == 'STRING':
s = s.replace('\\\\', '\\')
return { 'STRING': PatternStr,
'REGEXP': PatternRE }[literal.type](s, flags)
@inline_args
class PrepareLiterals(Transformer_InPlace):
def literal(self, literal):
return ST('pattern', [_literal_to_pattern(literal)])
def range(self, start, end):
assert start.type == end.type == 'STRING'
start = start.value[1:-1]
end = end.value[1:-1]
assert len(eval_escaping(start)) == len(eval_escaping(end)) == 1, (start, end, len(eval_escaping(start)), len(eval_escaping(end)))
regexp = '[%s-%s]' % (start, end)
return ST('pattern', [PatternRE(regexp)])
class TerminalTreeToPattern(Transformer):
def pattern(self, ps):
p ,= ps
return p
def expansion(self, items):
assert items
if len(items) == 1:
return items[0]
if len({i.flags for i in items}) > 1:
raise GrammarError("Lark doesn't support joining terminals with conflicting flags!")
return PatternRE(''.join(i.to_regexp() for i in items), items[0].flags if items else ())
def expansions(self, exps):
if len(exps) == 1:
return exps[0]
if len({i.flags for i in exps}) > 1:
raise GrammarError("Lark doesn't support joining terminals with conflicting flags!")
return PatternRE('(?:%s)' % ('|'.join(i.to_regexp() for i in exps)), exps[0].flags)
def expr(self, args):
inner, op = args[:2]
if op == '~':
if len(args) == 3:
op = "{%d}" % int(args[2])
else:
mn, mx = map(int, args[2:])
if mx < mn:
raise GrammarError("Bad Range for %s (%d..%d isn't allowed)" % (inner, mn, mx))
op = "{%d,%d}" % (mn, mx)
else:
assert len(args) == 2
return PatternRE('(?:%s)%s' % (inner.to_regexp(), op), inner.flags)
def maybe(self, expr):
return self.expr(expr + ['?'])
def alias(self, t):
raise GrammarError("Aliasing not allowed in terminals (You used -> in the wrong place)")
def value(self, v):
return v[0]
class PrepareSymbols(Transformer_InPlace):
def value(self, v):
v ,= v
if isinstance(v, Tree):
return v
elif v.type == 'RULE':
return NonTerminal(Str(v.value))
elif v.type == 'TERMINAL':
return Terminal(Str(v.value), filter_out=v.startswith('_'))
assert False
def _choice_of_rules(rules):
return ST('expansions', [ST('expansion', [Token('RULE', name)]) for name in rules])
def nr_deepcopy_tree(t):
"Deepcopy tree `t` without recursion"
return Transformer_NonRecursive(False).transform(t)
class Grammar:
def __init__(self, rule_defs, term_defs, ignore):
self.term_defs = term_defs
self.rule_defs = rule_defs
self.ignore = ignore
def compile(self, start):
# We change the trees in-place (to support huge grammars)
# So deepcopy allows calling compile more than once.
term_defs = deepcopy(list(self.term_defs))
rule_defs = [(n,p,nr_deepcopy_tree(t),o) for n,p,t,o in self.rule_defs]
# ===================
# Compile Terminals
# ===================
# Convert terminal-trees to strings/regexps
for name, (term_tree, priority) in term_defs:
if term_tree is None: # Terminal added through %declare
continue
expansions = list(term_tree.find_data('expansion'))
if len(expansions) == 1 and not expansions[0].children:
raise GrammarError("Terminals cannot be empty (%s)" % name)
transformer = PrepareLiterals() * TerminalTreeToPattern()
terminals = [TerminalDef(name, transformer.transform( term_tree ), priority)
for name, (term_tree, priority) in term_defs if term_tree]
# =================
# Compile Rules
# =================
# 1. Pre-process terminals
transformer = PrepareLiterals() * PrepareSymbols() * PrepareAnonTerminals(terminals) # Adds to terminals
# 2. Inline Templates
transformer *= ApplyTemplates(rule_defs)
# 3. Convert EBNF to BNF (and apply step 1 & 2)
ebnf_to_bnf = EBNF_to_BNF()
rules = []
i = 0
while i < len(rule_defs): # We have to do it like this because rule_defs might grow due to templates
name, params, rule_tree, options = rule_defs[i]
i += 1
            if len(params) != 0:  # Don't transform templates
continue
ebnf_to_bnf.rule_options = RuleOptions(keep_all_tokens=True) if options.keep_all_tokens else None
ebnf_to_bnf.prefix = name
tree = transformer.transform(rule_tree)
res = ebnf_to_bnf.transform(tree)
rules.append((name, res, options))
rules += ebnf_to_bnf.new_rules
assert len(rules) == len({name for name, _t, _o in rules}), "Whoops, name collision"
# 4. Compile tree to Rule objects
rule_tree_to_text = RuleTreeToText()
simplify_rule = SimplifyRule_Visitor()
compiled_rules = []
for rule_content in rules:
name, tree, options = rule_content
simplify_rule.visit(tree)
expansions = rule_tree_to_text.transform(tree)
for i, (expansion, alias) in enumerate(expansions):
if alias and name.startswith('_'):
raise GrammarError("Rule %s is marked for expansion (it starts with an underscore) and isn't allowed to have aliases (alias=%s)" % (name, alias))
empty_indices = [x==_EMPTY for x in expansion]
if any(empty_indices):
exp_options = copy(options) or RuleOptions()
exp_options.empty_indices = empty_indices
expansion = [x for x in expansion if x!=_EMPTY]
else:
exp_options = options
assert all(isinstance(x, Symbol) for x in expansion), expansion
rule = Rule(NonTerminal(name), expansion, i, alias, exp_options)
compiled_rules.append(rule)
# Remove duplicates of empty rules, throw error for non-empty duplicates
if len(set(compiled_rules)) != len(compiled_rules):
duplicates = classify(compiled_rules, lambda x: x)
for dups in duplicates.values():
if len(dups) > 1:
if dups[0].expansion:
raise GrammarError("Rules defined twice: %s\n\n(Might happen due to colliding expansion of optionals: [] or ?)"
% ''.join('\n * %s' % i for i in dups))
# Empty rule; assert all other attributes are equal
assert len({(r.alias, r.order, r.options) for r in dups}) == len(dups)
# Remove duplicates
compiled_rules = list(set(compiled_rules))
# Filter out unused rules
while True:
c = len(compiled_rules)
used_rules = {s for r in compiled_rules
for s in r.expansion
if isinstance(s, NonTerminal)
and s != r.origin}
used_rules |= {NonTerminal(s) for s in start}
compiled_rules = [r for r in compiled_rules if r.origin in used_rules]
if len(compiled_rules) == c:
break
# Filter out unused terminals
used_terms = {t.name for r in compiled_rules
for t in r.expansion
if isinstance(t, Terminal)}
terminals = [t for t in terminals if t.name in used_terms or t.name in self.ignore]
return terminals, compiled_rules, self.ignore
_imported_grammars = {}
def import_grammar(grammar_path, re_, base_paths=[]):
if grammar_path not in _imported_grammars:
import_paths = base_paths + IMPORT_PATHS
for import_path in import_paths:
with suppress(IOError):
joined_path = os.path.join(import_path, grammar_path)
with open(joined_path, encoding='utf8') as f:
text = f.read()
grammar = load_grammar(text, joined_path, re_)
_imported_grammars[grammar_path] = grammar
break
else:
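            # Nothing matched on any import path: open the raw path so the caller gets a
            # descriptive IOError; the assert below is never expected to be reached.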
open(grammar_path, encoding='utf8')
assert False
return _imported_grammars[grammar_path]
def import_from_grammar_into_namespace(grammar, namespace, aliases):
"""Returns all rules and terminals of grammar, prepended
with a 'namespace' prefix, except for those which are aliased.
"""
imported_terms = dict(grammar.term_defs)
imported_rules = {n:(n,p,deepcopy(t),o) for n,p,t,o in grammar.rule_defs}
term_defs = []
rule_defs = []
def rule_dependencies(symbol):
if symbol.type != 'RULE':
return []
try:
_, params, tree,_ = imported_rules[symbol]
except KeyError:
raise GrammarError("Missing symbol '%s' in grammar %s" % (symbol, namespace))
return _find_used_symbols(tree) - set(params)
def get_namespace_name(name, params):
if params is not None:
try:
return params[name]
except KeyError:
pass
try:
return aliases[name].value
except KeyError:
if name[0] == '_':
return '_%s__%s' % (namespace, name[1:])
return '%s__%s' % (namespace, name)
to_import = list(bfs(aliases, rule_dependencies))
for symbol in to_import:
if symbol.type == 'TERMINAL':
term_defs.append([get_namespace_name(symbol, None), imported_terms[symbol]])
else:
assert symbol.type == 'RULE'
_, params, tree, options = imported_rules[symbol]
params_map = {p: ('%s__%s' if p[0]!='_' else '_%s__%s' ) % (namespace, p) for p in params}
for t in tree.iter_subtrees():
for i, c in enumerate(t.children):
if isinstance(c, Token) and c.type in ('RULE', 'TERMINAL'):
t.children[i] = Token(c.type, get_namespace_name(c, params_map))
params = [params_map[p] for p in params] # We can not rely on ordered dictionaries
rule_defs.append((get_namespace_name(symbol, params_map), params, tree, options))
return term_defs, rule_defs
def resolve_term_references(term_defs):
# TODO Solve with transitive closure (maybe)
term_dict = {k:t for k, (t,_p) in term_defs}
assert len(term_dict) == len(term_defs), "Same name defined twice?"
while True:
changed = False
for name, (token_tree, _p) in term_defs:
if token_tree is None: # Terminal added through %declare
continue
for exp in token_tree.find_data('value'):
item ,= exp.children
if isinstance(item, Token):
if item.type == 'RULE':
raise GrammarError("Rules aren't allowed inside terminals (%s in %s)" % (item, name))
if item.type == 'TERMINAL':
term_value = term_dict[item]
assert term_value is not None
exp.children[0] = term_value
changed = True
if not changed:
break
for name, term in term_dict.items():
if term: # Not just declared
for child in term.children:
ids = [id(x) for x in child.iter_subtrees()]
if id(term) in ids:
raise GrammarError("Recursion in terminal '%s' (recursion is only allowed in rules, not terminals)" % name)
def options_from_rule(name, params, *x):
if len(x) > 1:
priority, expansions = x
priority = int(priority)
else:
expansions ,= x
priority = None
params = [t.value for t in params.children] if params is not None else [] # For the grammar parser
keep_all_tokens = name.startswith('!')
name = name.lstrip('!')
expand1 = name.startswith('?')
name = name.lstrip('?')
return name, params, expansions, RuleOptions(keep_all_tokens, expand1, priority=priority,
template_source=(name if params else None))
def symbols_from_strcase(expansion):
return [Terminal(x, filter_out=x.startswith('_')) if x.isupper() else NonTerminal(x) for x in expansion]
@inline_args
class PrepareGrammar(Transformer_InPlace):
def terminal(self, name):
return name
def nonterminal(self, name):
return name
def _find_used_symbols(tree):
assert tree.data == 'expansions'
return {t for x in tree.find_data('expansion')
for t in x.scan_values(lambda t: t.type in ('RULE', 'TERMINAL'))}
class GrammarLoader:
def __init__(self, re_):
self.re = re_
terminals = [TerminalDef(name, PatternRE(value)) for name, value in TERMINALS.items()]
rules = [options_from_rule(name, None, x) for name, x in RULES.items()]
rules = [Rule(NonTerminal(r), symbols_from_strcase(x.split()), i, None, o) for r, _p, xs, o in rules for i, x in enumerate(xs)]
callback = ParseTreeBuilder(rules, ST).create_callback()
lexer_conf = LexerConf(terminals, ['WS', 'COMMENT'])
parser_conf = ParserConf(rules, callback, ['start'])
self.parser = LALR_TraditionalLexer(lexer_conf, parser_conf, re_)
self.canonize_tree = CanonizeTree()
def load_grammar(self, grammar_text, grammar_name='<?>'):
"Parse grammar_text, verify, and create Grammar object. Display nice messages on error."
try:
tree = self.canonize_tree.transform( self.parser.parse(grammar_text+'\n') )
except UnexpectedCharacters as e:
context = e.get_context(grammar_text)
raise GrammarError("Unexpected input at line %d column %d in %s: \n\n%s" %
(e.line, e.column, grammar_name, context))
except UnexpectedToken as e:
context = e.get_context(grammar_text)
error = e.match_examples(self.parser.parse, {
'Unclosed parenthesis': ['a: (\n'],
'Unmatched closing parenthesis': ['a: )\n', 'a: [)\n', 'a: (]\n'],
'Expecting rule or terminal definition (missing colon)': ['a\n', 'a->\n', 'A->\n', 'a A\n'],
'Alias expects lowercase name': ['a: -> "a"\n'],
'Unexpected colon': ['a::\n', 'a: b:\n', 'a: B:\n', 'a: "a":\n'],
'Misplaced operator': ['a: b??', 'a: b(?)', 'a:+\n', 'a:?\n', 'a:*\n', 'a:|*\n'],
'Expecting option ("|") or a new rule or terminal definition': ['a:a\n()\n'],
'%import expects a name': ['%import "a"\n'],
'%ignore expects a value': ['%ignore %import\n'],
})
if error:
raise GrammarError("%s at line %s column %s\n\n%s" % (error, e.line, e.column, context))
elif 'STRING' in e.expected:
raise GrammarError("Expecting a value at line %s column %s\n\n%s" % (e.line, e.column, context))
raise
tree = PrepareGrammar().transform(tree)
# Extract grammar items
defs = classify(tree.children, lambda c: c.data, lambda c: c.children)
term_defs = defs.pop('term', [])
rule_defs = defs.pop('rule', [])
statements = defs.pop('statement', [])
assert not defs
term_defs = [td if len(td)==3 else (td[0], 1, td[1]) for td in term_defs]
term_defs = [(name.value, (t, int(p))) for name, p, t in term_defs]
rule_defs = [options_from_rule(*x) for x in rule_defs]
# Execute statements
ignore, imports = [], {}
for (stmt,) in statements:
if stmt.data == 'ignore':
t ,= stmt.children
ignore.append(t)
elif stmt.data == 'import':
if len(stmt.children) > 1:
path_node, arg1 = stmt.children
else:
path_node, = stmt.children
arg1 = None
if isinstance(arg1, Tree): # Multi import
dotted_path = tuple(path_node.children)
names = arg1.children
aliases = dict(zip(names, names)) # Can't have aliased multi import, so all aliases will be the same as names
else: # Single import
dotted_path = tuple(path_node.children[:-1])
name = path_node.children[-1] # Get name from dotted path
aliases = {name: arg1 or name} # Aliases if exist
if path_node.data == 'import_lib': # Import from library
base_paths = []
else: # Relative import
if grammar_name == '<string>': # Import relative to script file path if grammar is coded in script
try:
base_file = os.path.abspath(sys.modules['__main__'].__file__)
except AttributeError:
base_file = None
else:
base_file = grammar_name # Import relative to grammar file path if external grammar file
if base_file:
base_paths = [os.path.split(base_file)[0]]
else:
base_paths = [os.path.abspath(os.path.curdir)]
try:
import_base_paths, import_aliases = imports[dotted_path]
assert base_paths == import_base_paths, 'Inconsistent base_paths for %s.' % '.'.join(dotted_path)
import_aliases.update(aliases)
except KeyError:
imports[dotted_path] = base_paths, aliases
elif stmt.data == 'declare':
for t in stmt.children:
term_defs.append([t.value, (None, None)])
else:
assert False, stmt
# import grammars
for dotted_path, (base_paths, aliases) in imports.items():
grammar_path = os.path.join(*dotted_path) + EXT
g = import_grammar(grammar_path, self.re, base_paths=base_paths)
new_td, new_rd = import_from_grammar_into_namespace(g, '__'.join(dotted_path), aliases)
term_defs += new_td
rule_defs += new_rd
# Verify correctness 1
for name, _ in term_defs:
if name.startswith('__'):
raise GrammarError('Names starting with double-underscore are reserved (Error at %s)' % name)
# Handle ignore tokens
# XXX A slightly hacky solution. Recognition of %ignore TERMINAL as separate comes from the lexer's
# inability to handle duplicate terminals (two names, one value)
ignore_names = []
for t in ignore:
if t.data=='expansions' and len(t.children) == 1:
t2 ,= t.children
if t2.data=='expansion' and len(t2.children) == 1:
item ,= t2.children
if item.data == 'value':
item ,= item.children
if isinstance(item, Token) and item.type == 'TERMINAL':
ignore_names.append(item.value)
continue
name = '__IGNORE_%d'% len(ignore_names)
ignore_names.append(name)
term_defs.append((name, (t, 1)))
# Verify correctness 2
terminal_names = set()
for name, _ in term_defs:
if name in terminal_names:
raise GrammarError("Terminal '%s' defined more than once" % name)
terminal_names.add(name)
if not set(ignore_names) <= terminal_names:
raise GrammarError("Terminals %s were marked to ignore but were not defined!" % (set(ignore_names) - terminal_names))
resolve_term_references(term_defs)
rules = rule_defs
rule_names = {}
for name, params, _x, _o in rules:
if name.startswith('__'):
raise GrammarError('Names starting with double-underscore are reserved (Error at %s)' % name)
if name in rule_names:
raise GrammarError("Rule '%s' defined more than once" % name)
rule_names[name] = len(params)
for name, params , expansions, _o in rules:
for i, p in enumerate(params):
if p in rule_names:
raise GrammarError("Template Parameter conflicts with rule %s (in template %s)" % (p, name))
if p in params[:i]:
raise GrammarError("Duplicate Template Parameter %s (in template %s)" % (p, name))
for temp in expansions.find_data('template_usage'):
sym = temp.children[0]
args = temp.children[1:]
if sym not in params:
if sym not in rule_names:
raise GrammarError("Template '%s' used but not defined (in rule %s)" % (sym, name))
if len(args) != rule_names[sym]:
raise GrammarError("Wrong number of template arguments used for %s "
"(expected %s, got %s) (in rule %s)"%(sym, rule_names[sym], len(args), name))
for sym in _find_used_symbols(expansions):
if sym.type == 'TERMINAL':
if sym not in terminal_names:
raise GrammarError("Token '%s' used but not defined (in rule %s)" % (sym, name))
else:
if sym not in rule_names and sym not in params:
raise GrammarError("Rule '%s' used but not defined (in rule %s)" % (sym, name))
return Grammar(rules, term_defs, ignore_names)
def load_grammar(grammar, source, re_):
return GrammarLoader(re_).load_grammar(grammar, source)
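# A minimal sketch of calling this directly (normally lark.Lark() drives it);
# `re_` is the regex module to use, e.g. the stdlib `re`:
#
#     import re
#     grammar = load_grammar('start: "a"+', '<string>', re)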
|
|
# Copyright (c) 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests For HostManager
"""
import collections
import mock
from oslo_config import cfg
from oslo_serialization import jsonutils
import six
import nova
from nova.compute import task_states
from nova.compute import vm_states
from nova import exception
from nova import objects
from nova.objects import base as obj_base
from nova.pci import stats as pci_stats
from nova.scheduler import filters
from nova.scheduler import host_manager
from nova.scheduler import utils as sched_utils
from nova import test
from nova.tests.unit import fake_instance
from nova.tests.unit import matchers
from nova.tests.unit.scheduler import fakes
from nova import utils
CONF = cfg.CONF
CONF.import_opt('scheduler_tracks_instance_changes',
'nova.scheduler.host_manager')
class FakeFilterClass1(filters.BaseHostFilter):
def host_passes(self, host_state, filter_properties):
pass
class FakeFilterClass2(filters.BaseHostFilter):
def host_passes(self, host_state, filter_properties):
pass
class HostManagerTestCase(test.NoDBTestCase):
"""Test case for HostManager class."""
@mock.patch.object(host_manager.HostManager, '_init_instance_info')
@mock.patch.object(host_manager.HostManager, '_init_aggregates')
def setUp(self, mock_init_agg, mock_init_inst):
super(HostManagerTestCase, self).setUp()
self.flags(scheduler_available_filters=['%s.%s' % (__name__, cls) for
cls in ['FakeFilterClass1',
'FakeFilterClass2']])
self.flags(scheduler_default_filters=['FakeFilterClass1'])
self.host_manager = host_manager.HostManager()
self.fake_hosts = [host_manager.HostState('fake_host%s' % x,
'fake-node') for x in range(1, 5)]
self.fake_hosts += [host_manager.HostState('fake_multihost',
'fake-node%s' % x) for x in range(1, 5)]
def test_load_filters(self):
filters = self.host_manager._load_filters()
self.assertEqual(filters, ['FakeFilterClass1'])
@mock.patch.object(nova.objects.InstanceList, 'get_by_filters')
@mock.patch.object(nova.objects.ComputeNodeList, 'get_all')
@mock.patch('nova.utils.spawn_n')
def test_init_instance_info_batches(self, mock_spawn, mock_get_all,
mock_get_by_filters):
mock_spawn.side_effect = lambda f, *a, **k: f(*a, **k)
cn_list = objects.ComputeNodeList()
for num in range(22):
host_name = 'host_%s' % num
cn_list.objects.append(objects.ComputeNode(host=host_name))
mock_get_all.return_value = cn_list
self.host_manager._init_instance_info()
self.assertEqual(mock_get_by_filters.call_count, 3)
@mock.patch.object(nova.objects.InstanceList, 'get_by_filters')
@mock.patch.object(nova.objects.ComputeNodeList, 'get_all')
@mock.patch('nova.utils.spawn_n')
def test_init_instance_info(self, mock_spawn, mock_get_all,
mock_get_by_filters):
mock_spawn.side_effect = lambda f, *a, **k: f(*a, **k)
cn1 = objects.ComputeNode(host='host1')
cn2 = objects.ComputeNode(host='host2')
inst1 = objects.Instance(host='host1', uuid='uuid1')
inst2 = objects.Instance(host='host1', uuid='uuid2')
inst3 = objects.Instance(host='host2', uuid='uuid3')
mock_get_all.return_value = objects.ComputeNodeList(objects=[cn1, cn2])
mock_get_by_filters.return_value = objects.InstanceList(
objects=[inst1, inst2, inst3])
hm = self.host_manager
hm._instance_info = {}
hm._init_instance_info()
self.assertEqual(len(hm._instance_info), 2)
fake_info = hm._instance_info['host1']
self.assertIn('uuid1', fake_info['instances'])
self.assertIn('uuid2', fake_info['instances'])
self.assertNotIn('uuid3', fake_info['instances'])
def test_default_filters(self):
default_filters = self.host_manager.default_filters
self.assertEqual(1, len(default_filters))
self.assertIsInstance(default_filters[0], FakeFilterClass1)
@mock.patch.object(host_manager.HostManager, '_init_instance_info')
@mock.patch.object(objects.AggregateList, 'get_all')
def test_init_aggregates_no_aggs(self, agg_get_all, mock_init_info):
agg_get_all.return_value = []
self.host_manager = host_manager.HostManager()
self.assertEqual({}, self.host_manager.aggs_by_id)
self.assertEqual({}, self.host_manager.host_aggregates_map)
@mock.patch.object(host_manager.HostManager, '_init_instance_info')
@mock.patch.object(objects.AggregateList, 'get_all')
def test_init_aggregates_one_agg_no_hosts(self, agg_get_all,
mock_init_info):
fake_agg = objects.Aggregate(id=1, hosts=[])
agg_get_all.return_value = [fake_agg]
self.host_manager = host_manager.HostManager()
self.assertEqual({1: fake_agg}, self.host_manager.aggs_by_id)
self.assertEqual({}, self.host_manager.host_aggregates_map)
@mock.patch.object(host_manager.HostManager, '_init_instance_info')
@mock.patch.object(objects.AggregateList, 'get_all')
def test_init_aggregates_one_agg_with_hosts(self, agg_get_all,
mock_init_info):
fake_agg = objects.Aggregate(id=1, hosts=['fake-host'])
agg_get_all.return_value = [fake_agg]
self.host_manager = host_manager.HostManager()
self.assertEqual({1: fake_agg}, self.host_manager.aggs_by_id)
self.assertEqual({'fake-host': set([1])},
self.host_manager.host_aggregates_map)
def test_update_aggregates(self):
fake_agg = objects.Aggregate(id=1, hosts=['fake-host'])
self.host_manager.update_aggregates([fake_agg])
self.assertEqual({1: fake_agg}, self.host_manager.aggs_by_id)
self.assertEqual({'fake-host': set([1])},
self.host_manager.host_aggregates_map)
def test_update_aggregates_remove_hosts(self):
fake_agg = objects.Aggregate(id=1, hosts=['fake-host'])
self.host_manager.update_aggregates([fake_agg])
self.assertEqual({1: fake_agg}, self.host_manager.aggs_by_id)
self.assertEqual({'fake-host': set([1])},
self.host_manager.host_aggregates_map)
# Let's remove the host from the aggregate and update again
fake_agg.hosts = []
self.host_manager.update_aggregates([fake_agg])
self.assertEqual({1: fake_agg}, self.host_manager.aggs_by_id)
self.assertEqual({'fake-host': set([])},
self.host_manager.host_aggregates_map)
def test_delete_aggregate(self):
fake_agg = objects.Aggregate(id=1, hosts=['fake-host'])
self.host_manager.host_aggregates_map = collections.defaultdict(
set, {'fake-host': set([1])})
self.host_manager.aggs_by_id = {1: fake_agg}
self.host_manager.delete_aggregate(fake_agg)
self.assertEqual({}, self.host_manager.aggs_by_id)
self.assertEqual({'fake-host': set([])},
self.host_manager.host_aggregates_map)
def test_choose_host_filters_not_found(self):
self.assertRaises(exception.SchedulerHostFilterNotFound,
self.host_manager._choose_host_filters,
'FakeFilterClass3')
def test_choose_host_filters(self):
# Test we return 1 correct filter object
host_filters = self.host_manager._choose_host_filters(
['FakeFilterClass2'])
self.assertEqual(1, len(host_filters))
self.assertIsInstance(host_filters[0], FakeFilterClass2)
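# Stub FakeFilterClass1._filter_one with a recorder so tests can check which
# host objects and filter properties the filtering pass received.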
def _mock_get_filtered_hosts(self, info):
info['got_objs'] = []
info['got_fprops'] = []
def fake_filter_one(_self, obj, filter_props):
info['got_objs'].append(obj)
info['got_fprops'].append(filter_props)
return True
self.stubs.Set(FakeFilterClass1, '_filter_one', fake_filter_one)
def _verify_result(self, info, result, filters=True):
for x in info['got_fprops']:
self.assertEqual(x, info['expected_fprops'])
if filters:
self.assertEqual(set(info['expected_objs']), set(info['got_objs']))
self.assertEqual(set(info['expected_objs']), set(result))
def test_get_filtered_hosts(self):
fake_properties = {'moo': 1, 'cow': 2}
info = {'expected_objs': self.fake_hosts,
'expected_fprops': fake_properties}
self._mock_get_filtered_hosts(info)
result = self.host_manager.get_filtered_hosts(self.fake_hosts,
fake_properties)
self._verify_result(info, result)
@mock.patch.object(FakeFilterClass2, '_filter_one', return_value=True)
def test_get_filtered_hosts_with_specified_filters(self, mock_filter_one):
fake_properties = {'moo': 1, 'cow': 2}
specified_filters = ['FakeFilterClass1', 'FakeFilterClass2']
info = {'expected_objs': self.fake_hosts,
'expected_fprops': fake_properties}
self._mock_get_filtered_hosts(info)
result = self.host_manager.get_filtered_hosts(self.fake_hosts,
fake_properties, filter_class_names=specified_filters)
self._verify_result(info, result)
def test_get_filtered_hosts_with_ignore(self):
fake_properties = {'ignore_hosts': ['fake_host1', 'fake_host3',
'fake_host5', 'fake_multihost']}
# [1] and [3] are host2 and host4
info = {'expected_objs': [self.fake_hosts[1], self.fake_hosts[3]],
'expected_fprops': fake_properties}
self._mock_get_filtered_hosts(info)
result = self.host_manager.get_filtered_hosts(self.fake_hosts,
fake_properties)
self._verify_result(info, result)
def test_get_filtered_hosts_with_force_hosts(self):
fake_properties = {'force_hosts': ['fake_host1', 'fake_host3',
'fake_host5']}
# [0] and [2] are host1 and host3
info = {'expected_objs': [self.fake_hosts[0], self.fake_hosts[2]],
'expected_fprops': fake_properties}
self._mock_get_filtered_hosts(info)
result = self.host_manager.get_filtered_hosts(self.fake_hosts,
fake_properties)
self._verify_result(info, result, False)
def test_get_filtered_hosts_with_no_matching_force_hosts(self):
fake_properties = {'force_hosts': ['fake_host5', 'fake_host6']}
info = {'expected_objs': [],
'expected_fprops': fake_properties}
self._mock_get_filtered_hosts(info)
result = self.host_manager.get_filtered_hosts(self.fake_hosts,
fake_properties)
self._verify_result(info, result, False)
def test_get_filtered_hosts_with_ignore_and_force_hosts(self):
# Ensure ignore_hosts processed before force_hosts in host filters.
fake_properties = {'force_hosts': ['fake_host3', 'fake_host1'],
'ignore_hosts': ['fake_host1']}
# only fake_host3 should be left.
info = {'expected_objs': [self.fake_hosts[2]],
'expected_fprops': fake_properties}
self._mock_get_filtered_hosts(info)
result = self.host_manager.get_filtered_hosts(self.fake_hosts,
fake_properties)
self._verify_result(info, result, False)
def test_get_filtered_hosts_with_force_host_and_many_nodes(self):
# Ensure all nodes returned for a host with many nodes
fake_properties = {'force_hosts': ['fake_multihost']}
info = {'expected_objs': [self.fake_hosts[4], self.fake_hosts[5],
self.fake_hosts[6], self.fake_hosts[7]],
'expected_fprops': fake_properties}
self._mock_get_filtered_hosts(info)
result = self.host_manager.get_filtered_hosts(self.fake_hosts,
fake_properties)
self._verify_result(info, result, False)
def test_get_filtered_hosts_with_force_nodes(self):
fake_properties = {'force_nodes': ['fake-node2', 'fake-node4',
'fake-node9']}
# [5] is fake-node2, [7] is fake-node4
info = {'expected_objs': [self.fake_hosts[5], self.fake_hosts[7]],
'expected_fprops': fake_properties}
self._mock_get_filtered_hosts(info)
result = self.host_manager.get_filtered_hosts(self.fake_hosts,
fake_properties)
self._verify_result(info, result, False)
def test_get_filtered_hosts_with_force_hosts_and_nodes(self):
# Ensure only overlapping results if both force host and node
fake_properties = {'force_hosts': ['fake_host1', 'fake_multihost'],
'force_nodes': ['fake-node2', 'fake-node9']}
# [5] is fake-node2
info = {'expected_objs': [self.fake_hosts[5]],
'expected_fprops': fake_properties}
self._mock_get_filtered_hosts(info)
result = self.host_manager.get_filtered_hosts(self.fake_hosts,
fake_properties)
self._verify_result(info, result, False)
def test_get_filtered_hosts_with_force_hosts_and_wrong_nodes(self):
# Ensure non-overlapping force_node and force_host yield no result
fake_properties = {'force_hosts': ['fake_multihost'],
'force_nodes': ['fake-node']}
info = {'expected_objs': [],
'expected_fprops': fake_properties}
self._mock_get_filtered_hosts(info)
result = self.host_manager.get_filtered_hosts(self.fake_hosts,
fake_properties)
self._verify_result(info, result, False)
def test_get_filtered_hosts_with_ignore_hosts_and_force_nodes(self):
# Ensure ignore_hosts can coexist with force_nodes
fake_properties = {'force_nodes': ['fake-node4', 'fake-node2'],
'ignore_hosts': ['fake_host1', 'fake_host2']}
info = {'expected_objs': [self.fake_hosts[5], self.fake_hosts[7]],
'expected_fprops': fake_properties}
self._mock_get_filtered_hosts(info)
result = self.host_manager.get_filtered_hosts(self.fake_hosts,
fake_properties)
self._verify_result(info, result, False)
def test_get_filtered_hosts_with_ignore_hosts_and_force_same_nodes(self):
# Ensure ignore_hosts is processed before force_nodes
fake_properties = {'force_nodes': ['fake_node4', 'fake_node2'],
'ignore_hosts': ['fake_multihost']}
info = {'expected_objs': [],
'expected_fprops': fake_properties}
self._mock_get_filtered_hosts(info)
result = self.host_manager.get_filtered_hosts(self.fake_hosts,
fake_properties)
self._verify_result(info, result, False)
@mock.patch.object(nova.objects.InstanceList, 'get_by_host')
def test_get_all_host_states(self, mock_get_by_host):
mock_get_by_host.return_value = objects.InstanceList()
context = 'fake_context'
self.mox.StubOutWithMock(objects.ServiceList, 'get_by_binary')
self.mox.StubOutWithMock(objects.ComputeNodeList, 'get_all')
self.mox.StubOutWithMock(host_manager.LOG, 'warning')
objects.ServiceList.get_by_binary(
context, 'nova-compute').AndReturn(fakes.SERVICES)
objects.ComputeNodeList.get_all(context).AndReturn(fakes.COMPUTE_NODES)
# node 3 host physical disk space is greater than database
host_manager.LOG.warning("Host %(hostname)s has more disk space "
"than database expected (%(physical)sgb >"
" %(database)sgb)",
{'physical': 3333, 'database': 3072,
'hostname': 'node3'})
# Invalid service
host_manager.LOG.warning("No compute service record found for "
"host %(host)s",
{'host': 'fake'})
self.mox.ReplayAll()
self.host_manager.get_all_host_states(context)
host_states_map = self.host_manager.host_state_map
self.assertEqual(len(host_states_map), 4)
# Check that .service is set properly
for i in range(4):
compute_node = fakes.COMPUTE_NODES[i]
host = compute_node['host']
node = compute_node['hypervisor_hostname']
state_key = (host, node)
self.assertEqual(host_states_map[state_key].service,
obj_base.obj_to_primitive(fakes.get_service_by_host(host)))
self.assertEqual(host_states_map[('host1', 'node1')].free_ram_mb,
512)
# 511GB
self.assertEqual(host_states_map[('host1', 'node1')].free_disk_mb,
524288)
self.assertEqual(host_states_map[('host2', 'node2')].free_ram_mb,
1024)
# 1023GB
self.assertEqual(host_states_map[('host2', 'node2')].free_disk_mb,
1048576)
self.assertEqual(host_states_map[('host3', 'node3')].free_ram_mb,
3072)
# 3071GB
self.assertEqual(host_states_map[('host3', 'node3')].free_disk_mb,
3145728)
self.assertThat(
objects.NUMATopology.obj_from_db_obj(
host_states_map[('host3', 'node3')].numa_topology
)._to_dict(),
matchers.DictMatches(fakes.NUMA_TOPOLOGY._to_dict()))
self.assertEqual(host_states_map[('host4', 'node4')].free_ram_mb,
8192)
# 8191GB
self.assertEqual(host_states_map[('host4', 'node4')].free_disk_mb,
8388608)
@mock.patch.object(nova.objects.InstanceList, 'get_by_host')
@mock.patch.object(host_manager.HostState, 'update_from_compute_node')
@mock.patch.object(objects.ComputeNodeList, 'get_all')
@mock.patch.object(objects.ServiceList, 'get_by_binary')
def test_get_all_host_states_with_no_aggs(self, svc_get_by_binary,
cn_get_all, update_from_cn,
mock_get_by_host):
svc_get_by_binary.return_value = [objects.Service(host='fake')]
cn_get_all.return_value = [
objects.ComputeNode(host='fake', hypervisor_hostname='fake')]
mock_get_by_host.return_value = objects.InstanceList()
self.host_manager.host_aggregates_map = collections.defaultdict(set)
self.host_manager.get_all_host_states('fake-context')
host_state = self.host_manager.host_state_map[('fake', 'fake')]
self.assertEqual([], host_state.aggregates)
@mock.patch.object(nova.objects.InstanceList, 'get_by_host')
@mock.patch.object(host_manager.HostState, 'update_from_compute_node')
@mock.patch.object(objects.ComputeNodeList, 'get_all')
@mock.patch.object(objects.ServiceList, 'get_by_binary')
def test_get_all_host_states_with_matching_aggs(self, svc_get_by_binary,
cn_get_all,
update_from_cn,
mock_get_by_host):
svc_get_by_binary.return_value = [objects.Service(host='fake')]
cn_get_all.return_value = [
objects.ComputeNode(host='fake', hypervisor_hostname='fake')]
mock_get_by_host.return_value = objects.InstanceList()
fake_agg = objects.Aggregate(id=1)
self.host_manager.host_aggregates_map = collections.defaultdict(
set, {'fake': set([1])})
self.host_manager.aggs_by_id = {1: fake_agg}
self.host_manager.get_all_host_states('fake-context')
host_state = self.host_manager.host_state_map[('fake', 'fake')]
self.assertEqual([fake_agg], host_state.aggregates)
@mock.patch.object(nova.objects.InstanceList, 'get_by_host')
@mock.patch.object(host_manager.HostState, 'update_from_compute_node')
@mock.patch.object(objects.ComputeNodeList, 'get_all')
@mock.patch.object(objects.ServiceList, 'get_by_binary')
def test_get_all_host_states_with_not_matching_aggs(self,
svc_get_by_binary,
cn_get_all,
update_from_cn,
mock_get_by_host):
svc_get_by_binary.return_value = [objects.Service(host='fake'),
objects.Service(host='other')]
cn_get_all.return_value = [
objects.ComputeNode(host='fake', hypervisor_hostname='fake'),
objects.ComputeNode(host='other', hypervisor_hostname='other')]
mock_get_by_host.return_value = objects.InstanceList()
fake_agg = objects.Aggregate(id=1)
self.host_manager.host_aggregates_map = collections.defaultdict(
set, {'other': set([1])})
self.host_manager.aggs_by_id = {1: fake_agg}
self.host_manager.get_all_host_states('fake-context')
host_state = self.host_manager.host_state_map[('fake', 'fake')]
self.assertEqual([], host_state.aggregates)
@mock.patch('nova.objects.ServiceList.get_by_binary')
@mock.patch('nova.objects.ComputeNodeList.get_all')
@mock.patch('nova.objects.InstanceList.get_by_host')
def test_get_all_host_states_updated(self, mock_get_by_host,
mock_get_all_comp,
mock_get_svc_by_binary):
mock_get_all_comp.return_value = fakes.COMPUTE_NODES
mock_get_svc_by_binary.return_value = fakes.SERVICES
context = 'fake_context'
hm = self.host_manager
inst1 = objects.Instance(uuid='uuid1')
cn1 = objects.ComputeNode(host='host1')
hm._instance_info = {'host1': {'instances': {'uuid1': inst1},
'updated': True}}
host_state = host_manager.HostState('host1', cn1)
self.assertFalse(host_state.instances)
mock_get_by_host.return_value = None
hm._add_instance_info(context, cn1, host_state)
self.assertFalse(mock_get_by_host.called)
self.assertTrue(host_state.instances)
self.assertEqual(host_state.instances['uuid1'], inst1)
@mock.patch('nova.objects.ServiceList.get_by_binary')
@mock.patch('nova.objects.ComputeNodeList.get_all')
@mock.patch('nova.objects.InstanceList.get_by_host')
def test_get_all_host_states_not_updated(self, mock_get_by_host,
mock_get_all_comp,
mock_get_svc_by_binary):
mock_get_all_comp.return_value = fakes.COMPUTE_NODES
mock_get_svc_by_binary.return_value = fakes.SERVICES
context = 'fake_context'
hm = self.host_manager
inst1 = objects.Instance(uuid='uuid1')
cn1 = objects.ComputeNode(host='host1')
hm._instance_info = {'host1': {'instances': {'uuid1': inst1},
'updated': False}}
host_state = host_manager.HostState('host1', cn1)
self.assertFalse(host_state.instances)
mock_get_by_host.return_value = objects.InstanceList(objects=[inst1])
hm._add_instance_info(context, cn1, host_state)
mock_get_by_host.assert_called_once_with(context, cn1.host)
self.assertTrue(host_state.instances)
self.assertEqual(host_state.instances['uuid1'], inst1)
@mock.patch('nova.objects.InstanceList.get_by_host')
def test_recreate_instance_info(self, mock_get_by_host):
host_name = 'fake_host'
inst1 = fake_instance.fake_instance_obj('fake_context', uuid='aaa',
host=host_name)
inst2 = fake_instance.fake_instance_obj('fake_context', uuid='bbb',
host=host_name)
orig_inst_dict = {inst1.uuid: inst1, inst2.uuid: inst2}
new_inst_list = objects.InstanceList(objects=[inst1, inst2])
mock_get_by_host.return_value = new_inst_list
self.host_manager._instance_info = {
host_name: {
'instances': orig_inst_dict,
'updated': True,
}}
self.host_manager._recreate_instance_info('fake_context', host_name)
new_info = self.host_manager._instance_info[host_name]
self.assertEqual(len(new_info['instances']), len(new_inst_list))
self.assertFalse(new_info['updated'])
def test_update_instance_info(self):
host_name = 'fake_host'
inst1 = fake_instance.fake_instance_obj('fake_context', uuid='aaa',
host=host_name)
inst2 = fake_instance.fake_instance_obj('fake_context', uuid='bbb',
host=host_name)
orig_inst_dict = {inst1.uuid: inst1, inst2.uuid: inst2}
self.host_manager._instance_info = {
host_name: {
'instances': orig_inst_dict,
'updated': False,
}}
inst3 = fake_instance.fake_instance_obj('fake_context', uuid='ccc',
host=host_name)
inst4 = fake_instance.fake_instance_obj('fake_context', uuid='ddd',
host=host_name)
update = objects.InstanceList(objects=[inst3, inst4])
self.host_manager.update_instance_info('fake_context', host_name,
update)
new_info = self.host_manager._instance_info[host_name]
self.assertEqual(len(new_info['instances']), 4)
self.assertTrue(new_info['updated'])
def test_update_instance_info_unknown_host(self):
self.host_manager._recreate_instance_info = mock.MagicMock()
host_name = 'fake_host'
inst1 = fake_instance.fake_instance_obj('fake_context', uuid='aaa',
host=host_name)
inst2 = fake_instance.fake_instance_obj('fake_context', uuid='bbb',
host=host_name)
orig_inst_dict = {inst1.uuid: inst1, inst2.uuid: inst2}
self.host_manager._instance_info = {
host_name: {
'instances': orig_inst_dict,
'updated': False,
}}
bad_host = 'bad_host'
inst3 = fake_instance.fake_instance_obj('fake_context', uuid='ccc',
host=bad_host)
inst_list3 = objects.InstanceList(objects=[inst3])
self.host_manager.update_instance_info('fake_context', bad_host,
inst_list3)
new_info = self.host_manager._instance_info[host_name]
self.host_manager._recreate_instance_info.assert_called_once_with(
'fake_context', bad_host)
self.assertEqual(len(new_info['instances']), len(orig_inst_dict))
self.assertFalse(new_info['updated'])
def test_delete_instance_info(self):
host_name = 'fake_host'
inst1 = fake_instance.fake_instance_obj('fake_context', uuid='aaa',
host=host_name)
inst2 = fake_instance.fake_instance_obj('fake_context', uuid='bbb',
host=host_name)
orig_inst_dict = {inst1.uuid: inst1, inst2.uuid: inst2}
self.host_manager._instance_info = {
host_name: {
'instances': orig_inst_dict,
'updated': False,
}}
self.host_manager.delete_instance_info('fake_context', host_name,
inst1.uuid)
new_info = self.host_manager._instance_info[host_name]
self.assertEqual(len(new_info['instances']), 1)
self.assertTrue(new_info['updated'])
def test_delete_instance_info_unknown_host(self):
self.host_manager._recreate_instance_info = mock.MagicMock()
host_name = 'fake_host'
inst1 = fake_instance.fake_instance_obj('fake_context', uuid='aaa',
host=host_name)
inst2 = fake_instance.fake_instance_obj('fake_context', uuid='bbb',
host=host_name)
orig_inst_dict = {inst1.uuid: inst1, inst2.uuid: inst2}
self.host_manager._instance_info = {
host_name: {
'instances': orig_inst_dict,
'updated': False,
}}
bad_host = 'bad_host'
self.host_manager.delete_instance_info('fake_context', bad_host, 'aaa')
new_info = self.host_manager._instance_info[host_name]
self.host_manager._recreate_instance_info.assert_called_once_with(
'fake_context', bad_host)
self.assertEqual(len(new_info['instances']), len(orig_inst_dict))
self.assertFalse(new_info['updated'])
def test_sync_instance_info(self):
self.host_manager._recreate_instance_info = mock.MagicMock()
host_name = 'fake_host'
inst1 = fake_instance.fake_instance_obj('fake_context', uuid='aaa',
host=host_name)
inst2 = fake_instance.fake_instance_obj('fake_context', uuid='bbb',
host=host_name)
orig_inst_dict = {inst1.uuid: inst1, inst2.uuid: inst2}
self.host_manager._instance_info = {
host_name: {
'instances': orig_inst_dict,
'updated': False,
}}
self.host_manager.sync_instance_info('fake_context', host_name,
['bbb', 'aaa'])
new_info = self.host_manager._instance_info[host_name]
self.assertFalse(self.host_manager._recreate_instance_info.called)
self.assertTrue(new_info['updated'])
def test_sync_instance_info_fail(self):
self.host_manager._recreate_instance_info = mock.MagicMock()
host_name = 'fake_host'
inst1 = fake_instance.fake_instance_obj('fake_context', uuid='aaa',
host=host_name)
inst2 = fake_instance.fake_instance_obj('fake_context', uuid='bbb',
host=host_name)
orig_inst_dict = {inst1.uuid: inst1, inst2.uuid: inst2}
self.host_manager._instance_info = {
host_name: {
'instances': orig_inst_dict,
'updated': False,
}}
self.host_manager.sync_instance_info('fake_context', host_name,
['bbb', 'aaa', 'new'])
new_info = self.host_manager._instance_info[host_name]
self.host_manager._recreate_instance_info.assert_called_once_with(
'fake_context', host_name)
self.assertFalse(new_info['updated'])
class HostManagerChangedNodesTestCase(test.NoDBTestCase):
"""Test case for HostManager class."""
@mock.patch.object(host_manager.HostManager, '_init_instance_info')
@mock.patch.object(host_manager.HostManager, '_init_aggregates')
def setUp(self, mock_init_agg, mock_init_inst):
super(HostManagerChangedNodesTestCase, self).setUp()
self.host_manager = host_manager.HostManager()
self.fake_hosts = [
host_manager.HostState('host1', 'node1'),
host_manager.HostState('host2', 'node2'),
host_manager.HostState('host3', 'node3'),
host_manager.HostState('host4', 'node4')
]
@mock.patch('nova.objects.InstanceList.get_by_host')
def test_get_all_host_states(self, mock_get_by_host):
mock_get_by_host.return_value = objects.InstanceList()
context = 'fake_context'
self.mox.StubOutWithMock(objects.ServiceList, 'get_by_binary')
self.mox.StubOutWithMock(objects.ComputeNodeList, 'get_all')
objects.ServiceList.get_by_binary(
context, 'nova-compute').AndReturn(fakes.SERVICES)
objects.ComputeNodeList.get_all(context).AndReturn(fakes.COMPUTE_NODES)
self.mox.ReplayAll()
self.host_manager.get_all_host_states(context)
host_states_map = self.host_manager.host_state_map
self.assertEqual(len(host_states_map), 4)
@mock.patch('nova.objects.InstanceList.get_by_host')
def test_get_all_host_states_after_delete_one(self, mock_get_by_host):
mock_get_by_host.return_value = objects.InstanceList()
context = 'fake_context'
self.mox.StubOutWithMock(objects.ServiceList, 'get_by_binary')
self.mox.StubOutWithMock(objects.ComputeNodeList, 'get_all')
# all nodes active for first call
objects.ServiceList.get_by_binary(
context, 'nova-compute').AndReturn(fakes.SERVICES)
objects.ComputeNodeList.get_all(context).AndReturn(fakes.COMPUTE_NODES)
# remove node4 for second call
running_nodes = [n for n in fakes.COMPUTE_NODES
if n.get('hypervisor_hostname') != 'node4']
objects.ServiceList.get_by_binary(
context, 'nova-compute').AndReturn(fakes.SERVICES)
objects.ComputeNodeList.get_all(context).AndReturn(running_nodes)
self.mox.ReplayAll()
self.host_manager.get_all_host_states(context)
self.host_manager.get_all_host_states(context)
host_states_map = self.host_manager.host_state_map
self.assertEqual(len(host_states_map), 3)
@mock.patch('nova.objects.InstanceList.get_by_host')
def test_get_all_host_states_after_delete_all(self, mock_get_by_host):
mock_get_by_host.return_value = objects.InstanceList()
context = 'fake_context'
self.mox.StubOutWithMock(objects.ServiceList, 'get_by_binary')
self.mox.StubOutWithMock(objects.ComputeNodeList, 'get_all')
# all nodes active for first call
objects.ServiceList.get_by_binary(
context, 'nova-compute').AndReturn(fakes.SERVICES)
objects.ComputeNodeList.get_all(context).AndReturn(fakes.COMPUTE_NODES)
# remove all nodes for second call
objects.ServiceList.get_by_binary(
context, 'nova-compute').AndReturn(fakes.SERVICES)
objects.ComputeNodeList.get_all(context).AndReturn([])
self.mox.ReplayAll()
self.host_manager.get_all_host_states(context)
self.host_manager.get_all_host_states(context)
host_states_map = self.host_manager.host_state_map
self.assertEqual(len(host_states_map), 0)
class HostStateTestCase(test.NoDBTestCase):
"""Test case for HostState class."""
# update_from_compute_node() and consume_from_instance() are tested
# in HostManagerTestCase.test_get_all_host_states()
def test_stat_consumption_from_compute_node(self):
stats = {
'num_instances': '5',
'num_proj_12345': '3',
'num_proj_23456': '1',
'num_vm_%s' % vm_states.BUILDING: '2',
'num_vm_%s' % vm_states.SUSPENDED: '1',
'num_task_%s' % task_states.RESIZE_MIGRATING: '1',
'num_task_%s' % task_states.MIGRATING: '2',
'num_os_type_linux': '4',
'num_os_type_windoze': '1',
'io_workload': '42',
}
hyper_ver_int = utils.convert_version_to_int('6.0.0')
compute = objects.ComputeNode(
stats=stats, memory_mb=1, free_disk_gb=0, local_gb=0,
local_gb_used=0, free_ram_mb=0, vcpus=0, vcpus_used=0,
disk_available_least=None,
updated_at=None, host_ip='127.0.0.1',
hypervisor_type='htype',
hypervisor_hostname='hostname', cpu_info='cpu_info',
supported_hv_specs=[],
hypervisor_version=hyper_ver_int, numa_topology=None,
pci_device_pools=None, metrics=None)
host = host_manager.HostState("fakehost", "fakenode")
host.update_from_compute_node(compute)
self.assertEqual(5, host.num_instances)
self.assertEqual(42, host.num_io_ops)
self.assertEqual(10, len(host.stats))
self.assertEqual('127.0.0.1', str(host.host_ip))
self.assertEqual('htype', host.hypervisor_type)
self.assertEqual('hostname', host.hypervisor_hostname)
self.assertEqual('cpu_info', host.cpu_info)
self.assertEqual([], host.supported_instances)
self.assertEqual(hyper_ver_int, host.hypervisor_version)
def test_stat_consumption_from_compute_node_non_pci(self):
stats = {
'num_instances': '5',
'num_proj_12345': '3',
'num_proj_23456': '1',
'num_vm_%s' % vm_states.BUILDING: '2',
'num_vm_%s' % vm_states.SUSPENDED: '1',
'num_task_%s' % task_states.RESIZE_MIGRATING: '1',
'num_task_%s' % task_states.MIGRATING: '2',
'num_os_type_linux': '4',
'num_os_type_windoze': '1',
'io_workload': '42',
}
hyper_ver_int = utils.convert_version_to_int('6.0.0')
compute = objects.ComputeNode(
stats=stats, memory_mb=0, free_disk_gb=0, local_gb=0,
local_gb_used=0, free_ram_mb=0, vcpus=0, vcpus_used=0,
disk_available_least=None,
updated_at=None, host_ip='127.0.0.1',
hypervisor_type='htype',
hypervisor_hostname='hostname', cpu_info='cpu_info',
supported_hv_specs=[],
hypervisor_version=hyper_ver_int, numa_topology=None,
pci_device_pools=None, metrics=None)
host = host_manager.HostState("fakehost", "fakenode")
host.update_from_compute_node(compute)
self.assertEqual([], host.pci_stats.pools)
self.assertEqual(hyper_ver_int, host.hypervisor_version)
def test_stat_consumption_from_compute_node_rescue_unshelving(self):
stats = {
'num_instances': '5',
'num_proj_12345': '3',
'num_proj_23456': '1',
'num_vm_%s' % vm_states.BUILDING: '2',
'num_vm_%s' % vm_states.SUSPENDED: '1',
'num_task_%s' % task_states.UNSHELVING: '1',
'num_task_%s' % task_states.RESCUING: '2',
'num_os_type_linux': '4',
'num_os_type_windoze': '1',
'io_workload': '42',
}
hyper_ver_int = utils.convert_version_to_int('6.0.0')
compute = objects.ComputeNode(
stats=stats, memory_mb=0, free_disk_gb=0, local_gb=0,
local_gb_used=0, free_ram_mb=0, vcpus=0, vcpus_used=0,
disk_available_least=None,
updated_at=None, host_ip='127.0.0.1',
hypervisor_type='htype',
hypervisor_hostname='hostname', cpu_info='cpu_info',
supported_hv_specs=[],
hypervisor_version=hyper_ver_int, numa_topology=None,
pci_device_pools=None, metrics=None)
host = host_manager.HostState("fakehost", "fakenode")
host.update_from_compute_node(compute)
self.assertEqual(5, host.num_instances)
self.assertEqual(42, host.num_io_ops)
self.assertEqual(10, len(host.stats))
self.assertEqual([], host.pci_stats.pools)
self.assertEqual(hyper_ver_int, host.hypervisor_version)
@mock.patch('nova.virt.hardware.get_host_numa_usage_from_instance')
@mock.patch('nova.virt.hardware.numa_fit_instance_to_host')
@mock.patch('nova.virt.hardware.instance_topology_from_instance')
@mock.patch('nova.virt.hardware.host_topology_and_format_from_host')
def test_stat_consumption_from_instance(self, host_topo_mock,
instance_topo_mock,
numa_fit_mock,
numa_usage_mock):
fake_numa_topology = mock.Mock()
host_topo_mock.return_value = ('fake-host-topology', None)
numa_usage_mock.return_value = 'fake-consumed-once'
numa_fit_mock.return_value = 'fake-fitted-once'
instance_topo_mock.return_value = fake_numa_topology
instance = dict(root_gb=0, ephemeral_gb=0, memory_mb=0, vcpus=0,
project_id='12345', vm_state=vm_states.BUILDING,
task_state=task_states.SCHEDULING, os_type='Linux',
uuid='fake-uuid',
numa_topology=fake_numa_topology,
pci_requests={'requests': []})
host = host_manager.HostState("fakehost", "fakenode")
host.consume_from_instance(instance)
numa_fit_mock.assert_called_once_with('fake-host-topology',
fake_numa_topology,
limits=None, pci_requests=None,
pci_stats=None)
numa_usage_mock.assert_called_once_with(host, instance)
self.assertEqual('fake-consumed-once', host.numa_topology)
self.assertEqual('fake-fitted-once', instance['numa_topology'])
instance = dict(root_gb=0, ephemeral_gb=0, memory_mb=0, vcpus=0,
project_id='12345', vm_state=vm_states.PAUSED,
task_state=None, os_type='Linux',
uuid='fake-uuid',
numa_topology=fake_numa_topology)
numa_usage_mock.return_value = 'fake-consumed-twice'
numa_fit_mock.return_value = 'fake-fitted-twice'
host.consume_from_instance(instance)
self.assertEqual('fake-fitted-twice', instance['numa_topology'])
self.assertEqual(2, host.num_instances)
self.assertEqual(1, host.num_io_ops)
self.assertEqual(2, numa_usage_mock.call_count)
self.assertEqual(((host, instance),), numa_usage_mock.call_args)
self.assertEqual('fake-consumed-twice', host.numa_topology)
def test_stat_consumption_from_instance_pci(self):
inst_topology = objects.InstanceNUMATopology(
cells = [objects.InstanceNUMACell(
cpuset=set([0]),
memory=512, id=0)])
fake_requests = [{'request_id': 'fake_request1', 'count': 1,
'spec': [{'vendor_id': '8086'}]}]
fake_requests_obj = objects.InstancePCIRequests(
requests=[objects.InstancePCIRequest(**r)
for r in fake_requests],
instance_uuid='fake-uuid')
instance = objects.Instance(root_gb=0, ephemeral_gb=0, memory_mb=512,
vcpus=1,
project_id='12345', vm_state=vm_states.BUILDING,
task_state=task_states.SCHEDULING, os_type='Linux',
uuid='fake-uuid',
numa_topology=inst_topology,
pci_requests=fake_requests_obj,
id = 1243)
req_spec = sched_utils.build_request_spec(None,
None,
[instance],
objects.Flavor(
root_gb=0,
ephemeral_gb=0,
memory_mb=1024,
vcpus=1))
host = host_manager.HostState("fakehost", "fakenode")
host.pci_stats = pci_stats.PciDeviceStats(
[objects.PciDevicePool(vendor_id='8086',
product_id='15ed',
numa_node=1,
count=1)])
host.numa_topology = fakes.NUMA_TOPOLOGY
host.consume_from_instance(req_spec['instance_properties'])
self.assertIsInstance(req_spec['instance_properties']['numa_topology'],
objects.InstanceNUMATopology)
self.assertEqual(512, host.numa_topology.cells[1].memory_usage)
self.assertEqual(1, host.numa_topology.cells[1].cpu_usage)
self.assertEqual(0, len(host.pci_stats.pools))
def test_resources_consumption_from_compute_node(self):
metrics = [
dict(name='res1',
value=1.0,
source='source1',
timestamp=None),
dict(name='res2',
value="string2",
source='source2',
timestamp=None),
]
hyper_ver_int = utils.convert_version_to_int('6.0.0')
compute = objects.ComputeNode(
metrics=jsonutils.dumps(metrics),
memory_mb=0, free_disk_gb=0, local_gb=0,
local_gb_used=0, free_ram_mb=0, vcpus=0, vcpus_used=0,
disk_available_least=None,
updated_at=None, host_ip='127.0.0.1',
hypervisor_type='htype',
hypervisor_hostname='hostname', cpu_info='cpu_info',
supported_hv_specs=[],
hypervisor_version=hyper_ver_int,
numa_topology=fakes.NUMA_TOPOLOGY._to_json(),
stats=None, pci_device_pools=None)
host = host_manager.HostState("fakehost", "fakenode")
host.update_from_compute_node(compute)
self.assertEqual(len(host.metrics), 2)
self.assertEqual(set(['res1', 'res2']), set(host.metrics.keys()))
self.assertEqual(1.0, host.metrics['res1'].value)
self.assertEqual('source1', host.metrics['res1'].source)
self.assertEqual('string2', host.metrics['res2'].value)
self.assertEqual('source2', host.metrics['res2'].source)
self.assertIsInstance(host.numa_topology, six.string_types)
|
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import itertools
import six
from webob import exc
from heat.api.openstack.v1 import util
from heat.common.i18n import _
from heat.common import identifier
from heat.common import param_utils
from heat.common import serializers
from heat.common import wsgi
from heat.rpc import api as rpc_api
from heat.rpc import client as rpc_client
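# Convert an engine (RPC) resource dict into its REST representation: build
# 'links' from the resource identity, fold RES_ACTION into the status string,
# expose RES_NAME as 'logical_resource_id', and drop engine-internal keys.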
def format_resource(req, res, keys=None):
keys = keys or []
def include_key(k):
return k in keys if keys else True
def transform(key, value):
if not include_key(key):
return
if key == rpc_api.RES_ID:
identity = identifier.ResourceIdentifier(**value)
links = [util.make_link(req, identity),
util.make_link(req, identity.stack(), 'stack')]
nested_id = res.get(rpc_api.RES_NESTED_STACK_ID)
if nested_id:
nested_identity = identifier.HeatIdentifier(**nested_id)
links.append(util.make_link(req, nested_identity, 'nested'))
yield ('links', links)
elif (key == rpc_api.RES_STACK_NAME or
key == rpc_api.RES_STACK_ID or
key == rpc_api.RES_ACTION or
key == rpc_api.RES_NESTED_STACK_ID):
return
elif (key == rpc_api.RES_METADATA):
return
elif (key == rpc_api.RES_STATUS and rpc_api.RES_ACTION in res):
# To avoid breaking API compatibility, we join RES_ACTION
# and RES_STATUS, so the API format doesn't expose the
# internal split of state into action/status
yield (key, '_'.join((res[rpc_api.RES_ACTION], value)))
elif (key == rpc_api.RES_NAME):
yield ('logical_resource_id', value)
yield (key, value)
else:
yield (key, value)
return dict(itertools.chain.from_iterable(
transform(k, v) for k, v in res.items()))
class ResourceController(object):
"""WSGI controller for Resources in Heat v1 API.
Implements the API actions.
"""
# Define request scope (must match what is in policy.json)
REQUEST_SCOPE = 'resource'
def __init__(self, options):
self.options = options
self.rpc_client = rpc_client.EngineClient()
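# Read an optional query parameter through the supplied extractor, translating
# ValueError into HTTP 400; fall back to the default when the key is absent.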
def _extract_to_param(self, req, rpc_param, extractor, default):
key = rpc_param
if key in req.params:
try:
return extractor(key, req.params[key])
except ValueError as e:
raise exc.HTTPBadRequest(six.text_type(e))
else:
return default
@util.identified_stack
def index(self, req, identity):
"""Lists information for all resources."""
whitelist = {
'type': 'mixed',
'status': 'mixed',
'name': 'mixed',
'action': 'mixed',
'id': 'mixed',
'physical_resource_id': 'mixed'
}
invalid_keys = (set(req.params.keys()) -
set(list(whitelist) + [rpc_api.PARAM_NESTED_DEPTH,
rpc_api.PARAM_WITH_DETAIL]))
if invalid_keys:
raise exc.HTTPBadRequest(_('Invalid filter parameters %s') %
six.text_type(list(invalid_keys)))
nested_depth = self._extract_to_param(req,
rpc_api.PARAM_NESTED_DEPTH,
param_utils.extract_int,
default=0)
with_detail = self._extract_to_param(req,
rpc_api.PARAM_WITH_DETAIL,
param_utils.extract_bool,
default=False)
params = util.get_allowed_params(req.params, whitelist)
res_list = self.rpc_client.list_stack_resources(req.context,
identity,
nested_depth,
with_detail,
filters=params)
return {'resources': [format_resource(req, res) for res in res_list]}
@util.identified_stack
def show(self, req, identity, resource_name):
"""Gets detailed information for a resource."""
whitelist = {'with_attr': util.PARAM_TYPE_MULTI}
params = util.get_allowed_params(req.params, whitelist)
if 'with_attr' not in params:
params['with_attr'] = None
res = self.rpc_client.describe_stack_resource(req.context,
identity,
resource_name,
**params)
return {'resource': format_resource(req, res)}
@util.identified_stack
def metadata(self, req, identity, resource_name):
"""Gets metadata information for a resource."""
res = self.rpc_client.describe_stack_resource(req.context,
identity,
resource_name)
return {rpc_api.RES_METADATA: res[rpc_api.RES_METADATA]}
@util.identified_stack
def signal(self, req, identity, resource_name, body=None):
self.rpc_client.resource_signal(req.context,
stack_identity=identity,
resource_name=resource_name,
details=body)
@util.identified_stack
def mark_unhealthy(self, req, identity, resource_name, body):
"""Mark a resource as healthy or unhealthy."""
data = dict()
VALID_KEYS = (RES_UPDATE_MARK_UNHEALTHY, RES_UPDATE_STATUS_REASON) = (
'mark_unhealthy', rpc_api.RES_STATUS_DATA)
invalid_keys = set(body) - set(VALID_KEYS)
if invalid_keys:
raise exc.HTTPBadRequest(_("Invalid keys in resource "
"mark unhealthy %s") % invalid_keys)
if RES_UPDATE_MARK_UNHEALTHY not in body:
raise exc.HTTPBadRequest(
_("Missing mandatory (%s) key from mark unhealthy "
"request") % RES_UPDATE_MARK_UNHEALTHY)
try:
data[RES_UPDATE_MARK_UNHEALTHY] = param_utils.extract_bool(
RES_UPDATE_MARK_UNHEALTHY,
body[RES_UPDATE_MARK_UNHEALTHY])
except ValueError as e:
raise exc.HTTPBadRequest(six.text_type(e))
data[RES_UPDATE_STATUS_REASON] = body.get(RES_UPDATE_STATUS_REASON, "")
self.rpc_client.resource_mark_unhealthy(req.context,
stack_identity=identity,
resource_name=resource_name,
**data)
def create_resource(options):
"""Resources resource factory method."""
deserializer = wsgi.JSONRequestDeserializer()
serializer = serializers.JSONResponseSerializer()
return wsgi.Resource(ResourceController(options), deserializer, serializer)
|
|
""" Cisco_IOS_XR_infra_dumper_cfg
This module contains a collection of YANG definitions
for Cisco IOS\-XR infra\-dumper package configuration.
This module contains definitions
for the following management objects\:
exception\: Core dump configuration commands
Copyright (c) 2013\-2015 by Cisco Systems, Inc.
All rights reserved.
"""
import re
import collections
from enum import Enum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk.errors import YPYError, YPYModelError
class Exception(object):
"""
Core dump configuration commands
.. attribute:: choice1
Preference of the dump location
**type**\: :py:class:`Choice1 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_dumper_cfg.Exception.Choice1>`
.. attribute:: choice2
Preference of the dump location
**type**\: :py:class:`Choice2 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_dumper_cfg.Exception.Choice2>`
.. attribute:: choice3
Preference of the dump location
**type**\: :py:class:`Choice3 <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_dumper_cfg.Exception.Choice3>`
.. attribute:: kernel_debugger
Enable kernel debugger
**type**\: :py:class:`Empty <ydk.types.Empty>`
.. attribute:: packet_memory
Specify 'true' to dump packet memory for all processes, 'false' to disable dump of packet memory
**type**\: bool
.. attribute:: sparse
Specify 'true' to enable sparse core dump, 'false' to disable sparse core dump
**type**\: bool
.. attribute:: sparse_size
Switch to sparse core dump at this size
**type**\: int
**range:** 1..4095
"""
_prefix = 'infra-dumper-cfg'
_revision = '2015-11-09'
def __init__(self):
self.choice1 = None
self.choice2 = None
self.choice3 = None
self.kernel_debugger = None
self.packet_memory = None
self.sparse = None
self.sparse_size = None
class Choice1(object):
"""
Preference of the dump location
.. attribute:: compress
Specify 'true' to compress core files dumped on this path, 'false' to not compress
**type**\: bool
.. attribute:: file_path
Protocol and directory
**type**\: str
.. attribute:: filename
Dump filename
**type**\: str
.. attribute:: higher_limit
Higher limit. This is required if Filename is specified
**type**\: int
**range:** 5..64
.. attribute:: lower_limit
Lower limit. This is required if Filename is specified
**type**\: int
**range:** 0..4
.. attribute:: _is_presence
Is present if this instance represents presence container else not
**type**\: bool
This class is a :ref:`presence class<presence-class>`
"""
_prefix = 'infra-dumper-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self._is_presence = True
self.compress = None
self.file_path = None
self.filename = None
self.higher_limit = None
self.lower_limit = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-infra-dumper-cfg:exception/Cisco-IOS-XR-infra-dumper-cfg:choice1'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self._is_presence:
return True
if self.compress is not None:
return True
if self.file_path is not None:
return True
if self.filename is not None:
return True
if self.higher_limit is not None:
return True
if self.lower_limit is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_dumper_cfg as meta
return meta._meta_table['Exception.Choice1']['meta_info']
class Choice3(object):
"""
Preference of the dump location
.. attribute:: compress
Specify 'true' to compress core files dumped on this path, 'false' to not compress
**type**\: bool
.. attribute:: file_path
Protocol and directory
**type**\: str
.. attribute:: filename
Dump filename
**type**\: str
.. attribute:: higher_limit
Higher limit. This is required if Filename is specified
**type**\: int
**range:** 5..64
.. attribute:: lower_limit
Lower limit. This is required if Filename is specified
**type**\: int
**range:** 0..4
.. attribute:: _is_presence
Is present if this instance represents presence container else not
**type**\: bool
This class is a :ref:`presence class<presence-class>`
"""
_prefix = 'infra-dumper-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self._is_presence = True
self.compress = None
self.file_path = None
self.filename = None
self.higher_limit = None
self.lower_limit = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-infra-dumper-cfg:exception/Cisco-IOS-XR-infra-dumper-cfg:choice3'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self._is_presence:
return True
if self.compress is not None:
return True
if self.file_path is not None:
return True
if self.filename is not None:
return True
if self.higher_limit is not None:
return True
if self.lower_limit is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_dumper_cfg as meta
return meta._meta_table['Exception.Choice3']['meta_info']
class Choice2(object):
"""
Preference of the dump location
.. attribute:: compress
Specify 'true' to compress core files dumped on this path, 'false' to not compress
**type**\: bool
.. attribute:: file_path
Protocol and directory
**type**\: str
.. attribute:: filename
Dump filename
**type**\: str
.. attribute:: higher_limit
Higher limit. This is required if Filename is specified
**type**\: int
**range:** 5..64
.. attribute:: lower_limit
Lower limit. This is required if Filename is specified
**type**\: int
**range:** 0..4
.. attribute:: _is_presence
Is present if this instance represents presence container else not
**type**\: bool
This class is a :ref:`presence class<presence-class>`
"""
_prefix = 'infra-dumper-cfg'
_revision = '2015-11-09'
def __init__(self):
self.parent = None
self._is_presence = True
self.compress = None
self.file_path = None
self.filename = None
self.higher_limit = None
self.lower_limit = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-infra-dumper-cfg:exception/Cisco-IOS-XR-infra-dumper-cfg:choice2'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self._is_presence:
return True
if self.compress is not None:
return True
if self.file_path is not None:
return True
if self.filename is not None:
return True
if self.higher_limit is not None:
return True
if self.lower_limit is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_dumper_cfg as meta
return meta._meta_table['Exception.Choice2']['meta_info']
@property
def _common_path(self):
return '/Cisco-IOS-XR-infra-dumper-cfg:exception'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if not self.is_config():
return False
if self.choice1 is not None and self.choice1._has_data():
return True
if self.choice2 is not None and self.choice2._has_data():
return True
if self.choice3 is not None and self.choice3._has_data():
return True
if self.kernel_debugger is not None:
return True
if self.packet_memory is not None:
return True
if self.sparse is not None:
return True
if self.sparse_size is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_infra_dumper_cfg as meta
return meta._meta_table['Exception']['meta_info']
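# Minimal usage sketch (device details are hypothetical; assumes the standard
# ydk CRUDService / NetconfServiceProvider workflow for generated config models):
#
#     from ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_dumper_cfg import Exception as ExceptionCfg
#     from ydk.providers import NetconfServiceProvider
#     from ydk.services import CRUDService
#
#     provider = NetconfServiceProvider(address='192.0.2.1', port=830,
#                                       username='admin', password='admin',
#                                       protocol='ssh')
#     dump_cfg = ExceptionCfg()
#     dump_cfg.sparse = True
#     dump_cfg.sparse_size = 1024
#     CRUDService().create(provider, dump_cfg)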
|
|
from django import VERSION as DJANGO_VERSION
from django.contrib.auth.models import Group, Permission
from django.test import TestCase
from wagtail.core.models import GroupPagePermission, Page
from wagtail.tests.testapp.models import BusinessIndex, EventCategory, EventPage
from wagtail.tests.utils import WagtailTestUtils
class TestIndexView(TestCase, WagtailTestUtils):
fixtures = ['test_specific.json']
def setUp(self):
self.login()
def get(self, **params):
return self.client.get('/admin/tests/eventpage/', params)
def test_simple(self):
response = self.get()
self.assertEqual(response.status_code, 200)
# There are four event pages in the test data
self.assertEqual(response.context['result_count'], 4)
# User has add permission
self.assertEqual(response.context['user_can_create'], True)
def test_filter(self):
# Filter by audience
response = self.get(audience__exact='public')
self.assertEqual(response.status_code, 200)
# Only three of the event pages in the test data are 'public'
self.assertEqual(response.context['result_count'], 3)
for eventpage in response.context['object_list']:
self.assertEqual(eventpage.audience, 'public')
def test_search(self):
response = self.get(q='Someone')
self.assertEqual(response.status_code, 200)
# There is one eventpage where the title contains 'Someone'
self.assertEqual(response.context['result_count'], 1)
def test_ordering(self):
response = self.get(o='0.1')
self.assertEqual(response.status_code, 200)
# There should still be four results
self.assertEqual(response.context['result_count'], 4)
class TestExcludeFromExplorer(TestCase, WagtailTestUtils):
fixtures = ['modeladmintest_test.json']
def setUp(self):
self.login()
def test_attribute_effects_explorer(self):
# The two VenuePages should appear in the venuepage list
response = self.client.get('/admin/modeladmintest/venuepage/')
self.assertContains(response, "Santa's Grotto")
self.assertContains(response, "Santa's Workshop")
# But they should not appear when viewing the children of the 'Christmas' event in the explorer
response = self.client.get('/admin/pages/4/')
self.assertNotContains(response, "Santa's Grotto")
self.assertNotContains(response, "Santa's Workshop")
# But the other test page should...
self.assertContains(response, "Claim your free present!")
class TestCreateView(TestCase, WagtailTestUtils):
fixtures = ['test_specific.json']
def setUp(self):
self.login()
def test_redirect_to_choose_parent(self):
# When more than one possible parent page exists, redirect to choose_parent
response = self.client.get('/admin/tests/eventpage/create/')
self.assertRedirects(response, '/admin/tests/eventpage/choose_parent/')
def test_one_parent_exists(self):
# Create a BusinessIndex page that BusinessChild can exist under
homepage = Page.objects.get(url_path='/home/')
business_index = BusinessIndex(title='Business Index')
homepage.add_child(instance=business_index)
# When one possible parent page exists, redirect straight to the page create view
response = self.client.get('/admin/tests/businesschild/create/')
expected_path = '/admin/pages/add/tests/businesschild/%d/' % business_index.pk
expected_next_path = '/admin/tests/businesschild/'
self.assertRedirects(response, '%s?next=%s' % (expected_path, expected_next_path))
class TestInspectView(TestCase, WagtailTestUtils):
fixtures = ['test_specific.json', 'modeladmintest_test.json']
def setUp(self):
self.login()
def get(self, id):
return self.client.get('/admin/tests/eventpage/inspect/%d/' % id)
def test_simple(self):
response = self.get(4)
self.assertEqual(response.status_code, 200)
def test_title_present(self):
"""
The page title should appear three times: once in the header, and twice
in the field listing (as the actual title and as the draft title).
"""
response = self.get(4)
self.assertContains(response, 'Christmas', 3)
def test_manytomany_output(self):
"""
Because ManyToMany fields are output in the InspectView by default, the
`categories` for the event should be output as a comma-separated list
once populated.
"""
eventpage = EventPage.objects.get(pk=4)
free_category = EventCategory.objects.create(name='Free')
child_friendly_category = EventCategory.objects.create(name='Child-friendly')
eventpage.categories = (free_category, child_friendly_category)
eventpage.save()
response = self.get(4)
self.assertContains(response, '<dd>Free, Child-friendly</dd>', html=True)
def test_false_values_displayed(self):
"""
Boolean fields with False values should display False, rather than the
value of `get_empty_value_display()`. For this page, those should be
`locked`, `expired` and `has_unpublished_changes`
"""
response = self.get(4)
self.assertContains(response, '<dd>False</dd>', count=3, html=True)
def test_location_present(self):
"""
The location should appear once, in the field listing
"""
response = self.get(4)
self.assertContains(response, 'The North Pole', 1)
def test_non_existent(self):
response = self.get(100)
self.assertEqual(response.status_code, 404)
def test_short_description_is_used_as_field_label(self):
"""
A custom field has been added to the inspect view's `inspect_view_fields`, and since
this field has a `short_description`, we expect it to be used as the field's label
rather than the name of the function.
"""
response = self.client.get('/admin/modeladmintest/author/inspect/1/')
self.assertContains(response, 'Birth information')
self.assertNotContains(response, 'author_birth_string')
class TestEditView(TestCase, WagtailTestUtils):
fixtures = ['test_specific.json']
def setUp(self):
self.login()
def get(self, obj_id):
return self.client.get('/admin/tests/eventpage/edit/%d/' % obj_id)
def test_simple(self):
response = self.get(4)
expected_path = '/admin/pages/4/edit/'
expected_next_path = '/admin/tests/eventpage/'
self.assertRedirects(response, '%s?next=%s' % (expected_path, expected_next_path))
def test_non_existent(self):
response = self.get(100)
self.assertEqual(response.status_code, 404)
class TestDeleteView(TestCase, WagtailTestUtils):
fixtures = ['test_specific.json']
def setUp(self):
self.login()
def get(self, obj_id):
return self.client.get('/admin/tests/eventpage/delete/%d/' % obj_id)
def test_simple(self):
response = self.get(4)
expected_path = '/admin/pages/4/delete/'
expected_next_path = '/admin/tests/eventpage/'
self.assertRedirects(response, '%s?next=%s' % (expected_path, expected_next_path))
class TestChooseParentView(TestCase, WagtailTestUtils):
fixtures = ['test_specific.json']
def setUp(self):
self.login()
def test_simple(self):
response = self.client.get('/admin/tests/eventpage/choose_parent/')
self.assertEqual(response.status_code, 200)
def test_no_parent_exists(self):
response = self.client.get('/admin/tests/businesschild/choose_parent/')
self.assertRedirects(response, '/admin/')
def test_post(self):
response = self.client.post('/admin/tests/eventpage/choose_parent/', {
'parent_page': 2,
})
expected_path = '/admin/pages/add/tests/eventpage/2/'
expected_next_path = '/admin/tests/eventpage/'
self.assertRedirects(response, '%s?next=%s' % (expected_path, expected_next_path))
class TestChooseParentViewForNonSuperuser(TestCase, WagtailTestUtils):
fixtures = ['test_specific.json']
def setUp(self):
homepage = Page.objects.get(url_path='/home/')
business_index = BusinessIndex(
title='Public Business Index',
draft_title='Public Business Index',
)
homepage.add_child(instance=business_index)
another_business_index = BusinessIndex(
title='Another Business Index',
draft_title='Another Business Index',
)
homepage.add_child(instance=another_business_index)
secret_business_index = BusinessIndex(
title='Private Business Index',
draft_title='Private Business Index',
)
homepage.add_child(instance=secret_business_index)
business_editors = Group.objects.create(name='Business editors')
business_editors.permissions.add(Permission.objects.get(codename='access_admin'))
GroupPagePermission.objects.create(
group=business_editors,
page=business_index,
permission_type='add'
)
GroupPagePermission.objects.create(
group=business_editors,
page=another_business_index,
permission_type='add'
)
user = self.create_user(username='test2', password='password')
user.groups.add(business_editors)
# Login
self.login(username='test2', password='password')
def test_simple(self):
response = self.client.get('/admin/tests/businesschild/choose_parent/')
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'Public Business Index')
self.assertNotContains(response, 'Private Business Index')
class TestEditorAccess(TestCase, WagtailTestUtils):
fixtures = ['test_specific.json']
def setUp(self):
# Create a user
user = self.create_user(username='test2', password='password')
user.groups.add(Group.objects.get(pk=2))
# Login
self.login(username='test2', password='password')
def test_delete_permitted(self):
response = self.client.get('/admin/tests/eventpage/delete/4/')
self.assertRedirects(response, '/admin/')
class TestModeratorAccess(TestCase, WagtailTestUtils):
fixtures = ['test_specific.json']
def setUp(self):
# Create a user
user = self.create_user(username='test3', password='password')
user.groups.add(Group.objects.get(pk=1))
# Login
self.login(username='test3', password='password')
def test_delete_permitted(self):
response = self.client.get('/admin/tests/eventpage/delete/4/')
self.assertRedirects(response, '/admin/pages/4/delete/?next=/admin/tests/eventpage/')
class TestHeaderBreadcrumbs(TestCase, WagtailTestUtils):
"""
Test that the <ul class="breadcrumbs"> element is inserted within the
<header> tag, to guard against future regressions.
See https://github.com/wagtail/wagtail/issues/3889
"""
fixtures = ['test_specific.json']
def setUp(self):
self.login()
def test_choose_parent_page(self):
response = self.client.get('/admin/tests/eventpage/choose_parent/')
# check correct templates were used
self.assertTemplateUsed(response, 'modeladmin/includes/breadcrumb.html')
self.assertTemplateUsed(response, 'wagtailadmin/shared/header.html')
# check that home breadcrumb link exists
expected = """
<li class="home">
<a href="/admin/">
<svg class="icon icon-home home_icon" aria-hidden="true" focusable="false">
<use href="#icon-home"></use>
</svg>
<span class="visuallyhidden">Home</span>
<svg class="icon icon-arrow-right arrow_right_icon" aria-hidden="true" focusable="false">
<use href="#icon-arrow-right"></use>
</svg>
</a>
</li>
"""
self.assertContains(response, expected, html=True)
# check that the breadcrumbs are after the header opening tag
content_str = str(response.content)
position_of_header = content_str.index('<header') # intentionally not closing tag
position_of_breadcrumbs = content_str.index('<ul class="breadcrumb">')
self.assertLess(position_of_header, position_of_breadcrumbs)
def test_choose_inspect_page(self):
response = self.client.get('/admin/tests/eventpage/inspect/4/')
# check correct templates were used
self.assertTemplateUsed(response, 'modeladmin/includes/breadcrumb.html')
self.assertTemplateUsed(response, 'wagtailadmin/shared/header.html')
# check that home breadcrumb link exists
expected = """
<li class="home">
<a href="/admin/">
<svg class="icon icon-home home_icon" aria-hidden="true" focusable="false">
<use href="#icon-home"></use>
</svg>
<span class="visuallyhidden">Home</span>
<svg class="icon icon-arrow-right arrow_right_icon" aria-hidden="true" focusable="false">
<use href="#icon-arrow-right"></use>
</svg>
</a>
</li>
"""
self.assertContains(response, expected, html=True)
# check that the breadcrumbs are after the header opening tag
content_str = str(response.content)
position_of_header = content_str.index('<header') # intentionally not closing tag
position_of_breadcrumbs = content_str.index('<ul class="breadcrumb">')
self.assertLess(position_of_header, position_of_breadcrumbs)
class TestSearch(TestCase, WagtailTestUtils):
fixtures = ['test_specific.json']
def setUp(self):
self.login()
def test_lookup_allowed_on_parentalkey(self):
try:
self.client.get('/admin/tests/eventpage/?related_links__link_page__id__exact=1')
except AttributeError:
self.fail("Lookup on parentalkey raised AttributeError unexpectedly")
|
|
"""
Fuzz Testing for Thrift Services
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import argparse
import collections
import imp
import itertools
import json
import logging
import os
import pprint
import random
import sys
import time
import types
import six
import six.moves as sm
from six.moves.urllib.parse import urlparse
try:
from ServiceRouter import ConnConfigs, ServiceOptions, ServiceRouter
SR_AVAILABLE = True
except ImportError:
SR_AVAILABLE = False
from thrift import Thrift
from thrift.transport import TTransport, TSocket, TSSLSocket, THttpClient
from thrift.protocol import TBinaryProtocol, TCompactProtocol, THeaderProtocol
from . import randomizer
def positive_int(s):
"""Typechecker for positive integers"""
try:
n = int(s)
if not n > 0:
raise argparse.ArgumentTypeError(
"%s is not positive." % s)
return n
except ValueError:
raise argparse.ArgumentTypeError(
"Cannot convert %s to an integer." % s)
def prob_float(s):
"""Typechecker for probability values"""
try:
x = float(s)
if not 0 <= x <= 1:
raise argparse.ArgumentTypeError(
"%s is not a valid probability." % x)
return x
except ValueError:
raise argparse.ArgumentTypeError(
"Cannot convert %s to a float." % s)
class FuzzerConfiguration(object):
"""Container for Fuzzer configuration options"""
argspec = {
'allow_application_exceptions': {
'description': 'Do not flag TApplicationExceptions as errors',
'type': bool,
'flag': '-a',
'argparse_kwargs': {
'action': 'store_const',
'const': True
},
'default': False,
},
'compact': {
'description': 'Use TCompactProtocol',
'type': bool,
'flag': '-c',
'argparse_kwargs': {
'action': 'store_const',
'const': True
},
'default': False
},
'constraints': {
'description': 'JSON Constraint dictionary',
'type': str,
'flag': '-Con',
'default': {},
'is_json': True
},
'framed': {
'description': 'Use framed transport.',
'type': bool,
'flag': '-f',
'argparse_kwargs': {
'action': 'store_const',
'const': True
},
'default': False
},
'functions': {
'description': 'Which functions to test. If excluded, test all',
'type': str,
'flag': '-F',
'argparse_kwargs': {
'nargs': '*',
},
'default': None
},
'host': {
'description': 'The host and port to connect to',
'type': str,
'flag': '-h',
'argparse_kwargs': {
'metavar': 'HOST[:PORT]'
},
'default': None
},
'iterations': {
'description': 'Number of calls per method.',
'type': positive_int,
'flag': '-n',
'attr_name': 'n_iterations',
'default': 1000
},
'logfile': {
'description': 'File to write output logs.',
'type': str,
'flag': '-l',
'default': None
},
'loglevel': {
'description': 'Level of verbosity to write logs.',
'type': str,
'flag': '-L',
'argparse_kwargs': {
'choices': ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
},
'default': 'INFO'
},
'service': {
'description': 'Path to file of Python service module.',
'type': str,
'flag': '-S',
'attr_name': 'service_path',
'default': None
},
'ssl': {
'description': 'Use SSL socket.',
'type': bool,
'flag': '-s',
'argparse_kwargs': {
'action': 'store_const',
'const': True
},
'default': False
},
'unframed': {
'description': 'Use unframed transport.',
'type': bool,
'flag': '-U',
'argparse_kwargs': {
'action': 'store_const',
'const': True
},
'default': False
},
'url': {
'description': 'The URL to connect to for HTTP transport',
'type': str,
'flag': '-u',
'default': None
},
}
if SR_AVAILABLE:
argspec['tier'] = {
'description': 'The SMC tier to connect to',
'type': str,
'flag': '-t',
'default': None
}
argspec['conn_configs'] = {
'description': 'ConnConfigs to use for ServiceRouter connection',
'type': str,
'flag': '-Conn',
'default': {},
'is_json': True
}
argspec['service_options'] = {
'description': 'ServiceOptions to use for ServiceRouter connection',
'type': str,
'flag': '-SO',
'default': {},
'is_json': True
}
def __init__(self, service=None):
cls = self.__class__
if service is not None:
self.service = service
parser = argparse.ArgumentParser(description='Fuzzer Configuration',
add_help=False)
parser.add_argument('-C', '--config', dest='config_filename',
help='JSON Configuration file. '
'All settings can be specified as commandline '
'args and config file settings. Commandline args '
'override config file settings.')
parser.add_argument('-?', '--help', action='help',
help='Show this help message and exit.')
for name, arg in six.iteritems(cls.argspec):
kwargs = arg.get('argparse_kwargs', {})
if kwargs.get('action', None) != 'store_const':
# Pass type to argparse. With store_const, type can be inferred
kwargs['type'] = arg['type']
# If an argument is not passed, don't put a value in the namespace
kwargs['default'] = argparse.SUPPRESS
# Use the argument's description and default as a help message
kwargs['help'] = "%s Default: %s" % (arg.get('description', ''),
arg['default'])
kwargs['dest'] = arg.get('attr_name', name)
if hasattr(self, kwargs['dest']):
# Attribute already assigned (e.g., service passed to __init__)
continue
parser.add_argument(arg['flag'], '--%s' % name, **kwargs)
# Assign the default value to config namespace
setattr(self, kwargs['dest'], arg['default'])
args = parser.parse_args()
# Read settings in config file
self.__dict__.update(cls._config_file_settings(args))
# Read settings in args
self.__dict__.update(cls._args_settings(args))
valid, message = self._validate_config()
if not valid:
print(message, file=sys.stderr)
sys.exit(os.EX_USAGE)
@classmethod
def _try_parse_type(cls, name, type_, val):
try:
val = type_(val)
except:
raise TypeError(("Expected type %s for setting %s, "
"but got type %s (%s)") % (
type_, name, type(val), val))
return val
@classmethod
def _try_parse(cls, name, arg, val):
if arg.get('is_json', False):
return val
type_ = arg['type']
nargs = arg.get('argparse_kwargs', {}).get('nargs', None)
if nargs is None:
return cls._try_parse_type(name, type_, val)
else:
if not isinstance(val, list):
raise TypeError(("Expected list of length %s "
"for setting %s, but got type %s (%s)") % (
nargs, name, type(val), val))
ret = []
for elem in val:
ret.append(cls._try_parse_type(name, type_, elem))
return ret
@classmethod
def _config_file_settings(cls, args):
"""Read settings from a configuration file"""
if args.config_filename is None:
return {} # No config file
if not os.path.exists(args.config_filename):
raise OSError(os.EX_NOINPUT,
"Config file does not exist: %s" % args.config_filename)
with open(args.config_filename, "r") as fd:
try:
settings = json.load(fd)
except ValueError as e:
raise ValueError("Error parsing config file: %s" % e)
# Make sure settings are well-formatted
renamed_settings = {}
if not isinstance(settings, dict):
raise TypeError("Invalid config file. Top-level must be Object.")
for name, val in six.iteritems(settings):
if name not in cls.argspec:
raise ValueError(("Unrecognized configuration "
"option: %s") % name)
arg = cls.argspec[name]
val = cls._try_parse(name, arg, val)
attr_name = arg.get('attr_name', name)
renamed_settings[attr_name] = val
return renamed_settings
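# Illustrative config file contents (values are placeholders; keys must be
# argspec names, and non-JSON values are re-parsed with that option's declared
# type by _try_parse):
#
#     {
#         "host": "localhost:9090",
#         "iterations": 100,
#         "loglevel": "DEBUG",
#         "constraints": {"my_method.my_field.distribution": "uniform(0, 100)"}
#     }
#
# Unrecognized keys raise ValueError, and settings given on the command line
# override anything read here (see __init__).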
@classmethod
def _args_settings(cls, args):
"""Read settings from the args namespace returned by argparse"""
settings = {}
for name, arg in six.iteritems(cls.argspec):
attr_name = arg.get('attr_name', name)
if not hasattr(args, attr_name):
continue
value = getattr(args, attr_name)
if arg.get('is_json', False):
settings[attr_name] = json.loads(value)
else:
settings[attr_name] = value
return settings
def __str__(self):
return 'Configuration(\n%s\n)' % pprint.pformat(self.__dict__)
def load_service(self):
if self.service is not None:
if self.service_path is not None:
raise ValueError("Cannot specify a service path when the "
"service is input programmatically")
# Service already loaded programmatically. Just load methods.
self.service.load_methods()
return self.service
if self.service_path is None:
raise ValueError("Error: No service specified")
service_path = self.service_path
if not os.path.exists(service_path):
raise OSError("Service module does not exist: %s" % service_path)
if not service_path.endswith('.py'):
raise OSError("Service module is not a Python module: %s" %
service_path)
parent_path, service_filename = os.path.split(service_path)
service_name = service_filename[:-3] # Truncate extension
logging.info("Service name: %s" (service_name))
parent_path = os.path.dirname(service_path)
ttypes_path = os.path.join(parent_path, 'ttypes.py')
constants_path = os.path.join(parent_path, 'constants.py')
imp.load_source('module', parent_path)
ttypes_module = imp.load_source('module.ttypes', ttypes_path)
constants_module = imp.load_source('module.constants', constants_path)
service_module = imp.load_source('module.%s' % (service_name),
service_path)
service = Service(ttypes_module, constants_module, service_module)
service.load_methods()
return service
def _validate_config(self):
# Verify there is one valid connection flag
specified_flags = []
connection_flags = FuzzerClient.connection_flags
for flag in connection_flags:
if hasattr(self, flag) and getattr(self, flag) is not None:
specified_flags.append(flag)
if len(specified_flags) != 1:
message = "Exactly one of [%s] must be specified. Got [%s]." % (
(', '.join('--%s' % flag for flag in connection_flags)),
(', '.join('--%s' % flag for flag in specified_flags)))
return False, message
connection_method = specified_flags[0]
self.connection_method = connection_method
if connection_method == 'url':
if not (self.compact or self.framed or self.unframed):
message = ("A protocol (compact, framed, or unframed) "
"must be specified for HTTP Transport.")
return False, message
if connection_method in {'url', 'host'}:
if connection_method == 'url':
try:
url = urlparse(self.url)
except:
return False, "Unable to parse url %s" % self.url
else:
connection_str = url[1]
elif connection_method == 'host':
connection_str = self.host
if ':' in connection_str:
# Get the string after the colon
port = connection_str[connection_str.index(':') + 1:]
try:
int(port)
except ValueError:
message = "Port is not an integer: %s" % port
return False, message
return True, None
class Service(object):
"""Wrapper for a thrift service"""
def __init__(self, ttypes_module, constants_module, service_module):
self.ttypes = ttypes_module
self.constants = constants_module
self.service = service_module
self.methods = None
def __str__(self):
return 'Service(%s)' % self.service.__name__
def load_methods(self):
"""Load a service's methods"""
service_module = self.service
method_inheritance_chain = []
while service_module is not None:
interface = service_module.Iface
if_attrs = [getattr(interface, a) for a in dir(interface)]
if_methods = {m.__name__ for m in if_attrs if
isinstance(m, types.MethodType)}
method_inheritance_chain.append((service_module, if_methods))
if interface.__bases__:
# Can only have single inheritance in thrift
service_module = __import__(interface.__bases__[0].__module__,
{}, {}, ['Iface'])
else:
service_module = None
# Map method names to the module the method is defined in
method_to_module = {}
# Iterate starting at the top of the tree
for (module, if_methods) in method_inheritance_chain[::-1]:
for method_name in if_methods:
if method_name not in method_to_module:
method_to_module[method_name] = module
methods = {}
for method_name, module in six.iteritems(method_to_module):
args_class_name = "%s_args" % (method_name)
result_class_name = "%s_result" % (method_name)
if hasattr(module, args_class_name):
args = getattr(module, args_class_name)
else:
raise AttributeError(
"Method arg spec not found: %s.%s" % (
module.__name__, method_name))
if hasattr(module, result_class_name):
result = getattr(module, result_class_name)
else:
result = None
thrift_exceptions = []
if result is not None:
for res_spec in result.thrift_spec:
if res_spec is None:
continue
if res_spec[2] != 'success':
# This is an exception return type
spec_args = res_spec[3]
exception_type = spec_args[0]
thrift_exceptions.append(exception_type)
methods[method_name] = {
'args_class': args,
'result_spec': result,
'thrift_exceptions': tuple(thrift_exceptions)
}
self.methods = methods
@property
def client_class(self):
return self.service.Client
def get_methods(self, include=None):
"""Get a dictionary of methods provided by the service.
If include is not None, it should be a collection and only
the method names in that collection will be included."""
if self.methods is None:
raise ValueError("Service.load_methods must be "
"called before Service.get_methods")
if include is None:
return self.methods
included_methods = {}
for method_name in include:
if method_name not in self.methods:
raise NameError("Function does not exist: %s" % method_name)
included_methods[method_name] = self.methods[method_name]
return included_methods
class FuzzerClient(object):
"""Client wrapper used to make calls based on configuration settings"""
connection_flags = ['host', 'url', 'tier']
default_port = 9090
def __init__(self, config, client_class):
self.config = config
self.client_class = client_class
def _get_client_by_transport(self, config, transport, socket=None):
# Create the protocol and client
if config.compact:
protocol = TCompactProtocol.TCompactProtocol(transport)
# No explicit option about protocol is specified. Try to infer.
elif config.framed or config.unframed:
protocol = TBinaryProtocol.TBinaryProtocolAccelerated(transport)
elif socket is not None:
protocol = THeaderProtocol.THeaderProtocol(socket)
transport = protocol.trans
else:
raise ValueError("No protocol specified for HTTP Transport")
transport.open()
self._transport = transport
client = self.client_class(protocol)
return client
def _parse_host_port(self, value, default_port):
parts = value.rsplit(':', 1)
if len(parts) == 1:
return (parts[0], default_port)
else:
# FuzzerConfiguration ensures parts[1] is an int
return (parts[0], int(parts[1]))
def _get_client_by_host(self):
config = self.config
host, port = self._parse_host_port(config.host, self.default_port)
socket = (TSSLSocket.TSSLSocket(host, port) if config.ssl
else TSocket.TSocket(host, port))
if config.framed:
transport = TTransport.TFramedTransport(socket)
else:
transport = TTransport.TBufferedTransport(socket)
return self._get_client_by_transport(config, transport, socket=socket)
def _get_client_by_url(self):
config = self.config
url = urlparse(config.url)
host, port = self._parse_host_port(url[1], 80)
transport = THttpClient.THttpClient(config.url)
return self._get_client_by_transport(config, transport)
def _get_client_by_tier(self):
"""Get a client that uses ServiceRouter"""
config = self.config
serviceRouter = ServiceRouter()
overrides = ConnConfigs()
for key, val in six.iteritems(config.conn_configs):
key = six.binary_type(key)
val = six.binary_type(val)
overrides[key] = val
sr_options = ServiceOptions()
for key, val in six.iteritems(config.service_options):
key = six.binary_type(key)
if not isinstance(val, list):
raise TypeError("Service option %s expected list; got %s (%s)"
% (key, val, type(val)))
val = [six.binary_type(elem) for elem in val]
sr_options[key] = val
service_name = config.tier
# Obtain a normal client connection using SR2
client = serviceRouter.getClient2(self.client_class,
service_name, sr_options,
overrides, False)
if client is None:
raise NameError('Failed to lookup host for tier %s' % service_name)
return client
def _get_client(self):
if self.config.connection_method == 'host':
client = self._get_client_by_host()
elif self.config.connection_method == 'url':
client = self._get_client_by_url()
elif self.config.connection_method == 'tier':
client = self._get_client_by_tier()
else:
raise NameError("Unknown connection type: %s" %
self.config.connection_method)
return client
def _close_client(self):
if self.config.connection_method in {'host', 'url'}:
self._transport.close()
def __enter__(self):
self.client = self._get_client()
return self
def __exit__(self, exc_type, exc_value, traceback):
self._close_client()
self.client = None
def reset(self):
self._close_client()
try:
self.client = self._get_client()
return True
except TTransport.TTransportException as e:
logging.error("Unable to reset connection: %r" % e)
return False
def make_call(self, method_name, kwargs, is_oneway=False):
method = getattr(self.client, method_name)
ret = method(**kwargs)
if is_oneway:
self.reset()
return ret
class Timer(object):
def __init__(self, aggregator, category, action):
self.aggregator = aggregator
self.category = category
self.action = action
def __enter__(self):
self.start_time = time.time()
def __exit__(self, exc_type, exc_value, traceback):
end_time = time.time()
time_elapsed = end_time - self.start_time
self.aggregator.add(self.category, self.action, time_elapsed)
class TimeAggregator(object):
def __init__(self):
self.total_time = collections.defaultdict(
lambda: collections.defaultdict(float))
def time(self, category, action):
return Timer(self, category, action)
def add(self, category, action, time_elapsed):
self.total_time[category][action] += time_elapsed
def summarize(self):
max_category_name_length = max(len(name) for name in self.total_time)
max_action_name_length = max(max(len(action_name) for action_name in
self.total_time[name]) for name in
self.total_time)
category_format = "%%%ds: %%s" % max_category_name_length
action_format = "%%%ds: %%4.3fs" % max_action_name_length
category_summaries = []
for category_name, category_actions in sorted(self.total_time.items()):
timing_items = []
for action_name, action_time in sorted(category_actions.items()):
timing_items.append(action_format % (action_name, action_time))
all_actions = " | ".join(timing_items)
category_summaries.append(category_format % (
category_name, all_actions))
summaries = "\n".join(category_summaries)
logging.info("Timing Summary:\n%s" % summaries)
class FuzzTester(object):
summary_interval = 1 # Seconds between summary logs
class Result:
Success = 0
TransportException = 1
ApplicationException = 2
UserDefinedException = 3
OtherException = 4
Crash = 5
def __init__(self, config):
self.config = config
self.service = None
self.randomizer = None
self.client = None
def start_logging(self):
logfile = self.config.logfile
if self.config.logfile is None:
logfile = '/dev/null'
log_level = getattr(logging, self.config.loglevel)
datefmt = '%Y-%m-%d %H:%M:%S'
fmt = "[%(asctime)s] [%(levelname)s] %(message)s"
if logfile == 'stdout':
logging.basicConfig(stream=sys.stdout, level=log_level)
else:
logging.basicConfig(filename=logfile, level=log_level)
log_handler = logging.getLogger().handlers[0]
log_handler.setFormatter(logging.Formatter(fmt, datefmt=datefmt))
def start_timing(self):
self.timer = TimeAggregator()
self.next_summary_time = time.time() + self.__class__.summary_interval
def _call_string(self, method_name, kwargs):
kwarg_str = ', '.join('%s=%s' % (k, v)
for k, v in six.iteritems(kwargs))
return "%s(%s)" % (method_name, kwarg_str)
def run_test(self, method_name, kwargs, expected_output,
is_oneway, thrift_exceptions):
"""
Make an RPC with given arguments and check for exceptions.
"""
try:
with self.timer.time(method_name, "Thrift"):
self.client.make_call(method_name, kwargs,
is_oneway)
except thrift_exceptions as e:
self.record_result(
method_name, FuzzTester.Result.UserDefinedException)
if self.config.loglevel == "DEBUG":
with self.timer.time(method_name, "Logging"):
logging.debug("Got thrift exception: %r" % e)
logging.debug("Exception thrown by call: %s" % (
self._call_string(method_name, kwargs)))
except Thrift.TApplicationException as e:
self.record_result(
method_name, FuzzTester.Result.ApplicationException)
if self.config.allow_application_exceptions:
if self.config.loglevel == "DEBUG":
with self.timer.time(method_name, "Logging"):
logging.debug("Got TApplication exception %s" % e)
logging.debug("Exception thrown by call: %s" % (
self._call_string(method_name, kwargs)))
else:
with self.timer.time(method_name, "Logging"):
self.n_exceptions += 1
logging.error("Got application exception: %s" % e)
logging.error("Offending call: %s" % (
self._call_string(method_name, kwargs)))
except TTransport.TTransportException as e:
self.n_exceptions += 1
with self.timer.time(method_name, "Logging"):
logging.error("Got TTransportException: (%s, %r)" % (e, e))
logging.error("Offending call: %s" % (
self._call_string(method_name, kwargs)))
if "errno = 111: Connection refused" in e.args[0]:
# Unable to connect to server - server may be down
self.record_result(method_name, FuzzTester.Result.Crash)
return False
if not self.client.reset():
logging.error("Inferring server crash.")
self.record_result(method_name, FuzzTester.Result.Crash)
return False
self.record_result(
method_name, FuzzTester.Result.TransportException)
except Exception as e:
self.record_result(method_name, FuzzTester.Result.OtherException)
with self.timer.time(method_name, "Logging"):
self.n_exceptions += 1
logging.error("Got exception %s (%r)" % (e, e))
logging.error("Offending call: %s" % (
self._call_string(method_name, kwargs)))
if hasattr(self, 'previous_kwargs'):
logging.error("Previous call: %s" % (
self._call_string(method_name, self.previous_kwargs)))
else:
self.record_result(method_name, FuzzTester.Result.Success)
if self.config.loglevel == "DEBUG":
with self.timer.time(method_name, "Logging"):
logging.debug("Successful call: %s" % (
self._call_string(method_name, kwargs)))
finally:
self.n_tests += 1
return True
def fuzz_kwargs(self, method_name, n_iterations):
# For now, just yield n random sets of args
# In future versions, fuzz fields more methodically based
# on feedback and seeds
for _ in sm.xrange(n_iterations):
with self.timer.time(method_name, "Randomizing"):
method_randomizer = self.method_randomizers[method_name]
args_struct = method_randomizer.generate()
if args_struct is None:
logging.error("Unable to produce valid arguments for %s" %
method_name)
else:
kwargs = args_struct.__dict__ # Get members of args struct
yield kwargs
def get_method_randomizers(self, methods, constraints):
"""Create a StructRandomizer for each method"""
state = randomizer.RandomizerState()
method_randomizers = {}
state.push_type_constraints(constraints)
for method_name in methods:
method_constraints = constraints.get(method_name, {})
args_class = methods[method_name]['args_class']
# Create a spec_args tuple for the method args struct type
randomizer_spec_args = (
args_class,
args_class.thrift_spec,
False # isUnion
)
method_randomizer = state.get_randomizer(
Thrift.TType.STRUCT,
randomizer_spec_args,
method_constraints
)
method_randomizers[method_name] = method_randomizer
return method_randomizers
def _split_key(self, key):
"""Split a constraint rule key such as a.b|c into ['a', 'b', '|c']
Dots separate hierarchical field names and property names
Pipes indicate a type name and hashes indicate a field name,
though these rules are not yet supported.
"""
components = []
start_idx = 0
cur_idx = 0
while cur_idx < len(key):
if (cur_idx != start_idx and
key[cur_idx] in {'.', '|', '#'}):
components.append(key[start_idx:cur_idx])
start_idx = cur_idx
if key[cur_idx] == '.':
start_idx += 1
cur_idx = start_idx
else:
cur_idx += 1
components.append(key[start_idx:])
return components
def preprocess_constraints(self, source_constraints):
"""
The constraints dictionary can have any key
that follows the following format:
method_name[.arg_name][.field_name ...].property_name
The values in the dictionary can be nested such that inner field
names are subfields of the outer scope, and inner type rules are
applied only to subvalues of the outer scope.
After preprocessing, each dictionary level should have exactly one
method name, field name, or property name as its key.
Any strings of identifiers are converted into the nested dictionary
structure. For example, the constraint set:
{'my_method.my_field.distribution': 'uniform(0,100)'}
Will be preprocessed to:
{'my_method':
{'my_field':
{'distribution': 'uniform(0, 100)'}
}
}
"""
constraints = {}
scope_path = []
def add_constraint(rule):
walk_scope = constraints
for key in scope_path[:-1]:
if key not in walk_scope:
walk_scope[key] = {}
walk_scope = walk_scope[key]
walk_scope[scope_path[-1]] = rule
def add_constraints_from_dict(d):
for key, rule in six.iteritems(d):
key_components = self._split_key(key)
scope_path.extend(key_components)
if isinstance(rule, dict):
add_constraints_from_dict(rule)
else:
add_constraint(rule)
scope_path[-len(key_components):] = []
add_constraints_from_dict(source_constraints)
return constraints
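# Illustrative example of the nested form described above (method and field
# names are placeholders): flat dotted keys and nested dicts may be mixed, so
#
#     {'my_method': {'my_field.distribution': 'uniform(0, 100)'}}
#
# preprocesses to the same structure as the flat key in the docstring:
#
#     {'my_method': {'my_field': {'distribution': 'uniform(0, 100)'}}}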
def start_result_counters(self):
"""Create result counters. The counters object is a dict that maps
a method name to a counter of FuzzTester.Result values
"""
self.result_counters = collections.defaultdict(collections.Counter)
def record_result(self, method_name, result):
self.result_counters[method_name][result] += 1
def log_result_summary(self, method_name):
if time.time() >= self.next_summary_time:
results = []
for name, val in six.iteritems(vars(FuzzTester.Result)):
if name.startswith('_'):
continue
count = self.result_counters[method_name][val]
if count > 0:
results.append((name, count))
results.sort()
logging.info('%s count: {%s}' %
(method_name, ', '.join('%s: %d' % r for r in results)))
interval = self.__class__.summary_interval
# Determine how many full intervals have passed between
# self.next_summary_time (the scheduled time for this summary) and
# the time the summary is actually completed.
intervals_passed = int(
(time.time() - self.next_summary_time) / interval)
# Schedule the next summary for the first interval that has not yet
# fully passed
self.next_summary_time += interval * (intervals_passed + 1)
def run(self):
self.start_logging()
self.start_timing()
self.start_result_counters()
logging.info("Starting Fuzz Tester")
logging.info(str(self.config))
self.service = self.config.load_service()
client_class = self.service.client_class
methods = self.service.get_methods(self.config.functions)
constraints = self.preprocess_constraints(self.config.constraints)
self.method_randomizers = self.get_method_randomizers(
methods, constraints)
logging.info("Fuzzing methods: %s" % methods.keys())
with FuzzerClient(self.config, client_class) as self.client:
for method_name, spec in six.iteritems(methods):
result_spec = spec.get('result_spec', None)
thrift_exceptions = spec['thrift_exceptions']
is_oneway = result_spec is None
logging.info("Fuzz testing method %s" % (method_name))
self.n_tests = 0
self.n_exceptions = 0
did_crash = False
for kwargs in self.fuzz_kwargs(
method_name, self.config.n_iterations):
if not self.run_test(method_name, kwargs, None,
is_oneway, thrift_exceptions):
did_crash = True
break
self.log_result_summary(method_name)
self.previous_kwargs = kwargs
if did_crash:
logging.error(("Method %s caused the "
"server to crash.") % (
method_name))
break
else:
logging.info(("Method %s raised unexpected "
"exceptions in %d/%d tests.") % (
method_name, self.n_exceptions,
self.n_tests))
self.timer.summarize()
def run_fuzzer(config):
fuzzer = FuzzTester(config)
fuzzer.run()
def fuzz_service(service, ttypes, constants):
"""Run the tester with required modules input programmatically"""
service = Service(ttypes, constants, service)
config = FuzzerConfiguration(service)
run_fuzzer(config)
if __name__ == '__main__':
config = FuzzerConfiguration()
run_fuzzer(config)
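# Illustrative invocations (file paths, host and module names are placeholders;
# the flags come from FuzzerConfiguration.argspec above):
#
#     python fuzzer.py -S gen-py/my_service/MyService.py -h localhost:9090 \
#         -n 100 -L DEBUG -l fuzz.log
#
# or programmatically, with the generated modules already imported:
#
#     fuzz_service(MyService, ttypes, constants)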
|
|
from __future__ import with_statement
from copy import copy
import re
import sys
from django.core.cache import cache
from django.conf import settings
from django.contrib.auth.models import Permission
from django.core.urlresolvers import clear_url_caches
from django.http import Http404
from django.template import Variable
from django.test.utils import override_settings
from cms.api import create_page
from cms.apphook_pool import apphook_pool
from cms.models import PagePermission
from cms.test_utils.testcases import CMSTestCase
from cms.test_utils.util.fuzzy_int import FuzzyInt
from cms.utils.conf import get_cms_setting
from cms.views import _handle_no_page, details
from menus.menu_pool import menu_pool
APP_NAME = 'SampleApp'
APP_MODULE = "cms.test_utils.project.sampleapp.cms_app"
@override_settings(
CMS_PERMISSION=True,
ROOT_URLCONF='cms.test_utils.project.urls',
)
class ViewTests(CMSTestCase):
def setUp(self):
clear_url_caches()
def test_handle_no_page(self):
"""
Test handle nopage correctly works with DEBUG=True
"""
request = self.get_request('/')
slug = ''
self.assertRaises(Http404, _handle_no_page, request, slug)
with self.settings(DEBUG=True):
request = self.get_request('/en/')
slug = ''
response = _handle_no_page(request, slug)
self.assertEqual(response.status_code, 200)
def test_apphook_not_hooked(self):
"""
Test details view when apphook pool has apphooks, but they're not
actually hooked
"""
if APP_MODULE in sys.modules:
del sys.modules[APP_MODULE]
apphooks = (
'%s.%s' % (APP_MODULE, APP_NAME),
)
create_page("page2", "nav_playground.html", "en", published=True)
with self.settings(CMS_APPHOOKS=apphooks):
apphook_pool.clear()
response = self.client.get('/en/')
self.assertEqual(response.status_code, 200)
apphook_pool.clear()
def test_external_redirect(self):
# test external redirect
redirect_one = 'https://www.django-cms.org/'
one = create_page("one", "nav_playground.html", "en", published=True,
redirect=redirect_one)
url = one.get_absolute_url()
request = self.get_request(url)
response = details(request, one.get_path("en"))
self.assertEqual(response.status_code, 302)
self.assertEqual(response['Location'], redirect_one)
def test_internal_neutral_redirect(self):
# test internal language neutral redirect
redirect_one = 'https://www.django-cms.org/'
redirect_two = '/'
one = create_page("one", "nav_playground.html", "en", published=True,
redirect=redirect_one)
two = create_page("two", "nav_playground.html", "en", parent=one,
published=True, redirect=redirect_two)
url = two.get_absolute_url()
request = self.get_request(url)
response = details(request, two.get_path())
self.assertEqual(response.status_code, 302)
self.assertEqual(response['Location'], '/en/')
def test_internal_forced_redirect(self):
# test internal forced language redirect
redirect_one = 'https://www.django-cms.org/'
redirect_three = '/en/'
one = create_page("one", "nav_playground.html", "en", published=True,
redirect=redirect_one)
three = create_page("three", "nav_playground.html", "en", parent=one,
published=True, redirect=redirect_three)
url = three.get_slug()
request = self.get_request(url)
response = details(request, url.strip('/'))
self.assertEqual(response.status_code, 302)
self.assertEqual(response['Location'], redirect_three)
def test_redirect_to_self(self):
one = create_page("one", "nav_playground.html", "en", published=True,
redirect='/')
url = one.get_absolute_url()
request = self.get_request(url)
response = details(request, one.get_path())
self.assertEqual(response.status_code, 200)
def test_redirect_to_self_with_host(self):
one = create_page("one", "nav_playground.html", "en", published=True,
redirect='http://testserver/en/')
url = one.get_absolute_url()
request = self.get_request(url)
response = details(request, one.get_path())
self.assertEqual(response.status_code, 200)
def test_redirect_with_toolbar(self):
create_page("one", "nav_playground.html", "en", published=True,
redirect='/en/page2')
superuser = self.get_superuser()
with self.login_user_context(superuser):
response = self.client.get('/en/?%s' % get_cms_setting('CMS_TOOLBAR_URL__EDIT_ON'))
self.assertEqual(response.status_code, 200)
def test_login_required(self):
create_page("page", "nav_playground.html", "en", published=True,
login_required=True)
plain_url = '/accounts/'
login_rx = re.compile("%s\?(signin=|next=/en/)&" % plain_url)
with self.settings(LOGIN_URL=plain_url + '?signin'):
request = self.get_request('/en/')
response = details(request, '')
self.assertEqual(response.status_code, 302)
self.assertTrue(login_rx.search(response['Location']))
login_rx = re.compile("%s\?(signin=|next=/)&" % plain_url)
with self.settings(USE_I18N=False, LOGIN_URL=plain_url + '?signin'):
request = self.get_request('/')
response = details(request, '')
self.assertEqual(response.status_code, 302)
self.assertTrue(login_rx.search(response['Location']))
def test_edit_permission(self):
page = create_page("page", "nav_playground.html", "en", published=True)
# Anon user
response = self.client.get("/en/?%s" % get_cms_setting('CMS_TOOLBAR_URL__EDIT_ON'))
self.assertNotContains(response, "cms_toolbar-item_switch_save-edit", 200)
# Superuser
user = self.get_superuser()
with self.login_user_context(user):
response = self.client.get("/en/?%s" % get_cms_setting('CMS_TOOLBAR_URL__EDIT_ON'))
self.assertContains(response, "cms_toolbar-item_switch_save-edit", 1, 200)
# Admin but with no permission
user = self.get_staff_user_with_no_permissions()
user.user_permissions.add(Permission.objects.get(codename='change_page'))
with self.login_user_context(user):
response = self.client.get("/en/?%s" % get_cms_setting('CMS_TOOLBAR_URL__EDIT_ON'))
self.assertNotContains(response, "cms_toolbar-item_switch_save-edit", 200)
PagePermission.objects.create(can_change=True, user=user, page=page)
with self.login_user_context(user):
response = self.client.get("/en/?%s" % get_cms_setting('CMS_TOOLBAR_URL__EDIT_ON'))
self.assertContains(response, "cms_toolbar-item_switch_save-edit", 1, 200)
@override_settings(ROOT_URLCONF='cms.test_utils.project.urls')
class ContextTests(CMSTestCase):
def test_context_current_page(self):
"""
Asserts the number of queries triggered by
`cms.context_processors.cms_settings` and `cms.middleware.page`
"""
from django.template import context
page_template = "nav_playground.html"
original_context = settings.TEMPLATE_CONTEXT_PROCESSORS
new_context = copy(original_context)
new_context.remove("cms.context_processors.cms_settings")
page = create_page("page", page_template, "en", published=True)
page_2 = create_page("page-2", page_template, "en", published=True,
parent=page)
# Tests for standard django applications
# 1 query is executed in get_app_patterns(), not related
# to cms.context_processors.cms_settings.
# Executing this outside the queries assertion context ensures
# repeatability
self.client.get("/en/plain_view/")
cache.clear()
menu_pool.clear()
context._standard_context_processors = None
# Number of queries when context processors is not enabled
with self.settings(TEMPLATE_CONTEXT_PROCESSORS=new_context):
with self.assertNumQueries(FuzzyInt(0, 12)) as context:
response = self.client.get("/en/plain_view/")
num_queries = len(context.captured_queries)
self.assertFalse('CMS_TEMPLATE' in response.context)
cache.clear()
menu_pool.clear()
# Number of queries when context processor is enabled
with self.settings(TEMPLATE_CONTEXT_PROCESSORS=original_context):
# no extra query is run when accessing urls managed by standard
# django applications
with self.assertNumQueries(FuzzyInt(0, num_queries)):
response = self.client.get("/en/plain_view/")
# One query when determining current page
with self.assertNumQueries(FuzzyInt(0, 1)):
self.assertFalse(response.context['request'].current_page)
self.assertFalse(response.context['request']._current_page_cache)
# Zero more queries when determining the current template
with self.assertNumQueries(0):
# Template is the first in the CMS_TEMPLATES list
template = Variable('CMS_TEMPLATE').resolve(response.context)
self.assertEqual(template, get_cms_setting('TEMPLATES')[0][0])
cache.clear()
menu_pool.clear()
# Number of queries when context processors is not enabled
with self.settings(TEMPLATE_CONTEXT_PROCESSORS=new_context):
# Baseline number of queries
with self.assertNumQueries(FuzzyInt(13, 17)) as context:
response = self.client.get("/en/page-2/")
num_queries_page = len(context.captured_queries)
cache.clear()
menu_pool.clear()
# Number of queries when context processors is enabled
with self.settings(TEMPLATE_CONTEXT_PROCESSORS=original_context):
# Exactly the same number of queries are executed with and without
# the context_processor
with self.assertNumQueries(num_queries_page):
response = self.client.get("/en/page-2/")
template = Variable('CMS_TEMPLATE').resolve(response.context)
self.assertEqual(template, page_template)
cache.clear()
menu_pool.clear()
page_2.template = 'INHERIT'
page_2.save()
page_2.publish('en')
with self.settings(TEMPLATE_CONTEXT_PROCESSORS=original_context):
# One query more triggered as page inherits template from ancestor
with self.assertNumQueries(num_queries_page + 1):
response = self.client.get("/en/page-2/")
template = Variable('CMS_TEMPLATE').resolve(response.context)
self.assertEqual(template, page_template)
|
|
import unittest
from mako import ast
from mako import compat
from mako import exceptions
from mako import pyparser
from test import eq_
from test import requires_python_2
from test import requires_python_3
exception_kwargs = {"source": "", "lineno": 0, "pos": 0, "filename": ""}
class AstParseTest(unittest.TestCase):
def test_locate_identifiers(self):
"""test the location of identifiers in a python code string"""
code = """
a = 10
b = 5
c = x * 5 + a + b + q
(g,h,i) = (1,2,3)
[u,k,j] = [4,5,6]
foo.hoho.lala.bar = 7 + gah.blah + u + blah
for lar in (1,2,3):
gh = 5
x = 12
("hello world, ", a, b)
("Another expr", c)
"""
parsed = ast.PythonCode(code, **exception_kwargs)
eq_(
parsed.declared_identifiers,
set(
["a", "b", "c", "g", "h", "i", "u", "k", "j", "gh", "lar", "x"]
),
)
eq_(
parsed.undeclared_identifiers,
set(["x", "q", "foo", "gah", "blah"]),
)
parsed = ast.PythonCode("x + 5 * (y-z)", **exception_kwargs)
assert parsed.undeclared_identifiers == set(["x", "y", "z"])
assert parsed.declared_identifiers == set()
def test_locate_identifiers_2(self):
code = """
import foobar
from lala import hoho, yaya
import bleep as foo
result = []
data = get_data()
for x in data:
result.append(x+7)
"""
parsed = ast.PythonCode(code, **exception_kwargs)
eq_(parsed.undeclared_identifiers, set(["get_data"]))
eq_(
parsed.declared_identifiers,
set(["result", "data", "x", "hoho", "foobar", "foo", "yaya"]),
)
def test_locate_identifiers_3(self):
"""test that combination assignment/expressions
of the same identifier log the ident as 'undeclared'"""
code = """
x = x + 5
for y in range(1, y):
("hi",)
[z for z in range(1, z)]
(q for q in range (1, q))
"""
parsed = ast.PythonCode(code, **exception_kwargs)
eq_(parsed.undeclared_identifiers, set(["x", "y", "z", "q", "range"]))
def test_locate_identifiers_4(self):
code = """
x = 5
(y, )
def mydef(mydefarg):
print("mda is", mydefarg)
"""
parsed = ast.PythonCode(code, **exception_kwargs)
eq_(parsed.undeclared_identifiers, set(["y"]))
eq_(parsed.declared_identifiers, set(["mydef", "x"]))
def test_locate_identifiers_5(self):
code = """
try:
print(x)
except:
print(y)
"""
parsed = ast.PythonCode(code, **exception_kwargs)
eq_(parsed.undeclared_identifiers, set(["x", "y"]))
def test_locate_identifiers_6(self):
code = """
def foo():
return bar()
"""
parsed = ast.PythonCode(code, **exception_kwargs)
eq_(parsed.undeclared_identifiers, set(["bar"]))
code = """
def lala(x, y):
return x, y, z
print(x)
"""
parsed = ast.PythonCode(code, **exception_kwargs)
eq_(parsed.undeclared_identifiers, set(["z", "x"]))
eq_(parsed.declared_identifiers, set(["lala"]))
code = """
def lala(x, y):
def hoho():
def bar():
z = 7
print(z)
"""
parsed = ast.PythonCode(code, **exception_kwargs)
eq_(parsed.undeclared_identifiers, set(["z"]))
eq_(parsed.declared_identifiers, set(["lala"]))
def test_locate_identifiers_7(self):
code = """
import foo.bar
"""
parsed = ast.PythonCode(code, **exception_kwargs)
eq_(parsed.declared_identifiers, set(["foo"]))
eq_(parsed.undeclared_identifiers, set())
def test_locate_identifiers_8(self):
code = """
class Hi(object):
foo = 7
def hoho(self):
x = 5
"""
parsed = ast.PythonCode(code, **exception_kwargs)
eq_(parsed.declared_identifiers, set(["Hi"]))
eq_(parsed.undeclared_identifiers, set())
def test_locate_identifiers_9(self):
code = """
",".join([t for t in ("a", "b", "c")])
"""
parsed = ast.PythonCode(code, **exception_kwargs)
eq_(parsed.declared_identifiers, set(["t"]))
eq_(parsed.undeclared_identifiers, set(["t"]))
code = """
[(val, name) for val, name in x]
"""
parsed = ast.PythonCode(code, **exception_kwargs)
eq_(parsed.declared_identifiers, set(["val", "name"]))
eq_(parsed.undeclared_identifiers, set(["val", "name", "x"]))
def test_locate_identifiers_10(self):
code = """
lambda q: q + 5
"""
parsed = ast.PythonCode(code, **exception_kwargs)
eq_(parsed.declared_identifiers, set())
eq_(parsed.undeclared_identifiers, set())
def test_locate_identifiers_11(self):
code = """
def x(q):
return q + 5
"""
parsed = ast.PythonCode(code, **exception_kwargs)
eq_(parsed.declared_identifiers, set(["x"]))
eq_(parsed.undeclared_identifiers, set())
def test_locate_identifiers_12(self):
code = """
def foo():
s = 1
def bar():
t = s
"""
parsed = ast.PythonCode(code, **exception_kwargs)
eq_(parsed.declared_identifiers, set(["foo"]))
eq_(parsed.undeclared_identifiers, set())
def test_locate_identifiers_13(self):
code = """
def foo():
class Bat(object):
pass
Bat
"""
parsed = ast.PythonCode(code, **exception_kwargs)
eq_(parsed.declared_identifiers, set(["foo"]))
eq_(parsed.undeclared_identifiers, set())
def test_locate_identifiers_14(self):
code = """
def foo():
class Bat(object):
pass
Bat
print(Bat)
"""
parsed = ast.PythonCode(code, **exception_kwargs)
eq_(parsed.declared_identifiers, set(["foo"]))
eq_(parsed.undeclared_identifiers, set(["Bat"]))
@requires_python_2
def test_locate_identifiers_15(self):
code = """
def t1((x,y)):
return x+5, y+4
t2 = lambda (x,y):(x+5, y+4)
"""
parsed = ast.PythonCode(code, **exception_kwargs)
eq_(parsed.declared_identifiers, set(["t1", "t2"]))
eq_(parsed.undeclared_identifiers, set())
def test_locate_identifiers_16(self):
code = """
try:
print(x)
except Exception as e:
print(y)
"""
parsed = ast.PythonCode(code, **exception_kwargs)
eq_(parsed.undeclared_identifiers, set(["x", "y", "Exception"]))
def test_locate_identifiers_17(self):
code = """
try:
print(x)
except (Foo, Bar) as e:
print(y)
"""
parsed = ast.PythonCode(code, **exception_kwargs)
eq_(parsed.undeclared_identifiers, set(["x", "y", "Foo", "Bar"]))
def test_no_global_imports(self):
code = """
from foo import *
import x as bar
"""
self.assertRaises(
exceptions.CompileException,
ast.PythonCode,
code,
**exception_kwargs
)
def test_python_fragment(self):
parsed = ast.PythonFragment("for x in foo:", **exception_kwargs)
eq_(parsed.declared_identifiers, set(["x"]))
eq_(parsed.undeclared_identifiers, set(["foo"]))
parsed = ast.PythonFragment("try:", **exception_kwargs)
if compat.py3k:
parsed = ast.PythonFragment(
"except MyException as e:", **exception_kwargs
)
else:
parsed = ast.PythonFragment(
"except MyException, e:", **exception_kwargs
)
eq_(parsed.declared_identifiers, set(["e"]))
eq_(parsed.undeclared_identifiers, set(["MyException"]))
def test_argument_list(self):
parsed = ast.ArgumentList(
"3, 5, 'hi', x+5, " "context.get('lala')", **exception_kwargs
)
eq_(parsed.undeclared_identifiers, set(["x", "context"]))
eq_(
[x for x in parsed.args],
["3", "5", "'hi'", "(x + 5)", "context.get('lala')"],
)
parsed = ast.ArgumentList("h", **exception_kwargs)
eq_(parsed.args, ["h"])
def test_function_decl(self):
"""test getting the arguments from a function"""
code = "def foo(a, b, c=None, d='hi', e=x, f=y+7):pass"
parsed = ast.FunctionDecl(code, **exception_kwargs)
eq_(parsed.funcname, "foo")
eq_(parsed.argnames, ["a", "b", "c", "d", "e", "f"])
eq_(parsed.kwargnames, [])
def test_function_decl_2(self):
"""test getting the arguments from a function"""
code = "def foo(a, b, c=None, *args, **kwargs):pass"
parsed = ast.FunctionDecl(code, **exception_kwargs)
eq_(parsed.funcname, "foo")
eq_(parsed.argnames, ["a", "b", "c", "args"])
eq_(parsed.kwargnames, ["kwargs"])
@requires_python_3
def test_function_decl_3(self):
"""test getting the arguments from a fancy py3k function"""
code = "def foo(a, b, *c, d, e, **f):pass"
parsed = ast.FunctionDecl(code, **exception_kwargs)
eq_(parsed.funcname, "foo")
eq_(parsed.argnames, ["a", "b", "c"])
eq_(parsed.kwargnames, ["d", "e", "f"])
def test_expr_generate(self):
"""test the round trip of expressions to AST back to python source"""
x = 1
y = 2
class F(object):
def bar(self, a, b):
return a + b
def lala(arg):
return "blah" + arg
local_dict = dict(x=x, y=y, foo=F(), lala=lala)
code = "str((x+7*y) / foo.bar(5,6)) + lala('ho')"
astnode = pyparser.parse(code)
newcode = pyparser.ExpressionGenerator(astnode).value()
eq_(eval(code, local_dict), eval(newcode, local_dict))
a = ["one", "two", "three"]
hoho = {"somevalue": "asdf"}
g = [1, 2, 3, 4, 5]
local_dict = dict(a=a, hoho=hoho, g=g)
code = (
"a[2] + hoho['somevalue'] + "
"repr(g[3:5]) + repr(g[3:]) + repr(g[:5])"
)
astnode = pyparser.parse(code)
newcode = pyparser.ExpressionGenerator(astnode).value()
eq_(eval(code, local_dict), eval(newcode, local_dict))
local_dict = {"f": lambda: 9, "x": 7}
code = "x+f()"
astnode = pyparser.parse(code)
newcode = pyparser.ExpressionGenerator(astnode).value()
eq_(eval(code, local_dict), eval(newcode, local_dict))
for code in [
"repr({'x':7,'y':18})",
"repr([])",
"repr({})",
"repr([{3:[]}])",
"repr({'x':37*2 + len([6,7,8])})",
"repr([1, 2, {}, {'x':'7'}])",
"repr({'x':-1})",
"repr(((1,2,3), (4,5,6)))",
"repr(1 and 2 and 3 and 4)",
"repr(True and False or 55)",
"repr(lambda x, y: (x + y))",
"repr(lambda *arg, **kw: arg, kw)",
"repr(1 & 2 | 3)",
"repr(3//5)",
"repr(3^5)",
"repr([q.endswith('e') for q in " "['one', 'two', 'three']])",
"repr([x for x in (5,6,7) if x == 6])",
"repr(not False)",
]:
local_dict = {}
astnode = pyparser.parse(code)
newcode = pyparser.ExpressionGenerator(astnode).value()
if "lambda" in code:
eq_(code, newcode)
else:
eq_(eval(code, local_dict), eval(newcode, local_dict))
|
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Nova base exception handling.
Includes decorator for re-raising Nova-type exceptions.
SHOULD include dedicated exception logging.
"""
import functools
import sys
from oslo.config import cfg
import webob.exc
from nova.openstack.common import excutils
from nova.openstack.common.gettextutils import _
from nova.openstack.common import log as logging
from nova import safe_utils
LOG = logging.getLogger(__name__)
exc_log_opts = [
cfg.BoolOpt('fatal_exception_format_errors',
default=False,
help='Make exception message format errors fatal'),
]
CONF = cfg.CONF
CONF.register_opts(exc_log_opts)
class ConvertedException(webob.exc.WSGIHTTPException):
def __init__(self, code=0, title="", explanation=""):
self.code = code
self.title = title
self.explanation = explanation
super(ConvertedException, self).__init__()
def _cleanse_dict(original):
"""Strip all admin_password, new_pass, rescue_pass keys from a dict."""
return dict((k, v) for k, v in original.iteritems() if "_pass" not in k)
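# A quick illustration of _cleanse_dict (hypothetical values): any key
# containing "_pass" is dropped, everything else is kept unchanged.
#
#     _cleanse_dict({'admin_password': 'x', 'new_pass': 'y', 'name': 'vm1'})
#     # -> {'name': 'vm1'}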
def wrap_exception(notifier=None, get_notifier=None):
"""This decorator wraps a method to catch any exceptions that may
get thrown. It logs the exception and optionally sends it to the
notification system.
"""
def inner(f):
def wrapped(self, context, *args, **kw):
# Don't store self or context in the payload, it now seems to
# contain confidential information.
try:
return f(self, context, *args, **kw)
except Exception as e:
with excutils.save_and_reraise_exception():
if notifier or get_notifier:
payload = dict(exception=e)
call_dict = safe_utils.getcallargs(f, context,
*args, **kw)
cleansed = _cleanse_dict(call_dict)
payload.update({'args': cleansed})
# If f has multiple decorators, they must use
# functools.wraps to ensure the name is
# propagated.
event_type = f.__name__
(notifier or get_notifier()).error(context,
event_type,
payload)
return functools.wraps(f)(wrapped)
return inner
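# Minimal usage sketch for wrap_exception (illustrative only, not part of
# this module). The decorated method must accept (self, context, ...) and
# the notifier is assumed to expose error(context, event_type, payload),
# as used above; my_notifier and ExampleManager are hypothetical names.
#
#     class ExampleManager(object):
#         @wrap_exception(notifier=my_notifier)
#         def resize_instance(self, context, instance):
#             raise RuntimeError('boom')
#
# The RuntimeError is logged, reported through my_notifier.error() with a
# cleansed copy of the call arguments, and then re-raised to the caller.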
class NovaException(Exception):
"""Base Nova Exception
To correctly use this class, inherit from it and define
a 'msg_fmt' property. That msg_fmt will get printf'd
with the keyword arguments provided to the constructor.
"""
msg_fmt = _("An unknown exception occurred.")
code = 500
headers = {}
safe = False
def __init__(self, message=None, **kwargs):
self.kwargs = kwargs
if 'code' not in self.kwargs:
try:
self.kwargs['code'] = self.code
except AttributeError:
pass
if not message:
try:
message = self.msg_fmt % kwargs
except Exception:
exc_info = sys.exc_info()
# kwargs doesn't match a variable in the message
# log the issue and the kwargs
LOG.exception(_('Exception in string format operation'))
for name, value in kwargs.iteritems():
LOG.error("%s: %s" % (name, value))
if CONF.fatal_exception_format_errors:
raise exc_info[0], exc_info[1], exc_info[2]
else:
# at least get the core message out if something happened
message = self.msg_fmt
super(NovaException, self).__init__(message)
def format_message(self):
# NOTE(mrodden): use the first argument to the python Exception object
# which should be our full NovaException message, (see __init__)
return self.args[0]
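# Usage sketch for NovaException subclasses (illustrative only): override
# msg_fmt (and optionally code) and pass the format keywords to the
# constructor; format_message() then returns the interpolated string.
# ExampleVolumeBusy is a hypothetical subclass, not part of this module.
#
#     class ExampleVolumeBusy(NovaException):
#         msg_fmt = _("Volume %(volume_id)s is busy.")
#         code = 409
#
#     exc = ExampleVolumeBusy(volume_id='vol-123')
#     exc.format_message()   # -> "Volume vol-123 is busy."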
class EncryptionFailure(NovaException):
msg_fmt = _("Failed to encrypt text: %(reason)s")
class DecryptionFailure(NovaException):
msg_fmt = _("Failed to decrypt text: %(reason)s")
class VirtualInterfaceCreateException(NovaException):
msg_fmt = _("Virtual Interface creation failed")
class VirtualInterfaceMacAddressException(NovaException):
msg_fmt = _("Creation of virtual interface with "
"unique mac address failed")
class GlanceConnectionFailed(NovaException):
msg_fmt = _("Connection to glance host %(host)s:%(port)s failed: "
"%(reason)s")
class NotAuthorized(NovaException):
ec2_code = 'AuthFailure'
msg_fmt = _("Not authorized.")
code = 403
class AdminRequired(NotAuthorized):
msg_fmt = _("User does not have admin privileges")
class PolicyNotAuthorized(NotAuthorized):
msg_fmt = _("Policy doesn't allow %(action)s to be performed.")
class ImageNotActive(NovaException):
# NOTE(jruzicka): IncorrectState is used for volumes only in EC2,
# but it still seems like the most appropriate option.
ec2_code = 'IncorrectState'
msg_fmt = _("Image %(image_id)s is not active.")
class ImageNotAuthorized(NovaException):
msg_fmt = _("Not authorized for image %(image_id)s.")
class Invalid(NovaException):
msg_fmt = _("Unacceptable parameters.")
code = 400
class InvalidBDM(Invalid):
msg_fmt = _("Block Device Mapping is Invalid.")
class InvalidBDMSnapshot(InvalidBDM):
msg_fmt = _("Block Device Mapping is Invalid: "
"failed to get snapshot %(id)s.")
class InvalidBDMVolume(InvalidBDM):
msg_fmt = _("Block Device Mapping is Invalid: "
"failed to get volume %(id)s.")
class InvalidBDMImage(InvalidBDM):
msg_fmt = _("Block Device Mapping is Invalid: "
"failed to get image %(id)s.")
class InvalidBDMBootSequence(InvalidBDM):
msg_fmt = _("Block Device Mapping is Invalid: "
"Boot sequence for the instance "
"and image/block device mapping "
"combination is not valid.")
class InvalidBDMLocalsLimit(InvalidBDM):
msg_fmt = _("Block Device Mapping is Invalid: "
"You specified more local devices than the "
"limit allows")
class InvalidBDMEphemeralSize(InvalidBDM):
msg_fmt = _("Ephemeral disks requested are larger than "
"the instance type allows.")
class InvalidBDMSwapSize(InvalidBDM):
msg_fmt = _("Swap drive requested is larger than instance type allows.")
class InvalidBDMFormat(InvalidBDM):
msg_fmt = _("Block Device Mapping is Invalid: "
"%(details)s")
class InvalidBDMForLegacy(InvalidBDM):
msg_fmt = _("Block Device Mapping cannot "
"be converted to legacy format. ")
class InvalidAttribute(Invalid):
msg_fmt = _("Attribute not supported: %(attr)s")
class ValidationError(Invalid):
msg_fmt = "%(detail)s"
class VolumeUnattached(Invalid):
ec2_code = 'IncorrectState'
msg_fmt = _("Volume %(volume_id)s is not attached to anything")
class VolumeNotCreated(NovaException):
msg_fmt = _("Volume %(volume_id)s did not finish being created"
" even after we waited %(seconds)s seconds or %(attempts)s"
" attempts.")
class InvalidKeypair(Invalid):
ec2_code = 'InvalidKeyPair.Format'
msg_fmt = _("Keypair data is invalid: %(reason)s")
class InvalidRequest(Invalid):
msg_fmt = _("The request is invalid.")
class InvalidInput(Invalid):
msg_fmt = _("Invalid input received: %(reason)s")
class InvalidVolume(Invalid):
ec2_code = 'UnsupportedOperation'
msg_fmt = _("Invalid volume: %(reason)s")
class InvalidVolumeAccessMode(Invalid):
msg_fmt = _("Invalid volume access mode") + ": %(access_mode)s"
class InvalidMetadata(Invalid):
msg_fmt = _("Invalid metadata: %(reason)s")
class InvalidMetadataSize(Invalid):
msg_fmt = _("Invalid metadata size: %(reason)s")
class InvalidPortRange(Invalid):
ec2_code = 'InvalidParameterValue'
msg_fmt = _("Invalid port range %(from_port)s:%(to_port)s. %(msg)s")
class InvalidIpProtocol(Invalid):
msg_fmt = _("Invalid IP protocol %(protocol)s.")
class InvalidContentType(Invalid):
msg_fmt = _("Invalid content type %(content_type)s.")
class InvalidCidr(Invalid):
msg_fmt = _("Invalid cidr %(cidr)s.")
class InvalidUnicodeParameter(Invalid):
msg_fmt = _("Invalid Parameter: "
"Unicode is not supported by the current database.")
# Cannot be templated as the error syntax varies.
# msg needs to be constructed when raised.
class InvalidParameterValue(Invalid):
ec2_code = 'InvalidParameterValue'
msg_fmt = _("%(err)s")
class InvalidAggregateAction(Invalid):
msg_fmt = _("Cannot perform action '%(action)s' on aggregate "
"%(aggregate_id)s. Reason: %(reason)s.")
class InvalidGroup(Invalid):
msg_fmt = _("Group not valid. Reason: %(reason)s")
class InvalidSortKey(Invalid):
msg_fmt = _("Sort key supplied was not valid.")
class InstanceInvalidState(Invalid):
msg_fmt = _("Instance %(instance_uuid)s in %(attr)s %(state)s. Cannot "
"%(method)s while the instance is in this state.")
class InstanceNotRunning(Invalid):
msg_fmt = _("Instance %(instance_id)s is not running.")
class InstanceNotInRescueMode(Invalid):
msg_fmt = _("Instance %(instance_id)s is not in rescue mode")
class InstanceNotRescuable(Invalid):
msg_fmt = _("Instance %(instance_id)s cannot be rescued: %(reason)s")
class InstanceNotReady(Invalid):
msg_fmt = _("Instance %(instance_id)s is not ready")
class InstanceSuspendFailure(Invalid):
msg_fmt = _("Failed to suspend instance: %(reason)s")
class InstanceResumeFailure(Invalid):
msg_fmt = _("Failed to resume instance: %(reason)s")
class InstancePowerOnFailure(Invalid):
msg_fmt = _("Failed to power on instance: %(reason)s")
class InstancePowerOffFailure(Invalid):
msg_fmt = _("Failed to power off instance: %(reason)s")
class InstanceRebootFailure(Invalid):
msg_fmt = _("Failed to reboot instance: %(reason)s")
class InstanceTerminationFailure(Invalid):
msg_fmt = _("Failed to terminate instance: %(reason)s")
class InstanceDeployFailure(Invalid):
msg_fmt = _("Failed to deploy instance: %(reason)s")
class MultiplePortsNotApplicable(Invalid):
msg_fmt = _("Failed to launch instances: %(reason)s")
class ServiceUnavailable(Invalid):
msg_fmt = _("Service is unavailable at this time.")
class ComputeResourcesUnavailable(ServiceUnavailable):
msg_fmt = _("Insufficient compute resources: %(reason)s.")
class HypervisorUnavailable(NovaException):
msg_fmt = _("Connection to the hypervisor is broken on host: %(host)s")
class ComputeServiceUnavailable(ServiceUnavailable):
msg_fmt = _("Compute service of %(host)s is unavailable at this time.")
class ComputeServiceInUse(NovaException):
msg_fmt = _("Compute service of %(host)s is still in use.")
class UnableToMigrateToSelf(Invalid):
msg_fmt = _("Unable to migrate instance (%(instance_id)s) "
"to current host (%(host)s).")
class InvalidHypervisorType(Invalid):
msg_fmt = _("The supplied hypervisor type of is invalid.")
class DestinationHypervisorTooOld(Invalid):
msg_fmt = _("The instance requires a newer hypervisor version than "
"has been provided.")
class DestinationDiskExists(Invalid):
msg_fmt = _("The supplied disk path (%(path)s) already exists, "
"it is expected not to exist.")
class InvalidDevicePath(Invalid):
msg_fmt = _("The supplied device path (%(path)s) is invalid.")
class DevicePathInUse(Invalid):
msg_fmt = _("The supplied device path (%(path)s) is in use.")
code = 409
class DeviceIsBusy(Invalid):
msg_fmt = _("The supplied device (%(device)s) is busy.")
class InvalidCPUInfo(Invalid):
msg_fmt = _("Unacceptable CPU info: %(reason)s")
class InvalidIpAddressError(Invalid):
msg_fmt = _("%(address)s is not a valid IP v4/6 address.")
class InvalidVLANTag(Invalid):
msg_fmt = _("VLAN tag is not appropriate for the port group "
"%(bridge)s. Expected VLAN tag is %(tag)s, "
"but the one associated with the port group is %(pgroup)s.")
class InvalidVLANPortGroup(Invalid):
msg_fmt = _("vSwitch which contains the port group %(bridge)s is "
"not associated with the desired physical adapter. "
"Expected vSwitch is %(expected)s, but the one associated "
"is %(actual)s.")
class InvalidDiskFormat(Invalid):
msg_fmt = _("Disk format %(disk_format)s is not acceptable")
class ImageUnacceptable(Invalid):
msg_fmt = _("Image %(image_id)s is unacceptable: %(reason)s")
class InstanceUnacceptable(Invalid):
msg_fmt = _("Instance %(instance_id)s is unacceptable: %(reason)s")
class InvalidEc2Id(Invalid):
msg_fmt = _("Ec2 id %(ec2_id)s is unacceptable.")
class InvalidUUID(Invalid):
msg_fmt = _("Expected a uuid but received %(uuid)s.")
class InvalidID(Invalid):
msg_fmt = _("Invalid ID received %(id)s.")
class ConstraintNotMet(NovaException):
msg_fmt = _("Constraint not met.")
code = 412
class NotFound(NovaException):
msg_fmt = _("Resource could not be found.")
code = 404
class AgentBuildNotFound(NotFound):
msg_fmt = _("No agent-build associated with id %(id)s.")
class AgentBuildExists(NovaException):
msg_fmt = _("Agent-build with hypervisor %(hypervisor)s os %(os)s "
"architecture %(architecture)s exists.")
class VolumeNotFound(NotFound):
ec2_code = 'InvalidVolumeID.NotFound'
msg_fmt = _("Volume %(volume_id)s could not be found.")
class VolumeBDMNotFound(NotFound):
msg_fmt = _("No volume Block Device Mapping with id %(volume_id)s.")
class SnapshotNotFound(NotFound):
ec2_code = 'InvalidSnapshotID.NotFound'
msg_fmt = _("Snapshot %(snapshot_id)s could not be found.")
class DiskNotFound(NotFound):
msg_fmt = _("No disk at %(location)s")
class VolumeDriverNotFound(NotFound):
msg_fmt = _("Could not find a handler for %(driver_type)s volume.")
class InvalidImageRef(Invalid):
msg_fmt = _("Invalid image href %(image_href)s.")
class AutoDiskConfigDisabledByImage(Invalid):
msg_fmt = _("Requested image %(image)s "
"has automatic disk resize disabled.")
class ImageNotFound(NotFound):
msg_fmt = _("Image %(image_id)s could not be found.")
class PreserveEphemeralNotSupported(Invalid):
msg_fmt = _("The current driver does not support "
"preserving ephemeral partitions.")
# NOTE(jruzicka): ImageNotFound is not a valid EC2 error code.
class ImageNotFoundEC2(ImageNotFound):
msg_fmt = _("Image %(image_id)s could not be found. The nova EC2 API "
"assigns image ids dynamically when they are listed for the "
"first time. Have you listed image ids since adding this "
"image?")
class ProjectNotFound(NotFound):
msg_fmt = _("Project %(project_id)s could not be found.")
class StorageRepositoryNotFound(NotFound):
msg_fmt = _("Cannot find SR to read/write VDI.")
class NetworkDuplicated(Invalid):
msg_fmt = _("Network %(network_id)s is duplicated.")
class NetworkInUse(NovaException):
msg_fmt = _("Network %(network_id)s is still in use.")
class NetworkNotCreated(NovaException):
msg_fmt = _("%(req)s is required to create a network.")
class NetworkNotFound(NotFound):
msg_fmt = _("Network %(network_id)s could not be found.")
class PortNotFound(NotFound):
msg_fmt = _("Port id %(port_id)s could not be found.")
class NetworkNotFoundForBridge(NetworkNotFound):
msg_fmt = _("Network could not be found for bridge %(bridge)s")
class NetworkNotFoundForUUID(NetworkNotFound):
msg_fmt = _("Network could not be found for uuid %(uuid)s")
class NetworkNotFoundForCidr(NetworkNotFound):
msg_fmt = _("Network could not be found with cidr %(cidr)s.")
class NetworkNotFoundForInstance(NetworkNotFound):
msg_fmt = _("Network could not be found for instance %(instance_id)s.")
class NoNetworksFound(NotFound):
msg_fmt = _("No networks defined.")
class NoMoreNetworks(NovaException):
msg_fmt = _("No more available networks.")
class NetworkNotFoundForProject(NotFound):
msg_fmt = _("Either network uuid %(network_uuid)s is not present or "
"is not assigned to the project %(project_id)s.")
class NetworkAmbiguous(Invalid):
msg_fmt = _("More than one possible network found. Specify "
"network ID(s) to select which one(s) to connect to,")
class DatastoreNotFound(NotFound):
msg_fmt = _("Could not find the datastore reference(s) which the VM uses.")
class PortInUse(Invalid):
msg_fmt = _("Port %(port_id)s is still in use.")
class PortNotUsable(Invalid):
msg_fmt = _("Port %(port_id)s not usable for instance %(instance)s.")
class PortNotFree(Invalid):
msg_fmt = _("No free port available for instance %(instance)s.")
class FixedIpExists(NovaException):
msg_fmt = _("Fixed ip %(address)s already exists.")
class FixedIpNotFound(NotFound):
msg_fmt = _("No fixed IP associated with id %(id)s.")
class FixedIpNotFoundForAddress(FixedIpNotFound):
msg_fmt = _("Fixed ip not found for address %(address)s.")
class FixedIpNotFoundForInstance(FixedIpNotFound):
msg_fmt = _("Instance %(instance_uuid)s has zero fixed ips.")
class FixedIpNotFoundForNetworkHost(FixedIpNotFound):
msg_fmt = _("Network host %(host)s has zero fixed ips "
"in network %(network_id)s.")
class FixedIpNotFoundForSpecificInstance(FixedIpNotFound):
msg_fmt = _("Instance %(instance_uuid)s doesn't have fixed ip '%(ip)s'.")
class FixedIpNotFoundForNetwork(FixedIpNotFound):
msg_fmt = _("Fixed IP address (%(address)s) does not exist in "
"network (%(network_uuid)s).")
class FixedIpAlreadyInUse(NovaException):
msg_fmt = _("Fixed IP address %(address)s is already in use on instance "
"%(instance_uuid)s.")
class FixedIpAssociatedWithMultipleInstances(NovaException):
msg_fmt = _("More than one instance is associated with fixed ip address "
"'%(address)s'.")
class FixedIpInvalid(Invalid):
msg_fmt = _("Fixed IP address %(address)s is invalid.")
class NoMoreFixedIps(NovaException):
ec2_code = 'UnsupportedOperation'
msg_fmt = _("Zero fixed ips available.")
class NoFixedIpsDefined(NotFound):
msg_fmt = _("Zero fixed ips could be found.")
class FloatingIpExists(NovaException):
msg_fmt = _("Floating ip %(address)s already exists.")
class FloatingIpNotFound(NotFound):
ec2_code = "UnsupportedOperation"
msg_fmt = _("Floating ip not found for id %(id)s.")
class FloatingIpDNSExists(Invalid):
msg_fmt = _("The DNS entry %(name)s already exists in domain %(domain)s.")
class FloatingIpNotFoundForAddress(FloatingIpNotFound):
msg_fmt = _("Floating ip not found for address %(address)s.")
class FloatingIpNotFoundForHost(FloatingIpNotFound):
msg_fmt = _("Floating ip not found for host %(host)s.")
class FloatingIpMultipleFoundForAddress(NovaException):
msg_fmt = _("Multiple floating ips are found for address %(address)s.")
class FloatingIpPoolNotFound(NotFound):
msg_fmt = _("Floating ip pool not found.")
safe = True
class NoMoreFloatingIps(FloatingIpNotFound):
msg_fmt = _("Zero floating ips available.")
safe = True
class FloatingIpAssociated(NovaException):
ec2_code = "UnsupportedOperation"
msg_fmt = _("Floating ip %(address)s is associated.")
class FloatingIpNotAssociated(NovaException):
msg_fmt = _("Floating ip %(address)s is not associated.")
class NoFloatingIpsDefined(NotFound):
msg_fmt = _("Zero floating ips exist.")
class NoFloatingIpInterface(NotFound):
ec2_code = "UnsupportedOperation"
msg_fmt = _("Interface %(interface)s not found.")
class CannotDisassociateAutoAssignedFloatingIP(NovaException):
ec2_code = "UnsupportedOperation"
msg_fmt = _("Cannot disassociate auto assigned floating ip")
class KeypairNotFound(NotFound):
ec2_code = 'InvalidKeyPair.NotFound'
msg_fmt = _("Keypair %(name)s not found for user %(user_id)s")
class ServiceNotFound(NotFound):
msg_fmt = _("Service %(service_id)s could not be found.")
class ServiceBinaryExists(NovaException):
msg_fmt = _("Service with host %(host)s binary %(binary)s exists.")
class ServiceTopicExists(NovaException):
msg_fmt = _("Service with host %(host)s topic %(topic)s exists.")
class HostNotFound(NotFound):
msg_fmt = _("Host %(host)s could not be found.")
class ComputeHostNotFound(HostNotFound):
msg_fmt = _("Compute host %(host)s could not be found.")
class HostBinaryNotFound(NotFound):
msg_fmt = _("Could not find binary %(binary)s on host %(host)s.")
class InvalidReservationExpiration(Invalid):
msg_fmt = _("Invalid reservation expiration %(expire)s.")
class InvalidQuotaValue(Invalid):
msg_fmt = _("Change would make usage less than 0 for the following "
"resources: %(unders)s")
class QuotaNotFound(NotFound):
msg_fmt = _("Quota could not be found")
class QuotaExists(NovaException):
msg_fmt = _("Quota exists for project %(project_id)s, "
"resource %(resource)s")
class QuotaResourceUnknown(QuotaNotFound):
msg_fmt = _("Unknown quota resources %(unknown)s.")
class ProjectUserQuotaNotFound(QuotaNotFound):
msg_fmt = _("Quota for user %(user_id)s in project %(project_id)s "
"could not be found.")
class ProjectQuotaNotFound(QuotaNotFound):
msg_fmt = _("Quota for project %(project_id)s could not be found.")
class QuotaClassNotFound(QuotaNotFound):
msg_fmt = _("Quota class %(class_name)s could not be found.")
class QuotaUsageNotFound(QuotaNotFound):
msg_fmt = _("Quota usage for project %(project_id)s could not be found.")
class ReservationNotFound(QuotaNotFound):
msg_fmt = _("Quota reservation %(uuid)s could not be found.")
class OverQuota(NovaException):
msg_fmt = _("Quota exceeded for resources: %(overs)s")
class SecurityGroupNotFound(NotFound):
msg_fmt = _("Security group %(security_group_id)s not found.")
class SecurityGroupNotFoundForProject(SecurityGroupNotFound):
msg_fmt = _("Security group %(security_group_id)s not found "
"for project %(project_id)s.")
class SecurityGroupNotFoundForRule(SecurityGroupNotFound):
msg_fmt = _("Security group with rule %(rule_id)s not found.")
class SecurityGroupExists(Invalid):
ec2_code = 'InvalidGroup.Duplicate'
msg_fmt = _("Security group %(security_group_name)s already exists "
"for project %(project_id)s.")
class SecurityGroupExistsForInstance(Invalid):
msg_fmt = _("Security group %(security_group_id)s is already associated"
" with the instance %(instance_id)s")
class SecurityGroupNotExistsForInstance(Invalid):
msg_fmt = _("Security group %(security_group_id)s is not associated with"
" the instance %(instance_id)s")
class SecurityGroupDefaultRuleNotFound(Invalid):
msg_fmt = _("Security group default rule (%rule_id)s not found.")
class SecurityGroupCannotBeApplied(Invalid):
msg_fmt = _("Network requires port_security_enabled and subnet associated"
" in order to apply security groups.")
class SecurityGroupRuleExists(Invalid):
ec2_code = 'InvalidPermission.Duplicate'
msg_fmt = _("Rule already exists in group: %(rule)s")
class NoUniqueMatch(NovaException):
msg_fmt = _("No Unique Match Found.")
code = 409
class MigrationNotFound(NotFound):
msg_fmt = _("Migration %(migration_id)s could not be found.")
class MigrationNotFoundByStatus(MigrationNotFound):
msg_fmt = _("Migration not found for instance %(instance_id)s "
"with status %(status)s.")
class ConsolePoolNotFound(NotFound):
msg_fmt = _("Console pool %(pool_id)s could not be found.")
class ConsolePoolExists(NovaException):
msg_fmt = _("Console pool with host %(host)s, console_type "
"%(console_type)s and compute_host %(compute_host)s "
"already exists.")
class ConsolePoolNotFoundForHostType(NotFound):
msg_fmt = _("Console pool of type %(console_type)s "
"for compute host %(compute_host)s "
"on proxy host %(host)s not found.")
class ConsoleNotFound(NotFound):
msg_fmt = _("Console %(console_id)s could not be found.")
class ConsoleNotFoundForInstance(ConsoleNotFound):
msg_fmt = _("Console for instance %(instance_uuid)s could not be found.")
class ConsoleNotFoundInPoolForInstance(ConsoleNotFound):
msg_fmt = _("Console for instance %(instance_uuid)s "
"in pool %(pool_id)s could not be found.")
class ConsoleTypeInvalid(Invalid):
msg_fmt = _("Invalid console type %(console_type)s")
class ConsoleTypeUnavailable(Invalid):
msg_fmt = _("Unavailable console type %(console_type)s.")
class ConsolePortRangeExhausted(NovaException):
msg_fmt = _("The console port range %(min_port)d-%(max_port)d is "
"exhausted.")
class FlavorNotFound(NotFound):
msg_fmt = _("Flavor %(flavor_id)s could not be found.")
class FlavorNotFoundByName(FlavorNotFound):
msg_fmt = _("Flavor with name %(flavor_name)s could not be found.")
class FlavorAccessNotFound(NotFound):
msg_fmt = _("Flavor access not found for %(flavor_id)s / "
"%(project_id)s combination.")
class CellNotFound(NotFound):
msg_fmt = _("Cell %(cell_name)s doesn't exist.")
class CellExists(NovaException):
msg_fmt = _("Cell with name %(name)s already exists.")
class CellRoutingInconsistency(NovaException):
msg_fmt = _("Inconsistency in cell routing: %(reason)s")
class CellServiceAPIMethodNotFound(NotFound):
msg_fmt = _("Service API method not found: %(detail)s")
class CellTimeout(NotFound):
msg_fmt = _("Timeout waiting for response from cell")
class CellMaxHopCountReached(NovaException):
msg_fmt = _("Cell message has reached maximum hop count: %(hop_count)s")
class NoCellsAvailable(NovaException):
msg_fmt = _("No cells available matching scheduling criteria.")
class CellsUpdateUnsupported(NovaException):
msg_fmt = _("Cannot update cells configuration file.")
class InstanceUnknownCell(NotFound):
msg_fmt = _("Cell is not known for instance %(instance_uuid)s")
class SchedulerHostFilterNotFound(NotFound):
msg_fmt = _("Scheduler Host Filter %(filter_name)s could not be found.")
class FlavorExtraSpecsNotFound(NotFound):
msg_fmt = _("Flavor %(flavor_id)s has no extra specs with "
"key %(extra_specs_key)s.")
class ComputeHostMetricNotFound(NotFound):
msg_fmt = _("Metric %(name)s could not be found on the compute "
"host node %(host)s.%(node)s.")
class FileNotFound(NotFound):
msg_fmt = _("File %(file_path)s could not be found.")
class NoFilesFound(NotFound):
msg_fmt = _("Zero files could be found.")
class SwitchNotFoundForNetworkAdapter(NotFound):
msg_fmt = _("Virtual switch associated with the "
"network adapter %(adapter)s not found.")
class NetworkAdapterNotFound(NotFound):
msg_fmt = _("Network adapter %(adapter)s could not be found.")
class ClassNotFound(NotFound):
msg_fmt = _("Class %(class_name)s could not be found: %(exception)s")
class NotAllowed(NovaException):
msg_fmt = _("Action not allowed.")
class ImageRotationNotAllowed(NovaException):
msg_fmt = _("Rotation is not allowed for snapshots")
class RotationRequiredForBackup(NovaException):
msg_fmt = _("Rotation param is required for backup image_type")
class KeyPairExists(NovaException):
ec2_code = 'InvalidKeyPair.Duplicate'
msg_fmt = _("Key pair '%(key_name)s' already exists.")
class InstanceExists(NovaException):
msg_fmt = _("Instance %(name)s already exists.")
class FlavorExists(NovaException):
msg_fmt = _("Flavor with name %(name)s already exists.")
class FlavorIdExists(NovaException):
msg_fmt = _("Flavor with ID %(flavor_id)s already exists.")
class FlavorAccessExists(NovaException):
msg_fmt = _("Flavor access already exists for flavor %(flavor_id)s "
"and project %(project_id)s combination.")
class InvalidSharedStorage(NovaException):
msg_fmt = _("%(path)s is not on shared storage: %(reason)s")
class InvalidLocalStorage(NovaException):
msg_fmt = _("%(path)s is not on local storage: %(reason)s")
class MigrationError(NovaException):
msg_fmt = _("Migration error: %(reason)s")
class MigrationPreCheckError(MigrationError):
msg_fmt = _("Migration pre-check error: %(reason)s")
class MalformedRequestBody(NovaException):
msg_fmt = _("Malformed message body: %(reason)s")
# NOTE(johannes): NotFound should only be used when a 404 error is
# appropriate to be returned
class ConfigNotFound(NovaException):
msg_fmt = _("Could not find config at %(path)s")
class PasteAppNotFound(NovaException):
msg_fmt = _("Could not load paste app '%(name)s' from %(path)s")
class CannotResizeToSameFlavor(NovaException):
msg_fmt = _("When resizing, instances must change flavor!")
class ResizeError(NovaException):
msg_fmt = _("Resize error: %(reason)s")
class CannotResizeDisk(NovaException):
msg_fmt = _("Server disk was unable to be resized because: %(reason)s")
class FlavorMemoryTooSmall(NovaException):
msg_fmt = _("Flavor's memory is too small for requested image.")
class FlavorDiskTooSmall(NovaException):
msg_fmt = _("Flavor's disk is too small for requested image.")
class InsufficientFreeMemory(NovaException):
msg_fmt = _("Insufficient free memory on compute node to start %(uuid)s.")
class NoValidHost(NovaException):
msg_fmt = _("No valid host was found. %(reason)s")
class QuotaError(NovaException):
ec2_code = 'ResourceLimitExceeded'
msg_fmt = _("Quota exceeded: code=%(code)s")
code = 413
headers = {'Retry-After': 0}
safe = True
class TooManyInstances(QuotaError):
msg_fmt = _("Quota exceeded for %(overs)s: Requested %(req)s,"
" but already used %(used)d of %(allowed)d %(resource)s")
class FloatingIpLimitExceeded(QuotaError):
msg_fmt = _("Maximum number of floating ips exceeded")
class FixedIpLimitExceeded(QuotaError):
msg_fmt = _("Maximum number of fixed ips exceeded")
class MetadataLimitExceeded(QuotaError):
msg_fmt = _("Maximum number of metadata items exceeds %(allowed)d")
class OnsetFileLimitExceeded(QuotaError):
msg_fmt = _("Personality file limit exceeded")
class OnsetFilePathLimitExceeded(QuotaError):
msg_fmt = _("Personality file path too long")
class OnsetFileContentLimitExceeded(QuotaError):
msg_fmt = _("Personality file content too long")
class KeypairLimitExceeded(QuotaError):
msg_fmt = _("Maximum number of key pairs exceeded")
class SecurityGroupLimitExceeded(QuotaError):
ec2_code = 'SecurityGroupLimitExceeded'
msg_fmt = _("Maximum number of security groups or rules exceeded")
class PortLimitExceeded(QuotaError):
msg_fmt = _("Maximum number of ports exceeded")
class AggregateError(NovaException):
msg_fmt = _("Aggregate %(aggregate_id)s: action '%(action)s' "
"caused an error: %(reason)s.")
class AggregateNotFound(NotFound):
msg_fmt = _("Aggregate %(aggregate_id)s could not be found.")
class AggregateNameExists(NovaException):
msg_fmt = _("Aggregate %(aggregate_name)s already exists.")
class AggregateHostNotFound(NotFound):
msg_fmt = _("Aggregate %(aggregate_id)s has no host %(host)s.")
class AggregateMetadataNotFound(NotFound):
msg_fmt = _("Aggregate %(aggregate_id)s has no metadata with "
"key %(metadata_key)s.")
class AggregateHostExists(NovaException):
msg_fmt = _("Aggregate %(aggregate_id)s already has host %(host)s.")
class FlavorCreateFailed(NovaException):
msg_fmt = _("Unable to create flavor")
class InstancePasswordSetFailed(NovaException):
msg_fmt = _("Failed to set admin password on %(instance)s "
"because %(reason)s")
safe = True
class DuplicateVlan(NovaException):
msg_fmt = _("Detected existing vlan with id %(vlan)d")
class CidrConflict(NovaException):
msg_fmt = _("There was a conflict when trying to complete your request.")
code = 409
class InstanceNotFound(NotFound):
ec2_code = 'InvalidInstanceID.NotFound'
msg_fmt = _("Instance %(instance_id)s could not be found.")
class InstanceInfoCacheNotFound(NotFound):
msg_fmt = _("Info cache for instance %(instance_uuid)s could not be "
"found.")
class NodeNotFound(NotFound):
msg_fmt = _("Node %(node_id)s could not be found.")
class NodeNotFoundByUUID(NotFound):
msg_fmt = _("Node with UUID %(node_uuid)s could not be found.")
class MarkerNotFound(NotFound):
msg_fmt = _("Marker %(marker)s could not be found.")
class InvalidInstanceIDMalformed(Invalid):
ec2_code = 'InvalidInstanceID.Malformed'
msg_fmt = _("Invalid id: %(val)s (expecting \"i-...\").")
class CouldNotFetchImage(NovaException):
msg_fmt = _("Could not fetch image %(image_id)s")
class CouldNotUploadImage(NovaException):
msg_fmt = _("Could not upload image %(image_id)s")
class TaskAlreadyRunning(NovaException):
msg_fmt = _("Task %(task_name)s is already running on host %(host)s")
class TaskNotRunning(NovaException):
msg_fmt = _("Task %(task_name)s is not running on host %(host)s")
class InstanceIsLocked(InstanceInvalidState):
msg_fmt = _("Instance %(instance_uuid)s is locked")
class ConfigDriveInvalidValue(Invalid):
msg_fmt = _("Invalid value for Config Drive option: %(option)s")
class ConfigDriveMountFailed(NovaException):
msg_fmt = _("Could not mount vfat config drive. %(operation)s failed. "
"Error: %(error)s")
class ConfigDriveUnknownFormat(NovaException):
msg_fmt = _("Unknown config drive format %(format)s. Select one of "
"iso9660 or vfat.")
class InterfaceAttachFailed(Invalid):
msg_fmt = _("Failed to attach network adapter device to %(instance)s")
class InterfaceDetachFailed(Invalid):
msg_fmt = _("Failed to detach network adapter device from %(instance)s")
class InstanceUserDataTooLarge(NovaException):
msg_fmt = _("User data too large. User data must be no larger than "
"%(maxsize)s bytes once base64 encoded. Your data is "
"%(length)d bytes")
class InstanceUserDataMalformed(NovaException):
msg_fmt = _("User data needs to be valid base 64.")
class UnexpectedTaskStateError(NovaException):
msg_fmt = _("Unexpected task state: expecting %(expected)s but "
"the actual state is %(actual)s")
class UnexpectedDeletingTaskStateError(UnexpectedTaskStateError):
pass
class InstanceActionNotFound(NovaException):
msg_fmt = _("Action for request_id %(request_id)s on instance"
" %(instance_uuid)s not found")
class InstanceActionEventNotFound(NovaException):
msg_fmt = _("Event %(event)s not found for action id %(action_id)s")
class UnexpectedVMStateError(NovaException):
msg_fmt = _("Unexpected VM state: expecting %(expected)s but "
"the actual state is %(actual)s")
class CryptoCAFileNotFound(FileNotFound):
msg_fmt = _("The CA file for %(project)s could not be found")
class CryptoCRLFileNotFound(FileNotFound):
msg_fmt = _("The CRL file for %(project)s could not be found")
class InstanceRecreateNotSupported(Invalid):
msg_fmt = _('Instance recreate is not supported.')
class ServiceGroupUnavailable(NovaException):
msg_fmt = _("The service from servicegroup driver %(driver)s is "
"temporarily unavailable.")
class DBNotAllowed(NovaException):
msg_fmt = _('%(binary)s attempted direct database access which is '
'not allowed by policy')
class UnsupportedVirtType(Invalid):
msg_fmt = _("Virtualization type '%(virt)s' is not supported by "
"this compute driver")
class UnsupportedHardware(Invalid):
msg_fmt = _("Requested hardware '%(model)s' is not supported by "
"the '%(virt)s' virt driver")
class Base64Exception(NovaException):
msg_fmt = _("Invalid Base 64 data for file %(path)s")
class BuildAbortException(NovaException):
msg_fmt = _("Build of instance %(instance_uuid)s aborted: %(reason)s")
class RescheduledException(NovaException):
msg_fmt = _("Build of instance %(instance_uuid)s was re-scheduled: "
"%(reason)s")
class ShadowTableExists(NovaException):
msg_fmt = _("Shadow table with name %(name)s already exists.")
class InstanceFaultRollback(NovaException):
def __init__(self, inner_exception=None):
message = _("Instance rollback performed due to: %s")
self.inner_exception = inner_exception
super(InstanceFaultRollback, self).__init__(message % inner_exception)
class UnsupportedObjectError(NovaException):
msg_fmt = _('Unsupported object type %(objtype)s')
class OrphanedObjectError(NovaException):
msg_fmt = _('Cannot call %(method)s on orphaned %(objtype)s object')
class IncompatibleObjectVersion(NovaException):
msg_fmt = _('Version %(objver)s of %(objname)s is not supported')
class ObjectActionError(NovaException):
msg_fmt = _('Object action %(action)s failed because: %(reason)s')
class CoreAPIMissing(NovaException):
msg_fmt = _("Core API extensions are missing: %(missing_apis)s")
class AgentError(NovaException):
msg_fmt = _('Error during following call to agent: %(method)s')
class AgentTimeout(AgentError):
msg_fmt = _('Unable to contact guest agent. '
'The following call timed out: %(method)s')
class AgentNotImplemented(AgentError):
msg_fmt = _('Agent does not support the call: %(method)s')
class InstanceGroupNotFound(NotFound):
msg_fmt = _("Instance group %(group_uuid)s could not be found.")
class InstanceGroupIdExists(NovaException):
msg_fmt = _("Instance group %(group_uuid)s already exists.")
class InstanceGroupMetadataNotFound(NotFound):
msg_fmt = _("Instance group %(group_uuid)s has no metadata with "
"key %(metadata_key)s.")
class InstanceGroupMemberNotFound(NotFound):
msg_fmt = _("Instance group %(group_uuid)s has no member with "
"id %(instance_id)s.")
class InstanceGroupPolicyNotFound(NotFound):
msg_fmt = _("Instance group %(group_uuid)s has no policy %(policy)s.")
class PluginRetriesExceeded(NovaException):
msg_fmt = _("Number of retries to plugin (%(num_retries)d) exceeded.")
class ImageDownloadModuleError(NovaException):
msg_fmt = _("There was an error with the download module %(module)s. "
"%(reason)s")
class ImageDownloadModuleMetaDataError(ImageDownloadModuleError):
msg_fmt = _("The metadata for this location will not work with this "
"module %(module)s. %(reason)s.")
class ImageDownloadModuleNotImplementedError(ImageDownloadModuleError):
msg_fmt = _("The method %(method_name)s is not implemented.")
class ImageDownloadModuleConfigurationError(ImageDownloadModuleError):
msg_fmt = _("The module %(module)s is misconfigured: %(reason)s.")
class ResourceMonitorError(NovaException):
msg_fmt = _("Error when creating resource monitor: %(monitor)s")
class PciDeviceWrongAddressFormat(NovaException):
msg_fmt = _("The PCI address %(address)s has an incorrect format.")
class PciDeviceNotFoundById(NotFound):
msg_fmt = _("PCI device %(id)s not found")
class PciDeviceNotFound(NovaException):
msg_fmt = _("PCI Device %(node_id)s:%(address)s not found.")
class PciDeviceInvalidStatus(NovaException):
msg_fmt = _(
"PCI device %(compute_node_id)s:%(address)s is %(status)s "
"instead of %(hopestatus)s")
class PciDeviceInvalidOwner(NovaException):
msg_fmt = _(
"PCI device %(compute_node_id)s:%(address)s is owned by %(owner)s "
"instead of %(hopeowner)s")
class PciDeviceRequestFailed(NovaException):
msg_fmt = _(
"PCI device request (%requests)s failed")
class PciDevicePoolEmpty(NovaException):
msg_fmt = _(
"Attempt to consume PCI device %(compute_node_id)s:%(address)s "
"from empty pool")
class PciInvalidAlias(NovaException):
msg_fmt = _("Invalid PCI alias definition: %(reason)s")
class PciRequestAliasNotDefined(NovaException):
msg_fmt = _("PCI alias %(alias)s is not defined")
class MissingParameter(NovaException):
ec2_code = 'MissingParameter'
msg_fmt = _("Not enough parameters: %(reason)s")
code = 400
class PciConfigInvalidWhitelist(Invalid):
msg_fmt = _("Invalid PCI devices Whitelist config %(reason)s")
class PciTrackerInvalidNodeId(NovaException):
msg_fmt = _("Cannot change %(node_id)s to %(new_node_id)s")
# Cannot be templated, msg needs to be constructed when raised.
class InternalError(NovaException):
ec2_code = 'InternalError'
msg_fmt = "%(err)s"
class PciDevicePrepareFailed(NovaException):
msg_fmt = _("Failed to prepare PCI device %(id)s for instance "
"%(instance_uuid)s: %(reason)s")
class PciDeviceDetachFailed(NovaException):
msg_fmt = _("Failed to detach PCI device %(dev)s: %(reason)s")
class PciDeviceUnsupportedHypervisor(NovaException):
msg_fmt = _("%(type)s hypervisor does not support PCI devices")
class KeyManagerError(NovaException):
msg_fmt = _("Key manager error: %(reason)s")
class InvalidVideoMode(Invalid):
msg_fmt = _("Provided video model (%(model)s) is not supported.")
class RngDeviceNotExist(Invalid):
msg_fmt = _("The provided RNG device path: (%(path)s) is not "
"present on the host.")
class RequestedVRamTooHigh(NovaException):
msg_fmt = _("The requested amount of video memory %(req_vram)d is higher "
"than the maximum allowed by flavor %(max_vram)d.")
class InvalidWatchdogAction(Invalid):
msg_fmt = _("Provided watchdog action (%(action)s) is not supported.")
|
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (C) 2012 Midokura Japan K.K.
# Copyright (C) 2013 Midokura PTE LTD
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Tomoe Sugihara, Midokura Japan KK
# @author: Ryu Ishimoto, Midokura Japan KK
from webob import exc as w_exc
from neutron.common import exceptions as q_exc
from neutron.openstack.common import log as logging
LOG = logging.getLogger(__name__)
PREFIX = 'OS_SG_'
NAME_IDENTIFIABLE_PREFIX_LEN = len(PREFIX) + 36 # 36 = length of uuid
OS_FLOATING_IP_RULE_KEY = 'OS_FLOATING_IP'
OS_ROUTER_IN_CHAIN_NAME_FORMAT = 'OS_ROUTER_IN_%s'
OS_ROUTER_OUT_CHAIN_NAME_FORMAT = 'OS_ROUTER_OUT_%s'
OS_SG_KEY = 'os_sg_rule_id'
OS_TENANT_ROUTER_RULE_KEY = 'OS_TENANT_ROUTER_RULE'
SNAT_RULE = 'SNAT'
SNAT_RULE_PROPERTY = {OS_TENANT_ROUTER_RULE_KEY: SNAT_RULE}
SUFFIX_IN = '_IN'
SUFFIX_OUT = '_OUT'
def sg_label(sg_id, sg_name):
"""Construct the security group ID used as chain identifier in MidoNet."""
return PREFIX + str(sg_id) + '_' + sg_name
def sg_rule_properties(os_sg_rule_id):
return {OS_SG_KEY: str(os_sg_rule_id)}
port_group_name = sg_label
def chain_names(sg_id, sg_name):
"""Get inbound and outbound chain names."""
prefix = sg_label(sg_id, sg_name)
in_chain_name = prefix + SUFFIX_IN
out_chain_name = prefix + SUFFIX_OUT
return {'in': in_chain_name, 'out': out_chain_name}
def router_chain_names(router_id):
in_name = OS_ROUTER_IN_CHAIN_NAME_FORMAT % router_id
out_name = OS_ROUTER_OUT_CHAIN_NAME_FORMAT % router_id
return {'in': in_name, 'out': out_name}
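# Illustration of the naming helpers above (hypothetical ids 'sg-1' and
# 'r-42'):
#
#     sg_label('sg-1', 'web')        # -> 'OS_SG_sg-1_web'
#     chain_names('sg-1', 'web')     # -> {'in': 'OS_SG_sg-1_web_IN',
#                                    #     'out': 'OS_SG_sg-1_web_OUT'}
#     router_chain_names('r-42')     # -> {'in': 'OS_ROUTER_IN_r-42',
#                                    #     'out': 'OS_ROUTER_OUT_r-42'}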
def handle_api_error(fn):
def wrapped(*args, **kwargs):
try:
return fn(*args, **kwargs)
except w_exc.HTTPException as ex:
raise MidonetApiException(msg=ex)
return wrapped
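# Hedged usage sketch: handle_api_error is applied to MidoNet API calls so
# that any webob HTTPException is re-raised as MidonetApiException.
# example_get_bridge is a hypothetical helper; mido_api.get_bridge is the
# same call used by MidoClient.get_bridge below.
#
#     @handle_api_error
#     def example_get_bridge(mido_api, bridge_id):
#         return mido_api.get_bridge(bridge_id)
#
# If the MidoNet API returns an HTTP error, the caller sees a
# MidonetApiException instead of a raw webob exception.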
class MidonetResourceNotFound(q_exc.NotFound):
message = _('MidoNet %(resource_type)s %(id)s could not be found')
class MidonetApiException(q_exc.NeutronException):
message = _("MidoNet API error: %(msg)s")
class MidoClient:
def __init__(self, mido_api):
self.mido_api = mido_api
@handle_api_error
def create_bridge(self, tenant_id, name):
"""Create a new bridge
:param tenant_id: id of tenant creating the bridge
:param name: name of the bridge
:returns: newly created bridge
"""
LOG.debug(_("MidoClient.create_bridge called: "
"tenant_id=%(tenant_id)s, name=%(name)s"),
{'tenant_id': tenant_id, 'name': name})
return self.mido_api.add_bridge().name(name).tenant_id(
tenant_id).create()
@handle_api_error
def delete_bridge(self, id):
"""Delete a bridge
:param id: id of the bridge
"""
LOG.debug(_("MidoClient.delete_bridge called: id=%(id)s"), {'id': id})
return self.mido_api.delete_bridge(id)
@handle_api_error
def get_bridge(self, id):
"""Get a bridge
:param id: id of the bridge
:returns: requested bridge; raises MidonetResourceNotFound if it does not exist
"""
LOG.debug(_("MidoClient.get_bridge called: id=%s"), id)
try:
return self.mido_api.get_bridge(id)
except w_exc.HTTPNotFound:
raise MidonetResourceNotFound(resource_type='Bridge', id=id)
@handle_api_error
def update_bridge(self, id, name):
"""Update a bridge of the given id with the new name
:param id: id of the bridge
:param name: name of the bridge to set to
:returns: bridge object
"""
LOG.debug(_("MidoClient.update_bridge called: "
"id=%(id)s, name=%(name)s"), {'id': id, 'name': name})
try:
return self.mido_api.get_bridge(id).name(name).update()
except w_exc.HTTPNotFound:
raise MidonetResourceNotFound(resource_type='Bridge', id=id)
@handle_api_error
def create_dhcp(self, bridge, gateway_ip, net_addr, net_len):
"""Create a new DHCP entry
:param bridge: bridge object to add dhcp to
:param gateway_ip: IP address of gateway
:param net_addr: network IP address
:param net_len: network IP address length
:returns: newly created dhcp
"""
LOG.debug(_("MidoClient.create_dhcp called: bridge=%s(bridge)s, "
"net_addr=%(net_addr)s, net_len=%(net_len)s, "
"gateway_ip=%(gateway_ip)s"),
{'bridge': bridge, 'net_addr': net_addr, 'net_len': net_len,
'gateway_ip': gateway_ip})
return bridge.add_dhcp_subnet().default_gateway(
gateway_ip).subnet_prefix(net_addr).subnet_length(
net_len).create()
@handle_api_error
def create_dhcp_hosts(self, bridge, ip, mac):
"""Create DHCP host entries
:param bridge: bridge of the DHCP
:param ip: IP address
:param mac: MAC address
"""
LOG.debug(_("MidoClient.create_dhcp_hosts called: bridge=%s(bridge), "
"ip=%(ip)s, mac=%(mac)s"), {'bridge': bridge, 'ip': ip,
'mac': mac})
dhcp_subnets = bridge.get_dhcp_subnets()
if dhcp_subnets:
# Add the host to the first subnet as we currently support one
# subnet per network.
dhcp_subnets[0].add_dhcp_host().ip_addr(ip).mac_addr(mac).create()
@handle_api_error
def delete_dhcp_hosts(self, bridge_id, ip, mac):
"""Delete DHCP host entries
:param bridge_id: id of the bridge of the DHCP
:param ip: IP address
:param mac: MAC address
"""
LOG.debug(_("MidoClient.delete_dhcp_hosts called: "
"bridge_id=%s(bridge_id), ip=%(ip)s, mac=%(mac)s"),
{'bridge_id': bridge_id, 'ip': ip, 'mac': mac})
bridge = self.get_bridge(bridge_id)
dhcp_subnets = bridge.get_dhcp_subnets()
if dhcp_subnets:
for dh in dhcp_subnets[0].get_dhcp_hosts():
if dh.get_mac_addr() == mac and dh.get_ip_addr() == ip:
dh.delete()
@handle_api_error
def delete_dhcp(self, bridge):
"""Delete a DHCP entry
:param bridge: bridge to remove DHCP from
"""
LOG.debug(_("MidoClient.delete_dhcp called: bridge=%s(bridge), "),
{'bridge': bridge})
dhcp = bridge.get_dhcp_subnets()
if not dhcp:
raise MidonetApiException(msg="Tried to delete non-existent DHCP")
dhcp[0].delete()
@handle_api_error
def delete_port(self, id):
"""Delete a port
:param id: id of the port
"""
LOG.debug(_("MidoClient.delete_port called: id=%(id)s"), {'id': id})
self.mido_api.delete_port(id)
@handle_api_error
def get_port(self, id):
"""Get a port
:param id: id of the port
:returns: requested port; raises MidonetResourceNotFound if it does not exist
"""
LOG.debug(_("MidoClient.get_port called: id=%(id)s"), {'id': id})
try:
return self.mido_api.get_port(id)
except w_exc.HTTPNotFound:
raise MidonetResourceNotFound(resource_type='Port', id=id)
@handle_api_error
def create_exterior_bridge_port(self, bridge):
"""Create a new exterior bridge port
:param bridge: bridge object to add port to
:returns: newly created port
"""
LOG.debug(_("MidoClient.create_exterior_bridge_port called: "
"bridge=%(bridge)s"), {'bridge': bridge})
return bridge.add_exterior_port().create()
@handle_api_error
def create_interior_bridge_port(self, bridge):
"""Create a new interior bridge port
:param bridge: bridge object to add port to
:returns: newly created port
"""
LOG.debug(_("MidoClient.create_interior_bridge_port called: "
"bridge=%(bridge)s"), {'bridge': bridge})
return bridge.add_interior_port().create()
@handle_api_error
def create_router(self, tenant_id, name):
"""Create a new router
:param tenant_id: id of tenant creating the router
:param name: name of the router
:returns: newly created router
"""
LOG.debug(_("MidoClient.create_router called: "
"tenant_id=%(tenant_id)s, name=%(name)s"),
{'tenant_id': tenant_id, 'name': name})
return self.mido_api.add_router().name(name).tenant_id(
tenant_id).create()
@handle_api_error
def create_tenant_router(self, tenant_id, name, metadata_router):
"""Create a new tenant router
:param tenant_id: id of tenant creating the router
:param name: name of the router
:param metadata_router: metadata router
:returns: newly created router
"""
LOG.debug(_("MidoClient.create_tenant_router called: "
"tenant_id=%(tenant_id)s, name=%(name)s"
"metadata_router=%(metadata_router)s"),
{'tenant_id': tenant_id, 'name': name,
'metadata_router': metadata_router})
router = self.create_router(tenant_id, name)
self.link_router_to_metadata_router(router, metadata_router)
return router
@handle_api_error
def delete_tenant_router(self, id, metadata_router):
"""Delete a tenant router
:param id: id of router
:param metadata_router: metadata router
"""
LOG.debug(_("MidoClient.delete_tenant_router called: "
"id=%(id)s, metadata_router=%(metadata_router)s"),
{'id': id, 'metadata_router': metadata_router})
self.unlink_router_from_metadata_router(id, metadata_router)
self.destroy_router_chains(id)
# delete the router
self.delete_router(id)
@handle_api_error
def delete_router(self, id):
"""Delete a router
:param id: id of the router
"""
LOG.debug(_("MidoClient.delete_router called: id=%(id)s"), {'id': id})
return self.mido_api.delete_router(id)
@handle_api_error
def get_router(self, id):
"""Get a router with the given id
:param id: id of the router
:returns: requested router object; raises MidonetResourceNotFound if it does not exist
"""
LOG.debug(_("MidoClient.get_router called: id=%(id)s"), {'id': id})
try:
return self.mido_api.get_router(id)
except w_exc.HTTPNotFound:
raise MidonetResourceNotFound(resource_type='Router', id=id)
@handle_api_error
def update_router(self, id, name):
"""Update a router of the given id with the new name
:param id: id of the router
:param name: name of the router to set to
:returns: router object
"""
LOG.debug(_("MidoClient.update_router called: "
"id=%(id)s, name=%(name)s"), {'id': id, 'name': name})
try:
return self.mido_api.get_router(id).name(name).update()
except w_exc.HTTPNotFound:
raise MidonetResourceNotFound(resource_type='Router', id=id)
@handle_api_error
def link_bridge_port_to_router(self, port_id, router_id, gateway_ip,
net_addr, net_len, metadata_router):
"""Link a tenant bridge port to the router
:param port_id: port ID
:param router_id: router id to link to
:param gateway_ip: IP address of gateway
:param net_addr: network IP address
:param net_len: network IP address length
:param metadata_router: metadata router instance
"""
LOG.debug(_("MidoClient.link_bridge_port_to_router called: "
"port_id=%(port_id)s, router_id=%(router_id)s, "
"gateway_ip=%(gateway_ip)s net_addr=%(net_addr)s, "
"net_len=%(net_len)s, "
"metadata_router=%(metadata_router)s"),
{'port_id': port_id, 'router_id': router_id,
'gateway_ip': gateway_ip, 'net_addr': net_addr,
'net_len': net_len, 'metadata_router': metadata_router})
router = self.get_router(router_id)
# create an interior port on the router
in_port = router.add_interior_port()
router_port = in_port.port_address(gateway_ip).network_address(
net_addr).network_length(net_len).create()
br_port = self.get_port(port_id)
router_port.link(br_port.get_id())
# add a route for the subnet in the tenant router
router.add_route().type('Normal').src_network_addr(
'0.0.0.0').src_network_length(0).dst_network_addr(
net_addr).dst_network_length(net_len).weight(
100).next_hop_port(router_port.get_id()).create()
# add a route for the subnet in metadata router; forward
# packets destined to the subnet to the tenant router
for pp in metadata_router.get_peer_ports():
if pp.get_device_id() == router.get_id():
mdr_port_id = pp.get_peer_id()
break
else:
raise Exception(
_("Couldn't find a md router port for the router=%r"), router)
metadata_router.add_route().type('Normal').src_network_addr(
'0.0.0.0').src_network_length(0).dst_network_addr(
net_addr).dst_network_length(net_len).weight(
100).next_hop_port(mdr_port_id).create()
@handle_api_error
def unlink_bridge_port_from_router(self, port_id, net_addr, net_len,
metadata_router):
"""Unlink a tenant bridge port from the router
:param port_id: port ID
:param net_addr: network IP address
:param net_len: network IP address length
:param metadata_router: metadata router instance
"""
LOG.debug(_("MidoClient.unlink_bridge_port_from_router called: "
"port_id=%(port_id)s, net_addr=%(net_addr)s, "
"net_len=%(net_len)s, "
"metadata_router=%(metadata_router)s"),
{'port_id': port_id, 'net_addr': net_addr,
'net_len': net_len, 'metadata_router': metadata_router})
port = self.get_port(port_id)
port.unlink()
self.delete_port(port.get_peer_id())
self.delete_port(port.get_id())
# delete the route for the subnet in the metadata router
for r in metadata_router.get_routes():
if (r.get_dst_network_addr() == net_addr and
r.get_dst_network_length() == net_len):
LOG.debug(_('Deleting route=%r ...'), r)
self.mido_api.delete_route(r.get_id())
break
@handle_api_error
def link_bridge_to_provider_router(self, bridge, provider_router,
gateway_ip, net_addr, net_len):
"""Link a tenant bridge to the provider router
:param bridge: tenant bridge
:param provider_router: provider router to link to
:param gateway_ip: IP address of gateway
:param net_addr: network IP address
:param net_len: network IP address length
"""
LOG.debug(_("MidoClient.link_bridge_to_provider_router called: "
"bridge=%(bridge)s, provider_router=%(provider_router)s, "
"gateway_ip=%(gateway_ip)s, net_addr=%(net_addr)s, "
"net_len=%(net_len)s"),
{'bridge': bridge, 'provider_router': provider_router,
'gateway_ip': gateway_ip, 'net_addr': net_addr,
'net_len': net_len})
# create an interior port on the provider router
in_port = provider_router.add_interior_port()
pr_port = in_port.port_address(gateway_ip).network_address(
net_addr).network_length(net_len).create()
# create an interior bridge port, then link it to the router.
br_port = bridge.add_interior_port().create()
pr_port.link(br_port.get_id())
# add a route for the subnet in the provider router
provider_router.add_route().type('Normal').src_network_addr(
'0.0.0.0').src_network_length(0).dst_network_addr(
net_addr).dst_network_length(net_len).weight(
100).next_hop_port(pr_port.get_id()).create()
@handle_api_error
def unlink_bridge_from_provider_router(self, bridge, provider_router):
"""Unlink a tenant bridge from the provider router
:param bridge: tenant bridge
:param provider_router: provider router to link to
"""
LOG.debug(_("MidoClient.unlink_bridge_from_provider_router called: "
"bridge=%(bridge)s, provider_router=%(provider_router)s"),
{'bridge': bridge, 'provider_router': provider_router})
# Delete routes and unlink the router and the bridge.
routes = provider_router.get_routes()
bridge_ports_to_delete = [
p for p in provider_router.get_peer_ports()
if p.get_device_id() == bridge.get_id()]
for p in bridge.get_peer_ports():
if p.get_device_id() == provider_router.get_id():
# delete the routes going to the bridge
for r in routes:
if r.get_next_hop_port() == p.get_id():
self.mido_api.delete_route(r.get_id())
p.unlink()
self.mido_api.delete_port(p.get_id())
# delete bridge port
for port in bridge_ports_to_delete:
self.mido_api.delete_port(port.get_id())
@handle_api_error
def set_router_external_gateway(self, id, provider_router, snat_ip):
"""Set router external gateway
:param id: ID of the tenant router
:param provider_router: provider router
:param snat_ip: SNAT IP address
"""
LOG.debug(_("MidoClient.set_router_external_gateway called: "
"id=%(id)s, provider_router=%(provider_router)s, "
"snat_ip=%s(snat_ip)s)"),
{'id': id, 'provider_router': provider_router,
'snat_ip': snat_ip})
tenant_router = self.get_router(id)
# Create an interior port in the provider router
in_port = provider_router.add_interior_port()
pr_port = in_port.network_address(
'169.254.255.0').network_length(30).port_address(
'169.254.255.1').create()
# Create a port in the tenant router
tr_port = tenant_router.add_interior_port().network_address(
'169.254.255.0').network_length(30).port_address(
'169.254.255.2').create()
# Link them
pr_port.link(tr_port.get_id())
# Add a route for snat_ip to bring it down to tenant
provider_router.add_route().type(
'Normal').src_network_addr('0.0.0.0').src_network_length(
0).dst_network_addr(snat_ip).dst_network_length(
32).weight(100).next_hop_port(
pr_port.get_id()).create()
# Add default route to uplink in the tenant router
tenant_router.add_route().type('Normal').src_network_addr(
'0.0.0.0').src_network_length(0).dst_network_addr(
'0.0.0.0').dst_network_length(0).weight(
100).next_hop_port(tr_port.get_id()).create()
# ADD SNAT(masquerade) rules
chains = self.get_router_chains(
tenant_router.get_tenant_id(), tenant_router.get_id())
chains['in'].add_rule().nw_dst_address(snat_ip).nw_dst_length(
32).type('rev_snat').flow_action('accept').in_ports(
[tr_port.get_id()]).properties(
SNAT_RULE_PROPERTY).position(1).create()
nat_targets = []
nat_targets.append(
{'addressFrom': snat_ip, 'addressTo': snat_ip,
'portFrom': 1, 'portTo': 65535})
chains['out'].add_rule().type('snat').flow_action(
'accept').nat_targets(nat_targets).out_ports(
[tr_port.get_id()]).properties(
SNAT_RULE_PROPERTY).position(1).create()
@handle_api_error
def clear_router_external_gateway(self, id):
"""Clear router external gateway
:param id: ID of the tenant router
"""
LOG.debug(_("MidoClient.clear_router_external_gateway called: "
"id=%(id)s"), {'id': id})
tenant_router = self.get_router(id)
# delete the port that is connected to provider router
for p in tenant_router.get_ports():
if p.get_port_address() == '169.254.255.2':
peer_port_id = p.get_peer_id()
p.unlink()
self.mido_api.delete_port(peer_port_id)
self.mido_api.delete_port(p.get_id())
# delete default route
for r in tenant_router.get_routes():
if (r.get_dst_network_addr() == '0.0.0.0' and
r.get_dst_network_length() == 0):
self.mido_api.delete_route(r.get_id())
# delete SNAT(masquerade) rules
chains = self.get_router_chains(
tenant_router.get_tenant_id(),
tenant_router.get_id())
for r in chains['in'].get_rules():
if OS_TENANT_ROUTER_RULE_KEY in r.get_properties():
if r.get_properties()[
OS_TENANT_ROUTER_RULE_KEY] == SNAT_RULE:
self.mido_api.delete_rule(r.get_id())
for r in chains['out'].get_rules():
if OS_TENANT_ROUTER_RULE_KEY in r.get_properties():
if r.get_properties()[
OS_TENANT_ROUTER_RULE_KEY] == SNAT_RULE:
self.mido_api.delete_rule(r.get_id())
@handle_api_error
def get_router_chains(self, tenant_id, router_id):
"""Get router chains.
Returns a dictionary that has in/out chain resources key'ed with 'in'
and 'out' respectively, given the tenant_id and the router_id passed
in as arguments.
"""
LOG.debug(_("MidoClient.get_router_chains called: "
"tenant_id=%(tenant_id)s router_id=%(router_id)s"),
{'tenant_id': tenant_id, 'router_id': router_id})
chain_names = router_chain_names(router_id)
chains = {}
for c in self.mido_api.get_chains({'tenant_id': tenant_id}):
if c.get_name() == chain_names['in']:
chains['in'] = c
elif c.get_name() == chain_names['out']:
chains['out'] = c
return chains
@handle_api_error
def create_router_chains(self, router):
"""Create chains for a new router.
Creates chains for the router and returns the same dictionary as
get_router_chains() returns.
:param router: router to set chains for
"""
LOG.debug(_("MidoClient.create_router_chains called: "
"router=%(router)s"), {'router': router})
chains = {}
router_id = router.get_id()
tenant_id = router.get_tenant_id()
chain_names = router_chain_names(router_id)
chains['in'] = self.mido_api.add_chain().tenant_id(tenant_id).name(
chain_names['in']).create()
chains['out'] = self.mido_api.add_chain().tenant_id(tenant_id).name(
chain_names['out']).create()
# set chains to in/out filters
router.inbound_filter_id(
chains['in'].get_id()).outbound_filter_id(
chains['out'].get_id()).update()
return chains
@handle_api_error
def destroy_router_chains(self, id):
"""Deletes chains of a router.
:param id: router ID to delete chains of
"""
LOG.debug(_("MidoClient.destroy_router_chains called: "
"id=%(id)s"), {'id': id})
# delete corresponding chains
router = self.get_router(id)
chains = self.get_router_chains(router.get_tenant_id(), id)
self.mido_api.delete_chain(chains['in'].get_id())
self.mido_api.delete_chain(chains['out'].get_id())
@handle_api_error
def link_router_to_metadata_router(self, router, metadata_router):
"""Link a router to the metadata router
:param router: router to link
:param metadata_router: metadata router
"""
LOG.debug(_("MidoClient.link_router_to_metadata_router called: "
"router=%(router)s, metadata_router=%(metadata_router)s"),
{'router': router, 'metadata_router': metadata_router})
# link to metadata router
in_port = metadata_router.add_interior_port()
mdr_port = in_port.network_address('169.254.255.0').network_length(
30).port_address('169.254.255.1').create()
tr_port = router.add_interior_port().network_address(
'169.254.255.0').network_length(30).port_address(
'169.254.255.2').create()
mdr_port.link(tr_port.get_id())
# forward metadata traffic to metadata router
router.add_route().type('Normal').src_network_addr(
'0.0.0.0').src_network_length(0).dst_network_addr(
'169.254.169.254').dst_network_length(32).weight(
100).next_hop_port(tr_port.get_id()).create()
@handle_api_error
def unlink_router_from_metadata_router(self, id, metadata_router):
"""Unlink a router from the metadata router
:param id: ID of router
:param metadata_router: metadata router
"""
LOG.debug(_("MidoClient.unlink_router_from_metadata_router called: "
"id=%(id)s, metadata_router=%(metadata_router)s"),
{'id': id, 'metadata_router': metadata_router})
# unlink from metadata router and delete the interior ports
# that connect metadata router and this router.
for pp in metadata_router.get_peer_ports():
if pp.get_device_id() == id:
mdr_port = self.get_port(pp.get_peer_id())
pp.unlink()
self.mido_api.delete_port(pp.get_id())
self.mido_api.delete_port(mdr_port.get_id())
@handle_api_error
def setup_floating_ip(self, router_id, provider_router, floating_ip,
fixed_ip, identifier):
"""Setup MidoNet for floating IP
:param router_id: router_id
:param provider_router: provider router
:param floating_ip: floating IP address
:param fixed_ip: fixed IP address
:param identifier: identifier to use to map to MidoNet
"""
LOG.debug(_("MidoClient.setup_floating_ip called: "
"router_id=%(router_id)s, "
"provider_router=%(provider_router)s"
"floating_ip=%(floating_ip)s, fixed_ip=%(fixed_ip)s"
"identifier=%(identifier)s"),
{'router_id': router_id, 'provider_router': provider_router,
'floating_ip': floating_ip, 'fixed_ip': fixed_ip,
'identifier': identifier})
# get the tenant router
router = self.mido_api.get_router(router_id)
# find the provider router port that is connected to the tenant
# of the floating ip
for p in router.get_peer_ports():
if p.get_device_id() == provider_router.get_id():
pr_port = p
# get the tenant router port id connected to provider router
tr_port_id = pr_port.get_peer_id()
# add a route for the floating ip to bring it to the tenant
provider_router.add_route().type(
'Normal').src_network_addr('0.0.0.0').src_network_length(
0).dst_network_addr(
floating_ip).dst_network_length(
32).weight(100).next_hop_port(
pr_port.get_id()).create()
chains = self.get_router_chains(router.get_tenant_id(), router_id)
# add dnat/snat rule pair for the floating ip
nat_targets = []
nat_targets.append(
{'addressFrom': fixed_ip, 'addressTo': fixed_ip,
'portFrom': 0, 'portTo': 0})
floating_property = {OS_FLOATING_IP_RULE_KEY: identifier}
chains['in'].add_rule().nw_dst_address(
floating_ip).nw_dst_length(32).type(
'dnat').flow_action('accept').nat_targets(
nat_targets).in_ports([tr_port_id]).position(
1).properties(floating_property).create()
nat_targets = []
nat_targets.append(
{'addressFrom': floating_ip, 'addressTo': floating_ip,
'portFrom': 0, 'portTo': 0})
chains['out'].add_rule().nw_src_address(
fixed_ip).nw_src_length(32).type(
'snat').flow_action('accept').nat_targets(
nat_targets).out_ports(
[tr_port_id]).position(1).properties(
floating_property).create()
@handle_api_error
def clear_floating_ip(self, router_id, provider_router, floating_ip,
identifier):
"""Remove floating IP
:param router_id: router_id
:param provider_router: provider router
:param floating_ip: floating IP address
:param identifier: identifier to use to map to MidoNet
"""
LOG.debug(_("MidoClient.clear_floating_ip called: "
"router_id=%(router_id)s, "
"provider_router=%(provider_router)s"
"floating_ip=%(floating_ip)s, identifier=%(identifier)s"),
{'router_id': router_id, 'provider_router': provider_router,
'floating_ip': floating_ip, 'identifier': identifier})
router = self.mido_api.get_router(router_id)
# find the provider router port that is connected to the tenant
# delete the route for this floating ip
for r in provider_router.get_routes():
if (r.get_dst_network_addr() == floating_ip and
r.get_dst_network_length() == 32):
self.mido_api.delete_route(r.get_id())
# delete snat/dnat rule pair for this floating ip
chains = self.get_router_chains(router.get_tenant_id(), router_id)
for r in chains['in'].get_rules():
if OS_FLOATING_IP_RULE_KEY in r.get_properties():
if r.get_properties()[OS_FLOATING_IP_RULE_KEY] == identifier:
LOG.debug(_('deleting rule=%r'), r)
self.mido_api.delete_rule(r.get_id())
break
for r in chains['out'].get_rules():
if OS_FLOATING_IP_RULE_KEY in r.get_properties():
if r.get_properties()[OS_FLOATING_IP_RULE_KEY] == identifier:
LOG.debug(_('deleting rule=%r'), r)
self.mido_api.delete_rule(r.get_id())
break
@handle_api_error
def create_for_sg(self, tenant_id, sg_id, sg_name):
"""Create a new chain for security group.
Creating a security group creates a pair of chains in MidoNet, one for
inbound and the other for outbound.
"""
LOG.debug(_("MidoClient.create_for_sg called: "
"tenant_id=%(tenant_id)s sg_id=%(sg_id)s "
"sg_name=%(sg_name)s "),
{'tenant_id': tenant_id, 'sg_id': sg_id, 'sg_name': sg_name})
cnames = chain_names(sg_id, sg_name)
self.mido_api.add_chain().tenant_id(tenant_id).name(
cnames['in']).create()
self.mido_api.add_chain().tenant_id(tenant_id).name(
cnames['out']).create()
pg_name = port_group_name(sg_id, sg_name)
self.mido_api.add_port_group().tenant_id(tenant_id).name(
pg_name).create()
@handle_api_error
def delete_for_sg(self, tenant_id, sg_id, sg_name):
"""Delete a chain mapped to a security group.
Deleting a SG means deleting all the chains (inbound and outbound)
associated with the SG in MidoNet.
"""
LOG.debug(_("MidoClient.delete_for_sg called: "
"tenant_id=%(tenant_id)s sg_id=%(sg_id)s "
"sg_name=%(sg_name)s "),
{'tenant_id': tenant_id, 'sg_id': sg_id, 'sg_name': sg_name})
cnames = chain_names(sg_id, sg_name)
chains = self.mido_api.get_chains({'tenant_id': tenant_id})
for c in chains:
if c.get_name() == cnames['in'] or c.get_name() == cnames['out']:
LOG.debug(_('MidoClient.delete_for_sg: deleting chain=%r'),
c.get_id())
self.mido_api.delete_chain(c.get_id())
pg_name = port_group_name(sg_id, sg_name)
pgs = self.mido_api.get_port_groups({'tenant_id': tenant_id})
for pg in pgs:
if pg.get_name() == pg_name:
LOG.debug(_("MidoClient.delete_for_sg: deleting pg=%r"),
pg)
self.mido_api.delete_port_group(pg.get_id())
@handle_api_error
def get_sg_chains(self, tenant_id, sg_id):
"""Get a list of chains mapped to a security group."""
LOG.debug(_("MidoClient.get_sg_chains called: "
"tenant_id=%(tenant_id)s sg_id=%(sg_id)s"),
{'tenant_id': tenant_id, 'sg_id': sg_id})
cnames = chain_names(sg_id, sg_name='')
chain_name_prefix_for_id = cnames['in'][:NAME_IDENTIFIABLE_PREFIX_LEN]
chains = {}
for c in self.mido_api.get_chains({'tenant_id': tenant_id}):
if c.get_name().startswith(chain_name_prefix_for_id):
if c.get_name().endswith(SUFFIX_IN):
chains['in'] = c
if c.get_name().endswith(SUFFIX_OUT):
chains['out'] = c
assert 'in' in chains
assert 'out' in chains
return chains
@handle_api_error
def get_port_groups_for_sg(self, tenant_id, sg_id):
LOG.debug(_("MidoClient.get_port_groups_for_sg called: "
"tenant_id=%(tenant_id)s sg_id=%(sg_id)s"),
{'tenant_id': tenant_id, 'sg_id': sg_id})
pg_name_prefix = port_group_name(
sg_id, sg_name='')[:NAME_IDENTIFIABLE_PREFIX_LEN]
port_groups = self.mido_api.get_port_groups({'tenant_id': tenant_id})
for pg in port_groups:
if pg.get_name().startswith(pg_name_prefix):
LOG.debug(_(
"MidoClient.get_port_groups_for_sg exiting: pg=%r"), pg)
return pg
return None
@handle_api_error
def create_for_sg_rule(self, rule):
LOG.debug(_("MidoClient.create_for_sg_rule called: rule=%r"), rule)
direction = rule['direction']
protocol = rule['protocol']
port_range_max = rule['port_range_max']
rule_id = rule['id']
security_group_id = rule['security_group_id']
remote_group_id = rule['remote_group_id']
remote_ip_prefix = rule['remote_ip_prefix']  # NOTE: not validated
tenant_id = rule['tenant_id']
port_range_min = rule['port_range_min']
# construct a corresponding rule
tp_src_start = tp_src_end = None
tp_dst_start = tp_dst_end = None
nw_src_address = None
nw_src_length = None
port_group_id = None
# handle source
if remote_ip_prefix is not None:
nw_src_address, nw_src_length = remote_ip_prefix.split('/')
elif remote_group_id is not None:  # security group as a source
source_pg = self.pg_manager.get_for_sg(tenant_id, remote_group_id)
port_group_id = source_pg.get_id()
else:
raise Exception(_("Don't know what to do with rule=%r") % rule)
# dst ports
tp_dst_start, tp_dst_end = port_range_min, port_range_max
# protocol
if protocol == 'tcp':
nw_proto = 6
elif protocol == 'udp':
nw_proto = 17
elif protocol == 'icmp':
nw_proto = 1
# extract type and code from repurposed fields
icmp_type = rule['from_port']
icmp_code = rule['to_port']
# translate -1 (wildcard in OpenStack) to the MidoNet wildcard
if icmp_type == -1:
icmp_type = None
if icmp_code == -1:
icmp_code = None
# set data for midonet rule
tp_src_start = tp_src_end = icmp_type
tp_dst_start = tp_dst_end = icmp_code
chains = self.get_sg_chains(tenant_id, security_group_id)
chain = None
if direction == 'egress':
chain = chains['in']
elif direction == 'ingress':
chain = chains['out']
else:
raise Exception(_("Don't know what to do with rule=%r") % rule)
# create an accept rule
properties = sg_rule_properties(rule_id)
LOG.debug(_("MidoClient.create_for_sg_rule: adding accept rule "
"%(rule_id)s in portgroup %(port_group_id)s"),
{'rule_id': rule_id, 'port_group_id': port_group_id})
chain.add_rule().port_group(port_group_id).type('accept').nw_proto(
nw_proto).nw_src_address(nw_src_address).nw_src_length(
nw_src_length).tp_src_start(tp_src_start).tp_src_end(
tp_src_end).tp_dst_start(tp_dst_start).tp_dst_end(
tp_dst_end).properties(properties).create()
@handle_api_error
def delete_for_sg_rule(self, rule):
LOG.debug(_("MidoClient.delete_for_sg_rule called: rule=%r"), rule)
tenant_id = rule['tenant_id']
security_group_id = rule['security_group_id']
rule_id = rule['id']
properties = sg_rule_properties(rule_id)
# search for the chains to find the rule to delete
chains = self.get_sg_chains(tenant_id, security_group_id)
for c in chains['in'], chains['out']:
rules = c.get_rules()
for r in rules:
if r.get_properties() == properties:
LOG.debug(_("MidoClient.delete_for_sg_rule: deleting "
"rule %r"), r)
self.mido_api.delete_rule(r.get_id())
|
|
#!/usr/bin/env python
"""Events framework with publisher, subscriber and repository."""
__author__ = 'Dave Foster <[email protected]>, Michael Meisinger'
import functools
import sys
import traceback
from gevent import event as gevent_event
from pyon.core import bootstrap
from pyon.core.exception import BadRequest, IonException, StreamException
from pyon.datastore.datastore import DataStore
from pyon.datastore.datastore_query import QUERY_EXP_KEY, DatastoreQueryBuilder, DQ
from pyon.ion.identifier import create_unique_event_id, create_simple_unique_id
from pyon.net.endpoint import Publisher, Subscriber
from pyon.util.async import spawn
from pyon.util.containers import get_ion_ts_millis, is_valid_ts
from pyon.util.log import log
from interface.objects import Event
# @TODO: configurable
EVENTS_XP = "pyon.events"
EVENTS_XP_TYPE = "topic"
#The event will be ignored if older than this time period
VALID_EVENT_TIME_PERIOD = 365 * 24 * 60 * 60 * 1000 # one year
def get_events_exchange_point():
return "%s.%s" % (bootstrap.get_sys_name(), EVENTS_XP)
class EventError(IonException):
status_code = 500
class EventPublisher(Publisher):
def __init__(self, event_type=None, xp=None, process=None, **kwargs):
"""
Constructs a publisher of events for a specific type.
@param event_type The name of the event type object
@param xp Exchange (AMQP) name; can be None, in which case the events default is used.
"""
self.event_type = event_type
self.process = process
if bootstrap.container_instance and getattr(bootstrap.container_instance, 'event_repository', None):
self.event_repo = bootstrap.container_instance.event_repository
else:
self.event_repo = None
# generate an exchange name to publish events to
xp = xp or get_events_exchange_point()
name = (xp, None)
Publisher.__init__(self, to_name=name, **kwargs)
def _topic(self, event_object):
"""
Builds the topic that this event should be published to.
"""
assert event_object
base_types = event_object.base_types or []
base_str = ".".join(reversed(base_types))
sub_type = event_object.sub_type or "_"
origin_type = event_object.origin_type or "_"
routing_key = "%s.%s.%s.%s.%s" % (base_str, event_object._get_type(), sub_type, origin_type, event_object.origin)
return routing_key
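# Illustrative routing key (hypothetical event values): for an event of type
# "ResourceModifiedEvent" whose base_types resolve to ["ResourceEvent", "Event"],
# with sub_type "UPDATE", origin_type "InstrumentDevice" and origin "dev_123",
# the key would be:
#   "Event.ResourceEvent.ResourceModifiedEvent.UPDATE.InstrumentDevice.dev_123"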
def publish_event_object(self, event_object):
"""
Publishes the given event object. The event's base_types, ts_created and
_id fields are filled in here if not already set. If the publish fails,
an exception is raised.
@param event_object the event object to be published
@retval event_object the event object which was published
"""
if not event_object:
raise BadRequest("Must provide event_object")
event_object.base_types = event_object._get_extends()
topic = self._topic(event_object) # Routing key generated using type_, base_types, origin, origin_type, sub_type
to_name = (self._send_name.exchange, topic)
current_time = get_ion_ts_millis()
# Ensure valid created timestamp if supplied
if event_object.ts_created:
if not is_valid_ts(event_object.ts_created):
raise BadRequest("The ts_created value is not a valid timestamp: '%s'" % (event_object.ts_created))
# Reject events whose ts_created is too far in the future
if int(event_object.ts_created) > ( current_time + VALID_EVENT_TIME_PERIOD ):
raise BadRequest("This ts_created value is too far in the future:'%s'" % (event_object.ts_created))
# Reject events that are older than specified time
if int(event_object.ts_created) < (current_time - VALID_EVENT_TIME_PERIOD) :
raise BadRequest("This ts_created value is too old:'%s'" % (event_object.ts_created))
else:
event_object.ts_created = str(current_time)
# Set the actor id from the current process context, if not already set
if not event_object.actor_id:
event_object.actor_id = self._get_actor_id()
# Validate this object - ideally validation errors should be raised, but for now just log
# any errors and keep going, since surfacing invalid events is better than skipping validation.
try:
event_object._validate()
except Exception as e:
log.exception(e)
#Ensure the event object has a unique id
if '_id' in event_object:
raise BadRequest("The event object cannot contain a _id field '%s'" % (event_object))
#Generate a unique ID for this event
event_object._id = create_unique_event_id()
try:
self.publish(event_object, to_name=to_name)
except Exception as ex:
log.exception("Failed to publish event (%s): '%s'" % (ex.message, event_object))
raise
return event_object
def publish_event(self, origin=None, event_type=None, **kwargs):
"""
Publishes an event of given type for the given origin. Event_type defaults to an
event_type set when initializing the EventPublisher. Other kwargs fill out the fields
of the event. If the publish fails, an exception is raised.
@param origin the origin field value
@param event_type the event type (defaults to the EventPublisher's event_type if set)
@param kwargs additional event fields
@retval event_object the event object which was published
"""
event_type = event_type or self.event_type
if not event_type:
raise BadRequest("No event_type provided")
event_object = bootstrap.IonObject(event_type, origin=origin, **kwargs)
ret_val = self.publish_event_object(event_object)
return ret_val
def _get_actor_id(self):
"""Returns the current ion-actor-id from incoming process headers"""
actor_id = ""
try:
if self.process:
ctx = self.process.get_context()
actor_id = ctx.get('ion-actor-id', None) or ""
except Exception as ex:
pass
return actor_id
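# Minimal usage sketch (event type, origin and field values are illustrative;
# assumes a running container so the event can be published and persisted):
#
#   pub = EventPublisher(event_type="ResourceModifiedEvent")
#   pub.publish_event(origin="dev_123", description="state changed")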
class BaseEventSubscriberMixin(object):
"""
A mixin class for Event subscribers to facilitate inheritance.
EventSubscribers must come in both standard and process level versions, which
rely on common base code. Multiple inheritance is awkward here because both
share a base class, so this mixin is preferred.
"""
@staticmethod
def _topic(event_type, origin, sub_type=None, origin_type=None):
"""
Builds the binding pattern this subscriber listens on.
Any missing component (event_type, sub_type, origin_type, origin) is
replaced with a wildcard, so the subscription matches anything for it.
"""
if event_type == "Event":
event_type = "Event.#"
elif event_type:
event_type = "#.%s.#" % event_type
else:
event_type = "#"
sub_type = sub_type or "*.#"
origin_type = origin_type or "*"
origin = origin or "*"
return "%s.%s.%s.%s" % (event_type, sub_type, origin_type, origin)
def __init__(self, xp_name=None, event_type=None, origin=None, queue_name=None,
sub_type=None, origin_type=None, pattern=None):
self.event_type = event_type
self.sub_type = sub_type
self.origin_type = origin_type
self.origin = origin
xp_name = xp_name or get_events_exchange_point()
if pattern:
binding = pattern
else:
binding = self._topic(event_type, origin, sub_type, origin_type)
self.binding = binding
# TODO: Provide a case where we can have multiple bindings (e.g. different event_types)
# prefix the queue_name, if specified, with the sysname
if queue_name is not None:
if not queue_name.startswith(bootstrap.get_sys_name()):
queue_name = "%s.%s" % (bootstrap.get_sys_name(), queue_name)
else:
queue_name = create_simple_unique_id()
if hasattr(self, "_process") and self._process:
queue_name = "%s_%s" % (self._process._proc_name, queue_name)
queue_name = "%s.%s" % (bootstrap.get_sys_name(), queue_name)
# set this name to be picked up by inherited folks
self._ev_recv_name = (xp_name, queue_name)
class EventSubscriber(Subscriber, BaseEventSubscriberMixin):
ALL_EVENTS = "#"
def __init__(self, xp_name=None, event_type=None, origin=None, queue_name=None, callback=None,
sub_type=None, origin_type=None, pattern=None, auto_delete=None, *args, **kwargs):
"""
Initializer.
If the queue_name is specified here, the sysname is prefixed automatically to it. This is because
named queues are not namespaced to their exchanges, so two different systems on the same broker
can cross-pollute messages if a named queue is used.
Note: an EventSubscriber needs to be closed to free broker resources
"""
self._cbthread = None
self._auto_delete = auto_delete
BaseEventSubscriberMixin.__init__(self, xp_name=xp_name, event_type=event_type, origin=origin,
queue_name=queue_name, sub_type=sub_type, origin_type=origin_type, pattern=pattern)
log.debug("EventPublisher events pattern %s", self.binding)
Subscriber.__init__(self, from_name=self._ev_recv_name, binding=self.binding, callback=callback, **kwargs)
def start(self):
"""
Starts the subscriber listening in a background greenlet.
"""
assert not self._cbthread, "start called twice on EventSubscriber"
gl = spawn(self.listen)
gl._glname = "EventSubscriber"
self._cbthread = gl
if not self._ready_event.wait(timeout=5):
log.warning('EventSubscriber start timed out.')
log.debug("EventSubscriber started. Event pattern=%s", self.binding)
return gl
def stop(self):
self.close()
self._cbthread.join(timeout=5)
self._cbthread.kill()
self._cbthread = None
log.debug("EventSubscriber stopped. Event pattern=%s", self.binding)
def __str__(self):
return "EventSubscriber at %s:\n\trecv_name: %s\n\tcb: %s" % (hex(id(self)), str(self._recv_name), str(self._callback))
def _create_channel(self, **kwargs):
"""
Override to set the channel's queue_auto_delete property.
"""
ch = Subscriber._create_channel(self, **kwargs)
if self._auto_delete is not None:
ch.queue_auto_delete = self._auto_delete
return ch
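# Minimal usage sketch (callback and filter values are illustrative; assumes
# the callback receives the event object and the message headers):
#
#   def on_event(event, headers):
#       log.info("Got %s from %s", event._get_type(), event.origin)
#
#   sub = EventSubscriber(event_type="ResourceModifiedEvent",
#                         origin="dev_123", callback=on_event)
#   sub.start()    # listen in a background greenlet
#   ...
#   sub.stop()     # free broker resources when done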
class EventRepository(object):
"""
Class that uses a data store to provide a persistent repository for ION events.
"""
def __init__(self, datastore_manager=None, container=None):
self.container = container or bootstrap.container_instance
# Get an instance of the datastore configured for events.
# May be persistent or mock, forced clean, with indexes.
datastore_manager = datastore_manager or self.container.datastore_manager
self.event_store = datastore_manager.get_datastore("events", DataStore.DS_PROFILE.EVENTS)
def start(self):
pass
def stop(self):
self.close()
def close(self):
"""
Pass-through method to close the underlying datastore.
"""
self.event_store.close()
def put_event(self, event):
log.trace("Store event persistently %s", event)
if not isinstance(event, Event):
raise BadRequest("event must be type Event, not %s" % type(event))
event_id = event.__dict__.pop("_id", None)
return self.event_store.create(event, event_id)
def put_events(self, events):
log.debug("Store %s events persistently", len(events))
if type(events) is not list:
raise BadRequest("events must be type list, not %s" % type(events))
if not all([isinstance(event, Event) for event in events]):
raise BadRequest("events must all be type Event")
if events:
return self.event_store.create_mult(events, allow_ids=True)
else:
return None
def get_event(self, event_id):
log.trace("Retrieving persistent event for id=%s", event_id)
event_obj = self.event_store.read(event_id)
return event_obj
def find_events(self, event_type=None, origin=None, start_ts=None, end_ts=None, id_only=False, **kwargs):
log.trace("Retrieving persistent event for event_type=%s, origin=%s, start_ts=%s, end_ts=%s, descending=%s, limit=%s",
event_type, origin, start_ts, end_ts, kwargs.get("descending", None), kwargs.get("limit", None))
events = None
design_name = "event"
view_name = None
start_key = []
end_key = []
if origin and event_type:
view_name = "by_origintype"
start_key = [origin, event_type]
end_key = [origin, event_type]
elif origin:
view_name = "by_origin"
start_key = [origin]
end_key = [origin]
elif event_type:
view_name = "by_type"
start_key = [event_type]
end_key = [event_type]
elif start_ts or end_ts:
view_name = "by_time"
start_key = []
end_key = []
else:
view_name = "by_time"
if kwargs.get("limit", 0) < 1:
kwargs["limit"] = 100
log.warn("Querying all events, no limit given. Set limit to 100")
if start_ts:
start_key.append(start_ts)
if end_ts:
end_key.append(end_ts)
events = self.event_store.find_by_view(design_name, view_name, start_key=start_key, end_key=end_key,
id_only=id_only, **kwargs)
return events
def find_events_query(self, query, id_only=False):
"""
Find events or event ids by using a standard datastore query. This function fills in datastore and
profile entries, so these can be omitted from the datastore query.
"""
if not query or not isinstance(query, dict) or not QUERY_EXP_KEY in query:
raise BadRequest("Illegal events query")
qargs = query["query_args"]
qargs["datastore"] = DataStore.DS_EVENTS
qargs["profile"] = DataStore.DS_PROFILE.EVENTS
qargs["id_only"] = id_only
events = self.event_store.find_by_query(query)
log.debug("find_events_query() found %s events", len(events))
return events
class EventGate(EventSubscriber):
def __init__(self, *args, **kwargs):
EventSubscriber.__init__(self, *args, callback=self.trigger_cb, **kwargs)
def trigger_cb(self, event):
self.stop()
self.gate.set()
def await(self, timeout=None):
self.gate = gevent_event.Event()
self.start()
return self.gate.wait(timeout)
def check_or_await(self):
pass
def handle_stream_exception(iorigin="stream_exception"):
"""
Decorator that catches StreamException from the wrapped function and
publishes an ExceptionEvent describing it.
"""
def real_decorator(fn):
@functools.wraps(fn)
def wrapped(*args, **kwargs):
try:
fn(*args, **kwargs)
except StreamException as e:
info = "".join(traceback.format_tb(sys.exc_info()[2]))
pub = EventPublisher(event_type="ExceptionEvent")
pub.publish_event(origin=iorigin, description="stream exception event", exception_type=str(type(e)), message=info)
return wrapped
return real_decorator
class EventQuery(DatastoreQueryBuilder):
"""
Helper class to build datastore queries for the event repository.
Based on the DatastoreQueryBuilder
"""
def __init__(self):
super(EventQuery, self).__init__(datastore=DataStore.DS_EVENTS, profile=DataStore.DS_PROFILE.EVENTS)
def filter_type(self, type_expr, cmpop=None):
return self.txt_cmp(DQ.ATT_TYPE, type_expr, cmpop)
def filter_origin(self, origin_expr, cmpop=None):
return self.txt_cmp(DQ.EA_ORIGIN, origin_expr, cmpop)
def filter_origin_type(self, origin_expr, cmpop=None):
return self.txt_cmp(DQ.EA_ORIGIN_TYPE, origin_expr, cmpop)
def filter_sub_type(self, type_expr, cmpop=None):
return self.txt_cmp(DQ.EA_SUB_TYPE, type_expr, cmpop)
def filter_ts_created(self, from_expr=None, to_expr=None):
from_expr = self._make_ion_ts(from_expr)
to_expr = self._make_ion_ts(to_expr)
if from_expr and to_expr:
return self.and_(self.gte(DQ.EA_TS_CREATED, from_expr),
self.lte(DQ.EA_TS_CREATED, to_expr))
elif from_expr:
return self.gte(DQ.EA_TS_CREATED, from_expr)
elif to_expr:
return self.lte(DQ.EA_TS_CREATED, to_expr)
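# Minimal usage sketch for EventQuery with EventRepository.find_events_query()
# (filter values are illustrative; assumes the query builder exposes
# set_filter()/get_query() as in other pyon query builders -- adjust to the
# actual DatastoreQueryBuilder API):
#
#   eq = EventQuery()
#   eq.set_filter(eq.and_(eq.filter_type("ResourceModifiedEvent"),
#                         eq.filter_origin("dev_123")))
#   events = event_repo.find_events_query(eq.get_query())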
|
|
"""Support for Hyperion remotes."""
import json
import logging
import socket
import voluptuous as vol
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_EFFECT,
ATTR_HS_COLOR,
PLATFORM_SCHEMA,
SUPPORT_BRIGHTNESS,
SUPPORT_COLOR,
SUPPORT_EFFECT,
LightEntity,
)
from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PORT
import homeassistant.helpers.config_validation as cv
import homeassistant.util.color as color_util
_LOGGER = logging.getLogger(__name__)
CONF_DEFAULT_COLOR = "default_color"
CONF_PRIORITY = "priority"
CONF_HDMI_PRIORITY = "hdmi_priority"
CONF_EFFECT_LIST = "effect_list"
DEFAULT_COLOR = [255, 255, 255]
DEFAULT_NAME = "Hyperion"
DEFAULT_PORT = 19444
DEFAULT_PRIORITY = 128
DEFAULT_HDMI_PRIORITY = 880
DEFAULT_EFFECT_LIST = [
"HDMI",
"Cinema brighten lights",
"Cinema dim lights",
"Knight rider",
"Blue mood blobs",
"Cold mood blobs",
"Full color mood blobs",
"Green mood blobs",
"Red mood blobs",
"Warm mood blobs",
"Police Lights Single",
"Police Lights Solid",
"Rainbow mood",
"Rainbow swirl fast",
"Rainbow swirl",
"Random",
"Running dots",
"System Shutdown",
"Snake",
"Sparks Color",
"Sparks",
"Strobe blue",
"Strobe Raspbmc",
"Strobe white",
"Color traces",
"UDP multicast listener",
"UDP listener",
"X-Mas",
]
SUPPORT_HYPERION = SUPPORT_COLOR | SUPPORT_BRIGHTNESS | SUPPORT_EFFECT
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_DEFAULT_COLOR, default=DEFAULT_COLOR): vol.All(
list,
vol.Length(min=3, max=3),
[vol.All(vol.Coerce(int), vol.Range(min=0, max=255))],
),
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_PRIORITY, default=DEFAULT_PRIORITY): cv.positive_int,
vol.Optional(
CONF_HDMI_PRIORITY, default=DEFAULT_HDMI_PRIORITY
): cv.positive_int,
vol.Optional(CONF_EFFECT_LIST, default=DEFAULT_EFFECT_LIST): vol.All(
cv.ensure_list, [cv.string]
),
}
)
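# Example of a validated platform config as a Python dict (host value is
# illustrative; the remaining values mirror the schema defaults above):
#
#   {
#       "host": "192.168.1.10",
#       "port": DEFAULT_PORT,
#       "name": DEFAULT_NAME,
#       "priority": DEFAULT_PRIORITY,
#       "hdmi_priority": DEFAULT_HDMI_PRIORITY,
#       "default_color": DEFAULT_COLOR,
#       "effect_list": DEFAULT_EFFECT_LIST,
#   }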
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up a Hyperion server remote."""
name = config[CONF_NAME]
host = config[CONF_HOST]
port = config[CONF_PORT]
priority = config[CONF_PRIORITY]
hdmi_priority = config[CONF_HDMI_PRIORITY]
default_color = config[CONF_DEFAULT_COLOR]
effect_list = config[CONF_EFFECT_LIST]
device = Hyperion(
name, host, port, priority, default_color, hdmi_priority, effect_list
)
if device.setup():
add_entities([device])
class Hyperion(LightEntity):
"""Representation of a Hyperion remote."""
def __init__(
self, name, host, port, priority, default_color, hdmi_priority, effect_list
):
"""Initialize the light."""
self._host = host
self._port = port
self._name = name
self._priority = priority
self._hdmi_priority = hdmi_priority
self._default_color = default_color
self._rgb_color = [0, 0, 0]
self._rgb_mem = [0, 0, 0]
self._brightness = 255
self._icon = "mdi:lightbulb"
self._effect_list = effect_list
self._effect = None
self._skip_update = False
@property
def name(self):
"""Return the name of the light."""
return self._name
@property
def brightness(self):
"""Return the brightness of this light between 0..255."""
return self._brightness
@property
def hs_color(self):
"""Return last color value set."""
return color_util.color_RGB_to_hs(*self._rgb_color)
@property
def is_on(self):
"""Return true if not black."""
return self._rgb_color != [0, 0, 0]
@property
def icon(self):
"""Return state specific icon."""
return self._icon
@property
def effect(self):
"""Return the current effect."""
return self._effect
@property
def effect_list(self):
"""Return the list of supported effects."""
return self._effect_list
@property
def supported_features(self):
"""Flag supported features."""
return SUPPORT_HYPERION
def turn_on(self, **kwargs):
"""Turn the lights on."""
if ATTR_HS_COLOR in kwargs:
rgb_color = color_util.color_hs_to_RGB(*kwargs[ATTR_HS_COLOR])
elif self._rgb_mem == [0, 0, 0]:
rgb_color = self._default_color
else:
rgb_color = self._rgb_mem
brightness = kwargs.get(ATTR_BRIGHTNESS, self._brightness)
if ATTR_EFFECT in kwargs:
self._skip_update = True
self._effect = kwargs[ATTR_EFFECT]
if self._effect == "HDMI":
self.json_request({"command": "clearall"})
self._icon = "mdi:video-input-hdmi"
self._brightness = 255
self._rgb_color = [125, 125, 125]
else:
self.json_request(
{
"command": "effect",
"priority": self._priority,
"effect": {"name": self._effect},
}
)
self._icon = "mdi:lava-lamp"
self._rgb_color = [175, 0, 255]
return
cal_color = [int(round(x * float(brightness) / 255)) for x in rgb_color]
self.json_request(
{"command": "color", "priority": self._priority, "color": cal_color}
)
def turn_off(self, **kwargs):
"""Disconnect all remotes."""
self.json_request({"command": "clearall"})
self.json_request(
{"command": "color", "priority": self._priority, "color": [0, 0, 0]}
)
def update(self):
"""Get the lights status."""
# postpone the immediate state check for changes that take time
if self._skip_update:
self._skip_update = False
return
response = self.json_request({"command": "serverinfo"})
if response:
# workaround for outdated Hyperion
if "activeLedColor" not in response["info"]:
self._rgb_color = self._default_color
self._rgb_mem = self._default_color
self._brightness = 255
self._icon = "mdi:lightbulb"
self._effect = None
return
# Check if Hyperion is in ambilight mode through an HDMI grabber
try:
active_priority = response["info"]["priorities"][0]["priority"]
if active_priority == self._hdmi_priority:
self._brightness = 255
self._rgb_color = [125, 125, 125]
self._icon = "mdi:video-input-hdmi"
self._effect = "HDMI"
return
except (KeyError, IndexError):
pass
led_color = response["info"]["activeLedColor"]
if not led_color or led_color[0]["RGB Value"] == [0, 0, 0]:
# Get the active effect
if response["info"].get("activeEffects"):
self._rgb_color = [175, 0, 255]
self._icon = "mdi:lava-lamp"
try:
s_name = response["info"]["activeEffects"][0]["script"]
s_name = s_name.split("/")[-1][:-3].split("-")[0]
self._effect = [
x for x in self._effect_list if s_name.lower() in x.lower()
][0]
except (KeyError, IndexError):
self._effect = None
# Bulb off state
else:
self._rgb_color = [0, 0, 0]
self._icon = "mdi:lightbulb"
self._effect = None
else:
# Get the RGB color
self._rgb_color = led_color[0]["RGB Value"]
self._brightness = max(self._rgb_color)
self._rgb_mem = [
int(round(float(x) * 255 / self._brightness))
for x in self._rgb_color
]
self._icon = "mdi:lightbulb"
self._effect = None
def setup(self):
"""Get the hostname of the remote."""
response = self.json_request({"command": "serverinfo"})
if response:
if self._name == self._host:
self._name = response["info"]["hostname"]
return True
return False
def json_request(self, request, wait_for_response=False):
"""Communicate with the JSON server."""
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.settimeout(5)
try:
sock.connect((self._host, self._port))
except OSError:
sock.close()
return False
sock.send(bytearray(f"{json.dumps(request)}\n", "utf-8"))
try:
buf = sock.recv(4096)
except socket.timeout:
# Something is wrong, assume it's offline
sock.close()
return False
# Read until a newline or timeout
buffering = True
while buffering:
if "\n" in str(buf, "utf-8"):
response = str(buf, "utf-8").split("\n")[0]
buffering = False
else:
try:
more = sock.recv(4096)
except socket.timeout:
more = None
if not more:
buffering = False
response = str(buf, "utf-8")
else:
buf += more
sock.close()
return json.loads(response)
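# Illustrative JSON exchange with the Hyperion server (request shapes taken
# from the commands used above; the exact response layout depends on the
# server version):
#
#   request:  {"command": "color", "priority": 128, "color": [255, 0, 0]}
#   request:  {"command": "serverinfo"}
#   response: {"info": {"hostname": "htpc", "priorities": [...],
#                       "activeLedColor": [{"RGB Value": [255, 0, 0]}],
#                       "activeEffects": []}}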
|
|
#!/usr/bin/env python
"""
Download Padova isochrones from:
http://stev.oapd.inaf.it/cgi-bin/cmd
Adapted from ezpadova by Morgan Fouesneau:
https://github.com/mfouesneau/ezpadova
"""
import os
try:
from urllib.parse import urlencode
from urllib.request import urlopen
except ImportError:
from urllib import urlencode
from urllib2 import urlopen
import re
import subprocess
from multiprocessing import Pool
from collections import OrderedDict as odict
import copy
import numpy as np
from ugali.utils.logger import logger
from ugali.utils.shell import mkdir
from ugali.analysis.isochrone import Padova
# survey system
photsys_dict = odict([
('des' ,'tab_mag_odfnew/tab_mag_decam.dat'),
('sdss','tab_mag_odfnew/tab_mag_sloan.dat'),
('ps1' ,'tab_mag_odfnew/tab_mag_panstarrs1.dat'),
('lsst','tab_mag_odfnew/tab_mag_lsst.dat'),
])
photname_dict = odict([
('des' ,'DECAM'),
('sdss','SDSS'),
('ps1' ,'Pan-STARRS1'),
('lsst','LSST'),
])
# Commented options may need to be restored for older version/isochrones.
# The parameters were tracked down by:
# Chrome -> View -> Developer -> Developer Tools
# Network -> Headers -> Request Payload
defaults_cmd= {#'binary_frac': 0.3,
#'binary_kind': 1,
#'binary_mrinf': 0.7,
#'binary_mrsup': 1,
'cmd_version': 2.7,
'dust_source': 'nodust',
'dust_sourceC': 'nodustC',
'dust_sourceM': 'nodustM',
'eta_reimers': 0.2,
#'extinction_av': 0,
#'icm_lim': 4,
'imf_file': 'tab_imf/imf_chabrier_lognormal.dat',
'isoc_age': 1e9,
'isoc_age0': 12.7e9,
'isoc_dlage': 0.05,
'isoc_dz': 0.0001,
'isoc_kind': 'parsec_CAF09_v1.2S',
'isoc_lage0': 6.602, #Minimum allowed age
'isoc_lage1': 10.1303, #Maximum allowed age
'isoc_val': 0,
'isoc_z0': 0.0001, #Minimum allowed metallicity
'isoc_z1': 0.03, #Maximum allowed metallicity
'isoc_zeta': 0.0002,
'isoc_zeta0': 0.0002,
'kind_cspecmag': 'aringer09',
'kind_dust': 0,
'kind_interp': 1,
'kind_mag': 2,
'kind_postagb': -1,
'kind_pulsecycle': 0,
#'kind_tpagb': 0,
#'lf_deltamag': 0.2,
#'lf_maginf': 20,
#'lf_magsup': -20,
#'mag_lim': 26,
#'mag_res': 0.1,
'output_evstage': 1,
'output_gzip': 0,
'output_kind': 0,
'photsys_file': photsys_dict['des'],
#'photsys_version': 'yang',
'submit_form': 'Submit'}
defaults_27 = dict(defaults_cmd,cmd_version='2.7')
defaults_28 = dict(defaults_cmd,cmd_version='2.8')
defaults_29 = dict(defaults_cmd,cmd_version='2.9')
defaults_30 = dict(defaults_cmd,cmd_version='3.0')
class Download(object):
isochrone = None
def __init__(self,survey='des',**kwargs):
self.survey=survey.lower()
def create_grid(self,abins,zbins):
arange = np.linspace(abins[0],abins[1],abins[2]+1)
zrange = np.logspace(np.log10(zbins[0]),np.log10(zbins[1]),zbins[2]+1)
aa,zz = np.meshgrid(arange,zrange)
return aa.flatten(),zz.flatten()
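# Example (illustrative bins): create_grid([10.0, 13.5, 7], [1e-4, 1e-3, 9])
# yields two flattened arrays of 8 * 10 = 80 (age, metallicity) pairs, with
# ages spaced linearly and metallicities spaced logarithmically.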
def print_info(self,age,metallicity):
params = dict(age=age,z=metallicity)
params['name'] = self.__class__.__name__
params['survey'] = self.survey
params['feh'] = self.isochrone.z2feh(metallicity)
msg = 'Downloading: %(name)s (survey=%(survey)s, age=%(age).1fGyr, Z=%(z).5f, Fe/H=%(feh).3f)'%params
logger.info(msg)
return msg
def query_server(self,outfile,age,metallicity):
msg = "'query_server' not implemented by base class."
logger.error(msg)
raise RuntimeError(msg)
@classmethod
def verify(cls,filename,survey,age,metallicity):
msg = "'verify' not implemented by base class."
logger.error(msg)
raise RuntimeError(msg)
def download(self,age,metallicity,outdir=None,force=False):
"""
Check valid parameter range and download isochrones from:
http://stev.oapd.inaf.it/cgi-bin/cmd
"""
if outdir is None: outdir = './'
basename = self.isochrone.params2filename(age,metallicity)
outfile = os.path.join(outdir,basename)
if os.path.exists(outfile) and not force:
try:
self.verify(outfile,self.survey,age,metallicity)
logger.info("Found %s; skipping..."%(outfile))
return
except Exception as e:
msg = "Overwriting corrupted %s..."%(outfile)
logger.warn(msg)
#os.remove(outfile)
mkdir(outdir)
self.print_info(age,metallicity)
try:
self.query_server(outfile,age,metallicity)
except Exception as e:
logger.debug(str(e))
raise RuntimeError('Bad server response')
if not os.path.exists(outfile):
raise RuntimeError('Download failed')
try:
self.verify(outfile,self.survey,age,metallicity)
except Exception as e:
msg = "Output file is corrupted."
logger.error(msg)
#os.remove(outfile)
raise(e)
return outfile
class Padova(Download):
defaults = copy.deepcopy(defaults_27)
isochrone = Padova
abins = np.arange(1.0, 13.5 + 0.1, 0.1)
zbins = np.arange(1e-4,1e-3 + 1e-5,1e-5)
def query_server(self,outfile,age,metallicity):
epsilon = 1e-4
lage = np.log10(age*1e9)
lage_min,lage_max = self.defaults['isoc_lage0'],self.defaults['isoc_lage1']
if not (lage_min-epsilon < lage <lage_max+epsilon):
msg = 'Age outside of valid range: %g [%g < log(age) < %g]'%(lage,lage_min,lage_max)
raise RuntimeError(msg)
z_min,z_max = self.defaults['isoc_z0'],self.defaults['isoc_z1']
if not (z_min <= metallicity <= z_max):
msg = 'Metallicity outside of valid range: %g [%g < z < %g]'%(metallicity,z_min,z_max)
raise RuntimeError(msg)
d = dict(self.defaults)
d['photsys_file'] = photsys_dict[self.survey]
d['isoc_age'] = age * 1e9
d['isoc_zeta'] = metallicity
server = 'http://stev.oapd.inaf.it'
url = server + '/cgi-bin/cmd_%s'%d['cmd_version']
logger.debug("Accessing %s..."%url)
q = urlencode(d)
logger.debug(url+'?'+q)
c = str(urlopen(url, str.encode(q)).read())
aa = re.compile(r'output\d+')
fname = aa.findall(c)
if len(fname) == 0:
msg = "Output filename not found"
raise RuntimeError(msg)
out = '{0}/tmp/{1}.dat'.format(server, fname[0])
cmd = 'wget %s -O %s'%(out,outfile)
logger.debug(cmd)
stdout = subprocess.check_output(cmd,shell=True,stderr=subprocess.STDOUT)
logger.debug(stdout)
return outfile
# NOTE: superseded by the @classmethod-decorated 'verify' defined below.
@classmethod
def verify(cls, filename, survey, age, metallicity):
age = age*1e9
nlines=14
with open(filename,'r') as f:
lines = [f.readline() for i in range(nlines)]
if len(lines) < nlines:
msg = "Incorrect file size"
raise Exception(msg)
try:
s = lines[2].split()[-2]
assert photname_dict[survey][:4] in s
except:
msg = "Incorrect survey:\n"+lines[2]
raise Exception(msg)
try:
z = lines[5].split()[2]
assert np.allclose(metallicity,float(z),atol=1e-3)
except:
msg = "Metallicity does not match:\n"+lines[5]
raise Exception(msg)
try:
a = lines[13].split()[1]
assert np.allclose(age,float(a),atol=1e-5)
except:
msg = "Age does not match:\n"+lines[13]
raise Exception(msg)
@classmethod
def verify(cls, filename, survey, age, metallicity):
age = age*1e9
nlines=15
with open(filename,'r') as f:
lines = [f.readline() for i in range(nlines)]
if len(lines) < nlines:
msg = "Incorrect file size"
raise Exception(msg)
for i,l in enumerate(lines):
if l.startswith('# Photometric system:'): break
else:
msg = "Incorrect file header"
raise Exception(msg)
try:
s = lines[i].split()[3]
assert photname_dict[survey] == s
except:
msg = "Incorrect survey:\n"+lines[i]
raise Exception(msg)
try:
z = lines[-1].split()[0]
assert np.allclose(metallicity,float(z),atol=1e-5)
except:
msg = "Metallicity does not match:\n"+lines[-1]
raise Exception(msg)
try:
a = lines[-1].split()[1]
# Need to deal with age or log-age
assert (np.allclose(age,float(a),atol=1e-2) or
np.allclose(np.log10(age),float(a),atol=1e-2))
except:
msg = "Age does not match:\n"+lines[-1]
raise Exception(msg)
class Girardi2002(Padova):
defaults = dict(defaults_27)
defaults['isoc_kind'] = 'gi2000'
class Marigo2008(Padova):
defaults = dict(defaults_27)
defaults['isoc_kind'] = 'ma08'
class Girardi2010a(Padova):
defaults = dict(defaults_27)
defaults['isoc_kind'] = 'gi10a'
class Girardi2010b(Padova):
defaults = dict(defaults_27)
defaults['isoc_kind'] = 'gi10b'
class Bressan2012(Padova):
defaults = dict(defaults_27)
defaults['isoc_kind'] = 'parsec_CAF09_v1.2S'
class Marigo2017(Padova):
defaults = dict(defaults_30)
defaults['isoc_kind'] = 'parsec_CAF09_v1.2S_NOV13'
def factory(name, **kwargs):
from ugali.utils.factory import factory
return factory(name, module=__name__, **kwargs)
if __name__ == "__main__":
import ugali.utils.parser
description = "Download isochrones"
parser = ugali.utils.parser.Parser(description=description)
parser.add_verbose()
parser.add_force()
parser.add_argument('-a','--age',default=None,type=float,action='append')
parser.add_argument('-z','--metallicity',default=None,type=float,action='append')
parser.add_argument('-k','--kind',default='Bressan2012')
parser.add_argument('-s','--survey',default='des')
parser.add_argument('-o','--outdir',default=None)
parser.add_argument('-n','--njobs',default=10,type=int)
args = parser.parse_args()
if args.verbose:
try:
from http.client import HTTPConnection
except ImportError:
from httplib import HTTPConnection
HTTPConnection.debuglevel = 1
if args.outdir is None:
args.outdir = os.path.join(args.survey.lower(),args.kind.lower())
logger.info("Writing to output directory: %s"%args.outdir)
p = factory(args.kind,survey=args.survey)
# Defaults
abins = [args.age] if args.age else p.abins
zbins = [args.metallicity] if args.metallicity else p.zbins
grid = [g.flatten() for g in np.meshgrid(abins,zbins)]
logger.info("Ages:\n %s"%np.unique(grid[0]))
logger.info("Metallicities:\n %s"%np.unique(grid[1]))
def run(args):
try:
p.download(*args)
except Exception as e:
logger.warn(str(e))
logger.error("Download failed.")
arguments = [(a,z,args.outdir,args.force) for a,z in zip(*grid)]
if args.njobs > 1:
pool = Pool(processes=args.njobs, maxtasksperchild=100)
results = pool.map(run,arguments)
else:
results = list(map(run,arguments))
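# Example invocation (illustrative values; the script/module name depends on
# where this file lives in the package):
#
#   python padova.py -k Bressan2012 -s des -a 12.0 -z 0.0002 -n 4 -o ./iso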
|
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf import settings # noqa
from django.core.urlresolvers import reverse # noqa
from django.forms import widgets
from django import http
from mox import IsA # noqa
from openstack_dashboard import api
from openstack_dashboard.api import cinder
from openstack_dashboard.dashboards.project.volumes import tables
from openstack_dashboard.test import helpers as test
from openstack_dashboard.usage import quotas
class VolumeViewTests(test.TestCase):
@test.create_stubs({cinder: ('volume_create',
'volume_snapshot_list',
'volume_type_list',),
api.glance: ('image_list_detailed',),
quotas: ('tenant_limit_usages',)})
def test_create_volume(self):
volume = self.volumes.first()
volume_type = self.volume_types.first()
usage_limit = {'maxTotalVolumeGigabytes': 250,
'gigabytesUsed': 20,
'volumesUsed': len(self.volumes.list()),
'maxTotalVolumes': 6}
formData = {'name': u'A Volume I Am Making',
'description': u'This is a volume I am making for a test.',
'method': u'CreateForm',
'type': volume_type.name,
'size': 50,
'snapshot_source': ''}
cinder.volume_type_list(IsA(http.HttpRequest)).\
AndReturn(self.volume_types.list())
quotas.tenant_limit_usages(IsA(http.HttpRequest)).\
AndReturn(usage_limit)
cinder.volume_snapshot_list(IsA(http.HttpRequest)).\
AndReturn(self.volume_snapshots.list())
api.glance.image_list_detailed(IsA(http.HttpRequest),
filters={'is_public': True,
'status': 'active'}) \
.AndReturn([self.images.list(), False])
api.glance.image_list_detailed(IsA(http.HttpRequest),
filters={'property-owner_id': self.tenant.id,
'status': 'active'}) \
.AndReturn([[], False])
cinder.volume_create(IsA(http.HttpRequest),
formData['size'],
formData['name'],
formData['description'],
formData['type'],
metadata={},
snapshot_id=None,
image_id=None).AndReturn(volume)
self.mox.ReplayAll()
url = reverse('horizon:project:volumes:create')
res = self.client.post(url, formData)
redirect_url = reverse('horizon:project:volumes:index')
self.assertRedirectsNoFollow(res, redirect_url)
@test.create_stubs({cinder: ('volume_create',
'volume_snapshot_list',
'volume_type_list',),
api.glance: ('image_list_detailed',),
quotas: ('tenant_limit_usages',)})
def test_create_volume_dropdown(self):
volume = self.volumes.first()
usage_limit = {'maxTotalVolumeGigabytes': 250,
'gigabytesUsed': 20,
'volumesUsed': len(self.volumes.list()),
'maxTotalVolumes': 6}
formData = {'name': u'A Volume I Am Making',
'description': u'This is a volume I am making for a test.',
'method': u'CreateForm',
'size': 50,
'type': '',
'volume_source_type': 'no_source_type',
'snapshot_source': self.volume_snapshots.first().id,
'image_source': self.images.first().id}
cinder.volume_type_list(IsA(http.HttpRequest)).\
AndReturn(self.volume_types.list())
cinder.volume_snapshot_list(IsA(http.HttpRequest)).\
AndReturn(self.volume_snapshots.list())
api.glance.image_list_detailed(IsA(http.HttpRequest),
filters={'is_public': True,
'status': 'active'}) \
.AndReturn([self.images.list(), False])
api.glance.image_list_detailed(IsA(http.HttpRequest),
filters={'property-owner_id': self.tenant.id,
'status': 'active'}) \
.AndReturn([[], False])
quotas.tenant_limit_usages(IsA(http.HttpRequest)).\
AndReturn(usage_limit)
cinder.volume_create(IsA(http.HttpRequest),
formData['size'],
formData['name'],
formData['description'],
'',
metadata={},
snapshot_id=None,
image_id=None).\
AndReturn(volume)
self.mox.ReplayAll()
url = reverse('horizon:project:volumes:create')
res = self.client.post(url, formData)
redirect_url = reverse('horizon:project:volumes:index')
self.assertRedirectsNoFollow(res, redirect_url)
@test.create_stubs({cinder: ('volume_create',
'volume_snapshot_get',
'volume_get',
'volume_type_list',),
quotas: ('tenant_limit_usages',)})
def test_create_volume_from_snapshot(self):
volume = self.volumes.first()
usage_limit = {'maxTotalVolumeGigabytes': 250,
'gigabytesUsed': 20,
'volumesUsed': len(self.volumes.list()),
'maxTotalVolumes': 6}
snapshot = self.volume_snapshots.first()
formData = {'name': u'A Volume I Am Making',
'description': u'This is a volume I am making for a test.',
'method': u'CreateForm',
'size': 50,
'type': '',
'snapshot_source': snapshot.id}
cinder.volume_type_list(IsA(http.HttpRequest)).\
AndReturn(self.volume_types.list())
quotas.tenant_limit_usages(IsA(http.HttpRequest)).\
AndReturn(usage_limit)
cinder.volume_snapshot_get(IsA(http.HttpRequest),
str(snapshot.id)).AndReturn(snapshot)
cinder.volume_get(IsA(http.HttpRequest), snapshot.volume_id).\
AndReturn(self.volumes.first())
cinder.volume_create(IsA(http.HttpRequest),
formData['size'],
formData['name'],
formData['description'],
'',
metadata={},
snapshot_id=snapshot.id,
image_id=None).\
AndReturn(volume)
self.mox.ReplayAll()
# get snapshot from url
url = reverse('horizon:project:volumes:create')
res = self.client.post("?".join([url,
"snapshot_id=" + str(snapshot.id)]),
formData)
redirect_url = reverse('horizon:project:volumes:index')
self.assertRedirectsNoFollow(res, redirect_url)
@test.create_stubs({cinder: ('volume_create',
'volume_snapshot_list',
'volume_snapshot_get',
'volume_get',
'volume_type_list',),
api.glance: ('image_list_detailed',),
quotas: ('tenant_limit_usages',)})
def test_create_volume_from_snapshot_dropdown(self):
volume = self.volumes.first()
usage_limit = {'maxTotalVolumeGigabytes': 250,
'gigabytesUsed': 20,
'volumesUsed': len(self.volumes.list()),
'maxTotalVolumes': 6}
snapshot = self.volume_snapshots.first()
formData = {'name': u'A Volume I Am Making',
'description': u'This is a volume I am making for a test.',
'method': u'CreateForm',
'size': 50,
'type': '',
'volume_source_type': 'snapshot_source',
'snapshot_source': snapshot.id}
cinder.volume_type_list(IsA(http.HttpRequest)).\
AndReturn(self.volume_types.list())
cinder.volume_snapshot_list(IsA(http.HttpRequest)).\
AndReturn(self.volume_snapshots.list())
api.glance.image_list_detailed(IsA(http.HttpRequest),
filters={'is_public': True,
'status': 'active'}) \
.AndReturn([self.images.list(), False])
api.glance.image_list_detailed(IsA(http.HttpRequest),
filters={'property-owner_id': self.tenant.id,
'status': 'active'}) \
.AndReturn([[], False])
quotas.tenant_limit_usages(IsA(http.HttpRequest)).\
AndReturn(usage_limit)
cinder.volume_snapshot_get(IsA(http.HttpRequest),
str(snapshot.id)).AndReturn(snapshot)
cinder.volume_create(IsA(http.HttpRequest),
formData['size'],
formData['name'],
formData['description'],
'',
metadata={},
snapshot_id=snapshot.id,
image_id=None).\
AndReturn(volume)
self.mox.ReplayAll()
# get snapshot from dropdown list
url = reverse('horizon:project:volumes:create')
res = self.client.post(url, formData)
redirect_url = reverse('horizon:project:volumes:index')
self.assertRedirectsNoFollow(res, redirect_url)
@test.create_stubs({cinder: ('volume_snapshot_get',
'volume_type_list',
'volume_get',),
api.glance: ('image_list_detailed',),
quotas: ('tenant_limit_usages',)})
def test_create_volume_from_snapshot_invalid_size(self):
usage_limit = {'maxTotalVolumeGigabytes': 100,
'gigabytesUsed': 20,
'volumesUsed': len(self.volumes.list()),
'maxTotalVolumes': 6}
snapshot = self.volume_snapshots.first()
formData = {'name': u'A Volume I Am Making',
'description': u'This is a volume I am making for a test.',
'method': u'CreateForm',
'size': 20, 'snapshot_source': snapshot.id}
cinder.volume_type_list(IsA(http.HttpRequest)).\
AndReturn(self.volume_types.list())
quotas.tenant_limit_usages(IsA(http.HttpRequest)).\
AndReturn(usage_limit)
cinder.volume_snapshot_get(IsA(http.HttpRequest),
str(snapshot.id)).AndReturn(snapshot)
cinder.volume_get(IsA(http.HttpRequest), snapshot.volume_id).\
AndReturn(self.volumes.first())
quotas.tenant_limit_usages(IsA(http.HttpRequest)).\
AndReturn(usage_limit)
self.mox.ReplayAll()
url = reverse('horizon:project:volumes:create')
res = self.client.post("?".join([url,
"snapshot_id=" + str(snapshot.id)]),
formData, follow=True)
self.assertEqual(res.redirect_chain, [])
self.assertFormError(res, 'form', None,
"The volume size cannot be less than the "
"snapshot size (40GB)")
@test.create_stubs({cinder: ('volume_create',
'volume_type_list',),
api.glance: ('image_get',),
quotas: ('tenant_limit_usages',)})
def test_create_volume_from_image(self):
volume = self.volumes.first()
usage_limit = {'maxTotalVolumeGigabytes': 200,
'gigabytesUsed': 20,
'volumesUsed': len(self.volumes.list()),
'maxTotalVolumes': 6}
image = self.images.first()
formData = {'name': u'A Volume I Am Making',
'description': u'This is a volume I am making for a test.',
'method': u'CreateForm',
'size': 40,
'type': '',
'image_source': image.id}
cinder.volume_type_list(IsA(http.HttpRequest)).\
AndReturn(self.volume_types.list())
quotas.tenant_limit_usages(IsA(http.HttpRequest)).\
AndReturn(usage_limit)
api.glance.image_get(IsA(http.HttpRequest),
str(image.id)).AndReturn(image)
cinder.volume_create(IsA(http.HttpRequest),
formData['size'],
formData['name'],
formData['description'],
'',
metadata={},
snapshot_id=None,
image_id=image.id).\
AndReturn(volume)
self.mox.ReplayAll()
# get image from url
url = reverse('horizon:project:volumes:create')
res = self.client.post("?".join([url,
"image_id=" + str(image.id)]),
formData)
redirect_url = reverse('horizon:project:volumes:index')
self.assertRedirectsNoFollow(res, redirect_url)
@test.create_stubs({cinder: ('volume_create',
'volume_type_list',
'volume_snapshot_list',),
api.glance: ('image_get',
'image_list_detailed'),
quotas: ('tenant_limit_usages',)})
def test_create_volume_from_image_dropdown(self):
volume = self.volumes.first()
usage_limit = {'maxTotalVolumeGigabytes': 200,
'gigabytesUsed': 20,
'volumesUsed': len(self.volumes.list()),
'maxTotalVolumes': 6}
image = self.images.first()
formData = {'name': u'A Volume I Am Making',
'description': u'This is a volume I am making for a test.',
'method': u'CreateForm',
'size': 30,
'type': '',
'volume_source_type': 'image_source',
'snapshot_source': self.volume_snapshots.first().id,
'image_source': image.id}
cinder.volume_type_list(IsA(http.HttpRequest)).\
AndReturn(self.volume_types.list())
cinder.volume_snapshot_list(IsA(http.HttpRequest)).\
AndReturn(self.volume_snapshots.list())
api.glance.image_list_detailed(IsA(http.HttpRequest),
filters={'is_public': True,
'status': 'active'}) \
.AndReturn([self.images.list(), False])
api.glance.image_list_detailed(IsA(http.HttpRequest),
filters={'property-owner_id': self.tenant.id,
'status': 'active'}) \
.AndReturn([[], False])
quotas.tenant_limit_usages(IsA(http.HttpRequest)) \
.AndReturn(usage_limit)
api.glance.image_get(IsA(http.HttpRequest),
str(image.id)).AndReturn(image)
cinder.volume_create(IsA(http.HttpRequest),
formData['size'],
formData['name'],
formData['description'],
'',
metadata={},
snapshot_id=None,
image_id=image.id).\
AndReturn(volume)
self.mox.ReplayAll()
# get image from dropdown list
url = reverse('horizon:project:volumes:create')
res = self.client.post(url, formData)
redirect_url = reverse('horizon:project:volumes:index')
self.assertRedirectsNoFollow(res, redirect_url)
@test.create_stubs({cinder: ('volume_type_list',),
api.glance: ('image_get',
'image_list_detailed'),
quotas: ('tenant_limit_usages',)})
def test_create_volume_from_image_invalid_size(self):
usage_limit = {'maxTotalVolumeGigabytes': 100,
'gigabytesUsed': 20,
'volumesUsed': len(self.volumes.list()),
'maxTotalVolumes': 6}
image = self.images.first()
formData = {'name': u'A Volume I Am Making',
'description': u'This is a volume I am making for a test.',
'method': u'CreateForm',
'size': 1, 'image_source': image.id}
cinder.volume_type_list(IsA(http.HttpRequest)).\
AndReturn(self.volume_types.list())
quotas.tenant_limit_usages(IsA(http.HttpRequest)).\
AndReturn(usage_limit)
api.glance.image_get(IsA(http.HttpRequest),
str(image.id)).AndReturn(image)
quotas.tenant_limit_usages(IsA(http.HttpRequest)).\
AndReturn(usage_limit)
self.mox.ReplayAll()
url = reverse('horizon:project:volumes:create')
res = self.client.post("?".join([url,
"image_id=" + str(image.id)]),
formData, follow=True)
self.assertEqual(res.redirect_chain, [])
self.assertFormError(res, 'form', None,
"The volume size cannot be less than the "
"image size (20.0 GB)")
@test.create_stubs({cinder: ('volume_snapshot_list', 'volume_type_list',),
api.glance: ('image_list_detailed',),
quotas: ('tenant_limit_usages',)})
def test_create_volume_gb_used_over_alloted_quota(self):
usage_limit = {'maxTotalVolumeGigabytes': 100,
'gigabytesUsed': 80,
'volumesUsed': len(self.volumes.list()),
'maxTotalVolumes': 6}
formData = {'name': u'This Volume Is Huge!',
'description': u'This is a volume that is just too big!',
'method': u'CreateForm',
'size': 5000}
cinder.volume_type_list(IsA(http.HttpRequest)).\
AndReturn(self.volume_types.list())
quotas.tenant_limit_usages(IsA(http.HttpRequest)).\
AndReturn(usage_limit)
cinder.volume_snapshot_list(IsA(http.HttpRequest)).\
AndReturn(self.volume_snapshots.list())
api.glance.image_list_detailed(IsA(http.HttpRequest),
filters={'is_public': True,
'status': 'active'}) \
.AndReturn([self.images.list(), False])
api.glance.image_list_detailed(IsA(http.HttpRequest),
filters={'property-owner_id': self.tenant.id,
'status': 'active'}) \
.AndReturn([[], False])
quotas.tenant_limit_usages(IsA(http.HttpRequest)).\
AndReturn(usage_limit)
self.mox.ReplayAll()
url = reverse('horizon:project:volumes:create')
res = self.client.post(url, formData)
expected_error = [u'A volume of 5000GB cannot be created as you only'
' have 20GB of your quota available.']
self.assertEqual(res.context['form'].errors['__all__'], expected_error)
@test.create_stubs({cinder: ('volume_snapshot_list', 'volume_type_list',),
api.glance: ('image_list_detailed',),
quotas: ('tenant_limit_usages',)})
def test_create_volume_number_over_alloted_quota(self):
usage_limit = {'maxTotalVolumeGigabytes': 100,
'gigabytesUsed': 20,
'volumesUsed': len(self.volumes.list()),
'maxTotalVolumes': len(self.volumes.list())}
formData = {'name': u'Too Many...',
'description': u'We have no volumes left!',
'method': u'CreateForm',
'size': 10}
cinder.volume_type_list(IsA(http.HttpRequest)).\
AndReturn(self.volume_types.list())
quotas.tenant_limit_usages(IsA(http.HttpRequest)).\
AndReturn(usage_limit)
cinder.volume_snapshot_list(IsA(http.HttpRequest)).\
AndReturn(self.volume_snapshots.list())
api.glance.image_list_detailed(IsA(http.HttpRequest),
filters={'is_public': True,
'status': 'active'}) \
.AndReturn([self.images.list(), False])
api.glance.image_list_detailed(IsA(http.HttpRequest),
filters={'property-owner_id': self.tenant.id,
'status': 'active'}) \
.AndReturn([[], False])
quotas.tenant_limit_usages(IsA(http.HttpRequest)).\
AndReturn(usage_limit)
self.mox.ReplayAll()
url = reverse('horizon:project:volumes:create')
res = self.client.post(url, formData)
expected_error = [u'You are already using all of your available'
' volumes.']
self.assertEqual(res.context['form'].errors['__all__'], expected_error)
@test.create_stubs({cinder: ('volume_list',
'volume_delete',),
api.nova: ('server_list',),
quotas: ('tenant_quota_usages',)})
def test_delete_volume(self):
volume = self.volumes.first()
formData = {'action':
'volumes__delete__%s' % volume.id}
cinder.volume_list(IsA(http.HttpRequest), search_opts=None).\
AndReturn(self.volumes.list())
cinder.volume_delete(IsA(http.HttpRequest), volume.id)
api.nova.server_list(IsA(http.HttpRequest), search_opts=None).\
AndReturn([self.servers.list(), False])
cinder.volume_list(IsA(http.HttpRequest), search_opts=None).\
AndReturn(self.volumes.list())
api.nova.server_list(IsA(http.HttpRequest), search_opts=None).\
AndReturn([self.servers.list(), False])
quotas.tenant_quota_usages(IsA(http.HttpRequest)).MultipleTimes().\
AndReturn(self.quota_usages.first())
self.mox.ReplayAll()
url = reverse('horizon:project:volumes:index')
res = self.client.post(url, formData, follow=True)
self.assertIn("Scheduled deletion of Volume: Volume name",
[m.message for m in res.context['messages']])
@test.create_stubs({cinder: ('volume_list',
'volume_delete',),
api.nova: ('server_list',),
quotas: ('tenant_quota_usages',)})
def test_delete_volume_error_existing_snapshot(self):
volume = self.volumes.first()
formData = {'action':
'volumes__delete__%s' % volume.id}
exc = self.exceptions.cinder.__class__(400,
"error: dependent snapshots")
cinder.volume_list(IsA(http.HttpRequest), search_opts=None).\
AndReturn(self.volumes.list())
cinder.volume_delete(IsA(http.HttpRequest), volume.id).\
AndRaise(exc)
api.nova.server_list(IsA(http.HttpRequest), search_opts=None).\
AndReturn([self.servers.list(), False])
cinder.volume_list(IsA(http.HttpRequest), search_opts=None).\
AndReturn(self.volumes.list())
api.nova.server_list(IsA(http.HttpRequest), search_opts=None).\
AndReturn([self.servers.list(), False])
quotas.tenant_quota_usages(IsA(http.HttpRequest)).MultipleTimes().\
AndReturn(self.quota_usages.first())
self.mox.ReplayAll()
url = reverse('horizon:project:volumes:index')
res = self.client.post(url, formData, follow=True)
self.assertMessageCount(res, error=1)
self.assertEqual(list(res.context['messages'])[0].message,
u'Unable to delete volume "%s". '
u'One or more snapshots depend on it.' %
volume.display_name)
@test.create_stubs({cinder: ('volume_get',), api.nova: ('server_list',)})
def test_edit_attachments(self):
volume = self.volumes.first()
servers = [s for s in self.servers.list()
if s.tenant_id == self.request.user.tenant_id]
cinder.volume_get(IsA(http.HttpRequest), volume.id).AndReturn(volume)
api.nova.server_list(IsA(http.HttpRequest)).AndReturn([servers, False])
self.mox.ReplayAll()
url = reverse('horizon:project:volumes:attach', args=[volume.id])
res = self.client.get(url)
# Asserting length of 2 accounts for the one instance option,
# and the one 'Choose Instance' option.
form = res.context['form']
self.assertEqual(len(form.fields['instance']._choices),
2)
self.assertEqual(res.status_code, 200)
self.assertTrue(isinstance(form.fields['device'].widget,
widgets.TextInput))
@test.create_stubs({cinder: ('volume_get',), api.nova: ('server_list',)})
def test_edit_attachments_cannot_set_mount_point(self):
PREV = settings.OPENSTACK_HYPERVISOR_FEATURES['can_set_mount_point']
settings.OPENSTACK_HYPERVISOR_FEATURES['can_set_mount_point'] = False
volume = self.volumes.first()
servers = [s for s in self.servers.list()
if s.tenant_id == self.request.user.tenant_id]
cinder.volume_get(IsA(http.HttpRequest), volume.id).AndReturn(volume)
api.nova.server_list(IsA(http.HttpRequest)).AndReturn([servers, False])
self.mox.ReplayAll()
url = reverse('horizon:project:volumes:attach', args=[volume.id])
res = self.client.get(url)
# Assert the device field is hidden.
form = res.context['form']
self.assertTrue(isinstance(form.fields['device'].widget,
widgets.HiddenInput))
settings.OPENSTACK_HYPERVISOR_FEATURES['can_set_mount_point'] = PREV
@test.create_stubs({cinder: ('volume_get',),
api.nova: ('server_get', 'server_list',),
quotas: ('tenant_quota_usages',)})
def test_edit_attachments_attached_volume(self):
servers = [s for s in self.servers.list()
if s.tenant_id == self.request.user.tenant_id]
server = servers[0]
volume = self.volumes.list()[0]
cinder.volume_get(IsA(http.HttpRequest), volume.id) \
.AndReturn(volume)
api.nova.server_list(IsA(http.HttpRequest)) \
.AndReturn([servers, False])
self.mox.ReplayAll()
url = reverse('horizon:project:volumes:attach',
args=[volume.id])
res = self.client.get(url)
self.assertEqual(res.context['form'].fields['instance']._choices[0][1],
"Select an instance")
self.assertEqual(len(res.context['form'].fields['instance'].choices),
2)
self.assertEqual(res.context['form'].fields['instance']._choices[1][0],
server.id)
self.assertEqual(res.status_code, 200)
@test.create_stubs({cinder: ('volume_list',),
api.nova: ('server_list',),
quotas: ('tenant_quota_usages',)})
def test_create_button_disabled_when_quota_exceeded(self):
quota_usages = self.quota_usages.first()
quota_usages['volumes']['available'] = 0
cinder.volume_list(IsA(http.HttpRequest), search_opts=None)\
.AndReturn(self.volumes.list())
api.nova.server_list(IsA(http.HttpRequest), search_opts=None)\
.AndReturn([self.servers.list(), False])
quotas.tenant_quota_usages(IsA(http.HttpRequest))\
.MultipleTimes().AndReturn(quota_usages)
self.mox.ReplayAll()
res = self.client.get(reverse('horizon:project:volumes:index'))
self.assertTemplateUsed(res, 'project/volumes/index.html')
volumes = res.context['volumes_table'].data
self.assertItemsEqual(volumes, self.volumes.list())
create_link = tables.CreateVolume()
url = create_link.get_link_url()
classes = list(create_link.get_default_classes())\
+ list(create_link.classes)
link_name = "%s (%s)" % (unicode(create_link.verbose_name),
"Quota exceeded")
expected_string = "<a href='%s' title='%s' class='%s disabled' "\
"id='volumes__action_create'>%s</a>" \
% (url, link_name, " ".join(classes), link_name)
self.assertContains(res, expected_string, html=True,
msg_prefix="The create button is not disabled")
@test.create_stubs({cinder: ('volume_get',),
api.nova: ('server_get',)})
def test_detail_view(self):
volume = self.volumes.first()
server = self.servers.first()
volume.attachments = [{"server_id": server.id}]
cinder.volume_get(IsA(http.HttpRequest), volume.id).AndReturn(volume)
api.nova.server_get(IsA(http.HttpRequest), server.id).AndReturn(server)
self.mox.ReplayAll()
url = reverse('horizon:project:volumes:detail',
args=[volume.id])
res = self.client.get(url)
self.assertContains(res, "<dd>Volume name</dd>", 1, 200)
self.assertContains(res,
"<dd>41023e92-8008-4c8b-8059-7f2293ff3775</dd>",
1,
200)
self.assertContains(res, "<dd>Available</dd>", 1, 200)
self.assertContains(res, "<dd>40 GB</dd>", 1, 200)
self.assertContains(res,
("<a href=\"/project/instances/1/\">%s</a>"
% server.name),
1,
200)
self.assertNoMessages()
@test.create_stubs({cinder: ('volume_get',)})
def test_get_data(self):
volume = self.volumes.first()
volume.display_name = ''
cinder.volume_get(IsA(http.HttpRequest), volume.id).AndReturn(volume)
self.mox.ReplayAll()
url = reverse('horizon:project:volumes:index') + \
"?action=row_update&table=volumes&obj_id=" + volume.id
res = self.client.get(url, {},
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertEqual(res.status_code, 200)
self.assertEqual(volume.display_name, volume.id)
|
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1)
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
# http://www.kamaelia.org/AUTHORS - please extend this file,
# not this notice.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -------------------------------------------------------------------------
"""\
====================================
Detecting cuts/shot changes in video
====================================
DetectShotChanges takes in (framenumber, videoframe) tuples on its "inbox" inbox and
attempts to detect where shot changes have probably occurred in the sequence.
When it thinks one has occurred, a (framenumber, confidencevalue) tuple is sent
out of the "outbox" outbox.
Example Usage
-------------
Reading in a video in uncompressed YUV4MPEG file format, and outputting the
frame numbers (and confidence values) where cuts probably occur::
Pipeline( RateControlledFileReader(..),
YUV4MPEGToFrame(),
TagWithSequenceNumber(), # pair up frames with a frame number
DetectShotChanges(threshold=0.85),
ConsoleEchoer(),
).run()
Expect output like this::
(17, 0.885)(18, 0.912)(56, 0.91922)(212, 0.818)(213, 0.825)(214, 0.904) ...
More detail
-----------
Send (frame-number, video-frame) tuples to this component's "inbox" inbox and
(frame-number, confidence-value) tuples will be sent out of the "outbox" outbox
whenever it thinks a cut has occurred.
Frames must be in a YUV format. See below for details. Frame numbers need not
necessarily be sequential; but they must be unique! If they are not, then it is
your own fault when you can't match up detected shot changes to actual video
frames!
Internally, the cut detector calculates a 'confidence' value representing how
likely that a shot change has occurred. At initialisation you set a threshold
value - if the confidence value reaches or exceeds this threshold, then a cut
is deemed to have taken place, and output will be generated.
How do you choose a threshold? It is a rather inexact science (as is the
subjective decision of whether something constitutes a shot change!) - you really
need to get a feel for it experimentally. As a rough guide, values between 0.8
and 0.9 are usually reasonable, depending on the type of video material.
Because of the necessary signal processing, this component has a delay of
several frames of data through it before you will get output. It therefore will
not necessarily detect cuts in the first 15 frames or so of a sequence sent to
it. Neither will it generate any output for the last 15 frames or so - they
will never make it through the internal signal processing.
Send a producerFinished() or shutdownMicroprocess() message to this component's
"control" inbox and it will immediately terminate. It will also forward on the
message out of its "signal" outbox.
Implementation details
----------------------
The algorithm used is based on a simple "mean absolute difference" between pixels
of one frame and the next; with some signal processing on the resulting stream
of frame-to-frame difference values, to detect a spike possibly indicating a
shot change.
The algorithm is courtesy of Jim Easterbrook of BBC Research. It is also
available in its own right as an independent open source library
`here. <http://sourceforge.net/projects/shot-change>`_
As signal processing is done on the confidence values internally to emphasise
spikes - which are likely to indicate a sudden increase in the level of change
from one frame to the next - a consequence is that this component
internally buffers inter-frame difference values for several frames, resulting
in a delay of about 15 frames through this component. This is the reason why
it is necessary to pair up video frames with a frame number, otherwise you
cannot guarantee being able to match up the resulting detected cuts with the
actual frame where they took place!
The change detection algorithm only looks at the Y (luminance) data in the video
frame.
=========================
UNCOMPRESSED FRAME FORMAT
=========================
A frame is a dictionary data structure. It must, for this component, at minimum
contain a key "yuv" that returns a tuple containing (y_data, u_data, v_data).
Any other entries are ignored.
"""
from Kamaelia.Support.Optimised.Video.ComputeMeanAbsDiff import ComputeMeanAbsDiff
from Axon.Component import component
from Axon.Ipc import producerFinished, shutdownMicroprocess
import math
class DetectShotChanges(component):
"""\
DetectShotChanges([threshold]) -> new DetectShotChanges component.
Send (framenumber, videoframe) tuples to the "inbox" inbox. Sends out
(framenumber, confidence) to its "outbox" outbox when a cut has probably
occurred in the video sequence.
Keyword arguments:
- threshold -- threshold for the confidence value, at or above which a cut is detected (default=0.9)
"""
def __init__(self, threshold=0.9):
super(DetectShotChanges,self).__init__()
self.C0 = [0.0] * 2 # 'cut' signal
self.C1 = [0.0] * 2 # 'standard converted cut' signal
self.MAD = [0.0] * 10 # mean absolute difference
self.thresh = [0.0] * 11 # threshold based on local activity
self.fnum = [None] * 11 # frame number history
self.ydata = [""] * 2 # frame luminance data
self.validframes = 0 # how many valid frames we've seen
self.threshold = threshold
def main(self):
"""Main loop"""
while 1:
while self.dataReady("inbox"):
(framenum, frame) = self.recv("inbox")
confidence, framenum = self.detectCut(framenum, frame['yuv'][0])
if confidence >= self.threshold:
self.send((framenum,confidence), "outbox")
while self.dataReady("control"):
msg = self.recv("control")
self.send(msg, "signal")
if isinstance(msg, (producerFinished, shutdownMicroprocess)):
return
self.pause()
yield 1
def detectCut(self, framenum, ydata):
# shuffle histories along
self.C0.pop()
self.C0.insert(0,None)
self.C1.pop()
self.C1.insert(0,None)
self.MAD.pop()
self.MAD.insert(0,None)
self.thresh.pop()
self.thresh.insert(0,None)
self.fnum.pop()
self.fnum.insert(0,framenum)
self.ydata.pop()
self.ydata.insert(0,ydata)
self.validframes = min(self.validframes+1, 9999)
# compute mean absolute difference
if self.validframes >= 2:
self.MAD[0] = ComputeMeanAbsDiff(self.ydata[0], self.ydata[1])
# compute variable threshold
self.thresh[0] = 1.3 * max(*self.MAD[0:5])
# compute 'cut' signal
if self.validframes >= 14:
risingEdge = (self.MAD[6] - self.thresh[7]) \
- max(0.0, self.MAD[7] - self.thresh[8])
fallingEdge = (self.MAD[6] - self.thresh[1]) \
- max(0.0, self.MAD[5] - self.thresh[0])
self.C0[0] = (risingEdge-fallingEdge)/2.0
# compute 'standards converted cut' signal
if self.validframes >= 15:
risingEdge = (self.MAD[7] - self.thresh[8]) \
- max(0.0, self.MAD[8] - self.thresh[9]) \
- max(0.0, self.MAD[7] - self.thresh[2])
fallingEdge = (self.MAD[6] - self.thresh[1]) \
- max(0.0, self.MAD[5] - self.thresh[0]) \
- max(0.0, self.MAD[6] - self.thresh[7])
self.C1[0] = (risingEdge-fallingEdge)/2.0
if self.validframes >= 16:
# mask signals to either a cut or sc cut but not both
if self.C0[1]*5.0 >= max(self.C1[0], self.C1[1]):
C0_Msk = self.C0[1]
else:
C0_Msk = 0.0
if self.C1[0] > max(self.C0[0], self.C0[1]) * 5.0:
C1_Msk = self.C1[0]
else:
C1_Msk = 0.0
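# Added note: the masked cut signal is compressed with a log and rescaled
# by the empirical constants 0.1 and 4.6 so that typical cut strengths map
# onto a roughly 0..1 confidence value; the frame number reported comes
# from the middle of the history buffer, which is where (because of the
# ~15 frame processing delay) the detected transition actually occurred.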
if C0_Msk > 0.0:
confidence = (math.log(C0_Msk) + 0.1) / 4.6
framenum = self.fnum[7]
return confidence,framenum
if C1_Msk > 0.0:
confidence = (math.log(C1_Msk) + 0.1) / 4.6
framenum = self.fnum[6]
return confidence,framenum
return -99,None
__kamaelia_components__ = ( DetectShotChanges, )
|
|
import os
import shutil
import tempfile
import unittest
from pyparams import ( _bool_check,
_str_list_check,
_str_dict_check,
_Param,
ParamError,
PARAM_TYPE_BOOL,
PARAM_TYPE_INT,
PARAM_TYPE_STR_LIST,
PARAM_TYPE_STR_DICT,
Conf
)
class LowLevelFunctionTests(unittest.TestCase):
"""
Tests, which directly use lower level functions of the module.
"""
def test_bool_check(self):
"""
Test the function that converts various types and values into their
boolean equivalent.
"""
for v in [ True, "y", "Y", "yes", "yEs", "true", "TRUE", "trUE", "1" ]:
self.assertTrue(_bool_check(v))
for v in [ False, "n", "n", "no", "nO", "false", "fALSe", "0" ]:
self.assertFalse(_bool_check(v))
for v in [ 1, 0, "ja", "nein", "j", "n", "t", "f" ]:
self.assertRaises(ParamError, _bool_check, (v,))
def test_str_list_check(self):
"""
Test the function that converts lists of strings.
"""
self.assertEqual([ 1,2,"3",True ], _str_list_check( [1,2,"3",True] ))
self.assertEqual([ "1","2","3" ], _str_list_check( "1,2,3" ))
self.assertEqual([ "1","2","3" ], _str_list_check( "1 , 2 , 3" ))
self.assertEqual([ "","1","","3","" ], _str_list_check( ",1,,3," ))
self.assertEqual([ "1:3" ], _str_list_check( "1:3" ))
def test_str_dict_check(self):
"""
Test the function that converts string representations of dictionaries.
"""
self.assertEqual({ 'foo' : 123 }, _str_dict_check( { 'foo' : 123 }))
self.assertEqual({ 'foo' : '123' }, _str_dict_check( "{ foo : 123 }"))
self.assertEqual({ 'foo' : '123', 'bar' : 'ggg' },
_str_dict_check( "{ foo : 123 ; bar : ggg }"))
self.assertEqual({ 'foo' : [ '123', 'ddd' ], 'bar' : 'ggg' },
_str_dict_check( "{ foo : 123 , ddd ; bar : ggg }"))
def test_param_error_class(self):
"""
Test the message formatting in the ParamError class.
"""
try:
raise ParamError("Foobar", "This is the message.")
except ParamError as e:
self.assertEqual(e.message,
"Parameter 'Foobar': This is the message.")
# Starting the parameter name with a "-" allows us to change the
# formatting behaviour of the class, dropping the 'parameter' prefix.
# This makes this class usable in other contexts as well.
try:
raise ParamError("-Foobar", "This is the message.")
except ParamError as e:
self.assertEqual(e.message,
"Foobar: This is the message.")
class ParamClassTests(unittest.TestCase):
"""
Tests for the _Param class.
"""
def _make_param(self, **kwargs):
"""
Helper function to make it easier to test the Param class __init__
function.
"""
if 'name' not in kwargs:
kwargs['name'] = 'dummy'
return _Param(**kwargs)
def test_param_init_errors(self):
"""
Test the initialization catches various error conditions.
"""
self.assertRaisesRegexp(ParamError,
"Unknown parameter type 'FOO'",
self._make_param,
**{ "param_type" : "FOO" })
self.assertRaisesRegexp(ParamError,
"Allowed values or range not allowed for",
self._make_param,
**{ "param_type" : PARAM_TYPE_BOOL,
"allowed_range" : dict(min=1,max=2) })
self.assertRaisesRegexp(ParamError,
"Allowed values or range not allowed for",
self._make_param,
**{ "param_type" : PARAM_TYPE_BOOL,
"allowed_values" : [ "1", "2" ] })
self.assertRaisesRegexp(ParamError,
"Cannot convert 'foo' to type 'integer'.",
self._make_param,
**{ "param_type" : PARAM_TYPE_INT,
"allowed_values" : [ "1", 2, "foo" ] })
self.assertRaisesRegexp(ParamError,
"Malformed dictionary for 'allowed_range'.",
self._make_param,
**{ "param_type" : PARAM_TYPE_INT,
"allowed_range" : dict(foo=1, max=3) })
self.assertRaisesRegexp(ParamError,
"Cannot convert 'x' to type 'integer'.",
self._make_param,
**{ "param_type" : PARAM_TYPE_INT,
"allowed_range" : dict(min="x", max=3) })
self.assertRaisesRegexp(ParamError,
"'123' is not in the allowed range.",
self._make_param,
**{ "param_type" : PARAM_TYPE_INT,
"allowed_range" : dict(min=1, max=3),
"default" : 123 })
self.assertRaisesRegexp(ParamError,
"Cannot convert 'foo' to type 'integer'.",
self._make_param,
**{ "param_type" : PARAM_TYPE_INT,
"default" : "foo" })
self.assertRaisesRegexp(ParamError,
"Invalid command line option specification.",
self._make_param,
**{ "cmd_line" : "foo" })
self.assertRaisesRegexp(ParamError,
"Invalid command line option specification.",
self._make_param,
**{ "cmd_line" : ("1","2","3") })
self.assertRaisesRegexp(ParamError,
"Invalid command line option specification.",
self._make_param,
**{ "cmd_line" : ("ab","foo") })
self.assertRaisesRegexp(ParamError,
"'baz' is not one of the allowed values.",
self._make_param,
**{ "param_type" : PARAM_TYPE_STR_LIST,
"default" : "foo,bar,baz",
"allowed_values" : [ "foo", "bar" ]})
self.assertRaisesRegexp(ParamError,
"'zzz' is not in the allowed range.",
self._make_param,
**{ "param_type" : PARAM_TYPE_STR_LIST,
"default" : "foo,bar,baz,zzz",
"allowed_range" : dict(min="a", max="x")})
def test_param_validate_error(self):
"""
Testing validation of various parameter values.
"""
# Checking for allowed-range
p = _Param(name='foo', param_type=PARAM_TYPE_INT,
allowed_range=dict(min=1, max=5))
p.validate(1)
p.validate("1")
self.assertRaisesRegexp(ParamError,
"Cannot convert 'foo' to type 'integer'.",
p.validate, "foo")
self.assertRaisesRegexp(ParamError,
"'6' is not in the allowed range.",
p.validate, 6)
self.assertRaisesRegexp(ParamError,
"'0' is not in the allowed range.",
p.validate, 0)
# Checking for allowed-values
p = _Param(name='foo', param_type=PARAM_TYPE_INT,
allowed_values=[ 1, 3, 5 ])
p.validate(1)
p.validate("3")
self.assertRaisesRegexp(ParamError,
"'0' is not one of the allowed values.",
p.validate, 0)
# Checking for list validation with allowed values
p = _Param(name='foo', param_type=PARAM_TYPE_STR_LIST,
allowed_values=[ "1", "2", "3" ])
p.validate([ "1", "2" ])
p.validate("1,2,3,1,2")
self.assertRaisesRegexp(ParamError,
"'0' is not one of the allowed values.",
p.validate, "0,1")
# Checking for list validation with allowed ranges
p = _Param(name='foo', param_type=PARAM_TYPE_STR_LIST,
allowed_range=dict(min="a", max="f"))
p.validate([ "a", "aa", "bb" ])
p.validate("a,aa,bb,eeee,f")
self.assertRaisesRegexp(ParamError,
"'A' is not in the allowed range.",
p.validate, "a,f,A")
def test_param_getopt_str_output(self):
"""
Testing that we create correct specs for getopt.
"""
p = _Param(name='foo', param_type=PARAM_TYPE_INT,
allowed_range=dict(min=1, max=5))
self.assertEqual(p.make_getopts_str(), ( None, None ))
p = _Param(name='foo', param_type=PARAM_TYPE_INT,
allowed_range=dict(min=1, max=5),
cmd_line=(None, "foo"))
self.assertEqual(p.make_getopts_str(), ( None, "foo=" ))
p = _Param(name='foo', param_type=PARAM_TYPE_INT,
allowed_range=dict(min=1, max=5),
cmd_line=("f", None))
self.assertEqual(p.make_getopts_str(), ( "f:", None ))
p = _Param(name='foo', param_type=PARAM_TYPE_BOOL,
cmd_line=("f", "foo"))
self.assertEqual(p.make_getopts_str(), ( "f", "foo" ))
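# Added note: the expected specs above follow the getopt convention --
# value-taking options get a trailing ':' (short form) or '=' (long form),
# while boolean flags are emitted without either suffix.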
def test_param_doc_output(self):
"""
Testing that we create correct doc output.
"""
p = _Param(name='foo', param_type=PARAM_TYPE_INT)
self.assertEqual(p.doc(), ( None, None ))
p = _Param(name='foo', param_type=PARAM_TYPE_INT,
cmd_line=("f", "foo"), # no doc if no cmd_line
doc_spec=dict())
self.assertEqual(p.doc(), ( None, "-f <val>, --foo=<val>\n" ))
p = _Param(name='foo', param_type=PARAM_TYPE_INT,
cmd_line=("f", "foo"),
doc_spec=dict(text="Some text"))
self.assertEqual(p.doc(), ( None,
"-f <val>, --foo=<val>\n"
" Some text\n"))
p = _Param(name='foo', param_type=PARAM_TYPE_INT,
cmd_line=("f", "foo"),
doc_spec=dict(text="Some text", argname="arg"))
self.assertEqual(p.doc(), ( None,
"-f <arg>, --foo=<arg>\n"
" Some text\n"))
p = _Param(name='foo', param_type=PARAM_TYPE_INT,
cmd_line=("f", "foo"),
doc_spec=dict(text="Some text", argname="arg"))
self.assertEqual(p.doc(), ( None,
"-f <arg>, --foo=<arg>\n"
" Some text\n"))
p = _Param(name='foo', param_type=PARAM_TYPE_INT,
default=123,
cmd_line=("f", "foo"),
doc_spec=dict(text="Some text", argname="arg"))
self.assertEqual(p.doc(), ( None,
"-f <arg>, --foo=<arg>\n"
" Some text\n"
" Default value: 123\n"))
p = _Param(name='foo', param_type=PARAM_TYPE_INT,
default=123,
conffile="FOOBAR",
cmd_line=("f", "foo"),
doc_spec=dict(text="Some text", argname="arg"))
self.assertEqual(p.doc(), ( None,
"-f <arg>, --foo=<arg>\n"
" Some text\n"
" Default value: 123\n"
" Conf file equivalent: FOOBAR\n"))
p = _Param(name='foo', param_type=PARAM_TYPE_INT,
default=123,
conffile="FOOBAR",
cmd_line=("f", "foo"),
doc_spec=dict(text="Text\n* Foo\n* Bar", argname="arg"))
self.assertEqual(p.doc(), ( None,
"-f <arg>, --foo=<arg>\n"
" Text\n"
" * Foo\n"
" * Bar\n"
" Default value: 123\n"
" Conf file equivalent: FOOBAR\n"))
class ConfigClassTests(unittest.TestCase):
"""
Tests for the Config class.
"""
@classmethod
def setUpClass(cls):
"""
Create a full config with a number of parameters.
Also create temporary directories to store config files in.
"""
# Create a small temporary directory hierarchy
cls.dir_one_name = tempfile.mkdtemp()
cls.dir_two_name = tempfile.mkdtemp(dir=cls.dir_one_name)
cls.sample_param_dict = {
"configfile" : {
"default" : "t1.conf",
"cmd_line" : ( None, 'configfile' ),
"conffile" : None,
"doc_spec" : { 'text' : "Config file name",
'section' : "General",
'argname' : "config file name" }
},
"foo" : {
"default" : "some-value",
"allowed_values" : [ 'some-value', 'something-else', 'foobar',
'xyz baz' ],
"conffile" : "MY_PARAM",
"cmd_line" : ('f', 'some-param'),
"doc_spec" : { 'text' : "The description string here is "
"long and will automatically be "
"wrapped across multiple lines.",
'section' : "General",
'argname' : "the foo value" }
},
"ddd" : {
"default" : { 'baz' : 123 },
"conffile" : "MY_DICT",
"param_type" : PARAM_TYPE_STR_DICT,
"allowed_keys" : [ "baz", "a", "foo", "bar" ],
"mandatory_keys" : [ "baz", ],
"cmd_line" : ( 'Q', None ),
"doc_spec" : { 'text' : "A dict value.",
'section' : "General",
'argname' : "the ddd value" }
},
"baz" : {
"default" : 123,
"allowed_range" : dict(min=1, max=200),
"param_type" : PARAM_TYPE_INT,
"doc_spec" : { 'text' : "Amount of baz gizmos to add.",
'section' : "Specific parameters",
'argname' : "num" }
},
"ggg" : {
"default" : None,
"param_type" : PARAM_TYPE_BOOL,
"cmd_line" : ('g', None),
"doc_spec" : { 'text' : "Flag control run of foobar.",
'section' : "General" }
},
}
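# Added note: this spec exercises every parameter flavour used below -- a
# plain string with allowed_values ('foo'), a dict with allowed and
# mandatory keys ('ddd'), a range-bounded integer ('baz'), and a boolean
# with no default ('ggg'), which therefore must be supplied at runtime.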
@classmethod
def tearDownClass(cls):
"""
Removing the temporary directories.
"""
shutil.rmtree(cls.dir_one_name)
def _make_conf(self, *args, **kwargs):
return Conf(*args, **kwargs)
def test_config_init_errors(self):
"""
Test correct error handling during configuration creation.
"""
# Error if an unknown keyword is passed in via the param spec
self.assertRaisesRegexp(
ParamError,
"Parameter 'FOO': Invalid parameter config attribute.",
self._make_conf,
**{ "param_dict" : {
"foo" : {
"default" : "some-value",
"FOO" : 123,
"doc_spec" : {
'text' : "Some desc",
'section' : "General",
'argname' : "the foo value" }}}})
# Error if unknown param type presented
self.assertRaisesRegexp(
ParamError,
"Parameter 'foo': Unknown parameter type 'FOO'.",
self._make_conf,
**{ "param_dict" : {
"foo" : {
"default" : "some-value",
"param_type" : 'FOO',
"doc_spec" : {
'text' : "Some desc",
'section' : "General",
'argname' : "the foo value" }}}})
def test_conf_access_functions(self):
"""
Testing of a few smaller access functions for the Conf object.
"""
conf = Conf(self.sample_param_dict)
# Able to get a parameter
self.assertEqual(conf.get('foo'), 'some-value')
# Get proper exception when asking for an undefined parameter
self.assertRaisesRegexp(ParamError,
"Parameter 'bar': Unknown parameter.",
conf.get, 'bar')
# Get all the parameter names
k = list(conf.keys())
k.sort()
self.assertEqual([ 'baz', 'configfile', 'ddd', 'foo', 'ggg'], k)
# Get all the items (name and values)
items = conf.items()
self.assertTrue(len(items), 3)
should = {'ggg': None, 'foo': 'some-value', 'baz': 123}
for k,v in should.items():
self.assertTrue(k in items)
self.assertEqual(items[k], v)
# Getting by conffile name
self.assertEqual(conf.get_by_conffile_name('GGG'), None)
self.assertEqual(conf.get_by_conffile_name('MY_PARAM'), 'some-value')
self.assertEqual(conf.get_by_conffile_name('BAZ'), 123)
# Setting invalid values should cause exception
self.assertRaisesRegexp(ParamError,
"Parameter 'baz': "
"Cannot convert 'foo' to type 'integer'.",
conf.set,
'baz', "foo")
self.assertRaisesRegexp(ParamError,
"Parameter 'baz': "
"'444' is not in the allowed range.",
conf.set,
'baz', 444)
# Setting valid value should be allowed
conf.set('baz',40)
self.assertEqual(conf.get('baz'), 40)
def test_conf_doc_creation(self):
"""
Test that the automatically generated doc for the configuration
is correct.
"""
conf = Conf(self.sample_param_dict)
out = conf.make_doc()
should =("General:\n"
" -f <the foo value>, --some-param=<the foo value>\n"
" The description string here is long and will "
"automatically be\n"
" wrapped across multiple lines.\n"
" Default value: some-value\n"
" Conf file equivalent: MY_PARAM\n"
" \n"
" -g\n"
" Flag control run of foobar.\n"
" Conf file equivalent: GGG\n"
" \n"
" -Q <the ddd value>\n"
" A dict value.\n"
" Default value: {'baz': 123}\n"
" Conf file equivalent: MY_DICT\n"
" \n"
" --configfile=<config file name>\n"
" Config file name\n"
" Default value: t1.conf\n"
" \n"
"Specific parameters:\n"
" -b <num>, --baz=<num>\n"
" Amount of baz gizmos to add.\n"
" Default value: 123\n"
" Conf file equivalent: BAZ")
self.assertEqual(out, should)
def test_conf_add_param(self):
"""
Testing manual addition of parameter to existing config.
"""
conf = Conf(self.sample_param_dict)
# Not allowing duplicate names for parameters.
self.assertRaisesRegexp(ParamError,
"Duplicate definition.",
conf.add, "foo")
# Catching already existing command line options.
self.assertRaisesRegexp(ParamError,
"Short option '-f' already in use.",
conf.add,
**{ "name" : "ttt",
"cmd_line" : ( 'f', None ) })
self.assertRaisesRegexp(ParamError,
"Long option '--some-param' already in use.",
conf.add,
**{ "name" : "ttt",
"cmd_line" : ( None, 'some-param' ) })
conf.add("zip-bar")
p = conf.params["zip-bar"]
# Assert that default getopts are correct.
self.assertEqual(('z:', 'zip-bar='), p.make_getopts_str())
# Assert correctness of the auto-generated conffile name.
self.assertEqual("ZIP_BAR", p.conffile)
p.value = "foo"
self.assertEqual(conf.get_by_conffile_name("ZIP_BAR"), "foo")
def _make_file(self, buf):
fname = self.dir_two_name+"/t1.conf"
f = open(fname, "w")
f.write(buf)
f.close()
return fname
def test_conf_configfile(self):
"""
Testing parsing of configfile.
"""
# Create config file with unknown parameter.
fname = self._make_file("""
# This is a comment line.
FOO xyz
""")
# First we parse it while we are allowing unknown params: No error
conf = Conf(self.sample_param_dict, default_allow_unknown_params=True)
with open(fname, "r") as f:
conf._parse_config_file(f)
# Now we switch on the default mode: Unknown params are not allowed
conf = Conf(self.sample_param_dict)
with open(fname, "r") as f:
self.assertRaisesRegexp(ParamError,
"Line 4: Unknown parameter 'FOO'.",
conf._parse_config_file, f)
# Create config file with correct parameter but wrong value.
fname = self._make_file("""
MY_PARAM xyz
""")
conf = Conf(self.sample_param_dict)
with open(fname, "r") as f:
self.assertRaisesRegexp(ParamError,
"Line 2: Parameter 'foo': "
"'xyz' is not one of the allowed values.",
conf._parse_config_file, f)
# Create config file with correct parameters
fname = self._make_file("""
# empty lines and comments and stuff with odd indentation
MY_PARAM xyz baz
MY_DICT {
bar : 123; # some comment that's ignored
baz : foo,
bar, blah , fff ;
}
# some comment
GGG yes # comment at end of line
""")
conf = Conf(self.sample_param_dict)
with open(fname, "r") as f:
conf._parse_config_file(f)
self.assertEqual(conf.get('foo'), "xyz baz")
self.assertTrue(conf.get('ggg'))
d = conf.get('ddd')
self.assertEqual(len(d), 2)
self.assertTrue('bar' in d)
self.assertTrue('baz' in d)
self.assertEqual(d['bar'], "123")
self.assertEqual(d['baz'], [ "foo", "bar", "blah", "fff" ])
def test_conf_envvars(self):
"""
Testing parsing of environment variables.
"""
conf = Conf(self.sample_param_dict,
default_env_prefix="FOOBAR_")
# Env variables with the defined prefix, but otherwise unknown name
# will simply be silently ignored.
os.environ['FOOBAR_SOMETHING_UNKNOWN'] = "foo"
# Set illegal value in env variable
os.environ['FOOBAR_MY_PARAM'] = "ggg"
self.assertRaisesRegexp(ParamError,
"Environment variable FOOBAR_MY_PARAM: "
"Parameter 'foo': 'ggg' is not one of the "
"allowed values.",
conf._process_env_vars)
# Set correct value in env variables
os.environ['FOOBAR_MY_PARAM'] = "something-else"
os.environ['FOOBAR_GGG'] = "y"
conf._process_env_vars()
self.assertEqual("something-else", conf.get('foo'))
self.assertTrue(conf.get('ggg'))
def test_conf_cmdline(self):
"""
Testing parsing of command line arguments.
"""
conf = Conf(self.sample_param_dict)
# Testing with illegal parameter
self.assertRaisesRegexp(ParamError,
"Command line option: option --xyz not "
"recognized.",
conf._process_cmd_line,
[ "--xyz=blah" ])
# Testing with illegal value
self.assertRaisesRegexp(ParamError,
"Parameter 'foo': 'blah' is not one of the "
"allowed values.",
conf._process_cmd_line,
[ "--some-param=blah", "-g", "--baz", "200" ])
# Testing with illegal dictionary key value
self.assertRaisesRegexp(ParamError,
"Parameter 'ddd': 'yyy' is not an allowable "
"key value.",
conf._process_cmd_line,
[ "--some-param=foobar", "-g", "--baz", "200",
"-Q", "{ yyy:123 ; baz:1, 2,3; a: X Y Z }" ])
# Testing with missing mandatory key
self.assertRaisesRegexp(ParamError,
"Parameter 'ddd': Mandatory key 'baz' "
"not present.",
conf._process_cmd_line,
[ "--some-param=foobar", "-g", "--baz", "200",
"-Q", "{ foo:123 ; bar:1, 2,3; a: X Y Z }" ])
# Testing with correct value
conf._process_cmd_line([ "--some-param=foobar", "-g", "--baz", "200",
"-Q", "{ foo:123 ; baz:1, 2,3; a: X Y Z }" ])
self.assertEqual('foobar', conf.get('foo'))
self.assertEqual(200, conf.get('baz'))
self.assertTrue(conf.get('ggg'))
d = conf.get('ddd')
self.assertEqual(len(d), 3)
self.assertEqual(d['foo'], "123")
self.assertEqual(d['baz'], ["1", "2", "3"])
self.assertEqual(d['a'], "X Y Z")
def test_conf_acquire(self):
"""
Testing full run of acquire, using defaults, config files, environment
variables and command line options.
"""
# Create the config file, some values are missing
self._make_file("""
MY_PARAM foobar
""")
conf = Conf(self.sample_param_dict,
default_conf_file_locations=[self.dir_one_name,
self.dir_two_name],
default_env_prefix="FOOBAR_",
conf_file_parameter="configfile")
self.assertRaisesRegexp(ParamError,
"Parameter 'ggg': Requires a value, "
"nothing has been set.",
conf.acquire, list())
# Try again, this time set environment variable for missing value.
os.environ['FOOBAR_GGG'] = "yes"
conf.acquire([])
self.assertEqual("foobar", conf.get('foo'))
self.assertTrue(conf.get('ggg'))
# Try again, this time set environment variables for missing value as
# well as env overwrite for other param.
os.environ['FOOBAR_GGG'] = "yes"
os.environ['FOOBAR_MY_PARAM'] = "something-else"
conf.acquire([])
self.assertEqual("something-else", conf.get('foo'))
self.assertTrue(conf.get('ggg'))
# Try again, this time add a command line overwrite
conf.acquire([ "-f", "some-value" ])
self.assertEqual("some-value", conf.get('foo'))
if __name__ == "__main__":
unittest.main()
|
|
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Utility to process UNEnergy data set into a csv with columns for StatVars
and generate a corresponding MCF with statVars and tMCF.
http://data.un.org/Data.aspx
Run this script in this folder:
python3 process.py
"""
import csv
import io
import os
import sys
import datetime
import time
from absl import app
from absl import flags
from collections import defaultdict
# Allows the following module imports to work when running as a script
# module_dir_ is the path to where this code is running from.
module_dir_ = os.path.dirname(__file__)
sys.path.append(os.path.join(module_dir_))
from country_codes import get_country_dcid
import un_energy_codes
import download
FLAGS = flags.FLAGS
flags.DEFINE_list('csv_data_files', [],
'csv files from UNData Energy datasets to process')
flags.DEFINE_string('output_path', 'tmp_data_dir/un_energy_output',
'Data set name used as file name for mcf and tmcf')
flags.DEFINE_integer('debug_level', 0, 'Verbosity level for debug messages')
flags.DEFINE_integer('debug_lines', 100000, 'Print error logs every N lines')
flags.DEFINE_bool('copy_input_columns', False,
'Add columns from the input csv into the output')
# Columns in the output CSV
# todo(ajaits): Should it include original columns like transaction code, fuel code, etc?
OUTPUT_CSV_COLUMNS = [
'Country_dcid',
'Year',
'Quantity',
'Unit_dcid',
'Estimate',
'StatVar',
]
INPUT_CSV_COLUMNS_COPIED = [
'Commodity Code', 'Country or Area', 'Transaction Code',
'Commodity - Transaction Code', 'Commodity - Transaction', 'Unit',
'Quantity Footnotes'
]
_DEFAULT_STAT_VAR_PV = {
'typeOf': 'dcs:StatisticalVariable',
'measurementQualifier': 'dcs:Annual',
'populationType': 'dcs:Energy',
'statType': 'dcs:measuredValue',
}
UN_ENERGY_TMCF = """
Node: E:UNEnergy->E0
typeOf: dcs:StatVarObservation
observationAbout: C:UNEnergy->Country_dcid
variableMeasured: C:UNEnergy->StatVar
observationDate: C:UNEnergy->Year
observationPeriod: "P1Y"
value: C:UNEnergy->Quantity
unit: C:UNEnergy->Unit_dcid
measurementMethod: C:UNEnergy->Estimate
"""
def _print_debug(debug_level: int, *args):
if debug_level > 1:
print("[", datetime.datetime.now(), "] ", *args, file=sys.stderr)
def _print_counters(counters, steps=None):
row_key = 'inputs_processed'
if steps is None or row_key not in counters or counters[
row_key] % steps == 0:
print('\nSTATS:')
for k in sorted(counters):
print(f"\t{k} = {counters[k]}")
if 'inputs_processed' in counters:
start_ts = counters['time_start']
end_ts = time.perf_counter()
print(
'Processing rate: {:.2f}'.format(counters['inputs_processed'] /
(end_ts - start_ts)),
'rows/sec')
print('', flush=True)
def _add_error_counter(counter_name: str, error_msg: str, counters):
_print_debug(2, "Error: ", counter_name, error_msg)
if counters is not None:
debug_lines = 1
if 'debug_lines' in counters:
debug_lines = counters['debug_lines']
if counters[counter_name] % debug_lines == 0:
print("ERROR: ", counter_name, ": ", error_msg)
counters[counter_name] += 1
def _remove_extra_characters(name: str) -> str:
"""Removes the parts of the name that is not used in the node id,
including:
- any namespace: prefix, such as 'dcs:' or 'dcid:'
- capitalized prefix of two or more letters
- Any '_'
For example: 'dcid:EIA_Other_fuel' will be converted to: 'OtherFuel'
Args:
name: string to be normalized.
Returns:
string without the extra characters and capitalized appropriately.
"""
if name is None:
return name
# strip namespace: prefix
name = name[name.find(':') + 1:]
# strip any prefix of 2 or more upper case letters.
upper_prefix = 0
while upper_prefix < len(name):
if not name[upper_prefix].isupper():
break
upper_prefix += 1
if upper_prefix > 1:
name = name[upper_prefix:]
if name[0] == '_':
name = name[1:]
# Replace all '_' with a capitalized letter.
# Find all occurrences of '_'.
upper_idx = [-1] + \
[i for i, e in enumerate(name) if e == '_'] + [len(name)]
# Capitalize the next letter after '_'.
words = [
name[x + 1].upper() + name[x + 2:y]
for x, y in zip(upper_idx, upper_idx[1:])
if name[x:y] != '_'
]
return ''.join(words)
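# Worked example (added comment), following the steps above:
#   'dcid:EIA_Other_fuel' -> strip namespace -> 'EIA_Other_fuel'
#   -> strip the upper-case prefix and its '_' -> 'Other_fuel'
#   -> capitalize after '_' -> 'OtherFuel'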
def _add_property_value_name(pv_dict: dict,
prop: str,
name_list: list,
ignore_list=None):
"""Append value of the property in the pc_dict to the name_list.
The value string is normalized by stripping prefix and removing '_'.
The property is removed from the pv_dict as well.
Args:
pv_dict: dictionary of property and values.
the matching property is remove from this dictionary.
prop: string with the property code whose value is to be extracted
name_list: output list of strings into which the normalized value
string is added.
ignore_list: [optional] list of strings of property or value
that is not added to the name_list
"""
if prop not in pv_dict:
return
orig_value = pv_dict[prop]
value = _remove_extra_characters(orig_value)
pv_dict.pop(prop)
if value is None:
return
if ignore_list is not None:
if prop in ignore_list or value in ignore_list or orig_value in ignore_list:
return
prefix_len = value.find(':') + 1
name_list.append(value[prefix_len].upper() + value[prefix_len + 1:])
def _get_stat_var_id(sv_pv: dict, ignore_list=None) -> str:
"""Generate a statvar id from a dictionary of PVs in the following syntax:
<mqualifier>_<statype>_<measuredProp>_<PopulationType>_<constraint1>_<constraint2>_...
where <prop> represents the normalized value string for the property
and constraints are sorted alphabetically.
property and values in the ignore_list are not added to the id.
Args:
sv_pv: dictionary of properties and respective values for a StatVar
for which the node id is to be generated.
ignore_list: List of property or value strings not to be added to the name
Returns:
String with the node id containing values of PVs
that can be used as the node id for a StatVar.
"""
pv = dict(sv_pv)
ids = []
ignore_values = ['MeasuredValue', 'description']
if ignore_list is not None:
ignore_values.extend(ignore_list)
# Add default properties
_add_property_value_name(pv, 'measurementQualifier', ids, ignore_values)
_add_property_value_name(pv, 'statType', ids, ignore_values)
_add_property_value_name(pv, 'measuredProperty', ids, ignore_values)
_add_property_value_name(pv, 'populationType', ids, ignore_values)
pv.pop('typeOf')
# Add the remaining properties in sorted order
for prop in sorted(pv.keys()):
_add_property_value_name(pv, prop, ids)
return '_'.join(ids)
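# Added note: the id concatenates the normalized values of
# measurementQualifier, statType, measuredProperty and populationType (in
# that order), then the remaining constraint properties in sorted key
# order, joined with '_'; anything matching the ignore list is dropped.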
def is_valid_stat_var(sv_pv: dict, counters=None) -> bool:
"""Check if a StatVar is valid.
Verifies if the statVar has the required properties.
Args:
sv_pv: Dictionary of property and value for a StatVar
counters: [optional] error counters to be updated
Returns:
True if the statVar is valid.
"""
# Check StatVar has all required properties.
STAT_VAR_REQUIRED_PROPERTIES = [
'measuredProperty',
'populationType',
]
for prop in STAT_VAR_REQUIRED_PROPERTIES:
if prop not in sv_pv:
_add_error_counter(
f'error_missing_property_{prop}',
f'Stat var missing property {prop}, statVar: {sv_pv}', counters)
return False
return True
def _get_scaled_value(value: str, multiplier: int) -> str:
"""Returns a scaled value for the given value and multiplier.
Args:
value: Original value in string. If it contains a decimal point
the returned value will have the same precision.
multiplier: Integer factor by which the value is scaled.
Returns:
String with the scaled value, rounded to the precision of the input.
"""
round_digits = 0
fraction_digits = value.find('.')
if fraction_digits >= 0:
round_digits = len(value) - fraction_digits - 1
scaled_value = float(value) * multiplier
if round_digits == 0:
return str(int(scaled_value))
return str(round(scaled_value, round_digits))
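# Examples (added comment), derived from the logic above:
#   _get_scaled_value('30', 1000)   -> '30000'   (no decimal point)
#   _get_scaled_value('12.5', 1000) -> '12500.0' (one fractional digit kept)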
def generate_stat_var(data_row: dict, sv_pv: dict, counters=None) -> str:
"""Add property:values for a StatVar for the given data row.
Args:
data_row: dictionary of a cells in a CSV row keyed by the column name
sv_pv: dictionary of PVs for a statVar into which new properties are added
counters: [optional] error counters to be updated
Returns:
string for the stat_var node id with all PVs in sv_pv
"""
sv_pv.update(_DEFAULT_STAT_VAR_PV)
t_code = data_row['Transaction Code']
fuel = data_row['Commodity Code']
data_sv_pv = un_energy_codes.get_pv_for_energy_code(fuel, t_code, counters)
if data_sv_pv is None or len(data_sv_pv) == 0:
# data row is ignored
return None
if 'Ignore' in data_sv_pv:
# statVar is to be ignored.
ignore_reason = data_sv_pv['Ignore']
ignore_reason = ignore_reason[ignore_reason.find(':') + 1:]
_add_error_counter(f'warning_ignored_stat_var_{ignore_reason}',
f'Invalid statVar {sv_pv} for row {data_row}',
counters)
return None
sv_pv.update(data_sv_pv)
if not is_valid_stat_var(sv_pv):
_add_error_counter('error_invalid_stat_var',
f'Invalid statVar {sv_pv} for row {data_row}',
counters)
return None
node_name = _get_stat_var_id(sv_pv)
if node_name is None or len(node_name) == 0:
_add_error_counter('error_null_stat_var_name',
f'No node id for statVar {sv_pv}', counters)
return None
return f'dcid:{node_name}'
def _get_stat_var_mcf(sv_id: str, sv_pv: dict) -> str:
"""Generate a MCF node string for a statVar
Args:
sv_id: Node Id string for the StatVar
sv_pv: dictionary of all property:values for the StatVar
Returns:
a string with the StatVar node in MCF format, one property per line,
with the properties sorted in alphabetical order.
"""
stat_var = []
stat_var.append(f'Node: {sv_id}')
for p in sorted(sv_pv.keys()):
stat_var.append('{}: {}'.format(p, sv_pv[p]))
return '\n'.join(stat_var)
def _get_stat_var_prop(prop_list: list, sv_pv: dict) -> str:
"""Get the value of the first property from the list in the StatVar.
Args:
prop_list: ordered list of properties looked up in the StatVar
sv_pv: dictionary of StatVar PVs.
Returns:
value of the property without the namespace prefix or
None if none of the properties exist in the statVar.
"""
for prop in prop_list:
if prop in sv_pv:
prop_value = sv_pv[prop]
if prop_value is not None:
return prop_value[prop_value.find(':') + 1:]
return ''
def _add_stat_var_description(data_row: dict, sv_pv: dict):
"""Adds a description to the StatVar using the input data_row containing
the codes and text fields.
Args:
data_row: Dictionary with input/output CSV columns.
sv_pv: Dictionary of StatVar PVs
"""
if 'description' in sv_pv:
return
code = data_row['Commodity - Transaction Code']
transaction = data_row['Commodity - Transaction']
fuel_name = _get_stat_var_prop(
['energySource', 'fuelType', 'populationType'], sv_pv)
measured_prop = _get_stat_var_prop(['measuredProperty'], sv_pv)
sv_pv[
'description'] = f'"UN Energy data for {fuel_name} {measured_prop}, {transaction} (code: {code})"'
def _process_row(data_row: dict, sv_map: dict, row_map: dict, sv_obs: dict,
csv_writer, f_out_mcf, counters):
"""Process a single row of input data for un energy.
Generate a statvar for the fuel and transaction code and adds the MCF for the
unique StatVars into the f_out_mcf file and the columns for the StatVarObservation
into the csv_writer.
Args:
data_row: dictionary of CSV column values from the input file.
sv_map: dictionary of statVar ids that are already emitted into f_out_mcf
row_map: dictionary of data rows already processed.
Used to dedup input rows.
sv_obs: dictionary of StatVarObs already emitted
csv_writer: file handle to write statvar observation values into.
f_out_mcf: file handle to write unique statVar MCF nodes
counters: counters to be updated
"""
counters['inputs_processed'] += 1
fuel = data_row['Commodity Code']
country_code = data_row['Country or Area Code']
country_name = data_row['Country or Area']
t_code = data_row['Transaction Code']
ct_code = data_row['Commodity - Transaction Code']
ct_name = data_row['Commodity - Transaction']
year = data_row['Year']
units = data_row['Unit']
quantity = data_row['Quantity']
notes = data_row['Quantity Footnotes']
# Ignore the column header and footers in case csv files were concatenated.
if fuel == 'Commodity Code' or fuel == 'fnSeqID' or fuel == '1' or fuel == '':
return
if fuel is None or country_code is None or t_code is None or year is None or quantity is None:
_add_error_counter(f'error_invalid_input_row',
f'Invalid data row {data_row}', counters)
return
# Check for duplicate rows
row_key = f'{fuel}-{t_code}-{country_code}-{quantity}-{units}-{notes}'
row_map[row_key] += 1
if row_map[row_key] > 1:
_add_error_counter('inputs_ignored_duplicate',
f'Duplicate input row: {data_row}', counters)
return
# Get the country from the numeric code.
country_dcid = get_country_dcid(country_code)
if country_dcid is None:
_add_error_counter(
f'error_unknown_country_code_{country_code}',
f'Country code: {country_code}, name: {country_name}', counters)
return
if len(country_dcid) == 0:
_add_error_counter(
f'warning_ignoring_country_code_{country_code}',
f'Country ignored: {country_code}, name: {country_name}', counters)
return
data_row['Country_dcid'] = f'dcs:{country_dcid}'
# Add the quantity units and multiplier for the value if any.
unit_dcid, multiplier = un_energy_codes.get_unit_dcid_scale(units)
if not unit_dcid or not multiplier:
_add_error_counter('error_unknown_units',
f'Unit: {units}, Transaction: {ct_name}', counters)
return
data_row['Unit_dcid'] = unit_dcid
if multiplier > 1:
data_row['Quantity'] = _get_scaled_value(quantity, multiplier)
# The observation is an estimated value if it has a footnote.
if notes == "1":
data_row['Estimate'] = 'UNStatsEstimate'
# Generate a StatVar for the row using the fuel and transaction code values.
sv_pv = {}
sv_id = generate_stat_var(data_row, sv_pv, counters)
if not sv_id:
return
data_row['StatVar'] = sv_id
if sv_id not in sv_map:
# New stat var generated. Output PVs to the statvar mcf file.
_add_stat_var_description(data_row, sv_pv)
stat_var_mcf = _get_stat_var_mcf(sv_id, sv_pv)
_print_debug(1, 'Generating stat var node: ', stat_var_mcf)
f_out_mcf.write('\n\n')
f_out_mcf.write(stat_var_mcf)
counters['output_stat_vars'] += 1
sv_map[sv_id] += 1
# Check for duplicate StatVarObs.
obs_key = f'{sv_id}-{country_dcid}-{year}'
cur_value = f'{quantity}-{notes}'
if obs_key in sv_obs:
prev_value = sv_obs[obs_key]
_add_error_counter(
'warning_duplicate_obs_dropped',
f'Duplicate value {cur_value} for SVO: {obs_key}, prev: {prev_value}',
counters)
return
sv_obs[obs_key] = cur_value
# Write the StatVarObs into the csv file.
csv_writer.writerow(data_row)
# Update counters.
for prop in sv_pv:
counters[f'outputs_with_property_{prop}'] += 1
counters['output_csv_rows'] += 1
def process(in_paths: list,
out_path: str,
debug_lines=1,
copy_input_columns=False) -> dict:
"""Read data from CSV and create CSV,MCF with StatVars and tMCF for DC import.
Generates the following output files:
- .csv: File with StatVarObservations
- .mcf: File with StatVar Nodes in MCF format
- .tmcf: File with tMCF for the StatVarObservation
Args:
in_paths: list of UN Energy CSV data files to be processed.
out_path: prefix for the output StatVarObs csv and StatVar mcf files.
debug_lines: Generate each error message once every debug_lines.
copy_input_columns: Copy contents of input csv columns that are not used
in statVarObs as well into the output csv.
INPUT_CSV_COLUMNS_COPIED is the list of such columns.
Returns:
Counters after processing
"""
counters = defaultdict(lambda: 0)
counters['debug_lines'] = debug_lines
sv_map = defaultdict(lambda: 0)
row_map = defaultdict(lambda: 0)
sv_obs = {}
csv_file_path = out_path + '.csv'
start_ts = time.perf_counter()
counters['time_start'] = start_ts
# Setup the output file handles for MCF and CSV.
output_columns = list(OUTPUT_CSV_COLUMNS)
if copy_input_columns:
output_columns.extend(INPUT_CSV_COLUMNS_COPIED)
with open(csv_file_path, 'w', newline='') as f_out_csv:
csv_writer = csv.DictWriter(f_out_csv,
fieldnames=output_columns,
extrasaction='ignore',
lineterminator='\n')
csv_writer.writeheader()
mcf_file_path = out_path + '.mcf'
with open(mcf_file_path, 'w+', newline='') as f_out_mcf:
# Process each CSV input file, one row at a time.
for in_file in in_paths:
print(f'Processing data file: {in_file}')
with open(in_file) as csvfile:
counters['input_files'] += 1
line = 0
reader = csv.DictReader(csvfile)
for data_row in reader:
line += 1
data_row['_File'] = in_file
data_row['_Row'] = line
_process_row(data_row, sv_map, row_map, sv_obs,
csv_writer, f_out_mcf, counters)
_print_counters(counters, counters['debug_lines'])
print(f'Processed {line} rows from data file: {in_file}')
f_out_mcf.write('\n')
# Generate the tMCF file
tmcf_file_path = out_path + '.tmcf'
with open(tmcf_file_path, 'w', newline='') as f_out_tmcf:
f_out_tmcf.write(UN_ENERGY_TMCF)
end_ts = time.perf_counter()
counters['time_end'] = end_ts
counters['time_total_seconds'] = end_ts - start_ts
_print_counters(counters)
_print_counters(sv_map)
print(
'Processing rate: {:.2f}'.format(counters['inputs_processed'] /
(end_ts - start_ts)), 'rows/sec')
return counters
def main(_):
csv_data_files = FLAGS.csv_data_files
if len(csv_data_files) == 0:
print(f'Downloading energy data set files')
csv_data_files = download.download_un_energy_dataset()
if len(csv_data_files) > 0 and FLAGS.output_path != '':
process(csv_data_files, FLAGS.output_path, FLAGS.debug_lines,
FLAGS.copy_input_columns)
else:
print('Please specify files to process with --csv_data_files=<file1>,<file2>,...')
if __name__ == '__main__':
print('running main')
app.run(main)
|
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Python wrapper for Android uiautomator tool."""
import sys
import os
import subprocess
import time
import itertools
import json
import hashlib
import socket
import re
import collections
DEVICE_PORT = int(os.environ.get('UIAUTOMATOR_DEVICE_PORT', '9008'))
LOCAL_PORT = int(os.environ.get('UIAUTOMATOR_LOCAL_PORT', '9008'))
if 'localhost' not in os.environ.get('no_proxy', ''):
os.environ['no_proxy'] = "localhost,%s" % os.environ.get('no_proxy', '')
try:
import urllib2
except ImportError:
import urllib.request as urllib2
try:
from httplib import HTTPException
except:
from http.client import HTTPException
try:
if os.name == 'nt':
import urllib3
except: # to fix python setup error on Windows.
pass
__version__ = "0.1.35"
__author__ = "Xiaocong He"
__all__ = ["device", "Device", "rect", "point", "Selector", "JsonRPCError"]
def U(x):
if sys.version_info.major == 2:
return x.decode('utf-8') if type(x) is str else x
elif sys.version_info.major == 3:
return x
def param_to_property(*props, **kwprops):
if props and kwprops:
raise SyntaxError("Can not set both props and kwprops at the same time.")
class Wrapper(object):
def __init__(self, func):
self.func = func
self.kwargs, self.args = {}, []
def __getattr__(self, attr):
if kwprops:
for prop_name, prop_values in kwprops.items():
if attr in prop_values and prop_name not in self.kwargs:
self.kwargs[prop_name] = attr
return self
elif attr in props:
self.args.append(attr)
return self
raise AttributeError("%s parameter is duplicated or not allowed!" % attr)
def __call__(self, *args, **kwargs):
if kwprops:
kwargs.update(self.kwargs)
self.kwargs = {}
return self.func(*args, **kwargs)
else:
new_args, self.args = self.args + list(args), []
return self.func(*new_args, **kwargs)
return Wrapper
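# Added note: param_to_property turns a vocabulary of positional or keyword
# values into chained attribute access. For a hypothetical
# @param_to_property(action=["press", "release"]) the wrapped function can
# be invoked as obj.press(...), and the wrapper injects action="press"
# into the underlying call.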
class JsonRPCError(Exception):
def __init__(self, code, message):
self.code = int(code)
self.message = message
def __str__(self):
return "JsonRPC Error code: %d, Message: %s" % (self.code, self.message)
class JsonRPCMethod(object):
if os.name == 'nt':
pool = urllib3.PoolManager()
def __init__(self, url, method, timeout=30):
self.url, self.method, self.timeout = url, method, timeout
def __call__(self, *args, **kwargs):
if args and kwargs:
raise SyntaxError("Could not accept both *args and **kwargs as JSONRPC parameters.")
data = {"jsonrpc": "2.0", "method": self.method, "id": self.id()}
if args:
data["params"] = args
elif kwargs:
data["params"] = kwargs
jsonresult = {"result": ""}
if os.name == "nt":
res = self.pool.urlopen("POST",
self.url,
headers={"Content-Type": "application/json"},
body=json.dumps(data).encode("utf-8"),
timeout=self.timeout)
jsonresult = json.loads(res.data.decode("utf-8"))
else:
result = None
try:
req = urllib2.Request(self.url,
json.dumps(data).encode("utf-8"),
{"Content-type": "application/json"})
result = urllib2.urlopen(req, timeout=self.timeout)
jsonresult = json.loads(result.read().decode("utf-8"))
finally:
if result is not None:
result.close()
if "error" in jsonresult and jsonresult["error"]:
raise JsonRPCError(
jsonresult["error"]["code"],
"%s: %s" % (jsonresult["error"]["data"]["exceptionTypeName"], jsonresult["error"]["message"])
)
return jsonresult["result"]
def id(self):
m = hashlib.md5()
m.update(("%s at %f" % (self.method, time.time())).encode("utf-8"))
return m.hexdigest()
class JsonRPCClient(object):
def __init__(self, url, timeout=30, method_class=JsonRPCMethod):
self.url = url
self.timeout = timeout
self.method_class = method_class
def __getattr__(self, method):
return self.method_class(self.url, method, timeout=self.timeout)
class Selector(dict):
"""The class is to build parameters for UiSelector passed to Android device.
"""
__fields = {
"text": (0x01, None), # MASK_TEXT,
"textContains": (0x02, None), # MASK_TEXTCONTAINS,
"textMatches": (0x04, None), # MASK_TEXTMATCHES,
"textStartsWith": (0x08, None), # MASK_TEXTSTARTSWITH,
"className": (0x10, None), # MASK_CLASSNAME
"classNameMatches": (0x20, None), # MASK_CLASSNAMEMATCHES
"description": (0x40, None), # MASK_DESCRIPTION
"descriptionContains": (0x80, None), # MASK_DESCRIPTIONCONTAINS
"descriptionMatches": (0x0100, None), # MASK_DESCRIPTIONMATCHES
"descriptionStartsWith": (0x0200, None), # MASK_DESCRIPTIONSTARTSWITH
"checkable": (0x0400, False), # MASK_CHECKABLE
"checked": (0x0800, False), # MASK_CHECKED
"clickable": (0x1000, False), # MASK_CLICKABLE
"longClickable": (0x2000, False), # MASK_LONGCLICKABLE,
"scrollable": (0x4000, False), # MASK_SCROLLABLE,
"enabled": (0x8000, False), # MASK_ENABLED,
"focusable": (0x010000, False), # MASK_FOCUSABLE,
"focused": (0x020000, False), # MASK_FOCUSED,
"selected": (0x040000, False), # MASK_SELECTED,
"packageName": (0x080000, None), # MASK_PACKAGENAME,
"packageNameMatches": (0x100000, None), # MASK_PACKAGENAMEMATCHES,
"resourceId": (0x200000, None), # MASK_RESOURCEID,
"resourceIdMatches": (0x400000, None), # MASK_RESOURCEIDMATCHES,
"index": (0x800000, 0), # MASK_INDEX,
"instance": (0x01000000, 0) # MASK_INSTANCE,
}
__mask, __childOrSibling, __childOrSiblingSelector = "mask", "childOrSibling", "childOrSiblingSelector"
def __init__(self, **kwargs):
super(Selector, self).__setitem__(self.__mask, 0)
super(Selector, self).__setitem__(self.__childOrSibling, [])
super(Selector, self).__setitem__(self.__childOrSiblingSelector, [])
for k in kwargs:
self[k] = kwargs[k]
def __setitem__(self, k, v):
if k in self.__fields:
super(Selector, self).__setitem__(U(k), U(v))
super(Selector, self).__setitem__(self.__mask, self[self.__mask] | self.__fields[k][0])
else:
raise ReferenceError("%s is not allowed." % k)
def __delitem__(self, k):
if k in self.__fields:
super(Selector, self).__delitem__(k)
super(Selector, self).__setitem__(self.__mask, self[self.__mask] & ~self.__fields[k][0])
def clone(self):
kwargs = dict((k, self[k]) for k in self
if k not in [self.__mask, self.__childOrSibling, self.__childOrSiblingSelector])
selector = Selector(**kwargs)
for v in self[self.__childOrSibling]:
selector[self.__childOrSibling].append(v)
for s in self[self.__childOrSiblingSelector]:
selector[self.__childOrSiblingSelector].append(s.clone())
return selector
def child(self, **kwargs):
self[self.__childOrSibling].append("child")
self[self.__childOrSiblingSelector].append(Selector(**kwargs))
return self
def sibling(self, **kwargs):
self[self.__childOrSibling].append("sibling")
self[self.__childOrSiblingSelector].append(Selector(**kwargs))
return self
child_selector, from_parent = child, sibling
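    # Example of how the selector dict is assembled (mask values taken from __fields
    # above, shown only for illustration): Selector(text="OK", clickable=True) stores
    # {"text": "OK", "clickable": True} and sets "mask" to 0x01 | 0x1000, which the
    # device-side UiSelector implementation uses to know which fields are populated.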
def rect(top=0, left=0, bottom=100, right=100):
return {"top": top, "left": left, "bottom": bottom, "right": right}
def intersect(rect1, rect2):
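    '''Return (left, top, right, bottom) of the overlap of two rects; the values
    cross over (left > right or top > bottom) when the rects do not overlap, which
    the left/right/up/down helpers of AutomatorDeviceObject rely on.'''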
top = rect1["top"] if rect1["top"] > rect2["top"] else rect2["top"]
bottom = rect1["bottom"] if rect1["bottom"] < rect2["bottom"] else rect2["bottom"]
left = rect1["left"] if rect1["left"] > rect2["left"] else rect2["left"]
right = rect1["right"] if rect1["right"] < rect2["right"] else rect2["right"]
return left, top, right, bottom
def point(x=0, y=0):
return {"x": x, "y": y}
class Adb(object):
def __init__(self, serial=None, adb_server_host=None, adb_server_port=None):
self.__adb_cmd = None
self.default_serial = serial if serial else os.environ.get("ANDROID_SERIAL", None)
self.adb_server_host = str(adb_server_host if adb_server_host else 'localhost')
self.adb_server_port = str(adb_server_port if adb_server_port else '5037')
self.adbHostPortOptions = []
if self.adb_server_host not in ['localhost', '127.0.0.1']:
self.adbHostPortOptions += ["-H", self.adb_server_host]
if self.adb_server_port != '5037':
self.adbHostPortOptions += ["-P", self.adb_server_port]
def adb(self):
if self.__adb_cmd is None:
if "ANDROID_HOME" in os.environ:
filename = "adb.exe" if os.name == 'nt' else "adb"
adb_cmd = os.path.join(os.environ["ANDROID_HOME"], "platform-tools", filename)
if not os.path.exists(adb_cmd):
raise EnvironmentError(
"Adb not found in $ANDROID_HOME path: %s." % os.environ["ANDROID_HOME"])
else:
import distutils
if "spawn" not in dir(distutils):
import distutils.spawn
adb_cmd = distutils.spawn.find_executable("adb")
if adb_cmd:
adb_cmd = os.path.realpath(adb_cmd)
else:
raise EnvironmentError("$ANDROID_HOME environment not set.")
self.__adb_cmd = adb_cmd
return self.__adb_cmd
def cmd(self, *args, **kwargs):
'''adb command, add -s serial by default. return the subprocess.Popen object.'''
serial = self.device_serial()
if serial:
if " " in serial: # TODO how to include special chars on command line
serial = "'%s'" % serial
return self.raw_cmd(*["-s", serial] + list(args))
else:
return self.raw_cmd(*args)
def raw_cmd(self, *args):
'''adb command. return the subprocess.Popen object.'''
cmd_line = [self.adb()] + self.adbHostPortOptions + list(args)
if os.name != "nt":
cmd_line = [" ".join(cmd_line)]
return subprocess.Popen(cmd_line, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
def device_serial(self):
if not self.default_serial:
devices = self.devices()
if devices:
                if len(devices) == 1:
self.default_serial = list(devices.keys())[0]
else:
raise EnvironmentError("Multiple devices attached but default android serial not set.")
else:
raise EnvironmentError("Device not attached.")
return self.default_serial
def devices(self):
'''get a dict of attached devices. key is the device serial, value is device name.'''
out = self.raw_cmd("devices").communicate()[0].decode("utf-8")
match = "List of devices attached"
index = out.find(match)
if index < 0:
raise EnvironmentError("adb is not working.")
return dict([s.split("\t") for s in out[index + len(match):].strip().splitlines() if s.strip()])
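        # For illustration: output such as "List of devices attached\nemulator-5554\tdevice"
        # parses to {"emulator-5554": "device"}.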
def forward(self, local_port, device_port):
'''adb port forward. return 0 if success, else non-zero.'''
return self.cmd("forward", "tcp:%d" % local_port, "tcp:%d" % device_port).wait()
def forward_list(self):
'''adb forward --list'''
version = self.version()
if int(version[1]) <= 1 and int(version[2]) <= 0 and int(version[3]) < 31:
raise EnvironmentError("Low adb version.")
lines = self.raw_cmd("forward", "--list").communicate()[0].decode("utf-8").strip().splitlines()
return [line.strip().split() for line in lines]
def version(self):
'''adb version'''
match = re.search(r"(\d+)\.(\d+)\.(\d+)", self.raw_cmd("version").communicate()[0].decode("utf-8"))
return [match.group(i) for i in range(4)]
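        # For illustration: output containing "1.0.39" yields ['1.0.39', '1', '0', '39']
        # (full match plus the three numeric groups), which forward_list() uses for its
        # minimum-version check.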
_init_local_port = LOCAL_PORT-1
def next_local_port(adbHost=None):
def is_port_listening(port):
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
result = s.connect_ex((str(adbHost) if adbHost else '127.0.0.1', port))
s.close()
return result == 0
global _init_local_port
_init_local_port = _init_local_port + 1 if _init_local_port < 32764 else LOCAL_PORT
while is_port_listening(_init_local_port):
_init_local_port += 1
return _init_local_port
class NotFoundHandler(object):
'''
Handler for UI Object Not Found exception.
    It's a replacement for the UiAutomator watcher on the device side.
'''
def __init__(self):
self.__handlers = collections.defaultdict(lambda: {'on': True, 'handlers': []})
def __get__(self, instance, type):
return self.__handlers[instance.adb.device_serial()]
class AutomatorServer(object):
"""start and quit rpc server on device.
"""
__jar_files = {
"bundle.jar": "libs/bundle.jar",
"uiautomator-stub.jar": "libs/uiautomator-stub.jar"
}
handlers = NotFoundHandler() # handler UI Not Found exception
def __init__(self, serial=None, local_port=None, device_port=None, adb_server_host=None, adb_server_port=None):
self.uiautomator_process = None
self.adb = Adb(serial=serial, adb_server_host=adb_server_host, adb_server_port=adb_server_port)
self.device_port = int(device_port) if device_port else DEVICE_PORT
if local_port:
self.local_port = local_port
else:
try: # first we will try to use the local port already adb forwarded
for s, lp, rp in self.adb.forward_list():
if s == self.adb.device_serial() and rp == 'tcp:%d' % self.device_port:
self.local_port = int(lp[4:])
break
else:
self.local_port = next_local_port(adb_server_host)
except:
self.local_port = next_local_port(adb_server_host)
def push(self):
base_dir = os.path.dirname(__file__)
for jar, url in self.__jar_files.items():
filename = os.path.join(base_dir, url)
self.adb.cmd("push", filename, "/data/local/tmp/").wait()
return list(self.__jar_files.keys())
def download(self, filename, url):
with open(filename, 'wb') as file:
res = None
try:
res = urllib2.urlopen(url)
file.write(res.read())
finally:
if res is not None:
res.close()
@property
def jsonrpc(self):
return self.jsonrpc_wrap(timeout=int(os.environ.get("jsonrpc_timeout", 90)))
def jsonrpc_wrap(self, timeout):
server = self
ERROR_CODE_BASE = -32000
def _JsonRPCMethod(url, method, timeout, restart=True):
_method_obj = JsonRPCMethod(url, method, timeout)
def wrapper(*args, **kwargs):
URLError = urllib3.exceptions.HTTPError if os.name == "nt" else urllib2.URLError
try:
return _method_obj(*args, **kwargs)
except (URLError, socket.error, HTTPException) as e:
if restart:
server.stop()
server.start(timeout=30)
return _JsonRPCMethod(url, method, timeout, False)(*args, **kwargs)
else:
raise
except JsonRPCError as e:
if e.code >= ERROR_CODE_BASE - 1:
server.stop()
server.start()
return _method_obj(*args, **kwargs)
elif e.code == ERROR_CODE_BASE - 2 and self.handlers['on']: # Not Found
try:
self.handlers['on'] = False
# any handler returns True will break the left handlers
any(handler(self.handlers.get('device', None)) for handler in self.handlers['handlers'])
finally:
self.handlers['on'] = True
return _method_obj(*args, **kwargs)
raise
return wrapper
return JsonRPCClient(self.rpc_uri,
timeout=timeout,
method_class=_JsonRPCMethod)
def __jsonrpc(self):
return JsonRPCClient(self.rpc_uri, timeout=int(os.environ.get("JSONRPC_TIMEOUT", 90)))
def start(self, timeout=5):
files = self.push()
cmd = list(itertools.chain(
["shell", "uiautomator", "runtest"],
files,
["-c", "com.github.uiautomatorstub.Stub"]
))
self.uiautomator_process = self.adb.cmd(*cmd)
self.adb.forward(self.local_port, self.device_port)
while not self.alive and timeout > 0:
time.sleep(0.1)
timeout -= 0.1
if not self.alive:
raise IOError("RPC server not started!")
def ping(self):
try:
return self.__jsonrpc().ping()
except:
return None
@property
def alive(self):
'''Check if the rpc server is alive.'''
return self.ping() == "pong"
def stop(self):
'''Stop the rpc server.'''
if self.uiautomator_process and self.uiautomator_process.poll() is None:
res = None
try:
res = urllib2.urlopen(self.stop_uri)
self.uiautomator_process.wait()
except:
self.uiautomator_process.kill()
finally:
if res is not None:
res.close()
self.uiautomator_process = None
try:
out = self.adb.cmd("shell", "ps", "-C", "uiautomator").communicate()[0].decode("utf-8").strip().splitlines()
if out:
index = out[0].split().index("PID")
for line in out[1:]:
if len(line.split()) > index:
self.adb.cmd("shell", "kill", "-9", line.split()[index]).wait()
except:
pass
@property
def stop_uri(self):
return "http://%s:%d/stop" % (self.adb.adb_server_host, self.local_port)
@property
def rpc_uri(self):
return "http://%s:%d/jsonrpc/0" % (self.adb.adb_server_host, self.local_port)
class AutomatorDevice(object):
'''uiautomator wrapper of android device'''
__orientation = ( # device orientation
(0, "natural", "n", 0),
(1, "left", "l", 90),
(2, "upsidedown", "u", 180),
(3, "right", "r", 270)
)
__alias = {
"width": "displayWidth",
"height": "displayHeight"
}
def __init__(self, serial=None, local_port=None, adb_server_host=None, adb_server_port=None):
self.server = AutomatorServer(
serial=serial,
local_port=local_port,
adb_server_host=adb_server_host,
adb_server_port=adb_server_port
)
def __call__(self, **kwargs):
return AutomatorDeviceObject(self, Selector(**kwargs))
def __getattr__(self, attr):
'''alias of fields in info property.'''
info = self.info
if attr in info:
return info[attr]
elif attr in self.__alias:
return info[self.__alias[attr]]
else:
raise AttributeError("%s attribute not found!" % attr)
@property
def info(self):
'''Get the device info.'''
return self.server.jsonrpc.deviceInfo()
def click(self, x, y):
'''click at arbitrary coordinates.'''
return self.server.jsonrpc.click(x, y)
def long_click(self, x, y):
'''long click at arbitrary coordinates.'''
return self.swipe(x, y, x + 1, y + 1)
def swipe(self, sx, sy, ex, ey, steps=100):
return self.server.jsonrpc.swipe(sx, sy, ex, ey, steps)
def drag(self, sx, sy, ex, ey, steps=100):
'''Swipe from one point to another point.'''
return self.server.jsonrpc.drag(sx, sy, ex, ey, steps)
def dump(self, filename=None, compressed=True):
'''dump device window and pull to local file.'''
content = self.server.jsonrpc.dumpWindowHierarchy(compressed, None)
if filename:
with open(filename, "wb") as f:
f.write(content.encode("utf-8"))
return content
def screenshot(self, filename, scale=1.0, quality=100):
'''take screenshot.'''
device_file = self.server.jsonrpc.takeScreenshot("screenshot.png",
scale, quality)
if not device_file:
return None
p = self.server.adb.cmd("pull", device_file, filename)
p.wait()
self.server.adb.cmd("shell", "rm", device_file).wait()
        return filename if p.returncode == 0 else None
def freeze_rotation(self, freeze=True):
'''freeze or unfreeze the device rotation in current status.'''
self.server.jsonrpc.freezeRotation(freeze)
@property
def orientation(self):
'''
        orienting the device to left/right or natural.
left/l: rotation=90 , displayRotation=1
right/r: rotation=270, displayRotation=3
natural/n: rotation=0 , displayRotation=0
upsidedown/u: rotation=180, displayRotation=2
'''
return self.__orientation[self.info["displayRotation"]][1]
@orientation.setter
def orientation(self, value):
'''setter of orientation property.'''
for values in self.__orientation:
if value in values:
# can not set upside-down until api level 18.
self.server.jsonrpc.setOrientation(values[1])
break
else:
raise ValueError("Invalid orientation.")
@property
def last_traversed_text(self):
'''get last traversed text. used in webview for highlighted text.'''
return self.server.jsonrpc.getLastTraversedText()
def clear_traversed_text(self):
'''clear the last traversed text.'''
self.server.jsonrpc.clearLastTraversedText()
@property
def open(self):
'''
Open notification or quick settings.
Usage:
d.open.notification()
d.open.quick_settings()
'''
@param_to_property(action=["notification", "quick_settings"])
def _open(action):
if action == "notification":
return self.server.jsonrpc.openNotification()
else:
return self.server.jsonrpc.openQuickSettings()
return _open
@property
def handlers(self):
obj = self
class Handlers(object):
def on(self, fn):
if fn not in obj.server.handlers['handlers']:
obj.server.handlers['handlers'].append(fn)
obj.server.handlers['device'] = obj
return fn
def off(self, fn):
if fn in obj.server.handlers['handlers']:
obj.server.handlers['handlers'].remove(fn)
return Handlers()
@property
def watchers(self):
obj = self
class Watchers(list):
def __init__(self):
for watcher in obj.server.jsonrpc.getWatchers():
self.append(watcher)
@property
def triggered(self):
return obj.server.jsonrpc.hasAnyWatcherTriggered()
def remove(self, name=None):
if name:
obj.server.jsonrpc.removeWatcher(name)
else:
for name in self:
obj.server.jsonrpc.removeWatcher(name)
def reset(self):
obj.server.jsonrpc.resetWatcherTriggers()
return self
def run(self):
obj.server.jsonrpc.runWatchers()
return self
return Watchers()
def watcher(self, name):
obj = self
class Watcher(object):
def __init__(self):
self.__selectors = []
@property
def triggered(self):
return obj.server.jsonrpc.hasWatcherTriggered(name)
def remove(self):
obj.server.jsonrpc.removeWatcher(name)
def when(self, **kwargs):
self.__selectors.append(Selector(**kwargs))
return self
def click(self, **kwargs):
obj.server.jsonrpc.registerClickUiObjectWatcher(name, self.__selectors, Selector(**kwargs))
@property
def press(self):
@param_to_property(
"home", "back", "left", "right", "up", "down", "center",
"search", "enter", "delete", "del", "recent", "volume_up",
"menu", "volume_down", "volume_mute", "camera", "power")
def _press(*args):
obj.server.jsonrpc.registerPressKeyskWatcher(name, self.__selectors, args)
return _press
return Watcher()
@property
def press(self):
'''
press key via name or key code. Supported key name includes:
home, back, left, right, up, down, center, menu, search, enter,
delete(or del), recent(recent apps), volume_up, volume_down,
volume_mute, camera, power.
Usage:
d.press.back() # press back key
        d.press.menu() # press menu key
d.press(89) # press keycode
'''
@param_to_property(
key=["home", "back", "left", "right", "up", "down", "center",
"menu", "search", "enter", "delete", "del", "recent",
"volume_up", "volume_down", "volume_mute", "camera", "power"]
)
def _press(key, meta=None):
if isinstance(key, int):
return self.server.jsonrpc.pressKeyCode(key, meta) if meta else self.server.jsonrpc.pressKeyCode(key)
else:
return self.server.jsonrpc.pressKey(str(key))
return _press
def wakeup(self):
'''turn on screen in case of screen off.'''
self.server.jsonrpc.wakeUp()
def sleep(self):
'''turn off screen in case of screen on.'''
self.server.jsonrpc.sleep()
@property
def screen(self):
'''
Turn on/off screen.
Usage:
d.screen.on()
d.screen.off()
'''
@param_to_property(action=["on", "off"])
def _screen(action):
return self.wakeup() if action == "on" else self.sleep()
return _screen
@property
def wait(self):
'''
        Waits for the current application to become idle or for a window update event to occur.
Usage:
d.wait.idle(timeout=1000)
d.wait.update(timeout=1000, package_name="com.android.settings")
'''
@param_to_property(action=["idle", "update"])
def _wait(action, timeout=1000, package_name=None):
if timeout/1000 + 5 > int(os.environ.get("JSONRPC_TIMEOUT", 90)):
http_timeout = timeout/1000 + 5
else:
http_timeout = int(os.environ.get("JSONRPC_TIMEOUT", 90))
if action == "idle":
return self.server.jsonrpc_wrap(timeout=http_timeout).waitForIdle(timeout)
elif action == "update":
return self.server.jsonrpc_wrap(timeout=http_timeout).waitForWindowUpdate(package_name, timeout)
return _wait
def exists(self, **kwargs):
'''Check if the specified ui object by kwargs exists.'''
return self(**kwargs).exists
Device = AutomatorDevice
class AutomatorDeviceUiObject(object):
'''Represent a UiObject, on which user can perform actions, such as click, set text
'''
__alias = {'description': "contentDescription"}
def __init__(self, device, selector):
self.device = device
self.jsonrpc = device.server.jsonrpc
self.selector = selector
@property
def exists(self):
'''check if the object exists in current window.'''
return self.jsonrpc.exist(self.selector)
def __getattr__(self, attr):
'''alias of fields in info property.'''
info = self.info
if attr in info:
return info[attr]
elif attr in self.__alias:
return info[self.__alias[attr]]
else:
raise AttributeError("%s attribute not found!" % attr)
@property
def info(self):
'''ui object info.'''
return self.jsonrpc.objInfo(self.selector)
def set_text(self, text):
'''set the text field.'''
if text in [None, ""]:
return self.jsonrpc.clearTextField(self.selector) # TODO no return
else:
return self.jsonrpc.setText(self.selector, text)
def clear_text(self):
'''clear text. alias for set_text(None).'''
self.set_text(None)
@property
def click(self):
'''
click on the ui object.
Usage:
d(text="Clock").click() # click on the center of the ui object
d(text="OK").click.wait(timeout=3000) # click and wait for the new window update
d(text="John").click.topleft() # click on the topleft of the ui object
d(text="John").click.bottomright() # click on the bottomright of the ui object
'''
@param_to_property(action=["tl", "topleft", "br", "bottomright", "wait"])
def _click(action=None, timeout=3000):
if action is None:
return self.jsonrpc.click(self.selector)
elif action in ["tl", "topleft", "br", "bottomright"]:
return self.jsonrpc.click(self.selector, action)
else:
return self.jsonrpc.clickAndWaitForNewWindow(self.selector, timeout)
return _click
@property
def long_click(self):
'''
Perform a long click action on the object.
Usage:
d(text="Image").long_click() # long click on the center of the ui object
d(text="Image").long_click.topleft() # long click on the topleft of the ui object
d(text="Image").long_click.bottomright() # long click on the topleft of the ui object
'''
@param_to_property(corner=["tl", "topleft", "br", "bottomright"])
def _long_click(corner=None):
info = self.info
if info["longClickable"]:
if corner:
return self.jsonrpc.longClick(self.selector, corner)
else:
return self.jsonrpc.longClick(self.selector)
else:
bounds = info.get("visibleBounds") or info.get("bounds")
if corner in ["tl", "topleft"]:
x = (5*bounds["left"] + bounds["right"])/6
y = (5*bounds["top"] + bounds["bottom"])/6
elif corner in ["br", "bottomright"]:
x = (bounds["left"] + 5*bounds["right"])/6
y = (bounds["top"] + 5*bounds["bottom"])/6
else:
x = (bounds["left"] + bounds["right"])/2
y = (bounds["top"] + bounds["bottom"])/2
return self.device.long_click(x, y)
return _long_click
@property
def drag(self):
'''
Drag the ui object to other point or ui object.
Usage:
d(text="Clock").drag.to(x=100, y=100) # drag to point (x,y)
d(text="Clock").drag.to(text="Remove") # drag to another object
'''
def to(obj, *args, **kwargs):
if len(args) >= 2 or "x" in kwargs or "y" in kwargs:
drag_to = lambda x, y, steps=100: self.jsonrpc.dragTo(self.selector, x, y, steps)
else:
drag_to = lambda steps=100, **kwargs: self.jsonrpc.dragTo(self.selector, Selector(**kwargs), steps)
return drag_to(*args, **kwargs)
return type("Drag", (object,), {"to": to})()
def gesture(self, start1, start2, *args, **kwargs):
'''
perform two point gesture.
Usage:
d().gesture(startPoint1, startPoint2).to(endPoint1, endPoint2, steps)
d().gesture(startPoint1, startPoint2, endPoint1, endPoint2, steps)
'''
def to(obj_self, end1, end2, steps=100):
ctp = lambda pt: point(*pt) if type(pt) == tuple else pt # convert tuple to point
s1, s2, e1, e2 = ctp(start1), ctp(start2), ctp(end1), ctp(end2)
return self.jsonrpc.gesture(self.selector, s1, s2, e1, e2, steps)
obj = type("Gesture", (object,), {"to": to})()
return obj if len(args) == 0 else to(None, *args, **kwargs)
@property
def pinch(self):
'''
Perform two point gesture from edge to center(in) or center to edge(out).
Usages:
d().pinch.In(percent=100, steps=10)
d().pinch.Out(percent=100, steps=100)
'''
@param_to_property(in_or_out=["In", "Out"])
def _pinch(in_or_out="Out", percent=100, steps=50):
if in_or_out in ["Out", "out"]:
return self.jsonrpc.pinchOut(self.selector, percent, steps)
elif in_or_out in ["In", "in"]:
return self.jsonrpc.pinchIn(self.selector, percent, steps)
return _pinch
@property
def swipe(self):
'''
Perform swipe action.
Usages:
d().swipe.right()
d().swipe.left(steps=10)
d().swipe.up(steps=10)
d().swipe.down()
d().swipe("right", steps=20)
'''
@param_to_property(direction=["up", "down", "right", "left"])
def _swipe(direction="left", steps=10):
return self.jsonrpc.swipe(self.selector, direction, steps)
return _swipe
@property
def wait(self):
'''
        Wait until the ui object is gone or exists.
Usage:
d(text="Clock").wait.gone() # wait until it's gone.
d(text="Settings").wait.exists() # wait until it appears.
'''
@param_to_property(action=["exists", "gone"])
def _wait(action, timeout=3000):
if timeout/1000 + 5 > int(os.environ.get("JSONRPC_TIMEOUT", 90)):
http_timeout = timeout/1000 + 5
else:
http_timeout = int(os.environ.get("JSONRPC_TIMEOUT", 90))
method = self.device.server.jsonrpc_wrap(timeout=http_timeout).waitUntilGone if action == "gone" else self.device.server.jsonrpc_wrap(timeout=http_timeout).waitForExists
return method(self.selector, timeout)
return _wait
class AutomatorDeviceNamedUiObject(AutomatorDeviceUiObject):
def __init__(self, device, name):
super(AutomatorDeviceNamedUiObject, self).__init__(device, name)
def child(self, **kwargs):
return AutomatorDeviceNamedUiObject(
self.device,
self.jsonrpc.getChild(self.selector, Selector(**kwargs))
)
def sibling(self, **kwargs):
return AutomatorDeviceNamedUiObject(
self.device,
self.jsonrpc.getFromParent(self.selector, Selector(**kwargs))
)
class AutomatorDeviceObject(AutomatorDeviceUiObject):
'''Represent a generic UiObject/UiScrollable/UiCollection,
on which user can perform actions, such as click, set text
'''
def __init__(self, device, selector):
super(AutomatorDeviceObject, self).__init__(device, selector)
def child(self, **kwargs):
'''set childSelector.'''
return AutomatorDeviceObject(
self.device,
self.selector.clone().child(**kwargs)
)
def sibling(self, **kwargs):
'''set fromParent selector.'''
return AutomatorDeviceObject(
self.device,
self.selector.clone().sibling(**kwargs)
)
child_selector, from_parent = child, sibling
def child_by_text(self, txt, **kwargs):
if "allow_scroll_search" in kwargs:
allow_scroll_search = kwargs.pop("allow_scroll_search")
name = self.jsonrpc.childByText(
self.selector,
Selector(**kwargs),
txt,
allow_scroll_search
)
else:
name = self.jsonrpc.childByText(
self.selector,
Selector(**kwargs),
txt
)
return AutomatorDeviceNamedUiObject(self.device, name)
def child_by_description(self, txt, **kwargs):
if "allow_scroll_search" in kwargs:
allow_scroll_search = kwargs.pop("allow_scroll_search")
name = self.jsonrpc.childByDescription(
self.selector,
Selector(**kwargs),
txt,
allow_scroll_search
)
else:
name = self.jsonrpc.childByDescription(
self.selector,
Selector(**kwargs),
txt
)
return AutomatorDeviceNamedUiObject(self.device, name)
def child_by_instance(self, inst, **kwargs):
return AutomatorDeviceNamedUiObject(
self.device,
self.jsonrpc.childByInstance(self.selector, Selector(**kwargs), inst)
)
@property
def count(self):
return self.jsonrpc.count(self.selector)
def __len__(self):
return self.count
def __getitem__(self, index):
count = self.count
if index >= count:
raise IndexError()
elif count == 1:
return self
else:
selector = self.selector.clone()
selector["instance"] = index
return AutomatorDeviceObject(self.device, selector)
def __iter__(self):
obj, length = self, self.count
class Iter(object):
def __init__(self):
self.index = -1
def next(self):
self.index += 1
if self.index < length:
return obj[self.index]
else:
raise StopIteration()
__next__ = next
return Iter()
def right(self, **kwargs):
def onrightof(rect1, rect2):
left, top, right, bottom = intersect(rect1, rect2)
return rect2["left"] - rect1["right"] if top < bottom else -1
return self.__view_beside(onrightof, **kwargs)
def left(self, **kwargs):
def onleftof(rect1, rect2):
left, top, right, bottom = intersect(rect1, rect2)
return rect1["left"] - rect2["right"] if top < bottom else -1
return self.__view_beside(onleftof, **kwargs)
def up(self, **kwargs):
def above(rect1, rect2):
left, top, right, bottom = intersect(rect1, rect2)
return rect1["top"] - rect2["bottom"] if left < right else -1
return self.__view_beside(above, **kwargs)
def down(self, **kwargs):
def under(rect1, rect2):
left, top, right, bottom = intersect(rect1, rect2)
return rect2["top"] - rect1["bottom"] if left < right else -1
return self.__view_beside(under, **kwargs)
def __view_beside(self, onsideof, **kwargs):
bounds = self.info["bounds"]
min_dist, found = -1, None
for ui in AutomatorDeviceObject(self.device, Selector(**kwargs)):
dist = onsideof(bounds, ui.info["bounds"])
if dist >= 0 and (min_dist < 0 or dist < min_dist):
min_dist, found = dist, ui
return found
@property
def fling(self):
'''
Perform fling action.
Usage:
d().fling() # default vertically, forward
d().fling.horiz.forward()
d().fling.vert.backward()
d().fling.toBeginning(max_swipes=100) # vertically
d().fling.horiz.toEnd()
'''
@param_to_property(
dimention=["vert", "vertically", "vertical", "horiz", "horizental", "horizentally"],
action=["forward", "backward", "toBeginning", "toEnd"]
)
def _fling(dimention="vert", action="forward", max_swipes=1000):
vertical = dimention in ["vert", "vertically", "vertical"]
if action == "forward":
return self.jsonrpc.flingForward(self.selector, vertical)
elif action == "backward":
return self.jsonrpc.flingBackward(self.selector, vertical)
elif action == "toBeginning":
return self.jsonrpc.flingToBeginning(self.selector, vertical, max_swipes)
elif action == "toEnd":
return self.jsonrpc.flingToEnd(self.selector, vertical, max_swipes)
return _fling
@property
def scroll(self):
'''
        Perform scroll action.
Usage:
d().scroll(steps=50) # default vertically and forward
d().scroll.horiz.forward(steps=100)
d().scroll.vert.backward(steps=100)
d().scroll.horiz.toBeginning(steps=100, max_swipes=100)
d().scroll.vert.toEnd(steps=100)
d().scroll.horiz.to(text="Clock")
'''
def __scroll(vertical, forward, steps=100):
method = self.jsonrpc.scrollForward if forward else self.jsonrpc.scrollBackward
return method(self.selector, vertical, steps)
def __scroll_to_beginning(vertical, steps=100, max_swipes=1000):
return self.jsonrpc.scrollToBeginning(self.selector, vertical, max_swipes, steps)
def __scroll_to_end(vertical, steps=100, max_swipes=1000):
return self.jsonrpc.scrollToEnd(self.selector, vertical, max_swipes, steps)
def __scroll_to(vertical, **kwargs):
return self.jsonrpc.scrollTo(self.selector, Selector(**kwargs), vertical)
@param_to_property(
dimention=["vert", "vertically", "vertical", "horiz", "horizental", "horizentally"],
action=["forward", "backward", "toBeginning", "toEnd", "to"])
def _scroll(dimention="vert", action="forward", **kwargs):
vertical = dimention in ["vert", "vertically", "vertical"]
if action in ["forward", "backward"]:
return __scroll(vertical, action == "forward", **kwargs)
elif action == "toBeginning":
return __scroll_to_beginning(vertical, **kwargs)
elif action == "toEnd":
return __scroll_to_end(vertical, **kwargs)
elif action == "to":
return __scroll_to(vertical, **kwargs)
return _scroll
device = AutomatorDevice()
|
|
#!/usr/bin/python
#
# HD44780 LCD Driver for RaspberryPI
#
# Author: Daniele Costarella <[email protected]>
#
# Date: 07/03/2014
#
__author__ = "Daniele Costarella"
__credits__ = ["Daniele Costarella"]
__license__ = "MIT"
__version__ = "0.1.0"
import RPi.GPIO as GPIO
from time import sleep
"""
RaspberryPi Configuration
"""
LCD_RS = 26 #Control operation type: RS = 1 -> data; RS = 0 -> command
#LCD_RW = #Control operation side: RW = 1 -> read; RW = 0 -> write
LCD_E = 24 #LCD enable pin, send command to lcd on the falling edge
LCD_D = [12, 16, 18, 22, "P8_16", "P8_17", "P8_18", "P8_26"] # LCD_D -> [DB7, DB6 ... , DB0]
class LiquidCrystal_4bits:
def __init__(self):
self.GPIO = GPIO
# only for raspberrypi
self.GPIO.setmode(GPIO.BOARD)
""" GPIO pins setup """
self.GPIO.setup(LCD_RS, GPIO.OUT)
self.GPIO.setup(LCD_E, GPIO.OUT)
#self.GPIO.setup(LCD_RW, GPIO.OUT)
for i in range(0, 4):
GPIO.setup(LCD_D[i], GPIO.OUT)
print "[DEBUG] 4-operation setup: OK"
""" LCD Initialization """
self.GPIO.output(LCD_E,False) # put enable pin low
sleep(0.01)
self.writeByte(0x33,'cmd') # function set
sleep(0.01)
self.writeByte(0x32,'cmd') #
sleep(0.01)
self.writeByte(0x28,'cmd') # 4 bit operation, 2 lines, 5x8 dots
sleep(0.01)
self.writeByte(0x0C,'cmd') # display on/off control: display on, cursor off
sleep(0.01)
self.writeByte(0x06,'cmd') # entry mode set: increase address by one, no shift
sleep(0.01)
self.writeByte(0x01,'cmd')
sleep(0.01)
def enablePulse(self):
""" Put Enable pin to HIGH and then put back to LOW """
self.GPIO.output(LCD_E, True)
        # wait 10 ms
sleep(0.01)
self.GPIO.output(LCD_E, False)
def clear(self):
self.writeByte(0x01, 'cmd') # clear display
sleep(0.1)
self.writeByte(0x02, 'cmd') # go home
sleep(0.1)
def home(self):
self.writeByte(0x02, 'cmd')
def goto(self, line, offset):
""" definition of a dictionary with the inital index of every line on the lcd """
position = {1:0x00, 2:0x40, 3:0x14, 4:0x54}
""" send the command to the lcd with the desidered position """
self.writeByte( (0x80 | (position[line] + (offset-1))), 'cmd')
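        # Worked example (standard HD44780 DDRAM addressing, for illustration):
        # goto(2, 1) sends 0x80 | (0x40 + 0) = 0xC0, i.e. set the DDRAM address to
        # the start of line 2.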
def writeln(self, line, data):
        # check length of the string to be written
if (len(data) > 20):
print "[ERROR] Wrong data string length"
else:
            # fill all 20 characters of a display line (unused positions are padded with spaces)
data = data.ljust(20)
#Go to selected line
self.goto(line,1)
#Write the string
for i in range(0,20):
self.writeByte(data[i], 'data')
def write(self, text):
""" Send a string to LCD. Line feed character switch to the next line """
for char in text:
if char == "\n":
self.writeByte(0xC0, 'cmd')
else:
self.writeByte(ord(char), 'data')
def writeByte(self, byte, mode):
""" Write most significant nibble first """
if type(byte)==int:
byte = '{:08b}'.format(byte) # format hex value in binary format
else:
byte = '{:08b}'.format(ord(byte)) # byte is a char
#self.GPIO.output(LCD_RW, False) # RW=0 -> Write
""" put data on output port """
for i in range(0, 4):
if byte[i] == '1':
self.GPIO.output(LCD_D[i], True)
else:
self.GPIO.output(LCD_D[i], False)
""" set RS mode """
if mode == 'cmd':
self.GPIO.output(LCD_RS, False)
elif mode == 'data':
self.GPIO.output(LCD_RS, True)
else:
print "[DEBUG] Error in mode selection"
self.enablePulse()
""" put data on output port """
for i in range(4, 8):
if byte[i] == '1':
self.GPIO.output(LCD_D[i-4], True)
else:
self.GPIO.output(LCD_D[i-4], False)
""" set RS mode """
if mode == 'cmd':
GPIO.output(LCD_RS, False)
elif mode == 'data':
GPIO.output(LCD_RS, True)
else:
print "[DEBUG] Error in mode selection"
self.enablePulse()
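        # Worked example of the 4-bit transfer above (derived from this method, for
        # illustration): writeByte(0x28, 'cmd') formats 0x28 as '00101000', latches
        # the high nibble 0010 on DB7..DB4 with one enable pulse, then the low
        # nibble 1000 with a second pulse.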
class LiquidCrystal_8bits:
def __init__(self):
self.GPIO = GPIO
""" GPIO pins setup """
self.GPIO.setup(LCD_RS, GPIO.OUT)
self.GPIO.setup(LCD_E, GPIO.OUT)
#self.GPIO.setup(LCD_RW, GPIO.OUT)
for i in range(0, 8):
GPIO.setup(LCD_D[i], GPIO.OUT)
print "[DEBUG] 8-operation setup: OK"
""" LCD Initialization """
self.GPIO.output(LCD_E,False) # put enable pin low
sleep(0.01)
self.writeByte(0x30,'cmd') # function set
sleep(0.01)
self.writeByte(0x30,'cmd')
sleep(0.01)
self.writeByte(0x30,'cmd')
self.writeByte(0x3C,'cmd') # specify number of lines and character font
self.writeByte(0x08,'cmd') # display off
self.writeByte(0x01,'cmd') # display Clear
self.writeByte(0x05,'cmd') # entry mode set
sleep(0.1)
self.writeByte(0x38, 'cmd') # 8 bit op, 2 lines, 5x8 dots character font
sleep(0.01)
self.writeByte(0x0C, 'cmd') # display on/off control: display on, cursor off
sleep(0.01)
self.writeByte(0x06, 'cmd') # entry mode set: increase address by one, no shift
sleep(0.01)
print "[DEBUG] Init done"
def enablePulse(self):
""" Put Enable pin to HIGH and then put back to LOW """
self.GPIO.output(LCD_E, True)
# wait 1 ms
sleep(0.001)
self.GPIO.output(LCD_E, False)
def clear(self):
self.writeByte(0x01, 'cmd') # clear display
self.writeByte(0x02, 'cmd') # go home
def home(self):
self.writeByte(0x02, 'cmd')
def goto(self, line, offset):
""" definition of a dictionary with the inital index of every line on the lcd """
position = {1:0x00, 2:0x40, 3:0x14, 4:0x54}
""" send the command to the lcd with the desidered position """
self.writeByte( (0x80 | (position[line] + (offset-1))), 'cmd')
def writeln(self, line, data):
        # check length of the string to be written
if (len(data) > 20):
print "[ERROR] Wrong data string length"
else:
            # fill all 20 characters of a display line (unused positions are padded with spaces)
data = data.ljust(20)
#Go to selected line
self.goto(line,1)
#Write the string
for i in range(0,20):
self.writeByte(data[i], 'data')
def write(self, text):
""" Send a string to LCD. Line feed character switch to the next line """
for char in text:
if char == "\n":
self.writeByte(0xC0, 'cmd')
else:
self.writeByte(ord(char), 'data')
def writeByte(self, byte, mode):
""" Write most significant nibble first """
if type(byte)==int:
byte = '{:08b}'.format(byte) # format hex value in binary format
else:
byte = '{:08b}'.format(ord(byte)) # byte is a char
#self.GPIO.output(LCD_RW, False) # RW=0 -> Write
""" put data on output port """
for i in range(0, 8):
if byte[i] == '1':
self.GPIO.output(LCD_D[i], True)
else:
self.GPIO.output(LCD_D[i], False)
""" set RS mode """
if mode == 'cmd':
self.GPIO.output(LCD_RS, False)
elif mode == 'data':
self.GPIO.output(LCD_RS, True)
else:
print "[DEBUG] Error in mode selection"
self.enablePulse()
if __name__ == '__main__':
lcd = LiquidCrystal_4bits()
lcd.clear()
lcd.writeln(1,"W")
lcd.write("Test1\nTest2")
#lcd.writeln(3, 'Hello!')
#lcd.writeln(4, '01234567890123456789')
|
|
import functools
import logging
from copy import deepcopy
from jsonschema import ValidationError
from .decorators import validation
from .decorators.decorator import (BeginOfRequestLifecycleDecorator,
EndOfRequestLifecycleDecorator)
from .decorators.metrics import UWSGIMetricsCollector
from .decorators.parameter import parameter_to_arg
from .decorators.produces import BaseSerializer, Produces
from .decorators.response import ResponseValidator
from .decorators.security import (get_tokeninfo_url, security_passthrough,
verify_oauth)
from .decorators.validation import (ParameterValidator, RequestBodyValidator,
TypeValidationError)
from .exceptions import InvalidSpecification
from .utils import all_json, is_nullable
logger = logging.getLogger('connexion.operation')
DEFAULT_MIMETYPE = 'application/json'
VALIDATOR_MAP = {
'parameter': ParameterValidator,
'body': RequestBodyValidator,
'response': ResponseValidator,
}
class SecureOperation(object):
def __init__(self, api, security, security_definitions):
"""
:param security: list of security rules the application uses by default
:type security: list
:param security_definitions: `Security Definitions Object
<https://github.com/swagger-api/swagger-spec/blob/master/versions/2.0.md#security-definitions-object>`_
:type security_definitions: dict
"""
self.api = api
self.security = security
self.security_definitions = security_definitions
@property
def security_decorator(self):
"""
Gets the security decorator for operation
From Swagger Specification:
**Security Definitions Object**
A declaration of the security schemes available to be used in the specification.
This does not enforce the security schemes on the operations and only serves to provide the relevant details
for each scheme.
**Security Requirement Object**
Lists the required security schemes to execute this operation. The object can have multiple security schemes
declared in it which are all required (that is, there is a logical AND between the schemes).
The name used for each property **MUST** correspond to a security scheme declared in the Security Definitions.
:rtype: types.FunctionType
"""
logger.debug('... Security: %s', self.security, extra=vars(self))
if self.security:
if len(self.security) > 1:
logger.debug("... More than one security requirement defined. **IGNORING SECURITY REQUIREMENTS**",
extra=vars(self))
return security_passthrough
security = self.security[0] # type: dict
# the following line gets the first (and because of the previous condition only) scheme and scopes
# from the operation's security requirements
scheme_name, scopes = next(iter(security.items())) # type: str, list
security_definition = self.security_definitions[scheme_name]
if security_definition['type'] == 'oauth2':
token_info_url = get_tokeninfo_url(security_definition)
if token_info_url:
scopes = set(scopes) # convert scopes to set because this is needed for verify_oauth
return functools.partial(verify_oauth, token_info_url, scopes)
else:
logger.warning("... OAuth2 token info URL missing. **IGNORING SECURITY REQUIREMENTS**",
extra=vars(self))
elif security_definition['type'] in ('apiKey', 'basic'):
logger.debug(
"... Security type '%s' not natively supported by Connexion; you should handle it yourself",
security_definition['type'], extra=vars(self))
        # if we don't know how to handle the security or it's not defined we will use a passthrough decorator
return security_passthrough
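    # Hypothetical input for security_decorator above (key names follow the Swagger 2.0
    # securityDefinitions format; the values are illustrative only):
    #
    #     security_definitions = {
    #         'oauth2': {
    #             'type': 'oauth2',
    #             'flow': 'implicit',
    #             'authorizationUrl': 'https://example.com/oauth2/authorize',
    #             'scopes': {'uid': 'Unique identifier of the user'},
    #         }
    #     }
    #     security = [{'oauth2': ['uid']}]
    #
    # If a token-info URL is configured for that scheme, the decorator returned is
    # functools.partial(verify_oauth, token_info_url, {'uid'}); otherwise the
    # passthrough decorator is used.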
def get_mimetype(self):
return DEFAULT_MIMETYPE
@property
def _request_begin_lifecycle_decorator(self):
"""
        Transforms the result of the operation handler into an internal
representation (connexion.lifecycle.ConnexionRequest) to be
used by internal Connexion decorators.
:rtype: types.FunctionType
"""
return BeginOfRequestLifecycleDecorator(self.api, self.get_mimetype())
@property
def _request_end_lifecycle_decorator(self):
"""
Guarantees that instead of the internal representation of the
operation handler response
(connexion.lifecycle.ConnexionRequest) a framework specific
object is returned.
:rtype: types.FunctionType
"""
return EndOfRequestLifecycleDecorator(self.api, self.get_mimetype())
class Operation(SecureOperation):
"""
A single API operation on a path.
"""
def __init__(self, api, method, path, operation, resolver, app_produces, app_consumes,
path_parameters=None, app_security=None, security_definitions=None,
definitions=None, parameter_definitions=None, response_definitions=None,
validate_responses=False, strict_validation=False, randomize_endpoint=None,
validator_map=None, pythonic_params=False):
"""
        This class uses the OperationID to identify the module and function that will handle the operation
From Swagger Specification:
**OperationID**
A friendly name for the operation. The id MUST be unique among all operations described in the API.
Tools and libraries MAY use the operation id to uniquely identify an operation.
:param method: HTTP method
:type method: str
:param path:
:type path: str
:param operation: swagger operation object
:type operation: dict
:param resolver: Callable that maps operationID to a function
:param app_produces: list of content types the application can return by default
:type app_produces: list
:param app_consumes: list of content types the application consumes by default
:type app_consumes: list
:param validator_map: map of validators
:type validator_map: dict
:param path_parameters: Parameters defined in the path level
:type path_parameters: list
:param app_security: list of security rules the application uses by default
:type app_security: list
:param security_definitions: `Security Definitions Object
<https://github.com/swagger-api/swagger-spec/blob/master/versions/2.0.md#security-definitions-object>`_
:type security_definitions: dict
:param definitions: `Definitions Object
<https://github.com/swagger-api/swagger-spec/blob/master/versions/2.0.md#definitionsObject>`_
:type definitions: dict
:param parameter_definitions: Global parameter definitions
:type parameter_definitions: dict
:param response_definitions: Global response definitions
:type response_definitions: dict
:param validator_map: Custom validators for the types "parameter", "body" and "response".
:type validator_map: dict
:param validate_responses: True enables validation. Validation errors generate HTTP 500 responses.
:type validate_responses: bool
        :param strict_validation: True enables strict validation of request parameters (requests with undefined parameters are rejected)
:type strict_validation: bool
:param pythonic_params: When True CamelCase parameters are converted to snake_case and an underscore is appended
to any shadowed built-ins
:type pythonic_params: bool
"""
self.api = api
self.method = method
self.path = path
self.validator_map = dict(VALIDATOR_MAP)
self.validator_map.update(validator_map or {})
self.security_definitions = security_definitions or {}
self.definitions = definitions or {}
self.parameter_definitions = parameter_definitions or {}
self.response_definitions = response_definitions or {}
self.definitions_map = {
'definitions': self.definitions,
'parameters': self.parameter_definitions,
'responses': self.response_definitions
}
self.validate_responses = validate_responses
self.strict_validation = strict_validation
self.operation = operation
self.randomize_endpoint = randomize_endpoint
self.pythonic_params = pythonic_params
# todo support definition references
# todo support references to application level parameters
self.parameters = list(self.resolve_parameters(operation.get('parameters', [])))
if path_parameters:
self.parameters += list(self.resolve_parameters(path_parameters))
self.security = operation.get('security', app_security)
self.produces = operation.get('produces', app_produces)
self.consumes = operation.get('consumes', app_consumes)
resolution = resolver.resolve(self)
self.operation_id = resolution.operation_id
self.__undecorated_function = resolution.function
self.validate_defaults()
def validate_defaults(self):
for param in self.parameters:
try:
if param['in'] == 'query' and 'default' in param:
validation.validate_type(param, param['default'], 'query', param['name'])
except (TypeValidationError, ValidationError):
raise InvalidSpecification('The parameter \'{param_name}\' has a default value which is not of'
' type \'{param_type}\''.format(param_name=param['name'],
param_type=param['type']))
def resolve_reference(self, schema):
schema = deepcopy(schema) # avoid changing the original schema
self.check_references(schema)
# find the object we need to resolve/update if this is not a proper SchemaObject
# e.g a response or parameter object
for obj in schema, schema.get('items'):
reference = obj and obj.get('$ref') # type: str
if reference:
break
if reference:
definition = deepcopy(self._retrieve_reference(reference))
# Update schema
obj.update(definition)
del obj['$ref']
# if there is a schema object on this param or response, then we just
# need to include the defs and it can be validated by jsonschema
if 'schema' in schema:
schema['schema']['definitions'] = self.definitions
return schema
return schema
def check_references(self, schema):
"""
Searches the keys and values of a schema object for json references.
        If it finds one, it attempts to locate it and will throw an exception
if the reference can't be found in the definitions dictionary.
:param schema: The schema object to check
:type schema: dict
:raises InvalidSpecification: raised when a reference isn't found
"""
stack = [schema]
visited = set()
while stack:
schema = stack.pop()
for k, v in schema.items():
if k == "$ref":
if v in visited:
continue
visited.add(v)
stack.append(self._retrieve_reference(v))
elif isinstance(v, (list, tuple)):
continue
elif hasattr(v, "items"):
stack.append(v)
def _retrieve_reference(self, reference):
if not reference.startswith('#/'):
raise InvalidSpecification(
"{method} {path} '$ref' needs to start with '#/'".format(**vars(self)))
path = reference.split('/')
definition_type = path[1]
try:
definitions = self.definitions_map[definition_type]
except KeyError:
ref_possible = ', '.join(self.definitions_map.keys())
raise InvalidSpecification(
"{method} {path} $ref \"{reference}\" needs to point to one of: "
"{ref_possible}".format(
method=self.method,
path=self.path,
reference=reference,
ref_possible=ref_possible
))
definition_name = path[-1]
try:
# Get sub definition
definition = deepcopy(definitions[definition_name])
except KeyError:
raise InvalidSpecification(
"{method} {path} Definition '{definition_name}' not found".format(
definition_name=definition_name, method=self.method, path=self.path))
return definition
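    # Illustrative walk-through of _retrieve_reference above: for '#/definitions/Pet',
    # path[1] == 'definitions' selects self.definitions and path[-1] == 'Pet' yields a
    # deep copy of self.definitions['Pet']; unknown sections or names raise
    # InvalidSpecification. ('Pet' is just an example name, not part of this module.)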
def get_mimetype(self):
"""
If the endpoint has no 'produces' then the default is
'application/json'.
        :rtype: str
"""
if all_json(self.produces):
try:
return self.produces[0]
except IndexError:
return DEFAULT_MIMETYPE
elif len(self.produces) == 1:
return self.produces[0]
else:
return DEFAULT_MIMETYPE
def resolve_parameters(self, parameters):
for param in parameters:
param = self.resolve_reference(param)
yield param
def get_path_parameter_types(self):
return {p['name']: 'path' if p.get('type') == 'string' and p.get('format') == 'path' else p.get('type')
for p in self.parameters if p['in'] == 'path'}
@property
def body_schema(self):
"""
The body schema definition for this operation.
"""
return self.body_definition.get('schema')
@property
def body_definition(self):
"""
The body complete definition for this operation.
**There can be one "body" parameter at most.**
:rtype: dict
"""
body_parameters = [parameter for parameter in self.parameters if parameter['in'] == 'body']
if len(body_parameters) > 1:
raise InvalidSpecification(
"{method} {path} There can be one 'body' parameter at most".format(**vars(self)))
return body_parameters[0] if body_parameters else {}
@property
def function(self):
"""
Operation function with decorators
:rtype: types.FunctionType
"""
function = parameter_to_arg(
self.parameters, self.consumes, self.__undecorated_function, self.pythonic_params)
function = self._request_begin_lifecycle_decorator(function)
if self.validate_responses:
logger.debug('... Response validation enabled.')
response_decorator = self.__response_validation_decorator
logger.debug('... Adding response decorator (%r)', response_decorator)
function = response_decorator(function)
produces_decorator = self.__content_type_decorator
logger.debug('... Adding produces decorator (%r)', produces_decorator)
function = produces_decorator(function)
for validation_decorator in self.__validation_decorators:
function = validation_decorator(function)
# NOTE: the security decorator should be applied last to check auth before anything else :-)
security_decorator = self.security_decorator
logger.debug('... Adding security decorator (%r)', security_decorator)
function = security_decorator(function)
if UWSGIMetricsCollector.is_available(): # pragma: no cover
decorator = UWSGIMetricsCollector(self.path, self.method)
function = decorator(function)
function = self._request_end_lifecycle_decorator(function)
return function
@property
def __content_type_decorator(self):
"""
Get produces decorator.
If the operation mimetype format is json then the function return value is jsonified
From Swagger Specification:
**Produces**
A list of MIME types the operation can produce. This overrides the produces definition at the Swagger Object.
An empty value MAY be used to clear the global definition.
:rtype: types.FunctionType
"""
logger.debug('... Produces: %s', self.produces, extra=vars(self))
mimetype = self.get_mimetype()
if all_json(self.produces): # endpoint will return json
logger.debug('... Produces json', extra=vars(self))
# TODO: Refactor this.
return lambda f: f
elif len(self.produces) == 1:
logger.debug('... Produces %s', mimetype, extra=vars(self))
decorator = Produces(mimetype)
return decorator
else:
return BaseSerializer()
@property
def __validation_decorators(self):
"""
:rtype: types.FunctionType
"""
ParameterValidator = self.validator_map['parameter']
RequestBodyValidator = self.validator_map['body']
if self.parameters:
yield ParameterValidator(self.parameters,
self.api,
strict_validation=self.strict_validation)
if self.body_schema:
yield RequestBodyValidator(self.body_schema, self.consumes, self.api,
is_nullable(self.body_definition))
@property
def __response_validation_decorator(self):
"""
Get a decorator for validating the generated Response.
:rtype: types.FunctionType
"""
ResponseValidator = self.validator_map['response']
return ResponseValidator(self, self.get_mimetype())
def json_loads(self, data):
"""
A wrapper for calling the API specific JSON loader.
:param data: The JSON data in textual form.
:type data: bytes
"""
return self.api.json_loads(data)
|
|
"""
.. _intro_basic_tracking:
==============================
Introduction to Basic Tracking
==============================
Local fiber tracking is an approach used to model white matter fibers by
creating streamlines from local directional information. The idea is as
follows: if the local directionality of a tract/pathway segment is known, one
can integrate along those directions to build a complete representation of that
structure. Local fiber tracking is widely used in the field of diffusion MRI
because it is simple and robust.
In order to perform local fiber tracking, three things are needed: 1) A method
for getting directions from a diffusion data set. 2) A method for identifying
when the tracking must stop. 3) A set of seeds from which to
begin tracking. This example shows how to combine the 3 parts described above
to create a tractography reconstruction from a diffusion data set.
"""
"""
To begin, let's load an example HARDI data set from Stanford. If you have
not already downloaded this data set, the first time you run this example you
will need to be connected to the internet and this dataset will be downloaded
to your computer.
"""
# Enables/disables interactive visualization
interactive = False
from dipy.core.gradients import gradient_table
from dipy.data import get_fnames
from dipy.io.gradients import read_bvals_bvecs
from dipy.io.image import load_nifti, load_nifti_data
hardi_fname, hardi_bval_fname, hardi_bvec_fname = get_fnames('stanford_hardi')
label_fname = get_fnames('stanford_labels')
data, affine, hardi_img = load_nifti(hardi_fname, return_img=True)
labels = load_nifti_data(label_fname)
bvals, bvecs = read_bvals_bvecs(hardi_bval_fname, hardi_bvec_fname)
gtab = gradient_table(bvals, bvecs)
"""
This dataset provides a label map in which all white matter tissues are
labeled either 1 or 2. Let's create a white matter mask to restrict tracking to
the white matter.
"""
white_matter = (labels == 1) | (labels == 2)
"""
1. The first thing we need to begin fiber tracking is a way of getting
directions from this diffusion data set. In order to do that, we can fit the
data to a Constant Solid Angle ODF Model. This model will estimate the
Orientation Distribution Function (ODF) at each voxel. The ODF is the
distribution of water diffusion as a function of direction. The peaks of an ODF
are good estimates for the orientation of tract segments at a point in the
image. Here, we use ``peaks_from_model`` to fit the data and calculate the
fiber directions in all voxels of the white matter.
"""
from dipy.reconst.csdeconv import auto_response
from dipy.reconst.shm import CsaOdfModel
from dipy.data import default_sphere
from dipy.direction import peaks_from_model
response, ratio = auto_response(gtab, data, roi_radius=10, fa_thr=0.7)
csa_model = CsaOdfModel(gtab, sh_order=6)
csa_peaks = peaks_from_model(csa_model, data, default_sphere,
relative_peak_threshold=.8,
min_separation_angle=45,
mask=white_matter)
"""
For quality assurance we can also visualize a slice from the direction field
which we will use as the basis to perform the tracking. The visualization will
be done using the ``fury`` python package.
"""
from dipy.viz import window, actor, has_fury
if has_fury:
ren = window.Renderer()
ren.add(actor.peak_slicer(csa_peaks.peak_dirs,
csa_peaks.peak_values,
colors=None))
window.record(ren, out_path='csa_direction_field.png', size=(900, 900))
if interactive:
window.show(ren, size=(800, 800))
"""
.. figure:: csa_direction_field.png
:align: center
**Direction Field (peaks)**
"""
"""
2. Next we need some way of restricting the fiber tracking to areas with good
directionality information. We've already created the white matter mask,
but we can go a step further and restrict fiber tracking to those areas where
the ODF shows significant restricted diffusion by thresholding on
the generalized fractional anisotropy (GFA).
"""
from dipy.tracking.stopping_criterion import ThresholdStoppingCriterion
stopping_criterion = ThresholdStoppingCriterion(csa_peaks.gfa, .25)
"""
Again, for quality assurance, we can also visualize a slice of the GFA and the
resulting tracking mask.
"""
import matplotlib.pyplot as plt
sli = csa_peaks.gfa.shape[2] // 2
plt.figure('GFA')
plt.subplot(1, 2, 1).set_axis_off()
plt.imshow(csa_peaks.gfa[:, :, sli].T, cmap='gray', origin='lower')
plt.subplot(1, 2, 2).set_axis_off()
plt.imshow((csa_peaks.gfa[:, :, sli] > 0.25).T, cmap='gray', origin='lower')
plt.savefig('gfa_tracking_mask.png')
"""
.. figure:: gfa_tracking_mask.png
:align: center
An example of tracking mask derived from the generalized fractional
anisotropy (GFA).
"""
"""
3. Before we can begin tracking, we need to specify where to "seed" (begin) the fiber
tracking. Generally, the seeds chosen will depend on the pathways one is
interested in modeling. In this example, we'll use a $2 \times 2 \times 2$ grid
of seeds per voxel, in a sagittal slice of the corpus callosum. Tracking from
this region will give us a model of the corpus callosum tract. This slice has
label value ``2`` in the labels image.
"""
from dipy.tracking import utils
seed_mask = (labels == 2)
seeds = utils.seeds_from_mask(seed_mask, affine, density=[2, 2, 2])
"""
Finally, we can bring it all together using ``LocalTracking`` with the EuDX
algorithm [Garyfallidis12]_. ``EuDX`` is a fast algorithm that we use here to
generate streamlines. It is also the default option when the output of
``peaks_from_model`` is passed directly to ``LocalTracking``.
"""
from dipy.tracking.local_tracking import LocalTracking
from dipy.tracking.streamline import Streamlines
# Initialization of LocalTracking. The computation happens in the next step.
streamlines_generator = LocalTracking(csa_peaks, stopping_criterion, seeds,
affine=affine, step_size=.5)
# Generate streamlines object
streamlines = Streamlines(streamlines_generator)
"""
We will then display the resulting streamlines using the ``fury``
Python package.
"""
from dipy.viz import colormap
if has_fury:
# Prepare the display objects.
color = colormap.line_colors(streamlines)
streamlines_actor = actor.line(streamlines,
colormap.line_colors(streamlines))
# Create the 3D display.
r = window.Renderer()
r.add(streamlines_actor)
    # Save a still image for this static example; for interactivity, use window.show below.
window.record(r, out_path='tractogram_EuDX.png', size=(800, 800))
if interactive:
window.show(r)
"""
.. figure:: tractogram_EuDX.png
:align: center
**Corpus Callosum using EuDX**
We've created a deterministic set of streamlines using the EuDX algorithm. This
is called deterministic tracking because, if you repeat the fiber tracking
(keeping all the inputs the same), you will get exactly the same set of streamlines.
We can save the streamlines as a Trackvis file so it can be loaded into other
software for visualization or further analysis.
"""
from dipy.io.stateful_tractogram import Space, StatefulTractogram
from dipy.io.streamline import save_trk
sft = StatefulTractogram(streamlines, hardi_img, Space.RASMM)
save_trk(sft, "tractogram_EuDX.trk", streamlines)
"""
References
----------
.. [Garyfallidis12] Garyfallidis E., "Towards an accurate brain tractography",
PhD thesis, University of Cambridge, 2012.
.. include:: ../links_names.inc
"""
|
|
# lesson2.py
# Stand-alone script to run the code from the lesson2-matsaleh.ipynb Jupyter Notebook.
'''
Lesson 3 Assignment Plan:
1. Start with Vgg16 model with binary output and weights from lesson2.5.py.
2. Create an overfitted model:
a. Split conv and FC layers into two separate models.
b. Precalculate FC layer inputs from conv layer output.
c. Remove dropout from the FC model.
d. Fit the FC model to the data.
e. Save the weights.
3. Add data augmentation to the training set:
a. Combine the Conv (locked) and FC models.
b. Compile and train the combined model.
c. Save the weights.
4. Add batchnorm to the combined model:
a. Create a standalone model from the Vgg16bn model's BN layers.
b. Fit the BN model to the data.
c. Save the weights.
d. Create another BN model and combine it with the conv model into a final model.
e. Set the BN layer weights from the first BN model (why not just combine *that* BN model with the conv model?).
f. Save the weights.
5. Fit the final model:
a. Incrementally, saving the weights along the way.
lesson3.0.py:
- Based on lesson2.5.py
- now with functions
'''
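#
# Illustrative sketch (an assumption, not called anywhere below): steps 2a-2b of
# the plan above, i.e. splitting the Vgg16 model into conv and FC parts and
# precomputing the conv-stack outputs. The helper names `split_conv_fc` and
# `precompute_conv_features` are hypothetical, not part of the vgg16 module;
# `batches` is assumed to be a Keras 1 directory iterator (e.g. from utils.get_batches).
#
def split_conv_fc(model):
    # The last Convolution2D layer marks the end of the conv stack;
    # everything after it belongs to the FC stack.
    last_conv_idx = max(i for i, layer in enumerate(model.layers)
                        if isinstance(layer, Convolution2D))
    conv_model = Sequential(model.layers[:last_conv_idx + 1])
    fc_layers = model.layers[last_conv_idx + 1:]
    return conv_model, fc_layers

def precompute_conv_features(conv_model, batches):
    # Run the frozen conv stack over the data once, so the FC model can be
    # fit on cached activations instead of recomputing them every epoch.
    return conv_model.predict_generator(batches, batches.nb_sample)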
import os
import os.path
import click
import utils
from vgg16 import Vgg16
import numpy as np
from sklearn.preprocessing import OneHotEncoder
from sklearn.metrics import confusion_matrix
from numpy.random import random, permutation
from scipy import misc, ndimage
from scipy.ndimage.interpolation import zoom
import keras
from keras import backend as K
from keras.utils.data_utils import get_file
from keras.models import Sequential
from keras.layers import Input
from keras.layers.core import Flatten, Dense, Dropout, Lambda
from keras.layers.convolutional import Convolution2D, MaxPooling2D, ZeroPadding2D
from keras.optimizers import SGD, RMSprop, Nadam
from keras.preprocessing import image
#
# Utility Functions
#
def onehot(x):
    # Returns a two-column one-hot matrix with one row per sample and one column per class.
return np.array(OneHotEncoder().fit_transform(x.reshape(-1, 1)).todense())
#
# Constants
#
INPUT_PATH = None
OUTPUT_PATH = None
TRAIN_PATH = None
VALID_PATH = None
TEST_PATH = None
MODEL_PATH = None
RESULTS_PATH = None
BATCH_SIZE = None
NUM_EPOCHS = None
#
# Data Setup
#
def setup_folders():
click.echo()
click.echo('Setting up folders...')
click.echo()
click.echo('Input folder: %s' % INPUT_PATH)
global TRAIN_PATH
TRAIN_PATH = os.path.join(INPUT_PATH, 'train')
click.echo('Training data: %s' % TRAIN_PATH)
global VALID_PATH
VALID_PATH = os.path.join(INPUT_PATH, 'valid')
click.echo('Validation data: %s' % VALID_PATH)
global TEST_PATH
TEST_PATH = os.path.join(INPUT_PATH, 'test')
click.echo('Test data: %s' % TEST_PATH)
click.echo()
click.echo('Output folder: %s' % OUTPUT_PATH)
global MODEL_PATH
MODEL_PATH = os.path.join(OUTPUT_PATH, 'models')
if not os.path.exists(MODEL_PATH): os.makedirs(MODEL_PATH)
click.echo('Model data: %s' % MODEL_PATH)
global RESULTS_PATH
RESULTS_PATH = os.path.join(OUTPUT_PATH, 'results')
if not os.path.exists(RESULTS_PATH): os.makedirs(RESULTS_PATH)
click.echo('Results: %s' % RESULTS_PATH)
click.echo()
def load_data():
#
    # NOTE: Loading and use of data structures is pretty messy here.
# Some things require getting data from generators, others require NumPy arrays.
# In the end we use both, and sometimes re-load the data from disk and/or re-transform
# it more than once.
#
click.echo('Loading raw training data from %s...' % TRAIN_PATH)
global TRAIN_BATCHES
TRAIN_BATCHES = utils.get_batches(TRAIN_PATH, shuffle=False, batch_size=1)
click.echo('Loading array from generator...')
global TRAIN_ARRAY
TRAIN_ARRAY = utils.get_data(TRAIN_PATH)
click.echo('\tshape: %s' % (TRAIN_ARRAY.shape,))
click.echo()
# TRAIN_DATA = os.path.join(MODEL_PATH, 'train_data.bc')
# click.echo('Saving processed training data to %s...' % TRAIN_DATA)
# utils.save_array(TRAIN_DATA, TRAIN_ARRAY)
click.echo('Loading raw validation data from %s...' % VALID_PATH)
global VALID_BATCHES
VALID_BATCHES = utils.get_batches(VALID_PATH, shuffle=False, batch_size=1)
click.echo('Loading array from generator...')
global VALID_ARRAY
VALID_ARRAY = utils.get_data(VALID_PATH)
click.echo('\tshape: %s' % (VALID_ARRAY.shape,))
click.echo()
def get_true_labels():
click.echo('Getting the true labels for every image...')
global TRAIN_CLASSES
TRAIN_CLASSES = TRAIN_BATCHES.classes
global TRAIN_LABELS
TRAIN_LABELS = onehot(TRAIN_CLASSES)
# click.echo('\tTraining labels look like this: \n%s\n...\n%s' % (TRAIN_LABELS[:5], TRAIN_LABELS[-5:]))
# click.echo()
global VALID_CLASSES
VALID_CLASSES = VALID_BATCHES.classes
global VALID_LABELS
VALID_LABELS = onehot(VALID_CLASSES)
# click.echo('\tValidation labels look like this: \n%s\n...\n%s' % (VALID_LABELS[:5], VALID_LABELS[-5:]))
# click.echo()
def prepare_generators():
click.echo('Preparing image data generators...')
gen = image.ImageDataGenerator()
# NOTE: Why do we overwrite these generators using the arrays?
# TRAIN_BATCHES and VALID_BATCHES here are generators,
# but still not quite the same as above.
global TRAIN_BATCHES
TRAIN_BATCHES = gen.flow(TRAIN_ARRAY, TRAIN_LABELS,
batch_size=BATCH_SIZE, shuffle=True)
global VALID_BATCHES
VALID_BATCHES = gen.flow(VALID_ARRAY, VALID_LABELS,
batch_size=BATCH_SIZE, shuffle=False)
def create_model():
vgg = Vgg16()
vgg.model.pop()
click.echo('Replacing last layer of model...')
for layer in vgg.model.layers: layer.trainable=False
vgg.model.add(Dense(2, activation='softmax'))
# OPTIMIZER = Nadam()
OPTIMIZER = RMSprop(lr=0.001)
vgg.model.compile(optimizer=OPTIMIZER, loss='categorical_crossentropy', metrics=['accuracy'])
return vgg, OPTIMIZER
def fit_model(model, opt):
# First epoch higher LR
LR=0.01
K.set_value(opt.lr, LR)
click.echo('Fitting last layer of model using LR=%s' % LR)
model.fit_generator(TRAIN_BATCHES, samples_per_epoch=TRAIN_BATCHES.n, nb_epoch=NUM_EPOCHS,
validation_data=VALID_BATCHES, nb_val_samples=VALID_BATCHES.n)
# Next batch, lower again
LR=0.001
K.set_value(opt.lr, LR)
click.echo('Fitting last layer of model using LR=%s' % LR)
model.fit_generator(TRAIN_BATCHES, samples_per_epoch=TRAIN_BATCHES.n, nb_epoch=NUM_EPOCHS,
validation_data=VALID_BATCHES, nb_val_samples=VALID_BATCHES.n)
click.echo('Saving model weights...')
model.save_weights(os.path.join(MODEL_PATH, 'finetune_1_ll.h5'))
def eval_model(model):
click.echo('Evaluating model with validation data...')
TEST_LOSS = model.evaluate(VALID_ARRAY, VALID_LABELS)
click.echo('TEST_LOSS: %s' % (TEST_LOSS,))
click.echo('Confusion matrix after last layer retraining')
PREDS = model.predict_classes(VALID_ARRAY, batch_size=BATCH_SIZE)
PROBS = model.predict_proba(VALID_ARRAY, batch_size=BATCH_SIZE)[:, 0]
CM = confusion_matrix(VALID_CLASSES, PREDS)
click.echo(CM)
def predict(model):
click.echo('Predicting labels for test data set...')
TEST_BATCHES = utils.get_batches(TEST_PATH, shuffle=False, batch_size=BATCH_SIZE)
TEST_PREDS = model.predict_generator(TEST_BATCHES, TEST_BATCHES.nb_sample)
TEST_FILENAMES = TEST_BATCHES.filenames
#Save our test results arrays so we can use them again later
# click.echo('Saving raw prediction results.')
# utils.save_array(os.path.join(MODEL_PATH, 'test_preds.dat'), TEST_PREDS)
# utils.save_array(os.path.join(MODEL_PATH, 'filenames.dat'), TEST_FILENAMES)
# Grab the dog prediction column
IS_DOG = TEST_PREDS[:, 1]
# To play it safe, we use a sneaky trick to round down our edge predictions
# Swap all ones with .95 and all zeros with .05
IS_DOG = IS_DOG.clip(min=0.05, max=0.95)
# Extract imageIds from the filenames in our test/unknown directory
IDS = np.array([int(os.path.splitext(os.path.basename(f))[0])
for f in TEST_FILENAMES])
# Combine the ids and IS_DOG columns into a single 2-column array.
SUBMIT = np.stack([IDS, IS_DOG], axis=1)
click.echo('Formatting and saving data for Kaggle submission.')
np.savetxt(os.path.join(RESULTS_PATH, 'kaggle_submission.csv'), SUBMIT,
fmt='%d,%.5f', header='id,label', comments='')
click.echo('Model training and prediction complete.')
@click.command()
@click.option('--sample', is_flag=True, default=True, help='Use sample dataset for training.')
@click.option('--sample-set', default='sample', help='Sample dataset to train on.')
@click.option('--local', default=True, help='Local environment (vs. FloydHub)')
def main(sample, sample_set, local):
global BATCH_SIZE
global NUM_EPOCHS
global INPUT_PATH
global OUTPUT_PATH
if local:
BATCH_SIZE = 32
else:
BATCH_SIZE = 64
INPUT_PATH = os.path.join('.', 'input')
OUTPUT_PATH = os.path.join('.', 'output')
if sample:
INPUT_PATH = os.path.join(INPUT_PATH, sample_set)
OUTPUT_PATH = os.path.join(OUTPUT_PATH, sample_set)
NUM_EPOCHS = 4
else:
NUM_EPOCHS = 10
setup_folders()
load_data()
get_true_labels()
prepare_generators()
vgg, opt = create_model()
fit_model(vgg.model, opt)
eval_model(vgg.model)
predict(vgg.model)
if __name__ == '__main__':
main()
|
|
# -*- coding: utf-8 -*-
# Copyright 2013 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import web
from nailgun.extensions import get_all_extensions
from nailgun.api.v1.handlers.assignment import NodeAssignmentHandler
from nailgun.api.v1.handlers.assignment import NodeUnassignmentHandler
from nailgun.api.v1.handlers.capacity import CapacityLogCsvHandler
from nailgun.api.v1.handlers.capacity import CapacityLogHandler
from nailgun.api.v1.handlers.cluster import ClusterAttributesDefaultsHandler
from nailgun.api.v1.handlers.cluster import ClusterAttributesHandler
from nailgun.api.v1.handlers.cluster import ClusterChangesHandler
from nailgun.api.v1.handlers.cluster import ClusterCollectionHandler
from nailgun.api.v1.handlers.cluster import ClusterDeploymentTasksHandler
from nailgun.api.v1.handlers.cluster import ClusterGeneratedData
from nailgun.api.v1.handlers.cluster import ClusterHandler
from nailgun.api.v1.handlers.cluster import ClusterResetHandler
from nailgun.api.v1.handlers.cluster import ClusterStopDeploymentHandler
from nailgun.api.v1.handlers.cluster import ClusterUpdateHandler
from nailgun.api.v1.handlers.cluster import VmwareAttributesDefaultsHandler
from nailgun.api.v1.handlers.cluster import VmwareAttributesHandler
from nailgun.api.v1.handlers.logs import LogEntryCollectionHandler
from nailgun.api.v1.handlers.logs import LogPackageDefaultConfig
from nailgun.api.v1.handlers.logs import LogPackageHandler
from nailgun.api.v1.handlers.logs import LogSourceByNodeCollectionHandler
from nailgun.api.v1.handlers.logs import LogSourceCollectionHandler
from nailgun.api.v1.handlers.logs import SnapshotDownloadHandler
from nailgun.api.v1.handlers.node_group import NodeGroupCollectionHandler
from nailgun.api.v1.handlers.node_group import NodeGroupHandler
from nailgun.api.v1.handlers.network_configuration \
import NeutronNetworkConfigurationHandler
from nailgun.api.v1.handlers.network_configuration \
import NeutronNetworkConfigurationVerifyHandler
from nailgun.api.v1.handlers.network_configuration \
import NovaNetworkConfigurationHandler
from nailgun.api.v1.handlers.network_configuration \
import NovaNetworkConfigurationVerifyHandler
from nailgun.api.v1.handlers.node import NodeAgentHandler
from nailgun.api.v1.handlers.node import NodeCollectionHandler
from nailgun.api.v1.handlers.node import NodeHandler
from nailgun.api.v1.handlers.node import NodesAllocationStatsHandler
from nailgun.api.v1.handlers.plugin import PluginCollectionHandler
from nailgun.api.v1.handlers.plugin import PluginHandler
from nailgun.api.v1.handlers.plugin import PluginSyncHandler
from nailgun.api.v1.handlers.node import NodeCollectionNICsDefaultHandler
from nailgun.api.v1.handlers.node import NodeCollectionNICsHandler
from nailgun.api.v1.handlers.node import NodeNICsDefaultHandler
from nailgun.api.v1.handlers.node import NodeNICsHandler
from nailgun.api.v1.handlers.notifications import NotificationCollectionHandler
from nailgun.api.v1.handlers.notifications import NotificationHandler
from nailgun.api.v1.handlers.orchestrator import DefaultDeploymentInfo
from nailgun.api.v1.handlers.orchestrator import DefaultPostPluginsHooksInfo
from nailgun.api.v1.handlers.orchestrator import DefaultPrePluginsHooksInfo
from nailgun.api.v1.handlers.orchestrator import DefaultProvisioningInfo
from nailgun.api.v1.handlers.orchestrator import DeploymentInfo
from nailgun.api.v1.handlers.orchestrator import DeploySelectedNodes
from nailgun.api.v1.handlers.orchestrator import DeploySelectedNodesWithTasks
from nailgun.api.v1.handlers.orchestrator import ProvisioningInfo
from nailgun.api.v1.handlers.orchestrator import ProvisionSelectedNodes
from nailgun.api.v1.handlers.orchestrator import TaskDeployGraph
from nailgun.api.v1.handlers.registration import FuelLoginForm
from nailgun.api.v1.handlers.registration import FuelRegistrationForm
from nailgun.api.v1.handlers.registration import FuelRestorePasswordForm
from nailgun.api.v1.handlers.release import ReleaseCollectionHandler
from nailgun.api.v1.handlers.release import ReleaseDeploymentTasksHandler
from nailgun.api.v1.handlers.release import ReleaseHandler
from nailgun.api.v1.handlers.release import ReleaseNetworksHandler
from nailgun.api.v1.handlers.role import ClusterRolesCollectionHandler
from nailgun.api.v1.handlers.role import ClusterRolesHandler
from nailgun.api.v1.handlers.role import RoleCollectionHandler
from nailgun.api.v1.handlers.role import RoleHandler
from nailgun.api.v1.handlers.tasks import TaskCollectionHandler
from nailgun.api.v1.handlers.tasks import TaskHandler
from nailgun.api.v1.handlers.version import VersionHandler
from nailgun.api.v1.handlers.vms import NodeVMsHandler
from nailgun.api.v1.handlers.vms import SpawnVmsHandler
from nailgun.api.v1.handlers.removed import RemovedIn51RedHatAccountHandler
from nailgun.api.v1.handlers.removed import RemovedIn51RedHatSetupHandler
from nailgun.api.v1.handlers.master_node_settings \
import MasterNodeSettingsHandler
urls = (
r'/releases/?$',
ReleaseCollectionHandler,
r'/releases/(?P<obj_id>\d+)/?$',
ReleaseHandler,
r'/releases/(?P<obj_id>\d+)/networks/?$',
ReleaseNetworksHandler,
r'/releases/(?P<obj_id>\d+)/deployment_tasks/?$',
ReleaseDeploymentTasksHandler,
r'/releases/(?P<release_id>\d+)/roles/?$',
RoleCollectionHandler,
r'/releases/(?P<release_id>\d+)/roles/(?P<role_name>[a-zA-Z-_]+)/?$',
RoleHandler,
r'/clusters/(?P<cluster_id>\d+)/roles/?$',
ClusterRolesCollectionHandler,
r'/clusters/(?P<cluster_id>\d+)/roles/(?P<role_name>[a-zA-Z-_]+)/?$',
ClusterRolesHandler,
r'/clusters/?$',
ClusterCollectionHandler,
r'/clusters/(?P<obj_id>\d+)/?$',
ClusterHandler,
r'/clusters/(?P<cluster_id>\d+)/changes/?$',
ClusterChangesHandler,
r'/clusters/(?P<cluster_id>\d+)/attributes/?$',
ClusterAttributesHandler,
r'/clusters/(?P<cluster_id>\d+)/attributes/defaults/?$',
ClusterAttributesDefaultsHandler,
# nova network-related
r'/clusters/(?P<cluster_id>\d+)/network_configuration/nova_network/?$',
NovaNetworkConfigurationHandler,
r'/clusters/(?P<cluster_id>\d+)/network_configuration/'
'nova_network/verify/?$',
NovaNetworkConfigurationVerifyHandler,
# neutron-related
r'/clusters/(?P<cluster_id>\d+)/network_configuration/neutron/?$',
NeutronNetworkConfigurationHandler,
r'/clusters/(?P<cluster_id>\d+)/network_configuration/'
'neutron/verify/?$',
NeutronNetworkConfigurationVerifyHandler,
r'/clusters/(?P<cluster_id>\d+)/orchestrator/deployment/?$',
DeploymentInfo,
r'/clusters/(?P<cluster_id>\d+)/orchestrator/deployment/defaults/?$',
DefaultDeploymentInfo,
r'/clusters/(?P<cluster_id>\d+)/orchestrator/provisioning/?$',
ProvisioningInfo,
r'/clusters/(?P<cluster_id>\d+)/orchestrator/provisioning/defaults/?$',
DefaultProvisioningInfo,
r'/clusters/(?P<cluster_id>\d+)/generated/?$',
ClusterGeneratedData,
r'/clusters/(?P<cluster_id>\d+)/orchestrator/plugins_pre_hooks/?$',
DefaultPrePluginsHooksInfo,
r'/clusters/(?P<cluster_id>\d+)/orchestrator/plugins_post_hooks/?$',
DefaultPostPluginsHooksInfo,
r'/clusters/(?P<cluster_id>\d+)/provision/?$',
ProvisionSelectedNodes,
r'/clusters/(?P<cluster_id>\d+)/deploy/?$',
DeploySelectedNodes,
r'/clusters/(?P<cluster_id>\d+)/deploy_tasks/?$',
DeploySelectedNodesWithTasks,
r'/clusters/(?P<cluster_id>\d+)/deploy_tasks/graph.gv$',
TaskDeployGraph,
r'/clusters/(?P<cluster_id>\d+)/stop_deployment/?$',
ClusterStopDeploymentHandler,
r'/clusters/(?P<cluster_id>\d+)/reset/?$',
ClusterResetHandler,
r'/clusters/(?P<cluster_id>\d+)/update/?$',
ClusterUpdateHandler,
r'/clusters/(?P<obj_id>\d+)/deployment_tasks/?$',
ClusterDeploymentTasksHandler,
r'/clusters/(?P<cluster_id>\d+)/spawn_vms/?$',
SpawnVmsHandler,
r'/clusters/(?P<cluster_id>\d+)/assignment/?$',
NodeAssignmentHandler,
r'/clusters/(?P<cluster_id>\d+)/unassignment/?$',
NodeUnassignmentHandler,
r'/clusters/(?P<cluster_id>\d+)/vmware_attributes/?$',
VmwareAttributesHandler,
r'/clusters/(?P<cluster_id>\d+)/vmware_attributes/defaults/?$',
VmwareAttributesDefaultsHandler,
r'/nodegroups/?$',
NodeGroupCollectionHandler,
r'/nodegroups/(?P<obj_id>\d+)/?$',
NodeGroupHandler,
r'/nodes/?$',
NodeCollectionHandler,
r'/nodes/agent/?$',
NodeAgentHandler,
r'/nodes/(?P<obj_id>\d+)/?$',
NodeHandler,
r'/nodes/interfaces/?$',
NodeCollectionNICsHandler,
r'/nodes/interfaces/default_assignment/?$',
NodeCollectionNICsDefaultHandler,
r'/nodes/(?P<node_id>\d+)/interfaces/?$',
NodeNICsHandler,
r'/nodes/(?P<node_id>\d+)/interfaces/default_assignment/?$',
NodeNICsDefaultHandler,
r'/nodes/allocation/stats/?$',
NodesAllocationStatsHandler,
r'/nodes/(?P<node_id>\d+)/vms_conf/?$',
NodeVMsHandler,
r'/tasks/?$',
TaskCollectionHandler,
r'/tasks/(?P<obj_id>\d+)/?$',
TaskHandler,
r'/plugins/(?P<obj_id>\d+)/?$',
PluginHandler,
r'/plugins/?$',
PluginCollectionHandler,
r'/plugins/sync/?$',
PluginSyncHandler,
r'/notifications/?$',
NotificationCollectionHandler,
r'/notifications/(?P<obj_id>\d+)/?$',
NotificationHandler,
r'/dump/(?P<snapshot_name>[A-Za-z0-9-_.]+)$',
SnapshotDownloadHandler,
r'/logs/?$',
LogEntryCollectionHandler,
r'/logs/package/?$',
LogPackageHandler,
r'/logs/package/config/default/?$',
LogPackageDefaultConfig,
r'/logs/sources/?$',
LogSourceCollectionHandler,
r'/logs/sources/nodes/(?P<node_id>\d+)/?$',
LogSourceByNodeCollectionHandler,
r'/tracking/registration/?$',
FuelRegistrationForm,
r'/tracking/login/?$',
FuelLoginForm,
r'/tracking/restore_password/?$',
FuelRestorePasswordForm,
r'/version/?$',
VersionHandler,
r'/capacity/?$',
CapacityLogHandler,
r'/capacity/csv/?$',
CapacityLogCsvHandler,
r'/redhat/account/?$',
RemovedIn51RedHatAccountHandler,
r'/redhat/setup/?$',
RemovedIn51RedHatSetupHandler,
r'/settings/?$',
MasterNodeSettingsHandler,
)
urls = [i if isinstance(i, str) else i.__name__ for i in urls]
_locals = locals()
def get_extensions_urls():
"""Method is used to retrieve the data about
handlers and urls from extensions and convert
them into web.py consumable format.
:returns: dict in the next format:
{'urls': (r'/url/', 'ClassName'),
'handlers': [{
'class': ClassName,
'name': 'ClassName'}]}
"""
urls = []
handlers = []
for extension in get_all_extensions():
for url in extension.urls:
# TODO(eli): handler name should be extension specific
# not to have problems when several extensions use
# the same name for handler classes.
# Should be done as a part of blueprint:
# https://blueprints.launchpad.net/fuel/+spec
# /volume-manager-refactoring
handler_name = url['handler'].__name__
handlers.append({
'class': url['handler'],
'name': handler_name})
urls.extend((url['uri'], handler_name))
return {'urls': urls, 'handlers': handlers}
def get_all_urls():
"""Merges urls and handlers from core with
urls and handlers from extensions
"""
ext_urls = get_extensions_urls()
all_urls = list(urls)
all_urls.extend(ext_urls['urls'])
for handler in ext_urls['handlers']:
_locals[handler['name']] = handler['class']
return [all_urls, _locals]
def app():
return web.application(*get_all_urls())
def public_urls():
return {
r'/nodes/?$': ['POST'],
r'/nodes/agent/?$': ['PUT'],
r'/version/?$': ['GET']
}
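# Illustrative usage (an assumption about typical web.py wiring, not part of
# this module): the application returned by app() can be served with the
# built-in development server via app().run(), or exposed to a WSGI server
# through app().wsgifunc().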
|
|
"""aospy.Run objects for observational data."""
import datetime
from aospy.run import Run
from aospy.data_loader import NestedDictDataLoader
# CRU
cru_v322 = Run(
name='v3.22',
description='CRU v3.22',
data_direc='/archive/Spencer.Hill/obs/HadCRU/3.22',
data_dur=113,
data_start_date=datetime.datetime(1901, 1, 1),
data_end_date=datetime.datetime(2013, 12, 31),
data_files={
'precip': 'cru_ts3.22.1901.2013.pre.dat.nc',
'cld_amt': 'cru_ts3.22.1901.2013.cld.dat.nc',
'diurnal_temp_range': 'cru_ts3.22.1901.2013.dtr.dat.nc',
'ground_frost_freq': 'cru_ts3.22.1901.2013.frs.dat.nc',
'pet': 'cru_ts3.22.1901.2013.pet.dat.nc',
't_surf_min': 'cru_ts3.22.1901.2013.tmn.dat.nc',
't_surf_max': 'cru_ts3.22.1901.2013.tmx.dat.nc',
't_surf': 'cru_ts3.22.1901.2013.tmp.dat.nc',
'vap_pres': 'cru_ts3.22.1901.2013.vap.dat.nc',
'wet_day_freq': 'cru_ts3.22.1901.2013.wet.dat.nc'
}
)
# PREC/L
prec_l_0p5deg = Run(
name='0.5deg',
description='PREC/L 0.5x0.5 degree resolution',
data_direc='/archive/Spencer.Hill/obs/PREC_L/20150212',
data_dur=64,
data_start_date=datetime.datetime(1948, 1, 1),
data_end_date=datetime.datetime(2011, 12, 31),
data_files={'precip': 'precip.mon.mean.0.5x0.5.nc'}
)
prec_l_1deg = Run(
name='1deg',
description='PREC/L 1x1 degree resolution',
data_direc='/archive/Spencer.Hill/obs/PREC_L/20150212',
data_dur=67,
data_start_date=datetime.datetime(1948, 1, 1),
data_end_date=datetime.datetime(2014, 12, 31),
data_files={'precip': 'precip.mon.mean.1x1.nc'}
)
prec_l_2p5deg = Run(
name='2.5deg',
description='PREC/L 2.5x2.5 degree resolution',
data_direc='/archive/Spencer.Hill/obs/PREC_L/20150212',
data_dur=67,
data_start_date=datetime.datetime(1948, 1, 1),
data_end_date=datetime.datetime(2014, 12, 31),
data_files={'precip': 'precip.mon.mean.2.5x2.5.nc'}
)
# CERES
ceres_ebaf = Run(
name='ebaf',
description='CERES EBAF',
data_direc=('/archive/pcmdi/repo/obs4MIPs/NASA-LaRC/CERES-EBAF/'
'atmos/mon/v20140402/CERES-EBAF'),
data_dur=14,
data_start_date=datetime.datetime(2000, 3, 1),
data_end_date=datetime.datetime(2013, 10, 31),
data_suffix='_CERES-EBAF_L3B_Ed2-8_200003-201310.nc',
data_files={
'swdn_toa': 'rsdt_CERES-EBAF_L3B_Ed2-8_200003-201310.nc',
'swup_toa': 'rsut_CERES-EBAF_L3B_Ed2-8_200003-201310.nc',
'swup_toa_clr': 'rsutcs_CERES-EBAF_L3B_Ed2-8_200003-201310.nc',
'olr': 'rlut_CERES-EBAF_L3B_Ed2-8_200003-201310.nc',
'olr_clr': 'rlutcs_CERES-EBAF_L3B_Ed2-8_200003-201310.nc',
'swdn_sfc': ('/archive/pcmdi/repo/obs4MIPs/NASA-LaRC/'
'CERES-EBAF_Surface/atmos/mon/v20140402/'
'rsds_CERES-EBAF_L3B_Ed2-7_200003-201303.nc'),
'swdn_sfc_clr': ('/archive/pcmdi/repo/obs4MIPs/NASA-LaRC/'
'CERES-EBAF_Surface/atmos/mon/v20140402/'
'rsdscs_CERES-EBAF_L3B_Ed2-7_200003-201303.nc'),
'swup_sfc': ('/archive/pcmdi/repo/obs4MIPs/NASA-LaRC/'
'CERES-EBAF_Surface/atmos/mon/v20140402/'
'rsus_CERES-EBAF_L3B_Ed2-7_200003-201303.nc'),
'swup_sfc_clr': ('/archive/pcmdi/repo/obs4MIPs/NASA-LaRC/'
'CERES-EBAF_Surface/atmos/mon/v20140402/'
'rsuscs_CERES-EBAF_L3B_Ed2-7_200003-201303.nc'),
'lwdn_sfc': ('/archive/pcmdi/repo/obs4MIPs/NASA-LaRC/'
'CERES-EBAF_Surface/atmos/mon/v20140402/'
'rlds_CERES-EBAF_L3B_Ed2-7_200003-201303.nc'),
'lwdn_sfc_clr': ('/archive/pcmdi/repo/obs4MIPs/NASA-LaRC/'
'CERES-EBAF_Surface/atmos/mon/v20140402/'
'rldscs_CERES-EBAF_L3B_Ed2-7_200003-201303.nc'),
'lwup_sfc': ('/archive/pcmdi/repo/obs4MIPs/NASA-LaRC/'
'CERES-EBAF_Surface/atmos/mon/v20140402/'
'rlus_CERES-EBAF_L3B_Ed2-7_200003-201303.nc'),
}
)
ceres_ebaf_sfc = Run(
name='ebaf-sfc',
description='CERES EBAF-surface',
data_direc=('/archive/pcmdi/repo/obs4MIPs/NASA-LaRC/CERES-EBAF_Surface/'
'atmos/mon/v20140402'),
data_dur=14,
data_start_date=datetime.datetime(2000, 3, 1),
data_end_date=datetime.datetime(2013, 3, 31),
data_suffix='_CERES-EBAF_L3B_Ed2-7_200003-201303.nc',
data_files={
'swdn_sfc': 'rsds_CERES-EBAF_L3B_Ed2-7_200003-201303.nc',
'swdn_sfc_clr': 'rsdscs_CERES-EBAF_L3B_Ed2-7_200003-201303.nc',
'swup_sfc': 'rsus_CERES-EBAF_L3B_Ed2-7_200003-201303.nc',
'swup_sfc_clr': 'rsuscs_CERES-EBAF_L3B_Ed2-7_200003-201303.nc',
'lwdn_sfc': 'rlds_CERES-EBAF_L3B_Ed2-7_200003-201303.nc',
'lwdn_sfc_clr': 'rldscs_CERES-EBAF_L3B_Ed2-7_200003-201303.nc',
'lwup_sfc': 'rlus_CERES-EBAF_L3B_Ed2-7_200003-201303.nc',
}
)
# GPCP
gpcp_v2p2 = Run(
name='v2p2',
description=('GPCP v2.2 gridded precipitation, from blend of '
'satellite and station gauge data.'),
data_direc='/archive/pcmdi/repo/obs4MIPs/NASA-GSFC/GPCP/atmos/',
data_dur=10,
data_start_date=datetime.datetime(1979, 1, 1),
data_end_date=datetime.datetime(2013, 12, 31),
data_files={'monthly':
['mon/v20130401/pr_GPCP-SG_L3_v2.2_' + yrs + '.nc' for yrs in
('197901-197912', '198001-198912', '199001-199912',
'200001-200912', '201001-201312')],
'pentad': 'day/v20121003/'}
)
# TRMM
trmm_v7a = Run(
name='v7a',
description='TRMM v7 gridded precipitation, from satellite data',
data_direc='/archive/pcmdi/repo/obs4MIPs/NASA-GSFC/TRMM/atmos/',
data_dur=2,
data_start_date=datetime.datetime(2000, 1, 1),
data_end_date=datetime.datetime(2010, 9, 30),
data_files={'monthly': ['mon/v20130204/pr_TRMM-L3_v7A_' + yrs + '.nc'
for yrs in ('200001-200912', '201001-201009')]}
)
# CMAP
cmap_standard = Run(
name='standard',
description=('CMAP standard version, which does not include NCEP '
'reanalysis data to fill in gaps.'),
data_direc='/archive/Spencer.Hill/obs/CMAP/standard',
data_dur=36,
data_start_date=datetime.datetime(1979, 1, 1),
data_end_date=datetime.datetime(2014, 12, 31),
data_files={'monthly': 'precip.mon.mean.nc',
'pentad': 'precip.pentad.mean.nc'}
)
cmap_enhanced = Run(
name='enhanced',
description=('CMAP enhanced version, which includes NCEP reanalysis '
'data to fill in gaps.'),
data_direc='/archive/Spencer.Hill/obs/CMAP/enhanced',
data_dur=36,
data_start_date=datetime.datetime(1979, 1, 1),
data_end_date=datetime.datetime(2014, 12, 31),
data_files={'monthly': 'precip.mon.mean.nc',
'pentad': 'precip.pentad.mean.nc'}
)
# U. Delaware
udel_v201 = Run(
name='v201',
description='U. Delaware version 2.01',
data_direc='/archive/Spencer.Hill/obs/U_Del',
data_dur=109,
data_start_date=datetime.datetime(1900, 1, 1),
data_end_date=datetime.datetime(2008, 12, 31),
data_files={'precip': 'precip.mon.total.v201.nc',
't_surf': 'air.mon.total.v201.nc'}
)
udel_v301 = Run(
name='v301',
description='U. Delaware version 3.01',
data_direc='/archive/Spencer.Hill/obs/U_Del',
data_dur=111,
data_start_date=datetime.datetime(1900, 1, 1),
data_end_date=datetime.datetime(2010, 12, 31),
data_files={'precip': 'precip.mon.total.v301.nc',
't_surf': 'air.mon.total.v301.nc'}
)
# ERA-Interim
era_i = Run(
name='interim',
description='',
data_direc=('/archive/pcmdi/repo/ana4MIPs/ECMWF/ERA-Interim/atmos/'
'mon/v20140416'),
data_dur=1,
data_start_date=datetime.datetime(1979, 1, 1),
data_end_date=datetime.datetime(2013, 12, 31),
default_start_date=datetime.datetime(1981, 1, 1),
default_end_date=datetime.datetime(2000, 12, 31),
# data_dir_struc='one_dir',
data_dir_struc='gfdl_repo',
data_files={
'cld_amt': 'cl_*.nc',
'evap': 'evspsbl_*.nc',
'hght': 'zg_*.nc',
'lwdn_sfc': 'rlds_*.nc',
'lwup_sfc': 'rlus_*.nc',
'olr': 'rlut_*.nc',
'olr_clr': 'rlutcs_*.nc',
'omega': 'wap_*.nc',
'precip': 'pr_*.nc',
'ps': 'ps_*.nc',
'rh': 'hur_*.nc',
'shflx': 'hfss_*.nc',
'slp': 'psl_*.nc',
'sphum': 'hus_*.nc',
'swdn_sfc': 'rsds_*.nc',
'swdn_toa': 'rsdt_*.nc',
'swup_sfc': 'rsus_*.nc',
# 'swup_toa': 'rsut_*.nc',
't_surf': 'tas_*.nc',
'temp': 'ta_*.nc',
'ucomp': 'ua_*.nc',
'vcomp': 'va_*.nc',
'wvp': 'prw_*.nc',
}
)
# MERRA
merra = Run(
name='merra',
description='',
data_direc=('/archive/pcmdi/repo/ana4MIPs/NASA-GMAO/MERRA/atmos/mon/'
'v20140624'),
data_dur=1,
data_start_date=datetime.datetime(1979, 1, 1),
data_end_date=datetime.datetime(2011, 12, 31),
default_start_date=datetime.datetime(1981, 1, 1),
default_end_date=datetime.datetime(2000, 12, 31),
data_files={
'cld_amt': ['cl_Amon_reanalysis_MERRA_' + yrs + '.nc'
for yrs in [str(yr) + '01-' + str(yr) + '12'
for yr in range(1979, 2012)]],
'evap': ['evspsbl_Amon_reanalysis_MERRA_' + yrs + '.nc'
for yrs in [str(yr) + '01-' + str(yr) + '12'
for yr in range(1979, 2012)]],
'hght': ['zg_Amon_reanalysis_MERRA_' + yrs + '.nc'
for yrs in [str(yr) + '01-' + str(yr) + '12'
for yr in range(1979, 2012)]],
'lwdn_sfc': ['rlds_Amon_reanalysis_MERRA_' + yrs + '.nc' for yrs in
['%s01-%s12' % (yr, yr) for yr in range(1979, 2012)]],
'lwdn_sfc_clr': ['rldscs_Amon_reanalysis_MERRA_' + yrs + '.nc'
for yrs in
['%s01-%s12' % (yr, yr) for yr in range(1979, 2012)]],
'lwup_sfc': ['rlus_Amon_reanalysis_MERRA_' + yrs + '.nc' for yrs in
['%s01-%s12' % (yr, yr) for yr in range(1979, 2012)]],
'olr': ['rlut_Amon_reanalysis_MERRA_' + yrs + '.nc' for yrs in
['%s01-%s12' % (yr, yr) for yr in range(1979, 2012)]],
'olr_clr': ['rlutcs_Amon_reanalysis_MERRA_' + yrs + '.nc' for yrs in
['%s01-%s12' % (yr, yr) for yr in range(1979, 2012)]],
'omega': ['wap_Amon_reanalysis_MERRA_' + yrs + '.nc'
for yrs in [str(yr) + '01-' + str(yr) + '12'
for yr in range(1979, 2012)]],
'precip': ['pr_Amon_reanalysis_MERRA_' + yrs + '.nc'
for yrs in [str(yr) + '01-' + str(yr) + '12'
for yr in range(1979, 2012)]],
'ps': ['ps_Amon_reanalysis_MERRA_' + yrs + '.nc'
for yrs in [str(yr) + '01-' + str(yr) + '12'
for yr in range(1979, 2012)]],
'rh': ['hur_Amon_reanalysis_MERRA_' + yrs + '.nc'
for yrs in [str(yr) + '01-' + str(yr) + '12'
for yr in range(1979, 2012)]],
'shflx': ['hfss_Amon_reanalysis_MERRA_' + yrs + '.nc'
for yrs in [str(yr) + '01-' + str(yr) + '12'
for yr in range(1979, 2012)]],
'slp': ['psl_Amon_reanalysis_MERRA_' + yrs + '.nc'
for yrs in [str(yr) + '01-' + str(yr) + '12'
for yr in range(1979, 2012)]],
'sphum': ['hus_Amon_reanalysis_MERRA_' + yrs + '.nc'
for yrs in [str(yr) + '01-' + str(yr) + '12'
for yr in range(1979, 2012)]],
'swdn_sfc': ['rsds_Amon_reanalysis_MERRA_' + yrs + '.nc' for yrs in
['%s01-%s12' % (yr, yr) for yr in range(1979, 2012)]],
# 'swup_sfc': ['rsus_Amon_reanalysis_MERRA_' + yrs + '.nc' for yrs in
# ['%s01-%s12' % (yr, yr) for yr in range(1979, 2012)]],
'swdn_toa': ['rsdt_Amon_reanalysis_MERRA_' + yrs + '.nc' for yrs in
['%s01-%s12' % (yr, yr) for yr in range(1979, 2012)]],
'swup_toa': ['rsut_Amon_reanalysis_MERRA_' + yrs + '.nc' for yrs in
['%s01-%s12' % (yr, yr) for yr in range(1979, 2012)]],
'swup_toa_clr': ['rsutcs_Amon_reanalysis_MERRA_' + yrs + '.nc'
for yrs in
['%s01-%s12' % (yr, yr) for yr in range(1979, 2012)]],
'temp': ['ta_Amon_reanalysis_MERRA_' + yrs + '.nc'
for yrs in [str(yr) + '01-' + str(yr) + '12'
for yr in range(1979, 2012)]],
'ucomp': ['ua_Amon_reanalysis_MERRA_' + yrs + '.nc'
for yrs in [str(yr) + '01-' + str(yr) + '12'
for yr in range(1979, 2012)]],
'vcomp': ['va_Amon_reanalysis_MERRA_' + yrs + '.nc' for yrs in
['%s01-%s12' % (yr, yr) for yr in range(1979, 2012)]],
'wvp': ['prw_Amon_reanalysis_MERRA_' + yrs + '.nc'
for yrs in [str(yr) + '01-' + str(yr) + '12'
for yr in range(1979, 2012)]]
}
)
# NCEP CFSR
cfsr = Run(
name='cfsr',
description='',
data_direc=('/archive/pcmdi/repo/ana4MIPs/NOAA-NCEP/CFSR/atmos/'
'mon/v20140822'),
data_dur=1,
data_start_date=datetime.datetime(1979, 1, 1),
data_end_date=datetime.datetime(2013, 12, 31),
default_start_date=datetime.datetime(1981, 1, 1),
default_end_date=datetime.datetime(2000, 12, 31),
data_files={
'cld_amt': ['cl_Amon_reanalysis_CFSR_' + yrs + '.nc' for yrs in
['%s01-%s12'% (yr, yr) for yr in range(1979, 2014)]],
'hght': ['zg_Amon_reanalysis_CFSR_' + yrs + '.nc' for yrs in
['%s01-%s12'% (yr, yr) for yr in range(1979, 2014)]],
'omega': ['wap_Amon_reanalysis_CFSR_' + yrs + '.nc' for yrs in
['%s01-%s12'% (yr, yr) for yr in range(1979, 2014)]],
'rh': ['hur_Amon_reanalysis_CFSR_' + yrs + '.nc' for yrs in
['%s01-%s12'% (yr, yr) for yr in range(1979, 2014)]],
'sphum': ['hus_Amon_reanalysis_CFSR_' + yrs + '.nc' for yrs in
['%s01-%s12'% (yr, yr) for yr in range(1979, 2014)]],
'temp': ['ta_Amon_reanalysis_CFSR_' + yrs + '.nc' for yrs in
['%s01-%s12'% (yr, yr) for yr in range(1979, 2014)]],
'ucomp': ['ua_Amon_reanalysis_CFSR_' + yrs + '.nc' for yrs in
['%s01-%s12'% (yr, yr) for yr in range(1979, 2014)]],
'vcomp': ['va_Amon_reanalysis_CFSR_' + yrs + '.nc' for yrs in
['%s01-%s12'% (yr, yr) for yr in range(1979, 2014)]],
'evap': 'evspsbl_Amon_reanalysis_CFSR_197901-201112.nc',
'lwdn_sfc': 'rlds_Amon_reanalysis_CFSR_197901-201112.nc',
'lwdn_sfc_clr': 'rldscs_Amon_reanalysis_CFSR_197901-201112.nc',
'lwup_sfc': 'rlus_Amon_reanalysis_CFSR_197901-201112.nc',
'olr': 'rlut_Amon_reanalysis_CFSR_197901-201112.nc',
'olr_clr': 'rlutcs_Amon_reanalysis_CFSR_197901-201112.nc',
'precip': 'pr_Amon_reanalysis_CFSR_197901-201112.nc',
'ps': 'ps_Amon_reanalysis_CFSR_197901-201112.nc',
'shflx': 'hfss_Amon_reanalysis_CFSR_197901-201112.nc',
'slp': 'psl_Amon_reanalysis_CFSR_197901-201112.nc',
'swdn_sfc': 'rsds_Amon_reanalysis_CFSR_197901-201112.nc',
'swdn_sfc_clr': 'rsdscs_Amon_reanalysis_CFSR_197901-201112.nc',
'swdn_toa': 'rsdt_Amon_reanalysis_CFSR_197901-201112.nc',
'swup_sfc': 'rsus_Amon_reanalysis_CFSR_197901-201112.nc',
'swup_sfc_clr': 'rsuscs_Amon_reanalysis_CFSR_197901-201112.nc',
'swup_toa': 'rsut_Amon_reanalysis_CFSR_197901-201112.nc',
'swup_toa_clr': 'rsutcs_Amon_reanalysis_CFSR_197901-201112.nc',
't_surf': 'tas_Amon_reanalysis_CFSR_197901-201112.nc',
'wvp': 'prw_Amon_reanalysis_CFSR_197901-201112.nc',
}
)
# JMA JRA-25
jra25 = Run(
name='jra-25',
description='Japanese Meteorological Agency reanalyses',
data_direc='/archive/pcmdi/repo/ana4MIPs/JMA/JRA-25/atmos/mon/v20140408',
data_dur=1,
data_start_date=datetime.datetime(1979, 1, 1),
data_end_date=datetime.datetime(2013, 12, 31),
data_files={'monthly': ['va_Amon_reanalysis_JRA-25_' + yrs + '.nc'
for yrs in [str(yr) + '01-' + str(yr) + '12'
for yr in range(1979, 2014)]]}
)
# LandFlux-EVAL 1989-2005
lfe_all = Run(
name='all',
description='LandFlux-EVAL 1989-2005 using all products',
data_direc='/archive/Spencer.Hill/obs/LandFlux-EVAL',
data_dur=17,
data_start_date=datetime.datetime(1989, 1, 1),
data_end_date=datetime.datetime(2005, 12, 31),
data_files={'monthly': 'LandFluxEVAL.merged.89-05.monthly.all.nc',
'annual': 'LandFluxEVAL.merged.89-05.yearly.all.nc'}
)
lfe_diag = Run(
name='diagnostic',
description='LandFlux-EVAL 1989-2005 using only diagnostic products',
data_direc='/archive/Spencer.Hill/obs/LandFlux-EVAL',
data_dur=17,
data_start_date=datetime.datetime(1989, 1, 1),
data_end_date=datetime.datetime(2005, 12, 31),
data_files={'monthly': 'LandFluxEVAL.merged.89-05.monthly.diagnostic.nc',
'annual': 'LandFluxEVAL.merged.89-05.yearly.diagnostic.nc'}
)
lfe_lsm = Run(
name='lsm',
description='LandFlux-EVAL 1989-2005 using land surface models only',
data_direc='/archive/Spencer.Hill/obs/LandFlux-EVAL',
data_dur=17,
data_start_date=datetime.datetime(1989, 1, 1),
data_end_date=datetime.datetime(2005, 12, 31),
data_files={'monthly': 'LandFluxEVAL.merged.89-05.monthly.lsm.nc',
'annual': 'LandFluxEVAL.merged.89-05.yearly.lsm.nc'}
)
lfe_rean = Run(
name='reanalyses',
description='LandFlux-EVAL 1989-2005 using reanalyses only',
data_direc='/archive/Spencer.Hill/obs/LandFlux-EVAL',
data_dur=17,
data_start_date=datetime.datetime(1989, 1, 1),
data_end_date=datetime.datetime(2005, 12, 31),
data_files={'monthly': 'LandFluxEVAL.merged.89-05.monthly.reanalyses.nc',
'annual': 'LandFluxEVAL.merged.89-05.yearly.reanlayses.nc'}
)
# LandFlux-EVAL 1989-1995
lfe95_all = Run(
name='all',
description='LandFlux-EVAL 1989-1995 using all products',
data_direc='/archive/Spencer.Hill/obs/LandFlux-EVAL',
data_dur=17,
data_start_date=datetime.datetime(1989, 1, 1),
data_end_date=datetime.datetime(1995, 12, 31),
data_files={'monthly': 'LandFluxEVAL.merged.89-95.monthly.all.nc',
'annual': 'LandFluxEVAL.merged.89-95.yearly.all.nc'}
)
lfe95_diag = Run(
name='diagnostic',
description='LandFlux-EVAL 1989-1995 using diagnostic products only',
data_direc='/archive/Spencer.Hill/obs/LandFlux-EVAL',
data_dur=17,
data_start_date=datetime.datetime(1989, 1, 1),
data_end_date=datetime.datetime(1995, 12, 31),
data_files={'monthly': 'LandFluxEVAL.merged.89-95.monthly.diagnostic.nc',
'annual': 'LandFluxEVAL.merged.89-95.yearly.diagnostic.nc'}
)
lfe95_lsm = Run(
name='lsm',
description='LandFlux-EVAL 1989-1995 using land surface models only',
data_direc='/archive/Spencer.Hill/obs/LandFlux-EVAL',
data_dur=17,
data_start_date=datetime.datetime(1989, 1, 1),
data_end_date=datetime.datetime(1995, 12, 31),
data_files={'monthly': 'LandFluxEVAL.merged.89-95.monthly.lsm.nc',
'annual': 'LandFluxEVAL.merged.89-95.yearly.lsm.nc'}
)
lfe95_rean = Run(
name='reanalyses',
description='LandFlux-EVAL 1989-1995 using reanalyses only',
data_direc='/archive/Spencer.Hill/obs/LandFlux-EVAL',
data_dur=17,
data_start_date=datetime.datetime(1989, 1, 1),
data_end_date=datetime.datetime(1995, 12, 31),
data_files={'monthly': 'LandFluxEVAL.merged.89-95.monthly.reanalyses.nc',
'annual': 'LandFluxEVAL.merged.89-95.yearly.reanlayses.nc'}
)
# SST datasets
hadisst1 = Run(
name='hadisst1',
description='HadISST1 product; SST data only',
data_direc='/archive/Spencer.Hill/obs/HadISST',
data_dur=1,
data_start_date=datetime.datetime(2005, 1, 1),
data_end_date=datetime.datetime(2005, 12, 31),
data_files={'monthly': '/archive/Spencer.Hill/obs/HadISST/HadISST_sst.nc'}
)
hurrell = Run(
name='hurrell',
description='Hurrell SST product',
data_direc='/archive/Spencer.Hill/obs/Hurrell',
data_dur=1,
data_start_date=datetime.datetime(2000, 1, 1),
data_end_date=datetime.datetime(2000, 12, 31),
data_files={'monthly':
'/archive/Spencer.Hill/obs/Hurrell/sst.climo.1981-2000.data.nc'}
)
reynolds_oi = Run(
name='reynolds_oi',
description='Reynolds OI SST observational dataset',
data_direc='/archive/Spencer.Hill/obs/ReynoldsOI',
data_dur=19,
data_start_date=datetime.datetime(1981, 11, 1),
data_end_date=datetime.datetime(1999, 1, 31),
data_files={'monthly':
'/archive/Spencer.Hill/obs/ReynoldsOI/reyoi_sst.data.nc'}
)
|
|
"""
Experimental support for curvilinear grids.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from six.moves import zip
from itertools import chain
from .grid_finder import GridFinder
from .axislines import AxisArtistHelper, GridHelperBase
from .axis_artist import AxisArtist
from matplotlib.transforms import Affine2D, IdentityTransform
import numpy as np
from matplotlib.path import Path
class FixedAxisArtistHelper(AxisArtistHelper.Fixed):
"""
Helper class for a fixed axis.
"""
def __init__(self, grid_helper, side, nth_coord_ticks=None):
"""
nth_coord = along which coordinate value varies.
nth_coord = 0 -> x axis, nth_coord = 1 -> y axis
"""
super(FixedAxisArtistHelper, self).__init__( \
loc=side,
)
self.grid_helper = grid_helper
if nth_coord_ticks is None:
nth_coord_ticks = self.nth_coord
self.nth_coord_ticks = nth_coord_ticks
self.side = side
self._limits_inverted = False
def update_lim(self, axes):
self.grid_helper.update_lim(axes)
if self.nth_coord == 0:
xy1, xy2 = axes.get_ylim()
else:
xy1, xy2 = axes.get_xlim()
if xy1 > xy2:
self._limits_inverted = True
else:
self._limits_inverted = False
def change_tick_coord(self, coord_number=None):
if coord_number is None:
self.nth_coord_ticks = 1 - self.nth_coord_ticks
elif coord_number in [0, 1]:
self.nth_coord_ticks = coord_number
else:
raise Exception("wrong coord number")
def get_tick_transform(self, axes):
return axes.transData
def get_tick_iterators(self, axes):
"""tick_loc, tick_angle, tick_label"""
g = self.grid_helper
if self._limits_inverted:
side = {"left":"right","right":"left",
"top":"bottom", "bottom":"top"}[self.side]
else:
side = self.side
ti1 = g.get_tick_iterator(self.nth_coord_ticks, side)
ti2 = g.get_tick_iterator(1-self.nth_coord_ticks, side, minor=True)
#ti2 = g.get_tick_iterator(1-self.nth_coord_ticks, self.side, minor=True)
return chain(ti1, ti2), iter([])
class FloatingAxisArtistHelper(AxisArtistHelper.Floating):
def __init__(self, grid_helper, nth_coord, value, axis_direction=None):
"""
nth_coord = along which coordinate value varies.
nth_coord = 0 -> x axis, nth_coord = 1 -> y axis
"""
super(FloatingAxisArtistHelper, self).__init__(nth_coord,
value,
)
self.value = value
self.grid_helper = grid_helper
self._extremes = None, None
self._get_line_path = None # a method that returns a Path.
self._line_num_points = 100 # number of points to create a line
def set_extremes(self, e1, e2):
self._extremes = e1, e2
def update_lim(self, axes):
self.grid_helper.update_lim(axes)
x1, x2 = axes.get_xlim()
y1, y2 = axes.get_ylim()
grid_finder = self.grid_helper.grid_finder
extremes = grid_finder.extreme_finder(grid_finder.inv_transform_xy,
x1, y1, x2, y2)
extremes = list(extremes)
e1, e2 = self._extremes # ranges of other coordinates
if self.nth_coord == 0:
if e1 is not None:
extremes[2] = max(e1, extremes[2])
if e2 is not None:
extremes[3] = min(e2, extremes[3])
elif self.nth_coord == 1:
if e1 is not None:
extremes[0] = max(e1, extremes[0])
if e2 is not None:
extremes[1] = min(e2, extremes[1])
grid_info = dict()
lon_min, lon_max, lat_min, lat_max = extremes
lon_levs, lon_n, lon_factor = \
grid_finder.grid_locator1(lon_min, lon_max)
lat_levs, lat_n, lat_factor = \
grid_finder.grid_locator2(lat_min, lat_max)
grid_info["extremes"] = extremes
grid_info["lon_info"] = lon_levs, lon_n, lon_factor
grid_info["lat_info"] = lat_levs, lat_n, lat_factor
grid_info["lon_labels"] = grid_finder.tick_formatter1("bottom",
lon_factor,
lon_levs)
grid_info["lat_labels"] = grid_finder.tick_formatter2("bottom",
lat_factor,
lat_levs)
grid_finder = self.grid_helper.grid_finder
#e1, e2 = self._extremes # ranges of other coordinates
if self.nth_coord == 0:
xx0 = np.linspace(self.value, self.value, self._line_num_points)
yy0 = np.linspace(extremes[2], extremes[3], self._line_num_points)
xx, yy = grid_finder.transform_xy(xx0, yy0)
elif self.nth_coord == 1:
xx0 = np.linspace(extremes[0], extremes[1], self._line_num_points)
yy0 = np.linspace(self.value, self.value, self._line_num_points)
xx, yy = grid_finder.transform_xy(xx0, yy0)
grid_info["line_xy"] = xx, yy
self.grid_info = grid_info
def get_axislabel_transform(self, axes):
return Affine2D() #axes.transData
def get_axislabel_pos_angle(self, axes):
extremes = self.grid_info["extremes"]
if self.nth_coord == 0:
xx0 = self.value
yy0 = (extremes[2]+extremes[3])/2.
dxx, dyy = 0., abs(extremes[2]-extremes[3])/1000.
elif self.nth_coord == 1:
xx0 = (extremes[0]+extremes[1])/2.
yy0 = self.value
dxx, dyy = abs(extremes[0]-extremes[1])/1000., 0.
grid_finder = self.grid_helper.grid_finder
xx1, yy1 = grid_finder.transform_xy([xx0], [yy0])
trans_passingthrough_point = axes.transData + axes.transAxes.inverted()
p = trans_passingthrough_point.transform_point([xx1[0], yy1[0]])
if (0. <= p[0] <= 1.) and (0. <= p[1] <= 1.):
xx1c, yy1c = axes.transData.transform_point([xx1[0], yy1[0]])
xx2, yy2 = grid_finder.transform_xy([xx0+dxx], [yy0+dyy])
xx2c, yy2c = axes.transData.transform_point([xx2[0], yy2[0]])
return (xx1c, yy1c), np.arctan2(yy2c-yy1c, xx2c-xx1c)/np.pi*180.
else:
return None, None
def get_tick_transform(self, axes):
return IdentityTransform() #axes.transData
def get_tick_iterators(self, axes):
"""tick_loc, tick_angle, tick_label, (optionally) tick_label"""
grid_finder = self.grid_helper.grid_finder
lat_levs, lat_n, lat_factor = self.grid_info["lat_info"]
lat_levs = np.asarray(lat_levs)
if lat_factor is not None:
yy0 = lat_levs / lat_factor
dy = 0.01 / lat_factor
else:
yy0 = lat_levs
dy = 0.01
lon_levs, lon_n, lon_factor = self.grid_info["lon_info"]
lon_levs = np.asarray(lon_levs)
if lon_factor is not None:
xx0 = lon_levs / lon_factor
dx = 0.01 / lon_factor
else:
xx0 = lon_levs
dx = 0.01
if None in self._extremes:
e0, e1 = self._extremes
else:
e0, e1 = sorted(self._extremes)
if e0 is None:
e0 = -np.inf
if e1 is None:
e1 = np.inf
if self.nth_coord == 0:
mask = (e0 <= yy0) & (yy0 <= e1)
#xx0, yy0 = xx0[mask], yy0[mask]
yy0 = yy0[mask]
elif self.nth_coord == 1:
mask = (e0 <= xx0) & (xx0 <= e1)
#xx0, yy0 = xx0[mask], yy0[mask]
xx0 = xx0[mask]
def transform_xy(x, y):
x1, y1 = grid_finder.transform_xy(x, y)
x2y2 = axes.transData.transform(np.array([x1, y1]).transpose())
x2, y2 = x2y2.transpose()
return x2, y2
# find angles
if self.nth_coord == 0:
xx0 = np.empty_like(yy0)
xx0.fill(self.value)
xx1, yy1 = transform_xy(xx0, yy0)
xx00 = xx0.copy()
xx00[xx0+dx>e1] -= dx
xx1a, yy1a = transform_xy(xx00, yy0)
xx1b, yy1b = transform_xy(xx00+dx, yy0)
xx2a, yy2a = transform_xy(xx0, yy0)
xx2b, yy2b = transform_xy(xx0, yy0+dy)
labels = self.grid_info["lat_labels"]
labels = [l for l, m in zip(labels, mask) if m]
elif self.nth_coord == 1:
yy0 = np.empty_like(xx0)
yy0.fill(self.value)
xx1, yy1 = transform_xy(xx0, yy0)
xx1a, yy1a = transform_xy(xx0, yy0)
xx1b, yy1b = transform_xy(xx0, yy0+dy)
xx00 = xx0.copy()
xx00[xx0+dx>e1] -= dx
xx2a, yy2a = transform_xy(xx00, yy0)
xx2b, yy2b = transform_xy(xx00+dx, yy0)
labels = self.grid_info["lon_labels"]
labels = [l for l, m in zip(labels, mask) if m]
def f1():
dd = np.arctan2(yy1b-yy1a, xx1b-xx1a) # angle normal
dd2 = np.arctan2(yy2b-yy2a, xx2b-xx2a) # angle tangent
mm = ((yy1b-yy1a)==0.) & ((xx1b-xx1a)==0.) # mask where dd1 is not defined
dd[mm] = dd2[mm]+3.14159/2.
#dd = np.arctan2(yy2-yy1, xx2-xx1) # angle normal
#dd2 = np.arctan2(yy3-yy1, xx3-xx1) # angle tangent
#mm = ((yy2-yy1)==0.) & ((xx2-xx1)==0.) # mask where dd1 is not defined
#dd[mm] = dd2[mm]+3.14159/2.
#dd += 3.14159
#dd = np.arctan2(xx2-xx1, angle_tangent-yy1)
trans_tick = self.get_tick_transform(axes)
tr2ax = trans_tick + axes.transAxes.inverted()
for x, y, d, d2, lab in zip(xx1, yy1, dd, dd2, labels):
c2 = tr2ax.transform_point((x, y))
delta=0.00001
if (0. -delta<= c2[0] <= 1.+delta) and \
(0. -delta<= c2[1] <= 1.+delta):
d1 = d/3.14159*180.
d2 = d2/3.14159*180.
yield [x, y], d1, d2, lab
return f1(), iter([])
def get_line_transform(self, axes):
return axes.transData
def get_line(self, axes):
self.update_lim(axes)
x, y = self.grid_info["line_xy"]
if self._get_line_path is None:
return Path(list(zip(x, y)))
else:
return self._get_line_path(axes, x, y)
class GridHelperCurveLinear(GridHelperBase):
def __init__(self, aux_trans,
extreme_finder=None,
grid_locator1=None,
grid_locator2=None,
tick_formatter1=None,
tick_formatter2=None):
"""
aux_trans : a transform from the source (curved) coordinate to
target (rectilinear) coordinate. An instance of MPL's Transform
(inverse transform should be defined) or a tuple of two callable
objects which defines the transform and its inverse. The callables
need take two arguments of array of source coordinates and
should return two target coordinates:
e.g., x2, y2 = trans(x1, y1)
"""
super(GridHelperCurveLinear, self).__init__()
self.grid_info = None
self._old_values = None
#self._grid_params = dict()
self._aux_trans = aux_trans
self.grid_finder = GridFinder(aux_trans,
extreme_finder,
grid_locator1,
grid_locator2,
tick_formatter1,
tick_formatter2)
def update_grid_finder(self, aux_trans=None, **kw):
if aux_trans is not None:
self.grid_finder.update_transform(aux_trans)
self.grid_finder.update(**kw)
self.invalidate()
def _update(self, x1, x2, y1, y2):
"bbox in 0-based image coordinates"
# update wcsgrid
if self.valid() and self._old_values == (x1, x2, y1, y2):
return
self._update_grid(x1, y1, x2, y2)
self._old_values = (x1, x2, y1, y2)
self._force_update = False
def new_fixed_axis(self, loc,
nth_coord=None,
axis_direction=None,
offset=None,
axes=None):
if axes is None:
axes = self.axes
if axis_direction is None:
axis_direction = loc
_helper = FixedAxisArtistHelper(self, loc,
#nth_coord,
nth_coord_ticks=nth_coord,
)
axisline = AxisArtist(axes, _helper, axis_direction=axis_direction)
return axisline
def new_floating_axis(self, nth_coord,
value,
axes=None,
axis_direction="bottom"
):
if axes is None:
axes = self.axes
_helper = FloatingAxisArtistHelper( \
self, nth_coord, value, axis_direction)
axisline = AxisArtist(axes, _helper)
#_helper = FloatingAxisArtistHelper(self, nth_coord,
# value,
# label_direction=label_direction,
# )
#axisline = AxisArtistFloating(axes, _helper,
# axis_direction=axis_direction)
axisline.line.set_clip_on(True)
axisline.line.set_clip_box(axisline.axes.bbox)
#axisline.major_ticklabels.set_visible(True)
#axisline.minor_ticklabels.set_visible(False)
#axisline.major_ticklabels.set_rotate_along_line(True)
#axisline.set_rotate_label_along_line(True)
return axisline
def _update_grid(self, x1, y1, x2, y2):
self.grid_info = self.grid_finder.get_grid_info(x1, y1, x2, y2)
def get_gridlines(self, which="major", axis="both"):
grid_lines = []
if axis in ["both", "x"]:
for gl in self.grid_info["lon"]["lines"]:
grid_lines.extend(gl)
if axis in ["both", "y"]:
for gl in self.grid_info["lat"]["lines"]:
grid_lines.extend(gl)
return grid_lines
def get_tick_iterator(self, nth_coord, axis_side, minor=False):
#axisnr = dict(left=0, bottom=1, right=2, top=3)[axis_side]
angle_tangent = dict(left=90, right=90, bottom=0, top=0)[axis_side]
#angle = [0, 90, 180, 270][axisnr]
lon_or_lat = ["lon", "lat"][nth_coord]
if not minor: # major ticks
def f():
for (xy, a), l in zip(self.grid_info[lon_or_lat]["tick_locs"][axis_side],
self.grid_info[lon_or_lat]["tick_labels"][axis_side]):
angle_normal = a
yield xy, angle_normal, angle_tangent, l
else:
def f():
for (xy, a), l in zip(self.grid_info[lon_or_lat]["tick_locs"][axis_side],
self.grid_info[lon_or_lat]["tick_labels"][axis_side]):
angle_normal = a
yield xy, angle_normal, angle_tangent, ""
#for xy, a, l in self.grid_info[lon_or_lat]["ticks"][axis_side]:
# yield xy, a, ""
return f()
def test3():
import numpy as np
from matplotlib.transforms import Transform
from matplotlib.path import Path
class MyTransform(Transform):
input_dims = 2
output_dims = 2
is_separable = False
def __init__(self, resolution):
"""
Create a new transform. Resolution is the number of steps
to interpolate between each input line segment to approximate its
path in the curved target space.
"""
Transform.__init__(self)
self._resolution = resolution
def transform(self, ll):
x = ll[:, 0:1]
y = ll[:, 1:2]
return np.concatenate((x, y-x), 1)
transform.__doc__ = Transform.transform.__doc__
transform_non_affine = transform
transform_non_affine.__doc__ = Transform.transform_non_affine.__doc__
def transform_path(self, path):
vertices = path.vertices
ipath = path.interpolated(self._resolution)
return Path(self.transform(ipath.vertices), ipath.codes)
transform_path.__doc__ = Transform.transform_path.__doc__
transform_path_non_affine = transform_path
transform_path_non_affine.__doc__ = Transform.transform_path_non_affine.__doc__
def inverted(self):
return MyTransformInv(self._resolution)
inverted.__doc__ = Transform.inverted.__doc__
class MyTransformInv(Transform):
input_dims = 2
output_dims = 2
is_separable = False
def __init__(self, resolution):
Transform.__init__(self)
self._resolution = resolution
def transform(self, ll):
x = ll[:, 0:1]
y = ll[:, 1:2]
return np.concatenate((x, y+x), 1)
transform.__doc__ = Transform.transform.__doc__
def inverted(self):
return MyTransform(self._resolution)
inverted.__doc__ = Transform.inverted.__doc__
import matplotlib.pyplot as plt
fig = plt.figure(1)
fig.clf()
tr = MyTransform(1)
grid_helper = GridHelperCurveLinear(tr)
from mpl_toolkits.axes_grid1.parasite_axes import (host_subplot_class_factory,
                                                    ParasiteAxesAuxTrans)
from .axislines import Axes
SubplotHost = host_subplot_class_factory(Axes)
ax1 = SubplotHost(fig, 1, 1, 1, grid_helper=grid_helper)
fig.add_subplot(ax1)
ax2 = ParasiteAxesAuxTrans(ax1, tr, "equal")
ax1.parasites.append(ax2)
ax2.plot([3, 6], [5.0, 10.])
ax1.set_aspect(1.)
ax1.set_xlim(0, 10)
ax1.set_ylim(0, 10)
ax1.grid(True)
plt.draw()
def curvelinear_test2(fig):
"""
polar projection, but in a rectangular box.
"""
global ax1
import numpy as np
from . import angle_helper
from matplotlib.projections import PolarAxes
from matplotlib.transforms import Affine2D
from mpl_toolkits.axes_grid.parasite_axes import SubplotHost, \
ParasiteAxesAuxTrans
import matplotlib.cbook as cbook
# PolarAxes.PolarTransform takes radians. However, we want our coordinate
# system in degrees
tr = Affine2D().scale(np.pi/180., 1.) + PolarAxes.PolarTransform()
# polar projection, which involves cycle, and also has limits in
# its coordinates, needs a special method to find the extremes
# (min, max of the coordinate within the view).
# 20, 20 : number of sampling points along x, y direction
extreme_finder = angle_helper.ExtremeFinderCycle(20, 20,
lon_cycle = 360,
lat_cycle = None,
lon_minmax = None,
lat_minmax = (0, np.inf),
)
grid_locator1 = angle_helper.LocatorDMS(5)
# Find grid values appropriate for the coordinate (degree,
# minute, second).
tick_formatter1 = angle_helper.FormatterDMS()
# And also use an appropriate formatter. Note that the
# acceptable Locator and Formatter classes are a bit different from
# mpl's, and you cannot directly use mpl's Locator and
# Formatter here (but that may be possible in the future).
grid_helper = GridHelperCurveLinear(tr,
extreme_finder=extreme_finder,
grid_locator1=grid_locator1,
tick_formatter1=tick_formatter1
)
ax1 = SubplotHost(fig, 1, 1, 1, grid_helper=grid_helper)
# make ticklabels of right and top axis visible.
ax1.axis["right"].major_ticklabels.set_visible(True)
ax1.axis["top"].major_ticklabels.set_visible(True)
# let the right axis show ticklabels for the 1st coordinate (angle)
ax1.axis["right"].get_helper().nth_coord_ticks=0
# let the bottom axis show ticklabels for the 2nd coordinate (radius)
ax1.axis["bottom"].get_helper().nth_coord_ticks=1
fig.add_subplot(ax1)
grid_helper = ax1.get_grid_helper()
ax1.axis["lat"] = axis = grid_helper.new_floating_axis(0, 60, axes=ax1)
axis.label.set_text("Test")
axis.label.set_visible(True)
#axis._extremes = 2, 10
#axis.label.set_text("Test")
#axis.major_ticklabels.set_visible(False)
#axis.major_ticks.set_visible(False)
axis.get_helper()._extremes=2, 10
ax1.axis["lon"] = axis = grid_helper.new_floating_axis(1, 6, axes=ax1)
#axis.major_ticklabels.set_visible(False)
#axis.major_ticks.set_visible(False)
axis.label.set_text("Test 2")
axis.get_helper()._extremes=-180, 90
# A parasite axes with given transform
ax2 = ParasiteAxesAuxTrans(ax1, tr, "equal")
# note that ax2.transData == tr + ax1.transData
# Anything you draw in ax2 will match the ticks and grids of ax1.
ax1.parasites.append(ax2)
intp = cbook.simple_linear_interpolation
ax2.plot(intp(np.array([0, 30]), 50),
intp(np.array([10., 10.]), 50))
ax1.set_aspect(1.)
ax1.set_xlim(-5, 12)
ax1.set_ylim(-5, 10)
ax1.grid(True)
def curvelinear_test3(fig):
"""
polar projection, but in a rectangular box.
"""
global ax1, axis
import numpy as np
from . import angle_helper
from matplotlib.projections import PolarAxes
from matplotlib.transforms import Affine2D
from mpl_toolkits.axes_grid.parasite_axes import SubplotHost
# PolarAxes.PolarTransform takes radians. However, we want our coordinate
# system in degrees
tr = Affine2D().scale(np.pi/180., 1.) + PolarAxes.PolarTransform()
# The polar projection, which involves a cycle and also has limits in
# its coordinates, needs a special method to find the extremes
# (min, max of the coordinate within the view).
# 20, 20 : number of sampling points along x, y direction
extreme_finder = angle_helper.ExtremeFinderCycle(20, 20,
lon_cycle = 360,
lat_cycle = None,
lon_minmax = None,
lat_minmax = (0, np.inf),
)
grid_locator1 = angle_helper.LocatorDMS(12)
# Find grid values appropriate for the coordinate (degree,
# minute, second).
tick_formatter1 = angle_helper.FormatterDMS()
# And also use an appropriate formatter. Note that the
# acceptable Locator and Formatter classes are a bit different from
# mpl's, and you cannot directly use mpl's Locator and
# Formatter here (but that may become possible in the future).
grid_helper = GridHelperCurveLinear(tr,
extreme_finder=extreme_finder,
grid_locator1=grid_locator1,
tick_formatter1=tick_formatter1
)
ax1 = SubplotHost(fig, 1, 1, 1, grid_helper=grid_helper)
for axis in list(six.itervalues(ax1.axis)):
axis.set_visible(False)
fig.add_subplot(ax1)
grid_helper = ax1.get_grid_helper()
ax1.axis["lat1"] = axis = grid_helper.new_floating_axis(0, 130,
axes=ax1,
axis_direction="left"
)
axis.label.set_text("Test")
axis.label.set_visible(True)
axis.get_helper()._extremes=0.001, 10
grid_helper = ax1.get_grid_helper()
ax1.axis["lat2"] = axis = grid_helper.new_floating_axis(0, 50, axes=ax1,
axis_direction="right")
axis.label.set_text("Test")
axis.label.set_visible(True)
axis.get_helper()._extremes=0.001, 10
ax1.axis["lon"] = axis = grid_helper.new_floating_axis(1, 10,
axes=ax1,
axis_direction="bottom")
axis.label.set_text("Test 2")
axis.get_helper()._extremes= 50, 130
axis.major_ticklabels.set_axis_direction("top")
axis.label.set_axis_direction("top")
grid_helper.grid_finder.grid_locator1.den = 5
grid_helper.grid_finder.grid_locator2._nbins = 5
# # A parasite axes with given transform
# ax2 = ParasiteAxesAuxTrans(ax1, tr, "equal")
# # note that ax2.transData == tr + ax1.transData
# # Anything you draw in ax2 will match the ticks and grids of ax1.
# ax1.parasites.append(ax2)
# intp = cbook.simple_linear_interpolation
# ax2.plot(intp(np.array([0, 30]), 50),
# intp(np.array([10., 10.]), 50))
ax1.set_aspect(1.)
ax1.set_xlim(-5, 12)
ax1.set_ylim(-5, 10)
ax1.grid(True)
if __name__ == "__main__":
import matplotlib.pyplot as plt
fig = plt.figure(1, figsize=(5, 5))
fig.clf()
#test3()
#curvelinear_test2(fig)
curvelinear_test3(fig)
#plt.draw()
plt.show()
|
|
"""`Percept`"""
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.axes import Subplot
from matplotlib.animation import FuncAnimation
from math import isclose
import imageio
import logging
from skimage import img_as_uint
from skimage.transform import resize
from ..utils import Data, Grid2D, deprecated, unique
class Percept(Data):
"""Visual percept
A visual percept in space and time (optional). Typically the output of a
computational model.
.. versionadded:: 0.6
Parameters
----------
data : 3D NumPy array
A NumPy array specifying the percept in (Y, X, T) dimensions
space : :py:class:`~pulse2percept.utils.Grid2D`
A grid object specifying the (x,y) coordinates in space
time : 1D array
A list of time points
metadata : dict, optional
Additional stimulus metadata can be stored in a dictionary.
"""
def __init__(self, data, space=None, time=None, metadata=None):
xdva = None
ydva = None
if space is not None:
if not isinstance(space, Grid2D):
raise TypeError("'space' must be a Grid2D object, not "
"%s." % type(space))
xdva = space._xflat
ydva = space._yflat
if time is not None:
time = np.array([time]).flatten()
self._internal = {
'data': data,
'axes': [('ydva', ydva), ('xdva', xdva), ('time', time)],
'metadata': metadata
}
def __getitem__(self, key):
return self.data[key]
def argmax(self, axis=None):
"""Return the indices of the maximum values along an axis
Parameters
----------
axis : None or 'frames'
Axis along which to operate.
By default, the index of the brightest pixel is returned.
Set ``axis='frames'`` to get the index of the brightest frame.
Returns
-------
argmax : ndarray or scalar
Indices at which the maxima of ``percept.data`` along an axis occur.
If `axis` is None, the result is a scalar value.
If `axis` is 'frames', the result is the time of the brightest
frame.
"""
if axis is not None and not isinstance(axis, str):
raise TypeError('"axis" must be a string or None.')
if axis is None:
return self.data.argmax()
elif axis.lower() == 'frames':
return np.argmax(np.max(self.data, axis=(0, 1)))
raise ValueError('Unknown axis value "%s". Use "frames" or '
'None.' % axis)
def max(self, axis=None):
"""Brightest pixel or frame
Parameters
----------
axis : None or 'frames'
Axis along which to operate.
By default, the value of the brightest pixel is returned.
Set ``axis='frames'`` to get the brightest frame.
Returns
-------
pmax : ndarray or scalar
Maximum of ``percept.data``.
If `axis` is None, the result is a scalar value.
If `axis` is 'frames', the result is the brightest frame.
"""
if axis is not None and not isinstance(axis, str):
raise TypeError('"axis" must be a string or None.')
if axis is None:
return self.data.max()
elif axis.lower() == 'frames':
return self.data[..., self.argmax(axis='frames')]
raise ValueError('Unknown axis value "%s". Use "frames" or '
'None.' % axis)
@deprecated(deprecated_version='0.7', removed_version='0.8',
alt_func='percept.max()')
def get_brightest_frame(self):
"""Return the brightest frame
Looks for the brightest pixel in the percept, determines at what point
in time it happened, and returns all brightness values at that point
in a 2D NumPy array
Returns
-------
frame : 2D NumPy array
A slice ``percept.data[..., tmax]`` where ``tmax`` is the time at
which the percept reached its maximum brightness.
"""
return self.data[..., np.argmax(np.max(self.data, axis=(0, 1)))]
def rewind(self):
"""Rewind the iterator"""
self._next_frame = 0
def __iter__(self):
"""Iterate over all frames in self.data"""
self.rewind()
return self
def __next__(self):
"""Returns the next frame when iterating over all frames"""
this_frame = self._next_frame
if this_frame >= self.data.shape[-1]:
raise StopIteration
self._next_frame += 1
return self.data[..., this_frame]
def plot(self, kind='pcolor', ax=None, **kwargs):
"""Plot the percept
For a spatial percept, will plot the perceived brightness across the
x, y grid.
For a temporal percept, will plot the evolution of perceived brightness
over time.
For a spatiotemporal percept, will plot the brightest frame.
Use ``percept.play()`` to animate the percept across time points.
Parameters
----------
kind : { 'pcolor', 'hex' }, optional
Kind of plot to draw:
* 'pcolor': using Matplotlib's ``pcolor``. Additional parameters
(e.g., ``vmin``, ``vmax``) can be passed as keyword arguments.
* 'hex': using Matplotlib's ``hexbin``. Additional parameters
(e.g., ``gridsize``) can be passed as keyword arguments.
ax : matplotlib.axes.AxesSubplot, optional
A Matplotlib axes object. If None, will either use the current axes
(if one exists) or create a new Axes object.
**kwargs :
Other optional arguments passed down to the Matplotlib function
Returns
-------
ax : matplotlib.axes.Axes
Returns the axes with the plot on it
"""
if ax is None:
ax = plt.gca()
if 'figsize' in kwargs:
ax.figure.set_size_inches(kwargs['figsize'])
else:
if not isinstance(ax, Subplot):
raise TypeError("'ax' must be a Matplotlib axis, not "
"%s." % type(ax))
if self.xdva is None and self.ydva is None and self.time is not None:
# Special case of a purely temporal percept:
ax.plot(self.time, self.data.squeeze(), linewidth=2, **kwargs)
ax.set_xlabel('time (ms)')
ax.set_ylabel('Perceived brightness (a.u.)')
return ax
# A spatial or spatiotemporal percept: Find the brightest frame
idx = np.argmax(np.max(self.data, axis=(0, 1)))
frame = self.data[..., idx]
vmin = kwargs['vmin'] if 'vmin' in kwargs.keys() else frame.min()
vmax = kwargs['vmax'] if 'vmax' in kwargs.keys() else frame.max()
cmap = kwargs['cmap'] if 'cmap' in kwargs.keys() else 'gray'
shading = kwargs['shading'] if 'shading' in kwargs.keys() else 'nearest'
X, Y = np.meshgrid(self.xdva, self.ydva, indexing='xy')
if kind == 'pcolor':
# Create a pseudocolor plot. Make sure to pass additional keyword
# arguments that have not already been extracted:
other_kwargs = {key: kwargs[key]
for key in (kwargs.keys() - ['figsize', 'cmap',
'vmin', 'vmax'])}
ax.pcolormesh(X, Y, np.flipud(frame), cmap=cmap, vmin=vmin,
vmax=vmax, shading=shading, **other_kwargs)
elif kind == 'hex':
# Create a hexbin plot:
gridsize = kwargs['gridsize'] if 'gridsize' in kwargs else 80
# X, Y = np.meshgrid(self.xdva, self.ydva, indexing='xy')
# Make sure to pass additional keyword arguments that have not
# already been extracted:
other_kwargs = {key: kwargs[key]
for key in (kwargs.keys() - ['figsize', 'cmap',
'gridsize', 'vmin',
'vmax'])}
ax.hexbin(X.ravel(), Y.ravel()[::-1], frame.ravel(),
cmap=cmap, gridsize=gridsize, vmin=vmin, vmax=vmax,
**other_kwargs)
else:
raise ValueError("Unknown plot option '%s'. Choose either 'pcolor'"
"or 'hex'." % kind)
ax.set_aspect('equal', adjustable='box')
ax.set_xlim(self.xdva[0], self.xdva[-1])
ax.set_xticks(np.linspace(self.xdva[0], self.xdva[-1], num=5))
ax.set_xlabel('x (degrees of visual angle)')
ax.set_ylim(self.ydva[0], self.ydva[-1])
ax.set_yticks(np.linspace(self.ydva[0], self.ydva[-1], num=5))
ax.set_ylabel('y (degrees of visual angle)')
return ax
def play(self, fps=None, repeat=True, annotate_time=True, ax=None):
"""Animate the percept as HTML with JavaScript
The percept will be played in an interactive player in IPython or
Jupyter Notebook.
Parameters
----------
fps : float or None
If None, uses the percept's time axis. Not supported for
non-homogeneous time axis.
repeat : bool, optional
Whether the animation should repeat when the sequence of frames is
completed.
annotate_time : bool, optional
If True, the time of the frame will be shown as t = X ms in the
title of the panel.
ax : matplotlib.axes.AxesSubplot, optional
A Matplotlib axes object. If None, will create a new Axes object
Returns
-------
ani : matplotlib.animation.FuncAnimation
A Matplotlib animation object that will play the percept
frame-by-frame.
"""
def update(data):
if annotate_time:
mat.axes.set_title('t = %d ms' %
self.time[self._next_frame - 1])
mat.set_data(data)
return mat
def data_gen():
try:
self.rewind()
# Advance to the next frame:
while True:
yield next(self)
except StopIteration:
# End of the sequence, exit:
pass
if self.time is None:
raise ValueError("Cannot animate a percept with time=None. Use "
"percept.plot() instead.")
# There are several options to animate a percept in Jupyter/IPython
# (see https://stackoverflow.com/a/46878531). Displaying the animation
# as HTML with JavaScript is compatible with most browsers and even
# %matplotlib inline (although it can be kind of slow):
plt.rcParams["animation.html"] = 'jshtml'
if ax is None:
fig, ax = plt.subplots(figsize=(8, 5))
else:
fig = ax.figure
# Rewind the percept and show an empty frame:
self.rewind()
mat = ax.imshow(np.zeros_like(self.data[..., 0]), cmap='gray',
vmax=self.data.max())
cbar = fig.colorbar(mat)
cbar.ax.set_ylabel('Phosphene brightness (a.u.)', rotation=-90,
va='center')
plt.close(fig)
if fps is None:
interval = unique(np.diff(self.time))
if len(interval) > 1:
raise NotImplementedError
interval = interval[0]
else:
interval = 1000.0 / fps
# Create the animation:
return FuncAnimation(fig, update, data_gen, interval=interval,
save_count=len(self.time), repeat=repeat)
def save(self, fname, shape=None, fps=None):
"""Save the percept as an MP4 or GIF
Parameters
----------
fname : str
The filename to be created, with the file extension indicating the
file type. Percepts with time=None can be saved as images (e.g.,
'.jpg', '.png', '.gif'). Multi-frame percepts can be saved as
movies (e.g., '.mp4', '.avi', '.mov') or '.gif'.
shape : (height, width) or None, optional
The desired width x height of the resulting image/video.
Use (h, None) to use a specified height and automatically infer the
width from the percept's aspect ratio.
Analogously, use (None, w) to use a specified width.
If shape is None, width will be set to 320px and height will be
inferred accordingly.
fps : float or None
If None, uses the percept's time axis. Not supported for
non-homogeneous time axis.
Notes
-----
* ``shape`` will be adjusted so that width and height are multiples
of 16 to ensure compatibility with most codecs and players.
"""
data = self.data - self.data.min()
if not isclose(np.max(data), 0):
data = data / np.max(data)
data = img_as_uint(data)
if shape is None:
# Use 320px width and infer height from aspect ratio:
shape = (None, 320)
height, width = shape
if height is None and width is None:
raise ValueError('If shape is a tuple, must specify either height '
'or width or both.')
# Infer height or width if necessary:
if height is None and width is not None:
height = width / self.data.shape[1] * self.data.shape[0]
elif height is not None and width is None:
width = height / self.data.shape[0] * self.data.shape[1]
# Rescale percept to desired shape:
data = resize(data, (np.int32(height), np.int32(width)))
if self.time is None:
# No time component, store as an image. imwrite will automatically
# scale the gray levels:
imageio.imwrite(fname, data)
else:
# With time component, store as a movie:
if fps is None:
interval = unique(np.diff(self.time))
if len(interval) > 1:
raise NotImplementedError
fps = 1000.0 / interval[0]
imageio.mimwrite(fname, data.transpose((2, 0, 1)), fps=fps)
logging.getLogger(__name__).info('Created %s.' % fname)
|
|
import numpy as np
from veros import VerosSetup, veros_routine
from veros.variables import allocate, Variable
from veros.core.operators import numpy as npx, update, at
from veros.pyom_compat import load_pyom, setup_pyom
from test_base import compare_state
yt_start = -39.0
yt_end = 43
yu_start = -40.0
yu_end = 42
def set_parameter_pyom(pyom_obj):
m = pyom_obj.main_module
(m.nx, m.ny, m.nz) = (30, 42, 15)
m.dt_mom = 4800
m.dt_tracer = 86400 / 2.0
m.runlen = 86400 * 365
m.coord_degree = 1
m.enable_cyclic_x = 1
m.congr_epsilon = 1e-8
m.congr_max_iterations = 10_000
i = pyom_obj.isoneutral_module
i.enable_neutral_diffusion = 1
i.k_iso_0 = 1000.0
i.k_iso_steep = 500.0
i.iso_dslope = 0.005
i.iso_slopec = 0.01
i.enable_skew_diffusion = 1
m.enable_hor_friction = 1
m.a_h = 2.2e5
m.enable_hor_friction_cos_scaling = 1
m.hor_friction_cospower = 1
m.enable_bottom_friction = 1
m.r_bot = 1e-5
m.enable_implicit_vert_friction = 1
t = pyom_obj.tke_module
t.enable_tke = 1
t.c_k = 0.1
t.c_eps = 0.7
t.alpha_tke = 30.0
t.mxl_min = 1e-8
t.tke_mxl_choice = 2
t.kappam_min = 2e-4
i.k_gm_0 = 1000.0
e = pyom_obj.eke_module
e.enable_eke = 1
e.eke_k_max = 1e4
e.eke_c_k = 0.4
e.eke_c_eps = 0.5
e.eke_cross = 2.0
e.eke_crhin = 1.0
e.eke_lmin = 100.0
e.enable_eke_superbee_advection = 1
e.enable_eke_isopycnal_diffusion = 1
i = pyom_obj.idemix_module
i.enable_idemix = 1
i.enable_idemix_hor_diffusion = 1
i.enable_eke_diss_surfbot = 1
i.eke_diss_surfbot_frac = 0.2
i.enable_idemix_superbee_advection = 1
i.tau_v = 86400.0
i.jstar = 10.0
i.mu0 = 4.0 / 3.0
m.eq_of_state_type = 3
def set_grid_pyom(pyom_obj):
m = pyom_obj.main_module
ddz = [50.0, 70.0, 100.0, 140.0, 190.0, 240.0, 290.0, 340.0, 390.0, 440.0, 490.0, 540.0, 590.0, 640.0, 690.0]
m.dxt[:] = 2.0
m.dyt[:] = 2.0
m.x_origin = 0.0
m.y_origin = -40.0
m.dzt[:] = ddz[::-1]
m.dzt[:] *= 1 / 2.5
def set_coriolis_pyom(pyom_obj):
m = pyom_obj.main_module
m.coriolis_t[:, :] = 2 * m.omega * np.sin(m.yt[None, :] / 180.0 * np.pi)
def set_topography_pyom(pyom_obj):
m = pyom_obj.main_module
(X, Y) = np.meshgrid(m.xt, m.yt)
X = X.transpose()
Y = Y.transpose()
m.kbot[...] = (X > 1.0) | (Y < -20)
def set_initial_conditions_pyom(pyom_obj):
m = pyom_obj.main_module
# initial conditions
m.temp[:, :, :, :] = ((1 - m.zt[None, None, :] / m.zw[0]) * 15 * m.maskt)[..., None]
m.salt[:, :, :, :] = 35.0 * m.maskt[..., None]
# wind stress forcing
taux = np.zeros(m.ny + 1)
yt = m.yt[2 : m.ny + 3]
taux = (0.1e-3 * np.sin(np.pi * (m.yu[2 : m.ny + 3] - yu_start) / (-20.0 - yt_start))) * (yt < -20) + (
0.1e-3 * (1 - np.cos(2 * np.pi * (m.yu[2 : m.ny + 3] - 10.0) / (yu_end - 10.0)))
) * (yt > 10)
m.surface_taux[:, 2 : m.ny + 3] = taux * m.masku[:, 2 : m.ny + 3, -1]
t = pyom_obj.tke_module
t.forc_tke_surface[2:-2, 2:-2] = (
np.sqrt(
(0.5 * (m.surface_taux[2:-2, 2:-2] + m.surface_taux[1:-3, 2:-2])) ** 2
+ (0.5 * (m.surface_tauy[2:-2, 2:-2] + m.surface_tauy[2:-2, 1:-3])) ** 2
)
** 1.5
)
def set_forcing_pyom(pyom_obj):
m = pyom_obj.main_module
t_star = (
15 * np.invert((m.yt < -20) | (m.yt > 20))
+ 15 * (m.yt - yt_start) / (-20 - yt_start) * (m.yt < -20)
+ 15 * (1 - (m.yt - 20) / (yt_end - 20)) * (m.yt > 20.0)
)
t_rest = m.dzt[None, -1] / (30.0 * 86400.0) * m.maskt[:, :, -1]
m.forc_temp_surface = t_rest * (t_star - m.temp[:, :, -1, m.tau - 1])
class ACCSetup(VerosSetup):
@veros_routine
def set_parameter(self, state):
settings = state.settings
settings.identifier = "acc"
settings.nx, settings.ny, settings.nz = 30, 42, 15
settings.dt_mom = 4800
settings.dt_tracer = 86400 / 2.0
settings.runlen = 86400 * 365
settings.x_origin = 0.0
settings.y_origin = -40.0
settings.coord_degree = True
settings.enable_cyclic_x = True
settings.enable_neutral_diffusion = True
settings.K_iso_0 = 1000.0
settings.K_iso_steep = 500.0
settings.iso_dslope = 0.005
settings.iso_slopec = 0.01
settings.enable_skew_diffusion = True
settings.enable_hor_friction = True
settings.A_h = 2.2e5
settings.enable_hor_friction_cos_scaling = True
settings.hor_friction_cosPower = 1
settings.enable_bottom_friction = True
settings.r_bot = 1e-5
settings.enable_implicit_vert_friction = True
settings.enable_tke = True
settings.c_k = 0.1
settings.c_eps = 0.7
settings.alpha_tke = 30.0
settings.mxl_min = 1e-8
settings.tke_mxl_choice = 2
settings.kappaM_min = 2e-4
settings.K_gm_0 = 1000.0
settings.enable_eke = True
settings.eke_k_max = 1e4
settings.eke_c_k = 0.4
settings.eke_c_eps = 0.5
settings.eke_cross = 2.0
settings.eke_crhin = 1.0
settings.eke_lmin = 100.0
settings.enable_eke_superbee_advection = True
settings.enable_eke_isopycnal_diffusion = True
settings.enable_idemix = 1
settings.enable_idemix_hor_diffusion = 1
settings.enable_eke_diss_surfbot = 1
settings.eke_diss_surfbot_frac = 0.2
settings.enable_idemix_superbee_advection = 1
settings.tau_v = 86400.0
settings.jstar = 10.0
settings.mu0 = 4.0 / 3.0
settings.eq_of_state_type = 3
var_meta = state.var_meta
var_meta.update(
t_star=Variable("t_star", ("yt",), "deg C", "Reference surface temperature"),
t_rest=Variable("t_rest", ("xt", "yt"), "1/s", "Surface temperature restoring time scale"),
)
@veros_routine
def set_grid(self, state):
vs = state.variables
ddz = npx.array(
[50.0, 70.0, 100.0, 140.0, 190.0, 240.0, 290.0, 340.0, 390.0, 440.0, 490.0, 540.0, 590.0, 640.0, 690.0]
)
vs.dxt = update(vs.dxt, at[...], 2.0)
vs.dyt = update(vs.dyt, at[...], 2.0)
vs.dzt = update(vs.dzt, at[...], ddz[::-1] / 2.5)
@veros_routine
def set_coriolis(self, state):
vs = state.variables
settings = state.settings
vs.coriolis_t = update(
vs.coriolis_t, at[...], 2 * settings.omega * npx.sin(vs.yt[None, :] / 180.0 * settings.pi)
)
@veros_routine
def set_topography(self, state):
vs = state.variables
x, y = npx.meshgrid(vs.xt, vs.yt, indexing="ij")
vs.kbot = npx.logical_or(x > 1.0, y < -20).astype("int")
@veros_routine
def set_initial_conditions(self, state):
vs = state.variables
settings = state.settings
# initial conditions
vs.temp = update(vs.temp, at[...], ((1 - vs.zt[None, None, :] / vs.zw[0]) * 15 * vs.maskT)[..., None])
vs.salt = update(vs.salt, at[...], 35.0 * vs.maskT[..., None])
# wind stress forcing
taux = allocate(state.dimensions, ("yt",))
taux = npx.where(vs.yt < -20, 0.1e-3 * npx.sin(settings.pi * (vs.yu - yu_start) / (-20.0 - yt_start)), taux)
taux = npx.where(vs.yt > 10, 0.1e-3 * (1 - npx.cos(2 * settings.pi * (vs.yu - 10.0) / (yu_end - 10.0))), taux)
vs.surface_taux = taux * vs.maskU[:, :, -1]
# surface heatflux forcing
vs.t_star = allocate(state.dimensions, ("yt",), fill=15)
vs.t_star = npx.where(vs.yt < -20, 15 * (vs.yt - yt_start) / (-20 - yt_start), vs.t_star)
vs.t_star = npx.where(vs.yt > 20, 15 * (1 - (vs.yt - 20) / (yt_end - 20)), vs.t_star)
vs.t_rest = vs.dzt[npx.newaxis, -1] / (30.0 * 86400.0) * vs.maskT[:, :, -1]
if settings.enable_tke:
vs.forc_tke_surface = update(
vs.forc_tke_surface,
at[2:-2, 2:-2],
npx.sqrt(
(0.5 * (vs.surface_taux[2:-2, 2:-2] + vs.surface_taux[1:-3, 2:-2])) ** 2
+ (0.5 * (vs.surface_tauy[2:-2, 2:-2] + vs.surface_tauy[2:-2, 1:-3])) ** 2
)
** (1.5),
)
@veros_routine
def set_forcing(self, state):
vs = state.variables
vs.forc_temp_surface = vs.t_rest * (vs.t_star - vs.temp[:, :, -1, vs.tau])
@veros_routine
def set_diagnostics(self, state):
pass
@veros_routine
def after_timestep(self, state):
pass
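# Run the same ACC setup through the reference pyOM2 library and through
# Veros, then compare the resulting model states.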
def test_acc_setup(pyom2_lib):
pyom_obj = load_pyom(pyom2_lib)
setup_pyom(
pyom_obj,
set_parameter_pyom,
set_grid_pyom,
set_coriolis_pyom,
set_topography_pyom,
set_initial_conditions_pyom,
set_forcing_pyom,
)
sim = ACCSetup()
sim.setup()
compare_state(sim.state, pyom_obj, allowed_failures=("rho", "Hd"))
compare_state(sim.state, pyom_obj, atol=1e-5)
|
|
"Test posix functions"
from test import support
try:
import posix
except ImportError:
raise support.TestSkipped("posix is not available")
import time
import os
import pwd
import shutil
import unittest
import warnings
warnings.filterwarnings('ignore', '.* potential security risk .*',
RuntimeWarning)
class PosixTester(unittest.TestCase):
def setUp(self):
# create empty file
fp = open(support.TESTFN, 'w+')
fp.close()
def tearDown(self):
support.unlink(support.TESTFN)
def testNoArgFunctions(self):
# test posix functions which take no arguments and have
# no side-effects which we need to clean up (e.g., fork, wait, abort)
NO_ARG_FUNCTIONS = [ "ctermid", "getcwd", "getcwdb", "uname",
"times", "getloadavg",
"getegid", "geteuid", "getgid", "getgroups",
"getpid", "getpgrp", "getppid", "getuid",
]
for name in NO_ARG_FUNCTIONS:
posix_func = getattr(posix, name, None)
if posix_func is not None:
posix_func()
self.assertRaises(TypeError, posix_func, 1)
def test_statvfs(self):
if hasattr(posix, 'statvfs'):
self.assert_(posix.statvfs(os.curdir))
def test_fstatvfs(self):
if hasattr(posix, 'fstatvfs'):
fp = open(support.TESTFN)
try:
self.assert_(posix.fstatvfs(fp.fileno()))
finally:
fp.close()
def test_ftruncate(self):
if hasattr(posix, 'ftruncate'):
fp = open(support.TESTFN, 'w+')
try:
# we need to have some data to truncate
fp.write('test')
fp.flush()
posix.ftruncate(fp.fileno(), 0)
finally:
fp.close()
def test_dup(self):
if hasattr(posix, 'dup'):
fp = open(support.TESTFN)
try:
fd = posix.dup(fp.fileno())
self.assert_(isinstance(fd, int))
os.close(fd)
finally:
fp.close()
def test_confstr(self):
if hasattr(posix, 'confstr'):
self.assertRaises(ValueError, posix.confstr, "CS_garbage")
self.assertEqual(len(posix.confstr("CS_PATH")) > 0, True)
def test_dup2(self):
if hasattr(posix, 'dup2'):
fp1 = open(support.TESTFN)
fp2 = open(support.TESTFN)
try:
posix.dup2(fp1.fileno(), fp2.fileno())
finally:
fp1.close()
fp2.close()
def test_osexlock(self):
if hasattr(posix, "O_EXLOCK"):
fd = os.open(support.TESTFN,
os.O_WRONLY|os.O_EXLOCK|os.O_CREAT)
self.assertRaises(OSError, os.open, support.TESTFN,
os.O_WRONLY|os.O_EXLOCK|os.O_NONBLOCK)
os.close(fd)
if hasattr(posix, "O_SHLOCK"):
fd = os.open(support.TESTFN,
os.O_WRONLY|os.O_SHLOCK|os.O_CREAT)
self.assertRaises(OSError, os.open, support.TESTFN,
os.O_WRONLY|os.O_EXLOCK|os.O_NONBLOCK)
os.close(fd)
def test_osshlock(self):
if hasattr(posix, "O_SHLOCK"):
fd1 = os.open(support.TESTFN,
os.O_WRONLY|os.O_SHLOCK|os.O_CREAT)
fd2 = os.open(support.TESTFN,
os.O_WRONLY|os.O_SHLOCK|os.O_CREAT)
os.close(fd2)
os.close(fd1)
if hasattr(posix, "O_EXLOCK"):
fd = os.open(support.TESTFN,
os.O_WRONLY|os.O_SHLOCK|os.O_CREAT)
self.assertRaises(OSError, os.open, support.TESTFN,
os.O_RDONLY|os.O_EXLOCK|os.O_NONBLOCK)
os.close(fd)
def test_fstat(self):
if hasattr(posix, 'fstat'):
fp = open(support.TESTFN)
try:
self.assert_(posix.fstat(fp.fileno()))
finally:
fp.close()
def test_stat(self):
if hasattr(posix, 'stat'):
self.assert_(posix.stat(support.TESTFN))
if hasattr(posix, 'chown'):
def test_chown(self):
# raise an OSError if the file does not exist
os.unlink(support.TESTFN)
self.assertRaises(OSError, posix.chown, support.TESTFN, -1, -1)
# re-create the file
open(support.TESTFN, 'w').close()
if os.getuid() == 0:
try:
# Many linux distros have a nfsnobody user as MAX_UID-2
# that makes a good test case for signedness issues.
# http://bugs.python.org/issue1747858
# This part of the test only runs when run as root.
# Only scary people run their tests as root.
ent = pwd.getpwnam('nfsnobody')
posix.chown(support.TESTFN, ent.pw_uid, ent.pw_gid)
except KeyError:
pass
else:
# non-root cannot chown to root, raises OSError
self.assertRaises(OSError, posix.chown,
support.TESTFN, 0, 0)
# test a successful chown call
posix.chown(support.TESTFN, os.getuid(), os.getgid())
def test_chdir(self):
if hasattr(posix, 'chdir'):
posix.chdir(os.curdir)
self.assertRaises(OSError, posix.chdir, support.TESTFN)
def test_listdir(self):
if hasattr(posix, 'listdir'):
self.assert_(support.TESTFN in posix.listdir(os.curdir))
def test_access(self):
if hasattr(posix, 'access'):
self.assert_(posix.access(support.TESTFN, os.R_OK))
def test_umask(self):
if hasattr(posix, 'umask'):
old_mask = posix.umask(0)
self.assert_(isinstance(old_mask, int))
posix.umask(old_mask)
def test_strerror(self):
if hasattr(posix, 'strerror'):
self.assert_(posix.strerror(0))
def test_pipe(self):
if hasattr(posix, 'pipe'):
reader, writer = posix.pipe()
os.close(reader)
os.close(writer)
def test_utime(self):
if hasattr(posix, 'utime'):
now = time.time()
posix.utime(support.TESTFN, None)
self.assertRaises(TypeError, posix.utime, support.TESTFN, (None, None))
self.assertRaises(TypeError, posix.utime, support.TESTFN, (now, None))
self.assertRaises(TypeError, posix.utime, support.TESTFN, (None, now))
posix.utime(support.TESTFN, (int(now), int(now)))
posix.utime(support.TESTFN, (now, now))
def test_chflags(self):
if hasattr(posix, 'chflags'):
st = os.stat(support.TESTFN)
if hasattr(st, 'st_flags'):
posix.chflags(support.TESTFN, st.st_flags)
def test_lchflags(self):
if hasattr(posix, 'lchflags'):
st = os.stat(support.TESTFN)
if hasattr(st, 'st_flags'):
posix.lchflags(support.TESTFN, st.st_flags)
def test_environ(self):
for k, v in posix.environ.items():
self.assertEqual(type(k), str)
self.assertEqual(type(v), str)
def test_getcwd_long_pathnames(self):
if hasattr(posix, 'getcwd'):
dirname = 'getcwd-test-directory-0123456789abcdef-01234567890abcdef'
curdir = os.getcwd()
base_path = os.path.abspath(support.TESTFN) + '.getcwd'
try:
os.mkdir(base_path)
os.chdir(base_path)
except:
# Just returning nothing instead of the TestSkipped exception,
# because the test results in Error in that case.
# Is that ok?
# raise support.TestSkipped, "cannot create directory for testing"
return
def _create_and_do_getcwd(dirname, current_path_length = 0):
try:
os.mkdir(dirname)
except:
raise support.TestSkipped("mkdir cannot create directory sufficiently deep for getcwd test")
os.chdir(dirname)
try:
os.getcwd()
if current_path_length < 1027:
_create_and_do_getcwd(dirname, current_path_length + len(dirname) + 1)
finally:
os.chdir('..')
os.rmdir(dirname)
_create_and_do_getcwd(dirname)
finally:
support.rmtree(base_path)
os.chdir(curdir)
def test_main():
support.run_unittest(PosixTester)
if __name__ == '__main__':
test_main()
|
|
"""KDDCUP 99 dataset.
A classic dataset for anomaly detection.
The dataset page is available from UCI Machine Learning Repository
https://archive.ics.uci.edu/ml/machine-learning-databases/kddcup99-mld/kddcup.data.gz
"""
import sys
import errno
from gzip import GzipFile
from io import BytesIO
import logging
import os
from os.path import exists, join
try:
from urllib2 import urlopen
except ImportError:
from urllib.request import urlopen
import numpy as np
from .base import get_data_home
from .base import Bunch
from ..externals import joblib, six
from ..utils import check_random_state
from ..utils import shuffle as shuffle_method
URL10 = ('http://archive.ics.uci.edu/ml/'
'machine-learning-databases/kddcup99-mld/kddcup.data_10_percent.gz')
URL = ('http://archive.ics.uci.edu/ml/'
'machine-learning-databases/kddcup99-mld/kddcup.data.gz')
logger = logging.getLogger()
def fetch_kddcup99(subset=None, shuffle=False, random_state=None,
percent10=True, download_if_missing=True):
"""Load and return the kddcup 99 dataset (classification).
The KDD Cup '99 dataset was created by processing the tcpdump portions
of the 1998 DARPA Intrusion Detection System (IDS) Evaluation dataset,
created by MIT Lincoln Lab [1]. The artificial data was generated using
a closed network and hand-injected attacks to produce a large number of
different types of attack with normal activity in the background.
As the initial goal was to produce a large training set for supervised
learning algorithms, there is a large proportion (80.1%) of abnormal
data, which is unrealistic in the real world and inappropriate for
unsupervised anomaly detection, whose aim is to detect 'abnormal' data,
i.e. data that is
1) qualitatively different from normal data, and
2) in a large minority among the observations.
We thus transform the KDD data set into two different data sets: SA and SF.
- SA is obtained by simply selecting all the normal data, and a small
proportion of abnormal data to give an anomaly proportion of 1%.
- SF is obtained as in [2]
by simply picking up the data whose attribute logged_in is positive, thus
focusing on the intrusion attack, which gives a proportion of 0.3% of
attacks.
- http and smtp are two subsets of SF corresponding to the third feature
being equal to 'http' (resp. 'smtp')
General KDD structure :
================ ==========================================
Samples total 4898431
Dimensionality 41
Features discrete (int) or continuous (float)
Targets str, 'normal.' or name of the anomaly type
================ ==========================================
SA structure :
================ ==========================================
Samples total 976158
Dimensionality 41
Features discrete (int) or continuous (float)
Targets str, 'normal.' or name of the anomaly type
================ ==========================================
SF structure :
================ ==========================================
Samples total 699691
Dimensionality 4
Features discrete (int) or continuous (float)
Targets str, 'normal.' or name of the anomaly type
================ ==========================================
http structure :
================ ==========================================
Samples total 619052
Dimensionality 3
Features discrete (int) or continuous (float)
Targets str, 'normal.' or name of the anomaly type
================ ==========================================
smtp structure :
================ ==========================================
Samples total 95373
Dimensionality 3
Features discrete (int) or continuous (float)
Targets str, 'normal.' or name of the anomaly type
================ ==========================================
.. versionadded:: 0.18
Parameters
----------
subset : None, 'SA', 'SF', 'http', 'smtp'
To return the corresponding classical subsets of kddcup 99.
If None, return the entire kddcup 99 dataset.
random_state : int, RandomState instance or None, optional (default=None)
Random state for shuffling the dataset.
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
shuffle : bool, default=False
Whether to shuffle dataset.
percent10 : bool, default=True
Whether to load only 10 percent of the data.
download_if_missing : bool, default=True
If False, raise an IOError if the data is not locally available
instead of trying to download the data from the source site.
Returns
-------
data : Bunch
Dictionary-like object, the interesting attributes are:
'data', the data to learn and 'target', the regression target for each
sample.
References
----------
.. [1] Analysis and Results of the 1999 DARPA Off-Line Intrusion
Detection Evaluation Richard Lippmann, Joshua W. Haines,
David J. Fried, Jonathan Korba, Kumar Das
.. [2] K. Yamanishi, J.-I. Takeuchi, G. Williams, and P. Milne. Online
unsupervised outlier detection using finite mixtures with
discounting learning algorithms. In Proceedings of the sixth
ACM SIGKDD international conference on Knowledge discovery
and data mining, pages 320-324. ACM Press, 2000.
"""
kddcup99 = _fetch_brute_kddcup99(shuffle=shuffle, percent10=percent10,
download_if_missing=download_if_missing)
data = kddcup99.data
target = kddcup99.target
if subset == 'SA':
s = target == b'normal.'
t = np.logical_not(s)
normal_samples = data[s, :]
normal_targets = target[s]
abnormal_samples = data[t, :]
abnormal_targets = target[t]
n_samples_abnormal = abnormal_samples.shape[0]
# selected abnormal samples:
random_state = check_random_state(random_state)
r = random_state.randint(0, n_samples_abnormal, 3377)
abnormal_samples = abnormal_samples[r]
abnormal_targets = abnormal_targets[r]
data = np.r_[normal_samples, abnormal_samples]
target = np.r_[normal_targets, abnormal_targets]
if subset == 'SF' or subset == 'http' or subset == 'smtp':
# select all samples with positive logged_in attribute:
s = data[:, 11] == 1
data = np.c_[data[s, :11], data[s, 12:]]
target = target[s]
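# Log-transform the duration, src_bytes and dst_bytes features
# (columns 0, 4 and 5 once the logged_in column has been dropped):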
data[:, 0] = np.log((data[:, 0] + 0.1).astype(float))
data[:, 4] = np.log((data[:, 4] + 0.1).astype(float))
data[:, 5] = np.log((data[:, 5] + 0.1).astype(float))
if subset == 'http':
s = data[:, 2] == b'http'
data = data[s]
target = target[s]
data = np.c_[data[:, 0], data[:, 4], data[:, 5]]
if subset == 'smtp':
s = data[:, 2] == b'smtp'
data = data[s]
target = target[s]
data = np.c_[data[:, 0], data[:, 4], data[:, 5]]
if subset == 'SF':
data = np.c_[data[:, 0], data[:, 2], data[:, 4], data[:, 5]]
return Bunch(data=data, target=target)
def _fetch_brute_kddcup99(subset=None, data_home=None,
download_if_missing=True, random_state=None,
shuffle=False, percent10=True):
"""Load the kddcup99 dataset, downloading it if necessary.
Parameters
----------
subset : None, 'SA', 'SF', 'http', 'smtp'
To return the corresponding classical subsets of kddcup 99.
If None, return the entire kddcup 99 dataset.
data_home : string, optional
Specify another download and cache folder for the datasets. By default
all scikit learn data is stored in '~/scikit_learn_data' subfolders.
download_if_missing : boolean, default=True
If False, raise an IOError if the data is not locally available
instead of trying to download the data from the source site.
random_state : int, RandomState instance or None, optional (default=None)
Random state for shuffling the dataset.
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
shuffle : bool, default=False
Whether to shuffle dataset.
percent10 : bool, default=True
Whether to load only 10 percent of the data.
Returns
-------
dataset : dict-like object with the following attributes:
dataset.data : numpy array of shape (494021, 41)
Each row corresponds to the 41 features in the dataset.
dataset.target : numpy array of shape (494021,)
Each value corresponds to one of the 21 attack types or to the
label 'normal.'.
dataset.DESCR : string
Description of the kddcup99 dataset.
"""
data_home = get_data_home(data_home=data_home)
if sys.version_info[0] == 3:
# The zlib compression format used by joblib is not compatible when
# switching from Python 2 to Python 3, so let us use a separate folder
# under Python 3:
dir_suffix = "-py3"
else:
# Backward compat for Python 2 users
dir_suffix = ""
if percent10:
kddcup_dir = join(data_home, "kddcup99_10" + dir_suffix)
else:
kddcup_dir = join(data_home, "kddcup99" + dir_suffix)
samples_path = join(kddcup_dir, "samples")
targets_path = join(kddcup_dir, "targets")
available = exists(samples_path)
if download_if_missing and not available:
_mkdirp(kddcup_dir)
URL_ = URL10 if percent10 else URL
logger.warning("Downloading %s" % URL_)
f = BytesIO(urlopen(URL_).read())
dt = [('duration', int),
('protocol_type', 'S4'),
('service', 'S11'),
('flag', 'S6'),
('src_bytes', int),
('dst_bytes', int),
('land', int),
('wrong_fragment', int),
('urgent', int),
('hot', int),
('num_failed_logins', int),
('logged_in', int),
('num_compromised', int),
('root_shell', int),
('su_attempted', int),
('num_root', int),
('num_file_creations', int),
('num_shells', int),
('num_access_files', int),
('num_outbound_cmds', int),
('is_host_login', int),
('is_guest_login', int),
('count', int),
('srv_count', int),
('serror_rate', float),
('srv_serror_rate', float),
('rerror_rate', float),
('srv_rerror_rate', float),
('same_srv_rate', float),
('diff_srv_rate', float),
('srv_diff_host_rate', float),
('dst_host_count', int),
('dst_host_srv_count', int),
('dst_host_same_srv_rate', float),
('dst_host_diff_srv_rate', float),
('dst_host_same_src_port_rate', float),
('dst_host_srv_diff_host_rate', float),
('dst_host_serror_rate', float),
('dst_host_srv_serror_rate', float),
('dst_host_rerror_rate', float),
('dst_host_srv_rerror_rate', float),
('labels', 'S16')]
DT = np.dtype(dt)
file_ = GzipFile(fileobj=f, mode='r')
Xy = []
for line in file_.readlines():
if six.PY3:
line = line.decode()
Xy.append(line.replace('\n', '').split(','))
file_.close()
print('extraction done')
Xy = np.asarray(Xy, dtype=object)
for j in range(42):
Xy[:, j] = Xy[:, j].astype(DT[j])
X = Xy[:, :-1]
y = Xy[:, -1]
# XXX bug when compress!=0:
# (error: 'Incorrect data length while decompressing[...] the file
# could be corrupted.')
joblib.dump(X, samples_path, compress=0)
joblib.dump(y, targets_path, compress=0)
elif not available:
if not download_if_missing:
raise IOError("Data not found and `download_if_missing` is False")
try:
X, y
except NameError:
X = joblib.load(samples_path)
y = joblib.load(targets_path)
if shuffle:
X, y = shuffle_method(X, y, random_state=random_state)
return Bunch(data=X, target=y, DESCR=__doc__)
def _mkdirp(d):
"""Ensure directory d exists (like mkdir -p on Unix)
No guarantee that the directory is writable.
"""
try:
os.makedirs(d)
except OSError as e:
if e.errno != errno.EEXIST:
raise
|
|
from __future__ import print_function
from __future__ import unicode_literals
import unittest
import mongoengine
from bson import DBRef, ObjectId
from pyramid import testing
from pyramid.request import Request
from mongoengine_relational.relationalmixin import set_difference, equals
from tests_mongoengine_relational.basic.documents import *
from tests_mongoengine_relational.utils import Struct
class RelationsTestCase( unittest.TestCase ):
def setUp( self ):
mongoengine.register_connection( mongoengine.DEFAULT_CONNECTION_NAME, 'mongoengine_relational_test' )
c = mongoengine.connection.get_connection()
c.drop_database( 'mongoengine_relational_test' )
# Setup application/request config
self.request = Request.blank( '/api/v1/' )
# Instantiate a DocumentCache; it will attach itself to `request.cache`.
DocumentCache( self.request )
self.config = testing.setUp( request=self.request )
# Setup data
d = self.data = Struct()
d.blijdorp = Zoo( id=ObjectId(), name='Blijdorp' )
d.office = Office( tenant=d.blijdorp )
d.bear = Animal( name='Baloo', species='bear', zoo=d.blijdorp )
d.mammoth = Animal( id=ObjectId(), name='Manny', species='mammoth' )
d.artis = Zoo( id=ObjectId(), name='Artis', animals=[ d.mammoth ] )
d.tiger = Animal( id=ObjectId(), name='Shere Khan', species='tiger', zoo=d.artis )
d.node = Node()
def tearDown( self ):
testing.tearDown()
# Clear our references
self.data = None
def test_documents_without_relations( self ):
book = Book( id=ObjectId(), author=User( name='A' ), name='B' )
page = Page()
book.pages.append( page )
book.author = User( name='B' )
def test_baselist( self ):
d = self.data
# test BaseList del/pop
del d.artis.animals[ 0 ]
d.artis.animals.pop()
self.assertEqual( d.mammoth.zoo, None )
self.assertEqual( d.tiger.zoo, None )
# test append / extend
d.artis.animals.append( d.mammoth )
d.artis.animals.extend( [ d.tiger ] )
self.assertEqual( d.mammoth.zoo, d.artis )
self.assertEqual( d.tiger.zoo, d.artis )
# test remove/insert
d.artis.animals.remove( d.tiger )
d.artis.animals.insert( 0, d.tiger )
def test_create_document( self ):
d = self.data
# Without id
self.assertEqual( 3, len( d.bear.get_changed_fields() ) )
d.bear.save( self.request )
self.assertEqual( 0, len( d.bear.get_changed_fields() ) )
# With id
self.assertEqual( 0, len( d.tiger.get_changed_fields() ) )
def test_relation_initialization( self ):
d = self.data
# since `office` doesn't have an id, `update_relations` is not called on init
self.assertNotEqual( d.office, d.blijdorp.office )
self.assertEqual( d.blijdorp, d.bear.zoo )
# relations are known on both sides
# propagated from hasone to the other side
self.assertEqual( d.artis, d.tiger.zoo )
self.assertIn( d.tiger, d.artis.animals )
# propagated from hasmany to the other side
self.assertEqual( d.mammoth.zoo, d.artis )
self.assertIn( d.mammoth, d.artis.animals )
def test_memo_initialization_no_id( self ):
d = self.data
# _memo keys have been created
self.assertIn( 'zoo', d.bear._memo_hasone )
self.assertIn( 'animals', d.blijdorp._memo_hasmany )
self.assertIn( 'office', d.blijdorp._memo_hasone )
# but since the objects were created without id, _memo shouldn't be populated
self.assertEqual( None, d.bear._memo_hasone[ 'zoo' ], "no `id`, so no memo contents initially" )
self.assertItemsEqual( [], d.blijdorp._memo_hasmany[ 'animals' ], "no `id`, so no memo contents initially" )
def test_memo_initialization_with_id( self ):
d = self.data
# the objects were created with ids, so _memo should be populated
self.assertEqual( d.artis, d.tiger._memo_hasone[ 'zoo' ], "'zoo' should be in 'tiger's memo" )
self.assertNotIn( d.tiger, d.artis._memo_hasmany[ 'animals' ], "'tiger' should not be in 'zoo's memo yet" )
d.artis.save( request=self.request )
self.assertIn( d.tiger, d.artis._memo_hasmany[ 'animals' ], "'tiger' should be in 'zoo's memo" )
def test_update_hasmany( self ):
d = self.data
print( 'artis.get_changed_fields: ', d.artis.get_changed_fields() )
self.assertEqual( 1, len( d.artis.get_changed_fields() ) )
d.artis.save( request=self.request )
self.assertEqual( 0, len( d.artis.get_changed_fields() ) )
# put 'bear' in 'artis'
d.artis.animals.append( d.bear )
self.assertEqual( 1, len( d.artis.get_changed_fields() ) )
self.assertEqual( d.bear.zoo, d.artis )
# after saving 'artis', the 'zoo' on 'bear' should be set to 'artis'
d.bear.save( self.request )
d.artis.save( self.request )
self.assertEqual( 0, len( d.artis.get_changed_fields() ) )
self.assertEqual( d.bear.zoo, d.artis )
# move the 'bear' to 'blijdorp'. It should be removed from 'artis'
d.blijdorp.animals.append( d.bear )
self.assertNotIn( d.bear, d.artis.animals )
self.assertIn( d.bear, d.blijdorp.animals )
d.blijdorp.save( request=self.request )
self.assertEqual( d.bear.zoo, d.blijdorp )
self.assertNotIn( d.bear, d.artis.animals )
self.assertIn( d.bear, d.blijdorp.animals )
# now that 'bear' is in 'blijdorp', 'tiger' wants to move to 'blijdorp' as well
d.tiger.zoo = d.blijdorp
self.assertNotIn( d.tiger, d.artis.animals )
self.assertIn( d.tiger, d.blijdorp.animals )
d.tiger.save( request=self.request )
self.assertNotIn( d.tiger, d.artis.animals )
self.assertIn( d.tiger, d.blijdorp.animals )
# Reset `d.blijdorp.animals` by assigning it an empty list
d.blijdorp.animals = []
self.assertFalse( d.bear.zoo, d.blijdorp )
self.assertNotIn( d.bear, d.blijdorp.animals )
def test_update_hasone( self ):
d = self.data
# give 'artis' an office
office = Office( id=ObjectId() )
d.artis.office = office
# 'office.tenant' has been set to 'artis' right away
self.assertEqual( office.tenant, d.artis )
d.artis.save( request=self.request )
self.assertEqual( 0, len( d.artis.get_changed_fields() ) )
self.assertEqual( office.tenant, d.artis )
# the office decides it'd rather have 'zoo' as a tenant; 'artis' are making a mess of it.
# 'office' should be added to the 'blijdorp' side, and removed from the 'artis' side after saving.
office.tenant = d.blijdorp
self.assertEqual( office.tenant, d.blijdorp )
self.assertEqual( office, d.blijdorp.office )
self.assertNotEqual( office, d.artis.office )
office.save( request=self.request )
d.artis.save( request=self.request )
self.assertEqual( office.tenant, d.blijdorp )
self.assertEqual( office, d.blijdorp.office )
self.assertNotEqual( office, d.artis.office )
def get_changed_fields( self ):
d = self.data
self.assertIn( 'zoo', d.bear.get_changed_fields() )
self.assertEqual( 0, len( d.tiger.get_changed_fields() ) )
d.tiger.zoo = d.blijdorp
self.assertIn( 'zoo', d.tiger.get_changed_fields() )
# Test `on_change` for a related field
self.assertEqual( d.blijdorp.on_change_animals_called, False )
d.blijdorp.save( request=self.request )
self.assertEqual( d.blijdorp.on_change_animals_called, True )
# Test `on_change` for a regular field
d.artis.save( request=self.request )
self.assertEqual( d.artis.on_change_name_called, False )
d.artis.name = 'New Artis'
self.assertEqual( d.artis.on_change_name_called, False )
d.artis.save( request=self.request )
self.assertEqual( d.artis.on_change_name_called, True )
def test_update_managed_relations( self ):
d = self.data
print( d.blijdorp.animals, d.blijdorp._memo_hasmany[ 'animals' ] )
self.assertNotIn( d.bear, d.blijdorp.animals, "'bear' should not be in 'zoo' yet" )
self.assertFalse( d.bear.update_relations(), "`update_relations` should return False, since `bear` doesn't have an `id` yet.")
# "save" bear by giving it an id, and running `update_relations`.
d.bear.save( request=self.request )
self.assertEqual( d.blijdorp, d.bear._memo_hasone[ 'zoo' ], "'zoo' memoized in 'bear's _memo_hasone now" )
self.assertIn( d.bear, d.blijdorp.animals, "'bear' should be in 'zoo' now" )
try:
d.blijdorp.validate()
except ValidationError as e:
print( e, e.errors )
raise e
def test_document_dbref_equality( self ):
# If a document has been fetched from the database, its relations can just contain DBRefs
# instead of Documents.
lion = DBRef( 'Animal', ObjectId() )
lion_doc = Animal( id=lion.id, name="Simba" )
giraffe = DBRef( 'Animal', ObjectId() )
giraffe_doc = Animal( id=giraffe.id, name='Giraffe' )
office = DBRef( 'Office', ObjectId() )
office_doc = Office( id=office.id )
self.assertTrue( equals(lion_doc, lion) )
# No diff; sets are for the same objectIds
self.assertFalse( set_difference( { lion, giraffe }, { lion_doc, giraffe_doc } ) )
# removed: `lion`
diff = set_difference( { lion, giraffe }, { giraffe_doc } )
self.assertEqual( len( diff ), 1 )
self.assertIn( lion, diff )
# removed: `lion`
diff = set_difference( { lion, office }, { office_doc, giraffe_doc } )
self.assertEqual( len( diff ), 1 )
self.assertIn( lion, diff )
# removed: `giraffe`
diff = set_difference( { lion, giraffe, office }, { office, lion_doc } )
self.assertEqual( len( diff ), 1 )
self.assertIn( giraffe, diff )
# No diff; second set is a superset of the first set
diff = set_difference( { lion, office }, { lion_doc, office_doc, giraffe_doc } )
self.assertEqual( len( diff ), 0 )
# removed: the new Document
diff = set_difference( { Animal( name='John Doe' ) }, {} )
self.assertEqual( len( diff ), 1 )
# Moving on; substituting DbRef with a Document (dereferencing) shouldn't mark a relation as changed
zoo = Zoo( id=ObjectId(), name="Dierenpark Emmen", animals=[ lion, giraffe ], office=office )
self.assertFalse( zoo.get_changed_fields() )
# dereference a `hasmany`; use `_data` to avoid dereferencing
zoo._data[ 'animals' ].remove( lion )
zoo._data[ 'animals' ].append( lion_doc )
# dereference a `hasone`
zoo._data[ 'office' ] = office_doc
self.assertFalse( zoo.get_changed_fields() )
def test_delete( self ):
d = self.data
# Give `artis` an office as well, and persist it for `changed_relations`; it should now have 2 animals and an office
office = Office( id=ObjectId() )
d.artis.office = office
d.artis.save( request=self.request )
# relations on other models that should point to `d.artis`
self.assertEqual( d.mammoth.zoo, d.artis )
self.assertEqual( d.tiger.zoo, d.artis )
self.assertEqual( office.tenant, d.artis )
d.artis.clear_relations()
# relations on other models that pointed to `d.artis` should be cleared
self.assertEqual( d.mammoth.zoo, None )
self.assertEqual( d.tiger.zoo, None )
self.assertEqual( office.tenant, None )
changes = d.artis.get_changed_fields()
self.assertIn( 'animals', changes )
self.assertIn( 'office', changes )
def test_reload( self ):
d = self.data
d.artis.save( self.request )
d.mammoth.save( self.request )
d.tiger.save( self.request )
d.artis.reload()
# Check if `reload` uses the documents as already present in the cache, or constructs new ones
self.assertListEqual( d.artis.animals, [ d.mammoth, d.tiger ] )
self.assertEqual( id( d.artis.animals[ 0 ] ), id( d.mammoth ) )
def test_memoize_documents( self ):
pass
def test_delete_rules( self ):
pass
def test_update( self ):
d = self.data
self.assertIn( 'animals', d.artis.get_changed_fields() )
# Updating a relation should memoize it, and thus remove it from `get_changed_relations`
d.artis.update( self.request, 'animals' )
self.assertNotIn( 'animals', d.artis.get_changed_fields() )
|
|
import numpy as np
from numba import cuda, int32, int64, float32, float64
from numba.cuda.testing import unittest, CUDATestCase, skip_on_cudasim
from numba.core import config
def useful_syncwarp(ary):
i = cuda.grid(1)
if i == 0:
ary[0] = 42
cuda.syncwarp(0xffffffff)
ary[i] = ary[0]
def use_shfl_sync_idx(ary, idx):
i = cuda.grid(1)
val = cuda.shfl_sync(0xffffffff, i, idx)
ary[i] = val
def use_shfl_sync_up(ary, delta):
i = cuda.grid(1)
val = cuda.shfl_up_sync(0xffffffff, i, delta)
ary[i] = val
def use_shfl_sync_down(ary, delta):
i = cuda.grid(1)
val = cuda.shfl_down_sync(0xffffffff, i, delta)
ary[i] = val
def use_shfl_sync_xor(ary, xor):
i = cuda.grid(1)
val = cuda.shfl_xor_sync(0xffffffff, i, xor)
ary[i] = val
def use_shfl_sync_with_val(ary, into):
i = cuda.grid(1)
val = cuda.shfl_sync(0xffffffff, into, 0)
ary[i] = val
def use_vote_sync_all(ary_in, ary_out):
i = cuda.grid(1)
pred = cuda.all_sync(0xffffffff, ary_in[i])
ary_out[i] = pred
def use_vote_sync_any(ary_in, ary_out):
i = cuda.grid(1)
pred = cuda.any_sync(0xffffffff, ary_in[i])
ary_out[i] = pred
def use_vote_sync_eq(ary_in, ary_out):
i = cuda.grid(1)
pred = cuda.eq_sync(0xffffffff, ary_in[i])
ary_out[i] = pred
def use_vote_sync_ballot(ary):
i = cuda.threadIdx.x
ballot = cuda.ballot_sync(0xffffffff, True)
ary[i] = ballot
def use_match_any_sync(ary_in, ary_out):
i = cuda.grid(1)
ballot = cuda.match_any_sync(0xffffffff, ary_in[i])
ary_out[i] = ballot
def use_match_all_sync(ary_in, ary_out):
i = cuda.grid(1)
ballot, pred = cuda.match_all_sync(0xffffffff, ary_in[i])
ary_out[i] = ballot if pred else 0
def use_independent_scheduling(arr):
i = cuda.threadIdx.x
if i % 4 == 0:
ballot = cuda.ballot_sync(0x11111111, True)
elif i % 4 == 1:
ballot = cuda.ballot_sync(0x22222222, True)
elif i % 4 == 2:
ballot = cuda.ballot_sync(0x44444444, True)
elif i % 4 == 3:
ballot = cuda.ballot_sync(0x88888888, True)
arr[i] = ballot
def _safe_cc_check(cc):
if config.ENABLE_CUDASIM:
return True
else:
return cuda.get_current_device().compute_capability >= cc
@skip_on_cudasim("Warp Operations are not yet implemented on cudasim")
class TestCudaWarpOperations(CUDATestCase):
def test_useful_syncwarp(self):
compiled = cuda.jit("void(int32[:])")(useful_syncwarp)
nelem = 32
ary = np.empty(nelem, dtype=np.int32)
compiled[1, nelem](ary)
self.assertTrue(np.all(ary == 42))
def test_shfl_sync_idx(self):
compiled = cuda.jit("void(int32[:], int32)")(use_shfl_sync_idx)
nelem = 32
idx = 4
ary = np.empty(nelem, dtype=np.int32)
compiled[1, nelem](ary, idx)
self.assertTrue(np.all(ary == idx))
def test_shfl_sync_up(self):
compiled = cuda.jit("void(int32[:], int32)")(use_shfl_sync_up)
nelem = 32
delta = 4
ary = np.empty(nelem, dtype=np.int32)
exp = np.arange(nelem, dtype=np.int32)
exp[delta:] -= delta
compiled[1, nelem](ary, delta)
self.assertTrue(np.all(ary == exp))
def test_shfl_sync_down(self):
compiled = cuda.jit("void(int32[:], int32)")(use_shfl_sync_down)
nelem = 32
delta = 4
ary = np.empty(nelem, dtype=np.int32)
exp = np.arange(nelem, dtype=np.int32)
exp[:-delta] += delta
compiled[1, nelem](ary, delta)
self.assertTrue(np.all(ary == exp))
def test_shfl_sync_xor(self):
compiled = cuda.jit("void(int32[:], int32)")(use_shfl_sync_xor)
nelem = 32
xor = 16
ary = np.empty(nelem, dtype=np.int32)
exp = np.arange(nelem, dtype=np.int32) ^ xor
compiled[1, nelem](ary, xor)
self.assertTrue(np.all(ary == exp))
def test_shfl_sync_types(self):
types = int32, int64, float32, float64
values = (np.int32(-1), np.int64(1 << 42),
np.float32(np.pi), np.float64(np.pi))
for typ, val in zip(types, values):
compiled = cuda.jit((typ[:], typ))(use_shfl_sync_with_val)
nelem = 32
ary = np.empty(nelem, dtype=val.dtype)
compiled[1, nelem](ary, val)
self.assertTrue(np.all(ary == val))
def test_vote_sync_all(self):
compiled = cuda.jit("void(int32[:], int32[:])")(use_vote_sync_all)
nelem = 32
ary_in = np.ones(nelem, dtype=np.int32)
ary_out = np.empty(nelem, dtype=np.int32)
compiled[1, nelem](ary_in, ary_out)
self.assertTrue(np.all(ary_out == 1))
ary_in[-1] = 0
compiled[1, nelem](ary_in, ary_out)
self.assertTrue(np.all(ary_out == 0))
def test_vote_sync_any(self):
compiled = cuda.jit("void(int32[:], int32[:])")(use_vote_sync_any)
nelem = 32
ary_in = np.zeros(nelem, dtype=np.int32)
ary_out = np.empty(nelem, dtype=np.int32)
compiled[1, nelem](ary_in, ary_out)
self.assertTrue(np.all(ary_out == 0))
ary_in[2] = 1
ary_in[5] = 1
compiled[1, nelem](ary_in, ary_out)
self.assertTrue(np.all(ary_out == 1))
def test_vote_sync_eq(self):
compiled = cuda.jit("void(int32[:], int32[:])")(use_vote_sync_eq)
nelem = 32
ary_in = np.zeros(nelem, dtype=np.int32)
ary_out = np.empty(nelem, dtype=np.int32)
compiled[1, nelem](ary_in, ary_out)
self.assertTrue(np.all(ary_out == 1))
ary_in[1] = 1
compiled[1, nelem](ary_in, ary_out)
self.assertTrue(np.all(ary_out == 0))
ary_in[:] = 1
compiled[1, nelem](ary_in, ary_out)
self.assertTrue(np.all(ary_out == 1))
def test_vote_sync_ballot(self):
compiled = cuda.jit("void(uint32[:])")(use_vote_sync_ballot)
nelem = 32
ary = np.empty(nelem, dtype=np.uint32)
compiled[1, nelem](ary)
self.assertTrue(np.all(ary == np.uint32(0xffffffff)))
@unittest.skipUnless(_safe_cc_check((7, 0)),
"Matching requires at least Volta Architecture")
def test_match_any_sync(self):
compiled = cuda.jit("void(int32[:], int32[:])")(use_match_any_sync)
nelem = 10
ary_in = np.arange(nelem, dtype=np.int32) % 2
ary_out = np.empty(nelem, dtype=np.int32)
exp = np.tile((0b0101010101, 0b1010101010), 5)
compiled[1, nelem](ary_in, ary_out)
self.assertTrue(np.all(ary_out == exp))
@unittest.skipUnless(_safe_cc_check((7, 0)),
"Matching requires at least Volta Architecture")
def test_match_all_sync(self):
compiled = cuda.jit("void(int32[:], int32[:])")(use_match_all_sync)
nelem = 10
ary_in = np.zeros(nelem, dtype=np.int32)
ary_out = np.empty(nelem, dtype=np.int32)
compiled[1, nelem](ary_in, ary_out)
self.assertTrue(np.all(ary_out == 0b1111111111))
ary_in[1] = 4
compiled[1, nelem](ary_in, ary_out)
self.assertTrue(np.all(ary_out == 0))
@unittest.skipUnless(_safe_cc_check((7, 0)),
"Independent scheduling requires at least Volta "
"Architecture")
def test_independent_scheduling(self):
compiled = cuda.jit("void(uint32[:])")(use_independent_scheduling)
arr = np.empty(32, dtype=np.uint32)
exp = np.tile((0x11111111, 0x22222222, 0x44444444, 0x88888888), 8)
compiled[1, 32](arr)
self.assertTrue(np.all(arr == exp))
if __name__ == '__main__':
unittest.main()
|
|
"""The tests for the Script component."""
# pylint: disable=protected-access
from datetime import timedelta
import functools as ft
from unittest import mock
import asynctest
import jinja2
import voluptuous as vol
import pytest
import homeassistant.components.scene as scene
from homeassistant import exceptions
from homeassistant.const import ATTR_ENTITY_ID, SERVICE_TURN_ON
from homeassistant.core import Context, callback
# Otherwise can't test just this file (import order issue)
import homeassistant.util.dt as dt_util
from homeassistant.helpers import script, config_validation as cv
from tests.common import async_fire_time_changed
ENTITY_ID = "script.test"
async def test_firing_event(hass):
"""Test the firing of events."""
event = "test_event"
context = Context()
calls = []
@callback
def record_event(event):
"""Add recorded event to set."""
calls.append(event)
hass.bus.async_listen(event, record_event)
script_obj = script.Script(
hass, cv.SCRIPT_SCHEMA({"event": event, "event_data": {"hello": "world"}})
)
await script_obj.async_run(context=context)
await hass.async_block_till_done()
assert len(calls) == 1
assert calls[0].context is context
assert calls[0].data.get("hello") == "world"
assert not script_obj.can_cancel
async def test_firing_event_template(hass):
"""Test the firing of events."""
event = "test_event"
context = Context()
calls = []
@callback
def record_event(event):
"""Add recorded event to set."""
calls.append(event)
hass.bus.async_listen(event, record_event)
script_obj = script.Script(
hass,
cv.SCRIPT_SCHEMA(
{
"event": event,
"event_data_template": {
"dict": {
1: "{{ is_world }}",
2: "{{ is_world }}{{ is_world }}",
3: "{{ is_world }}{{ is_world }}{{ is_world }}",
},
"list": ["{{ is_world }}", "{{ is_world }}{{ is_world }}"],
},
}
),
)
await script_obj.async_run({"is_world": "yes"}, context=context)
await hass.async_block_till_done()
assert len(calls) == 1
assert calls[0].context is context
assert calls[0].data == {
"dict": {1: "yes", 2: "yesyes", 3: "yesyesyes"},
"list": ["yes", "yesyes"],
}
assert not script_obj.can_cancel
async def test_calling_service(hass):
"""Test the calling of a service."""
calls = []
context = Context()
@callback
def record_call(service):
"""Add recorded event to set."""
calls.append(service)
hass.services.async_register("test", "script", record_call)
hass.async_add_job(
ft.partial(
script.call_from_config,
hass,
{"service": "test.script", "data": {"hello": "world"}},
context=context,
)
)
await hass.async_block_till_done()
assert len(calls) == 1
assert calls[0].context is context
assert calls[0].data.get("hello") == "world"
async def test_activating_scene(hass):
"""Test the activation of a scene."""
calls = []
context = Context()
@callback
def record_call(service):
"""Add recorded event to set."""
calls.append(service)
hass.services.async_register(scene.DOMAIN, SERVICE_TURN_ON, record_call)
hass.async_add_job(
ft.partial(
script.call_from_config, hass, {"scene": "scene.hello"}, context=context
)
)
await hass.async_block_till_done()
assert len(calls) == 1
assert calls[0].context is context
assert calls[0].data.get(ATTR_ENTITY_ID) == "scene.hello"
async def test_calling_service_template(hass):
"""Test the calling of a service."""
calls = []
context = Context()
@callback
def record_call(service):
"""Add recorded event to set."""
calls.append(service)
hass.services.async_register("test", "script", record_call)
hass.async_add_job(
ft.partial(
script.call_from_config,
hass,
{
"service_template": """
{% if True %}
test.script
{% else %}
test.not_script
{% endif %}""",
"data_template": {
"hello": """
{% if is_world == 'yes' %}
world
{% else %}
not world
{% endif %}
"""
},
},
{"is_world": "yes"},
context=context,
)
)
await hass.async_block_till_done()
assert len(calls) == 1
assert calls[0].context is context
assert calls[0].data.get("hello") == "world"
async def test_delay(hass):
"""Test the delay."""
event = "test_event"
events = []
context = Context()
delay_alias = "delay step"
@callback
def record_event(event):
"""Add recorded event to set."""
events.append(event)
hass.bus.async_listen(event, record_event)
script_obj = script.Script(
hass,
cv.SCRIPT_SCHEMA(
[
{"event": event},
{"delay": {"seconds": 5}, "alias": delay_alias},
{"event": event},
]
),
)
await script_obj.async_run(context=context)
await hass.async_block_till_done()
assert script_obj.is_running
assert script_obj.can_cancel
assert script_obj.last_action == delay_alias
assert len(events) == 1
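# Fast-forward past the 5 second delay so the script resumes and fires
# the second event.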
future = dt_util.utcnow() + timedelta(seconds=5)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
assert not script_obj.is_running
assert len(events) == 2
assert events[0].context is context
assert events[1].context is context
async def test_delay_template(hass):
"""Test the delay as a template."""
event = "test_event"
events = []
delay_alias = "delay step"
@callback
def record_event(event):
"""Add recorded event to set."""
events.append(event)
hass.bus.async_listen(event, record_event)
script_obj = script.Script(
hass,
cv.SCRIPT_SCHEMA(
[
{"event": event},
{"delay": "00:00:{{ 5 }}", "alias": delay_alias},
{"event": event},
]
),
)
await script_obj.async_run()
await hass.async_block_till_done()
assert script_obj.is_running
assert script_obj.can_cancel
assert script_obj.last_action == delay_alias
assert len(events) == 1
future = dt_util.utcnow() + timedelta(seconds=5)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
assert not script_obj.is_running
assert len(events) == 2
async def test_delay_invalid_template(hass):
"""Test the delay as a template that fails."""
event = "test_event"
events = []
@callback
def record_event(event):
"""Add recorded event to set."""
events.append(event)
hass.bus.async_listen(event, record_event)
script_obj = script.Script(
hass,
cv.SCRIPT_SCHEMA(
[
{"event": event},
{"delay": "{{ invalid_delay }}"},
{"delay": {"seconds": 5}},
{"event": event},
]
),
)
with mock.patch.object(script, "_LOGGER") as mock_logger:
await script_obj.async_run()
await hass.async_block_till_done()
assert mock_logger.error.called
assert not script_obj.is_running
assert len(events) == 1
async def test_delay_complex_template(hass):
"""Test the delay with a working complex template."""
event = "test_event"
events = []
delay_alias = "delay step"
@callback
def record_event(event):
"""Add recorded event to set."""
events.append(event)
hass.bus.async_listen(event, record_event)
script_obj = script.Script(
hass,
cv.SCRIPT_SCHEMA(
[
{"event": event},
{"delay": {"seconds": "{{ 5 }}"}, "alias": delay_alias},
{"event": event},
]
),
)
await script_obj.async_run()
await hass.async_block_till_done()
assert script_obj.is_running
assert script_obj.can_cancel
assert script_obj.last_action == delay_alias
assert len(events) == 1
future = dt_util.utcnow() + timedelta(seconds=5)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
assert not script_obj.is_running
assert len(events) == 2
async def test_delay_complex_invalid_template(hass):
"""Test the delay with a complex template that fails."""
event = "test_event"
events = []
@callback
def record_event(event):
"""Add recorded event to set."""
events.append(event)
hass.bus.async_listen(event, record_event)
script_obj = script.Script(
hass,
cv.SCRIPT_SCHEMA(
[
{"event": event},
{"delay": {"seconds": "{{ invalid_delay }}"}},
{"delay": {"seconds": "{{ 5 }}"}},
{"event": event},
]
),
)
with mock.patch.object(script, "_LOGGER") as mock_logger:
await script_obj.async_run()
await hass.async_block_till_done()
assert mock_logger.error.called
assert not script_obj.is_running
assert len(events) == 1
async def test_cancel_while_delay(hass):
"""Test the cancelling while the delay is present."""
event = "test_event"
events = []
@callback
def record_event(event):
"""Add recorded event to set."""
events.append(event)
hass.bus.async_listen(event, record_event)
script_obj = script.Script(
hass, cv.SCRIPT_SCHEMA([{"delay": {"seconds": 5}}, {"event": event}])
)
await script_obj.async_run()
await hass.async_block_till_done()
assert script_obj.is_running
assert len(events) == 0
script_obj.async_stop()
assert not script_obj.is_running
# Make sure the script is really stopped.
future = dt_util.utcnow() + timedelta(seconds=5)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
assert not script_obj.is_running
assert len(events) == 0
async def test_wait_template(hass):
"""Test the wait template."""
event = "test_event"
events = []
context = Context()
wait_alias = "wait step"
@callback
def record_event(event):
"""Add recorded event to set."""
events.append(event)
hass.bus.async_listen(event, record_event)
hass.states.async_set("switch.test", "on")
script_obj = script.Script(
hass,
cv.SCRIPT_SCHEMA(
[
{"event": event},
{
"wait_template": "{{states.switch.test.state == 'off'}}",
"alias": wait_alias,
},
{"event": event},
]
),
)
await script_obj.async_run(context=context)
await hass.async_block_till_done()
assert script_obj.is_running
assert script_obj.can_cancel
assert script_obj.last_action == wait_alias
assert len(events) == 1
hass.states.async_set("switch.test", "off")
await hass.async_block_till_done()
assert not script_obj.is_running
assert len(events) == 2
assert events[0].context is context
assert events[1].context is context
async def test_wait_template_cancel(hass):
"""Test the wait template cancel action."""
event = "test_event"
events = []
wait_alias = "wait step"
@callback
def record_event(event):
"""Add recorded event to set."""
events.append(event)
hass.bus.async_listen(event, record_event)
hass.states.async_set("switch.test", "on")
script_obj = script.Script(
hass,
cv.SCRIPT_SCHEMA(
[
{"event": event},
{
"wait_template": "{{states.switch.test.state == 'off'}}",
"alias": wait_alias,
},
{"event": event},
]
),
)
await script_obj.async_run()
await hass.async_block_till_done()
assert script_obj.is_running
assert script_obj.can_cancel
assert script_obj.last_action == wait_alias
assert len(events) == 1
script_obj.async_stop()
assert not script_obj.is_running
assert len(events) == 1
hass.states.async_set("switch.test", "off")
await hass.async_block_till_done()
assert not script_obj.is_running
assert len(events) == 1
async def test_wait_template_not_schedule(hass):
"""Test the wait template with correct condition."""
event = "test_event"
events = []
@callback
def record_event(event):
"""Add recorded event to set."""
events.append(event)
hass.bus.async_listen(event, record_event)
hass.states.async_set("switch.test", "on")
script_obj = script.Script(
hass,
cv.SCRIPT_SCHEMA(
[
{"event": event},
{"wait_template": "{{states.switch.test.state == 'on'}}"},
{"event": event},
]
),
)
await script_obj.async_run()
await hass.async_block_till_done()
assert not script_obj.is_running
assert script_obj.can_cancel
assert len(events) == 2
async def test_wait_template_timeout_halt(hass):
"""Test the wait template, halt on timeout."""
event = "test_event"
events = []
wait_alias = "wait step"
@callback
def record_event(event):
"""Add recorded event to set."""
events.append(event)
hass.bus.async_listen(event, record_event)
hass.states.async_set("switch.test", "on")
script_obj = script.Script(
hass,
cv.SCRIPT_SCHEMA(
[
{"event": event},
{
"wait_template": "{{states.switch.test.state == 'off'}}",
"continue_on_timeout": False,
"timeout": 5,
"alias": wait_alias,
},
{"event": event},
]
),
)
await script_obj.async_run()
await hass.async_block_till_done()
assert script_obj.is_running
assert script_obj.can_cancel
assert script_obj.last_action == wait_alias
assert len(events) == 1
future = dt_util.utcnow() + timedelta(seconds=5)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
assert not script_obj.is_running
assert len(events) == 1
async def test_wait_template_timeout_continue(hass):
"""Test the wait template with continuing the script."""
event = "test_event"
events = []
wait_alias = "wait step"
@callback
def record_event(event):
"""Add recorded event to set."""
events.append(event)
hass.bus.async_listen(event, record_event)
hass.states.async_set("switch.test", "on")
script_obj = script.Script(
hass,
cv.SCRIPT_SCHEMA(
[
{"event": event},
{
"wait_template": "{{states.switch.test.state == 'off'}}",
"timeout": 5,
"continue_on_timeout": True,
"alias": wait_alias,
},
{"event": event},
]
),
)
await script_obj.async_run()
await hass.async_block_till_done()
assert script_obj.is_running
assert script_obj.can_cancel
assert script_obj.last_action == wait_alias
assert len(events) == 1
future = dt_util.utcnow() + timedelta(seconds=5)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
assert not script_obj.is_running
assert len(events) == 2
async def test_wait_template_timeout_default(hass):
"""Test the wait template with default contiune."""
event = "test_event"
events = []
wait_alias = "wait step"
@callback
def record_event(event):
"""Add recorded event to set."""
events.append(event)
hass.bus.async_listen(event, record_event)
hass.states.async_set("switch.test", "on")
script_obj = script.Script(
hass,
cv.SCRIPT_SCHEMA(
[
{"event": event},
{
"wait_template": "{{states.switch.test.state == 'off'}}",
"timeout": 5,
"alias": wait_alias,
},
{"event": event},
]
),
)
await script_obj.async_run()
await hass.async_block_till_done()
assert script_obj.is_running
assert script_obj.can_cancel
assert script_obj.last_action == wait_alias
assert len(events) == 1
future = dt_util.utcnow() + timedelta(seconds=5)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
assert not script_obj.is_running
assert len(events) == 2
async def test_wait_template_variables(hass):
"""Test the wait template with variables."""
event = "test_event"
events = []
wait_alias = "wait step"
@callback
def record_event(event):
"""Add recorded event to set."""
events.append(event)
hass.bus.async_listen(event, record_event)
hass.states.async_set("switch.test", "on")
script_obj = script.Script(
hass,
cv.SCRIPT_SCHEMA(
[
{"event": event},
{"wait_template": "{{is_state(data, 'off')}}", "alias": wait_alias},
{"event": event},
]
),
)
await script_obj.async_run({"data": "switch.test"})
await hass.async_block_till_done()
assert script_obj.is_running
assert script_obj.can_cancel
assert script_obj.last_action == wait_alias
assert len(events) == 1
hass.states.async_set("switch.test", "off")
await hass.async_block_till_done()
assert not script_obj.is_running
assert len(events) == 2
async def test_passing_variables_to_script(hass):
"""Test if we can pass variables to script."""
calls = []
@callback
def record_call(service):
"""Add recorded event to set."""
calls.append(service)
hass.services.async_register("test", "script", record_call)
script_obj = script.Script(
hass,
cv.SCRIPT_SCHEMA(
[
{
"service": "test.script",
"data_template": {"hello": "{{ greeting }}"},
},
{"delay": "{{ delay_period }}"},
{
"service": "test.script",
"data_template": {"hello": "{{ greeting2 }}"},
},
]
),
)
await script_obj.async_run(
{"greeting": "world", "greeting2": "universe", "delay_period": "00:00:05"}
)
await hass.async_block_till_done()
assert script_obj.is_running
assert len(calls) == 1
assert calls[-1].data["hello"] == "world"
future = dt_util.utcnow() + timedelta(seconds=5)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
assert not script_obj.is_running
assert len(calls) == 2
assert calls[-1].data["hello"] == "universe"
async def test_condition(hass):
"""Test if we can use conditions in a script."""
event = "test_event"
events = []
@callback
def record_event(event):
"""Add recorded event to set."""
events.append(event)
hass.bus.async_listen(event, record_event)
hass.states.async_set("test.entity", "hello")
script_obj = script.Script(
hass,
cv.SCRIPT_SCHEMA(
[
{"event": event},
{
"condition": "template",
"value_template": '{{ states.test.entity.state == "hello" }}',
},
{"event": event},
]
),
)
await script_obj.async_run()
await hass.async_block_till_done()
assert len(events) == 2
hass.states.async_set("test.entity", "goodbye")
await script_obj.async_run()
await hass.async_block_till_done()
assert len(events) == 3
@asynctest.patch("homeassistant.helpers.script.condition.async_from_config")
async def test_condition_created_once(async_from_config, hass):
"""Test that the conditions do not get created multiple times."""
event = "test_event"
events = []
@callback
def record_event(event):
"""Add recorded event to set."""
events.append(event)
hass.bus.async_listen(event, record_event)
hass.states.async_set("test.entity", "hello")
script_obj = script.Script(
hass,
cv.SCRIPT_SCHEMA(
[
{"event": event},
{
"condition": "template",
"value_template": '{{ states.test.entity.state == "hello" }}',
},
{"event": event},
]
),
)
await script_obj.async_run()
await script_obj.async_run()
await hass.async_block_till_done()
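# The condition should be compiled once and reused from the script's
# _config_cache on the second run.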
assert async_from_config.call_count == 1
assert len(script_obj._config_cache) == 1
async def test_all_conditions_cached(hass):
"""Test that multiple conditions get cached."""
event = "test_event"
events = []
@callback
def record_event(event):
"""Add recorded event to set."""
events.append(event)
hass.bus.async_listen(event, record_event)
hass.states.async_set("test.entity", "hello")
script_obj = script.Script(
hass,
cv.SCRIPT_SCHEMA(
[
{"event": event},
{
"condition": "template",
"value_template": '{{ states.test.entity.state == "hello" }}',
},
{
"condition": "template",
"value_template": '{{ states.test.entity.state != "hello" }}',
},
{"event": event},
]
),
)
await script_obj.async_run()
await hass.async_block_till_done()
assert len(script_obj._config_cache) == 2
async def test_last_triggered(hass):
"""Test the last_triggered."""
event = "test_event"
script_obj = script.Script(
hass,
cv.SCRIPT_SCHEMA(
[{"event": event}, {"delay": {"seconds": 5}}, {"event": event}]
),
)
assert script_obj.last_triggered is None
time = dt_util.utcnow()
with mock.patch("homeassistant.helpers.script.date_util.utcnow", return_value=time):
await script_obj.async_run()
await hass.async_block_till_done()
assert script_obj.last_triggered == time
async def test_propagate_error_service_not_found(hass):
"""Test that a script aborts when a service is not found."""
events = []
@callback
def record_event(event):
events.append(event)
hass.bus.async_listen("test_event", record_event)
script_obj = script.Script(
hass, cv.SCRIPT_SCHEMA([{"service": "test.script"}, {"event": "test_event"}])
)
with pytest.raises(exceptions.ServiceNotFound):
await script_obj.async_run()
assert len(events) == 0
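# The run position is reset to -1, i.e. the script aborted and is no
# longer mid-run.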
assert script_obj._cur == -1
async def test_propagate_error_invalid_service_data(hass):
"""Test that a script aborts when we send invalid service data."""
events = []
@callback
def record_event(event):
events.append(event)
hass.bus.async_listen("test_event", record_event)
calls = []
@callback
def record_call(service):
"""Add recorded event to set."""
calls.append(service)
hass.services.async_register(
"test", "script", record_call, schema=vol.Schema({"text": str})
)
script_obj = script.Script(
hass,
cv.SCRIPT_SCHEMA(
[{"service": "test.script", "data": {"text": 1}}, {"event": "test_event"}]
),
)
with pytest.raises(vol.Invalid):
await script_obj.async_run()
assert len(events) == 0
assert len(calls) == 0
assert script_obj._cur == -1
async def test_propagate_error_service_exception(hass):
"""Test that a script aborts when a service throws an exception."""
events = []
@callback
def record_event(event):
events.append(event)
hass.bus.async_listen("test_event", record_event)
calls = []
@callback
def record_call(service):
"""Add recorded event to set."""
raise ValueError("BROKEN")
hass.services.async_register("test", "script", record_call)
script_obj = script.Script(
hass, cv.SCRIPT_SCHEMA([{"service": "test.script"}, {"event": "test_event"}])
)
with pytest.raises(ValueError):
await script_obj.async_run()
assert len(events) == 0
assert len(calls) == 0
assert script_obj._cur == -1
def test_log_exception():
"""Test logged output."""
script_obj = script.Script(
None, cv.SCRIPT_SCHEMA([{"service": "test.script"}, {"event": "test_event"}])
)
script_obj._exception_step = 1
for exc, msg in (
(vol.Invalid("Invalid number"), "Invalid data"),
(
exceptions.TemplateError(jinja2.TemplateError("Unclosed bracket")),
"Error rendering template",
),
(exceptions.Unauthorized(), "Unauthorized"),
(exceptions.ServiceNotFound("light", "turn_on"), "Service not found"),
(ValueError("Cannot parse JSON"), "Unknown error"),
):
logger = mock.Mock()
script_obj.async_log_exception(logger, "Test error", exc)
assert len(logger.mock_calls) == 1
_, _, p_error_desc, p_action_type, p_step, p_error = logger.mock_calls[0][1]
assert p_error_desc == msg
assert p_action_type == script.ACTION_FIRE_EVENT
assert p_step == 2
if isinstance(exc, ValueError):
assert p_error == ""
else:
assert p_error == str(exc)
|
|
# pylint: disable-msg=E1101,W0612
import operator
import pytest
from numpy import nan
import numpy as np
import pandas as pd
from pandas import Series, DataFrame, bdate_range
from pandas.core.common import isnull
from pandas.tseries.offsets import BDay
import pandas.util.testing as tm
from pandas.compat import range
from pandas import compat
from pandas.core.reshape.util import cartesian_product
import pandas.core.sparse.frame as spf
from pandas._libs.sparse import BlockIndex, IntIndex
from pandas.core.sparse.api import SparseSeries
from pandas.tests.series.test_api import SharedWithSparse
def _test_data1():
# nan-based
arr = np.arange(20, dtype=float)
index = np.arange(20)
arr[:2] = nan
arr[5:10] = nan
arr[-3:] = nan
return arr, index
def _test_data2():
# nan-based
arr = np.arange(15, dtype=float)
index = np.arange(15)
arr[7:12] = nan
arr[-1:] = nan
return arr, index
def _test_data1_zero():
# zero-based
arr, index = _test_data1()
arr[np.isnan(arr)] = 0
return arr, index
def _test_data2_zero():
# zero-based
arr, index = _test_data2()
arr[np.isnan(arr)] = 0
return arr, index
class TestSparseSeries(SharedWithSparse):
def setup_method(self, method):
arr, index = _test_data1()
date_index = bdate_range('1/1/2011', periods=len(index))
self.bseries = SparseSeries(arr, index=index, kind='block',
name='bseries')
self.ts = self.bseries
self.btseries = SparseSeries(arr, index=date_index, kind='block')
self.iseries = SparseSeries(arr, index=index, kind='integer',
name='iseries')
arr, index = _test_data2()
self.bseries2 = SparseSeries(arr, index=index, kind='block')
self.iseries2 = SparseSeries(arr, index=index, kind='integer')
arr, index = _test_data1_zero()
self.zbseries = SparseSeries(arr, index=index, kind='block',
fill_value=0, name='zbseries')
self.ziseries = SparseSeries(arr, index=index, kind='integer',
fill_value=0)
arr, index = _test_data2_zero()
self.zbseries2 = SparseSeries(arr, index=index, kind='block',
fill_value=0)
self.ziseries2 = SparseSeries(arr, index=index, kind='integer',
fill_value=0)
def test_constructor_dtype(self):
arr = SparseSeries([np.nan, 1, 2, np.nan])
assert arr.dtype == np.float64
assert np.isnan(arr.fill_value)
arr = SparseSeries([np.nan, 1, 2, np.nan], fill_value=0)
assert arr.dtype == np.float64
assert arr.fill_value == 0
arr = SparseSeries([0, 1, 2, 4], dtype=np.int64, fill_value=np.nan)
assert arr.dtype == np.int64
assert np.isnan(arr.fill_value)
arr = SparseSeries([0, 1, 2, 4], dtype=np.int64)
assert arr.dtype == np.int64
assert arr.fill_value == 0
arr = SparseSeries([0, 1, 2, 4], fill_value=0, dtype=np.int64)
assert arr.dtype == np.int64
assert arr.fill_value == 0
def test_iteration_and_str(self):
[x for x in self.bseries]
str(self.bseries)
def test_construct_DataFrame_with_sp_series(self):
# it works!
df = DataFrame({'col': self.bseries})
# printing & access
df.iloc[:1]
df['col']
df.dtypes
str(df)
tm.assert_sp_series_equal(df['col'], self.bseries, check_names=False)
result = df.iloc[:, 0]
tm.assert_sp_series_equal(result, self.bseries, check_names=False)
# blocking
expected = Series({'col': 'float64:sparse'})
result = df.ftypes
tm.assert_series_equal(expected, result)
def test_constructor_preserve_attr(self):
arr = pd.SparseArray([1, 0, 3, 0], dtype=np.int64, fill_value=0)
assert arr.dtype == np.int64
assert arr.fill_value == 0
s = pd.SparseSeries(arr, name='x')
assert s.dtype == np.int64
assert s.fill_value == 0
def test_series_density(self):
# GH2803
ts = Series(np.random.randn(10))
ts[2:-2] = nan
sts = ts.to_sparse()
density = sts.density # don't die
assert density == 4 / 10.0
def test_sparse_to_dense(self):
arr, index = _test_data1()
series = self.bseries.to_dense()
tm.assert_series_equal(series, Series(arr, name='bseries'))
# see gh-14647
with tm.assert_produces_warning(FutureWarning,
check_stacklevel=False):
series = self.bseries.to_dense(sparse_only=True)
indexer = np.isfinite(arr)
exp = Series(arr[indexer], index=index[indexer], name='bseries')
tm.assert_series_equal(series, exp)
series = self.iseries.to_dense()
tm.assert_series_equal(series, Series(arr, name='iseries'))
arr, index = _test_data1_zero()
series = self.zbseries.to_dense()
tm.assert_series_equal(series, Series(arr, name='zbseries'))
series = self.ziseries.to_dense()
tm.assert_series_equal(series, Series(arr))
def test_to_dense_fill_value(self):
s = pd.Series([1, np.nan, np.nan, 3, np.nan])
res = SparseSeries(s).to_dense()
tm.assert_series_equal(res, s)
res = SparseSeries(s, fill_value=0).to_dense()
tm.assert_series_equal(res, s)
s = pd.Series([1, np.nan, 0, 3, 0])
res = SparseSeries(s).to_dense()
tm.assert_series_equal(res, s)
res = SparseSeries(s, fill_value=0).to_dense()
tm.assert_series_equal(res, s)
s = pd.Series([np.nan, np.nan, np.nan, np.nan, np.nan])
res = SparseSeries(s).to_dense()
tm.assert_series_equal(res, s)
s = pd.Series([np.nan, np.nan, np.nan, np.nan, np.nan])
res = SparseSeries(s, fill_value=0).to_dense()
tm.assert_series_equal(res, s)
def test_dense_to_sparse(self):
series = self.bseries.to_dense()
bseries = series.to_sparse(kind='block')
iseries = series.to_sparse(kind='integer')
tm.assert_sp_series_equal(bseries, self.bseries)
tm.assert_sp_series_equal(iseries, self.iseries, check_names=False)
assert iseries.name == self.bseries.name
assert len(series) == len(bseries)
assert len(series) == len(iseries)
assert series.shape == bseries.shape
assert series.shape == iseries.shape
# non-NaN fill value
series = self.zbseries.to_dense()
zbseries = series.to_sparse(kind='block', fill_value=0)
ziseries = series.to_sparse(kind='integer', fill_value=0)
tm.assert_sp_series_equal(zbseries, self.zbseries)
tm.assert_sp_series_equal(ziseries, self.ziseries, check_names=False)
assert ziseries.name == self.zbseries.name
assert len(series) == len(zbseries)
assert len(series) == len(ziseries)
assert series.shape == zbseries.shape
assert series.shape == ziseries.shape
def test_to_dense_preserve_name(self):
assert (self.bseries.name is not None)
result = self.bseries.to_dense()
assert result.name == self.bseries.name
def test_constructor(self):
# sanity-check the fixtures created in setup_method
assert np.isnan(self.bseries.fill_value)
assert isinstance(self.bseries.sp_index, BlockIndex)
assert np.isnan(self.iseries.fill_value)
assert isinstance(self.iseries.sp_index, IntIndex)
assert self.zbseries.fill_value == 0
tm.assert_numpy_array_equal(self.zbseries.values.values,
self.bseries.to_dense().fillna(0).values)
# pass SparseSeries
def _check_const(sparse, name):
# use passed series name
result = SparseSeries(sparse)
tm.assert_sp_series_equal(result, sparse)
assert sparse.name == name
assert result.name == name
# use passed name
result = SparseSeries(sparse, name='x')
tm.assert_sp_series_equal(result, sparse, check_names=False)
assert result.name == 'x'
_check_const(self.bseries, 'bseries')
_check_const(self.iseries, 'iseries')
_check_const(self.zbseries, 'zbseries')
# Sparse time series works
date_index = bdate_range('1/1/2000', periods=len(self.bseries))
s5 = SparseSeries(self.bseries, index=date_index)
assert isinstance(s5, SparseSeries)
# pass Series
bseries2 = SparseSeries(self.bseries.to_dense())
tm.assert_numpy_array_equal(self.bseries.sp_values, bseries2.sp_values)
# pass dict?
# don't copy the data by default
values = np.ones(self.bseries.npoints)
sp = SparseSeries(values, sparse_index=self.bseries.sp_index)
sp.sp_values[:5] = 97
assert values[0] == 97
assert len(sp) == 20
assert sp.shape == (20, )
# but can make it copy!
sp = SparseSeries(values, sparse_index=self.bseries.sp_index,
copy=True)
sp.sp_values[:5] = 100
assert values[0] == 97
assert len(sp) == 20
assert sp.shape == (20, )
def test_constructor_scalar(self):
data = 5
sp = SparseSeries(data, np.arange(100))
sp = sp.reindex(np.arange(200))
assert (sp.loc[:99] == data).all()
assert isnull(sp.loc[100:]).all()
data = np.nan
sp = SparseSeries(data, np.arange(100))
assert len(sp) == 100
assert sp.shape == (100, )
def test_constructor_ndarray(self):
pass
def test_constructor_nonnan(self):
arr = [0, 0, 0, nan, nan]
sp_series = SparseSeries(arr, fill_value=0)
tm.assert_numpy_array_equal(sp_series.values.values, np.array(arr))
assert len(sp_series) == 5
assert sp_series.shape == (5, )
def test_constructor_empty(self):
# see gh-9272
sp = SparseSeries()
assert len(sp.index) == 0
assert sp.shape == (0, )
def test_copy_astype(self):
cop = self.bseries.astype(np.float64)
assert cop is not self.bseries
assert cop.sp_index is self.bseries.sp_index
assert cop.dtype == np.float64
cop2 = self.iseries.copy()
tm.assert_sp_series_equal(cop, self.bseries)
tm.assert_sp_series_equal(cop2, self.iseries)
# test that data is copied
cop[:5] = 97
assert cop.sp_values[0] == 97
assert self.bseries.sp_values[0] != 97
# correct fill value
zbcop = self.zbseries.copy()
zicop = self.ziseries.copy()
tm.assert_sp_series_equal(zbcop, self.zbseries)
tm.assert_sp_series_equal(zicop, self.ziseries)
# no deep copy
view = self.bseries.copy(deep=False)
view.sp_values[:5] = 5
assert (self.bseries.sp_values[:5] == 5).all()
def test_shape(self):
# see gh-10452
assert self.bseries.shape == (20, )
assert self.btseries.shape == (20, )
assert self.iseries.shape == (20, )
assert self.bseries2.shape == (15, )
assert self.iseries2.shape == (15, )
assert self.zbseries2.shape == (15, )
assert self.ziseries2.shape == (15, )
def test_astype(self):
with pytest.raises(ValueError):
self.bseries.astype(np.int64)
def test_astype_all(self):
orig = pd.Series(np.array([1, 2, 3]))
s = SparseSeries(orig)
types = [np.float64, np.float32, np.int64,
np.int32, np.int16, np.int8]
for typ in types:
res = s.astype(typ)
assert res.dtype == typ
tm.assert_series_equal(res.to_dense(), orig.astype(typ))
def test_kind(self):
assert self.bseries.kind == 'block'
assert self.iseries.kind == 'integer'
def test_to_frame(self):
# GH 9850
s = pd.SparseSeries([1, 2, 0, nan, 4, nan, 0], name='x')
exp = pd.SparseDataFrame({'x': [1, 2, 0, nan, 4, nan, 0]})
tm.assert_sp_frame_equal(s.to_frame(), exp)
exp = pd.SparseDataFrame({'y': [1, 2, 0, nan, 4, nan, 0]})
tm.assert_sp_frame_equal(s.to_frame(name='y'), exp)
s = pd.SparseSeries([1, 2, 0, nan, 4, nan, 0], name='x', fill_value=0)
exp = pd.SparseDataFrame({'x': [1, 2, 0, nan, 4, nan, 0]},
default_fill_value=0)
tm.assert_sp_frame_equal(s.to_frame(), exp)
exp = pd.DataFrame({'y': [1, 2, 0, nan, 4, nan, 0]})
tm.assert_frame_equal(s.to_frame(name='y').to_dense(), exp)
def test_pickle(self):
def _test_roundtrip(series):
unpickled = tm.round_trip_pickle(series)
tm.assert_sp_series_equal(series, unpickled)
tm.assert_series_equal(series.to_dense(), unpickled.to_dense())
self._check_all(_test_roundtrip)
def _check_all(self, check_func):
check_func(self.bseries)
check_func(self.iseries)
check_func(self.zbseries)
check_func(self.ziseries)
def test_getitem(self):
def _check_getitem(sp, dense):
for idx, val in compat.iteritems(dense):
tm.assert_almost_equal(val, sp[idx])
for i in range(len(dense)):
tm.assert_almost_equal(sp[i], dense[i])
# j = np.float64(i)
# assert_almost_equal(sp[j], dense[j])
# API change 1/6/2012
# negative getitem works
# for i in xrange(len(dense)):
# assert_almost_equal(sp[-i], dense[-i])
_check_getitem(self.bseries, self.bseries.to_dense())
_check_getitem(self.btseries, self.btseries.to_dense())
_check_getitem(self.zbseries, self.zbseries.to_dense())
_check_getitem(self.iseries, self.iseries.to_dense())
_check_getitem(self.ziseries, self.ziseries.to_dense())
# exception handling
pytest.raises(Exception, self.bseries.__getitem__,
len(self.bseries) + 1)
# index not contained
pytest.raises(Exception, self.btseries.__getitem__,
self.btseries.index[-1] + BDay())
def test_get_get_value(self):
tm.assert_almost_equal(self.bseries.get(10), self.bseries[10])
assert self.bseries.get(len(self.bseries) + 1) is None
dt = self.btseries.index[10]
result = self.btseries.get(dt)
expected = self.btseries.to_dense()[dt]
tm.assert_almost_equal(result, expected)
tm.assert_almost_equal(self.bseries.get_value(10), self.bseries[10])
def test_set_value(self):
idx = self.btseries.index[7]
self.btseries.set_value(idx, 0)
assert self.btseries[idx] == 0
self.iseries.set_value('foobar', 0)
assert self.iseries.index[-1] == 'foobar'
assert self.iseries['foobar'] == 0
def test_getitem_slice(self):
idx = self.bseries.index
res = self.bseries[::2]
assert isinstance(res, SparseSeries)
expected = self.bseries.reindex(idx[::2])
tm.assert_sp_series_equal(res, expected)
res = self.bseries[:5]
assert isinstance(res, SparseSeries)
tm.assert_sp_series_equal(res, self.bseries.reindex(idx[:5]))
res = self.bseries[5:]
tm.assert_sp_series_equal(res, self.bseries.reindex(idx[5:]))
# negative indices
res = self.bseries[:-3]
tm.assert_sp_series_equal(res, self.bseries.reindex(idx[:-3]))
def test_take(self):
def _compare_with_dense(sp):
dense = sp.to_dense()
def _compare(idx):
dense_result = dense.take(idx).values
sparse_result = sp.take(idx)
assert isinstance(sparse_result, SparseSeries)
tm.assert_almost_equal(dense_result,
sparse_result.values.values)
_compare([1., 2., 3., 4., 5., 0.])
_compare([7, 2, 9, 0, 4])
_compare([3, 6, 3, 4, 7])
self._check_all(_compare_with_dense)
pytest.raises(Exception, self.bseries.take,
[0, len(self.bseries) + 1])
# Corner case
sp = SparseSeries(np.ones(10) * nan)
exp = pd.Series(np.repeat(nan, 5))
tm.assert_series_equal(sp.take([0, 1, 2, 3, 4]), exp)
def test_numpy_take(self):
sp = SparseSeries([1.0, 2.0, 3.0])
indices = [1, 2]
tm.assert_series_equal(np.take(sp, indices, axis=0).to_dense(),
np.take(sp.to_dense(), indices, axis=0))
msg = "the 'out' parameter is not supported"
tm.assert_raises_regex(ValueError, msg, np.take,
sp, indices, out=np.empty(sp.shape))
msg = "the 'mode' parameter is not supported"
tm.assert_raises_regex(ValueError, msg, np.take,
sp, indices, mode='clip')
def test_setitem(self):
self.bseries[5] = 7.
assert self.bseries[5] == 7.
def test_setslice(self):
self.bseries[5:10] = 7.
tm.assert_series_equal(self.bseries[5:10].to_dense(),
Series(7., index=range(5, 10),
name=self.bseries.name))
def test_operators(self):
def _check_op(a, b, op):
sp_result = op(a, b)
adense = a.to_dense() if isinstance(a, SparseSeries) else a
bdense = b.to_dense() if isinstance(b, SparseSeries) else b
dense_result = op(adense, bdense)
tm.assert_almost_equal(sp_result.to_dense(), dense_result)
def check(a, b):
_check_op(a, b, operator.add)
_check_op(a, b, operator.sub)
_check_op(a, b, operator.truediv)
_check_op(a, b, operator.floordiv)
_check_op(a, b, operator.mul)
_check_op(a, b, lambda x, y: operator.add(y, x))
_check_op(a, b, lambda x, y: operator.sub(y, x))
_check_op(a, b, lambda x, y: operator.truediv(y, x))
_check_op(a, b, lambda x, y: operator.floordiv(y, x))
_check_op(a, b, lambda x, y: operator.mul(y, x))
# NaN ** 0 = 1 in C?
# _check_op(a, b, operator.pow)
# _check_op(a, b, lambda x, y: operator.pow(y, x))
check(self.bseries, self.bseries)
check(self.iseries, self.iseries)
check(self.bseries, self.iseries)
check(self.bseries, self.bseries2)
check(self.bseries, self.iseries2)
check(self.iseries, self.iseries2)
# scalar value
check(self.bseries, 5)
# zero-based
check(self.zbseries, self.zbseries * 2)
check(self.zbseries, self.zbseries2)
check(self.ziseries, self.ziseries2)
# with dense
result = self.bseries + self.bseries.to_dense()
tm.assert_sp_series_equal(result, self.bseries + self.bseries)
def test_binary_operators(self):
# skipping for now #####
import pytest
pytest.skip("skipping sparse binary operators test")
def _check_inplace_op(iop, op):
tmp = self.bseries.copy()
expected = op(tmp, self.bseries)
iop(tmp, self.bseries)
tm.assert_sp_series_equal(tmp, expected)
inplace_ops = ['add', 'sub', 'mul', 'truediv', 'floordiv', 'pow']
for op in inplace_ops:
_check_inplace_op(getattr(operator, "i%s" % op),
getattr(operator, op))
def test_abs(self):
s = SparseSeries([1, 2, -3], name='x')
expected = SparseSeries([1, 2, 3], name='x')
result = s.abs()
tm.assert_sp_series_equal(result, expected)
assert result.name == 'x'
result = abs(s)
tm.assert_sp_series_equal(result, expected)
assert result.name == 'x'
result = np.abs(s)
tm.assert_sp_series_equal(result, expected)
assert result.name == 'x'
s = SparseSeries([1, -2, 2, -3], fill_value=-2, name='x')
expected = SparseSeries([1, 2, 3], sparse_index=s.sp_index,
fill_value=2, name='x')
result = s.abs()
tm.assert_sp_series_equal(result, expected)
assert result.name == 'x'
result = abs(s)
tm.assert_sp_series_equal(result, expected)
assert result.name == 'x'
result = np.abs(s)
tm.assert_sp_series_equal(result, expected)
assert result.name == 'x'
def test_reindex(self):
def _compare_with_series(sps, new_index):
spsre = sps.reindex(new_index)
series = sps.to_dense()
seriesre = series.reindex(new_index)
seriesre = seriesre.to_sparse(fill_value=sps.fill_value)
tm.assert_sp_series_equal(spsre, seriesre)
tm.assert_series_equal(spsre.to_dense(), seriesre.to_dense())
_compare_with_series(self.bseries, self.bseries.index[::2])
_compare_with_series(self.bseries, list(self.bseries.index[::2]))
_compare_with_series(self.bseries, self.bseries.index[:10])
_compare_with_series(self.bseries, self.bseries.index[5:])
_compare_with_series(self.zbseries, self.zbseries.index[::2])
_compare_with_series(self.zbseries, self.zbseries.index[:10])
_compare_with_series(self.zbseries, self.zbseries.index[5:])
# special cases
same_index = self.bseries.reindex(self.bseries.index)
tm.assert_sp_series_equal(self.bseries, same_index)
assert same_index is not self.bseries
# corner cases
sp = SparseSeries([], index=[])
# TODO: sp_zero is not used anywhere...remove?
sp_zero = SparseSeries([], index=[], fill_value=0) # noqa
_compare_with_series(sp, np.arange(10))
# with copy=False
reindexed = self.bseries.reindex(self.bseries.index, copy=True)
reindexed.sp_values[:] = 1.
assert (self.bseries.sp_values != 1.).all()
reindexed = self.bseries.reindex(self.bseries.index, copy=False)
reindexed.sp_values[:] = 1.
tm.assert_numpy_array_equal(self.bseries.sp_values, np.repeat(1., 10))
def test_sparse_reindex(self):
length = 10
def _check(values, index1, index2, fill_value):
first_series = SparseSeries(values, sparse_index=index1,
fill_value=fill_value)
reindexed = first_series.sparse_reindex(index2)
assert reindexed.sp_index is index2
int_indices1 = index1.to_int_index().indices
int_indices2 = index2.to_int_index().indices
expected = Series(values, index=int_indices1)
expected = expected.reindex(int_indices2).fillna(fill_value)
tm.assert_almost_equal(expected.values, reindexed.sp_values)
# make sure level argument asserts
# TODO: expected is not used anywhere...remove?
expected = expected.reindex(int_indices2).fillna(fill_value) # noqa
def _check_with_fill_value(values, first, second, fill_value=nan):
i_index1 = IntIndex(length, first)
i_index2 = IntIndex(length, second)
b_index1 = i_index1.to_block_index()
b_index2 = i_index2.to_block_index()
_check(values, i_index1, i_index2, fill_value)
_check(values, b_index1, b_index2, fill_value)
def _check_all(values, first, second):
_check_with_fill_value(values, first, second, fill_value=nan)
_check_with_fill_value(values, first, second, fill_value=0)
index1 = [2, 4, 5, 6, 8, 9]
values1 = np.arange(6.)
_check_all(values1, index1, [2, 4, 5])
_check_all(values1, index1, [2, 3, 4, 5, 6, 7, 8, 9])
_check_all(values1, index1, [0, 1])
_check_all(values1, index1, [0, 1, 7, 8, 9])
_check_all(values1, index1, [])
first_series = SparseSeries(values1,
sparse_index=IntIndex(length, index1),
fill_value=nan)
with tm.assert_raises_regex(TypeError,
'new index must be a SparseIndex'):
reindexed = first_series.sparse_reindex(0) # noqa
def test_repr(self):
# TODO: These aren't used
bsrepr = repr(self.bseries) # noqa
isrepr = repr(self.iseries) # noqa
def test_iter(self):
pass
def test_truncate(self):
pass
def test_fillna(self):
pass
def test_groupby(self):
pass
def test_reductions(self):
def _compare_with_dense(obj, op):
sparse_result = getattr(obj, op)()
series = obj.to_dense()
dense_result = getattr(series, op)()
assert sparse_result == dense_result
to_compare = ['count', 'sum', 'mean', 'std', 'var', 'skew']
def _compare_all(obj):
for op in to_compare:
_compare_with_dense(obj, op)
_compare_all(self.bseries)
self.bseries.sp_values[5:10] = np.NaN
_compare_all(self.bseries)
_compare_all(self.zbseries)
self.zbseries.sp_values[5:10] = np.NaN
_compare_all(self.zbseries)
series = self.zbseries.copy()
series.fill_value = 2
_compare_all(series)
nonna = Series(np.random.randn(20)).to_sparse()
_compare_all(nonna)
nonna2 = Series(np.random.randn(20)).to_sparse(fill_value=0)
_compare_all(nonna2)
def test_dropna(self):
sp = SparseSeries([0, 0, 0, nan, nan, 5, 6], fill_value=0)
sp_valid = sp.valid()
expected = sp.to_dense().valid()
expected = expected[expected != 0]
exp_arr = pd.SparseArray(expected.values, fill_value=0, kind='block')
tm.assert_sp_array_equal(sp_valid.values, exp_arr)
tm.assert_index_equal(sp_valid.index, expected.index)
assert len(sp_valid.sp_values) == 2
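# Note: dropna() on the NaN-filled block series densifies here; the assert
# below documents that the result is a plain Series, not a SparseSeries.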
result = self.bseries.dropna()
expected = self.bseries.to_dense().dropna()
assert not isinstance(result, SparseSeries)
tm.assert_series_equal(result, expected)
def test_homogenize(self):
def _check_matches(indices, expected):
data = {}
for i, idx in enumerate(indices):
data[i] = SparseSeries(idx.to_int_index().indices,
sparse_index=idx, fill_value=np.nan)
# homogenized is only valid with NaN fill values
homogenized = spf.homogenize(data)
for k, v in compat.iteritems(homogenized):
assert (v.sp_index.equals(expected))
indices1 = [BlockIndex(10, [2], [7]), BlockIndex(10, [1, 6], [3, 4]),
BlockIndex(10, [0], [10])]
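# The homogenized index is expected to be the intersection of the block
# indices above: positions {2, 3} and {6, 7, 8}.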
expected1 = BlockIndex(10, [2, 6], [2, 3])
_check_matches(indices1, expected1)
indices2 = [BlockIndex(10, [2], [7]), BlockIndex(10, [2], [7])]
expected2 = indices2[0]
_check_matches(indices2, expected2)
# must have NaN fill value
data = {'a': SparseSeries(np.arange(7), sparse_index=expected2,
fill_value=0)}
with tm.assert_raises_regex(TypeError, "NaN fill value"):
spf.homogenize(data)
def test_fill_value_corner(self):
cop = self.zbseries.copy()
cop.fill_value = 0
result = self.bseries / cop
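# The fill slots compute NaN (bseries fill) / 0 (cop fill), so the result
# keeps a NaN fill_value.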
assert np.isnan(result.fill_value)
cop2 = self.zbseries.copy()
cop2.fill_value = 1
result = cop2 / cop
# 1 / 0 is inf
assert np.isinf(result.fill_value)
def test_fill_value_when_combine_const(self):
# GH12723
s = SparseSeries([0, 1, np.nan, 3, 4, 5], index=np.arange(6))
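# add(2, fill_value=0) substitutes 0 for the missing entry before adding,
# which should match fillna(0).add(2) on the dense equivalent.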
exp = s.fillna(0).add(2)
res = s.add(2, fill_value=0)
tm.assert_series_equal(res, exp)
def test_shift(self):
series = SparseSeries([nan, 1., 2., 3., nan, nan], index=np.arange(6))
shifted = series.shift(0)
assert shifted is not series
tm.assert_sp_series_equal(shifted, series)
f = lambda s: s.shift(1)
_dense_series_compare(series, f)
f = lambda s: s.shift(-2)
_dense_series_compare(series, f)
series = SparseSeries([nan, 1., 2., 3., nan, nan],
index=bdate_range('1/1/2000', periods=6))
f = lambda s: s.shift(2, freq='B')
_dense_series_compare(series, f)
f = lambda s: s.shift(2, freq=BDay())
_dense_series_compare(series, f)
def test_shift_nan(self):
# GH 12908
orig = pd.Series([np.nan, 2, np.nan, 4, 0, np.nan, 0])
sparse = orig.to_sparse()
tm.assert_sp_series_equal(sparse.shift(0), orig.shift(0).to_sparse())
tm.assert_sp_series_equal(sparse.shift(1), orig.shift(1).to_sparse())
tm.assert_sp_series_equal(sparse.shift(2), orig.shift(2).to_sparse())
tm.assert_sp_series_equal(sparse.shift(3), orig.shift(3).to_sparse())
tm.assert_sp_series_equal(sparse.shift(-1), orig.shift(-1).to_sparse())
tm.assert_sp_series_equal(sparse.shift(-2), orig.shift(-2).to_sparse())
tm.assert_sp_series_equal(sparse.shift(-3), orig.shift(-3).to_sparse())
tm.assert_sp_series_equal(sparse.shift(-4), orig.shift(-4).to_sparse())
sparse = orig.to_sparse(fill_value=0)
tm.assert_sp_series_equal(sparse.shift(0),
orig.shift(0).to_sparse(fill_value=0))
tm.assert_sp_series_equal(sparse.shift(1),
orig.shift(1).to_sparse(fill_value=0))
tm.assert_sp_series_equal(sparse.shift(2),
orig.shift(2).to_sparse(fill_value=0))
tm.assert_sp_series_equal(sparse.shift(3),
orig.shift(3).to_sparse(fill_value=0))
tm.assert_sp_series_equal(sparse.shift(-1),
orig.shift(-1).to_sparse(fill_value=0))
tm.assert_sp_series_equal(sparse.shift(-2),
orig.shift(-2).to_sparse(fill_value=0))
tm.assert_sp_series_equal(sparse.shift(-3),
orig.shift(-3).to_sparse(fill_value=0))
tm.assert_sp_series_equal(sparse.shift(-4),
orig.shift(-4).to_sparse(fill_value=0))
def test_shift_dtype(self):
# GH 12908
orig = pd.Series([1, 2, 3, 4], dtype=np.int64)
sparse = orig.to_sparse()
tm.assert_sp_series_equal(sparse.shift(0), orig.shift(0).to_sparse())
sparse = orig.to_sparse(fill_value=np.nan)
tm.assert_sp_series_equal(sparse.shift(0),
orig.shift(0).to_sparse(fill_value=np.nan))
# shift(1) or more span changes dtype to float64
tm.assert_sp_series_equal(sparse.shift(1), orig.shift(1).to_sparse())
tm.assert_sp_series_equal(sparse.shift(2), orig.shift(2).to_sparse())
tm.assert_sp_series_equal(sparse.shift(3), orig.shift(3).to_sparse())
tm.assert_sp_series_equal(sparse.shift(-1), orig.shift(-1).to_sparse())
tm.assert_sp_series_equal(sparse.shift(-2), orig.shift(-2).to_sparse())
tm.assert_sp_series_equal(sparse.shift(-3), orig.shift(-3).to_sparse())
tm.assert_sp_series_equal(sparse.shift(-4), orig.shift(-4).to_sparse())
def test_shift_dtype_fill_value(self):
# GH 12908
orig = pd.Series([1, 0, 0, 4], dtype=np.int64)
for v in [0, 1, np.nan]:
sparse = orig.to_sparse(fill_value=v)
tm.assert_sp_series_equal(sparse.shift(0),
orig.shift(0).to_sparse(fill_value=v))
tm.assert_sp_series_equal(sparse.shift(1),
orig.shift(1).to_sparse(fill_value=v))
tm.assert_sp_series_equal(sparse.shift(2),
orig.shift(2).to_sparse(fill_value=v))
tm.assert_sp_series_equal(sparse.shift(3),
orig.shift(3).to_sparse(fill_value=v))
tm.assert_sp_series_equal(sparse.shift(-1),
orig.shift(-1).to_sparse(fill_value=v))
tm.assert_sp_series_equal(sparse.shift(-2),
orig.shift(-2).to_sparse(fill_value=v))
tm.assert_sp_series_equal(sparse.shift(-3),
orig.shift(-3).to_sparse(fill_value=v))
tm.assert_sp_series_equal(sparse.shift(-4),
orig.shift(-4).to_sparse(fill_value=v))
def test_combine_first(self):
s = self.bseries
result = s[::2].combine_first(s)
result2 = s[::2].combine_first(s.to_dense())
expected = s[::2].to_dense().combine_first(s.to_dense())
expected = expected.to_sparse(fill_value=s.fill_value)
tm.assert_sp_series_equal(result, result2)
tm.assert_sp_series_equal(result, expected)
class TestSparseHandlingMultiIndexes(object):
def setup_method(self, method):
miindex = pd.MultiIndex.from_product(
[["x", "y"], ["10", "20"]], names=['row-foo', 'row-bar'])
micol = pd.MultiIndex.from_product(
[['a', 'b', 'c'], ["1", "2"]], names=['col-foo', 'col-bar'])
dense_multiindex_frame = pd.DataFrame(
index=miindex, columns=micol).sort_index().sort_index(axis=1)
self.dense_multiindex_frame = dense_multiindex_frame.fillna(value=3.14)
def test_to_sparse_preserve_multiindex_names_columns(self):
sparse_multiindex_frame = self.dense_multiindex_frame.to_sparse()
sparse_multiindex_frame = sparse_multiindex_frame.copy()
tm.assert_index_equal(sparse_multiindex_frame.columns,
self.dense_multiindex_frame.columns)
def test_round_trip_preserve_multiindex_names(self):
sparse_multiindex_frame = self.dense_multiindex_frame.to_sparse()
round_trip_multiindex_frame = sparse_multiindex_frame.to_dense()
tm.assert_frame_equal(self.dense_multiindex_frame,
round_trip_multiindex_frame,
check_column_type=True,
check_names=True)
class TestSparseSeriesScipyInteraction(object):
# Issue 8048: add SparseSeries coo methods
def setup_method(self, method):
tm._skip_if_no_scipy()
import scipy.sparse
# SparseSeries inputs used in the tests; the tests rely on their order
self.sparse_series = []
s = pd.Series([3.0, nan, 1.0, 2.0, nan, nan])
s.index = pd.MultiIndex.from_tuples([(1, 2, 'a', 0),
(1, 2, 'a', 1),
(1, 1, 'b', 0),
(1, 1, 'b', 1),
(2, 1, 'b', 0),
(2, 1, 'b', 1)],
names=['A', 'B', 'C', 'D'])
self.sparse_series.append(s.to_sparse())
ss = self.sparse_series[0].copy()
ss.index.names = [3, 0, 1, 2]
self.sparse_series.append(ss)
ss = pd.Series([
nan
] * 12, index=cartesian_product((range(3), range(4)))).to_sparse()
for k, v in zip([(0, 0), (1, 2), (1, 3)], [3.0, 1.0, 2.0]):
ss[k] = v
self.sparse_series.append(ss)
# results used in tests
self.coo_matrices = []
self.coo_matrices.append(scipy.sparse.coo_matrix(
([3.0, 1.0, 2.0], ([0, 1, 1], [0, 2, 3])), shape=(3, 4)))
self.coo_matrices.append(scipy.sparse.coo_matrix(
([3.0, 1.0, 2.0], ([1, 0, 0], [0, 2, 3])), shape=(3, 4)))
self.coo_matrices.append(scipy.sparse.coo_matrix(
([3.0, 1.0, 2.0], ([0, 1, 1], [0, 0, 1])), shape=(3, 2)))
self.ils = [[(1, 2), (1, 1), (2, 1)], [(1, 1), (1, 2), (2, 1)],
[(1, 2, 'a'), (1, 1, 'b'), (2, 1, 'b')]]
self.jls = [[('a', 0), ('a', 1), ('b', 0), ('b', 1)], [0, 1]]
def test_to_coo_text_names_integer_row_levels_nosort(self):
ss = self.sparse_series[0]
kwargs = {'row_levels': [0, 1], 'column_levels': [2, 3]}
result = (self.coo_matrices[0], self.ils[0], self.jls[0])
self._run_test(ss, kwargs, result)
def test_to_coo_text_names_integer_row_levels_sort(self):
ss = self.sparse_series[0]
kwargs = {'row_levels': [0, 1],
'column_levels': [2, 3],
'sort_labels': True}
result = (self.coo_matrices[1], self.ils[1], self.jls[0])
self._run_test(ss, kwargs, result)
def test_to_coo_text_names_text_row_levels_nosort_col_level_single(self):
ss = self.sparse_series[0]
kwargs = {'row_levels': ['A', 'B', 'C'],
'column_levels': ['D'],
'sort_labels': False}
result = (self.coo_matrices[2], self.ils[2], self.jls[1])
self._run_test(ss, kwargs, result)
def test_to_coo_integer_names_integer_row_levels_nosort(self):
ss = self.sparse_series[1]
kwargs = {'row_levels': [3, 0], 'column_levels': [1, 2]}
result = (self.coo_matrices[0], self.ils[0], self.jls[0])
self._run_test(ss, kwargs, result)
def test_to_coo_text_names_text_row_levels_nosort(self):
ss = self.sparse_series[0]
kwargs = {'row_levels': ['A', 'B'], 'column_levels': ['C', 'D']}
result = (self.coo_matrices[0], self.ils[0], self.jls[0])
self._run_test(ss, kwargs, result)
def test_to_coo_bad_partition_nonnull_intersection(self):
ss = self.sparse_series[0]
pytest.raises(ValueError, ss.to_coo, ['A', 'B', 'C'], ['C', 'D'])
def test_to_coo_bad_partition_small_union(self):
ss = self.sparse_series[0]
pytest.raises(ValueError, ss.to_coo, ['A'], ['C', 'D'])
def test_to_coo_nlevels_less_than_two(self):
ss = self.sparse_series[0]
ss.index = np.arange(len(ss.index))
pytest.raises(ValueError, ss.to_coo)
def test_to_coo_bad_ilevel(self):
ss = self.sparse_series[0]
pytest.raises(KeyError, ss.to_coo, ['A', 'B'], ['C', 'D', 'E'])
def test_to_coo_duplicate_index_entries(self):
ss = pd.concat([self.sparse_series[0],
self.sparse_series[0]]).to_sparse()
pytest.raises(ValueError, ss.to_coo, ['A', 'B'], ['C', 'D'])
def test_from_coo_dense_index(self):
ss = SparseSeries.from_coo(self.coo_matrices[0], dense_index=True)
check = self.sparse_series[2]
tm.assert_sp_series_equal(ss, check)
def test_from_coo_nodense_index(self):
ss = SparseSeries.from_coo(self.coo_matrices[0], dense_index=False)
check = self.sparse_series[2]
check = check.dropna().to_sparse()
tm.assert_sp_series_equal(ss, check)
def test_from_coo_long_repr(self):
# GH 13114
# test it doesn't raise error. Formatting is tested in test_format
tm._skip_if_no_scipy()
import scipy.sparse
sparse = SparseSeries.from_coo(scipy.sparse.rand(350, 18))
repr(sparse)
def _run_test(self, ss, kwargs, check):
results = ss.to_coo(**kwargs)
self._check_results_to_coo(results, check)
# for every test, also test symmetry property (transpose), switch
# row_levels and column_levels
d = kwargs.copy()
d['row_levels'] = kwargs['column_levels']
d['column_levels'] = kwargs['row_levels']
results = ss.to_coo(**d)
results = (results[0].T, results[2], results[1])
self._check_results_to_coo(results, check)
def _check_results_to_coo(self, results, check):
(A, il, jl) = results
(A_result, il_result, jl_result) = check
# convert to dense and compare
tm.assert_numpy_array_equal(A.todense(), A_result.todense())
# or compare directly as difference of sparse
# assert(abs(A - A_result).max() < 1e-12) # max is failing in python
# 2.6
assert il == il_result
assert jl == jl_result
def test_concat(self):
val1 = np.array([1, 2, np.nan, np.nan, 0, np.nan])
val2 = np.array([3, np.nan, 4, 0, 0])
for kind in ['integer', 'block']:
sparse1 = pd.SparseSeries(val1, name='x', kind=kind)
sparse2 = pd.SparseSeries(val2, name='y', kind=kind)
res = pd.concat([sparse1, sparse2])
exp = pd.concat([pd.Series(val1), pd.Series(val2)])
exp = pd.SparseSeries(exp, kind=kind)
tm.assert_sp_series_equal(res, exp)
sparse1 = pd.SparseSeries(val1, fill_value=0, name='x', kind=kind)
sparse2 = pd.SparseSeries(val2, fill_value=0, name='y', kind=kind)
res = pd.concat([sparse1, sparse2])
exp = pd.concat([pd.Series(val1), pd.Series(val2)])
exp = pd.SparseSeries(exp, fill_value=0, kind=kind)
tm.assert_sp_series_equal(res, exp)
def test_concat_axis1(self):
val1 = np.array([1, 2, np.nan, np.nan, 0, np.nan])
val2 = np.array([3, np.nan, 4, 0, 0])
sparse1 = pd.SparseSeries(val1, name='x')
sparse2 = pd.SparseSeries(val2, name='y')
res = pd.concat([sparse1, sparse2], axis=1)
exp = pd.concat([pd.Series(val1, name='x'),
pd.Series(val2, name='y')], axis=1)
exp = pd.SparseDataFrame(exp)
tm.assert_sp_frame_equal(res, exp)
def test_concat_different_fill(self):
val1 = np.array([1, 2, np.nan, np.nan, 0, np.nan])
val2 = np.array([3, np.nan, 4, 0, 0])
for kind in ['integer', 'block']:
sparse1 = pd.SparseSeries(val1, name='x', kind=kind)
sparse2 = pd.SparseSeries(val2, name='y', kind=kind, fill_value=0)
res = pd.concat([sparse1, sparse2])
exp = pd.concat([pd.Series(val1), pd.Series(val2)])
exp = pd.SparseSeries(exp, kind=kind)
tm.assert_sp_series_equal(res, exp)
res = pd.concat([sparse2, sparse1])
exp = pd.concat([pd.Series(val2), pd.Series(val1)])
exp = pd.SparseSeries(exp, kind=kind, fill_value=0)
tm.assert_sp_series_equal(res, exp)
def test_concat_axis1_different_fill(self):
val1 = np.array([1, 2, np.nan, np.nan, 0, np.nan])
val2 = np.array([3, np.nan, 4, 0, 0])
sparse1 = pd.SparseSeries(val1, name='x')
sparse2 = pd.SparseSeries(val2, name='y', fill_value=0)
res = pd.concat([sparse1, sparse2], axis=1)
exp = pd.concat([pd.Series(val1, name='x'),
pd.Series(val2, name='y')], axis=1)
assert isinstance(res, pd.SparseDataFrame)
tm.assert_frame_equal(res.to_dense(), exp)
def test_concat_different_kind(self):
val1 = np.array([1, 2, np.nan, np.nan, 0, np.nan])
val2 = np.array([3, np.nan, 4, 0, 0])
sparse1 = pd.SparseSeries(val1, name='x', kind='integer')
sparse2 = pd.SparseSeries(val2, name='y', kind='block', fill_value=0)
res = pd.concat([sparse1, sparse2])
exp = pd.concat([pd.Series(val1), pd.Series(val2)])
exp = pd.SparseSeries(exp, kind='integer')
tm.assert_sp_series_equal(res, exp)
res = pd.concat([sparse2, sparse1])
exp = pd.concat([pd.Series(val2), pd.Series(val1)])
exp = pd.SparseSeries(exp, kind='block', fill_value=0)
tm.assert_sp_series_equal(res, exp)
def test_concat_sparse_dense(self):
# use first input's fill_value
val1 = np.array([1, 2, np.nan, np.nan, 0, np.nan])
val2 = np.array([3, np.nan, 4, 0, 0])
for kind in ['integer', 'block']:
sparse = pd.SparseSeries(val1, name='x', kind=kind)
dense = pd.Series(val2, name='y')
res = pd.concat([sparse, dense])
exp = pd.concat([pd.Series(val1), dense])
exp = pd.SparseSeries(exp, kind=kind)
tm.assert_sp_series_equal(res, exp)
res = pd.concat([dense, sparse, dense])
exp = pd.concat([dense, pd.Series(val1), dense])
exp = pd.SparseSeries(exp, kind=kind)
tm.assert_sp_series_equal(res, exp)
sparse = pd.SparseSeries(val1, name='x', kind=kind, fill_value=0)
dense = pd.Series(val2, name='y')
res = pd.concat([sparse, dense])
exp = pd.concat([pd.Series(val1), dense])
exp = pd.SparseSeries(exp, kind=kind, fill_value=0)
tm.assert_sp_series_equal(res, exp)
res = pd.concat([dense, sparse, dense])
exp = pd.concat([dense, pd.Series(val1), dense])
exp = pd.SparseSeries(exp, kind=kind, fill_value=0)
tm.assert_sp_series_equal(res, exp)
def test_value_counts(self):
vals = [1, 2, nan, 0, nan, 1, 2, nan, nan, 1, 2, 0, 1, 1]
dense = pd.Series(vals, name='xx')
sparse = pd.SparseSeries(vals, name='xx')
tm.assert_series_equal(sparse.value_counts(),
dense.value_counts())
tm.assert_series_equal(sparse.value_counts(dropna=False),
dense.value_counts(dropna=False))
sparse = pd.SparseSeries(vals, name='xx', fill_value=0)
tm.assert_series_equal(sparse.value_counts(),
dense.value_counts())
tm.assert_series_equal(sparse.value_counts(dropna=False),
dense.value_counts(dropna=False))
def test_value_counts_dup(self):
vals = [1, 2, nan, 0, nan, 1, 2, nan, nan, 1, 2, 0, 1, 1]
# numeric op may cause sp_values to include the same value as
# fill_value
dense = pd.Series(vals, name='xx') / 0.
sparse = pd.SparseSeries(vals, name='xx') / 0.
tm.assert_series_equal(sparse.value_counts(),
dense.value_counts())
tm.assert_series_equal(sparse.value_counts(dropna=False),
dense.value_counts(dropna=False))
vals = [1, 2, 0, 0, 0, 1, 2, 0, 0, 1, 2, 0, 1, 1]
dense = pd.Series(vals, name='xx') * 0.
sparse = pd.SparseSeries(vals, name='xx') * 0.
tm.assert_series_equal(sparse.value_counts(),
dense.value_counts())
tm.assert_series_equal(sparse.value_counts(dropna=False),
dense.value_counts(dropna=False))
def test_value_counts_int(self):
vals = [1, 2, 0, 1, 2, 1, 2, 0, 1, 1]
dense = pd.Series(vals, name='xx')
# fill_value is np.nan, but should not be included in the result
sparse = pd.SparseSeries(vals, name='xx')
tm.assert_series_equal(sparse.value_counts(),
dense.value_counts())
tm.assert_series_equal(sparse.value_counts(dropna=False),
dense.value_counts(dropna=False))
sparse = pd.SparseSeries(vals, name='xx', fill_value=0)
tm.assert_series_equal(sparse.value_counts(),
dense.value_counts())
tm.assert_series_equal(sparse.value_counts(dropna=False),
dense.value_counts(dropna=False))
def test_isnull(self):
# GH 8276
s = pd.SparseSeries([np.nan, np.nan, 1, 2, np.nan], name='xxx')
res = s.isnull()
exp = pd.SparseSeries([True, True, False, False, True], name='xxx',
fill_value=True)
tm.assert_sp_series_equal(res, exp)
# if fill_value is not nan, True can be included in sp_values
s = pd.SparseSeries([np.nan, 0., 1., 2., 0.], name='xxx',
fill_value=0.)
res = s.isnull()
assert isinstance(res, pd.SparseSeries)
exp = pd.Series([True, False, False, False, False], name='xxx')
tm.assert_series_equal(res.to_dense(), exp)
def test_isnotnull(self):
# GH 8276
s = pd.SparseSeries([np.nan, np.nan, 1, 2, np.nan], name='xxx')
res = s.isnotnull()
exp = pd.SparseSeries([False, False, True, True, False], name='xxx',
fill_value=False)
tm.assert_sp_series_equal(res, exp)
# if fill_value is not nan, True can be included in sp_values
s = pd.SparseSeries([np.nan, 0., 1., 2., 0.], name='xxx',
fill_value=0.)
res = s.isnotnull()
assert isinstance(res, pd.SparseSeries)
exp = pd.Series([False, True, True, True, True], name='xxx')
tm.assert_series_equal(res.to_dense(), exp)
def _dense_series_compare(s, f):
result = f(s)
assert (isinstance(result, SparseSeries))
dense_result = f(s.to_dense())
tm.assert_series_equal(result.to_dense(), dense_result)
class TestSparseSeriesAnalytics(object):
def setup_method(self, method):
arr, index = _test_data1()
self.bseries = SparseSeries(arr, index=index, kind='block',
name='bseries')
arr, index = _test_data1_zero()
self.zbseries = SparseSeries(arr, index=index, kind='block',
fill_value=0, name='zbseries')
def test_cumsum(self):
result = self.bseries.cumsum()
expected = SparseSeries(self.bseries.to_dense().cumsum())
tm.assert_sp_series_equal(result, expected)
result = self.zbseries.cumsum()
expected = self.zbseries.to_dense().cumsum()
tm.assert_series_equal(result, expected)
axis = 1 # Series is 1-D, so only axis = 0 is valid.
msg = "No axis named {axis}".format(axis=axis)
with tm.assert_raises_regex(ValueError, msg):
self.bseries.cumsum(axis=axis)
def test_numpy_cumsum(self):
result = np.cumsum(self.bseries)
expected = SparseSeries(self.bseries.to_dense().cumsum())
tm.assert_sp_series_equal(result, expected)
result = np.cumsum(self.zbseries)
expected = self.zbseries.to_dense().cumsum()
tm.assert_series_equal(result, expected)
msg = "the 'dtype' parameter is not supported"
tm.assert_raises_regex(ValueError, msg, np.cumsum,
self.bseries, dtype=np.int64)
msg = "the 'out' parameter is not supported"
tm.assert_raises_regex(ValueError, msg, np.cumsum,
self.zbseries, out=result)
def test_numpy_func_call(self):
# no exception should be raised even though
# numpy passes in 'axis=None' or 'axis=-1'
funcs = ['sum', 'cumsum', 'var', 'mean',
'prod', 'cumprod', 'std', 'argsort',
'argmin', 'argmax', 'min', 'max']
for func in funcs:
for series in ('bseries', 'zbseries'):
getattr(np, func)(getattr(self, series))
|
|
"""
This example probes selected switches based on a pre-planned probing algorithm
./pox.py log.level --DEBUG MultiSwitchPreplannedProbing
sudo mn --custom ~/mininet/custom/topo-6sw.py --topo mytopo --mac --switch ovsk --controller remote
"""
from pox.core import core
import pox
log = core.getLogger()
from pox.lib.packet.ethernet import ethernet, ETHER_BROADCAST, ETHER_ANY
from pox.lib.packet.ipv4 import ipv4
from pox.lib.packet.arp import arp
from pox.lib.addresses import IPAddr, EthAddr
from pox.lib.util import str_to_bool, dpid_to_str, str_to_dpid
from pox.lib.recoco import Timer
import pox.lib.packet as pkt
import pox.openflow.spanning_tree
import pox.openflow.discovery
from pox.lib.packet.icmp import icmp
import pox.openflow.libopenflow_01 as of
from pox.lib.revent import *
import time
from Preplanned_Probing import Preplanned_Probing
class MultiSwitch (EventMixin):
def __init__ (self):
self.listenTo(core)
#Same as above line
#core.openflow.addListenerByName("ConnectionUp", _handle_ConnectionUp)
self.connections = set()
#Timer(5,self.send_ping, recurring=True)
self.ping_replies = {}
self.probing_nodes = set()
#[node][adjacent node list] -> [4][1,3,5,6]
self.probing_nodes_adj = {}
def _handle_GoingUpEvent (self, event):
core.openflow.miss_send_len = 512
self.listenTo(core.openflow)
log.debug("Controller Up...")
def _handle_PortStatus (self, event):
if event.added:
action = "added"
elif event.deleted:
action = "removed"
else:
action = "modified"
#print "Port %s on Switch %s has been %s. " % (event.port, event.dpid, action)
def _handle_PacketIn (self, event):
#print "Packet In from Switch %s" % event.dpid
dpid = event.connection.dpid
inport = event.port
packet = event.parsed
#print event.ofp.reason
#print event.ofp.show
if not packet.parsed:
log.warning("%i %i ignoring unparsed packet", dpid, inport)
return
if packet.type == ethernet.LLDP_TYPE or packet.type == ethernet.IPV6_TYPE:
return
if isinstance(packet.next, ipv4):
if packet.next.protocol == 1:
probe_node = int(str(packet.dst).split(":")[5])
if probe_node in self.ping_replies:
self.ping_replies[probe_node].remove(dpid)
def _handle_ConnectionUp (self, event):
#print "Switch %s has come up." % event.dpid
log.debug("Controlling %s" % (event.connection,))
msg = of.ofp_flow_mod()
msg.priority = 10
msg.actions.append(of.ofp_action_output(port = of.OFPP_NONE))
event.connection.send(msg)
log.info("Disabling Flooding %s", dpid_to_str(event.dpid))
self.connections.add(event.connection)
if len(self.connections) == 6:
time.sleep(2)
self.select_probing_nodes()
self.install_flow ()
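# install_flow pushes two kinds of ICMP rules (the dpid checks below assume
# switches 2, 4 and 6 end up as the probing nodes): on a probing switch, an
# ICMP packet addressed to the switch's own MAC is flooded out all ports
# (priority 100) so an injected probe reaches every neighbour, while any
# other ICMP is sent to the controller (priority 80); switches 1, 3 and 5
# simply send all ICMP to the controller (priority 100).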
def install_flow (self):
for con in core.openflow.connections:
msg = of.ofp_flow_mod()
if con.dpid == 1:
msg.actions.append(of.ofp_action_output(port=of.OFPP_CONTROLLER))
if con.dpid == 2:
msg.match.dl_dst = con.eth_addr
msg.actions.append(of.ofp_action_output(port=of.OFPP_ALL))
if con.dpid == 3:
msg.actions.append(of.ofp_action_output(port=of.OFPP_CONTROLLER))
if con.dpid == 4:
msg.match.dl_dst = con.eth_addr
msg.actions.append(of.ofp_action_output(port=of.OFPP_ALL))
if con.dpid == 5:
msg.actions.append(of.ofp_action_output(port=of.OFPP_CONTROLLER))
if con.dpid == 6:
msg.match.dl_dst = con.eth_addr
msg.actions.append(of.ofp_action_output(port=of.OFPP_ALL))
msg.priority = 100
msg.command = of.OFPFC_ADD
msg.buffer_id = None
msg.idle_timeout = of.OFP_FLOW_PERMANENT
msg.hard_timeout = of.OFP_FLOW_PERMANENT
msg.match.dl_type = 0x0800
msg.match.nw_proto = pkt.ipv4.ICMP_PROTOCOL
msg.out_port = of.OFPP_NONE
con.send(msg)
for con in core.openflow.connections:
msg = of.ofp_flow_mod()
if con.dpid == 2:
msg.actions.append(of.ofp_action_output(port=of.OFPP_CONTROLLER))
if con.dpid == 4:
msg.actions.append(of.ofp_action_output(port=of.OFPP_CONTROLLER))
if con.dpid == 6:
msg.actions.append(of.ofp_action_output(port=of.OFPP_CONTROLLER))
msg.priority = 80
msg.command = of.OFPFC_ADD
msg.buffer_id = None
msg.idle_timeout = of.OFP_FLOW_PERMANENT
msg.hard_timeout = of.OFP_FLOW_PERMANENT
msg.match.dl_type = 0x0800
msg.match.nw_proto = pkt.ipv4.ICMP_PROTOCOL
msg.out_port = of.OFPP_NONE
con.send(msg)
Timer(4,self.start_monitoring, recurring=True)
#time.sleep(2)
#self.send_ping(dpid=4, eth_dst=EthAddr(core.openflow.getConnection(4).eth_addr))
def start_monitoring(self):
#print self.ping_replies
no_errors = True
for probing_node, adj_nodes in self.ping_replies.iteritems():
if adj_nodes:
for node in adj_nodes:
log.info("Link between switches %s and %s is down" % (probing_node, node))
no_errors = False
if no_errors:
log.info("All links are up!")
for probing_node in self.probing_nodes_adj.iterkeys():
#print "probing_node %s" % probing_node
self.ping_replies[probing_node] = self.probing_nodes_adj[probing_node][:]
#self.ping_replies = self.probing_nodes_adj.copy()
#print self.ping_replies
for node in self.probing_nodes:
self.send_ping(dpid=node,eth_dst=EthAddr(core.openflow.getConnection(node).eth_addr))
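# send_ping injects an ICMP echo request addressed to the probing switch's
# own MAC via OFPP_TABLE, so the priority-100 rule installed above floods it
# to the switch's neighbours; each neighbour's controller-bound rule reports
# the probe as a PacketIn, _handle_PacketIn removes that neighbour from
# ping_replies, and start_monitoring inspects what is left on the next tick.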
def send_ping (self, dpid=1, eth_dst = ETHER_ANY):
con = core.openflow.getConnection(dpid)
icmp = pkt.icmp()
icmp.type = pkt.TYPE_ECHO_REQUEST
echo = pkt.ICMP.echo(payload="SENDING PING")
icmp.payload = echo
#Make the IP packet around it
ipp = pkt.ipv4()
ipp.protocol = ipp.ICMP_PROTOCOL
# Ethernet around that...
e = pkt.ethernet()
e.dst = eth_dst
e.type = e.IP_TYPE
# Hook them up...
ipp.payload = icmp
e.payload = ipp
msg = of.ofp_packet_out()
msg.actions.append(of.ofp_action_output(port = of.OFPP_TABLE))
msg.data = e.pack()
con.send(msg)
#self.previous_ping_returned = False
#log.info("Sending ping to switch %s" % dpid)
def select_probing_nodes (self):
Topo = []
for x in range(0,6):
Topo.append([])
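# Adjacency matrix of the 6-switch topology: Topo[i][j] == 1 means there is
# a link between switch i+1 and switch j+1 (links 1-2, 1-4, 1-6, 2-3, 3-4,
# 4-5, 4-6 and 5-6).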
Topo[0].extend([0,1,0,1,0,1])
Topo[1].extend([1,0,1,0,0,0])
Topo[2].extend([0,1,0,1,0,0])
Topo[3].extend([1,0,1,0,1,1])
Topo[4].extend([0,0,0,1,0,1])
Topo[5].extend([1,0,0,1,1,0])
pp = Preplanned_Probing(Topo)
self.probing_nodes = pp.select_nodes()
for node in self.probing_nodes:
self.probing_nodes_adj[node] = []
for index in range(0,len(Topo[int(node)-1])):
if Topo[int(node)-1][index] == 1:
self.probing_nodes_adj[node].append(index+1)
#print self.probing_nodes_adj[2]
#self.ping_replies = self.probing_nodes_adj.copy()
def launch ():
import pox.log.color
pox.log.color.launch()
pox.log.launch(format="[@@@bold@@@level%(name)-22s@@@reset] " + "@@@bold%(message)s@@@normal")
#pox.openflow.discovery.launch()
#core.getLogger("openflow.spanning_tree").setLevel("INFO")
core.registerNew(MultiSwitch)
#pox.openflow.spanning_tree.launch()
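# Illustrative only -- the custom topo-6sw.py referenced in the module
# docstring is not part of this file. A minimal sketch matching the
# adjacency matrix in select_probing_nodes() could look like this, saved as
# ~/mininet/custom/topo-6sw.py:
#
#   from mininet.topo import Topo
#
#   class MyTopo(Topo):
#       "Six OVS switches wired as 1-2, 1-4, 1-6, 2-3, 3-4, 4-5, 4-6, 5-6."
#       def __init__(self):
#           Topo.__init__(self)
#           switches = [self.addSwitch('s%d' % i) for i in range(1, 7)]
#           for a, b in [(1, 2), (1, 4), (1, 6), (2, 3),
#                        (3, 4), (4, 5), (4, 6), (5, 6)]:
#               self.addLink(switches[a - 1], switches[b - 1])
#
#   topos = {'mytopo': (lambda: MyTopo())}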
|
|
# coding: utf-8
from __future__ import absolute_import
# The following YAML grammar is LL(1) and is parsed by a recursive descent
# parser.
#
# stream ::= STREAM-START implicit_document? explicit_document*
# STREAM-END
# implicit_document ::= block_node DOCUMENT-END*
# explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
# block_node_or_indentless_sequence ::=
# ALIAS
# | properties (block_content |
# indentless_block_sequence)?
# | block_content
# | indentless_block_sequence
# block_node ::= ALIAS
# | properties block_content?
# | block_content
# flow_node ::= ALIAS
# | properties flow_content?
# | flow_content
# properties ::= TAG ANCHOR? | ANCHOR TAG?
# block_content ::= block_collection | flow_collection | SCALAR
# flow_content ::= flow_collection | SCALAR
# block_collection ::= block_sequence | block_mapping
# flow_collection ::= flow_sequence | flow_mapping
# block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)*
# BLOCK-END
# indentless_sequence ::= (BLOCK-ENTRY block_node?)+
# block_mapping ::= BLOCK-MAPPING_START
# ((KEY block_node_or_indentless_sequence?)?
# (VALUE block_node_or_indentless_sequence?)?)*
# BLOCK-END
# flow_sequence ::= FLOW-SEQUENCE-START
# (flow_sequence_entry FLOW-ENTRY)*
# flow_sequence_entry?
# FLOW-SEQUENCE-END
# flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
# flow_mapping ::= FLOW-MAPPING-START
# (flow_mapping_entry FLOW-ENTRY)*
# flow_mapping_entry?
# FLOW-MAPPING-END
# flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
#
# FIRST sets:
#
# stream: { STREAM-START }
# explicit_document: { DIRECTIVE DOCUMENT-START }
# implicit_document: FIRST(block_node)
# block_node: { ALIAS TAG ANCHOR SCALAR BLOCK-SEQUENCE-START
# BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START }
# flow_node: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START }
# block_content: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START
# FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
# flow_content: { FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
# block_collection: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START }
# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
# block_sequence: { BLOCK-SEQUENCE-START }
# block_mapping: { BLOCK-MAPPING-START }
# block_node_or_indentless_sequence: { ALIAS ANCHOR TAG SCALAR
# BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START
# FLOW-MAPPING-START BLOCK-ENTRY }
# indentless_sequence: { ENTRY }
# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
# flow_sequence: { FLOW-SEQUENCE-START }
# flow_mapping: { FLOW-MAPPING-START }
# flow_sequence_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START
# FLOW-MAPPING-START KEY }
# flow_mapping_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START
# FLOW-MAPPING-START KEY }
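#
# As a concrete example, the document "- a\n- b: c\n" scans to the token
# stream
#   STREAM-START BLOCK-SEQUENCE-START
#     BLOCK-ENTRY SCALAR(a)
#     BLOCK-ENTRY BLOCK-MAPPING-START KEY SCALAR(b) VALUE SCALAR(c) BLOCK-END
#   BLOCK-END STREAM-END
# i.e. an implicit_document whose block_node is a block_sequence containing
# a scalar and a block_mapping.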
# need to have full path, as pkg_resources tries to load parser.py in __init__.py
# only to not do anything with the package afterwards
# and for Jython too
from ruamel.yaml.error import MarkedYAMLError # type: ignore
from ruamel.yaml.tokens import * # NOQA
from ruamel.yaml.events import * # NOQA
from ruamel.yaml.scanner import Scanner, RoundTripScanner, ScannerError # NOQA
from ruamel.yaml.compat import utf8 # NOQA
__all__ = ['Parser', 'RoundTripParser', 'ParserError']
class ParserError(MarkedYAMLError):
pass
class Parser(object):
    # Since writing a recursive descent parser is a straightforward task, we
# do not give many comments here.
DEFAULT_TAGS = {
u'!': u'!',
u'!!': u'tag:yaml.org,2002:',
}
def __init__(self):
self.current_event = None
self.yaml_version = None
self.tag_handles = {}
self.states = []
self.marks = []
self.state = self.parse_stream_start
def dispose(self):
# Reset the state attributes (to clear self-references)
self.states = []
self.state = None
def check_event(self, *choices):
# Check the type of the next event.
if self.current_event is None:
if self.state:
self.current_event = self.state()
if self.current_event is not None:
if not choices:
return True
for choice in choices:
if isinstance(self.current_event, choice):
return True
return False
def peek_event(self):
# Get the next event.
if self.current_event is None:
if self.state:
self.current_event = self.state()
return self.current_event
def get_event(self):
# Get the next event and proceed further.
if self.current_event is None:
if self.state:
self.current_event = self.state()
value = self.current_event
self.current_event = None
return value
# stream ::= STREAM-START implicit_document? explicit_document*
# STREAM-END
# implicit_document ::= block_node DOCUMENT-END*
# explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
def parse_stream_start(self):
# Parse the stream start.
token = self.get_token()
token.move_comment(self.peek_token())
event = StreamStartEvent(token.start_mark, token.end_mark,
encoding=token.encoding)
# Prepare the next state.
self.state = self.parse_implicit_document_start
return event
def parse_implicit_document_start(self):
# Parse an implicit document.
if not self.check_token(DirectiveToken, DocumentStartToken,
StreamEndToken):
self.tag_handles = self.DEFAULT_TAGS
token = self.peek_token()
start_mark = end_mark = token.start_mark
event = DocumentStartEvent(start_mark, end_mark,
explicit=False)
# Prepare the next state.
self.states.append(self.parse_document_end)
self.state = self.parse_block_node
return event
else:
return self.parse_document_start()
def parse_document_start(self):
# Parse any extra document end indicators.
while self.check_token(DocumentEndToken):
self.get_token()
# Parse an explicit document.
if not self.check_token(StreamEndToken):
token = self.peek_token()
start_mark = token.start_mark
version, tags = self.process_directives()
if not self.check_token(DocumentStartToken):
raise ParserError(None, None,
"expected '<document start>', but found %r"
% self.peek_token().id,
self.peek_token().start_mark)
token = self.get_token()
end_mark = token.end_mark
event = DocumentStartEvent(
start_mark, end_mark,
explicit=True, version=version, tags=tags)
self.states.append(self.parse_document_end)
self.state = self.parse_document_content
else:
# Parse the end of the stream.
token = self.get_token()
event = StreamEndEvent(token.start_mark, token.end_mark,
comment=token.comment)
assert not self.states
assert not self.marks
self.state = None
return event
def parse_document_end(self):
# Parse the document end.
token = self.peek_token()
start_mark = end_mark = token.start_mark
explicit = False
if self.check_token(DocumentEndToken):
token = self.get_token()
end_mark = token.end_mark
explicit = True
event = DocumentEndEvent(start_mark, end_mark, explicit=explicit)
# Prepare the next state.
self.state = self.parse_document_start
return event
def parse_document_content(self):
if self.check_token(
DirectiveToken,
DocumentStartToken, DocumentEndToken, StreamEndToken):
event = self.process_empty_scalar(self.peek_token().start_mark)
self.state = self.states.pop()
return event
else:
return self.parse_block_node()
def process_directives(self):
self.yaml_version = None
self.tag_handles = {}
while self.check_token(DirectiveToken):
token = self.get_token()
if token.name == u'YAML':
if self.yaml_version is not None:
raise ParserError(
None, None,
"found duplicate YAML directive", token.start_mark)
major, minor = token.value
if major != 1:
raise ParserError(
None, None,
"found incompatible YAML document (version 1.* is "
"required)",
token.start_mark)
self.yaml_version = token.value
elif token.name == u'TAG':
handle, prefix = token.value
if handle in self.tag_handles:
raise ParserError(None, None,
"duplicate tag handle %r" % utf8(handle),
token.start_mark)
self.tag_handles[handle] = prefix
if self.tag_handles:
value = self.yaml_version, self.tag_handles.copy()
else:
value = self.yaml_version, None
for key in self.DEFAULT_TAGS:
if key not in self.tag_handles:
self.tag_handles[key] = self.DEFAULT_TAGS[key]
return value
# block_node_or_indentless_sequence ::= ALIAS
# | properties (block_content | indentless_block_sequence)?
# | block_content
# | indentless_block_sequence
# block_node ::= ALIAS
# | properties block_content?
# | block_content
# flow_node ::= ALIAS
# | properties flow_content?
# | flow_content
# properties ::= TAG ANCHOR? | ANCHOR TAG?
# block_content ::= block_collection | flow_collection | SCALAR
# flow_content ::= flow_collection | SCALAR
# block_collection ::= block_sequence | block_mapping
# flow_collection ::= flow_sequence | flow_mapping
def parse_block_node(self):
return self.parse_node(block=True)
def parse_flow_node(self):
return self.parse_node()
def parse_block_node_or_indentless_sequence(self):
return self.parse_node(block=True, indentless_sequence=True)
def transform_tag(self, handle, suffix):
return self.tag_handles[handle] + suffix
def parse_node(self, block=False, indentless_sequence=False):
if self.check_token(AliasToken):
token = self.get_token()
event = AliasEvent(token.value, token.start_mark, token.end_mark)
self.state = self.states.pop()
else:
anchor = None
tag = None
start_mark = end_mark = tag_mark = None
if self.check_token(AnchorToken):
token = self.get_token()
start_mark = token.start_mark
end_mark = token.end_mark
anchor = token.value
if self.check_token(TagToken):
token = self.get_token()
tag_mark = token.start_mark
end_mark = token.end_mark
tag = token.value
elif self.check_token(TagToken):
token = self.get_token()
start_mark = tag_mark = token.start_mark
end_mark = token.end_mark
tag = token.value
if self.check_token(AnchorToken):
token = self.get_token()
end_mark = token.end_mark
anchor = token.value
if tag is not None:
handle, suffix = tag
if handle is not None:
if handle not in self.tag_handles:
raise ParserError(
"while parsing a node", start_mark,
"found undefined tag handle %r" % utf8(handle),
tag_mark)
tag = self.transform_tag(handle, suffix)
else:
tag = suffix
# if tag == u'!':
# raise ParserError("while parsing a node", start_mark,
# "found non-specific tag '!'", tag_mark,
# "Please check 'http://pyyaml.org/wiki/YAMLNonSpecificTag'
# and share your opinion.")
if start_mark is None:
start_mark = end_mark = self.peek_token().start_mark
event = None
implicit = (tag is None or tag == u'!')
if indentless_sequence and self.check_token(BlockEntryToken):
end_mark = self.peek_token().end_mark
event = SequenceStartEvent(anchor, tag, implicit,
start_mark, end_mark)
self.state = self.parse_indentless_sequence_entry
else:
if self.check_token(ScalarToken):
token = self.get_token()
end_mark = token.end_mark
if (token.plain and tag is None) or tag == u'!':
implicit = (True, False)
elif tag is None:
implicit = (False, True)
else:
implicit = (False, False)
event = ScalarEvent(
anchor, tag, implicit, token.value,
start_mark, end_mark, style=token.style,
comment=token.comment
)
self.state = self.states.pop()
elif self.check_token(FlowSequenceStartToken):
end_mark = self.peek_token().end_mark
event = SequenceStartEvent(
anchor, tag, implicit,
start_mark, end_mark, flow_style=True)
self.state = self.parse_flow_sequence_first_entry
elif self.check_token(FlowMappingStartToken):
end_mark = self.peek_token().end_mark
event = MappingStartEvent(
anchor, tag, implicit,
start_mark, end_mark, flow_style=True)
self.state = self.parse_flow_mapping_first_key
elif block and self.check_token(BlockSequenceStartToken):
end_mark = self.peek_token().start_mark
# should inserting the comment be dependent on the
# indentation?
pt = self.peek_token()
comment = pt.comment
# print('pt0', type(pt))
if comment is None or comment[1] is None:
comment = pt.split_comment()
# print('pt1', comment)
event = SequenceStartEvent(
anchor, tag, implicit, start_mark, end_mark,
flow_style=False,
comment=comment,
)
self.state = self.parse_block_sequence_first_entry
elif block and self.check_token(BlockMappingStartToken):
end_mark = self.peek_token().start_mark
comment = self.peek_token().comment
event = MappingStartEvent(
anchor, tag, implicit, start_mark, end_mark,
flow_style=False, comment=comment)
self.state = self.parse_block_mapping_first_key
elif anchor is not None or tag is not None:
# Empty scalars are allowed even if a tag or an anchor is
# specified.
event = ScalarEvent(anchor, tag, (implicit, False), u'',
start_mark, end_mark)
self.state = self.states.pop()
else:
if block:
node = 'block'
else:
node = 'flow'
token = self.peek_token()
raise ParserError(
"while parsing a %s node" % node, start_mark,
"expected the node content, but found %r" % token.id,
token.start_mark)
return event
# block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)*
# BLOCK-END
def parse_block_sequence_first_entry(self):
token = self.get_token()
# move any comment from start token
# token.move_comment(self.peek_token())
self.marks.append(token.start_mark)
return self.parse_block_sequence_entry()
def parse_block_sequence_entry(self):
if self.check_token(BlockEntryToken):
token = self.get_token()
token.move_comment(self.peek_token())
if not self.check_token(BlockEntryToken, BlockEndToken):
self.states.append(self.parse_block_sequence_entry)
return self.parse_block_node()
else:
self.state = self.parse_block_sequence_entry
return self.process_empty_scalar(token.end_mark)
if not self.check_token(BlockEndToken):
token = self.peek_token()
raise ParserError(
"while parsing a block collection", self.marks[-1],
"expected <block end>, but found %r" %
token.id, token.start_mark)
token = self.get_token() # BlockEndToken
event = SequenceEndEvent(token.start_mark, token.end_mark,
comment=token.comment)
self.state = self.states.pop()
self.marks.pop()
return event
# indentless_sequence ::= (BLOCK-ENTRY block_node?)+
# indentless_sequence?
# sequence:
# - entry
# - nested
def parse_indentless_sequence_entry(self):
if self.check_token(BlockEntryToken):
token = self.get_token()
token.move_comment(self.peek_token())
if not self.check_token(BlockEntryToken,
KeyToken, ValueToken, BlockEndToken):
self.states.append(self.parse_indentless_sequence_entry)
return self.parse_block_node()
else:
self.state = self.parse_indentless_sequence_entry
return self.process_empty_scalar(token.end_mark)
token = self.peek_token()
event = SequenceEndEvent(token.start_mark, token.start_mark,
comment=token.comment)
self.state = self.states.pop()
return event
# block_mapping ::= BLOCK-MAPPING_START
# ((KEY block_node_or_indentless_sequence?)?
# (VALUE block_node_or_indentless_sequence?)?)*
# BLOCK-END
def parse_block_mapping_first_key(self):
token = self.get_token()
self.marks.append(token.start_mark)
return self.parse_block_mapping_key()
def parse_block_mapping_key(self):
if self.check_token(KeyToken):
token = self.get_token()
token.move_comment(self.peek_token())
if not self.check_token(KeyToken, ValueToken, BlockEndToken):
self.states.append(self.parse_block_mapping_value)
return self.parse_block_node_or_indentless_sequence()
else:
self.state = self.parse_block_mapping_value
return self.process_empty_scalar(token.end_mark)
if not self.check_token(BlockEndToken):
token = self.peek_token()
raise ParserError(
"while parsing a block mapping", self.marks[-1],
"expected <block end>, but found %r" % token.id,
token.start_mark)
token = self.get_token()
token.move_comment(self.peek_token())
event = MappingEndEvent(token.start_mark, token.end_mark,
comment=token.comment)
self.state = self.states.pop()
self.marks.pop()
return event
def parse_block_mapping_value(self):
if self.check_token(ValueToken):
token = self.get_token()
# the value token might have a post comment; move it to e.g. the block
if self.check_token(ValueToken):
token.move_comment(self.peek_token())
else:
token.move_comment(self.peek_token(), empty=True)
if not self.check_token(KeyToken, ValueToken, BlockEndToken):
self.states.append(self.parse_block_mapping_key)
return self.parse_block_node_or_indentless_sequence()
else:
self.state = self.parse_block_mapping_key
return self.process_empty_scalar(token.end_mark,
comment=self.peek_token().comment)
else:
self.state = self.parse_block_mapping_key
token = self.peek_token()
return self.process_empty_scalar(token.start_mark)
# flow_sequence ::= FLOW-SEQUENCE-START
# (flow_sequence_entry FLOW-ENTRY)*
# flow_sequence_entry?
# FLOW-SEQUENCE-END
# flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
#
# Note that while production rules for both flow_sequence_entry and
# flow_mapping_entry are equal, their interpretations are different.
# For `flow_sequence_entry`, the part `KEY flow_node? (VALUE flow_node?)?`
# generates an inline mapping (set syntax).
def parse_flow_sequence_first_entry(self):
token = self.get_token()
self.marks.append(token.start_mark)
return self.parse_flow_sequence_entry(first=True)
def parse_flow_sequence_entry(self, first=False):
if not self.check_token(FlowSequenceEndToken):
if not first:
if self.check_token(FlowEntryToken):
self.get_token()
else:
token = self.peek_token()
raise ParserError(
"while parsing a flow sequence", self.marks[-1],
"expected ',' or ']', but got %r" % token.id,
token.start_mark)
if self.check_token(KeyToken):
token = self.peek_token()
event = MappingStartEvent(None, None, True,
token.start_mark, token.end_mark,
flow_style=True)
self.state = self.parse_flow_sequence_entry_mapping_key
return event
elif not self.check_token(FlowSequenceEndToken):
self.states.append(self.parse_flow_sequence_entry)
return self.parse_flow_node()
token = self.get_token()
event = SequenceEndEvent(token.start_mark, token.end_mark,
comment=token.comment)
self.state = self.states.pop()
self.marks.pop()
return event
def parse_flow_sequence_entry_mapping_key(self):
token = self.get_token()
if not self.check_token(ValueToken,
FlowEntryToken, FlowSequenceEndToken):
self.states.append(self.parse_flow_sequence_entry_mapping_value)
return self.parse_flow_node()
else:
self.state = self.parse_flow_sequence_entry_mapping_value
return self.process_empty_scalar(token.end_mark)
def parse_flow_sequence_entry_mapping_value(self):
if self.check_token(ValueToken):
token = self.get_token()
if not self.check_token(FlowEntryToken, FlowSequenceEndToken):
self.states.append(self.parse_flow_sequence_entry_mapping_end)
return self.parse_flow_node()
else:
self.state = self.parse_flow_sequence_entry_mapping_end
return self.process_empty_scalar(token.end_mark)
else:
self.state = self.parse_flow_sequence_entry_mapping_end
token = self.peek_token()
return self.process_empty_scalar(token.start_mark)
def parse_flow_sequence_entry_mapping_end(self):
self.state = self.parse_flow_sequence_entry
token = self.peek_token()
return MappingEndEvent(token.start_mark, token.start_mark)
# flow_mapping ::= FLOW-MAPPING-START
# (flow_mapping_entry FLOW-ENTRY)*
# flow_mapping_entry?
# FLOW-MAPPING-END
# flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
def parse_flow_mapping_first_key(self):
token = self.get_token()
self.marks.append(token.start_mark)
return self.parse_flow_mapping_key(first=True)
def parse_flow_mapping_key(self, first=False):
if not self.check_token(FlowMappingEndToken):
if not first:
if self.check_token(FlowEntryToken):
self.get_token()
else:
token = self.peek_token()
raise ParserError(
"while parsing a flow mapping", self.marks[-1],
"expected ',' or '}', but got %r" % token.id,
token.start_mark)
if self.check_token(KeyToken):
token = self.get_token()
if not self.check_token(ValueToken,
FlowEntryToken, FlowMappingEndToken):
self.states.append(self.parse_flow_mapping_value)
return self.parse_flow_node()
else:
self.state = self.parse_flow_mapping_value
return self.process_empty_scalar(token.end_mark)
elif not self.check_token(FlowMappingEndToken):
self.states.append(self.parse_flow_mapping_empty_value)
return self.parse_flow_node()
token = self.get_token()
event = MappingEndEvent(token.start_mark, token.end_mark,
comment=token.comment)
self.state = self.states.pop()
self.marks.pop()
return event
def parse_flow_mapping_value(self):
if self.check_token(ValueToken):
token = self.get_token()
if not self.check_token(FlowEntryToken, FlowMappingEndToken):
self.states.append(self.parse_flow_mapping_key)
return self.parse_flow_node()
else:
self.state = self.parse_flow_mapping_key
return self.process_empty_scalar(token.end_mark)
else:
self.state = self.parse_flow_mapping_key
token = self.peek_token()
return self.process_empty_scalar(token.start_mark)
def parse_flow_mapping_empty_value(self):
self.state = self.parse_flow_mapping_key
return self.process_empty_scalar(self.peek_token().start_mark)
def process_empty_scalar(self, mark, comment=None):
return ScalarEvent(None, None, (True, False), u'', mark, mark, comment=comment)
class RoundTripParser(Parser):
"""roundtrip is a safe loader, that wants to see the unmangled tag"""
def transform_tag(self, handle, suffix):
# return self.tag_handles[handle]+suffix
if handle == '!!' and suffix in (u'null', u'bool', u'int', u'float', u'binary',
u'timestamp', u'omap', u'pairs', u'set', u'str',
u'seq', u'map'):
return Parser.transform_tag(self, handle, suffix)
return handle+suffix
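if __name__ == '__main__':
    # Minimal smoke test (illustrative; assumes the package-level parse()
    # helper, which mirrors PyYAML's yaml.parse(), is available in this
    # version of ruamel.yaml): print the event stream for a tiny document.
    import ruamel.yaml
    for _event in ruamel.yaml.parse(u"- a\n- b: c\n"):
        print(_event)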
|
|
#!/usr/bin/env python
"""
Copyright 2010-2017 University Of Southern California
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Program to set up a full validation run on the epicenter cluster
"""
# Import Python modules
import os
import sys
import math
import random
import shutil
import optparse
import tempfile
# Import Broadband modules
from install_cfg import InstallCfg
import validation_cfg
import bband_utils
# Constants
BATCH_SIM_FILE = "batch_run_bbp_sims.log"
CORES_PER_NODE = 8
MAX_SIMULATIONS = 200
CODEBASES = ["gp", "ucsb", "sdsu", "exsim", "csm", "irikura"]
def generate_src_files(numsim, source_file, srcdir, prefix, hypo_rand):
"""
Generates num_sim source files in the srcdir using different
random seeds
"""
src_props = bband_utils.parse_properties(source_file)
# Delete "seed" from the property set
if "seed" in src_props:
src_props.pop("seed")
# Get FAULT_LENGTH and FAULT_WIDTH from the SRC file
try:
flen = float(src_props["fault_length"])
fwid = float(src_props["fault_width"])
except KeyError:
raise bband_utils.ParameterError("Cannot read fault_length/fault_width"
" parameters from SRC file!")
if hypo_rand:
# Delete HYPO_ALONG_STK and HYPO_DOWN_DIP
if "hypo_along_stk" in src_props:
src_props.pop("hypo_along_stk")
if "hypo_down_dip" in src_props:
src_props.pop("hypo_down_dip")
# Create common list of keys for all files
output = ""
for key in src_props:
output = output + "%s = %s\n" % (key.upper(), src_props[key])
for sim in range(0, numsim):
random.seed(sim + 1)
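# math.exp(7 * math.log(10.0)) == 10**7, so this draws a SEED in [0, 10**7)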
seed = int(math.exp(7 * math.log(10.0)) * random.random())
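# Randomize the hypocenter: along strike uniformly within +/- 0.3 *
# FAULT_LENGTH of the fault midpoint, down dip uniformly between 0.2 and
# 0.8 of FAULT_WIDTH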
hypo_along_stk = flen * (0.2 + 0.6 * random.random() - 0.5)
hypo_down_dip = fwid * (0.2 + 0.6 * random.random())
srcfile = os.path.join(srcdir, "%s-%04d.src" % (prefix, sim))
outfile = open(srcfile, 'w')
outfile.write(output)
if hypo_rand:
outfile.write("HYPO_ALONG_STK = %.2f\n" % (hypo_along_stk))
outfile.write("HYPO_DOWN_DIP = %.2f\n" % (hypo_down_dip))
outfile.write("SEED = %d\n" % (seed))
outfile.close()
def generate_xml(install, numsim, srcdir, xmldir,
logdir, event, codebase, prefix,
skip_rupgen):
"""
Generates xml files in the xmldir for numsim simulations whose
source files are in the srcdir using the validation event and
codebase specified
"""
tmpdir = tempfile.mkdtemp(prefix="bbp-")
bbproot = "%s/run_bbp.py" % (install.A_COMP_DIR)
bfn = os.path.join(xmldir, BATCH_SIM_FILE)
batchfile = open(bfn, 'w')
for sim in range(0, numsim):
srcfile = os.path.join(srcdir, "%s-%04d.src" % (prefix, sim))
ofn = os.path.join(tmpdir, "bbp.optfile")
optfile = open(ofn, 'w')
optfile.write('y\n') # Validation
optfile.write('%s\n' % (event)) # Validation event
optfile.write('%s\n' % (codebase)) # Codebase to use
if codebase != "exsim" and codebase != "csm":
if skip_rupgen:
optfile.write('n\n') # Skip rupture generator
else:
optfile.write('y\n') # Run rupture generator
if not skip_rupgen:
optfile.write('y\n') # Provide custom source file
optfile.write('2\n') # Enter path to source file
optfile.write('%s\n' % (srcfile)) # Source file
optfile.write('1\n') # All validation stations
if codebase == "exsim":
# Don't specify custom ExSIM template file
optfile.write('n\n')
if (codebase != "exsim" and
codebase != "csm" and
codebase != "irikura"):
# Skip site response
optfile.write('n\n')
optfile.write('y\n') # Plot velocity seismograms
optfile.write('y\n') # Plot acceleration seismograms
optfile.write('y\n') # Generate GMPE comparison plot
optfile.write('1\n') # Run GP_GOF
optfile.flush()
optfile.close()
# Run BBP and generate the xml file
bband_utils.runprog("export BBP_DATA_DIR=%s; %s -o %s -s %d -g" %
(tmpdir, bbproot, ofn, sim+1))
# Copy the xml file
srcxml = os.path.join(tmpdir, "xml", "%d.xml" % (sim+1))
dstxml = os.path.join(xmldir, "%s-%04d.xml" % (prefix, sim))
shutil.copy2(srcxml, dstxml)
# Add entry to the batch file
bbp_sim = 10000000 + sim
logbase = os.path.join(logdir, str(bbp_sim))
logfile = os.path.join(logbase, "%d_%s.log" %
(bbp_sim, prefix))
# Make sure logdir exists
os.makedirs(logbase)
batchfile.write("%s -x %s -s %d -l %s\n" %
(bbproot, dstxml, bbp_sim, logfile))
# Close batch file
batchfile.flush()
batchfile.close()
# Clean-up
shutil.rmtree(tmpdir)
def write_pbs(install, numsim, simdir, xmldir, email, prefix):
"""
Write the pbs script
"""
# Calculate how many nodes we need
nodes = int(math.ceil(1.0 * numsim / CORES_PER_NODE))
# Some path names
outfile = os.path.join(simdir, "%s.out" % (prefix))
errfile = os.path.join(simdir, "%s.err" % (prefix))
bfn = os.path.join(xmldir, BATCH_SIM_FILE)
# Let's create the pbs file
pbsfn = os.path.join(simdir, "%s.pbs" % (prefix))
pbsfile = open(pbsfn, 'w')
pbsfile.write("#!/bin/bash\n")
pbsfile.write("\n")
pbsfile.write("#PBS -l walltime=300:00:00,nodes=%d:ppn=%d\n" %
(nodes, CORES_PER_NODE))
pbsfile.write("#PBS -V\n")
pbsfile.write("#PBS -m abe -M %s\n" % (email))
pbsfile.write("#PBS -e %s\n" % (errfile))
pbsfile.write("#PBS -o %s\n" % (outfile))
pbsfile.write("\n")
pbsfile.write("BBP_DIR=%s\n" % (install.A_INSTALL_ROOT))
pbsfile.write("PYTHONPATH=%s\n" % (install.A_COMP_DIR))
pbsfile.write("BBP_DATA_DIR=/scratch/$PBS_JOBID/bbpruns\n")
pbsfile.write("BBP_BASE_DIR=/scratch/$PBS_JOBID\n")
pbsfile.write("HOME=%s\n" % (simdir))
pbsfile.write("\n")
pbsfile.write("mkdir -p $BBP_DATA_DIR\n")
pbsfile.write("mkdir -p $HOME/Sims/indata\n")
pbsfile.write("mkdir -p $HOME/Sims/logs\n")
pbsfile.write("mkdir -p $HOME/Sims/outdata\n")
pbsfile.write("mkdir -p $HOME/Sims/tmpdata\n")
pbsfile.write("\n")
pbsfile.write('echo "Jobs start"\n')
pbsfile.write("date\n")
pbsfile.write('echo "BBP_DATA_DIR = $BBP_DATA_DIR"\n')
pbsfile.write("\n")
pbsfile.write("cd $HOME\n")
pbsfile.write("\n")
pbsfile.write("python $BBP_DIR/utils/batch/run_parallel.py $BBP_DIR/utils/batch/setup_bbp_epicenter_env.sh %s $PBS_NODEFILE 1\n" %
(bfn))
pbsfile.write("\n")
pbsfile.write('echo "Processing end"\n')
pbsfile.write("date\n")
pbsfile.write("\n")
for dir_to_copy in ['outdata', 'indata', 'logs', 'tmpdata']:
pbsfile.write('python $BBP_DIR/utils/batch/command_parallel.py $BBP_DIR/utils/batch/setup_bbp_epicenter_env.sh "cp -frp $BBP_DATA_DIR/%s/* $HOME/Sims/%s/." $PBS_NODEFILE\n' %
(dir_to_copy, dir_to_copy))
pbsfile.write('python $BBP_DIR/utils/batch/command_parallel.py $BBP_DIR/utils/batch/setup_bbp_epicenter_env.sh "rm -rf $BBP_BASE_DIR" $PBS_NODEFILE\n')
pbsfile.write("\n")
pbsfile.write('echo "Jobs end"\n')
pbsfile.write("date\n")
pbsfile.flush()
pbsfile.close()
# All done!
print
print "Validation run is set up on: %s" % (simdir)
print
print "To start the validation run, just type: "
print "$ qsub %s" % (pbsfn)
print
def main():
"""
Parse command line options and create the needed files/directories
"""
# Detect BBP installation
bbp_install = InstallCfg.getInstance()
prog_base = os.path.basename(sys.argv[0])
usage = "usage: %s [options]" % (prog_base)
parser = optparse.OptionParser(usage)
parser.add_option("-c", "--codebase", type="string", action="store",
dest="codebase",
help="Codebase for the simulation: %s" %
(CODEBASES))
parser.add_option("-e", "--event", type="string", action="store",
dest="event",
help="Validation event (should be configured in BBP)")
parser.add_option("-d", "--dir", type="string", action="store",
dest="simdir",
help="Simulation directory")
parser.add_option("--skip-rupgen", action="store_true", dest="skiprupgen",
help="Skip the rupture generator, run only 1 simulation")
parser.add_option("--hypo-rand", action="store_true", dest="hyporand",
help="Enables hypocenter randomization")
parser.add_option("--no-hypo-rand", action="store_false", dest="hyporand",
help="Disables hypocenter randomization")
parser.add_option("-n", "--num-simulations", type="int", action="store",
dest="numsim", help="Number of simulations to run")
parser.add_option("--email", type="string", action="store",
dest="email", help="Email for job notifications")
(options, args) = parser.parse_args()
# Validate codebase to use
codebase = options.codebase
if codebase is None:
print "Please specify a codebase!"
sys.exit(1)
codebase = codebase.lower()
if codebase not in CODEBASES:
print "Codebase needs to be one of: %s" % (CODEBASES)
# Check for event
event = options.event
if event is None:
print "Please provide a validation event!"
sys.exit(1)
event_names = validation_cfg.VE_EVENTS.get_all_names()
events = [v_event.lower() for v_event in event_names]
if event.lower() not in events:
print ("Event %s does not appear to be properly configured on BBP" %
(event))
print ("Available options are: %s" % (event_names))
print "Please provide another event or check your BBP installation."
sys.exit(1)
val_obj = validation_cfg.VE_EVENTS.get_event_by_print_name(event)
# Check if we want to run the rupture generator
skip_rupgen = options.skiprupgen
# Check for hypocenter randomization
if options.hyporand is None:
print "Please specify --hypo-rand or --no-hypo-rand!"
sys.exit(1)
if options.hyporand:
hypo_rand = True
else:
hypo_rand = False
if not skip_rupgen:
# Get source file
try:
source_file = val_obj.get_input(codebase, "source").strip()
except KeyError:
print ("Unable to get source file for event %s, codebase %s!" %
(event, codebase))
sys.exit(1)
if not source_file:
print ("Source file for event %s, codebase %s not specified!" %
(event, codebase))
sys.exit(1)
else:
# No need to get the source file, we start from the srf
source_file = None
try:
srf_file = val_obj.get_input(codebase, "srf").strip()
except KeyError:
print ("Event %s does not have a srf file for codebase %s!" %
(event, codebase))
sys.exit(1)
if not srf_file:
print ("Event %s does not have a srf file for codebase %s!" %
(event, codebase))
sys.exit(1)
# Force number of simulations to 1
options.numsim = 1
# Check for the simulation directory
simdir = options.simdir
if simdir is None:
print "Please provide a simulation directory!"
sys.exit(1)
simdir = os.path.abspath(simdir)
if os.path.exists(simdir):
print "Simulation directory exists: %s" % (simdir)
opt = raw_input("Do you want to delete its contents (y/n)? ")
if opt.lower() != "y":
print "Please provide another simulation directory!"
sys.exit(1)
opt = raw_input("ARE YOU SURE (y/n)? ")
if opt.lower() != "y":
print "Please provide another simulation directory!"
sys.exit(1)
# Delete existing directory (we already asked the user twice!!!)
shutil.rmtree(simdir)
# Pick up number of simulations to run
numsim = options.numsim
if numsim < 1 or numsim > MAX_SIMULATIONS:
print ("Number of simulations should be between 1 and %d" %
(MAX_SIMULATIONS))
sys.exit(1)
# Check for e-mail address
email = options.email
if email is None:
print "Please provide an e-mail address for job notifications"
sys.exit(1)
# Make sure user has configured the setup_bbp_epicenter_env.sh script
setup_bbp_env = os.path.join(bbp_install.A_INSTALL_ROOT,
"utils/batch/setup_bbp_epicenter_env.sh")
if not os.path.exists(setup_bbp_env):
print ("Cannot find setup_bbp_epicenter_env.sh script!")
print ("Expected at: %s" % (setup_bbp_env))
sys.exit(1)
# Create simulation directories
prefix = "%s-%s" % (event.lower(), codebase.lower())
# Make sure we remove spaces from prefix (e.g. for the "Loma Prieta" event)
prefix = prefix.replace(" ", '')
os.makedirs(simdir)
indir = os.path.join(simdir, "Sims", "indata")
outdir = os.path.join(simdir, "Sims", "outdata")
tmpdir = os.path.join(simdir, "Sims", "tmpdata")
logsdir = os.path.join(simdir, "Sims", "logs")
xmldir = os.path.join(simdir, "Xml")
srcdir = os.path.join(simdir, "Src")
for mdir in [indir, outdir, tmpdir, logsdir, xmldir, srcdir]:
os.makedirs(mdir)
# Generate source files if needed
if source_file is not None:
generate_src_files(numsim, source_file, srcdir, prefix, hypo_rand)
# Generate xml files
generate_xml(bbp_install, numsim, srcdir, xmldir,
logsdir, event, codebase, prefix,
skip_rupgen)
# Write pbs file
write_pbs(bbp_install, numsim, simdir, xmldir, email, prefix)
if __name__ == "__main__":
main()
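# Example invocation (illustrative; the event name, directory and e-mail
# address are placeholders):
#   ./<this script> -c gp -e "Northridge" -d /home/user/northridge-gp-val \
#       -n 50 --hypo-rand --email user@example.com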
|
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
'''Tests cribbed from linkscape/processing/test/robotstxt.test.old.cc'''
import unittest
import reppy
import logging
from reppy import Utility
reppy.logger.setLevel(logging.FATAL)
MYNAME = 'rogerbot'
class TestOldMozscape(unittest.TestCase):
@staticmethod
def parse(strng):
'''Helper to parse a string as a Rules object'''
return reppy.parser.Rules('http://example.com/robots.txt', 200, strng, 0)
def test_wwwseomozorg(self):
robots_txt = ( "../resources.test/rep/www.seomoz.org\n"
"User-agent: *\n"
"Disallow: /blogdetail.php?ID=537\n"
"Disallow: /tracker\n"
"\n"
"Sitemap: http://www.seomoz.org/sitemap.xml.gz\n"
"Sitemap: http://files.wistia.com/sitemaps/seomoz_video_sitemap.xml\n" )
rules = self.parse(robots_txt)
# Basic functionality, and lack of case sensitivity.
for agent in [ 'reppy', 'rEpPy' ]:
self.assertTrue(rules.allowed("/blog", agent))
self.assertFalse(rules.allowed("/blogdetail.php?ID=537", agent))
self.assertFalse(rules.allowed("/tracker", agent))
def test_allowall(self):
rules = self.parse("User-agent: *\nDisallow:")
for agent in [ "reppy", "oijsdofijsdofijsodifj" ]:
self.assertTrue(rules.allowed("/", agent))
self.assertTrue(rules.allowed("/foo", agent))
self.assertTrue(rules.allowed("/foo.html", agent))
self.assertTrue(rules.allowed("/foo/bar", agent))
self.assertTrue(rules.allowed("/foo/bar.html", agent))
def test_disallowall(self):
rules = self.parse("User-agent: *\nDisallow: /\n")
for agent in [ "reppy", "oijsdofijsdofijsodifj" ]:
self.assertFalse(rules.allowed("/", agent))
self.assertFalse(rules.allowed("/foo", agent))
self.assertFalse(rules.allowed("/foo.html", agent))
self.assertFalse(rules.allowed("/foo/bar", agent))
self.assertFalse(rules.allowed("/foo/bar.html", agent))
def test_no_googlebot_folder(self):
robots_txt = ( "User-agent: Googlebot\n"
"Disallow: /no-google/\n" )
rules = self.parse(robots_txt)
self.assertFalse(rules.allowed("/no-google/", "googlebot"))
self.assertFalse(rules.allowed("/no-google/something", "googlebot"))
self.assertFalse(rules.allowed("/no-google/something.html", "googlebot"))
self.assertTrue(rules.allowed("/", "googlebot"))
self.assertTrue(rules.allowed("/somethingelse", "googlebot"))
def test_no_googlebot_file(self):
robots_txt = ( "User-agent: Googlebot\n"
"Disallow: /no-google/blocked-page.html\n" )
rules = self.parse(robots_txt)
self.assertFalse(rules.allowed("/no-google/blocked-page.html", "googlebot"))
self.assertTrue(rules.allowed("/", "googlebot"))
self.assertTrue(rules.allowed("/no-google", "googlebot"))
self.assertTrue(rules.allowed("/no-google/someotherfolder", "googlebot"))
self.assertTrue(rules.allowed("/no-google/someotherfolder/somefile", "googlebot"))
def test_rogerbot_only(self):
robots_txt = ( "User-agent: *\n"
"Disallow: /no-bots/block-all-bots-except-rogerbot-page.html \t\t\t\t\n"
"\n"
"User-agent: rogerbot\n"
"Allow: /no-bots/block-all-bots-except-rogerbot-page.html\n" )
rules = self.parse(robots_txt)
self.assertFalse(rules.allowed("/no-bots/block-all-bots-except-rogerbot-page.html", "notroger"))
self.assertTrue(rules.allowed("/", "notroger"))
self.assertTrue(rules.allowed("/no-bots/block-all-bots-except-rogerbot-page.html", "rogerbot"))
self.assertTrue(rules.allowed("/", "rogerbot"))
def test_allow_certain_pages_only(self):
robots_txt = ( "User-agent: *\n"
"Allow: /onepage.html\n"
"Allow: /oneotherpage.php\n"
"Disallow: /\n"
"Allow: /subfolder/page1.html\n"
"Allow: /subfolder/page2.php\n"
"Disallow: /subfolder/\n" )
rules = self.parse(robots_txt)
self.assertFalse(rules.allowed("/", "reppy"))
self.assertFalse(rules.allowed("/foo", "reppy"))
self.assertFalse(rules.allowed("/bar.html", "reppy"))
self.assertTrue(rules.allowed("/onepage.html", "reppy"))
self.assertTrue(rules.allowed("/oneotherpage.php", "reppy"))
self.assertFalse(rules.allowed("/subfolder", "reppy"))
self.assertFalse(rules.allowed("/subfolder/", "reppy"))
self.assertFalse(rules.allowed("/subfolder/aaaaa", "reppy"))
self.assertTrue(rules.allowed("/subfolder/page1.html", "reppy"))
self.assertTrue(rules.allowed("/subfolder/page2.php", "reppy"))
def test_no_gifs_or_jpgs(self):
robots_txt = ( "User-agent: *\n"
"Disallow: /*.gif$\n"
"Disallow: /*.jpg$\n" )
rules = self.parse(robots_txt)
self.assertTrue(rules.allowed("/", "reppy"))
self.assertTrue(rules.allowed("/foo", "reppy"))
self.assertTrue(rules.allowed("/foo.html", "reppy"))
self.assertTrue(rules.allowed("/foo/bar", "reppy"))
self.assertTrue(rules.allowed("/foo/bar.html", "reppy"))
self.assertFalse(rules.allowed("/test.jpg", "reppy"))
self.assertFalse(rules.allowed("/foo/test.jpg", "reppy"))
self.assertFalse(rules.allowed("/foo/bar/test.jpg", "reppy"))
self.assertTrue(rules.allowed("/the-jpg-extension-is-awesome.html", "reppy"))
# Edge cases where the wildcard could match in multiple places
self.assertFalse(rules.allowed("/jpg.jpg", "reppy"))
self.assertFalse(rules.allowed("/foojpg.jpg", "reppy"))
self.assertFalse(rules.allowed("/bar/foojpg.jpg", "reppy"))
self.assertFalse(rules.allowed("/.jpg.jpg", "reppy"))
self.assertFalse(rules.allowed("/.jpg/.jpg", "reppy"))
self.assertFalse(rules.allowed("/test.gif", "reppy"))
self.assertFalse(rules.allowed("/foo/test.gif", "reppy"))
self.assertFalse(rules.allowed("/foo/bar/test.gif", "reppy"))
self.assertTrue(rules.allowed("/the-gif-extension-is-awesome.html", "reppy"))
def test_block_subdirectory_wildcard(self):
robots_txt = ( "User-agent: *\n"
"Disallow: /private*/\n" )
rules = self.parse(robots_txt)
self.assertTrue(rules.allowed("/", "reppy"))
self.assertTrue(rules.allowed("/foo", "reppy"))
self.assertTrue(rules.allowed("/foo.html", "reppy"))
self.assertTrue(rules.allowed("/foo/bar", "reppy"))
self.assertTrue(rules.allowed("/foo/bar.html", "reppy"))
# Disallow clause ends with a slash, so these shouldn't match
self.assertTrue(rules.allowed("/private", "reppy"))
self.assertTrue(rules.allowed("/privates", "reppy"))
self.assertTrue(rules.allowed("/privatedir", "reppy"))
self.assertFalse(rules.allowed("/private/", "reppy"))
self.assertFalse(rules.allowed("/private/foo", "reppy"))
self.assertFalse(rules.allowed("/private/foo/bar.html", "reppy"))
self.assertFalse(rules.allowed("/privates/", "reppy"))
self.assertFalse(rules.allowed("/privates/foo", "reppy"))
self.assertFalse(rules.allowed("/privates/foo/bar.html", "reppy"))
self.assertFalse(rules.allowed("/privatedir/", "reppy"))
self.assertFalse(rules.allowed("/privatedir/foo", "reppy"))
self.assertFalse(rules.allowed("/privatedir/foo/bar.html", "reppy"))
def test_block_urls_with_question_marks(self):
robots_txt = ( "User-agent: *\n"
"Disallow: /*?\n" )
rules = self.parse(robots_txt)
self.assertTrue(rules.allowed("/", "reppy"))
self.assertTrue(rules.allowed("/foo", "reppy"))
self.assertTrue(rules.allowed("/foo.html", "reppy"))
self.assertTrue(rules.allowed("/foo/bar", "reppy"))
self.assertTrue(rules.allowed("/foo/bar.html", "reppy"))
self.assertFalse(rules.allowed("/?", "reppy"))
self.assertFalse(rules.allowed("/foo?q=param", "reppy"))
self.assertFalse(rules.allowed("/foo.html?q=param", "reppy"))
self.assertFalse(rules.allowed("/foo/bar?q=param", "reppy"))
self.assertFalse(rules.allowed("/foo/bar.html?q=param&bar=baz", "reppy"))
def test_no_question_marks_except_at_end(self):
robots_txt = ( "User-agent: *\n"
"Allow: /*?$\n"
"Disallow: /*?\n" )
rules = self.parse(robots_txt)
self.assertTrue(rules.allowed("/", "reppy"))
self.assertTrue(rules.allowed("/foo", "reppy"))
self.assertTrue(rules.allowed("/foo.html", "reppy"))
self.assertTrue(rules.allowed("/foo/bar", "reppy"))
self.assertTrue(rules.allowed("/foo/bar.html", "reppy"))
self.assertTrue(rules.allowed("/?", "reppy"))
self.assertTrue(rules.allowed("/foo/bar.html?", "reppy"))
self.assertFalse(rules.allowed("/foo?q=param", "reppy"))
self.assertFalse(rules.allowed("/foo.html?q=param", "reppy"))
self.assertFalse(rules.allowed("/foo/bar?q=param", "reppy"))
self.assertFalse(rules.allowed("/foo/bar.html?q=param&bar=baz", "reppy"))
def test_wildcard_edge_cases(self):
robots_txt = ( "User-agent: *\n"
"Disallow: /*one\n"
"Disallow: /two*three\n"
"Disallow: /irrelevant/four*five\n"
"Disallow: /six*\n"
"Disallow: /foo/*/seven*/eight*nine\n"
"Disallow: /foo/*/*ten$\n"
"\n"
"Disallow: /*products/default.aspx\n"
"Disallow: /*/feed/$\n" )
rules = self.parse(robots_txt)
self.assertTrue(rules.allowed("/", "reppy"))
self.assertTrue(rules.allowed("/foo", "reppy"))
self.assertTrue(rules.allowed("/foo.html", "reppy"))
self.assertTrue(rules.allowed("/foo/bar", "reppy"))
self.assertTrue(rules.allowed("/foo/bar.html", "reppy"))
self.assertFalse(rules.allowed("/one", "reppy"))
self.assertFalse(rules.allowed("/aaaone", "reppy"))
self.assertFalse(rules.allowed("/aaaaoneaaa", "reppy"))
self.assertFalse(rules.allowed("/oneaaaa", "reppy"))
self.assertFalse(rules.allowed("/twothree", "reppy"))
self.assertFalse(rules.allowed("/twoaaathree", "reppy"))
self.assertFalse(rules.allowed("/twoaaaathreeaaa", "reppy"))
self.assertFalse(rules.allowed("/twothreeaaa", "reppy"))
self.assertFalse(rules.allowed("/irrelevant/fourfive", "reppy"))
self.assertFalse(rules.allowed("/irrelevant/fouraaaafive", "reppy"))
self.assertFalse(rules.allowed("/irrelevant/fouraaafiveaaaa", "reppy"))
self.assertFalse(rules.allowed("/irrelevant/fourfiveaaa", "reppy"))
self.assertFalse(rules.allowed("/six", "reppy"))
self.assertFalse(rules.allowed("/sixaaaa", "reppy"))
self.assertFalse(rules.allowed("/products/default.aspx", "reppy"))
self.assertFalse(rules.allowed("/author/admin/feed/", "reppy"))
def test_allow_edge_cases(self):
robots_txt = ( "User-agent: *\n"
"Disallow:\t/somereallylongfolder/\n"
"Allow:\t\t/*.jpg\n"
"\n"
"Disallow:\t/sales-secrets.php\n"
"Allow: \t\t/sales-secrets.php\n"
"\n"
"Disallow:\t/folder\n"
"Allow:\t\t/folder/\n"
"\n"
"Allow:\t\t/folder2\n"
"Disallow:\t/folder2/\n" )
rules = self.parse(robots_txt)
self.assertFalse(rules.allowed("/somereallylongfolder/", "reppy"))
self.assertFalse(rules.allowed("/somereallylongfolder/aaaa", "reppy"))
self.assertFalse(rules.allowed("/somereallylongfolder/test.jpg", "reppy"))
self.assertTrue(rules.allowed("/sales-secrets.php", "reppy"))
self.assertTrue(rules.allowed("/folder/page", "reppy"))
self.assertTrue(rules.allowed("/folder/page2", "reppy"))
def test_redundant_allow(self):
robots_txt = ( "User-agent: *\n"
"Disallow: /en/\n"
"Disallow: /files/documentation/\n"
"Disallow: /files/\n"
"Disallow: /de/careers/\n"
"Disallow: /images/\n"
"\n"
"Disallow: /print_mode.yes/\n"
"Disallow: /?product=lutensit&print_mode=yes&googlebot=nocrawl\n"
"Allow: /\n"
"Disallow: /search/\n" )
rules = self.parse(robots_txt)
self.assertFalse(rules.allowed("/print_mode.yes/", "reppy"))
self.assertFalse(rules.allowed("/print_mode.yes/foo", "reppy"))
self.assertFalse(rules.allowed("/search/", "reppy"))
self.assertFalse(rules.allowed("/search/foo", "reppy"))
# Some comments, wildcards, and anchor tests -- this was a legacy test
# ported from urlexclude
def test_legacy_test_1(self):
robots_txt = ( "user-agent: * #a comment!\n"
"disallow: /Blerf\n"
"disallow: /Blerg$\n"
"disallow: /blerf/*/print.html$#a comment\n"
"disallow: /blerf/*/blim/blerf$\n"
"disallow: /plerf/*/blim/blim$\n"
"\tuser-agent: BLERF\n"
" DisALLOW: \tblerfPage\n"
"blerf:blah\n" )
rules = self.parse(robots_txt)
self.assertFalse(rules.allowed("/Blerf/blah", "reppy"))
self.assertTrue(rules.allowed("/Blerg/blah", "reppy"))
self.assertTrue(rules.allowed("/blerf/blah", "reppy"))
self.assertFalse(rules.allowed("/Blerg", "reppy"))
self.assertFalse(rules.allowed("/blerf/some/subdirs/print.html", "reppy"))
self.assertTrue(rules.allowed("/blerf/some/subdirs/print.html?extra=stuff", "reppy"))
self.assertFalse(rules.allowed("/blerf/some/sub/dirs/blim/blim/blerf", "reppy"))
self.assertFalse(rules.allowed("/plerf/some/sub/dirs/blim/blim", "reppy"))
def test_legacy_test_2(self):
robots_txt = ( "User-agent: *\n"
"Allow: /searchhistory/\n"
"Disallow: /news?output=xhtml&\n"
"Allow: /news?output=xhtml\n"
"Disallow: /search\n"
"Disallow: /groups\n"
"Disallow: /images\n"
"Disallow: /catalogs\n"
"Disallow: /catalogues\n"
"Disallow: /news\n"
"Disallow: /nwshp\n"
"Allow: /news?btcid=\n"
"Disallow: /news?btcid=*&\n"
"Allow: /news?btaid=\n"
"Disallow: /news?btaid=*&\n"
"Disallow: /?\n"
"Disallow: /addurl/image?\n"
"Disallow: /pagead/\n"
"Disallow: /relpage/\n"
"Disallow: /relcontent\n"
"Disallow: /sorry/\n"
"Disallow: /imgres\n"
"Disallow: /keyword/\n"
"Disallow: /u/\n"
"Disallow: /univ/\n"
"Disallow: /cobrand\n"
"Disallow: /custom\n"
"Disallow: /advanced_group_search\n"
"Disallow: /advanced_search\n"
"Disallow: /googlesite\n"
"Disallow: /preferences\n"
"Disallow: /setprefs\n"
"Disallow: /swr\n"
"Disallow: /url\n"
"Disallow: /default\n"
"Disallow: /m?\n"
"Disallow: /m/?\n"
"Disallow: /m/lcb\n"
"Disallow: /m/search?\n"
"Disallow: /wml?\n"
"Disallow: /wml/?\n"
"Disallow: /wml/search?\n"
"Disallow: /xhtml?\n"
"Disallow: /xhtml/?\n"
"Disallow: /xhtml/search?\n"
"Disallow: /xml?\n"
"Disallow: /imode?\n"
"Disallow: /imode/?\n"
"Disallow: /imode/search?\n"
"Disallow: /jsky?\n"
"Disallow: /jsky/?\n"
"Disallow: /jsky/search?\n"
"Disallow: /pda?\n"
"Disallow: /pda/?\n"
"Disallow: /pda/search?\n"
"Disallow: /sprint_xhtml\n"
"Disallow: /sprint_wml\n"
"Disallow: /pqa\n"
"Disallow: /palm\n"
"Disallow: /gwt/\n"
"Disallow: /purchases\n"
"Disallow: /hws\n"
"Disallow: /bsd?\n"
"Disallow: /linux?\n"
"Disallow: /mac?\n"
"Disallow: /microsoft?\n"
"Disallow: /unclesam?\n"
"Disallow: /answers/search?q=\n"
"Disallow: /local?\n"
"Disallow: /local_url\n"
"Disallow: /froogle?\n"
"Disallow: /products?\n"
"Disallow: /froogle_\n"
"Disallow: /product_\n"
"Disallow: /products_\n"
"Disallow: /print\n"
"Disallow: /books\n"
"Disallow: /patents?\n"
"Disallow: /scholar?\n"
"Disallow: /complete\n"
"Disallow: /sponsoredlinks\n"
"Disallow: /videosearch?\n"
"Disallow: /videopreview?\n"
"Disallow: /videoprograminfo?\n"
"Disallow: /maps?\n"
"Disallow: /mapstt?\n"
"Disallow: /mapslt?\n"
"Disallow: /maps/stk/\n"
"Disallow: /mapabcpoi?\n"
"Disallow: /translate?\n"
"Disallow: /ie?\n"
"Disallow: /sms/demo?\n"
"Disallow: /katrina?\n"
"Disallow: /blogsearch?\n"
"Disallow: /blogsearch/\n"
"Disallow: /blogsearch_feeds\n"
"Disallow: /advanced_blog_search\n"
"Disallow: /reader/\n"
"Disallow: /uds/\n"
"Disallow: /chart?\n"
"Disallow: /transit?\n"
"Disallow: /mbd?\n"
"Disallow: /extern_js/\n"
"Disallow: /calendar/feeds/\n"
"Disallow: /calendar/ical/\n"
"Disallow: /cl2/feeds/\n"
"Disallow: /cl2/ical/\n"
"Disallow: /coop/directory\n"
"Disallow: /coop/manage\n"
"Disallow: /trends?\n"
"Disallow: /trends/music?\n"
"Disallow: /notebook/search?\n"
"Disallow: /music\n"
"Disallow: /browsersync\n"
"Disallow: /call\n"
"Disallow: /archivesearch?\n"
"Disallow: /archivesearch/url\n"
"Disallow: /archivesearch/advanced_search\n"
"Disallow: /base/search?\n"
"Disallow: /base/reportbadoffer\n"
"Disallow: /base/s2\n"
"Disallow: /urchin_test/\n"
"Disallow: /movies?\n"
"Disallow: /codesearch?\n"
"Disallow: /codesearch/feeds/search?\n"
"Disallow: /wapsearch?\n"
"Disallow: /safebrowsing\n"
"Disallow: /reviews/search?\n"
"Disallow: /orkut/albums\n"
"Disallow: /jsapi\n"
"Disallow: /views?\n"
"Disallow: /c/\n"
"Disallow: /cbk\n"
"Disallow: /recharge/dashboard/car\n"
"Disallow: /recharge/dashboard/static/\n"
"Disallow: /translate_c?\n"
"Disallow: /s2/profiles/me\n"
"Allow: /s2/profiles\n"
"Disallow: /s2\n"
"Disallow: /transconsole/portal/\n"
"Disallow: /gcc/\n"
"Disallow: /aclk\n"
"Disallow: /cse?\n"
"Disallow: /tbproxy/\n"
"Disallow: /MerchantSearchBeta/\n"
"Disallow: /ime/\n"
"Disallow: /websites?\n"
"Disallow: /shenghuo/search?\n" )
rules = self.parse(robots_txt)
self.assertFalse(rules.allowed("/?as_q=ethics&ie=UTF-8&ui=blg&bl_url=centrerion.blogspot.com&x=0&y=0&ui=blg", "reppy"))
# Real world example with several similar disallow rules
def test_legacy_test_3(self):
robots_txt = ( "User-agent: *\n"
"Allow: /searchhistory/\n"
"Disallow: /news?output=xhtml&\n"
"Allow: /news?output=xhtml\n"
"Disallow: /search\n"
"Disallow: /groups\n"
"Disallow: /images\n"
"Disallow: /catalogs\n"
"Disallow: /catalogues\n"
"Disallow: /news\n"
"Disallow: /nwshp\n"
"Allow: /news?btcid=\n"
"Disallow: /news?btcid=*&\n"
"Allow: /news?btaid=\n"
"Disallow: /news?btaid=*&\n"
"Disallow: /?\n"
"Disallow: /addurl/image?\n"
"Disallow: /pagead/\n"
"Disallow: /relpage/\n"
"Disallow: /relcontent\n"
"Disallow: /sorry/\n"
"Disallow: /imgres\n"
"Disallow: /keyword/\n"
"Disallow: /u/\n"
"Disallow: /univ/\n"
"Disallow: /cobrand\n"
"Disallow: /custom\n"
"Disallow: /advanced_group_search\n"
"Disallow: /advanced_search\n"
"Disallow: /googlesite\n"
"Disallow: /preferences\n"
"Disallow: /setprefs\n"
"Disallow: /swr\n"
"Disallow: /url\n"
"Disallow: /default\n"
"Disallow: /m?\n"
"Disallow: /m/?\n"
"Disallow: /m/lcb\n"
"Disallow: /m/search?\n"
"Disallow: /wml?\n"
"Disallow: /wml/?\n"
"Disallow: /wml/search?\n"
"Disallow: /xhtml?\n"
"Disallow: /xhtml/?\n"
"Disallow: /xhtml/search?\n"
"Disallow: /xml?\n"
"Disallow: /imode?\n"
"Disallow: /imode/?\n"
"Disallow: /imode/search?\n"
"Disallow: /jsky?\n"
"Disallow: /jsky/?\n"
"Disallow: /jsky/search?\n"
"Disallow: /pda?\n"
"Disallow: /pda/?\n"
"Disallow: /pda/search?\n"
"Disallow: /sprint_xhtml\n"
"Disallow: /sprint_wml\n"
"Disallow: /pqa\n"
"Disallow: /palm\n"
"Disallow: /gwt/\n"
"Disallow: /purchases\n"
"Disallow: /hws\n"
"Disallow: /bsd?\n"
"Disallow: /linux?\n"
"Disallow: /mac?\n"
"Disallow: /microsoft?\n"
"Disallow: /unclesam?\n"
"Disallow: /answers/search?q=\n"
"Disallow: /local?\n"
"Disallow: /local_url\n"
"Disallow: /froogle?\n"
"Disallow: /products?\n"
"Disallow: /froogle_\n"
"Disallow: /product_\n"
"Disallow: /products_\n"
"Disallow: /print\n"
"Disallow: /books\n"
"Disallow: /patents?\n"
"Disallow: /scholar?\n"
"Disallow: /complete\n"
"Disallow: /sponsoredlinks\n"
"Disallow: /videosearch?\n"
"Disallow: /videopreview?\n"
"Disallow: /videoprograminfo?\n"
"Disallow: /maps?\n"
"Disallow: /mapstt?\n"
"Disallow: /mapslt?\n"
"Disallow: /maps/stk/\n"
"Disallow: /mapabcpoi?\n"
"Disallow: /translate?\n"
"Disallow: /ie?\n"
"Disallow: /sms/demo?\n"
"Disallow: /katrina?\n"
"Disallow: /blogsearch?\n"
"Disallow: /blogsearch/\n"
"Disallow: /blogsearch_feeds\n"
"Disallow: /advanced_blog_search\n"
"Disallow: /reader/\n"
"Disallow: /uds/\n"
"Disallow: /chart?\n"
"Disallow: /transit?\n"
"Disallow: /mbd?\n"
"Disallow: /extern_js/\n"
"Disallow: /calendar/feeds/\n"
"Disallow: /calendar/ical/\n"
"Disallow: /cl2/feeds/\n"
"Disallow: /cl2/ical/\n"
"Disallow: /coop/directory\n"
"Disallow: /coop/manage\n"
"Disallow: /trends?\n"
"Disallow: /trends/music?\n"
"Disallow: /notebook/search?\n"
"Disallow: /music\n"
"Disallow: /browsersync\n"
"Disallow: /call\n"
"Disallow: /archivesearch?\n"
"Disallow: /archivesearch/url\n"
"Disallow: /archivesearch/advanced_search\n"
"Disallow: /base/search?\n"
"Disallow: /base/reportbadoffer\n"
"Disallow: /base/s2\n"
"Disallow: /urchin_test/\n"
"Disallow: /movies?\n"
"Disallow: /codesearch?\n"
"Disallow: /codesearch/feeds/search?\n"
"Disallow: /wapsearch?\n"
"Disallow: /safebrowsing\n"
"Disallow: /reviews/search?\n"
"Disallow: /orkut/albums\n"
"Disallow: /jsapi\n"
"Disallow: /views?\n"
"Disallow: /c/\n"
"Disallow: /cbk\n"
"Disallow: /recharge/dashboard/car\n"
"Disallow: /recharge/dashboard/static/\n"
"Disallow: /translate_c?\n"
"Disallow: /s2/profiles/me\n"
"Allow: /s2/profiles\n"
"Disallow: /s2\n"
"Disallow: /transconsole/portal/\n"
"Disallow: /gcc/\n"
"Disallow: /aclk\n"
"Disallow: /cse?\n"
"Disallow: /tbproxy/\n"
"Disallow: /MerchantSearchBeta/\n"
"Disallow: /ime/\n"
"Disallow: /websites?\n"
"Disallow: /shenghuo/search?\n" )
rules = self.parse(robots_txt)
self.assertFalse(rules.allowed("/archivesearch?q=stalin&scoring=t&hl=en&sa=N&sugg=d&as_ldate=1900&as_hdate=1919&lnav=hist2", "reppy"))
# Real world example
def test_legacy_test_4(self):
robots_txt = ( "User-agent: scooter\n"
"Disallow: /\n"
"\n"
"User-agent: wget\n"
"User-agent: webzip\n"
"Disallow: /\n"
"\n"
"User-agent: *\n"
"Disallow:\n" )
rules = self.parse(robots_txt)
self.assertTrue(rules.allowed("/index.html", "reppy"))
# Real world example
def test_legacy_test_5(self):
robots_txt = ( "# Alexa\n"
"User-agent: ia_archiver\n"
"Disallow: /utils/date_picker/\n"
"# Ask Jeeves\n"
"User-agent: Teoma\n"
"Disallow: /utils/date_picker/\n"
"# Google\n"
"User-agent: googlebot\n"
"Disallow: /utils/date_picker/\n"
"# MSN\n"
"User-agent: MSNBot\n"
"Disallow: /utils/date_picker/\n"
"# Yahoo!\n"
"User-agent: Slurp\n"
"Disallow: /utils/date_picker/\n"
"# Baidu\n"
"User-agent: baiduspider\n"
"Disallow: /utils/date_picker/\n"
"# All the rest go away\n"
"User-agent: *\n"
"Disallow: /\n" )
rules = self.parse(robots_txt)
self.assertFalse(rules.allowed("/", "reppy"))
# Real world example with multiple consecutive user agent directives
def test_legacy_test_6(self):
robots_txt = ( "User-agent: reppy\n"
"User-agent:snowball\n"
"Disallow:/\n" )
rules = self.parse(robots_txt)
self.assertFalse(rules.allowed("/", "reppy"))
# Real world example, r.e. phpBB
def test_legacy_test_7(self):
robots_txt = ( "User-agent: Googlebot-Image\n"
"Disallow: /\n"
"\n"
"User-agent: *\n"
"Crawl-delay: 7\n"
"\n"
"Disallow: /faq.php\n"
"Disallow: /groupcp.php\n"
"Disallow: /login.php\n"
"Disallow: /memberlist.php\n"
"Disallow: /merge.php\n"
"Disallow: /modcp.php\n"
"Disallow: /posting.php\n"
"Disallow: /phpBB2/posting.php\n"
"Disallow: /privmsg.php\n"
"Disallow: /profile.php\n"
"Disallow: /search.php\n"
"Disallow: /phpBB2/faq.php\n"
"Disallow: /phpBB2/groupcp.php\n"
"Disallow: /phpBB2/login.php\n"
"Disallow: /phpBB2/memberlist.php\n"
"Disallow: /phpBB2/merge.php\n"
"Disallow: /phpBB2/modcp.php\n"
"Disallow: /phpBB2/posting.php\n"
"Disallow: /phpBB2/posting.php\n"
"Disallow: /phpBB2/privmsg.php\n"
"Disallow: /phpBB2/profile.php\n"
"Disallow: /phpBB2/search.php\n"
"\n"
"Disallow: /admin/\n"
"Disallow: /images/\n"
"Disallow: /includes/\n"
"Disallow: /install/\n"
"Disallow: /modcp/\n"
"Disallow: /templates/\n"
"Disallow: /phpBB2/admin/\n"
"Disallow: /phpBB2/images/\n"
"Disallow: /phpBB2/includes/\n"
"Disallow: /phpBB2/install/\n"
"Disallow: /phpBB2/modcp/\n"
"Disallow: /phpBB2/templates/\n"
"\n"
"Disallow: /trac/\n" )
rules = self.parse(robots_txt)
self.assertFalse(rules.allowed("/phpBB2/posting.php?mode=reply&t=895", "reppy"))
    # Is this pertinent to reppy, or would this have been sanitized by
    # the time it reaches the parsing stage? (A minimal BOM-stripping
    # sketch follows this test.)
def test_utf8bom(self):
robots_txt = ( "\357\273\277User-agent: *\n"
"Disallow: /Product/List\n"
"Disallow: /Product/Search\n"
"Disallow: /Product/TopSellers\n"
"Disallow: /Product/UploadImage\n"
"Disallow: /WheelPit\n"
"Disallow: /iwwida.pvx\n" )
rules = self.parse(robots_txt)
self.assertFalse(rules.allowed("/WheelPit", "reppy"))
|
|
# -*- coding: utf-8 -*-
# Author: Joris Jensen <[email protected]>
#
# License: BSD 3 clause
from __future__ import division
import numpy as np
from scipy.optimize import minimize
from sklearn.utils import validation
from .rslvq import RslvqModel
class LmrslvqModel(RslvqModel):
"""Localized Matrix Robust Soft Learning Vector Quantization
Parameters
----------
prototypes_per_class : int or list of int, optional (default=1)
Number of prototypes per class. Use list to specify different
numbers per class.
initial_prototypes : array-like, shape = [n_prototypes, n_features + 1],
optional
        Prototypes to start with. If not given, initialization is done near
        the class means. The class label must be placed as the last entry of
        each prototype.
initial_matrices : list of array-like, optional
        Matrices to start with. If not given, random initialization is used.
regularization : float or array-like, shape = [n_classes/n_prototypes],
optional (default=0.0)
Values between 0 and 1. Regularization is done by the log determinant
of the relevance matrix. Without regularization relevances may
degenerate to zero.
dim : int, optional
        Maximum rank of the projection (number of projection dimensions)
classwise : boolean, optional
If true, each class has one relevance matrix.
If false, each prototype has one relevance matrix.
    sigma : float, optional (default=1)
Variance for the distribution.
max_iter : int, optional (default=2500)
The maximum number of iterations.
gtol : float, optional (default=1e-5)
Gradient norm must be less than gtol before successful termination
of bfgs.
display : boolean, optional (default=False)
Print information about the bfgs steps.
random_state : int, RandomState instance or None, optional
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
Attributes
----------
w_ : array-like, shape = [n_prototypes, n_features]
        Prototype vector, where n_prototypes is the number of prototypes and
n_features is the number of features
c_w_ : array-like, shape = [n_prototypes]
Prototype classes
classes_ : array-like, shape = [n_classes]
Array containing labels.
omegas_ : list of array-like
Relevance Matrices
dim_ : list of int
Maximum rank of projection
regularization_ : array-like, shape = [n_classes/n_prototypes]
Values between 0 and 1
See also
--------
RslvqModel, MrslvqModel
"""
def __init__(self, prototypes_per_class=1, initial_prototypes=None,
initial_matrices=None, regularization=0.0, dim=None,
classwise=False, sigma=1, max_iter=2500, gtol=1e-5, display=False,
random_state=None):
super(LmrslvqModel, self).__init__(sigma=sigma,
random_state=random_state,
prototypes_per_class=prototypes_per_class,
initial_prototypes=initial_prototypes,
gtol=gtol, display=display, max_iter=max_iter)
self.regularization = regularization
self.initial_matrices = initial_matrices
self.classwise = classwise
self.initialdim = dim
def _optgrad(self, variables, training_data, label_equals_prototype,
random_state, lr_relevances=0, lr_prototypes=1):
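        # Gradient of the RSLVQ objective with respect to the stacked variable
        # vector [prototypes; omegas]. lr_prototypes and lr_relevances act as
        # switches deciding which blocks of the gradient are computed.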
n_data, n_dim = training_data.shape
nb_prototypes = self.c_w_.size
variables = variables.reshape(variables.size // n_dim, n_dim)
prototypes = variables[:nb_prototypes]
# dim to indices
indices = []
for i in range(len(self.dim_)):
indices.append(sum(self.dim_[:i + 1]))
omegas = np.split(variables[nb_prototypes:], indices[:-1]) # .conj().T
g = np.zeros(variables.shape)
if lr_relevances > 0:
gw = []
for i in range(len(omegas)):
gw.append(np.zeros(omegas[i].shape))
c = 1 / self.sigma
for i in range(n_data):
xi = training_data[i]
c_xi = label_equals_prototype[i]
for j in range(prototypes.shape[0]):
if len(omegas) == nb_prototypes:
omega_index = j
else:
omega_index = np.where(self.classes_ == self.c_w_[j])[0][0]
oo = omegas[omega_index].T.dot(omegas[omega_index])
d = (xi - prototypes[j])[np.newaxis].T
p = self._p(j, xi, prototypes=prototypes, omega=omegas[omega_index])
if self.c_w_[j] == c_xi:
pj = self._p(j, xi, prototypes=prototypes, y=c_xi,
omega=omegas[omega_index])
if lr_prototypes > 0:
if self.c_w_[j] == c_xi:
g[j] += (c * (pj - p) * oo.dot(d)).ravel()
else:
g[j] -= (c * p * oo.dot(d)).ravel()
if lr_relevances > 0:
if self.c_w_[j] == c_xi:
                        gw[omega_index] -= (pj - p) / self.sigma * (
                            omegas[omega_index].dot(d).dot(d.T))
                    else:
                        gw[omega_index] += p / self.sigma * (
                            omegas[omega_index].dot(d).dot(d.T))
if lr_relevances > 0:
if sum(self.regularization_) > 0:
regmatrices = np.zeros([sum(self.dim_), n_dim])
for i in range(len(omegas)):
regmatrices[sum(self.dim_[:i + 1]) - self.dim_[i]:sum(
self.dim_[:i + 1])] = \
self.regularization_[i] * np.linalg.pinv(omegas[i])
g[nb_prototypes:] = 2 / n_data * lr_relevances * \
np.concatenate(gw) - regmatrices
else:
g[nb_prototypes:] = 2 / n_data * lr_relevances * \
np.concatenate(gw)
if lr_prototypes > 0:
g[:nb_prototypes] = 1 / n_data * \
lr_prototypes * g[:nb_prototypes]
g *= -(1 + 0.0001 * random_state.rand(*g.shape) - 0.5)
return g.ravel()
def _optfun(self, variables, training_data, label_equals_prototype):
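        # Negative log-likelihood ratio: for each sample, the log of the ratio
        # of the class-conditional mixture density p(x, y) to the full density
        # p(x), evaluated with a log-sum-exp shift (fs_max) for numerical
        # stability; the small constants guard against log(0).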
n_data, n_dim = training_data.shape
nb_prototypes = self.c_w_.size
variables = variables.reshape(variables.size // n_dim, n_dim)
prototypes = variables[:nb_prototypes]
indices = []
for i in range(len(self.dim_)):
indices.append(sum(self.dim_[:i + 1]))
omegas = np.split(variables[nb_prototypes:], indices[:-1])
out = 0
for i in range(n_data):
xi = training_data[i]
y = label_equals_prototype[i]
if len(omegas) == nb_prototypes:
fs = [self._costf(xi, prototypes[j], omega=omegas[j])
for j in range(nb_prototypes)]
else:
fs = [self._costf(xi, prototypes[j], omega=omegas[np.where(self.classes_ == self.c_w_[j])[0][0]])
for j in range(nb_prototypes)]
fs_max = max(fs)
s1 = sum([np.math.exp(fs[i] - fs_max) for i in range(len(fs))
if self.c_w_[i] == y])
s2 = sum([np.math.exp(f - fs_max) for f in fs])
s1 += 0.0000001
s2 += 0.0000001
out += np.math.log(s1 / s2)
return -out
def _optimize(self, x, y, random_state):
nb_prototypes, nb_features = self.w_.shape
nb_classes = len(self.classes_)
if not isinstance(self.classwise, bool):
raise ValueError("classwise must be a boolean")
if self.initialdim is None:
if self.classwise:
self.dim_ = nb_features * np.ones(nb_classes, dtype=np.int)
else:
self.dim_ = nb_features * np.ones(nb_prototypes, dtype=np.int)
else:
self.dim_ = validation.column_or_1d(self.initialdim)
if self.dim_.size == 1:
if self.classwise:
self.dim_ = self.dim_[0] * np.ones(nb_classes,
dtype=np.int)
else:
self.dim_ = self.dim_[0] * np.ones(nb_prototypes,
dtype=np.int)
elif self.classwise and self.dim_.size != nb_classes:
raise ValueError("dim length must be number of classes")
elif self.dim_.size != nb_prototypes:
raise ValueError("dim length must be number of prototypes")
if self.dim_.min() <= 0:
raise ValueError("dim must be a list of positive ints")
        # initialize omegas (self.omegas_ is a list of arrays)
if self.initial_matrices is None:
self.omegas_ = []
for d in self.dim_:
self.omegas_.append(
random_state.rand(d, nb_features) * 2.0 - 1.0)
else:
if not isinstance(self.initial_matrices, list):
raise ValueError("initial matrices must be a list")
self.omegas_ = list(map(lambda v: validation.check_array(v),
self.initial_matrices))
if self.classwise:
if len(self.omegas_) != nb_classes:
raise ValueError("length of matrices wrong\n"
"found=%d\n"
"expected=%d" % (
len(self.omegas_), nb_classes))
                elif np.sum([v.shape[1] for v in self.omegas_]) != \
                        nb_features * len(self.omegas_):
raise ValueError(
"each matrix should have %d columns" % nb_features)
elif len(self.omegas_) != nb_prototypes:
raise ValueError("length of matrices wrong\n"
"found=%d\n"
"expected=%d" % (
                                     len(self.omegas_), nb_prototypes))
elif np.sum([v.shape[1] for v in self.omegas_]) != \
nb_features * len(self.omegas_):
raise ValueError(
"each matrix should have %d columns" % nb_features)
if isinstance(self.regularization, float):
if self.regularization < 0:
raise ValueError('regularization must be a positive float')
self.regularization_ = np.repeat(self.regularization,
len(self.omegas_))
else:
self.regularization_ = validation.column_or_1d(self.regularization)
if self.classwise:
if self.regularization_.size != nb_classes:
raise ValueError(
"length of regularization must be number of classes")
else:
if self.regularization_.size != self.w_.shape[0]:
raise ValueError(
"length of regularization "
"must be number of prototypes")
variables = np.append(self.w_, np.concatenate(self.omegas_), axis=0)
label_equals_prototype = y
res = minimize(
fun=lambda vs: self._optfun(
vs, x, label_equals_prototype=label_equals_prototype),
jac=lambda vs: self._optgrad(
vs, x, label_equals_prototype=label_equals_prototype,
lr_prototypes=0, lr_relevances=1, random_state=random_state),
method='L-BFGS-B',
x0=variables, options={'disp': self.display, 'gtol': self.gtol,
'maxiter': self.max_iter})
n_iter = res.nit
res = minimize(
fun=lambda vs: self._optfun(
vs, x, label_equals_prototype=label_equals_prototype),
jac=lambda vs: self._optgrad(
vs, x, label_equals_prototype=label_equals_prototype,
lr_prototypes=0, lr_relevances=1, random_state=random_state),
method='L-BFGS-B',
x0=res.x, options={'disp': self.display, 'gtol': self.gtol,
'maxiter': self.max_iter})
n_iter = max(n_iter, res.nit)
res = minimize(
fun=lambda vs: self._optfun(
vs, x, label_equals_prototype=label_equals_prototype),
jac=lambda vs: self._optgrad(
vs, x, label_equals_prototype=label_equals_prototype,
lr_prototypes=1, lr_relevances=1, random_state=random_state),
method='L-BFGS-B',
x0=res.x, options={'disp': self.display, 'gtol': self.gtol,
'maxiter': self.max_iter})
n_iter = max(n_iter, res.nit)
out = res.x.reshape(res.x.size // nb_features, nb_features)
self.w_ = out[:nb_prototypes]
indices = []
for i in range(len(self.dim_)):
indices.append(sum(self.dim_[:i + 1]))
self.omegas_ = np.split(out[nb_prototypes:], indices[:-1]) # .conj().T
self.n_iter_ = n_iter
def _f(self, x, i):
d = (x - self.w_[i])[np.newaxis].T
d = d.T.dot(self.omegas_[i].T).dot(self.omegas_[i]).dot(d)
return -d / (2 * self.sigma)
def _costf(self, x, w, **kwargs):
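        # Log of an (unnormalized) Gaussian with the local metric omega:
        # -(x - w)^T omega^T omega (x - w) / (2 * sigma).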
if 'omega' in kwargs:
omega = kwargs['omega']
else:
omega = self.omegas_[np.where(self.w_ == w)[0][0]]
d = (x - w)[np.newaxis].T
d = d.T.dot(omega.T).dot(omega).dot(d)
return -d / (2 * self.sigma)
def _compute_distance(self, x, w=None):
if w is None:
w = self.w_
def foo(e):
fun = np.vectorize(lambda w: self._costf(e, w),
signature='(n)->()')
return fun(w)
return np.vectorize(foo, signature='(n)->()')(x)
def project(self, x, prototype_idx, dims, print_variance_covered=False):
"""Projects the data input data X using the relevance matrix of the
prototype specified by prototype_idx to dimension dim
Parameters
----------
x : array-like, shape = [n,n_features]
            input data to project
prototype_idx : int
index of the prototype
dims : int
dimension to project to
print_variance_covered : boolean
flag to print the covered variance of the projection
Returns
--------
        C : array, shape = [n, dims]
            Returns the projected data.
"""
nb_prototypes = self.w_.shape[0]
if len(self.omegas_) != nb_prototypes \
or self.prototypes_per_class != 1:
print('project only possible with classwise relevance matrix')
# y = self.predict(X)
v, u = np.linalg.eig(
self.omegas_[prototype_idx].T.dot(self.omegas_[prototype_idx]))
idx = v.argsort()[::-1]
if print_variance_covered:
            print('variance covered by projection:',
v[idx][:dims].sum() / v.sum() * 100)
return x.dot(u[:, idx][:, :dims].dot(np.diag(np.sqrt(v[idx][:dims]))))
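
# A minimal usage sketch, not part of the module. It assumes the scikit-learn
# style fit() API inherited from RslvqModel and uses a toy two-class dataset;
# parameter values are illustrative only and the function is never called here.
def _example_usage():
    rng = np.random.RandomState(0)
    x = np.vstack([rng.randn(20, 2) + [2, 0], rng.randn(20, 2) - [2, 0]])
    y = np.array([0] * 20 + [1] * 20)
    model = LmrslvqModel(prototypes_per_class=1, sigma=0.5, max_iter=100,
                         random_state=0)
    model.fit(x, y)
    # With one prototype per class, each prototype has its own local relevance
    # matrix, so the data can be projected per prototype.
    return model.project(x, prototype_idx=0, dims=1)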
|
|
'''
Created on Apr 20, 2015
@author: root
'''
import mock
import testtools
from paxes_nova.virt.ibmpowervm.ivm import operator
from paxes_nova.virt.ibmpowervm.ivm.common import Connection
from paxes_nova.virt.ibmpowervm.ivm import exception
from decimal import Decimal
from paxes_nova.virt.ibmpowervm.ivm.operator import IVMOperator
class IVMOperatorTestCase(testtools.TestCase):
def setUp(self):
super(IVMOperatorTestCase, self).setUp()
conn = Connection('172.24.23.212', 'root', 'teamsun')
self.ivm_opt = operator.IVMOperator(conn)
self.rasis = exception
def tearDown(self):
super(IVMOperatorTestCase, self).tearDown()
@mock.patch('nova.openstack.common.processutils.execute')
def test_run_interactive(self, mock_processutils_execute):
cmd = ['oem_setup_env', 'lsvg -o', 'exit']
mock_processutils_execute.return_value = ('datavg\nrootvg\n', '')
self.assertEqual(self.ivm_opt.run_interactive(cmd), [])
@mock.patch('paxes_nova.virt.ibmpowervm.ivm.operator.IVMOperator.run_interactive')
def test_get_wwpns(self, mock_run_interactive):
mock_run_interactive.return_value = ([' Network Address.............10000090FA1B2436',
' Network Address.............10000090FA1B2437',
' Network Address.............10000090FA1B2874',
' Network Address.............10000090FA1B2875',
])
self.assertEqual(self.ivm_opt.get_wwpns(), ['10000090FA1B2436',
'10000090FA1B2437',
'10000090FA1B2874',
'10000090FA1B2875',
])
@mock.patch('paxes_nova.virt.ibmpowervm.ivm.operator.IVMOperator.run_interactive')
def test_get_device_name_by_wwpn(self, mock_run_interactive):
mock_run_interactive.return_value = ([' fcs2 U78AB.001.WZSJH1H-P1-C3-T1 4Gb FC PCI Express Adapter (df1000fe)', '',
' Part Number.................00E0807',
' Serial Number...............1A249002C8',
' Manufacturer................001A',
' EC Level.................... D77162',
' Customer Card ID Number.....5774',
' Manufacturer................001',
' FRU Number.................. 00E0807',
' Device Specific.(ZM)........3',
' Network Address.............10000090FA1B2436',
' ROS Level and ID............02E8277F',
' Device Specific.(Z0)........2057706D',
' Device Specific.(Z1)........00000000',
' Device Specific.(Z2)........00000000',
' Device Specific.(Z3)........03000909',
' Device Specific.(Z4)........FFE01212',
' Device Specific.(Z5)........02E8277F',
' Device Specific.(Z6)........06E12715',
' Device Specific.(Z7)........07E1277F',
' Device Specific.(Z8)........20000090FA1B2436',
' Device Specific.(Z9)........ZS2.71X15',
' Device Specific.(ZA)........Z1F2.70A5 ',
' Device Specific.(ZB)........Z2F2.71X15',
' Device Specific.(ZC)........00000000',
' Hardware Location Code......U78AB.001.WZSJH1H-P1-C3-T1', '',
' fcs3 U78AB.001.WZSJH1H-P1-C3-T2 4Gb FC PCI Express Adapter (df1000fe)', '',
' Part Number.................00E0807',
' Serial Number...............1A249002C8',
' Manufacturer................001A',
' EC Level.................... D77162',
' Customer Card ID Number.....5774',
' Manufacturer................001',
' FRU Number.................. 00E0807',
' Device Specific.(ZM)........3',
' Network Address.............10000090FA1B2437',
' ROS Level and ID............02E8277F',
' Device Specific.(Z0)........2057706D',
' Device Specific.(Z1)........00000000',
' Device Specific.(Z2)........00000000',
' Device Specific.(Z3)........03000909',
' Device Specific.(Z4)........FFE01212',
' Device Specific.(Z5)........02E8277F',
' Device Specific.(Z6)........06E12715',
' Device Specific.(Z7)........07E1277F',
' Device Specific.(Z8)........20000090FA1B2437',
' Device Specific.(Z9)........ZS2.71X15',
' Device Specific.(ZA)........Z1F2.70A5 ',
' Device Specific.(ZB)........Z2F2.71X15',
' Device Specific.(ZC)........00000000',
' Hardware Location Code......U78AB.001.WZSJH1H-P1-C3-T2', '',
' fcs0 U78AB.001.WZSJH1H-P1-C2-T1 4Gb FC PCI Express Adapter (df1000fe)', '',
' Part Number.................00E0807',
' Serial Number...............1A2490024E',
' Manufacturer................001A',
' EC Level.................... D77162',
' Customer Card ID Number.....5774',
' Manufacturer................001',
' FRU Number.................. 00E0807',
' Device Specific.(ZM)........3',
' Network Address.............10000090FA1B2874',
' ROS Level and ID............02E8277F',
' Device Specific.(Z0)........2057706D',
' Device Specific.(Z1)........00000000',
' Device Specific.(Z2)........00000000',
' Device Specific.(Z3)........03000909',
' Device Specific.(Z4)........FFE01212',
' Device Specific.(Z5)........02E8277F',
' Device Specific.(Z6)........06E12715',
' Device Specific.(Z7)........07E1277F',
' Device Specific.(Z8)........20000090FA1B2874',
' Device Specific.(Z9)........ZS2.71X15',
' Device Specific.(ZA)........Z1F2.70A5 ',
' Device Specific.(ZB)........Z2F2.71X15',
' Device Specific.(ZC)........00000000',
' Hardware Location Code......U78AB.001.WZSJH1H-P1-C2-T1', '',
' fcs1 U78AB.001.WZSJH1H-P1-C2-T2 4Gb FC PCI Express Adapter (df1000fe)', '',
' Part Number.................00E0807',
' Serial Number...............1A2490024E',
' Manufacturer................001A',
' EC Level.................... D77162',
' Customer Card ID Number.....5774',
' Manufacturer................001',
' FRU Number.................. 00E0807',
' Device Specific.(ZM)........3',
' Network Address.............10000090FA1B2875',
' ROS Level and ID............02E8277F',
' Device Specific.(Z0)........2057706D',
' Device Specific.(Z1)........00000000',
' Device Specific.(Z2)........00000000',
' Device Specific.(Z3)........03000909',
' Device Specific.(Z4)........FFE01212',
' Device Specific.(Z5)........02E8277F',
' Device Specific.(Z6)........06E12715',
' Device Specific.(Z7)........07E1277F',
' Device Specific.(Z8)........20000090FA1B2875',
' Device Specific.(Z9)........ZS2.71X15',
' Device Specific.(ZA)........Z1F2.70A5 ',
' Device Specific.(ZB)........Z2F2.71X15',
' Device Specific.(ZC)........00000000',
' Hardware Location Code......U78AB.001.WZSJH1H-P1-C2-T2', '', '',
' PLATFORM SPECIFIC', '', ' Name: fibre-channel',
' Model: LPe11002', ' Node: fibre-channel@0',
' Device Type: fcp',
' Physical Location: U78AB.001.WZSJH1H-P1-C2-T1', '',
' Name: fibre-channel', ' Model: LPe11002',
' Node: fibre-channel@0,1',
' Device Type: fcp',
' Physical Location: U78AB.001.WZSJH1H-P1-C2-T2', '',
' Name: fibre-channel',
' Model: LPe11002',
' Node: fibre-channel@0',
' Device Type: fcp',
' Physical Location: U78AB.001.WZSJH1H-P1-C3-T1', '',
' Name: fibre-channel',
' Model: LPe11002',
' Node: fibre-channel@0,1',
' Device Type: fcp',
' Physical Location: U78AB.001.WZSJH1H-P1-C3-T2'])
wwpn = '10000090FA1B2436'
self.assertEqual(self.ivm_opt.get_device_name_by_wwpn(wwpn), 'fcs2')
@mock.patch('paxes_nova.virt.ibmpowervm.ivm.operator.IVMOperator.run_command')
def test_get_vopt_size(self, mock_run_command):
mock_run_command.return_value = ('6515')
self.assertEqual(self.ivm_opt.get_vopt_size(), '6515')
@mock.patch('paxes_nova.virt.ibmpowervm.ivm.operator.IVMOperator.run_command')
def test_get_staging_size(self, mock_run_command):
mock_run_command.return_value = (['9613688'])
        self.assertEqual(self.ivm_opt.get_staging_size(), ['9613688'])
@mock.patch('paxes_nova.virt.ibmpowervm.ivm.operator.IVMOperator.get_actual_lpar_name')
@mock.patch('paxes_nova.virt.ibmpowervm.ivm.operator.IVMOperator.run_vios_command')
def test_get_lpar_mem(self, mock_run_vios_command, mock_get_actual_lpar_name):
        lpar_name = 'instance-0000011f'
        mock_get_actual_lpar_name.return_value = ('instance-0000011f')
        mock_run_vios_command.return_value = (['lpar_name=instance-0000011f',
'lpar_id=26',
'mem_mode=ded',
'curr_min_mem=512',
'curr_mem=1024',
'curr_max_mem=4096',
'pend_min_mem=512',
'pend_mem=1024',
'pend_max_mem=4096',
'run_min_mem=0',
'run_mem=1024'])
        self.assertEqual(self.ivm_opt.get_lpar_mem(lpar_name), {'lpar_name': 'instance-0000011f'})
@mock.patch('paxes_nova.virt.ibmpowervm.ivm.operator.IVMOperator.run_vios_command')
def test_get_lpar_proc(self, mock_run_vios_command):
        lpar_name = 'instance-0000011f'
        mock_run_vios_command.return_value = (['lpar_name=instance-0000011f',
'lpar_id=26',
'curr_shared_proc_pool_id=0',
'curr_proc_mode=shared',
])
        self.assertEqual(self.ivm_opt.get_lpar_proc(lpar_name), {'lpar_name': 'instance-0000011f'})
@mock.patch('paxes_nova.virt.ibmpowervm.ivm.operator.IVMOperator.run_command')
def test_get_lpar_cpu_util(self, mock_run_command):
lpar_id = 26
mock_run_command.return_value = (['128,1,1024',
'128,1,1024',
'128,1,1024'
])
self.assertEqual(self.ivm_opt.get_lpar_cpu_util(lpar_id), 0)
@mock.patch('paxes_nova.virt.ibmpowervm.ivm.operator.IVMOperator.run_command')
def test_get_cpu_info_for_lpars(self, mock_run_command):
mock_run_command.return_value = (['instance-00000119,shared,0.50,1',
'instance-00000121,shared,0.20,1',
'instance-0000015b,shared,0.50,1',
'instance-00000001,shared,0.50,1'])
self.assertEqual(self.ivm_opt.get_cpu_info_for_lpars(), {'instance-00000119': Decimal('0.50'),
'instance-00000121': Decimal('0.20'),
'instance-0000015b': Decimal('0.50'),
'instance-00000001': Decimal('0.50')
})
@mock.patch('paxes_nova.virt.ibmpowervm.ivm.operator.IVMOperator.run_command')
@mock.patch('paxes_nova.virt.ibmpowervm.ivm.operator.IVMOperator._calc_cpu_utilization')
def test_get_lpar_info(self, mock_calc_cpu_utilization, mock_run_command):
lpar_id = 26
mock_run_command.return_value = ['entitled_cycles=283424875907440,capped_cycles=317971615126,uncapped_cycles=18638892330',
'entitled_cycles=283424875907440,capped_cycles=317971615126,uncapped_cycles=18638892330',
'entitled_cycles=283424875907440,capped_cycles=317971615126,uncapped_cycles=18638892330']
mock_calc_cpu_utilization.return_value = (0)
self.assertEqual(self.ivm_opt.get_lpar_info(lpar_id), {'capped_cycles': '317971615126',
'curr_cpu_util': '0.0',
'entitled_cycles': '283424875907440',
'uncapped_cycles': '18638892330'})
# @mock.patch('paxes_nova.virt.ibmpowervm.ivm.operator.IVMOperator.run_command')
# @mock.patch('paxes_nova.virt.ibmpowervm.ivm.operator.IVMOperator._calc_cpu_utilization')
# def test_get_lpar_info_fault(self, mock_calc_cpu_utilization, mock_run_command):
# lpar_id = 26
# ex_args = {'command': 'lslparutil --filter "lpar_ids=26" -n 3 -r lpar',
# 'error': ['entitled_cycles=283424875907440,capped_cycles=317971615126,uncapped_cycles=18638892330',
# 'entitled_cycles=283424875907440,capped_cycles=317971615126,uncapped_cycles=18638892330']
# }
# mock_run_command.return_value = ['entitled_cycles=283424875907440,capped_cycles=317971615126,uncapped_cycles=18638892330',
# 'entitled_cycles=283424875907440,capped_cycles=317971615126,uncapped_cycles=18638892330']
# mock_calc_cpu_utilization.return_value = (0)
# self.assertRaises(self.rasis.IBMPowerVMCommandFailed, self.ivm_opt.get_lpar_info(lpar_id), ex_args)
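    # Note: if the test above is re-enabled, assertRaises should be given the
    # callable and its arguments separately, e.g.
    #     self.assertRaises(self.rasis.IBMPowerVMCommandFailed,
    #                       self.ivm_opt.get_lpar_info, lpar_id)
    # so that the exception is raised inside assertRaises rather than while
    # building its argument list.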
@mock.patch('paxes_nova.virt.ibmpowervm.ivm.operator.IVMOperator.run_command')
def test_get_disk_names_for_vhost(self, mock_run_command):
vhost = 'vhost1'
mock_run_command.return_value = (['0x8200000000000000:/var/vio/VMLibrary/c53b6b58-4eca-8e90-c3e9e0f0babb:0x8100000000000000:lv16'])
self.assertEqual(self.ivm_opt.get_disk_names_for_vhost(vhost, local=True), ['lv16'])
@mock.patch('paxes_nova.virt.ibmpowervm.ivm.operator.IVMOperator.run_command')
def test_get_hdisk_reserve_policy(self, mock_run_command):
diskname = 'hdisk4'
mock_run_command.return_value = (['value', ' ', 'no_reserve'])
self.assertEqual(self.ivm_opt.get_hdisk_reserve_policy(diskname), 'no_reserve')
@mock.patch('paxes_nova.virt.ibmpowervm.ivm.operator.IVMOperator.run_command')
def test_get_management_sys_name(self, mock_run_command):
mock_run_command.return_value = (['Server-8246-L2D-SN06052EA'])
self.assertEqual(self.ivm_opt.get_management_sys_name(), 'Server-8246-L2D-SN06052EA')
@mock.patch('paxes_nova.virt.ibmpowervm.ivm.operator.IVMOperator.get_actual_lpar_name')
@mock.patch('paxes_nova.virt.ibmpowervm.ivm.operator.IVMOperator.run_command')
def test_get_refcode(self, mock_run_command, mock_get_actual_lpar_name):
instance_name = 'instance-0000011f'
mock_run_command.return_value = (['Linux ppc64,04/09/2015 10:44:57'])
mock_get_actual_lpar_name.return_value = ('instance-0000011f')
self.assertEqual(self.ivm_opt.get_refcode(instance_name), 'Linux ppc64')
@mock.patch('paxes_nova.virt.ibmpowervm.ivm.operator.IVMOperator.get_actual_lpar_name')
@mock.patch('paxes_nova.virt.ibmpowervm.ivm.operator.IVMOperator.run_command')
def test_get_live_migration_state(self, mock_run_command, mock_get_actual_lpar_name):
inst_name = 'instance-0000011f'
mock_run_command.return_value = (['Not Migrating'])
mock_get_actual_lpar_name.return_value = ('instance-0000011f')
self.assertEqual(self.ivm_opt.get_live_migration_state(inst_name), 'Not Migrating')
@mock.patch('paxes_nova.virt.ibmpowervm.ivm.operator.IVMOperator.run_command')
def test_get_lpar_proc_compat_modes(self, mock_run_command):
mock_run_command.return_value = (['"default,POWER6,POWER6+,POWER7"'])
self.assertEqual(self.ivm_opt.get_lpar_proc_compat_modes(), ['default', 'POWER6', 'POWER6+', 'POWER7'])
@mock.patch('paxes_nova.virt.ibmpowervm.ivm.operator.IVMOperator.get_actual_lpar_name')
@mock.patch('paxes_nova.virt.ibmpowervm.ivm.operator.IVMOperator.run_vios_command')
def test_get_curr_and_desired_proc_compat_modes(self, mock_run_vios_command, mock_get_actual_lpar_name):
instance_name = 'instance-0000011f'
mock_run_vios_command.return_value = (['POWER7,default'])
mock_get_actual_lpar_name.return_value = ('instance-0000011f')
self.assertEqual(self.ivm_opt.get_curr_and_desired_proc_compat_modes(instance_name), ['POWER7', 'default'])
@mock.patch('paxes_nova.virt.ibmpowervm.ivm.operator.IVMOperator.get_actual_lpar_name')
@mock.patch('paxes_nova.virt.ibmpowervm.ivm.operator.IVMOperator.run_vios_command')
def test_get_lpar_operating_system(self, mock_run_vios_command, mock_get_actual_lpar_name):
instance_name = 'instance-0000011f'
mock_run_vios_command.return_value = (['0.0.0.0.0.0'])
mock_get_actual_lpar_name.return_value = ('instance-0000011f')
        self.assertEqual(self.ivm_opt.get_lpar_operating_system(instance_name), ['0.0.0.0.0.0'])
def test_get_disk_names_for_vhost_frm_dict(self):
vhost_id = 3
disk_dict = {1:'15',
2:'80',
3:'123'}
self.assertEqual(self.ivm_opt.get_disk_names_for_vhost_frm_dict(vhost_id, disk_dict), [])
@mock.patch('paxes_nova.virt.ibmpowervm.ivm.operator.IVMOperator.run_command')
def test_get_disk_uid_by_name(self, mock_run_command):
disk_name = 'hdisk3'
mock_run_command.return_value = (['332136005076300818001A000000000000D4F04214503IBMfcp'])
self.assertEqual(self.ivm_opt.get_disk_uid_by_name(disk_name), '6005076300818001A000000000000D4F')
@mock.patch('paxes_nova.virt.ibmpowervm.ivm.operator.IVMOperator.get_disk_names_for_vhost_frm_dict')
@mock.patch('paxes_nova.virt.ibmpowervm.ivm.operator.IVMOperator.get_disk_uid_by_name')
def test_get_volumes_by_vhost_from_dict(self, mock_get_disk_uid_by_name, mock_get_disk_names_for_vhost_frm_dict):
vhost_id = 3
disk_dict = {1:'15',
2:'80',
3:'123'}
mock_get_disk_uid_by_name.return_value = ('6005076300818001A000000000000D4F')
mock_get_disk_names_for_vhost_frm_dict.return_value = ([])
self.assertEqual(self.ivm_opt.get_volumes_by_vhost_from_dict(vhost_id, disk_dict), [])
@mock.patch('paxes_nova.virt.ibmpowervm.ivm.operator.IVMOperator.run_vios_command')
def test_get_vhost_by_instance_id(self, mock_run_vios_command):
instance_id = 17
mock_run_vios_command.return_value = (['vhost15'])
self.assertEqual(self.ivm_opt.get_vhost_by_instance_id(instance_id), 'vhost15')
@mock.patch('paxes_nova.virt.ibmpowervm.ivm.operator.IVMOperator.run_command')
@mock.patch('paxes_nova.virt.ibmpowervm.ivm.operator.IVMOperator._get_all_virt_slots_in_use')
def test_get_num_reserved_in_use_vios_slots(self, mock__get_all_virt_slots_in_use, mock_run_command):
managed_lpar_names = ['06-052EA', '06-052EB']
mock_run_command.return_value = (['0,serial', '1,serial', '2,scsi', '3,reserved', '32,eth'])
mock__get_all_virt_slots_in_use.return_value = (1)
self.assertEqual(self.ivm_opt.get_num_reserved_in_use_vios_slots(managed_lpar_names), (4, 1))
def test_get_volume_aix_conn_info(self):
volume_data = {'target_wwn': ['10000090FA1B2436',
'10000090FA1B2437',
'10000090FA1B2874',
'10000090FA1B2875'],
'target_lun': '10'}
self.assertEqual(self.ivm_opt.get_volume_aix_conn_info(volume_data), ['10000090fa1b2436,a000000000000',
'10000090fa1b2437,a000000000000',
'10000090fa1b2874,a000000000000',
'10000090fa1b2875,a000000000000'])
@mock.patch('paxes_nova.virt.ibmpowervm.ivm.operator.IVMOperator.run_command')
def test_get_devname_by_aix_conn(self, mock_run_command):
conn_info = ['5005076803080067,2000000000000']
mock_run_command.return_value = (['Enabled:hdisk4:fscsi2:5005076803080067,2000000000000'])
self.assertEqual(self.ivm_opt.get_devname_by_aix_conn(conn_info), {'device_name': 'hdisk4'})
@mock.patch('paxes_nova.virt.ibmpowervm.ivm.operator.IVMOperator.run_command')
def test_get_fcs_parent_devices(self, mock_run_command):
mock_run_command.return_value = (['pci4:fcs0', 'pci4:fcs1', 'pci5:fcs2', 'pci5:fcs3'])
self.assertEqual(self.ivm_opt.get_fcs_parent_devices(), {'pci4': ['fcs0', 'fcs1'], 'pci5': ['fcs2', 'fcs3']})
@mock.patch('paxes_nova.virt.ibmpowervm.ivm.operator.IVMOperator.run_interactive')
def test_get_fcs_device_names(self, mock_run_interactive):
wwpns = ['10000090FA1B2436',
'10000090FA1B2437',
'10000090FA1B2874',
'10000090FA1B2875']
mock_run_interactive.return_value = ([' fcs2 U78AB.001.WZSJH1H-P1-C3-T1 4Gb FC PCI Express Adapter (df1000fe)', '',
' Part Number.................00E0807',
' Serial Number...............1A249002C8',
' Manufacturer................001A',
' EC Level.................... D77162',
' Customer Card ID Number.....5774',
' Manufacturer................001',
' FRU Number.................. 00E0807',
' Device Specific.(ZM)........3',
' Network Address.............10000090FA1B2436',
' ROS Level and ID............02E8277F',
' Device Specific.(Z0)........2057706D',
' Device Specific.(Z1)........00000000',
' Device Specific.(Z2)........00000000',
' Device Specific.(Z3)........03000909',
' Device Specific.(Z4)........FFE01212',
' Device Specific.(Z5)........02E8277F',
' Device Specific.(Z6)........06E12715',
' Device Specific.(Z7)........07E1277F',
' Device Specific.(Z8)........20000090FA1B2436',
' Device Specific.(Z9)........ZS2.71X15',
' Device Specific.(ZA)........Z1F2.70A5 ',
' Device Specific.(ZB)........Z2F2.71X15',
' Device Specific.(ZC)........00000000',
' Hardware Location Code......U78AB.001.WZSJH1H-P1-C3-T1', '',
' fcs3 U78AB.001.WZSJH1H-P1-C3-T2 4Gb FC PCI Express Adapter (df1000fe)', '',
' Part Number.................00E0807',
' Serial Number...............1A249002C8',
' Manufacturer................001A',
' EC Level.................... D77162',
' Customer Card ID Number.....5774',
' Manufacturer................001',
' FRU Number.................. 00E0807',
' Device Specific.(ZM)........3',
' Network Address.............10000090FA1B2437',
' ROS Level and ID............02E8277F',
' Device Specific.(Z0)........2057706D',
' Device Specific.(Z1)........00000000',
' Device Specific.(Z2)........00000000',
' Device Specific.(Z3)........03000909',
' Device Specific.(Z4)........FFE01212',
' Device Specific.(Z5)........02E8277F',
' Device Specific.(Z6)........06E12715',
' Device Specific.(Z7)........07E1277F',
' Device Specific.(Z8)........20000090FA1B2437',
' Device Specific.(Z9)........ZS2.71X15',
' Device Specific.(ZA)........Z1F2.70A5 ',
' Device Specific.(ZB)........Z2F2.71X15',
' Device Specific.(ZC)........00000000',
' Hardware Location Code......U78AB.001.WZSJH1H-P1-C3-T2', '',
' fcs0 U78AB.001.WZSJH1H-P1-C2-T1 4Gb FC PCI Express Adapter (df1000fe)', '',
' Part Number.................00E0807',
' Serial Number...............1A2490024E',
' Manufacturer................001A',
' EC Level.................... D77162',
' Customer Card ID Number.....5774',
' Manufacturer................001',
' FRU Number.................. 00E0807',
' Device Specific.(ZM)........3',
' Network Address.............10000090FA1B2874',
' ROS Level and ID............02E8277F',
' Device Specific.(Z0)........2057706D',
' Device Specific.(Z1)........00000000',
' Device Specific.(Z2)........00000000',
' Device Specific.(Z3)........03000909',
' Device Specific.(Z4)........FFE01212',
' Device Specific.(Z5)........02E8277F',
' Device Specific.(Z6)........06E12715',
' Device Specific.(Z7)........07E1277F',
' Device Specific.(Z8)........20000090FA1B2874',
' Device Specific.(Z9)........ZS2.71X15',
' Device Specific.(ZA)........Z1F2.70A5 ',
' Device Specific.(ZB)........Z2F2.71X15',
' Device Specific.(ZC)........00000000',
' Hardware Location Code......U78AB.001.WZSJH1H-P1-C2-T1', '',
' fcs1 U78AB.001.WZSJH1H-P1-C2-T2 4Gb FC PCI Express Adapter (df1000fe)', '',
' Part Number.................00E0807',
' Serial Number...............1A2490024E',
' Manufacturer................001A',
' EC Level.................... D77162',
' Customer Card ID Number.....5774',
' Manufacturer................001',
' FRU Number.................. 00E0807',
' Device Specific.(ZM)........3',
' Network Address.............10000090FA1B2875',
' ROS Level and ID............02E8277F',
' Device Specific.(Z0)........2057706D',
' Device Specific.(Z1)........00000000',
' Device Specific.(Z2)........00000000',
' Device Specific.(Z3)........03000909',
' Device Specific.(Z4)........FFE01212',
' Device Specific.(Z5)........02E8277F',
' Device Specific.(Z6)........06E12715',
' Device Specific.(Z7)........07E1277F',
' Device Specific.(Z8)........20000090FA1B2875',
' Device Specific.(Z9)........ZS2.71X15',
' Device Specific.(ZA)........Z1F2.70A5 ',
' Device Specific.(ZB)........Z2F2.71X15',
' Device Specific.(ZC)........00000000',
' Hardware Location Code......U78AB.001.WZSJH1H-P1-C2-T2', '', '',
' PLATFORM SPECIFIC', '', ' Name: fibre-channel',
' Model: LPe11002', ' Node: fibre-channel@0',
' Device Type: fcp',
' Physical Location: U78AB.001.WZSJH1H-P1-C2-T1', '',
' Name: fibre-channel', ' Model: LPe11002',
' Node: fibre-channel@0,1',
' Device Type: fcp',
' Physical Location: U78AB.001.WZSJH1H-P1-C2-T2', '',
' Name: fibre-channel',
' Model: LPe11002',
' Node: fibre-channel@0',
' Device Type: fcp',
' Physical Location: U78AB.001.WZSJH1H-P1-C3-T1', '',
' Name: fibre-channel',
' Model: LPe11002',
' Node: fibre-channel@0,1',
' Device Type: fcp',
' Physical Location: U78AB.001.WZSJH1H-P1-C3-T2'])
self.assertEqual(self.ivm_opt.get_fcs_device_names(wwpns), ['fcs2', 'fcs3', 'fcs0', 'fcs1'])
@mock.patch('paxes_nova.virt.ibmpowervm.ivm.operator.IVMOperator.run_command')
def test_get_disk_name_by_volume_uid(self, mock_run_command):
uid = 'D5304214503'
mock_run_command.return_value = (['hdisk4:332136005076300818001A000000000000D5304214503IBMfcp'])
self.assertEqual(self.ivm_opt.get_disk_name_by_volume_uid(uid), 'hdisk4')
@mock.patch('paxes_nova.virt.ibmpowervm.ivm.operator.IVMOperator.run_command')
def test_get_lpar_max_virtual_slots(self, mock_run_command):
lpar_id = '26'
mock_run_command.return_value = ([64])
self.assertEqual(self.ivm_opt.get_lpar_max_virtual_slots(lpar_id), 64)
@mock.patch('paxes_nova.virt.ibmpowervm.ivm.operator.IVMOperator.get_lpar_max_virtual_slots')
def test_get_vios_max_virt_slots(self, mock_get_lpar_max_virtual_slots):
mock_get_lpar_max_virtual_slots.return_value = (64)
self.assertEqual(self.ivm_opt.get_vios_max_virt_slots(), 64)
@mock.patch('paxes_nova.virt.ibmpowervm.ivm.operator.IVMOperator.run_command')
def test_get_hyp_capability(self, mock_run_command):
mock_run_command.return_value = (["active_lpar_mobility_capable,inactive_lpar_mobility_capable,cod_proc_capable,vet_activation_capable,shared_proc_capable,active_lpar_share_idle_procs_capable,micro_lpar_capable,dlpar_mem_capable,assign_phys_io_capable,lpar_avail_priority_capable,lpar_proc_compat_mode_capable,virtual_fc_capable,active_mem_sharing_capable"])
self.assertEqual(self.ivm_opt.get_hyp_capability(), {'active_lpar_mobility_capable': True, 'inactive_lpar_mobility_capable': True})
@mock.patch('paxes_nova.virt.ibmpowervm.ivm.operator.IVMOperator.run_command')
def test_get_migration_stats(self, mock_run_command):
mock_run_command.return_value = (['64,8,0,0'])
self.assertEqual(self.ivm_opt.get_migration_stats(), {'inactv_migr_sup': 64,
'actv_migr_supp': 8,
'inactv_migr_prg': 0,
'actv_migr_prg': 0})
@mock.patch('paxes_nova.virt.ibmpowervm.ivm.operator.IVMOperator.run_command')
def test_get_inactv_migration_stats(self, mock_run_command):
mock_run_command.return_value = (['64,0'])
self.assertEqual(self.ivm_opt.get_inactv_migration_stats(), {'inactv_migr_sup': 64,
'inactv_migr_prg': 0})
@mock.patch('paxes_nova.virt.ibmpowervm.ivm.operator.IVMOperator.run_command')
def test_get_actv_migration_stats(self, mock_run_command):
mock_run_command.return_value = (['8,0'])
self.assertEqual(self.ivm_opt.get_actv_migration_stats(), {'actv_migr_supp': 8,
'actv_migr_prg': 0})
@mock.patch('paxes_nova.virt.ibmpowervm.ivm.operator.IVMOperator.run_command')
def test_check_vopt_exists(self, mock_run_command):
name = 'RHEL6.5-2013-Server-ppc64-DVD.iso'
mock_run_command.return_value = (['10d2f623-4225-45ec-bc46-4b98ab7c65b3',
'61702c3b-001b-4311-850e-cab2f016add1',
'6c0ec0e7-7655-4932-bcca-cb30a6356fab',
'6df387ae-6be8-40b8-a1d2-6d3633a0fc24',
'753ce55f-4983-4e7e-99c0-ce7e497f559f',
'8354e5a1-91dd-47fd-b3a0-7cf8f057cb43',
'8354e5a1-91dd-47fd-b3a0-7cf8f057cb43',
'RHEL-7.0-20140507.0-Server-ppc64-dvd1.iso',
'RHEL6.4-20130130.0-Server-ppc64-DVD1.',
'RHEL6.5-2013-Server-ppc64-DVD.iso',
'RHEL6.5-2013-Server-ppc64-DVD.iso',
'RHEL6.5-2013-Server-ppc64-DVD.iso',
'RHEL6.5-2013-Server-ppc64-DVD.iso',
'RHEL6.5-2013-Server-ppc64-DVD.iso',
'RHEL6.5-2013-Server-ppc64-DVD.iso',
'RHEL6.5-2013-Server-ppc64-DVD.iso',
'RHEL6.5-2013-Server-ppc64-DVD.iso',
'RHEL6.5-2013-Server-ppc64-DVD.iso',
'RHEL6.5-2013-Server-ppc64-DVD.iso',
'RHEL6.5-2013-Server-ppc64-DVD.iso',
'b3ec3547-307d-4a58-9c69-4955f6df2059',
'c53b6b58-e6f3-4eca-8e90-c3e9e0f0babb',
'd0ed1883-2812-4513-b3f5-092ee8adecd3',
'e4efe10b-34e6-4bb2-b139-0d9a54e55456',
'fdf9b77e-e62c-4fc6-a52a-c355217adaea',
'fdf9b77e-e62c-4fc6-a52a-c355217adaea',
'vopt_06a5f3344fd1402d9bf5bc2c2a5bff41',
'vopt_1e4ea2dd2afa46e89fd0a3234336157e',
'vopt_2a37ccafa8f343a2a08ef119d7b7513b',
'vopt_36e1d56b897946d78293a4594362b884',
'vopt_53de1f8e17d44c1395c4c9b3a4d603fe',
'vopt_59e46a072d484fd9a8c5cdded8214aa4',
'vopt_5e6b50fa0c7e4befa2a537706544e07b',
'vopt_60056ead188a447fbbf51f0dc416627f',
'vopt_6490f79f8e2049018e817fdb75a2cc79',
'vopt_6572e1870a9a4b4993f34aae5d351e4d',
'vopt_7c3085768c9e4a0ab796fa7148b58824',
'vopt_83f111feb8464f5aa0e2702d9cad54ae',
'vopt_c1e896b6656f4c94a70036ae4c518795',
'vopt_d0fd2eed31034148a54659b2987e5ded',
])
self.assertEqual(self.ivm_opt.check_vopt_exists(name), True)
@mock.patch('paxes_nova.virt.ibmpowervm.ivm.operator.IVMOperator.run_vios_command')
def test_prepare_vhost_dev_lun_dict(self, mock_run_vios_command):
mock_run_vios_command.return_value = (['vhost0:0x00000002:0x8200000000000000:/var/vio/VMLibrary/RHEL6.5-2013-Server-ppc64-DVD.iso:0x8100000000000000:lp2vd2',
'vhost1:0x00000003:0x8200000000000000: :0x8100000000000000:lv02',
'vhost2:0x00000004:0x8200000000000000: :0x8100000000000000:lv00',
'vhost3:0x00000005:0x8200000000000000:/var/vio/VMLibrary/RHEL6.4-20130130.0-Server-ppc64-DVD1.:0x8100000000000000:lv01',
'vhost4:0x00000006:0x8200000000000000:/var/vio/VMLibrary/RHEL6.5-2013-Server-ppc64-DVD.iso:0x8100000000000000:lv04:0x8300000000000000:lv04_ext',
'vhost5:0x00000007:0x8200000000000000:/var/vio/VMLibrary/RHEL6.5-2013-Server-ppc64-DVD.iso:0x8100000000000000:lv05',
'vhost6:0x0000000f:0x8200000000000000:/var/vio/VMLibrary/dc5e181d-797d-4e89-8b72-05e6d87519d5:0x8100000000000000:lv20',
'vhost7:0x00000008:0x8200000000000000:/var/vio/VMLibrary/RHEL6.5-2013-Server-ppc64-DVD.iso:0x8100000000000000:lp8vd1',
'vhost8:0x0000000a:0x8200000000000000:/var/vio/VMLibrary/RHEL6.5-2013-Server-ppc64-DVD.iso:0x8100000000000000:lp10vd1',
'vhost10:0x0000000b:0x8200000000000000:/var/vio/VMLibrary/vopt_83f111feb8464f5aa0e2702d9cad54ae:0x8100000000000000:lv03',
'vhost11:0x0000000d:0x8200000000000000:/var/vio/VMLibrary/RHEL6.5-2013-Server-ppc64-DVD.iso:0x8100000000000000:lp13vd1',
'vhost12:0x0000000c:0x8200000000000000:/var/vio/VMLibrary/vopt_d0fd2eed31034148a54659b2987e5ded:0x8100000000000000:lv09',
'vhost13:0x0000000e:0x8200000000000000:/var/vio/VMLibrary/8354e5a1-91dd-47fd-b3a0-7cf8f057cb43:0x8100000000000000:lv14',
'vhost14:0x00000010:0x8200000000000000:/var/vio/VMLibrary/RHEL6.5-2013-Server-ppc64-DVD.iso:0x8100000000000000:lp16vd1',
'vhost15:0x00000011:0x8200000000000000:/var/vio/VMLibrary/e4efe10b-34e6-4bb2-b139-0d9a54e55456:0x8100000000000000:lv11',
'vhost16:0x00000012:0x8200000000000000:/var/vio/VMLibrary/6c0ec0e7-7655-4932-bcca-cb30a6356fab:0x8100000000000000:lv10',
'vhost17:0x00000013:0x8200000000000000:/var/vio/VMLibrary/8354e5a1-91dd-47fd-b3a0-7cf8f057cb43:0x8100000000000000:lv13',
'vhost18:0x00000014:0x8200000000000000:/var/vio/VMLibrary/RHEL6.5-2013-Server-ppc64-DVD.iso:0x8100000000000000:lp20vd1',
'vhost19:0x00000015:0x8200000000000000:/var/vio/VMLibrary/RHEL6.5-2013-Server-ppc64-DVD.iso:0x8100000000000000:lp21vd1',
'vhost21:0x00000017:0x8200000000000000:/var/vio/VMLibrary/RHEL6.5-2013-Server-ppc64-DVD.iso:0x8100000000000000:test-paxes',
'vhost22:0x00000018:0x8200000000000000:/var/vio/VMLibrary/b0d6fcc9-85ee-464e-af68-afdb921701af:0x8100000000000000:lv12',
'vhost23:0x00000019:0x8200000000000000:/var/vio/VMLibrary/RHEL-7.0-20140507.0-Server-ppc64-dvd1.iso:0x8100000000000000:lp25vd1',
'vhost24:0x0000001a:0x8200000000000000:/var/vio/VMLibrary/RHEL6.5-2013-Server-ppc64-DVD.iso:0x8100000000000000:lv06',
'vhost25:0x0000001b:0x8200000000000000:/var/vio/VMLibrary/c53b6b58-e6f3-4eca-8e90-c3e9e0f0babb:0x8100000000000000:lv16',
'vhost26:0x0000001c:0x8100000000000000:/var/vio/VMLibrary/fdf9b77e-e62c-4fc6-a52a-c355217adaea:0x8200000000000000:hdisk3',
'vhost27:0x0000001d:0x8100000000000000:/var/vio/VMLibrary/fdf9b77e-e62c-4fc6-a52a-c355217adaea:0x8200000000000000:hdisk4',
])
self.assertEqual(self.ivm_opt.prepare_vhost_dev_lun_dict(), ({'vhost0': ['0x8200000000000000', '/var/vio/VMLibrary/RHEL6.5-2013-Server-ppc64-DVD.iso', '0x8100000000000000', 'lp2vd2'],
'vhost1': ['0x8200000000000000', ' ', '0x8100000000000000', 'lv02'],
'vhost10': ['0x8200000000000000', '/var/vio/VMLibrary/vopt_83f111feb8464f5aa0e2702d9cad54ae', '0x8100000000000000', 'lv03'],
'vhost11': ['0x8200000000000000', '/var/vio/VMLibrary/RHEL6.5-2013-Server-ppc64-DVD.iso', '0x8100000000000000', 'lp13vd1'],
'vhost12': ['0x8200000000000000', '/var/vio/VMLibrary/vopt_d0fd2eed31034148a54659b2987e5ded', '0x8100000000000000', 'lv09'],
'vhost13': ['0x8200000000000000', '/var/vio/VMLibrary/8354e5a1-91dd-47fd-b3a0-7cf8f057cb43', '0x8100000000000000', 'lv14'],
'vhost14': ['0x8200000000000000', '/var/vio/VMLibrary/RHEL6.5-2013-Server-ppc64-DVD.iso', '0x8100000000000000', 'lp16vd1'],
'vhost15': ['0x8200000000000000', '/var/vio/VMLibrary/e4efe10b-34e6-4bb2-b139-0d9a54e55456', '0x8100000000000000', 'lv11'],
'vhost16': ['0x8200000000000000', '/var/vio/VMLibrary/6c0ec0e7-7655-4932-bcca-cb30a6356fab', '0x8100000000000000', 'lv10'],
'vhost17': ['0x8200000000000000', '/var/vio/VMLibrary/8354e5a1-91dd-47fd-b3a0-7cf8f057cb43', '0x8100000000000000', 'lv13'],
'vhost18': ['0x8200000000000000', '/var/vio/VMLibrary/RHEL6.5-2013-Server-ppc64-DVD.iso', '0x8100000000000000', 'lp20vd1'],
'vhost19': ['0x8200000000000000', '/var/vio/VMLibrary/RHEL6.5-2013-Server-ppc64-DVD.iso', '0x8100000000000000', 'lp21vd1'],
'vhost2': ['0x8200000000000000', ' ', '0x8100000000000000', 'lv00'],
'vhost21': ['0x8200000000000000', '/var/vio/VMLibrary/RHEL6.5-2013-Server-ppc64-DVD.iso', '0x8100000000000000', 'test-paxes'],
'vhost22': ['0x8200000000000000', '/var/vio/VMLibrary/b0d6fcc9-85ee-464e-af68-afdb921701af', '0x8100000000000000', 'lv12'],
'vhost23': ['0x8200000000000000', '/var/vio/VMLibrary/RHEL-7.0-20140507.0-Server-ppc64-dvd1.iso', '0x8100000000000000', 'lp25vd1'],
'vhost24': ['0x8200000000000000', '/var/vio/VMLibrary/RHEL6.5-2013-Server-ppc64-DVD.iso', '0x8100000000000000', 'lv06'],
'vhost25': ['0x8200000000000000', '/var/vio/VMLibrary/c53b6b58-e6f3-4eca-8e90-c3e9e0f0babb', '0x8100000000000000', 'lv16'],
'vhost26': ['0x8100000000000000', '/var/vio/VMLibrary/fdf9b77e-e62c-4fc6-a52a-c355217adaea', '0x8200000000000000', 'hdisk3'],
'vhost27': ['0x8100000000000000', '/var/vio/VMLibrary/fdf9b77e-e62c-4fc6-a52a-c355217adaea', '0x8200000000000000', 'hdisk4'],
'vhost3': ['0x8200000000000000', '/var/vio/VMLibrary/RHEL6.4-20130130.0-Server-ppc64-DVD1.', '0x8100000000000000', 'lv01'],
'vhost4': ['0x8200000000000000', '/var/vio/VMLibrary/RHEL6.5-2013-Server-ppc64-DVD.iso', '0x8100000000000000', 'lv04', '0x8300000000000000', 'lv04_ext'],
'vhost5': ['0x8200000000000000', '/var/vio/VMLibrary/RHEL6.5-2013-Server-ppc64-DVD.iso', '0x8100000000000000', 'lv05'],
'vhost6': ['0x8200000000000000', '/var/vio/VMLibrary/dc5e181d-797d-4e89-8b72-05e6d87519d5', '0x8100000000000000', 'lv20'],
'vhost7': ['0x8200000000000000', '/var/vio/VMLibrary/RHEL6.5-2013-Server-ppc64-DVD.iso', '0x8100000000000000', 'lp8vd1'],
'vhost8': ['0x8200000000000000', '/var/vio/VMLibrary/RHEL6.5-2013-Server-ppc64-DVD.iso', '0x8100000000000000', 'lp10vd1']},
{'0x00000002': 'vhost0',
'0x00000003': 'vhost1',
'0x00000004': 'vhost2',
'0x00000005': 'vhost3',
'0x00000006': 'vhost4',
'0x00000007': 'vhost5',
'0x00000008': 'vhost7',
'0x0000000a': 'vhost8',
'0x0000000b': 'vhost10',
'0x0000000c': 'vhost12',
'0x0000000d': 'vhost11',
'0x0000000e': 'vhost13',
'0x0000000f': 'vhost6',
'0x00000010': 'vhost14',
'0x00000011': 'vhost15',
'0x00000012': 'vhost16',
'0x00000013': 'vhost17',
'0x00000014': 'vhost18',
'0x00000015': 'vhost19',
'0x00000017': 'vhost21',
'0x00000018': 'vhost22',
'0x00000019': 'vhost23',
'0x0000001a': 'vhost24',
'0x0000001b': 'vhost25',
'0x0000001c': 'vhost26',
'0x0000001d': 'vhost27'}))
@mock.patch('paxes_nova.virt.ibmpowervm.ivm.operator.IVMOperator.run_command')
def test_check_dlpar_connectivity(self, mock_run_command):
instance_name = 'instance-0000011f'
mock_run_command.return_value = (['0,0,none'])
self.assertEqual(self.ivm_opt.check_dlpar_connectivity(instance_name), (True, 'none'))
|
|
"""
Core OpenBCI object for handling connections and samples from the board.
EXAMPLE USE:
def handle_sample(sample):
print(sample.channels)
board = OpenBCIBoard()
board.print_register_settings()
board.start(handle_sample)
NOTE: If the daisy module is enabled, the callback will occur every two samples, hence "packet_id" will only contain even numbers. As a side effect, the sampling rate will be divided by 2.
FIXME: at the moment we just force daisy mode; we do not check that the module is actually detected.
"""
import serial
import struct
import numpy as np
import time
import timeit
import atexit
SAMPLE_RATE = 250.0 # Hz
START_BYTE = 0xA0 # start of data packet
END_BYTE = 0xC0 # end of data packet
ADS1299_Vref = 4.5 #reference voltage for ADC in ADS1299. set by its hardware
ADS1299_gain = 24.0 #assumed gain setting for ADS1299. set by its Arduino code
scale_fac_uVolts_per_count = ADS1299_Vref/float((pow(2,23)-1))/ADS1299_gain*1000000.
scale_fac_accel_G_per_count = 0.002 /(pow(2,4)) #assume set to +/-4G, so 2 mG
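# For reference, these constants work out to roughly 0.0224 uV per count for
# the EEG channels and 0.000125 G per count for the accelerometer.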
'''
#Commands in the SDK http://docs.openbci.com/software/01-Open BCI_SDK:
command_stop = "s";
command_startText = "x";
command_startBinary = "b";
command_startBinary_wAux = "n";
command_startBinary_4chan = "v";
command_activateFilters = "F";
command_deactivateFilters = "g";
command_deactivate_channel = {"1", "2", "3", "4", "5", "6", "7", "8"};
command_activate_channel = {"q", "w", "e", "r", "t", "y", "u", "i"};
command_activate_leadoffP_channel = {"!", "@", "#", "$", "%", "^", "&", "*"}; //shift + 1-8
command_deactivate_leadoffP_channel = {"Q", "W", "E", "R", "T", "Y", "U", "I"}; //letters (plus shift) right below 1-8
command_activate_leadoffN_channel = {"A", "S", "D", "F", "G", "H", "J", "K"}; //letters (plus shift) below the letters below 1-8
command_deactivate_leadoffN_channel = {"Z", "X", "C", "V", "B", "N", "M", "<"}; //letters (plus shift) below the letters below the letters below 1-8
command_biasAuto = "`";
command_biasFixed = "~";
'''
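
# A minimal sketch, not part of the original module: how one of the raw
# single-character SDK commands listed above could be sent once a board
# (see OpenBCIBoard below) holds an open serial connection. The helper
# name is hypothetical.
def _send_sdk_command(board, command_char):
    """Write one raw SDK command character, e.g. 's' to stop streaming."""
    board.ser.write(command_char)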
class OpenBCIBoard(object):
"""
Handle a connection to an OpenBCI board.
Args:
port: The port to connect to.
baud: The baud of the serial connection.
    daisy: Enable or disable the daisy module and 16-channel readings
"""
def __init__(self, port=None, baud=115200, filter_data=True,
scaled_output=True, daisy=False):
if not port:
port = find_port()
if not port:
raise OSError('Cannot find OpenBCI port')
print("Connecting to %s" %(port))
self.ser = serial.Serial(port, baud)
print("Serial established...")
#Initialize 32-bit board, doesn't affect 8bit board
self.ser.write('v');
#wait for device to be ready
time.sleep(1)
self.print_incoming_text()
self.streaming = False
self.filtering_data = filter_data
self.scaling_output = scaled_output
self.eeg_channels_per_sample = 8 # number of EEG channels per sample *from the board*
self.aux_channels_per_sample = 3 # number of AUX channels per sample *from the board*
self.read_state = 0;
self.daisy = daisy
self.last_odd_sample = OpenBCISample(-1, [], []) # used for daisy
#Disconnects from board when terminated
atexit.register(self.disconnect)
def getSampleRate(self):
if self.daisy:
return SAMPLE_RATE/2
else:
return SAMPLE_RATE
def getNbEEGChannels(self):
if self.daisy:
return self.eeg_channels_per_sample*2
else:
return self.eeg_channels_per_sample
def getNbAUXChannels(self):
return self.aux_channels_per_sample
def start_streaming(self, callback, lapse=-1):
"""
Start handling streaming data from the board. Call a provided callback
for every single sample that is processed (every two samples with daisy module).
Args:
callback: A callback function -- or a list of functions -- that will receive a single argument of the
OpenBCISample object captured.
"""
if not self.streaming:
self.ser.write('b')
self.streaming = True
start_time = timeit.default_timer()
    # Enclose callback function in a list if it comes alone
if not isinstance(callback, list):
callback = [callback]
while self.streaming:
# read current sample
sample = self._read_serial_binary()
# if a daisy module is attached, wait to concatenate two samples (main board + daisy) before passing it to callback
if self.daisy:
# odd sample: daisy sample, save for later
if ~sample.id % 2:
self.last_odd_sample = sample
        # even sample: concatenate and send if last sample was the first part, otherwise drop the packet
elif sample.id - 1 == self.last_odd_sample.id:
# the aux data will be the average between the two samples, as the channel samples themselves have been averaged by the board
avg_aux_data = list((np.array(sample.aux_data) + np.array(self.last_odd_sample.aux_data))/2)
whole_sample = OpenBCISample(sample.id, sample.channel_data + self.last_odd_sample.channel_data, avg_aux_data)
for call in callback:
call(whole_sample)
else:
for call in callback:
call(sample)
if(lapse > 0 and timeit.default_timer() - start_time > lapse):
self.stop();
"""
Used by exit clean up function (atexit)
"""
def warn(self, text):
print("Warning: %s" % text)
def stop(self):
self.warn("Stopping streaming...\nWait for buffer to flush...")
self.streaming = False
self.ser.write('s')
def disconnect(self):
if(self.streaming == True):
self.stop()
if (self.ser.isOpen()):
self.warn("Closing Serial...")
self.ser.close()
"""
PARSER:
Parses incoming data packet into OpenBCISample.
Incoming Packet Structure:
Start Byte(1)|Sample ID(1)|Channel Data(24)|Aux Data(6)|End Byte(1)
0xA0|0-255|8, 3-byte signed ints|3 2-byte signed ints|0xC0
"""
def _read_serial_binary(self, max_bytes_to_skip=3000):
def read(n):
b = self.ser.read(n)
# print b
return b
for rep in xrange(max_bytes_to_skip):
#---------Start Byte & ID---------
if self.read_state == 0:
b = read(1)
if not b:
if not self.ser.inWaiting():
self.warn('Device appears to be stalled. Restarting...')
self.ser.write('b\n') # restart if it's stopped...
time.sleep(.100)
continue
if struct.unpack('B', b)[0] == START_BYTE:
if(rep != 0):
self.warn('Skipped %d bytes before start found' %(rep))
packet_id = struct.unpack('B', read(1))[0] #packet id goes from 0-255
self.read_state = 1
#---------Channel Data---------
elif self.read_state == 1:
channel_data = []
for c in xrange(self.eeg_channels_per_sample):
#3 byte ints
literal_read = read(3)
unpacked = struct.unpack('3B', literal_read)
          #3-byte int in 2's complement
if (unpacked[0] >= 127):
pre_fix = '\xFF'
else:
pre_fix = '\x00'
literal_read = pre_fix + literal_read;
          #unpack big endian (>) signed integer (i) (makes unpacking platform independent)
myInt = struct.unpack('>i', literal_read)[0]
if self.scaling_output:
channel_data.append(myInt*scale_fac_uVolts_per_count)
else:
channel_data.append(myInt)
self.read_state = 2;
#---------Accelerometer Data---------
elif self.read_state == 2:
aux_data = []
for a in xrange(self.aux_channels_per_sample):
#short = h
acc = struct.unpack('>h', read(2))[0]
if self.scaling_output:
aux_data.append(acc*scale_fac_accel_G_per_count)
else:
aux_data.append(acc)
self.read_state = 3;
#---------End Byte---------
elif self.read_state == 3:
val = struct.unpack('B', read(1))[0]
if (val == END_BYTE):
sample = OpenBCISample(packet_id, channel_data, aux_data)
self.read_state = 0 #read next packet
return sample
else:
self.warn("Warning: Unexpected END_BYTE found <%s> instead of <%s>,\
            discarded packet with id <%d>"
%(val, END_BYTE, packet_id))
"""
SETTINGS AND HELPERS
"""
def print_incoming_text(self):
"""
When starting the connection, print all the debug data until
we get to a line with the end sequence '$$$'.
"""
line = ''
#Wait for device to send data
time.sleep(0.5)
if self.ser.inWaiting():
line = ''
c = ''
#Look for end sequence $$$
while '$$$' not in line:
c = self.ser.read()
line += c
print(line);
def print_register_settings(self):
self.ser.write('?')
time.sleep(0.5)
    self.print_incoming_text()
  #DEBUGGING: Prints individual incoming bytes
def print_bytes_in(self):
if not self.streaming:
self.ser.write('b')
self.streaming = True
while self.streaming:
print(struct.unpack('B',self.ser.read())[0]);
  #Adds a filter at 60 Hz to cancel out ambient electrical noise
def enable_filters(self):
self.ser.write('f')
self.filtering_data = True;
def disable_filters(self):
self.ser.write('g')
self.filtering_data = False;
def test_signal(self, signal):
if signal == 0:
self.ser.write('0')
self.warn("Connecting all pins to ground")
elif signal == 1:
self.ser.write('p')
self.warn("Connecting all pins to Vcc")
elif signal == 2:
self.ser.write('-')
self.warn("Connecting pins to low frequency 1x amp signal")
elif signal == 3:
self.ser.write('=')
self.warn("Connecting pins to high frequency 1x amp signal")
elif signal == 4:
self.ser.write('[')
self.warn("Connecting pins to low frequency 2x amp signal")
elif signal == 5:
self.ser.write(']')
self.warn("Connecting pins to high frequency 2x amp signal")
else:
self.warn("%s is not a known test signal. Valid signals go from 0-5" %(signal))
def set_channel(self, channel, toggle_position):
#Commands to set toggle to on position
if toggle_position == 1:
if channel is 1:
self.ser.write('!')
if channel is 2:
self.ser.write('@')
if channel is 3:
self.ser.write('#')
if channel is 4:
self.ser.write('$')
if channel is 5:
self.ser.write('%')
if channel is 6:
self.ser.write('^')
if channel is 7:
self.ser.write('&')
if channel is 8:
self.ser.write('*')
if channel is 9 and self.daisy:
self.ser.write('Q')
if channel is 10 and self.daisy:
self.ser.write('W')
if channel is 11 and self.daisy:
self.ser.write('E')
if channel is 12 and self.daisy:
self.ser.write('R')
if channel is 13 and self.daisy:
self.ser.write('T')
if channel is 14 and self.daisy:
self.ser.write('Y')
if channel is 15 and self.daisy:
self.ser.write('U')
if channel is 16 and self.daisy:
self.ser.write('I')
#Commands to set toggle to off position
elif toggle_position == 0:
if channel is 1:
self.ser.write('1')
if channel is 2:
self.ser.write('2')
if channel is 3:
self.ser.write('3')
if channel is 4:
self.ser.write('4')
if channel is 5:
self.ser.write('5')
if channel is 6:
self.ser.write('6')
if channel is 7:
self.ser.write('7')
if channel is 8:
self.ser.write('8')
if channel is 9 and self.daisy:
self.ser.write('q')
if channel is 10 and self.daisy:
self.ser.write('w')
if channel is 11 and self.daisy:
self.ser.write('e')
if channel is 12 and self.daisy:
self.ser.write('r')
if channel is 13 and self.daisy:
self.ser.write('t')
if channel is 14 and self.daisy:
self.ser.write('y')
if channel is 15 and self.daisy:
self.ser.write('u')
if channel is 16 and self.daisy:
self.ser.write('i')
class OpenBCISample(object):
"""Object encapulsating a single sample from the OpenBCI board."""
def __init__(self, packet_id, channel_data, aux_data):
self.id = packet_id;
self.channel_data = channel_data;
self.aux_data = aux_data;
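
# A minimal, self-contained sketch (not part of the driver above) showing how the
# packet layout described in the PARSER docstring can be built and unpacked with
# struct. All byte values below are made up for illustration; the real driver reads
# them from the serial port in _read_serial_binary().
def _demo_parse_packet():
    # Start(0xA0) | Sample ID(1) | 8 x 3-byte signed ints | 3 x 2-byte signed ints | End(0xC0)
    channels = [1000] * 8
    aux = [0, 0, 4000]
    raw = struct.pack('B', START_BYTE) + struct.pack('B', 42)
    for v in channels:
        raw += struct.pack('>i', v)[1:]  # keep the low 3 bytes (big endian)
    for a in aux:
        raw += struct.pack('>h', a)
    raw += struct.pack('B', END_BYTE)
    # Unpack the first channel the same way the driver does (sign-extend to 4 bytes)
    chunk = raw[2:5]
    prefix = '\xFF' if struct.unpack('3B', chunk)[0] >= 127 else '\x00'
    count = struct.unpack('>i', prefix + chunk)[0]
    print("channel 0 raw count: %d (~%.1f uV)" % (count, count * scale_fac_uVolts_per_count))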
|
|
# coding=utf-8
"""
Collects all number values from the db.serverStatus() command; other
values are ignored.
#### Dependencies
* pymongo
#### Example Configuration
MongoDBCollector.conf
```
enabled = True
hosts = localhost:27017, alias1@localhost:27018, etc
```
"""
import diamond.collector
from diamond.collector import str_to_bool
import re
import zlib
try:
import pymongo
except ImportError:
pymongo = None
try:
from pymongo import ReadPreference
except ImportError:
ReadPreference = None
class MongoDBCollector(diamond.collector.Collector):
MAX_CRC32 = 4294967295
def __init__(self, *args, **kwargs):
self.__totals = {}
super(MongoDBCollector, self).__init__(*args, **kwargs)
def get_default_config_help(self):
config_help = super(MongoDBCollector, self).get_default_config_help()
config_help.update({
            'hosts': 'Array of hostname(:port) elements to get metrics from.'
                     ' Set an alias by prefixing host:port with alias@',
'host': 'A single hostname(:port) to get metrics from'
' (can be used instead of hosts and overrides it)',
'user': 'Username for authenticated login (optional)',
'passwd': 'Password for authenticated login (optional)',
'databases': 'A regex of which databases to gather metrics for.'
' Defaults to all databases.',
'ignore_collections': 'A regex of which collections to ignore.'
' MapReduce temporary collections (tmp.mr.*)'
' are ignored by default.',
            'collection_sample_rate': 'Only send stats for a consistent subset '
            'of collections. This is applied after collections are ignored via'
            ' ignore_collections. Sampling uses crc32 so it is consistent across'
            ' replicas. Value between 0 and 1. Default is 1',
'network_timeout': 'Timeout for mongodb connection (in seconds).'
' There is no timeout by default.',
'simple': 'Only collect the same metrics as mongostat.',
'translate_collections': 'Translate dot (.) to underscores (_)'
' in collection names.',
'ssl': 'True to enable SSL connections to the MongoDB server.'
' Default is False'
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(MongoDBCollector, self).get_default_config()
config.update({
'path': 'mongo',
'hosts': ['localhost'],
'user': None,
'passwd': None,
'databases': '.*',
'ignore_collections': '^tmp\.mr\.',
'network_timeout': None,
'simple': 'False',
'translate_collections': 'False',
'collection_sample_rate': 1,
'ssl': False
})
return config
def collect(self):
"""Collect number values from db.serverStatus()"""
if pymongo is None:
self.log.error('Unable to import pymongo')
return
hosts = self.config.get('hosts')
# Convert a string config value to be an array
if isinstance(hosts, basestring):
hosts = [hosts]
# we need this for backwards compatibility
if 'host' in self.config:
hosts = [self.config['host']]
# convert network_timeout to integer
if self.config['network_timeout']:
self.config['network_timeout'] = int(
self.config['network_timeout'])
# convert collection_sample_rate to float
if self.config['collection_sample_rate']:
self.config['collection_sample_rate'] = float(
self.config['collection_sample_rate'])
# use auth if given
if 'user' in self.config:
user = self.config['user']
else:
user = None
if 'passwd' in self.config:
passwd = self.config['passwd']
else:
passwd = None
for host in hosts:
matches = re.search('((.+)\@)?(.+)?', host)
alias = matches.group(2)
host = matches.group(3)
if alias is None:
if len(hosts) == 1:
# one host only, no need to have a prefix
base_prefix = []
else:
base_prefix = [re.sub('[:\.]', '_', host)]
else:
base_prefix = [alias]
try:
# Ensure that the SSL option is a boolean.
if type(self.config['ssl']) is str:
self.config['ssl'] = str_to_bool(self.config['ssl'])
if ReadPreference is None:
conn = pymongo.Connection(
host,
network_timeout=self.config['network_timeout'],
ssl=self.config['ssl'],
slave_okay=True
)
else:
conn = pymongo.Connection(
host,
network_timeout=self.config['network_timeout'],
ssl=self.config['ssl'],
read_preference=ReadPreference.SECONDARY,
)
except Exception, e:
                self.log.error("Couldn't connect to mongodb: %s", e)
continue
# try auth
if user:
try:
conn.admin.authenticate(user, passwd)
except Exception, e:
                    self.log.error('User auth given, but could not authenticate'
                                   + ' with host: %s, err: %s' % (host, e))
return{}
data = conn.db.command('serverStatus')
self._publish_transformed(data, base_prefix)
if str_to_bool(self.config['simple']):
data = self._extract_simple_data(data)
self._publish_dict_with_prefix(data, base_prefix)
db_name_filter = re.compile(self.config['databases'])
ignored_collections = re.compile(self.config['ignore_collections'])
sample_threshold = self.MAX_CRC32 * self.config[
'collection_sample_rate']
for db_name in conn.database_names():
if not db_name_filter.search(db_name):
continue
db_stats = conn[db_name].command('dbStats')
db_prefix = base_prefix + ['databases', db_name]
self._publish_dict_with_prefix(db_stats, db_prefix)
for collection_name in conn[db_name].collection_names():
if ignored_collections.search(collection_name):
continue
if (self.config['collection_sample_rate'] < 1 and (
zlib.crc32(collection_name) & 0xffffffff
) > sample_threshold):
continue
collection_stats = conn[db_name].command('collstats',
collection_name)
if str_to_bool(self.config['translate_collections']):
collection_name = collection_name.replace('.', '_')
collection_prefix = db_prefix + [collection_name]
self._publish_dict_with_prefix(collection_stats,
collection_prefix)
def _publish_transformed(self, data, base_prefix):
""" Publish values of type: counter or percent """
self._publish_dict_with_prefix(data.get('opcounters', {}),
base_prefix + ['opcounters_per_sec'],
self.publish_counter)
self._publish_dict_with_prefix(data.get('opcountersRepl', {}),
base_prefix + ['opcountersRepl_per_sec'],
self.publish_counter)
self._publish_metrics(base_prefix + ['backgroundFlushing_per_sec'],
'flushes',
data.get('backgroundFlushing', {}),
self.publish_counter)
self._publish_dict_with_prefix(data.get('network', {}),
base_prefix + ['network_per_sec'],
self.publish_counter)
self._publish_metrics(base_prefix + ['extra_info_per_sec'],
'page_faults',
data.get('extra_info', {}),
self.publish_counter)
def get_dotted_value(data, key_name):
key_name = key_name.split('.')
for i in key_name:
data = data.get(i, {})
if not data:
return 0
return data
def compute_interval(data, total_name):
current_total = get_dotted_value(data, total_name)
total_key = '.'.join(base_prefix + [total_name])
last_total = self.__totals.get(total_key, current_total)
interval = current_total - last_total
self.__totals[total_key] = current_total
return interval
def publish_percent(value_name, total_name, data):
value = float(get_dotted_value(data, value_name) * 100)
interval = compute_interval(data, total_name)
key = '.'.join(base_prefix + ['percent', value_name])
self.publish_counter(key, value, time_delta=bool(interval),
interval=interval)
publish_percent('globalLock.lockTime', 'globalLock.totalTime', data)
publish_percent('indexCounters.btree.misses',
'indexCounters.btree.accesses', data)
locks = data.get('locks')
if locks:
if '.' in locks:
locks['_global_'] = locks['.']
del (locks['.'])
key_prefix = '.'.join(base_prefix + ['percent'])
db_name_filter = re.compile(self.config['databases'])
interval = compute_interval(data, 'uptimeMillis')
for db_name in locks:
if not db_name_filter.search(db_name):
continue
r = get_dotted_value(
locks,
'%s.timeLockedMicros.r' % db_name)
R = get_dotted_value(
locks,
'.%s.timeLockedMicros.R' % db_name)
value = float(r + R) / 10
if value:
self.publish_counter(
key_prefix + '.locks.%s.read' % db_name,
value, time_delta=bool(interval),
interval=interval)
w = get_dotted_value(
locks,
'%s.timeLockedMicros.w' % db_name)
W = get_dotted_value(
locks,
'%s.timeLockedMicros.W' % db_name)
value = float(w + W) / 10
if value:
self.publish_counter(
key_prefix + '.locks.%s.write' % db_name,
value, time_delta=bool(interval), interval=interval)
def _publish_dict_with_prefix(self, dict, prefix, publishfn=None):
for key in dict:
self._publish_metrics(prefix, key, dict, publishfn)
def _publish_metrics(self, prev_keys, key, data, publishfn=None):
"""Recursively publish keys"""
if not key in data:
return
value = data[key]
keys = prev_keys + [key]
if not publishfn:
publishfn = self.publish
if isinstance(value, dict):
for new_key in value:
self._publish_metrics(keys, new_key, value)
elif isinstance(value, int) or isinstance(value, float):
publishfn('.'.join(keys), value)
elif isinstance(value, long):
publishfn('.'.join(keys), float(value))
def _extract_simple_data(self, data):
return {
'connections': data.get('connections'),
'globalLock': data.get('globalLock'),
'indexCounters': data.get('indexCounters')
}
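
# Standalone sketch (illustrative only) of the crc32-based collection sampling
# used in collect() above: a collection name always hashes to the same value, so
# every replica keeps the same consistent subset when collection_sample_rate is
# below 1. The collection names below are made up.
def _demo_collection_sampling(sample_rate=0.25):
    threshold = MongoDBCollector.MAX_CRC32 * sample_rate
    for name in ['users', 'orders', 'sessions', 'logs']:
        keep = (zlib.crc32(name) & 0xffffffff) <= threshold
        print '%s -> %s' % (name, 'sampled' if keep else 'skipped')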
|
|
#########################
#### General Imports ####
#########################
import sys
import os
import cProfile
import pstats
import StringIO
import time
import logging
from idaapi import *
from idautils import *
from idc import *
### DIE Imports###
import DIE.Lib.DieConfig
import DIE.Lib.DataParser
import DIE.Lib.BpHandler
from DIE.Lib.DIE_Exceptions import FuncCallExceedMax, DieCallStackPopError, DieCallStackPushError, DieThunkFunctionDetected
from DIE.Lib.CallStack import *
from DIE.Lib.DbgImports import *
from DIE.Lib.IDAConnector import get_cur_ea, is_call, is_ida_debugger_present, is_system_lib
import DIE.Lib.DIEDb
##########################
#### Defines ####
##########################
WAS_USER_BREAKPOINT = 0x1
class DebugHooker(DBG_Hooks):
"""
IDA Debug hooking functionality
"""
def __init__(self, is_dbg_pause=False, is_dbg_profile=False, is_dyn_bp=False):
try:
self.logger = logging.getLogger(__name__)
self.config = DIE.Lib.DieConfig.get_config()
data_parser = DIE.Lib.DataParser.getParser()
plugin_path = self.config.parser_path
data_parser.set_plugin_path(plugin_path)
data_parser.loadPlugins()
# Breakpoint Exceptions
self.bp_handler = DIE.Lib.BpHandler.get_bp_handler()
self.bp_handler.load_exceptions(DIE.Lib.DIEDb.get_db())
### Debugging ###
DBG_Hooks.__init__(self) # IDA Debug Hooking API
self.isHooked = False # Is debugger currently hooked
self.runtime_imports = DbgImports() # Runtime import addresses
self.callStack = {} # Function call-stack dictionary
# (Key: ThreadId, Value: Thread specific Call-Stack)
self.current_callstack = None # A pointer to the currently active call-stack
self.prev_bp_ea = None # Address of previously hit breakpoint
self.end_bp = None # If set framework will stop once this bp was reached
self.start_time = None # Debugging start time
self.end_time = None # Debugging end time
### Flags
self.is_dbg_pause = is_dbg_pause # Pause execution at each breakpoint
self.is_dbg_profile = is_dbg_profile # Profiling flag
self.is_dyn_breakpoints = is_dyn_bp # Should breakpoint be set dynamically or statically
self.update_imports = True # IAT updating flag (when set runtime_imports will be updated)
### Debugging
self.pr = None # Profiling object (for debug only)
except Exception as ex:
self.logger.exception("Failed to initialize DebugAPI: %s", ex)
return
def Hook(self):
"""
Hook to IDA Debugger
"""
if self.isHooked: # Release any current hooks
self.logger.debug("Debugger is already hooked, releasing previous hook.")
self.UnHook()
try:
if not is_ida_debugger_present():
self.logger.error("DIE cannot be started with no debugger defined.")
return
self.logger.info("Hooking to debugger.")
self.hook()
self.isHooked = True
except Exception as ex:
self.logger.exception("Failed to hook debugger", ex)
sys.exit(1)
def UnHook(self):
"""
Release hooks from IDA Debugger
"""
try:
self.logger.info("Removing previous debugger hooks.")
self.unhook()
self.isHooked = False
except Exception as ex:
self.logger.exception("Failed to hook debugger", ex)
raise RuntimeError("Failed to unhook debugger")
def update_iat(self):
"""
Update the current IAT state and reset flag
"""
self.runtime_imports.getImportTableData()
self.update_imports = False
######################################################################
# Debugger Hooking Callback Routines
def dbg_bpt(self, tid, ea):
"""
'Hit Debug Breakpoint' Callback -
this callback gets called once a breakpoint has been reached -
this means we can either be in a CALL or a RET instruction.
"""
try:
# If final breakpoint has been reached. skip all further breakpoints.
if self.end_bp is not None and ea == self.end_bp:
self.logger.info("Final breakpoint reached at %s. context logging is stopped.", hex(ea))
self.bp_handler.unsetBPs()
request_continue_process()
run_requests()
return 0
# If required, update IAT
if self.update_imports:
self.update_iat()
# Set current call-stack
if tid not in self.callStack:
idaapi.msg("Creating new callstack for thread %d\n" % tid)
self.callStack[tid] = CallStack()
self.current_callstack = self.callStack[tid]
# Did we just return from a function call?
if self.bp_handler.isRetBP(ea):
try:
self.current_callstack.pop()
except DieCallStackPopError:
self.logger.exception("Error while popping function from callstack")
self.bp_handler.removeRetBP(ea)
if not is_call(ea):
request_continue_process()
run_requests()
# Is this a CALL instruction?
if is_call(ea):
self.prev_bp_ea = ea # Set prev ea
self.bp_handler.addRetBP(ea)
if not self.is_dbg_pause:
request_step_into() # Great, step into the called function
run_requests() # Execute dbg_step_into callback.
return 0
except Exception as ex:
self.logger.exception("Failed while handling breakpoint at %s:", ea, ex)
return 1
def dbg_step_into(self):
"""
Step into gets called whenever we step into a CALL instruction.
The callback checks if the function we have stepped into is a library function (in which case
no BPs should be set inside it, so we need to skip to the next RET instruction), or we have
stepped into a native function (in which case we just need to gather data and continue to next BP).
"""
try:
refresh_debugger_memory()
ea = get_cur_ea()
# If function in IAT, retrieve IAT details
iatEA, library_name = self.runtime_imports.find_func_iat_adrs(ea)
# If stepped into an excepted function, remove calling bp and skip over.
if self.bp_handler.is_exception_func(ea, iatEA):
self.logger.debug("Removing breakpoint from %s", hex(self.prev_bp_ea))
self.bp_handler.removeBP(self.prev_bp_ea)
return 0
# Save CALL context
func_call_num = self.current_callstack.push(ea, iatEA, library_name=library_name, calling_ea=self.prev_bp_ea)
(func_adr, func_name) = self.current_callstack.get_top_func_data()
if not func_name:
self.logger.debug("Stepped into function %s at address %s", func_name, hex(ea))
# If "Step-Into System Libraries" option is set, walk function for breakpoints
if self.config.debugging.step_into_syslibs:
self.bp_handler.walk_function(ea)
# If "Step-Into System Libraries" option is not set, check for syslib first first.
elif not self.runtime_imports.is_func_imported(ea) and self.is_dyn_breakpoints:
self.bp_handler.walk_function(ea)
# Check if total number of function calls exceeded the max configured value (0 = Disabled)
if self.config.debugging.max_func_call != 0 and func_call_num > self.config.debugging.max_func_call:
self.make_exception_last_func()
except DieCallStackPushError as ex:
self._callStackPushErrorHandler(ex.ea)
except DieThunkFunctionDetected as ex:
#TODO: Handle cases where a thunk function (jmp wrapper) has been encountered.
pass
except Exception as ex:
self.logger.exception("failed while stepping into breakpoint: %s", ex)
return 0
finally:
# Continue Debugging
request_continue_process()
run_requests()
return 0
def dbg_step_until_ret(self):
"""
Step until return gets called when entering a library function.
the debugger will stop at the next instruction after the RET.
Context info needs to be collected here and execution should be resumed.
"""
try:
# Save Return Context
self.current_callstack.pop()
except DieCallStackPopError as ex:
self.logger.exception("Error while popping function from callstack")
#TODO: Handle this exception
except Exception as ex:
self.logger.exception("Failed while stepping until return: %s", ex)
finally:
if not self.is_dbg_pause:
request_continue_process()
run_requests()
def dbg_thread_start(self, pid, tid, ea):
"""
TODO: debugging, should be implemented fully.
@return:
"""
try:
# Walk thread entry point for breakpoints
self.bp_handler.walk_function(ea)
# If no call-stack exist for this thread, create one.
if not tid in self.callStack:
self.callStack[tid] = CallStack()
if not self.is_dbg_pause:
request_continue_process()
run_requests()
except Exception as ex:
self.logger.exception("Failed while handling new thread: %s", ex)
def dbg_process_exit(self, pid, tid, ea, exit_code):
"""
TODO: debugging, should be implemented fully.
@return:
"""
try:
if self.is_dbg_profile:
self.profile_stop()
except Exception as ex:
self.logger.error("Failed to stop profiling: %s", ex)
try:
self.end_time = time.time()
self.bp_handler.unsetBPs()
die_db = DIE.Lib.DIEDb.get_db()
die_db.add_run_info(self.callStack,
self.start_time,
self.end_time,
idaapi.get_input_file_path(),
idautils.GetInputFileMD5())
self.bp_handler.save_exceptions(die_db)
except Exception as ex:
self.logger.exception("Failed while finalizing DIE run: %s", ex)
def dbg_process_start(self, pid, tid, ea, name, base, size):
"""
TODO: debugging, should be implemented fully.
@return:
"""
return True
def dbg_continue_process(self):
return True
###############################################
# Convenience Function
def make_exception_last_func(self):
"""
Adds the last called function to exceptions
@return: True if succeeded, otherwise False
"""
try:
(except_ea, except_name) = self.current_callstack.get_top_func_data()
self.logger.debug("Function %s was called more then %d times.",
except_name, self.config.debugging.max_func_call)
self.logger.debug("Removing breakpoint from %s", hex(self.prev_bp_ea))
self.bp_handler.removeBP(self.prev_bp_ea)
# Add function to exceptions, and reload breakpoints
self.logger.debug("Adding address %s to exception list", except_ea)
self.bp_handler.add_bp_ea_exception(except_ea)
self.logger.debug("Adding function name %s to exception list", except_name)
#self.bp_handler.add_bp_funcname_exception(except_name, reload_bps=True)
self.bp_handler.add_bp_funcname_exception(except_name)
return True
except Exception as ex:
self.logger.exception("Error while creating exception: %s", ex)
return False
def _callStackPushErrorHandler(self, ea, function_name=None):
"""
Handle a failed attempt to push function to callstack
@param ea: Function Address
@param function_name: Function Name
@return:
"""
try:
self.logger.info("Trying to walk un-pushed function %s for breakpoints", hex(ea))
if not self.runtime_imports.is_func_imported(ea) and self.is_dyn_breakpoints:
self.bp_handler.walk_function(ea)
except Exception as ex:
self.logger.exception("Failed to handle callstack push error for function: %s", hex(ea))
###############################################
# Debugging
def start_debug(self, start_func_ea=None, end_func_ea=None, auto_start=False):
"""
Start Debugging
@param start_func_ea: ea of function to start debugging from
        @param end_func_ea: ea of the function at which to stop debugging
@param auto_start: Automatically start the debugger
@rtype : object
"""
try:
if self.is_dbg_profile:
self.profile_start()
except Exception as ex:
self.logger.error("Failed to start profiling: %s", ex)
try:
self.Hook()
if start_func_ea is not None:
self.is_dyn_breakpoints = True
# If end function address was not explicitly defined, set to end of current function
if end_func_ea is None:
self.address = DIE.Lib.IDAConnector.get_function_end_address(start_func_ea)
self.end_bp = self.address
self.bp_handler.addBP(self.end_bp, "FINAL_BP")
# Walk current function
self.bp_handler.walk_function(start_func_ea)
else:
self.bp_handler.setBPs()
# Set start time
if self.start_time is None:
self.start_time = time.time()
# start the process automatically
if auto_start:
request_start_process(None, None, None)
run_requests()
except Exception as ex:
self.logger.exception("Error while staring debugger: %s", ex)
################################################################################
# Profiling, for debug usage only.
def profile_start(self):
"""
Start profiling the application.
@return:
"""
# Start Profiling
self.pr = cProfile.Profile()
self.pr.enable()
def profile_stop(self):
"""
Stop profiling the application and display results.
@return:
"""
# If profiling is activated:
if self.pr is None:
return False
self.pr.disable()
s = StringIO.StringIO()
sortby = 'tottime'
ps = pstats.Stats(self.pr, stream=s).sort_stats(sortby)
ps.print_stats()
idaapi.msg("%s\n" % (s.getvalue(), ))
|
|
# coding=utf-8
r"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from twilio.base import deserialize
from twilio.base import values
from twilio.base.instance_resource import InstanceResource
from twilio.base.list_resource import ListResource
from twilio.base.page import Page
class MobileList(ListResource):
""" """
def __init__(self, version, account_sid):
"""
Initialize the MobileList
:param Version version: Version that contains the resource
:param account_sid: The SID of the Account that created the resource
:returns: twilio.rest.api.v2010.account.incoming_phone_number.mobile.MobileList
:rtype: twilio.rest.api.v2010.account.incoming_phone_number.mobile.MobileList
"""
super(MobileList, self).__init__(version)
# Path Solution
self._solution = {'account_sid': account_sid, }
self._uri = '/Accounts/{account_sid}/IncomingPhoneNumbers/Mobile.json'.format(**self._solution)
def stream(self, beta=values.unset, friendly_name=values.unset,
phone_number=values.unset, origin=values.unset, limit=None,
page_size=None):
"""
Streams MobileInstance records from the API as a generator stream.
This operation lazily loads records as efficiently as possible until the limit
is reached.
The results are returned as a generator, so this operation is memory efficient.
:param bool beta: Whether to include new phone numbers
:param unicode friendly_name: A string that identifies the resources to read
:param unicode phone_number: The phone numbers of the resources to read
:param unicode origin: Include phone numbers based on their origin. By default, phone numbers of all origin are included.
:param int limit: Upper limit for the number of records to return. stream()
guarantees to never return more than limit. Default is no limit
:param int page_size: Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, stream() will attempt to read the
limit with the most efficient page size, i.e. min(limit, 1000)
:returns: Generator that will yield up to limit results
:rtype: list[twilio.rest.api.v2010.account.incoming_phone_number.mobile.MobileInstance]
"""
limits = self._version.read_limits(limit, page_size)
page = self.page(
beta=beta,
friendly_name=friendly_name,
phone_number=phone_number,
origin=origin,
page_size=limits['page_size'],
)
return self._version.stream(page, limits['limit'], limits['page_limit'])
def list(self, beta=values.unset, friendly_name=values.unset,
phone_number=values.unset, origin=values.unset, limit=None,
page_size=None):
"""
Lists MobileInstance records from the API as a list.
Unlike stream(), this operation is eager and will load `limit` records into
memory before returning.
:param bool beta: Whether to include new phone numbers
:param unicode friendly_name: A string that identifies the resources to read
:param unicode phone_number: The phone numbers of the resources to read
:param unicode origin: Include phone numbers based on their origin. By default, phone numbers of all origin are included.
:param int limit: Upper limit for the number of records to return. list() guarantees
never to return more than limit. Default is no limit
:param int page_size: Number of records to fetch per request, when not set will use
the default value of 50 records. If no page_size is defined
but a limit is defined, list() will attempt to read the limit
with the most efficient page size, i.e. min(limit, 1000)
:returns: Generator that will yield up to limit results
:rtype: list[twilio.rest.api.v2010.account.incoming_phone_number.mobile.MobileInstance]
"""
return list(self.stream(
beta=beta,
friendly_name=friendly_name,
phone_number=phone_number,
origin=origin,
limit=limit,
page_size=page_size,
))
def page(self, beta=values.unset, friendly_name=values.unset,
phone_number=values.unset, origin=values.unset,
page_token=values.unset, page_number=values.unset,
page_size=values.unset):
"""
Retrieve a single page of MobileInstance records from the API.
Request is executed immediately
:param bool beta: Whether to include new phone numbers
:param unicode friendly_name: A string that identifies the resources to read
:param unicode phone_number: The phone numbers of the resources to read
:param unicode origin: Include phone numbers based on their origin. By default, phone numbers of all origin are included.
:param str page_token: PageToken provided by the API
:param int page_number: Page Number, this value is simply for client state
:param int page_size: Number of records to return, defaults to 50
:returns: Page of MobileInstance
:rtype: twilio.rest.api.v2010.account.incoming_phone_number.mobile.MobilePage
"""
params = values.of({
'Beta': beta,
'FriendlyName': friendly_name,
'PhoneNumber': phone_number,
'Origin': origin,
'PageToken': page_token,
'Page': page_number,
'PageSize': page_size,
})
response = self._version.page(
'GET',
self._uri,
params=params,
)
return MobilePage(self._version, response, self._solution)
def get_page(self, target_url):
"""
Retrieve a specific page of MobileInstance records from the API.
Request is executed immediately
:param str target_url: API-generated URL for the requested results page
:returns: Page of MobileInstance
:rtype: twilio.rest.api.v2010.account.incoming_phone_number.mobile.MobilePage
"""
response = self._version.domain.twilio.request(
'GET',
target_url,
)
return MobilePage(self._version, response, self._solution)
def create(self, phone_number, api_version=values.unset,
friendly_name=values.unset, sms_application_sid=values.unset,
sms_fallback_method=values.unset, sms_fallback_url=values.unset,
sms_method=values.unset, sms_url=values.unset,
status_callback=values.unset, status_callback_method=values.unset,
voice_application_sid=values.unset,
voice_caller_id_lookup=values.unset,
voice_fallback_method=values.unset, voice_fallback_url=values.unset,
voice_method=values.unset, voice_url=values.unset,
identity_sid=values.unset, address_sid=values.unset):
"""
Create a new MobileInstance
:param unicode phone_number: The phone number to purchase in E.164 format
:param unicode api_version: The API version to use for incoming calls made to the new phone number
:param unicode friendly_name: A string to describe the new phone number
:param unicode sms_application_sid: The SID of the application to handle SMS messages
:param unicode sms_fallback_method: HTTP method used with sms_fallback_url
:param unicode sms_fallback_url: The URL we call when an error occurs while executing TwiML
:param unicode sms_method: The HTTP method to use with sms url
:param unicode sms_url: The URL we should call when the new phone number receives an incoming SMS message
:param unicode status_callback: The URL we should call to send status information to your application
:param unicode status_callback_method: The HTTP method we should use to call status_callback
:param unicode voice_application_sid: The SID of the application to handle the new phone number
:param bool voice_caller_id_lookup: Whether to lookup the caller's name
:param unicode voice_fallback_method: The HTTP method used with voice_fallback_url
:param unicode voice_fallback_url: The URL we will call when an error occurs in TwiML
:param unicode voice_method: The HTTP method used with the voice_url
:param unicode voice_url: The URL we should call when the phone number receives a call
:param unicode identity_sid: The SID of the Identity resource to associate with the new phone number
:param unicode address_sid: The SID of the Address resource associated with the phone number
:returns: Newly created MobileInstance
:rtype: twilio.rest.api.v2010.account.incoming_phone_number.mobile.MobileInstance
"""
data = values.of({
'PhoneNumber': phone_number,
'ApiVersion': api_version,
'FriendlyName': friendly_name,
'SmsApplicationSid': sms_application_sid,
'SmsFallbackMethod': sms_fallback_method,
'SmsFallbackUrl': sms_fallback_url,
'SmsMethod': sms_method,
'SmsUrl': sms_url,
'StatusCallback': status_callback,
'StatusCallbackMethod': status_callback_method,
'VoiceApplicationSid': voice_application_sid,
'VoiceCallerIdLookup': voice_caller_id_lookup,
'VoiceFallbackMethod': voice_fallback_method,
'VoiceFallbackUrl': voice_fallback_url,
'VoiceMethod': voice_method,
'VoiceUrl': voice_url,
'IdentitySid': identity_sid,
'AddressSid': address_sid,
})
payload = self._version.create(
'POST',
self._uri,
data=data,
)
return MobileInstance(self._version, payload, account_sid=self._solution['account_sid'], )
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.Api.V2010.MobileList>'
class MobilePage(Page):
""" """
def __init__(self, version, response, solution):
"""
Initialize the MobilePage
:param Version version: Version that contains the resource
:param Response response: Response from the API
:param account_sid: The SID of the Account that created the resource
:returns: twilio.rest.api.v2010.account.incoming_phone_number.mobile.MobilePage
:rtype: twilio.rest.api.v2010.account.incoming_phone_number.mobile.MobilePage
"""
super(MobilePage, self).__init__(version, response)
# Path Solution
self._solution = solution
def get_instance(self, payload):
"""
Build an instance of MobileInstance
:param dict payload: Payload response from the API
:returns: twilio.rest.api.v2010.account.incoming_phone_number.mobile.MobileInstance
:rtype: twilio.rest.api.v2010.account.incoming_phone_number.mobile.MobileInstance
"""
return MobileInstance(self._version, payload, account_sid=self._solution['account_sid'], )
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.Api.V2010.MobilePage>'
class MobileInstance(InstanceResource):
""" """
class AddressRequirement(object):
NONE = "none"
ANY = "any"
LOCAL = "local"
FOREIGN = "foreign"
def __init__(self, version, payload, account_sid):
"""
Initialize the MobileInstance
:returns: twilio.rest.api.v2010.account.incoming_phone_number.mobile.MobileInstance
:rtype: twilio.rest.api.v2010.account.incoming_phone_number.mobile.MobileInstance
"""
super(MobileInstance, self).__init__(version)
# Marshaled Properties
self._properties = {
'account_sid': payload.get('account_sid'),
'address_sid': payload.get('address_sid'),
'address_requirements': payload.get('address_requirements'),
'api_version': payload.get('api_version'),
'beta': payload.get('beta'),
'capabilities': payload.get('capabilities'),
'date_created': deserialize.rfc2822_datetime(payload.get('date_created')),
'date_updated': deserialize.rfc2822_datetime(payload.get('date_updated')),
'friendly_name': payload.get('friendly_name'),
'identity_sid': payload.get('identity_sid'),
'phone_number': payload.get('phone_number'),
'origin': payload.get('origin'),
'sid': payload.get('sid'),
'sms_application_sid': payload.get('sms_application_sid'),
'sms_fallback_method': payload.get('sms_fallback_method'),
'sms_fallback_url': payload.get('sms_fallback_url'),
'sms_method': payload.get('sms_method'),
'sms_url': payload.get('sms_url'),
'status_callback': payload.get('status_callback'),
'status_callback_method': payload.get('status_callback_method'),
'trunk_sid': payload.get('trunk_sid'),
'uri': payload.get('uri'),
'voice_application_sid': payload.get('voice_application_sid'),
'voice_caller_id_lookup': payload.get('voice_caller_id_lookup'),
'voice_fallback_method': payload.get('voice_fallback_method'),
'voice_fallback_url': payload.get('voice_fallback_url'),
'voice_method': payload.get('voice_method'),
'voice_url': payload.get('voice_url'),
}
# Context
self._context = None
self._solution = {'account_sid': account_sid, }
@property
def account_sid(self):
"""
:returns: The SID of the Account that created the resource
:rtype: unicode
"""
return self._properties['account_sid']
@property
def address_sid(self):
"""
:returns: The SID of the Address resource associated with the phone number
:rtype: unicode
"""
return self._properties['address_sid']
@property
def address_requirements(self):
"""
:returns: Whether the phone number requires an Address registered with Twilio.
:rtype: MobileInstance.AddressRequirement
"""
return self._properties['address_requirements']
@property
def api_version(self):
"""
:returns: The API version used to start a new TwiML session
:rtype: unicode
"""
return self._properties['api_version']
@property
def beta(self):
"""
:returns: Whether the phone number is new to the Twilio platform
:rtype: bool
"""
return self._properties['beta']
@property
def capabilities(self):
"""
:returns: Indicate if a phone can receive calls or messages
:rtype: unicode
"""
return self._properties['capabilities']
@property
def date_created(self):
"""
:returns: The RFC 2822 date and time in GMT that the resource was created
:rtype: datetime
"""
return self._properties['date_created']
@property
def date_updated(self):
"""
:returns: The RFC 2822 date and time in GMT that the resource was last updated
:rtype: datetime
"""
return self._properties['date_updated']
@property
def friendly_name(self):
"""
:returns: The string that you assigned to describe the resource
:rtype: unicode
"""
return self._properties['friendly_name']
@property
def identity_sid(self):
"""
:returns: The SID of the Identity resource associated with number
:rtype: unicode
"""
return self._properties['identity_sid']
@property
def phone_number(self):
"""
:returns: The phone number in E.164 format
:rtype: unicode
"""
return self._properties['phone_number']
@property
def origin(self):
"""
:returns: The phone number's origin. Can be twilio or hosted.
:rtype: unicode
"""
return self._properties['origin']
@property
def sid(self):
"""
:returns: The unique string that identifies the resource
:rtype: unicode
"""
return self._properties['sid']
@property
def sms_application_sid(self):
"""
:returns: The SID of the application that handles SMS messages sent to the phone number
:rtype: unicode
"""
return self._properties['sms_application_sid']
@property
def sms_fallback_method(self):
"""
:returns: The HTTP method used with sms_fallback_url
:rtype: unicode
"""
return self._properties['sms_fallback_method']
@property
def sms_fallback_url(self):
"""
:returns: The URL that we call when an error occurs while retrieving or executing the TwiML
:rtype: unicode
"""
return self._properties['sms_fallback_url']
@property
def sms_method(self):
"""
:returns: The HTTP method to use with sms_url
:rtype: unicode
"""
return self._properties['sms_method']
@property
def sms_url(self):
"""
:returns: The URL we call when the phone number receives an incoming SMS message
:rtype: unicode
"""
return self._properties['sms_url']
@property
def status_callback(self):
"""
:returns: The URL to send status information to your application
:rtype: unicode
"""
return self._properties['status_callback']
@property
def status_callback_method(self):
"""
:returns: The HTTP method we use to call status_callback
:rtype: unicode
"""
return self._properties['status_callback_method']
@property
def trunk_sid(self):
"""
:returns: The SID of the Trunk that handles calls to the phone number
:rtype: unicode
"""
return self._properties['trunk_sid']
@property
def uri(self):
"""
:returns: The URI of the resource, relative to `https://api.twilio.com`
:rtype: unicode
"""
return self._properties['uri']
@property
def voice_application_sid(self):
"""
:returns: The SID of the application that handles calls to the phone number
:rtype: unicode
"""
return self._properties['voice_application_sid']
@property
def voice_caller_id_lookup(self):
"""
:returns: Whether to lookup the caller's name
:rtype: bool
"""
return self._properties['voice_caller_id_lookup']
@property
def voice_fallback_method(self):
"""
:returns: The HTTP method used with voice_fallback_url
:rtype: unicode
"""
return self._properties['voice_fallback_method']
@property
def voice_fallback_url(self):
"""
:returns: The URL we call when an error occurs in TwiML
:rtype: unicode
"""
return self._properties['voice_fallback_url']
@property
def voice_method(self):
"""
:returns: The HTTP method used with the voice_url
:rtype: unicode
"""
return self._properties['voice_method']
@property
def voice_url(self):
"""
:returns: The URL we call when the phone number receives a call
:rtype: unicode
"""
return self._properties['voice_url']
def __repr__(self):
"""
Provide a friendly representation
:returns: Machine friendly representation
:rtype: str
"""
return '<Twilio.Api.V2010.MobileInstance>'
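
# Hedged usage sketch (not part of the generated resource above): listing Mobile
# numbers through the REST client. The credentials below are placeholders, and the
# accessor path assumes the usual twilio-python layout in which the account's
# IncomingPhoneNumberList exposes this MobileList as `.mobile`.
#
#     from twilio.rest import Client
#
#     client = Client('ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX', 'your_auth_token')
#     for record in client.incoming_phone_numbers.mobile.list(limit=20):
#         print(record.phone_number)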
|
|
#!/usr/bin/env python
import math
import random
import pygame
import sys
screen_width = 640
screen_height = 480
screen_size = screen_width, screen_height
press_events = pygame.KEYDOWN, pygame.MOUSEBUTTONDOWN
screen = None
wait_time = 2000 # Display each shape for 2 seconds
def write_text( screen, text, color, big ):
if big:
height = screen.get_height() / 5
up = screen.get_height() / 2
else:
height = screen_height / 12
up = screen.get_height() - ( screen_height / 24 )
font = pygame.font.Font( None, height )
rend = font.render( text, 1, color )
pos = rend.get_rect(
centerx = screen.get_width() / 2,
centery = up
)
screen.blit( rend, pos )
def timed_wait( time_to_wait, event_types_that_cancel ):
"""
Wait for time_to_wait, but cancel if a relevant event happens.
    Return a (pressed, elapsed_ms) tuple: pressed is True if a relevant event
    cancelled the wait (False if we waited the full time), and elapsed_ms is how
    long we waited in milliseconds.
"""
start_time = pygame.time.get_ticks()
finished_waiting_event_id = pygame.USEREVENT + 1
pygame.time.set_timer( finished_waiting_event_id, time_to_wait )
try:
pygame.event.clear()
pressed = False
waiting = True
time = time_to_wait
while waiting:
evt = pygame.event.wait()
if is_quit( evt ):
quit()
elif evt.type in event_types_that_cancel:
waiting = False
pressed = True
time = pygame.time.get_ticks() - start_time
elif evt.type == finished_waiting_event_id:
waiting = False
finally:
pygame.time.set_timer( finished_waiting_event_id, 0 )
return pressed, time
def start():
global screen
pygame.init()
screen = pygame.display.set_mode( screen_size, pygame.FULLSCREEN )
def quit():
pygame.quit()
sys.exit()
def ready_screen( go_number, correct, time_score ):
screen.fill( pygame.Color( "black" ) )
white = pygame.Color( "white" )
write_text( screen, "Ready?", white, True )
go_number_str = "Turn: %d Correct: %d Score: %d" % (
( go_number + 1 ), correct, time_score )
write_text( screen, go_number_str, pygame.Color( "white" ), False )
pygame.display.flip()
def wait():
time_to_wait = random.randint( 1500, 3000 ) # Between 1.5 and 3 seconds
timed_wait( time_to_wait, () )
def is_quit( evt ):
return (
evt.type == pygame.QUIT or
(
evt.type == pygame.KEYDOWN and
evt.key == pygame.K_ESCAPE
)
)
def shape_wait():
"""
    Wait while we display a shape. Return a (pressed, elapsed_ms) tuple:
    pressed is True if a key was pressed, False otherwise.
"""
return timed_wait( wait_time, press_events ) # 2 seconds
def tick():
colour = pygame.Color( "green" )
w = screen.get_width() / 2
h = screen.get_height() / 4
points = (
( w - w/5, h - h/9 ),
( w, h + h/5 ),
( w + w/3, h - h/3 ),
)
screen.fill( pygame.Color( "black" ) )
pygame.draw.lines( screen, colour, False, points, 20 )
def cross():
colour = pygame.Color( "red" )
w = screen.get_width() / 2
h = screen.get_height() / 4
left = w - w/3
right = w + w/3
top = h - h/3
bottom = h + h/3
start1 = left, top
end1 = right, bottom
start2 = left, bottom
end2 = right, top
screen.fill( pygame.Color( "black" ) )
pygame.draw.line( screen, colour, start1, end1, 20 )
pygame.draw.line( screen, colour, start2, end2, 20 )
def green_success():
tick()
green = pygame.Color( "green" )
white = pygame.Color( "white" )
write_text( screen, "Well done!", green, True )
write_text( screen, "You pressed on green!", white, False )
pygame.display.flip()
timed_wait( 2000, press_events ) # 2 seconds
def green_failure():
cross()
red = pygame.Color( "red" )
white = pygame.Color( "white" )
write_text( screen, "Bad Luck!", red, True )
write_text( screen, "Green means press something!", white, False )
pygame.display.flip()
timed_wait( 2000, press_events ) # 2 seconds
def red_success():
tick()
green = pygame.Color( "green" )
white = pygame.Color( "white" )
write_text( screen, "Well done!", green, True )
write_text( screen, "You didn't press on red!", white, False )
pygame.display.flip()
timed_wait( 2000, press_events ) # 2 seconds
def red_failure():
cross()
red = pygame.Color( "red" )
white = pygame.Color( "white" )
write_text( screen, "Bad Luck!", red, True )
write_text( screen, "Red means don't press anything!", white, False )
pygame.display.flip()
timed_wait( 2000, press_events ) # 2 seconds
def green_shape():
green = pygame.Color( "green" )
centre = ( screen.get_width() / 2, screen.get_height() / 2 )
radius = screen.get_height() / 3
screen.fill( pygame.Color( "white" ) )
pygame.draw.circle( screen, green, centre, radius, 0 )
write_text( screen, "Press something!", pygame.Color( "black" ), False )
pygame.display.flip()
pressed, time = shape_wait()
if pressed:
green_success()
return True, 1, time
else:
green_failure()
return True, 0, time
def red_shape():
red = pygame.Color( "red" )
height = 2 * ( screen.get_height() / 3 )
left = ( screen.get_width() / 2 ) - ( height / 2 )
top = screen.get_height() / 6
screen.fill( pygame.Color( "white" ) )
pygame.draw.rect( screen, red, ( left, top, height, height ), 0 )
write_text( screen, "Don't press!", pygame.Color( "black" ), False )
pygame.display.flip()
pressed, time = shape_wait()
if pressed:
red_failure()
return False, 0, wait_time
else:
red_success()
return False, 1, 0
def shape():
GREEN = 0
RED = 1
shape = random.choice( [GREEN, RED] )
if shape == GREEN:
return green_shape()
else:
return red_shape()
def end( correct, time_score ):
print "You got %d correct answers" % correct
print "You scored %d" % time_score
screen.fill( pygame.Color( "black" ) )
white = pygame.Color( "white" )
write_text( screen, "Thanks for playing!", white, True )
msg = "Correct: %d Score: %d" % ( correct, time_score )
write_text( screen, msg, white, False )
pygame.display.flip()
timed_wait( 0, press_events )
quit()
# We start from here
start()
num_greens = 10 # How many times we play
correct = 0
time_millis = 0
i = 0
while i < num_greens:
max_time = i * wait_time
ready_screen( i, correct, max_time - time_millis )
wait()
wasgreen, correct_points, tm_points = shape()
if wasgreen:
i += 1
correct += correct_points
time_millis += tm_points
max_time = num_greens * wait_time
end( correct, max_time - time_millis )
|