# ---------------------------------------------------------------------------
# repo:    ivanhorvath/openshift-tools
# path:    openshift/installer/vendored/openshift-ansible-3.8.36-1/roles/openshift_logging/filter_plugins/openshift_logging.py
# license: apache-2.0
# ---------------------------------------------------------------------------
'''
Openshift Logging class that provides useful filters used in Logging
'''
import random
import re
def es_storage(os_logging_facts, dc_name, pvc_claim, root='elasticsearch'):
'''Return a hash with the desired storage for the given ES instance'''
deploy_config = os_logging_facts[root]['deploymentconfigs'].get(dc_name)
if deploy_config:
storage = deploy_config['volumes']['elasticsearch-storage']
if storage.get('hostPath'):
return dict(kind='hostpath', path=storage.get('hostPath').get('path'))
if len(pvc_claim.strip()) > 0:
return dict(kind='pvc', pvc_claim=pvc_claim)
return dict(kind='emptydir')
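
# Illustrative usage (a sketch, not part of the original module): the facts
# dict below is hypothetical but mirrors the shape es_storage() expects. The
# filter prefers hostPath, then a non-empty PVC claim, then falls back to
# emptydir.
def _es_storage_example():
    facts = {'elasticsearch': {'deploymentconfigs': {
        'logging-es': {'volumes': {'elasticsearch-storage': {}}}}}}
    assert es_storage(facts, 'logging-es', '') == dict(kind='emptydir')
    assert es_storage(facts, 'logging-es', 'es-pvc') == dict(kind='pvc', pvc_claim='es-pvc')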
def min_cpu(left, right):
'''Return the minimum cpu value of the two values given'''
message = "Unable to evaluate {} cpu value is specified correctly '{}'. Exp whole, decimal or int followed by M"
pattern = re.compile(r"^(\d*\.?\d*)([Mm])?$")
millis_per_core = 1000
if not right:
return left
m_left = pattern.match(left)
if not m_left:
raise RuntimeError(message.format("left", left))
m_right = pattern.match(right)
if not m_right:
raise RuntimeError(message.format("right", right))
left_value = float(m_left.group(1))
right_value = float(m_right.group(1))
if m_left.group(2) not in ["M", "m"]:
left_value = left_value * millis_per_core
if m_right.group(2) not in ["M", "m"]:
right_value = right_value * millis_per_core
response = left
if left_value != min(left_value, right_value):
response = right
return response
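
# A minimal sketch (not in the original module): values without an M/m suffix
# are whole cores and are scaled to millicores before comparison, so "1" core
# compares as 1000m.
def _min_cpu_example():
    assert min_cpu("1", "500m") == "500m"    # 1000 millicores vs 500 millicores
    assert min_cpu("250m", "0.5") == "250m"  # 0.5 core == 500 millicores
    assert min_cpu("100m", None) == "100m"   # an empty right value yields left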
def walk(source, path, default, delimiter='.'):
    '''Walk the source hash given the path and return the value, or the default if not found'''
if not isinstance(source, dict):
raise RuntimeError('The source is not a walkable dict: {} path: {}'.format(source, path))
keys = path.split(delimiter)
max_depth = len(keys)
cur_depth = 0
while cur_depth < max_depth:
if keys[cur_depth] in source:
source = source[keys[cur_depth]]
cur_depth = cur_depth + 1
else:
return default
return source
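
# Illustrative only (not part of the original module): walk() descends one
# delimiter-separated key at a time and stops at the default on a miss.
def _walk_example():
    source = {'a': {'b': {'c': 42}}}
    assert walk(source, 'a.b.c', None) == 42
    assert walk(source, 'a.b.missing', 'fallback') == 'fallback'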
def random_word(source_alpha, length):
''' Returns a random word given the source of characters to pick from and resulting length '''
return ''.join(random.choice(source_alpha) for i in range(length))
def entry_from_named_pair(register_pairs, key):
''' Returns the entry in key given results provided by register_pairs '''
results = register_pairs.get("results")
if results is None:
raise RuntimeError("The dict argument does not have a 'results' entry. "
"Must not have been created using 'register' in a loop")
for result in results:
item = result.get("item")
if item is not None:
name = item.get("name")
if name == key:
return result["content"]
raise RuntimeError("There was no entry found in the dict that had an item with a name that matched {}".format(key))
def serviceaccount_name(qualified_sa):
''' Returns the simple name from a fully qualified name '''
return qualified_sa.split(":")[-1]
def serviceaccount_namespace(qualified_sa, default=None):
''' Returns the namespace from a fully qualified name '''
seg = qualified_sa.split(":")
if len(seg) > 1:
return seg[-2]
if default:
return default
return seg[-1]
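
# Illustrative only (not part of the original module): both filters split on
# ':' as in fully qualified names like system:serviceaccount:<ns>:<name>.
def _serviceaccount_example():
    qualified = "system:serviceaccount:logging:aggregated-logging-fluentd"
    assert serviceaccount_name(qualified) == "aggregated-logging-fluentd"
    assert serviceaccount_namespace(qualified) == "logging"
    assert serviceaccount_namespace("fluentd", default="logging") == "logging"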
def flatten_dict(data, parent_key=None):
""" This filter plugin will flatten a dict and its sublists into a single dict
"""
if not isinstance(data, dict):
raise RuntimeError("flatten_dict failed, expects to flatten a dict")
merged = dict()
for key in data:
if parent_key is not None:
insert_key = '.'.join((parent_key, key))
else:
insert_key = key
if isinstance(data[key], dict):
merged.update(flatten_dict(data[key], insert_key))
else:
merged[insert_key] = data[key]
return merged
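
# Illustrative only (not part of the original module): nested keys join with
# '.' into a single flat dict.
def _flatten_dict_example():
    data = {"a": {"b": 1, "c": {"d": 2}}, "e": 3}
    assert flatten_dict(data) == {"a.b": 1, "a.c.d": 2, "e": 3}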
# pylint: disable=too-few-public-methods
class FilterModule(object):
''' OpenShift Logging Filters '''
# pylint: disable=no-self-use, too-few-public-methods
def filters(self):
''' Returns the names of the filters provided by this class '''
return {
'random_word': random_word,
'entry_from_named_pair': entry_from_named_pair,
'min_cpu': min_cpu,
'es_storage': es_storage,
'serviceaccount_name': serviceaccount_name,
'serviceaccount_namespace': serviceaccount_namespace,
'walk': walk,
"flatten_dict": flatten_dict
}

# ---------------------------------------------------------------------------
# repo:    lkraider/iban-generator
# path:    iban_rest.py
# license: bsd-2-clause
# ---------------------------------------------------------------------------
#!/usr/bin/env python
# coding: utf-8
import webapp2
import iban
import json
class IBAN(webapp2.RequestHandler):
def get(self):
self.response.headers['Content-Type'] = 'application/json'
data = {
'ispb': '',
'agency': '',
'account': '',
'country': 'BR',
'account_type': 'C',
'account_owner': '1',
}
args = self.request.params
if args.get('iban'):
return self._check_iban(args['iban'])
data.update((k, args[k].replace('-', '').strip())
for k in set(data).intersection(args))
if not all(data.viewvalues()):
data['ispb'] = iban.ISPB
self.response.write(json.dumps(data))
else:
return self._make_iban(data)
def _check_iban(self, value):
check = iban.check_iban(value.upper())
self.response.write(json.dumps({'valid': check}))
def _make_iban(self, data):
try:
value = iban.make_iban(
data['ispb'], data['agency'], data['account'], data['country'],
data['account_type'], data['account_owner'])
except Exception:
self.response.status = 403
value = 'Invalid parameters'
self.response.write(json.dumps({'iban': value}))
application = webapp2.WSGIApplication([
('/rest/', IBAN),
], debug=True)
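
# Example interaction (a sketch, not part of the original module; the IBAN
# value below is a placeholder):
#   GET /rest/?iban=<iban-string>              -> {"valid": true} or {"valid": false}
#   GET /rest/                                 -> the empty form fields plus the default ISPB
#   GET /rest/?ispb=...&agency=...&account=... -> {"iban": "<generated>"} or HTTP 403 on bad input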

# ---------------------------------------------------------------------------
# repo:    orgito/ansible
# path:    lib/ansible/modules/network/f5/bigip_device_group_member.py
# license: gpl-3.0
# ---------------------------------------------------------------------------
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2017, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: bigip_device_group_member
short_description: Manages members in a device group
description:
- Manages members in a device group. Members in a device group can only
be added or removed, never updated. This is because the members are
identified by unique name values and changing that name would invalidate
the uniqueness.
version_added: 2.5
options:
name:
description:
- Specifies the name of the device that you want to add to the
device group. Often this will be the hostname of the device.
This member must be trusted by the device already. Trusting
can be done with the C(bigip_device_trust) module and the
C(peer_hostname) option to that module.
required: True
device_group:
description:
- The device group that you want to add the member to.
required: True
state:
description:
- When C(present), ensures that the device group member exists.
- When C(absent), ensures the device group member is removed.
default: present
choices:
- present
- absent
extends_documentation_fragment: f5
author:
- Tim Rupp (@caphrim007)
- Wojciech Wypior (@wojtek0806)
'''
EXAMPLES = r'''
- name: Add the current device to the "device_trust_group" device group
bigip_device_group_member:
name: "{{ inventory_hostname }}"
device_group: device_trust_group
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
- name: Add the hosts in the current scope to "device_trust_group"
bigip_device_group_member:
name: "{{ item }}"
device_group: device_trust_group
provider:
password: secret
server: lb.mydomain.com
user: admin
loop: "{{ hostvars.keys() }}"
run_once: true
delegate_to: localhost
'''
RETURN = r'''
# only common fields returned
'''
from ansible.module_utils.basic import AnsibleModule
try:
from library.module_utils.network.f5.bigip import F5RestClient
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import cleanup_tokens
from library.module_utils.network.f5.common import f5_argument_spec
from library.module_utils.network.f5.common import exit_json
from library.module_utils.network.f5.common import fail_json
except ImportError:
from ansible.module_utils.network.f5.bigip import F5RestClient
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import cleanup_tokens
from ansible.module_utils.network.f5.common import f5_argument_spec
from ansible.module_utils.network.f5.common import exit_json
from ansible.module_utils.network.f5.common import fail_json
class Parameters(AnsibleF5Parameters):
api_map = {}
api_attributes = []
returnables = []
updatables = []
class ApiParameters(Parameters):
pass
class ModuleParameters(Parameters):
pass
class Changes(Parameters):
def to_return(self):
result = {}
try:
for returnable in self.returnables:
change = getattr(self, returnable)
if isinstance(change, dict):
result.update(change)
else:
result[returnable] = change
result = self._filter_params(result)
except Exception:
pass
return result
class UsableChanges(Changes):
pass
class ReportableChanges(Changes):
pass
class Difference(object):
pass
class ModuleManager(object):
def __init__(self, *args, **kwargs):
self.module = kwargs.get('module', None)
self.client = kwargs.get('client', None)
self.want = Parameters(params=self.module.params)
self.have = None
self.changes = Changes()
def _set_changed_options(self):
changed = {}
for key in Parameters.returnables:
if getattr(self.want, key) is not None:
changed[key] = getattr(self.want, key)
if changed:
self.changes = Changes(params=changed)
def _announce_deprecations(self, result):
warnings = result.pop('__warnings', [])
for warning in warnings:
self.module.deprecate(
msg=warning['msg'],
version=warning['version']
)
def exec_module(self):
changed = False
result = dict()
state = self.want.state
if state == "present":
changed = self.present()
elif state == "absent":
changed = self.absent()
reportable = ReportableChanges(params=self.changes.to_return())
changes = reportable.to_return()
result.update(**changes)
result.update(dict(changed=changed))
self._announce_deprecations(result)
return result
def present(self):
if self.exists():
return False
else:
return self.create()
def absent(self):
if self.exists():
return self.remove()
return False
def create(self):
self._set_changed_options()
if self.module.check_mode:
return True
self.create_on_device()
return True
def remove(self):
if self.module.check_mode:
return True
self.remove_from_device()
if self.exists():
raise F5ModuleError("Failed to remove the member from the device group.")
return True
def exists(self):
uri = "https://{0}:{1}/mgmt/tm/cm/device-group/{2}/devices/{3}".format(
self.client.provider['server'],
self.client.provider['server_port'],
self.want.device_group,
self.want.name
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError:
return False
if resp.status == 404 or 'code' in response and response['code'] == 404:
return False
return True
def create_on_device(self):
params = self.changes.api_params()
params['name'] = self.want.name
params['partition'] = self.want.partition
uri = "https://{0}:{1}/mgmt/tm/cm/device-group/{2}/devices/".format(
self.client.provider['server'],
self.client.provider['server_port'],
self.want.device_group
)
resp = self.client.api.post(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] in [400, 403]:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
def remove_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/cm/device-group/{2}/devices/{3}".format(
self.client.provider['server'],
self.client.provider['server_port'],
self.want.device_group,
self.want.name
)
response = self.client.api.delete(uri)
if response.status == 200:
return True
raise F5ModuleError(response.content)
class ArgumentSpec(object):
def __init__(self):
self.supports_check_mode = True
argument_spec = dict(
name=dict(required=True),
device_group=dict(required=True),
state=dict(
default='present',
choices=['absent', 'present']
),
)
self.argument_spec = {}
self.argument_spec.update(f5_argument_spec)
self.argument_spec.update(argument_spec)
def main():
spec = ArgumentSpec()
module = AnsibleModule(
argument_spec=spec.argument_spec,
supports_check_mode=spec.supports_check_mode
)
client = F5RestClient(**module.params)
try:
mm = ModuleManager(module=module, client=client)
results = mm.exec_module()
cleanup_tokens(client)
exit_json(module, results, client)
except F5ModuleError as ex:
cleanup_tokens(client)
fail_json(module, ex, client)
if __name__ == '__main__':
main()

# ---------------------------------------------------------------------------
# repo:    CreativeOutbreak/wagtail
# path:    wagtail/wagtailsearch/views/editorspicks.py
# license: bsd-3-clause
# ---------------------------------------------------------------------------
from django.shortcuts import render, redirect, get_object_or_404
from django.contrib.auth.decorators import permission_required
from django.contrib import messages
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.utils.translation import ugettext as _
from wagtail.wagtailsearch import models, forms
from wagtail.wagtailadmin.forms import SearchForm
@permission_required('wagtailadmin.access_admin')
def index(request):
page = request.GET.get('p', 1)
query_string = request.GET.get('q', "")
queries = models.Query.objects.filter(editors_picks__isnull=False).distinct()
# Search
if query_string:
queries = queries.filter(query_string__icontains=query_string)
# Pagination
paginator = Paginator(queries, 20)
try:
queries = paginator.page(page)
except PageNotAnInteger:
queries = paginator.page(1)
except EmptyPage:
queries = paginator.page(paginator.num_pages)
if request.is_ajax():
return render(request, "wagtailsearch/editorspicks/results.html", {
'queries': queries,
'query_string': query_string,
})
else:
return render(request, 'wagtailsearch/editorspicks/index.html', {
'queries': queries,
'query_string': query_string,
'search_form': SearchForm(data=dict(q=query_string) if query_string else None, placeholder=_("Search editor's picks")),
})
def save_editorspicks(query, new_query, editors_pick_formset):
# Set sort_order
for i, form in enumerate(editors_pick_formset.ordered_forms):
form.instance.sort_order = i
# Save
if editors_pick_formset.is_valid():
editors_pick_formset.save()
# If query was changed, move all editors picks to the new query
if query != new_query:
editors_pick_formset.get_queryset().update(query=new_query)
return True
else:
return False
@permission_required('wagtailadmin.access_admin')
def add(request):
if request.POST:
# Get query
query_form = forms.QueryForm(request.POST)
if query_form.is_valid():
query = models.Query.get(query_form['query_string'].value())
# Save editors picks
editors_pick_formset = forms.EditorsPickFormSet(request.POST, instance=query)
if save_editorspicks(query, query, editors_pick_formset):
messages.success(request, _("Editor's picks for '{0}' created.").format(query))
return redirect('wagtailsearch_editorspicks_index')
else:
editors_pick_formset = forms.EditorsPickFormSet()
else:
query_form = forms.QueryForm()
editors_pick_formset = forms.EditorsPickFormSet()
return render(request, 'wagtailsearch/editorspicks/add.html', {
'query_form': query_form,
'editors_pick_formset': editors_pick_formset,
})
@permission_required('wagtailadmin.access_admin')
def edit(request, query_id):
query = get_object_or_404(models.Query, id=query_id)
if request.POST:
# Get query
query_form = forms.QueryForm(request.POST)
if query_form.is_valid():
new_query = models.Query.get(query_form['query_string'].value())
# Save editors picks
editors_pick_formset = forms.EditorsPickFormSet(request.POST, instance=query)
if save_editorspicks(query, new_query, editors_pick_formset):
messages.success(request, _("Editor's picks for '{0}' updated.").format(new_query))
return redirect('wagtailsearch_editorspicks_index')
else:
query_form = forms.QueryForm(initial=dict(query_string=query.query_string))
editors_pick_formset = forms.EditorsPickFormSet(instance=query)
return render(request, 'wagtailsearch/editorspicks/edit.html', {
'query_form': query_form,
'editors_pick_formset': editors_pick_formset,
'query': query,
})
@permission_required('wagtailadmin.access_admin')
def delete(request, query_id):
query = get_object_or_404(models.Query, id=query_id)
if request.POST:
query.editors_picks.all().delete()
messages.success(request, _("Editor's picks deleted."))
return redirect('wagtailsearch_editorspicks_index')
return render(request, 'wagtailsearch/editorspicks/confirm_delete.html', {
'query': query,
})

# ---------------------------------------------------------------------------
# repo:    cypod/arsenalsuite
# path:    cpp/apps/burner/examples/fvm_test.py
# license: gpl-2.0
# ---------------------------------------------------------------------------
from blur.quickinit import *
from blur.absubmit import *
import sys
testSequence = 'C:/temp/test_v2_0000.tif'
testOutput = 'C:/temp/test.avi'
job = JobFusionVideoMaker()
job.setJobType( JobType.recordByName( 'FusionVideoMaker' ) )
job.setSequenceFrameStart( 0 )
job.setSequenceFrameEnd( 400 )
job.setInputFramePath( testSequence )
job.setHost( Host.currentHost() )
job.setUser( User.currentUser() )
job.setName( 'Fusion_Video_Maker_Test' )
job.setOutputPath( testOutput )
job.setPriority( 50 )
# Create the submission object. This is responsible for making sure the job has all prerequisites.
sub = Submitter()
# Will call qApp->exit() after the job submits or encounters a submission error
sub.setExitAppOnFinish( True )
sub.setJob( job )
# This adds a single output with a single task(1), it's not possible
# for multiple hosts to work on a single video output, so we do the whole thing as a single task
sub.addJobOutput( testOutput, "Output1", "1" )
# If this needs to wait for a render to finish, add the job as a dependency
# sub.addDependencies( JobList().append( job ) )
sub.submit()
sys.exit( QCoreApplication.instance().exec_() )

# ---------------------------------------------------------------------------
# repo:    jackylk/incubator-carbondata
# path:    python/pycarbon/tests/hello_world/dataset_with_normal_schema/python_hello_world_carbon.py
# license: apache-2.0
# ---------------------------------------------------------------------------
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Minimal example of how to read samples from a dataset generated by `generate_external_dataset_carbon.py`
using plain Python"""
from __future__ import print_function
import argparse
import jnius_config
from pycarbon.reader import make_reader
from pycarbon.tests import DEFAULT_CARBONSDK_PATH
def python_hello_world(dataset_url='file:///tmp/carbon_external_dataset'):
# Reading data from the non-Pycarbon Carbon via pure Python
with make_reader(dataset_url, schema_fields=["id", "value1", "value2"]) as reader:
for schema_view in reader:
# make_reader() returns batches of rows instead of individual rows
print("Batched read:\nid: {0} value1: {1} value2: {2}".format(
schema_view.id, schema_view.value1, schema_view.value2))
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Python hello world')
parser.add_argument('-c', '--carbon-sdk-path', type=str, default=DEFAULT_CARBONSDK_PATH,
help='carbon sdk path')
args = parser.parse_args()
jnius_config.set_classpath(args.carbon_sdk_path)
python_hello_world()

# ---------------------------------------------------------------------------
# repo:    keon/algorithms
# path:    algorithms/dp/min_cost_path.py
# license: mit
# ---------------------------------------------------------------------------
"""
author @goswami-rahul
To find minimum cost path
from station 0 to station N-1,
where cost of moving from ith station to jth station is given as:
Matrix of size (N x N)
where Matrix[i][j] denotes the cost of moving from
station i --> station j for i < j
NOTE that entries Matrix[i][j] with i > j do not
mean anything, and hence are represented by -1 or INF
For the input below (cost matrix),
the minimum cost is obtained along the path { 0 --> 1 --> 3 },
i.e. cost[0][1] + cost[1][3] = 65
the Output will be:
The Minimum cost to reach station 4 is 65
Time Complexity: O(n^2)
Space Complexity: O(n)
"""
INF = float("inf")
def min_cost(cost):
n = len(cost)
# dist[i] stores minimum cost from 0 --> i.
dist = [INF] * n
dist[0] = 0 # cost from 0 --> 0 is zero.
for i in range(n):
for j in range(i+1,n):
dist[j] = min(dist[j], dist[i] + cost[i][j])
return dist[n-1]
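
# A smaller worked case (illustrative, not from the original file): with three
# stations, dist evolves [0, inf, inf] -> [0, 10, 75] -> [0, 10, 45], since
# going 0 -> 1 -> 2 (10 + 35) beats the direct hop 0 -> 2 (75).
def _min_cost_example():
    assert min_cost([[0, 10, 75],
                     [-1, 0, 35],
                     [-1, -1, 0]]) == 45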
if __name__ == '__main__':
cost = [ [ 0, 15, 80, 90], # cost[i][j] is the cost of
[-1, 0, 40, 50], # going from i --> j
[-1, -1, 0, 70],
[-1, -1, -1, 0] ] # cost[i][j] = -1 for i > j
total_len = len(cost)
mcost = min_cost(cost)
assert mcost == 65
print("The Minimum cost to reach station %d is %d" % (total_len, mcost))

# ---------------------------------------------------------------------------
# repo:    mahabs/nitro
# path:    nssrc/com/citrix/netscaler/nitro/resource/config/ssl/sslservicegroup_sslcipher_binding.py
# license: apache-2.0
# ---------------------------------------------------------------------------
#
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class sslservicegroup_sslcipher_binding(base_resource) :
""" Binding class showing the sslcipher that can be bound to sslservicegroup.
"""
def __init__(self) :
self._cipheraliasname = ""
self._description = ""
self._servicegroupname = ""
self._ciphername = ""
self.___count = 0
@property
def ciphername(self) :
try :
return self._ciphername
except Exception as e:
raise e
@ciphername.setter
def ciphername(self, ciphername) :
try :
self._ciphername = ciphername
except Exception as e:
raise e
@property
def cipheraliasname(self) :
"""The name of the cipher group/alias/name configured for the SSL service group.
"""
try :
return self._cipheraliasname
except Exception as e:
raise e
@cipheraliasname.setter
def cipheraliasname(self, cipheraliasname) :
"""The name of the cipher group/alias/name configured for the SSL service group.
"""
try :
self._cipheraliasname = cipheraliasname
except Exception as e:
raise e
@property
def servicegroupname(self) :
"""The name of the SSL service to which the SSL policy needs to be bound.<br/>Minimum length = 1.
"""
try :
return self._servicegroupname
except Exception as e:
raise e
@servicegroupname.setter
def servicegroupname(self, servicegroupname) :
"""The name of the SSL service to which the SSL policy needs to be bound.<br/>Minimum length = 1
"""
try :
self._servicegroupname = servicegroupname
except Exception as e:
raise e
@property
def description(self) :
"""The description of the cipher.
"""
try :
return self._description
except Exception as e:
raise e
@description.setter
def description(self, description) :
"""The description of the cipher.
"""
try :
self._description = description
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(sslservicegroup_sslcipher_binding_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.sslservicegroup_sslcipher_binding
except Exception as e :
raise e
def _get_object_name(self) :
""" Returns the value of object identifier argument
"""
try :
if (self.servicegroupname) :
return str(self.servicegroupname)
return None
except Exception as e :
raise e
@classmethod
def add(cls, client, resource) :
try :
if resource and type(resource) is not list :
updateresource = sslservicegroup_sslcipher_binding()
updateresource.servicegroupname = resource.servicegroupname
updateresource.ciphername = resource.ciphername
return updateresource.update_resource(client)
else :
if resource and len(resource) > 0 :
updateresources = [sslservicegroup_sslcipher_binding() for _ in range(len(resource))]
for i in range(len(resource)) :
updateresources[i].servicegroupname = resource[i].servicegroupname
updateresources[i].ciphername = resource[i].ciphername
return cls.update_bulk_request(client, updateresources)
except Exception as e :
raise e
@classmethod
def delete(cls, client, resource) :
try :
if resource and type(resource) is not list :
deleteresource = sslservicegroup_sslcipher_binding()
deleteresource.servicegroupname = resource.servicegroupname
deleteresource.ciphername = resource.ciphername
return deleteresource.delete_resource(client)
else :
if resource and len(resource) > 0 :
deleteresources = [sslservicegroup_sslcipher_binding() for _ in range(len(resource))]
for i in range(len(resource)) :
deleteresources[i].servicegroupname = resource[i].servicegroupname
deleteresources[i].ciphername = resource[i].ciphername
return cls.delete_bulk_request(client, deleteresources)
except Exception as e :
raise e
@classmethod
def get(cls, service, servicegroupname) :
""" Use this API to fetch sslservicegroup_sslcipher_binding resources.
"""
try :
obj = sslservicegroup_sslcipher_binding()
obj.servicegroupname = servicegroupname
response = obj.get_resources(service)
return response
except Exception as e:
raise e
@classmethod
def get_filtered(cls, service, servicegroupname, filter_) :
""" Use this API to fetch filtered set of sslservicegroup_sslcipher_binding resources.
Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = sslservicegroup_sslcipher_binding()
obj.servicegroupname = servicegroupname
option_ = options()
option_.filter = filter_
response = obj.getfiltered(service, option_)
return response
except Exception as e:
raise e
@classmethod
def count(cls, service, servicegroupname) :
""" Use this API to count sslservicegroup_sslcipher_binding resources configued on NetScaler.
"""
try :
obj = sslservicegroup_sslcipher_binding()
obj.servicegroupname = servicegroupname
option_ = options()
option_.count = True
response = obj.get_resources(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
@classmethod
def count_filtered(cls, service, servicegroupname, filter_) :
""" Use this API to count the filtered set of sslservicegroup_sslcipher_binding resources.
Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = sslservicegroup_sslcipher_binding()
obj.servicegroupname = servicegroupname
option_ = options()
option_.count = True
option_.filter = filter_
response = obj.getfiltered(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
class Ocspcheck:
Mandatory = "Mandatory"
Optional = "Optional"
class Crlcheck:
Mandatory = "Mandatory"
Optional = "Optional"
class sslservicegroup_sslcipher_binding_response(base_response) :
def __init__(self, length=1) :
self.sslservicegroup_sslcipher_binding = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.sslservicegroup_sslcipher_binding = [sslservicegroup_sslcipher_binding() for _ in range(length)]
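
# Usage sketch (illustrative; the address, credentials, and service group name
# below are placeholders, and error handling is omitted):
#
#     from nssrc.com.citrix.netscaler.nitro.service.nitro_service import nitro_service
#     client = nitro_service("10.0.0.1", "https")
#     client.login("nsroot", "password")
#     bindings = sslservicegroup_sslcipher_binding.get(client, "my_servicegroup")
#     total = sslservicegroup_sslcipher_binding.count(client, "my_servicegroup")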

# ---------------------------------------------------------------------------
# repo:    arbrandes/xblock-drag-and-drop-v2
# path:    tests/integration/test_events.py
# license: agpl-3.0
# ---------------------------------------------------------------------------
from ddt import data, ddt, unpack
from mock import Mock, patch
from selenium.webdriver.common.keys import Keys
from workbench.runtime import WorkbenchRuntime
from drag_and_drop_v2.default_data import (
TOP_ZONE_TITLE, TOP_ZONE_ID, MIDDLE_ZONE_TITLE, MIDDLE_ZONE_ID, BOTTOM_ZONE_ID,
ITEM_CORRECT_FEEDBACK, ITEM_INCORRECT_FEEDBACK,
ITEM_TOP_ZONE_NAME, ITEM_MIDDLE_ZONE_NAME,
)
from tests.integration.test_base import BaseIntegrationTest, DefaultDataTestMixin, InteractionTestBase, ItemDefinition
from tests.integration.test_interaction import DefaultDataTestMixin, ParameterizedTestsMixin
from tests.integration.test_interaction_assessment import DefaultAssessmentDataTestMixin, AssessmentTestMixin
class BaseEventsTests(InteractionTestBase, BaseIntegrationTest):
def setUp(self):
mock = Mock()
context = patch.object(WorkbenchRuntime, 'publish', mock)
context.start()
self.addCleanup(context.stop)
self.publish = mock
super(BaseEventsTests, self).setUp()
@ddt
class EventsFiredTest(DefaultDataTestMixin, ParameterizedTestsMixin, BaseEventsTests):
"""
Tests that the analytics events are fired and in the proper order.
"""
# These events must be fired in this order.
scenarios = (
{
'name': 'edx.drag_and_drop_v2.loaded',
'data': {},
},
{
'name': 'edx.drag_and_drop_v2.item.picked_up',
'data': {'item_id': 0},
},
{
'name': 'grade',
'data': {'max_value': 1, 'value': (2.0 / 5)},
},
{
'name': 'edx.drag_and_drop_v2.item.dropped',
'data': {
'is_correct': True,
'item': ITEM_TOP_ZONE_NAME,
'item_id': 0,
'location': TOP_ZONE_TITLE,
'location_id': TOP_ZONE_ID,
},
},
{
'name': 'edx.drag_and_drop_v2.feedback.opened',
'data': {
'content': ITEM_CORRECT_FEEDBACK.format(zone=TOP_ZONE_TITLE),
'truncated': False,
},
},
{
'name': 'edx.drag_and_drop_v2.feedback.closed',
'data': {
'manually': False,
'content': ITEM_CORRECT_FEEDBACK.format(zone=TOP_ZONE_TITLE),
'truncated': False,
},
},
)
def _get_scenario_xml(self): # pylint: disable=no-self-use
return "<vertical_demo><drag-and-drop-v2/></vertical_demo>"
@data(*enumerate(scenarios)) # pylint: disable=star-args
@unpack
def test_event(self, index, event):
self.parameterized_item_positive_feedback_on_good_move(self.items_map)
dummy, name, published_data = self.publish.call_args_list[index][0]
self.assertEqual(name, event['name'])
self.assertEqual(published_data, event['data'])
@ddt
class AssessmentEventsFiredTest(
DefaultAssessmentDataTestMixin, AssessmentTestMixin, BaseEventsTests
):
scenarios = (
{
'name': 'edx.drag_and_drop_v2.loaded',
'data': {},
},
{
'name': 'edx.drag_and_drop_v2.item.picked_up',
'data': {'item_id': 0},
},
{
'name': 'edx.drag_and_drop_v2.item.dropped',
'data': {
'is_correct': False,
'item': ITEM_TOP_ZONE_NAME,
'item_id': 0,
'location': MIDDLE_ZONE_TITLE,
'location_id': MIDDLE_ZONE_ID,
},
},
{
'name': 'edx.drag_and_drop_v2.item.picked_up',
'data': {'item_id': 1},
},
{
'name': 'edx.drag_and_drop_v2.item.dropped',
'data': {
'is_correct': False,
'item': ITEM_MIDDLE_ZONE_NAME,
'item_id': 1,
'location': TOP_ZONE_TITLE,
'location_id': TOP_ZONE_ID,
},
},
{
'name': 'grade',
'data': {'max_value': 1, 'value': (1.0 / 5)},
},
{
'name': 'edx.drag_and_drop_v2.feedback.opened',
'data': {
'content': "\n".join([ITEM_INCORRECT_FEEDBACK, ITEM_INCORRECT_FEEDBACK]),
'truncated': False,
},
},
)
def test_event(self):
self.scroll_down(pixels=100)
self.place_item(0, MIDDLE_ZONE_ID)
self.wait_until_ondrop_xhr_finished(self._get_item_by_value(0))
self.place_item(1, TOP_ZONE_ID)
self.wait_until_ondrop_xhr_finished(self._get_item_by_value(0))
self.click_submit()
self.wait_for_ajax()
for index, event in enumerate(self.scenarios):
dummy, name, published_data = self.publish.call_args_list[index][0]
self.assertEqual(name, event['name'])
self.assertEqual(published_data, event['data'])
def test_grade(self):
"""
Test grading after submitting solution in assessment mode
"""
self.place_item(0, TOP_ZONE_ID, Keys.RETURN) # Correctly placed item
self.place_item(1, BOTTOM_ZONE_ID, Keys.RETURN) # Incorrectly placed item
self.place_item(4, MIDDLE_ZONE_ID, Keys.RETURN) # Incorrectly placed decoy
self.click_submit()
events = self.publish.call_args_list
published_grade = next((event[0][2] for event in events if event[0][1] == 'grade'))
expected_grade = {'max_value': 1, 'value': (1.0 / 5.0)}
self.assertEqual(published_grade, expected_grade)
@ddt
class ItemDroppedEventTest(DefaultDataTestMixin, BaseEventsTests):
"""
Test that the item.dropped event behaves properly.
"""
items_map = {
0: ItemDefinition(0, "Has name", "", 'zone-1', "Zone 1", "Yes", "No"),
1: ItemDefinition(1, "", "https://placehold.it/100x100", 'zone-2', "Zone 2", "Yes", "No"),
}
scenarios = (
(
['zone-1', 'zone-2'],
[
{
'is_correct': True,
'item': "Has name",
'item_id': 0,
'location': 'Zone 1',
'location_id': 'zone-1'
},
{
'is_correct': True,
'item': "https://placehold.it/100x100",
'item_id': 1,
'location': 'Zone 2',
'location_id': 'zone-2'
}
],
),
(
['zone-2', 'zone-1'],
[
{
'is_correct': False,
'item': "Has name",
'item_id': 0,
'location': 'Zone 2',
'location_id': 'zone-2'
},
{
'is_correct': False,
'item': "https://placehold.it/100x100",
'item_id': 1,
'location': 'Zone 1',
'location_id': 'zone-1'
}
],
),
)
def _get_scenario_xml(self):
return self._get_custom_scenario_xml("data/test_item_dropped.json")
@data(*scenarios) # pylint: disable=star-args
@unpack
def test_item_dropped_event(self, placement, expected_events):
for i, zone in enumerate(placement):
self.place_item(i, zone, Keys.RETURN)
events = self.publish.call_args_list
event_name = 'edx.drag_and_drop_v2.item.dropped'
published_events = [event[0][2] for event in events if event[0][1] == event_name]
self.assertEqual(published_events, expected_events)

# ---------------------------------------------------------------------------
# repo:    BreizhGeek/wavedrompy
# path:    wavedrom/wavedrom.py
# license: mit
# ---------------------------------------------------------------------------
#!/usr/bin/python
# The MIT License (MIT)
#
# Copyright (c) 2011-2016 Aliaksei Chapyzhenka
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# Translated to Python from original file:
# https://github.com/drom/wavedrom/blob/master/src/WaveDrom.js
#
import sys
import json
import math
from . import waveskin, css
import argparse
from attrdict import AttrDict
import svgwrite
class WaveDrom(object):
container = AttrDict({
"defs": svgwrite.container.Defs,
"g": svgwrite.container.Group,
"marker": svgwrite.container.Marker,
"use": svgwrite.container.Use,
})
element = AttrDict({
"rect": svgwrite.shapes.Rect,
"path": svgwrite.path.Path,
"text": svgwrite.text.Text,
"tspan": svgwrite.text.TSpan,
})
def __init__(self):
self.font_width = 7
self.lane = AttrDict({
"xs": 20, # tmpgraphlane0.width
"ys": 20, # tmpgraphlane0.height
"xg": 120, # tmpgraphlane0.x
"yg": 0, # head gap
"yh0": 0, # head gap title
"yh1": 0, # head gap
"yf0": 0, # foot gap
"yf1": 0, # foot gap
"y0": 5, # tmpgraphlane0.y
"yo": 30, # tmpgraphlane1.y - y0
"tgo": -10, # tmptextlane0.x - xg
"ym": 15, # tmptextlane0.y - y0
"xlabel": 6, # tmptextlabel.x - xg
"xmax": 1,
"scale": 1,
"head": {},
"foot": {}
})
def genBrick(self, texts="", extra="", times=""):
R = []
if len(texts) == 4:
for j in range(times):
R.append(texts[0])
for i in range(extra):
R.append(texts[1])
R.append(texts[2])
for i in range(extra):
R.append(texts[3])
return R
if len(texts) == 1:
texts.append(texts[0])
R.append(texts[0])
for i in range(times * (2 * (extra + 1)) - 1):
R.append(texts[1])
return R
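
    # Expansion sketch (not in the original source): with a 4-entry pattern,
    # each period contributes texts[0], `extra` copies of texts[1], texts[2],
    # then `extra` copies of texts[3]. For example
    #   genBrick(["pclk", "111", "nclk", "000"], extra=1, times=2)
    # yields ['pclk', '111', 'nclk', '000', 'pclk', '111', 'nclk', '000'].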
def genFirstWaveBrick(self, text="", extra="", times=""):
pattern = {
"p": ["pclk", "111", "nclk", "000"],
"n": ["nclk", "000", "pclk", "111"],
"P": ["Pclk", "111", "nclk", "000"],
"N": ["Nclk", "000", "pclk", "111"],
"l": ["000"],
"L": ["000"],
"0": ["000"],
"h": ["111"],
"H": ["111"],
"1": ["111"],
"=": ["vvv-2"],
"2": ["vvv-2"],
"3": ["vvv-3"],
"4": ["vvv-4"],
"5": ["vvv-5"],
"d": ["ddd"],
"u": ["uuu"],
"z": ["zzz"]
}
return self.genBrick(pattern.get(text, ["xxx"]), extra, times)
def genWaveBrick(self, text="", extra="", times=""):
x1 = {"p": "pclk", "n": "nclk",
"P": "Pclk", "N": "Nclk",
"h": "pclk", "l": "nclk",
"H": "Pclk", "L": "Nclk"}
x2 = {"0": "0", "1": "1", "x": "x", "d": "d", "u": "u", "z": "z",
"=": "v", "2": "v", "3": "v", "4": "v", "5": "v"}
x3 = {"0": "", "1": "", "x": "", "d": "", "u": "", "z": "",
"=": "-2", "2": "-2", "3": "-3", "4": "-4", "5": "-5"}
y1 = {
"p": "0", "n": "1",
"P": "0", "N": "1",
"h": "1", "l": "0",
"H": "1", "L": "0",
"0": "0", "1": "1",
"x": "x", "d": "d", "u": "u", "z": "z",
"=": "v", "2": "v", "3": "v", "4": "v", "5": "v"}
y2 = {
"p": "", "n": "",
"P": "", "N": "",
"h": "", "l": "",
"H": "", "L": "",
"0": "", "1": "",
"x": "", "d": "", "u": "", "z": "",
"=": "-2", "2": "-2", "3": "-3", "4": "-4", "5": "-5"}
x4 = {
"p": "111", "n": "000",
"P": "111", "N": "000",
"h": "111", "l": "000",
"H": "111", "L": "000",
"0": "000", "1": "111", "x": "xxx", "d": "ddd", "u": "uuu", "z": "zzz",
"=": "vvv-2", "2": "vvv-2", "3": "vvv-3", "4": "vvv-4", "5": "vvv-5"}
x5 = {"p": "nclk", "n": "pclk", "P": "nclk", "N": "pclk"}
x6 = {"p": "000", "n": "111", "P": "000", "N": "111"}
xclude = {"hp": "111", "Hp": "111", "ln": "000", "Ln": "000",
"nh": "111", "Nh": "111", "pl": "000", "Pl": "000"}
# atext = text.split()
atext = text
tmp0 = x4.get(atext[1])
tmp1 = x1.get(atext[1])
if tmp1 is None:
tmp2 = x2.get(atext[1])
if tmp2 is None:
# unknown
return self.genBrick(["xxx"], extra, times)
else:
tmp3 = y1.get(atext[0])
if tmp3 is None:
# unknown
return self.genBrick(["xxx"], extra, times)
# soft curves
return self.genBrick([tmp3 + "m" + tmp2 + y2[atext[0]] + x3[atext[1]], tmp0], extra, times)
else:
tmp4 = xclude.get(text)
if tmp4 is not None:
tmp1 = tmp4
# sharp curves
tmp2 = x5.get(atext[1])
if tmp2 is None:
# hlHL
return self.genBrick([tmp1, tmp0], extra, times)
else:
# pnPN
return self.genBrick([tmp1, tmp0, tmp2, x6[atext[1]]], extra, times)
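
    # Transition sketch (illustrative): the two-character pair encodes the
    # previous and current wave state, e.g. genWaveBrick("01", 0, 1) maps to
    # genBrick(["0m1", "111"], 0, 1) -- a soft 0-to-1 transition brick followed
    # by a high-level brick.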
def parseWaveLane(self, text="", extra=""):
R = []
Stack = text
Next = Stack[0]
Stack = Stack[1:]
Repeats = 1
while len(Stack) and (Stack[0] in [".", "|"]): # repeaters parser
Stack = Stack[1:]
Repeats += 1
R.extend(self.genFirstWaveBrick(Next, extra, Repeats))
while len(Stack):
Top = Next
Next = Stack[0]
Stack = Stack[1:]
Repeats = 1
while len(Stack) and (Stack[0] in [".", "|"]): # repeaters parser
Stack = Stack[1:]
Repeats += 1
R.extend(self.genWaveBrick((Top + Next), extra, Repeats))
for i in range(self.lane.phase):
R = R[1:]
return R
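
    # For instance (illustrative): parseWaveLane("p...", 0) expands the first
    # character to a clock period and each '.' repeats it, yielding
    # ['pclk', 'nclk', 'pclk', 'nclk', 'pclk', 'nclk', 'pclk', 'nclk'] before
    # self.lane.phase trims leading bricks.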
def parseWaveLanes(self, sig=""):
def data_extract(e):
tmp = e.get("data")
if tmp is not None:
tmp = tmp.split() if self.is_type_str(tmp) else tmp
return tmp
content = []
for sigx in sig:
self.lane.period = sigx.get("period", 1)
self.lane.phase = int(sigx.get("phase", 0) * 2)
sub_content = []
sub_content.append([sigx.get("name", " "), sigx.get("phase", 0)])
if sigx.get("wave"):
sub_content.append(self.parseWaveLane(sigx["wave"], int(self.lane.period * self.lane.hscale - 1)))
else:
sub_content.append(None)
sub_content.append(data_extract(sigx))
content.append(sub_content)
return content
def findLaneMarkers(self, lanetext=""):
lcount = 0
gcount = 0
ret = []
for idx, val in enumerate(lanetext):
if val in ["vvv-2", "vvv-3", "vvv-4", "vvv-5"]:
lcount += 1
else:
if lcount != 0:
ret.append(gcount - ((lcount + 1) / 2))
lcount = 0
gcount += 1
if lcount != 0:
ret.append(gcount - ((lcount + 1) / 2))
return ret
def renderWaveLane(self, root=[], content="", index=0):
"""
root=[]
"""
xmax = 0
xgmax = 0
glengths = []
svgns = "http://www.w3.org/2000/svg"
xlinkns = "http://www.w3.org/1999/xlink"
xmlns = "http://www.w3.org/XML/1998/namespace"
for j, val in enumerate(content):
name = val[0][0]
if name: # check name
dy = self.lane.y0 + j * self.lane.yo
g = self.container.g(id="wavelane_{j}_{index}".format(j=j, index=index))
g.translate(0, dy)
# g = [
# "g",
# {
# # "id": "_".join(["wavelane", str(j), str(index)]),
# # "transform": "".join(["translate(0,", str(self.lane.y0 + j * self.lane.yo), ")"])
# "id": "wavelane_{j}_{index}".format(j=j, index=index),
# "transform": "translate(0,{dy})".format(dy=dy),
# }
# ]
# root.append(g)
title = self.element.text("", x=[self.lane.tgo], y=[self.lane.ym], text_anchor="end")
title.add(self.element.tspan(name))
title["xml:space"] = "preserve"
title["class"] = "info"
# title = [
# "text",
# {
# "x": self.lane.tgo,
# "y": self.lane.ym,
# "class": "info",
# "text-anchor": "end",
# "xml:space": "preserve"
# },
# ["tspan", name]
# ]
# g.append(title)
g.add(title)
glengths.append(len(name) * self.font_width + self.font_width)
xoffset = val[0][1]
xoffset = math.ceil(2 * xoffset) - 2 * xoffset if xoffset > 0 else -2 * xoffset
gg = self.container.g(id="wavelane_draw_{j}_{index}".format(j=j, index=index))
gg.translate(xoffset * self.lane.xs, 0)
# gg = [
# "g",
# {
# "id": "wavelane_draw_{j}_{index}".format(j=j, index=index),
# "transform": "translate({x},0)".format(x=xoffset * self.lane.xs)
# }
# ]
# g.append(gg)
if val[1]:
for i in range(len(val[1])):
b = self.container.use(href="#{}".format(val[1][i]))
b.translate(i * self.lane.xs)
# b = [
# "use",
# {
# # "id": "use_" + str(i) + "_" + str(j) + "_" + str(index),
# "xmlns:xlink": xlinkns,
# "xlink:href": "#{}".format(val[1][i]),
# "transform": "translate({})".format(i * self.lane.xs)
# }
# ]
# gg.append(b)
gg.add(b)
if val[2] and len(val[2]):
labels = self.findLaneMarkers(val[1])
if len(labels) != 0:
for k in range(len(labels)):
if val[2] and k < len(val[2]):
tx = int(labels[k]) * self.lane.xs + self.lane.xlabel
title = self.element.text("", x=[tx], y=[self.lane.ym], text_anchor="middle")
title.add(self.element.tspan(val[2][k]))
title["xml:space"] = "preserve"
# title = [
# "text",
# {
# "x": int(labels[k]) * self.lane.xs + self.lane.xlabel,
# "y": self.lane.ym,
# "text-anchor": "middle",
# "xml:space": "preserve"
# },
# ["tspan", val[2][k]]
# ]
# gg.append(title)
gg.add(title)
if len(val[1]) > xmax:
xmax = len(val[1])
g.add(gg)
root.add(g)
self.lane.xmax = xmax
self.lane.xg = xgmax + 20
return glengths
def renderMarks(self, root=[], content="", index=0):
def get_elem(e):
if len(e) == 3:
ret = self.element[e[0]](e[2])
ret.attribs = e[1]
elif len(e) == 2:
ret = self.element[e[0]](e[1])
else:
ret = self.element.tspan(e)
return ret
def captext(g, cxt, anchor, y):
if cxt.get(anchor) and cxt[anchor].get("text"):
tmark = self.element.text("", x=[float(cxt.xmax) * float(cxt.xs) / 2],
y=[y], text_anchor="middle", fill="#000")
tmark["xml:space"] = "preserve"
[tmark.add(get_elem(e)) for e in cxt[anchor]["text"][1:]]
# print(cxt[anchor]["text"]) # list
# cxt[anchor]["text"] = ["tspan", # [0
# ["tspan", {"class": "error h1"}, "error"], # [1] case 1
# ["tspan", "E=mc"], # [2] case 2
# "default", # [3] case 3
# ]
# tmark = [
# "text",
# {
# "x": float(cxt.xmax) * float(cxt.xs) / 2,
# "y": y,
# "text-anchor": "middle",
# "fill": "#000",
# "xml:space": "preserve"
# }, cxt[anchor]["text"]
# ]
# g.append(tmark)
g.add(tmark)
def ticktock(g, cxt, ref1, ref2, x, dx, y, length):
L = []
if cxt.get(ref1) is None or cxt[ref1].get(ref2) is None:
return
val = cxt[ref1][ref2]
if self.is_type_str(val):
val = val.split()
elif type(val) is int:
offset = val
val = []
for i in range(length):
val.append(i + offset)
if type(val) is list:
if len(val) == 0:
return
elif len(val) == 1:
offset = val[0]
if self.is_type_str(offset):
L = val
else:
for i in range(length):
                            L.append(i + offset)  # L starts empty, so append rather than assign by index
                elif len(val) == 2:
                    dp = 0
                    tmp = str(val[1]).split(".")
                    if len(tmp) == 2:
                        dp = len(tmp[1])  # decimal places carried by the step
                    try:
                        offset = float(val[0])
                        step = float(val[1])
                    except (TypeError, ValueError):
                        L = val  # non-numeric entries are used verbatim as labels
                    else:
                        offset = step * offset
                        for i in range(length):
                            # format every tick with the same precision as the step
                            L.append("{0:.{1}f}".format(step * i + offset, dp))
else:
L = val
else:
return
for i in range(length):
                tmp = L[i]
                # pass the tick label text into the element; an empty string here
                # would render blank tick marks
                tmark = self.element.text(str(tmp), x=[i * dx + x], y=[y], text_anchor="middle")
tmark["class"] = "muted"
tmark["xml:space"] = "preserve"
# tmark = [
# "text",
# {
# "x": i * dx + x,
# "y": y,
# "text-anchor": "middle",
# "class": "muted",
# "xml:space": "preserve"
# }, str(tmp)
# ]
# g.append(tmark)
g.add(tmark)
mstep = 2 * int(self.lane.hscale)
mmstep = mstep * self.lane.xs
marks = int(self.lane.xmax / mstep)
gy = len(content) * int(self.lane.yo)
# g = ["g", {"id": "gmarks_{}".format(index)}]
g = self.container.g(id="gmarks_{}".format(index))
root.add(g)
# root.insert(0, g)
for i in range(marks + 1):
gg = self.element.path(id="gmark_{i}_{index}".format(i=i, index=index),
d="m {dx},0 0,{gy}".format(dx=i * mmstep, gy=gy),
style="stroke:#888;stroke-width:0.5;stroke-dasharray:1,3")
# gg = [
# "path",
# {
# "id": "gmark_{i}_{index}".format(i=i, index=index),
# "d": "m {dx},0 0,{gy}".format(dx=i * mmstep, gy=gy),
# "style": "stroke:#888;stroke-width:0.5;stroke-dasharray:1,3"
# }
# ]
# g.append(gg)
g.add(gg)
captext(g, self.lane, "head", -33 if self.lane.yh0 else -13)
captext(g, self.lane, "foot", gy + (45 if self.lane.yf0 else 25))
ticktock(g, self.lane, "head", "tick", 0, mmstep, -5, marks + 1)
ticktock(g, self.lane, "head", "tock", mmstep / 2, mmstep, -5, marks)
ticktock(g, self.lane, "foot", "tick", 0, mmstep, gy + 15, marks + 1)
ticktock(g, self.lane, "foot", "tock", mmstep / 2, mmstep, gy + 15, marks)
return g
def renderArcs(self, root, source, index, top):
Stack = []
Edge = AttrDict({"words": [], "frm": 0, "shape": "", "to": 0, "label": ""})
Events = AttrDict({})
svgns = "http://www.w3.org/2000/svg"
xmlns = "http://www.w3.org/XML/1998/namespace"
if source:
for idx, val in enumerate(source):
self.lane.period = val.get("period", 1)
self.lane.phase = int(val.get("phase", 0) * 2)
text = val.get("node")
if text:
Stack = text
pos = 0
while len(Stack):
eventname = Stack[0]
Stack = Stack[1:]
x = int(float(self.lane.xs) * (2 * pos * self.lane.period *
self.lane.hscale - self.lane.phase) + float(self.lane.xlabel))
y = int(idx * self.lane.yo + self.lane.y0 + float(self.lane.ys) * 0.5)
if eventname != ".":
Events[eventname] = AttrDict({"x": str(x), "y": str(y)})
pos += 1
# gg = ["g", {"id": "wavearcs_{index}".format(index=index)}]
gg = self.container.g(id="wavearcs_{index}".format(index=index))
# root.append(gg)
const_style = AttrDict({
"a": "marker-end:url(#arrowhead);stroke:#0041c4;stroke-width:1;fill:none",
"b": "marker-end:url(#arrowhead);marker-start:url(#arrowtail);stroke:#0041c4;stroke-width:1;fill:none"
})
if top.get("edge"):
for i, val in enumerate(top["edge"]):
Edge.words = val.split()
Edge.label = val[len(Edge.words[0]):]
Edge.label = Edge.label[1:]
Edge.frm = Edge.words[0][0]
Edge.to = Edge.words[0][-1]
Edge.shape = Edge.words[0][1:-1]
frm = AttrDict(Events[Edge.frm])
to = AttrDict(Events[Edge.to])
# gmark = [
# "path",
# {
# "id": "gmark_{frm}_{to}".format(frm=Edge.frm, to=Edge.to),
# "d": "M {fx},{fy} {tx},{ty}".format(fx=frm.x, fy=frm.y, tx=to.x, ty=to.y),
# "style": "fill:none;stroke:#00F;stroke-width:1"
# }
# ]
# gg.append(gmark)
dx = float(to.x) - float(frm.x)
dy = float(to.y) - float(frm.y)
lx = (float(frm.x) + float(to.x)) / 2
ly = (float(frm.y) + float(to.y)) / 2
pattern = {
"~": {"d": "M {fx},{fy} c {dx},{dy} {dxx},{dyy} {dxxx},{dyyy}".format(fx=frm.x, fy=frm.y,
dx=(0.7 * dx), dy=0,
dxx=(0.3 * dx), dyy=dy,
dxxx=dx, dyyy=dy)},
"-~": {"d": "M {fx},{fy} c {dx},{dy} {dxx},{dyy} {dxxx},{dyyy}".format(fx=frm.x, fy=frm.y,
dx=(0.7 * dx), dy=0,
dxx=dx, dyy=dy,
dxxx=dx, dyyy=dy)},
"~-": {"d": "M {fx},{fy} c {dx},{dy} {dxx},{dyy} {dxxx},{dyyy}".format(fx=frm.x, fy=frm.y,
dx=0, dy=0,
dxx=(0.3 * dx), dyy=dy,
dxxx=dx, dyyy=dy)},
"-|": {"d": "m {fx},{fy} {dx},{dy} {dxx},{dyy}".format(fx=frm.x, fy=frm.y,
dx=dx, dy=0,
dxx=0, dyy=dy)},
"|-": {"d": "m {fx},{fy} {dx},{dy} {dxx},{dyy}".format(fx=frm.x, fy=frm.y,
dx=0, dy=dy,
dxx=dx, dyy=0)},
"-|-": {"d": "m {fx},{fy} {dx},{dy} {dxx},{dyy} {dxxx},{dyyy}".format(fx=frm.x, fy=frm.y,
dx=(dx / 2), dy=0,
dxx=0, dyy=dy,
dxxx=(dx / 2), dyyy=0)},
"->": {"style": const_style.a},
"~>": {"style": const_style.a,
"d": "M {fx},{fy} c {dx},{dy} {dxx},{dyy} {dxxx},{dyyy}".format(fx=frm.x, fy=frm.y,
dx=(0.7 * dx), dy=0,
dxx=(0.3 * dx), dyy=dy,
dxxx=dx, dyyy=dy)},
"-~>": {"style": const_style.a,
"d": "M {fx},{fy} c {dx},{dy} {dxx},{dyy} {dxxx},{dyyy}".format(fx=frm.x, fy=frm.y,
dx=(0.7 * dx), dy=0,
dxx=dx, dyy=dy,
dxxx=dx, dyyy=dy)},
"~->": {"style": const_style.a,
"d": "M {fx},{fy} c {dx},{dy} {dxx},{dyy} {dxxx},{dyyy}".format(fx=frm.x, fy=frm.y,
dx=0, dy=0,
dxx=(0.3 * dx), dyy=dy,
dxxx=dx, dyyy=dy)},
"-|>": {"style": const_style.a,
"d": "m {fx},{fy} {dx},{dy} {dxx},{dyy}".format(fx=frm.x, fy=frm.y,
dx=dx, dy=0,
dxx=0, dyy=dy)},
"|->": {"style": const_style.a,
"d": "m {fx},{fy} {dx},{dy} {dxx},{dyy}".format(fx=frm.x, fy=frm.y,
dx=0, dy=dy,
dxx=dx, dyy=0
)},
"-|->": {"style": const_style.a,
"d": "m {fx},{fy} {dx},{dy} {dxx},{dyy} {dxxx},{dyyy}".format(fx=frm.x, fy=frm.y,
dx=(dx / 2), dy=0,
dxx=0, dyy=dy,
dxxx=(dx / 2), dyyy=0
)},
"<->": {"style": const_style.b},
"<~>": {"style": const_style.b,
"d": "M {fx},{fy} c {dx},{dy} {dxx},{dyy} {dxxx},{dyyy}".format(fx=frm.x, fy=frm.y,
dx=(0.7 * dx), dy=0,
dxx=(0.3 * dx), dyy=dy,
dxxx=dx, dyyy=dy
)},
"<-~>": {"style": const_style.b,
"d": "M {fx},{fy} c {dx},{dy} {dxx},{dyy} {dxxx},{dyyy}".format(fx=frm.x, fy=frm.y,
dx=(0.7 * dx), dy=0,
dxx=dx, dyy=dy,
dxxx=dx, dyyy=dy
)},
"<-|>": {"style": const_style.b,
"d": "m {fx},{fy} {dx},{dy} {dxx},{dyy}".format(fx=frm.x, fy=frm.y,
dx=dx, dy=0,
dxx=0, dyy=dy
)},
"<-|->": {"style": const_style.b,
"d": "m {fx},{fy} {dx},{dy} {dxx},{dyy} {dxxx},{dyyy}".format(fx=frm.x, fy=frm.y,
dx=(dx / 2), dy=0,
dxx=0, dyy=dy,
dxxx=(dx / 2), dyyy=0,
)}
}
pat = pattern.get(Edge.shape, {"style": "fill:none;stroke:#00F;stroke-width:1",
"d": "M {fx},{fy} {tx},{ty}".format(fx=frm.x, fy=frm.y,
tx=to.x, ty=to.y)
})
# gmark[1].update(pat)
gmark = self.element.path(id="gmark_{frm}_{to}".format(frm=Edge.frm, to=Edge.to),
d=pat.get("d", "M {fx},{fy} {tx},{ty}".format(fx=frm.x, fy=frm.y,
tx=to.x, ty=to.y)),
style=pat.get("style", "fill:none;stroke:#00F;stroke-width:1"))
gg.add(gmark)
if Edge.label:
if Edge.shape in ["-~", "-~>", "<-~>"]:
lx = float(frm.x) + (float(to.x) - float(frm.x)) * 0.75
elif Edge.shape in ["~-", "~->"]:
lx = float(frm.x) + (float(to.x) - float(frm.x)) * 0.25
elif Edge.shape in ["-|", "-|>", "<-|>"]:
lx = float(to.x)
elif Edge.shape in ["|-", "|->"]:
lx = float(frm.x)
# if Edge.shape == "-~":
# lx = float(frm.x) + (float(to.x) - float(frm.x)) * 0.75
# if Edge.shape == "-~>":
# lx = float(frm.x) + (float(to.x) - float(frm.x)) * 0.75
# if Edge.shape == "<-~>":
# lx = float(frm.x) + (float(to.x) - float(frm.x)) * 0.75
# if Edge.shape == "~-":
# lx = float(frm.x) + (float(to.x) - float(frm.x)) * 0.25
# if Edge.shape == "~->":
# lx = float(frm.x) + (float(to.x) - float(frm.x)) * 0.25
# if Edge.shape == "-|":
# lx = float(to.x)
# if Edge.shape == "-|>":
# lx = float(to.x)
# if Edge.shape == "<-|>":
# lx = float(to.x)
# if Edge.shape == "|-":
# lx = float(frm.x)
# if Edge.shape == "|->":
# lx = float(frm.x)
lwidth = len(Edge.label) * self.font_width
label = self.element.text("", style="font-size:10px;", text_anchor="middle",
x=[int(lx)], y=[int(ly + 3)])
label.add(self.element.tspan(Edge.label))
# label = [
# "text",
# {
# "style": "font-size:10px;",
# "text-anchor": "middle",
# "xml:space": "preserve",
# "x": int(lx),
# "y": int(ly + 3)
# },
# ["tspan", Edge.label]
# ]
underlabel = self.element.rect(insert=(int(lx - lwidth / 2), int(ly - 5)),
size=(lwidth, 9), style="fill:#FFF;")
# underlabel = [
# "rect",
# {
# "height": 9,
# "style": "fill:#FFF;",
# "width": lwidth,
# "x": int(lx - lwidth / 2),
# "y": int(ly - 5)
# }
# ]
# gg.append(underlabel)
# gg.append(label)
gg.add(underlabel)
gg.add(label)
for k in Events:
if k.islower():
if int(Events[k].x) > 0:
lwidth = len(k) * self.font_width
lx = float(Events[k].x) - float(lwidth) / 2
ly = int(Events[k].y) - 4
underlabel = self.element.rect(insert=(lx, ly),
size=(lwidth, 8), style="fill:#FFF;")
# underlabel = [
# "rect",
# {
# "x": lx,
# "y": ly,
# "height": 8,
# "width": lwidth,
# "style": "fill:#FFF;"
# }
# ]
# gg.append(underlabel)
gg.add(underlabel)
lx = int(Events[k].x)
ly = int(Events[k].y) + 2
label = self.element.text(k, style="font-size:8px;", text_anchor="middle",
x=[lx], y=[ly])
# label = [
# "text",
# {
# "style": "font-size:8px;",
# "x": lx,
# "y": ly,
# # "width": lwidth,
# "text-anchor": "middle"
# },
# k
# ]
# gg.append(label)
gg.add(label)
root.add(gg)
# return root
def parseConfig(self, source={}):
"""
source = AttrDict()
"""
self.lane.hscale = 1
if self.lane.get("hscale0"):
self.lane.hscale = self.lane.hscale0
if source and source.get("config") and source.get("config").get("hscale"):
hscale = round(source.get("config").get("hscale"))
if hscale > 0:
if hscale > 100:
hscale = 100
self.lane.hscale = hscale
self.lane.yh0 = 0
self.lane.yh1 = 0
if source and source.get("head"):
self.lane.head = source["head"]
if source.get("head").get("tick", 0) == 0:
self.lane.yh0 = 20
if source.get("head").get("tock", 0) == 0:
self.lane.yh0 = 20
if source.get("head").get("text"):
self.lane.yh1 = 46
self.lane.head["text"] = source["head"]["text"]
self.lane.yf0 = 0
self.lane.yf1 = 0
if source and source.get("foot"):
self.lane.foot = source["foot"]
if source.get("foot").get("tick", 0) == 0:
self.lane.yf0 = 20
if source.get("foot").get("tock", 0) == 0:
self.lane.yf0 = 20
if source.get("foot").get("text"):
self.lane.yf1 = 46
self.lane.foot["text"] = source["foot"]["text"]
def rec(self, tmp=[], state={}):
"""
tmp = source["signal"] = []
state = AttrDict({"x": 0, "y": 0, "xmax": 0, "width": [], "lanes": [], "groups": []})
[
{ "name": "clk", "wave": "p..Pp..P"},
["Master",
["ctrl",
{"name": "write", "wave": "01.0...."},
{"name": "read", "wave": "0...1..0"}
],
{ "name": "addr", "wave": "x3.x4..x", "data": "A1 A2"},
{ "name": "wdata", "wave": "x3.x....", "data": "D1" }
],
{},
["Slave",
["ctrl",
{"name": "ack", "wave": "x01x0.1x"}
],
{ "name": "rdata", "wave": "x.....4x", "data": "Q2"}
]
]
"""
name = str(tmp[0])
delta_x = 25
state.x += delta_x
for idx, val in enumerate(tmp):
if type(val) is list:
old_y = state.y
self.rec(val, state)
state["groups"].append({"x": state.xx,
"y": old_y,
"height": state.y - old_y,
"name": state.name})
elif type(val) is dict:
state["lanes"].append(val)
state["width"].append(state.x)
state.y += 1
state.xx = state.x
state.x -= delta_x
state.name = name
def anotherTemplate(self, index, source):
def get_container(elem):
"""
elem = [
"g", # elem[0]
{"id": "socket"}, # elem[1]
[ # elem[2]
"rect", # elem[2][0]
{"y": "15", "x": "6", "height": "20", "width": "20"} # elem[2][1]
]
]
["g", {"id": "0mx"}, # elem[0:1]
["path", {"d": "M3,20 9,0 20,0", "class": "s1"}], # elem[2]
["path", {"d": "m20,15 -5,5", "class": "s2"}], # elem[3]
["path", {"d": "M20,10 10,20", "class": "s2"}],
["path", {"d": "M20,5 5,20", "class": "s2"}],
["path", {"d": "M20,0 4,16", "class": "s2"}],
["path", {"d": "M15,0 6,9", "class": "s2"}],
["path", {"d": "M10,0 9,1", "class": "s2"}],
["path", {"d": "m0,20 20,0", "class": "s1"}] # elem[9]
],
"""
ctype = elem[0]
ret = self.container[ctype]()
ret.attribs = elem[1]
            def gen_elem(e):
                # build an svgwrite element from a legacy ["name", {attrs}] pair
                if e[0] == "path":
                    attr = e[1]
                    elem = self.element.path(d=attr["d"])
                    elem.attribs = attr
                elif e[0] == "rect":
                    attr = e[1]
                    x = attr["x"]
                    y = attr["y"]
                    w = attr["width"]
                    h = attr["height"]
                    elem = self.element.rect(insert=(x, y), size=(w, h))
                    elem.attribs = attr
                return elem

            for e in elem[2:]:
                ret.add(gen_elem(e))
            return ret
# default.add_stylesheet("css/default.css", title="default")
skin = waveskin.WaveSkin["default"]
if source.get("config") and source.get("config").get("skin"):
if waveskin.WaveSkin.get(source.get("config").get("skin")):
skin = waveskin.WaveSkin[source.get("config").get("skin")]
template = svgwrite.Drawing(id="svgcontent_{index}".format(index=index))
if index == 0:
template.defs.add(template.style(css.css.default))
        for e in skin[3][1:]:
            template.defs.add(get_container(e))
self.lane.xs = int(skin[3][1][2][1]["width"])
self.lane.ys = int(skin[3][1][2][1]["height"])
self.lane.xlabel = int(skin[3][1][2][1]["x"])
self.lane.ym = int(skin[3][1][2][1]["y"])
return template
    def insertSVGTemplate(self, index=0, parent=None, source=None):
        """
        index = 0
        parent = []
        source = {}
        """
        # Avoid mutable default arguments: parent is extended in place below,
        # so a shared default list would accumulate state across calls.
        if parent is None:
            parent = []
        if source is None:
            source = {}
e = waveskin.WaveSkin["default"]
if source.get("config") and source.get("config").get("skin"):
if waveskin.WaveSkin.get(source.get("config").get("skin")):
e = waveskin.WaveSkin[source.get("config").get("skin")]
if index == 0:
# get unit size
# ["rect", {"y": "15", "x": "6", "height": "20", "width": "20"}]
self.lane.xs = int(e[3][1][2][1]["width"])
self.lane.ys = int(e[3][1][2][1]["height"])
self.lane.xlabel = int(e[3][1][2][1]["x"])
self.lane.ym = int(e[3][1][2][1]["y"])
else:
e = ["svg",
{"id": "svg",
"xmlns": "http://www.w3.org/2000/svg",
"xmlns:xlink": "http://www.w3.org/1999/xlink",
"height": "0"},
[ # e[-1]
"g", # e[-1][0]
{"id": "waves"}, # e[-1][1]
[ # e[-1][2]
"g", # e[-1][2][0]
{"id": "lanes"} # e[-1][2][1]
],
[ # e[-1][3]
"g", # e[-1][3][0]
{"id": "groups"} # e[-1][3][1]
]
]
]
e[-1][1]["id"] = "waves_{index}".format(index=index)
e[-1][2][1]["id"] = "lanes_{index}".format(index=index)
e[-1][3][1]["id"] = "groups_{index}".format(index=index)
e[1]["id"] = "svgcontent_{index}".format(index=index)
e[1]["height"] = 0
parent.extend(e)
    def renderWaveForm(self, index=0, source=None, output=None):
        """
        index = 0
        source = {}
        output = []
        """
        if source is None:
            source = {}
        if output is None:
            output = []
xmax = 0
root = []
groups = []
if source.get("signal"):
# self.insertSVGTemplate(index, output, source)
template = self.anotherTemplate(index, source)
waves = template.g(id="waves_{index}".format(index=index))
lanes = template.g(id="lanes_{index}".format(index=index))
groups = template.g(id="groups_{index}".format(index=index))
self.parseConfig(source)
ret = AttrDict({"x": 0, "y": 0, "xmax": 0, "width": [], "lanes": [], "groups": []})
self.rec(source["signal"], ret) # parse lanes
content = self.parseWaveLanes(ret.lanes)
glengths = self.renderWaveLane(lanes, content, index)
for i, val in enumerate(glengths):
xmax = max(xmax, (val + ret.width[i]))
# self.renderMarks(root, content, index)
marks = self.renderMarks(lanes, content, index)
self.renderArcs(lanes, ret.lanes, index, source)
self.renderGaps(lanes, ret.lanes, index)
self.renderGroups(groups, ret.groups, index)
self.lane.xg = int(math.ceil(float(xmax - self.lane.tgo) / float(self.lane.xs))) * self.lane.xs
width = self.lane.xg + self.lane.xs * (self.lane.xmax + 1)
height = len(content) * self.lane.yo + self.lane.yh0 + self.lane.yh1 + self.lane.yf0 + self.lane.yf1
# output[1] = {
# "id": "svgcontent_{}".format(index),
# "xmlns": "http://www.w3.org/2000/svg",
# "xmlns:xlink": "http://www.w3.org/1999/xlink",
# "width": str(width),
# "height": str(height),
# "viewBox": "0 0 {w} {h}".format(w=width, h=height),
# "overflow": "hidden"
# }
template["width"] = width
template["height"] = height
template.viewbox(0, 0, width, height)
dx = self.lane.xg + 0.5
dy = float(self.lane.yh0) + float(self.lane.yh1)
lanes.translate(dx, dy)
# output[-1][2][1]["transform"] = "translate({dx},{dy})".format(dx=dx, dy=dy)
waves.add(lanes)
waves.add(groups)
template.add(waves)
return template
# output[-1][2].extend(root)
# output[-1][3].extend(groups)
def renderGroups(self, root=[], groups=[], index=0):
svgns = "http://www.w3.org/2000/svg",
xmlns = "http://www.w3.org/XML/1998/namespace"
for i, val in enumerate(groups):
dx = groups[i].x + 0.5
dy = groups[i].y * self.lane.yo + 3.5 + self.lane.yh0 + self.lane.yh1
h = int(groups[i]["height"] * self.lane.yo - 16)
group = self.element.path(id="group_{i}_{index}".format(i=i, index=index),
d="m {dx},{dy} c -3,0 -5,2 -5,5 l 0,{h}".format(dx=dx, dy=dy, h=h),
style="stroke:#0041c4;stroke-width:1;fill:none")
# group = [
# "path",
# {
# # "id": "group_" + str(i) + "_" + str(index),
# # "d": "m " + str(groups[i]["x"] + 0.5) + "," + str(groups[i]["y"] * self.lane.yo + 3.5 + self.lane.yh0 + self.lane.yh1) + " c -3,0 -5,2 -5,5 l 0," + str(int(groups[i]["height"] * self.lane.yo - 16)) + " c 0,3 2,5 5,5",
# "id": "group_{i}_{index}".format(i=i, index=index),
# "d": "m {dx},{dy} c -3,0 -5,2 -5,5 l 0,{h}".format(dx=dx, dy=dy, h=h),
# "style": "stroke:#0041c4;stroke-width:1;fill:none"
# }
# ]
# root.append(group)
root.add(group)
name = groups[i]["name"]
x = int(groups[i]["x"] - 10)
y = int(self.lane.yo * (groups[i]["y"] + (float(groups[i]["height"]) / 2)) +
self.lane.yh0 + self.lane.yh1)
label = self.container.g()
label.translate(x, y)
gg = self.container.g()
gg.rotate(270)
t = self.element.text("", text_anchor="middle")
t["class"] = "info"
t["xml:space"] = "preserve"
t.add(self.element.tspan(name))
gg.add(t)
label.add(gg)
# label = [
# ["g",
# # {"transform": "translate(" + x + "," + y + ")"},
# {"transform": "translate({x},{y})".format(x=x, y=y)},
# ["g", {"transform": "rotate(270)"},
# "text",
# {
# "text-anchor": "middle",
# "class": "info",
# "xml:space": "preserve"
# },
# ["tspan", name]
# ]
# ]
# ]
# root.append(label)
root.add(label)
def renderGaps(self, root, source, index):
Stack = []
svgns = "http://www.w3.org/2000/svg",
xlinkns = "http://www.w3.org/1999/xlink"
if source:
gg = self.container.g(id="wavegaps_{index}".format(index=index))
# gg = [
# "g",
# {"id": "wavegaps_{index}".format(index=index)}
# ]
for idx, val in enumerate(source):
self.lane.period = val.get("period", 1)
self.lane.phase = int(val.get("phase", 0) * 2)
dy = self.lane.y0 + idx * self.lane.yo
g = self.container.g(id="wavegap_{i}_{index}".format(i=idx, index=index))
g.translate(0, dy)
# g = [
# "g",
# {
# # "id": "wavegap_" + str(i) + "_" + str(index),
# # "transform": "translate(0," + str(self.lane.y0 + i * self.lane.yo) + ")"
# "id": "wavegap_{i}_{index}".format(i=idx, index=index),
# "transform": "translate(0,{dy})".format(dy=dy)
# }
# ]
# gg.append(g)
if val.get("wave"):
text = val["wave"]
Stack = text
pos = 0
while len(Stack):
c = Stack[0]
Stack = Stack[1:]
if c == "|":
dx = float(self.lane.xs) * ((2 * pos + 1) * float(self.lane.period)
* float(self.lane.hscale) - float(self.lane.phase))
b = self.container.use(href="#gap")
b.translate(dx)
# b = [
# "use",
# {
# "xmlns:xlink": xlinkns,
# "xlink:href": "#gap",
# # "transform": "translate(" + str(int(float(self.lane.xs) * ((2 * pos + 1) * float(self.lane.period) * float(self.lane.hscale) - float(self.lane.phase)))) + ")"
# "transform": "translate({dx})".format(dx=dx)
# }
# ]
# g.append(b)
g.add(b)
pos += 1
gg.add(g)
# root.append(gg)
root.add(gg)
# return root
def is_type_str(self, var):
if sys.version_info[0] < 3:
return type(var) in [str, unicode]
else:
return type(var) is str
def convert_to_svg(self, root):
svg_output = ""
if type(root) is list:
if len(root) >= 2 and type(root[1]) is dict:
if len(root) == 2:
svg_output += "<{}{}/>\n".format(root[0], self.convert_to_svg(root[1]))
                elif len(root) >= 3:
                    # element with children: emit an opening tag here; the
                    # matching closing tag is appended after the children
                    svg_output += "<{}{}>\n".format(root[0], self.convert_to_svg(root[1]))
if len(root) == 3:
svg_output += self.convert_to_svg(root[2])
else:
svg_output += self.convert_to_svg(root[2:])
svg_output += "</{}>\n".format(root[0])
elif type(root[0]) is list:
for eleml in root:
svg_output += self.convert_to_svg(eleml)
else:
svg_output += "<{}>\n".format(root[0])
for eleml in root[1:]:
svg_output += self.convert_to_svg(eleml)
svg_output += "</{}>\n".format(root[0])
elif type(root) is dict:
for elemd in root:
svg_output += " {}=\"{}\"".format(elemd, root[elemd])
else:
svg_output += root
return svg_output
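# A minimal sketch (not executed) of the list-to-markup mapping that
# convert_to_svg() implements; the input value below is hypothetical:
#
#     WaveDrom().convert_to_svg(["g", {"id": "waves"}, ["path", {"d": "m0,0 20,0"}]])
#
# returns the string:
#
#     <g id="waves">
#     <path d="m0,0 20,0"/>
#     </g>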
def main(args=None):
    # Build the parser unconditionally so that print_help() below is always
    # defined, even when pre-parsed args are passed in.
    parser = argparse.ArgumentParser(description="")
    parser.add_argument("--input", "-i", help="<input wavedrom source filename>")
    # parser.add_argument("--png", "-p", help="<output PNG image file name>")
    # parser.add_argument("--pdf", "-P", help="<output PDF file name>")
    parser.add_argument("--svg", "-s", help="<output SVG image file name>")
    if not args:
        args = parser.parse_args()
output = []
inputfile = args.input
outputfile = args.svg
wavedrom = WaveDrom()
if not inputfile or not outputfile:
parser.print_help()
else:
with open(inputfile, "r") as f:
jinput = json.load(f)
output = wavedrom.renderWaveForm(0, jinput)
# wavedrom.renderWaveForm(0, jinput, output)
# svg_output = wavedrom.convert_to_svg(output)
# with open(outputfile, "w") as f:
# f.write(svg_output)
output.saveas(outputfile)
if __name__ == "__main__":
main()
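# Command-line usage when this module is run as a script (file names are
# placeholders; the input must be a WaveJSON document like the one shown
# in the rec() docstring above):
#
#     python this_module.py -i timing.json -s timing.svg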
| mit | -8,199,215,439,648,676,000 | 42.823186 | 241 | 0.348774 | false |
openearth/delft3d-gt-server | delft3dworker/migrations/0082_auto_20161122_1530.py | 1 | 2104 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("delft3dworker", "0081_auto_20161122_1241"),
]
operations = [
migrations.AlterField(
model_name="scene",
name="phase",
field=models.PositiveSmallIntegerField(
default=0,
choices=[
(0, "New"),
(2, "Allocating preprocessing resources"),
(3, "Starting preprocessing"),
(4, "Running preprocessing"),
(5, "Finished preprocessing"),
(6, "Idle: waiting for user input"),
(10, "Allocating simulation resources"),
(11, "Starting simulation"),
(12, "Running simulation"),
(15, "Finishing simulation"),
(13, "Finished simulation"),
(14, "Stopping simulation"),
(20, "Allocating postprocessing resources"),
(21, "Starting postprocessing"),
(22, "Running postprocessing"),
(23, "Finished postprocessing"),
(30, "Allocating export resources"),
(31, "Starting export"),
(32, "Running export"),
(33, "Finished export"),
(17, "Starting container remove"),
(18, "Removing containers"),
(19, "Containers removed"),
(40, "Allocating synchronization resources"),
(41, "Started synchronization"),
(42, "Running synchronization"),
(43, "Finished synchronization"),
(50, "Finished"),
(1000, "Starting Abort"),
(1001, "Aborting"),
(1002, "Finished Abort"),
(1003, "Queued"),
],
),
),
]
| gpl-3.0 | -3,775,505,861,707,509,000 | 37.254545 | 65 | 0.443441 | false |
RedMap/Django-ImmOrbit | example/src/Demo/settings.py | 1 | 5562 | import os
gettext = lambda s: s
DATA_DIR = os.path.dirname(os.path.dirname(__file__))
"""
Django settings for Demo project.
Generated by 'django-admin startproject' using Django 1.8.3.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'y4c)%jasu93fhrxgrd2_ydyneqxnpvy$9)2+5pnk5fiz_rfx*m'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
ROOT_URLCONF = 'Demo.urls'
WSGI_APPLICATION = 'Demo.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'de'
TIME_ZONE = 'Europe/Berlin'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(DATA_DIR, 'media')
STATIC_ROOT = os.path.join(DATA_DIR, 'static')
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'Demo', 'static'),
)
SITE_ID = 1
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'Demo', 'templates'),],
'OPTIONS': {
'context_processors': [
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'django.core.context_processors.i18n',
'django.core.context_processors.debug',
'django.core.context_processors.request',
'django.core.context_processors.media',
'django.core.context_processors.csrf',
'django.core.context_processors.tz',
'sekizai.context_processors.sekizai',
'django.core.context_processors.static',
'cms.context_processors.cms_settings'
],
'loaders': [
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
'django.template.loaders.eggs.Loader'
],
},
},
]
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.locale.LocaleMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'cms.middleware.user.CurrentUserMiddleware',
'cms.middleware.page.CurrentPageMiddleware',
'cms.middleware.toolbar.ToolbarMiddleware',
'cms.middleware.language.LanguageCookieMiddleware'
)
INSTALLED_APPS = (
'djangocms_admin_style',
'djangocms_text_ckeditor',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.admin',
'django.contrib.sites',
'django.contrib.sitemaps',
'django.contrib.staticfiles',
'django.contrib.messages',
'cms',
'menus',
'sekizai',
'treebeard',
'djangocms_style',
'djangocms_column',
'djangocms_file',
'djangocms_flash',
'djangocms_googlemap',
'djangocms_inherit',
'djangocms_link',
'djangocms_picture',
'djangocms_teaser',
'djangocms_video',
'reversion',
'Demo',
'ImmOrbit',
'rest_framework',
'markdown',
'django_filters',
)
LANGUAGES = (
## Customize this
('de', gettext('de')),
('en', gettext('en')),
)
CMS_LANGUAGES = {
## Customize this
'default': {
'public': True,
'hide_untranslated': False,
'redirect_on_fallback': True,
},
1: [
{
'public': True,
'code': 'de',
'hide_untranslated': False,
'name': gettext('de'),
'redirect_on_fallback': True,
},
{
'public': True,
'code': 'en',
'hide_untranslated': False,
'name': gettext('en'),
'redirect_on_fallback': True,
},
],
}
CMS_TEMPLATES = (
## Customize this
('page.html', 'Page'),
('feature.html', 'Page with Feature')
)
CMS_PERMISSION = True
CMS_PLACEHOLDER_CONF = {}
DATABASES = {
'default':
{'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'project.db', 'HOST': 'localhost', 'USER': '', 'PASSWORD': '', 'PORT': ''}
}
MIGRATION_MODULES = {
'djangocms_column': 'djangocms_column.migrations_django',
'djangocms_flash': 'djangocms_flash.migrations_django',
'djangocms_googlemap': 'djangocms_googlemap.migrations_django',
'djangocms_inherit': 'djangocms_inherit.migrations_django',
'djangocms_link': 'djangocms_link.migrations_django',
'djangocms_style': 'djangocms_style.migrations_django',
'djangocms_file': 'djangocms_file.migrations_django',
'djangocms_picture': 'djangocms_picture.migrations_django',
'djangocms_teaser': 'djangocms_teaser.migrations_django',
'djangocms_video': 'djangocms_video.migrations_django'
}
from ImmOrbit.settings import *
| apache-2.0 | -2,142,586,347,139,064,300 | 24.869767 | 131 | 0.65534 | false |
dharmabumstead/ansible | lib/ansible/modules/cloud/centurylink/clc_blueprint_package.py | 48 | 10113 | #!/usr/bin/python
#
# Copyright (c) 2015 CenturyLink
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
module: clc_blueprint_package
short_description: deploys a blue print package on a set of servers in CenturyLink Cloud.
description:
- An Ansible module to deploy blue print package on a set of servers in CenturyLink Cloud.
version_added: "2.0"
options:
server_ids:
description:
- A list of server Ids to deploy the blue print package.
required: True
package_id:
description:
- The package id of the blue print.
required: True
package_params:
description:
- The dictionary of arguments required to deploy the blue print.
default: {}
required: False
state:
description:
- Whether to install or un-install the package. Currently it supports only "present" for install action.
required: False
default: present
choices: ['present']
wait:
description:
- Whether to wait for the tasks to finish before returning.
type: bool
default: True
required: False
requirements:
- python = 2.7
- requests >= 2.5.0
- clc-sdk
author: "CLC Runner (@clc-runner)"
notes:
- To use this module, it is required to set the below environment variables which enables access to the
Centurylink Cloud
- CLC_V2_API_USERNAME, the account login id for the centurylink cloud
    - CLC_V2_API_PASSWD, the account password for the centurylink cloud
- Alternatively, the module accepts the API token and account alias. The API token can be generated using the
CLC account login and password via the HTTP api call @ https://api.ctl.io/v2/authentication/login
- CLC_V2_API_TOKEN, the API token generated from https://api.ctl.io/v2/authentication/login
- CLC_ACCT_ALIAS, the account alias associated with the centurylink cloud
- Users can set CLC_V2_API_URL to specify an endpoint for pointing to a different CLC environment.
'''
EXAMPLES = '''
# Note - You must set the CLC_V2_API_USERNAME And CLC_V2_API_PASSWD Environment variables before running these examples
- name: Deploy package
clc_blueprint_package:
server_ids:
- UC1TEST-SERVER1
- UC1TEST-SERVER2
package_id: 77abb844-579d-478d-3955-c69ab4a7ba1a
package_params: {}
'''
RETURN = '''
server_ids:
description: The list of server ids that are changed
returned: success
type: list
sample:
[
"UC1TEST-SERVER1",
"UC1TEST-SERVER2"
]
'''
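# Credential setup sketch for the environment-variable based authentication
# described in the notes above; all values are placeholders:
#
#     export CLC_V2_API_USERNAME=my_clc_login
#     export CLC_V2_API_PASSWD=my_clc_password
#
# or, using a pre-generated token and account alias:
#
#     export CLC_V2_API_TOKEN=<token from https://api.ctl.io/v2/authentication/login>
#     export CLC_ACCT_ALIAS=<account alias>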
__version__ = '${version}'
import os
from distutils.version import LooseVersion
try:
import requests
except ImportError:
REQUESTS_FOUND = False
else:
REQUESTS_FOUND = True
#
# Requires the clc-python-sdk.
# sudo pip install clc-sdk
#
try:
import clc as clc_sdk
from clc import CLCException
except ImportError:
CLC_FOUND = False
clc_sdk = None
else:
CLC_FOUND = True
from ansible.module_utils.basic import AnsibleModule
class ClcBlueprintPackage:
clc = clc_sdk
module = None
def __init__(self, module):
"""
Construct module
"""
self.module = module
if not CLC_FOUND:
self.module.fail_json(
msg='clc-python-sdk required for this module')
if not REQUESTS_FOUND:
self.module.fail_json(
msg='requests library is required for this module')
if requests.__version__ and LooseVersion(
requests.__version__) < LooseVersion('2.5.0'):
self.module.fail_json(
msg='requests library version should be >= 2.5.0')
self._set_user_agent(self.clc)
def process_request(self):
"""
Process the request - Main Code Path
:return: Returns with either an exit_json or fail_json
"""
p = self.module.params
changed = False
changed_server_ids = []
self._set_clc_credentials_from_env()
server_ids = p['server_ids']
package_id = p['package_id']
package_params = p['package_params']
state = p['state']
if state == 'present':
changed, changed_server_ids, request_list = self.ensure_package_installed(
server_ids, package_id, package_params)
self._wait_for_requests_to_complete(request_list)
self.module.exit_json(changed=changed, server_ids=changed_server_ids)
@staticmethod
def define_argument_spec():
"""
This function defines the dictionary object required for
package module
:return: the package dictionary object
"""
argument_spec = dict(
server_ids=dict(type='list', required=True),
package_id=dict(required=True),
package_params=dict(type='dict', default={}),
            wait=dict(type='bool', default=True),
state=dict(default='present', choices=['present'])
)
return argument_spec
def ensure_package_installed(self, server_ids, package_id, package_params):
"""
Ensure the package is installed in the given list of servers
:param server_ids: the server list where the package needs to be installed
:param package_id: the blueprint package id
:param package_params: the package arguments
:return: (changed, server_ids, request_list)
changed: A flag indicating if a change was made
server_ids: The list of servers modified
request_list: The list of request objects from clc-sdk
"""
changed = False
request_list = []
servers = self._get_servers_from_clc(
server_ids,
'Failed to get servers from CLC')
for server in servers:
if not self.module.check_mode:
request = self.clc_install_package(
server,
package_id,
package_params)
request_list.append(request)
changed = True
return changed, server_ids, request_list
def clc_install_package(self, server, package_id, package_params):
"""
Install the package to a given clc server
:param server: The server object where the package needs to be installed
:param package_id: The blue print package id
:param package_params: the required argument dict for the package installation
:return: The result object from the CLC API call
"""
result = None
try:
result = server.ExecutePackage(
package_id=package_id,
parameters=package_params)
except CLCException as ex:
self.module.fail_json(msg='Failed to install package : {0} to server {1}. {2}'.format(
package_id, server.id, ex.message
))
return result
def _wait_for_requests_to_complete(self, request_lst):
"""
Waits until the CLC requests are complete if the wait argument is True
:param request_lst: The list of CLC request objects
:return: none
"""
if not self.module.params['wait']:
return
for request in request_lst:
request.WaitUntilComplete()
for request_details in request.requests:
if request_details.Status() != 'succeeded':
self.module.fail_json(
msg='Unable to process package install request')
def _get_servers_from_clc(self, server_list, message):
"""
Internal function to fetch list of CLC server objects from a list of server ids
:param server_list: the list of server ids
:param message: the error message to raise if there is any error
:return the list of CLC server objects
"""
try:
return self.clc.v2.Servers(server_list).servers
except CLCException as ex:
self.module.fail_json(msg=message + ': %s' % ex)
def _set_clc_credentials_from_env(self):
"""
Set the CLC Credentials on the sdk by reading environment variables
:return: none
"""
env = os.environ
v2_api_token = env.get('CLC_V2_API_TOKEN', False)
v2_api_username = env.get('CLC_V2_API_USERNAME', False)
v2_api_passwd = env.get('CLC_V2_API_PASSWD', False)
clc_alias = env.get('CLC_ACCT_ALIAS', False)
api_url = env.get('CLC_V2_API_URL', False)
if api_url:
self.clc.defaults.ENDPOINT_URL_V2 = api_url
if v2_api_token and clc_alias:
self.clc._LOGIN_TOKEN_V2 = v2_api_token
self.clc._V2_ENABLED = True
self.clc.ALIAS = clc_alias
elif v2_api_username and v2_api_passwd:
self.clc.v2.SetCredentials(
api_username=v2_api_username,
api_passwd=v2_api_passwd)
else:
return self.module.fail_json(
msg="You must set the CLC_V2_API_USERNAME and CLC_V2_API_PASSWD "
"environment variables")
@staticmethod
def _set_user_agent(clc):
if hasattr(clc, 'SetRequestsSession'):
agent_string = "ClcAnsibleModule/" + __version__
ses = requests.Session()
ses.headers.update({"Api-Client": agent_string})
ses.headers['User-Agent'] += " " + agent_string
clc.SetRequestsSession(ses)
def main():
"""
Main function
:return: None
"""
module = AnsibleModule(
argument_spec=ClcBlueprintPackage.define_argument_spec(),
supports_check_mode=True
)
clc_blueprint_package = ClcBlueprintPackage(module)
clc_blueprint_package.process_request()
if __name__ == '__main__':
main()
| gpl-3.0 | -4,141,179,309,504,323,000 | 32.936242 | 119 | 0.60971 | false |
OniOniOn-/MCEdit-Unified | compass.py | 5 | 1733 | """
compass
"""
import logging
from OpenGL import GL
from drawable import Drawable
from glutils import gl
from mceutils import loadPNGTexture
from config import config
import os
log = logging.getLogger(__name__)
def makeQuad(minx, miny, width, height):
return [minx, miny, minx + width, miny, minx + width, miny + height, minx, miny + height]
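# For example, the expression above gives:
#
#     makeQuad(0, 0, 10, 5) == [0, 0, 10, 0, 10, 5, 0, 5]
#
# i.e. the four corners of an axis-aligned quad flattened into the (x, y)
# pair layout that glVertexPointer(2, ...) expects in drawSelf() below.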
class CompassOverlay(Drawable):
_tex = None
_yawPitch = (0., 0.)
x, y = 0, 0
def __init__(self):
super(CompassOverlay, self).__init__()
@property
def yawPitch(self):
return self._yawPitch
@yawPitch.setter
def yawPitch(self, value):
self._yawPitch = value
self.invalidate()
def drawSelf(self):
if self._tex is None:
filename = os.path.join("toolicons", "compass.png")
self._tex = loadPNGTexture(filename)
self._tex.bind()
size = 0.001 * config.settings.compassSize.get()
with gl.glPushMatrix(GL.GL_MODELVIEW):
GL.glLoadIdentity()
yaw, pitch = self.yawPitch
if config.settings.viewMode.get() == "Chunk":
yaw = -180
GL.glTranslatef(1. - (size + self.x), size + self.y, 0.0) # position on upper right corner
GL.glRotatef(180 - yaw, 0., 0., 1.) # adjust to north
GL.glColor3f(1., 1., 1.)
with gl.glEnableClientState(GL.GL_TEXTURE_COORD_ARRAY):
GL.glVertexPointer(2, GL.GL_FLOAT, 0, makeQuad(-size, -size, 2 * size, 2 * size))
GL.glTexCoordPointer(2, GL.GL_FLOAT, 0, makeQuad(0, 0, 256, 256))
with gl.glEnable(GL.GL_BLEND, GL.GL_TEXTURE_2D):
GL.glDrawArrays(GL.GL_QUADS, 0, 4)
| isc | 221,945,318,323,689,020 | 26.951613 | 103 | 0.584535 | false |
VitalPet/c2c-rd-addons | project_gantt_webkit/__init__.py | 12 | 1160 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2012 Tiny SPRL (<http://tiny.be>).
# Copyright (C) 2004-2012 ChriCar Beteiligungs- und Beratungs- GmbH (<http://camptocamp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import project
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 7,616,749,584,926,011,000 | 43.615385 | 97 | 0.622414 | false |
Onager/plaso | tests/parsers/winreg_plugins/lfu.py | 1 | 7735 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Tests for the Less Frequently Used (LFU) Windows Registry plugin."""
import unittest
from dfdatetime import filetime as dfdatetime_filetime
from dfwinreg import definitions as dfwinreg_definitions
from dfwinreg import fake as dfwinreg_fake
from plaso.parsers.winreg_plugins import lfu
from tests.parsers.winreg_plugins import test_lib
class BootExecutePluginTest(test_lib.RegistryPluginTestCase):
"""Tests for the LFU BootExecute Windows Registry plugin."""
def _CreateTestKey(self, key_path, time_string):
"""Creates Registry keys and values for testing.
Args:
key_path (str): Windows Registry key path.
time_string (str): key last written date and time.
Returns:
dfwinreg.WinRegistryKey: a Windows Registry key.
"""
filetime = dfdatetime_filetime.Filetime()
filetime.CopyFromDateTimeString(time_string)
registry_key = dfwinreg_fake.FakeWinRegistryKey(
'Session Manager', key_path=key_path,
last_written_time=filetime.timestamp, offset=153)
value_data = 'autocheck autochk *\x00'.encode('utf_16_le')
registry_value = dfwinreg_fake.FakeWinRegistryValue(
'BootExecute', data=value_data,
data_type=dfwinreg_definitions.REG_MULTI_SZ, offset=123)
registry_key.AddValue(registry_value)
value_data = '2592000'.encode('utf_16_le')
registry_value = dfwinreg_fake.FakeWinRegistryValue(
'CriticalSectionTimeout', data=value_data,
data_type=dfwinreg_definitions.REG_SZ, offset=153)
registry_key.AddValue(registry_value)
value_data = '\x00'.encode('utf_16_le')
registry_value = dfwinreg_fake.FakeWinRegistryValue(
'ExcludeFromKnownDlls', data=value_data,
data_type=dfwinreg_definitions.REG_MULTI_SZ, offset=163)
registry_key.AddValue(registry_value)
value_data = '0'.encode('utf_16_le')
registry_value = dfwinreg_fake.FakeWinRegistryValue(
'GlobalFlag', data=value_data, data_type=dfwinreg_definitions.REG_SZ,
offset=173)
registry_key.AddValue(registry_value)
value_data = '0'.encode('utf_16_le')
registry_value = dfwinreg_fake.FakeWinRegistryValue(
'HeapDeCommitFreeBlockThreshold', data=value_data,
data_type=dfwinreg_definitions.REG_SZ, offset=183)
registry_key.AddValue(registry_value)
value_data = '0'.encode('utf_16_le')
registry_value = dfwinreg_fake.FakeWinRegistryValue(
'HeapDeCommitTotalFreeThreshold', data=value_data,
data_type=dfwinreg_definitions.REG_SZ, offset=203)
registry_key.AddValue(registry_value)
value_data = '0'.encode('utf_16_le')
registry_value = dfwinreg_fake.FakeWinRegistryValue(
'HeapSegmentCommit', data=value_data,
data_type=dfwinreg_definitions.REG_SZ, offset=213)
registry_key.AddValue(registry_value)
value_data = '0'.encode('utf_16_le')
registry_value = dfwinreg_fake.FakeWinRegistryValue(
'HeapSegmentReserve', data=value_data,
data_type=dfwinreg_definitions.REG_SZ, offset=223)
registry_key.AddValue(registry_value)
value_data = '2'.encode('utf_16_le')
registry_value = dfwinreg_fake.FakeWinRegistryValue(
'NumberOfInitialSessions', data=value_data,
data_type=dfwinreg_definitions.REG_SZ, offset=243)
registry_key.AddValue(registry_value)
return registry_key
def testFilters(self):
"""Tests the FILTERS class attribute."""
plugin = lfu.BootExecutePlugin()
key_path = (
'HKEY_LOCAL_MACHINE\\System\\ControlSet001\\Control\\Session Manager')
self._AssertFiltersOnKeyPath(plugin, key_path)
self._AssertNotFiltersOnKeyPath(plugin, 'HKEY_LOCAL_MACHINE\\Bogus')
def testProcess(self):
"""Tests the Process function."""
key_path = (
'HKEY_LOCAL_MACHINE\\System\\ControlSet001\\Control\\Session Manager')
time_string = '2012-08-31 20:45:29'
registry_key = self._CreateTestKey(key_path, time_string)
plugin = lfu.BootExecutePlugin()
storage_writer = self._ParseKeyWithPlugin(registry_key, plugin)
self.assertEqual(storage_writer.number_of_warnings, 0)
self.assertEqual(storage_writer.number_of_events, 2)
events = list(storage_writer.GetEvents())
expected_event_values = {
'data_type': 'windows:registry:boot_execute',
'key_path': key_path,
# This should just be the plugin name, as we're invoking it directly,
# and not through the parser.
'parser': plugin.plugin_name,
'timestamp': '2012-08-31 20:45:29.000000',
'value': 'autocheck autochk *'}
self.CheckEventValues(storage_writer, events[0], expected_event_values)
expected_values = (
'CriticalSectionTimeout: [REG_SZ] 2592000 '
'ExcludeFromKnownDlls: [REG_MULTI_SZ] [] '
'GlobalFlag: [REG_SZ] 0 '
'HeapDeCommitFreeBlockThreshold: [REG_SZ] 0 '
'HeapDeCommitTotalFreeThreshold: [REG_SZ] 0 '
'HeapSegmentCommit: [REG_SZ] 0 '
'HeapSegmentReserve: [REG_SZ] 0 '
'NumberOfInitialSessions: [REG_SZ] 2')
expected_event_values = {
'data_type': 'windows:registry:key_value',
'key_path': key_path,
'timestamp': '2012-08-31 20:45:29.000000',
'values': expected_values}
self.CheckEventValues(storage_writer, events[1], expected_event_values)
class BootVerificationPluginTest(test_lib.RegistryPluginTestCase):
"""Tests for the LFU BootVerification Windows Registry plugin."""
def _CreateTestKey(self, key_path, time_string):
"""Creates Registry keys and values for testing.
Args:
key_path (str): Windows Registry key path.
time_string (str): key last written date and time.
Returns:
dfwinreg.WinRegistryKey: a Windows Registry key.
"""
filetime = dfdatetime_filetime.Filetime()
filetime.CopyFromDateTimeString(time_string)
registry_key = dfwinreg_fake.FakeWinRegistryKey(
'BootVerificationProgram', key_path=key_path,
last_written_time=filetime.timestamp, offset=153)
value_data = 'C:\\WINDOWS\\system32\\googleupdater.exe'.encode(
'utf_16_le')
registry_value = dfwinreg_fake.FakeWinRegistryValue(
'ImagePath', data=value_data, data_type=dfwinreg_definitions.REG_SZ,
offset=123)
registry_key.AddValue(registry_value)
return registry_key
def testFilters(self):
"""Tests the FILTERS class attribute."""
plugin = lfu.BootVerificationPlugin()
key_path = (
'HKEY_LOCAL_MACHINE\\System\\ControlSet001\\Control\\'
'BootVerificationProgram')
self._AssertFiltersOnKeyPath(plugin, key_path)
self._AssertNotFiltersOnKeyPath(plugin, 'HKEY_LOCAL_MACHINE\\Bogus')
def testProcess(self):
"""Tests the Process function."""
key_path = '\\ControlSet001\\Control\\BootVerificationProgram'
time_string = '2012-08-31 20:45:29'
registry_key = self._CreateTestKey(key_path, time_string)
plugin = lfu.BootVerificationPlugin()
storage_writer = self._ParseKeyWithPlugin(registry_key, plugin)
self.assertEqual(storage_writer.number_of_warnings, 0)
self.assertEqual(storage_writer.number_of_events, 2)
events = list(storage_writer.GetEvents())
expected_event_values = {
'data_type': 'windows:registry:boot_verification',
'image_path': 'C:\\WINDOWS\\system32\\googleupdater.exe',
'key_path': key_path,
# This should just be the plugin name, as we're invoking it directly,
# and not through the parser.
'parser': plugin.plugin_name,
'timestamp': '2012-08-31 20:45:29.000000'}
self.CheckEventValues(storage_writer, events[0], expected_event_values)
if __name__ == '__main__':
unittest.main()
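# Sketch for running these tests, based on the shebang and the
# unittest.main() entry point above (paths assume the plaso source tree):
#
#     python3 tests/parsers/winreg_plugins/lfu.py
#     python3 -m unittest tests.parsers.winreg_plugins.lfu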
| apache-2.0 | -2,753,194,831,766,504,000 | 35.485849 | 78 | 0.69011 | false |
deepchem/deepchem | deepchem/models/__init__.py | 2 | 2852 | """
Gathers all models in one place for convenient imports
"""
# flake8: noqa
from deepchem.models.models import Model
from deepchem.models.wandblogger import WandbLogger
# Tensorflow Depedency Models
try:
from deepchem.models.keras_model import KerasModel
from deepchem.models.multitask import SingletaskToMultitask
from deepchem.models.callbacks import ValidationCallback
from deepchem.models.IRV import MultitaskIRVClassifier
from deepchem.models.robust_multitask import RobustMultitaskClassifier
from deepchem.models.robust_multitask import RobustMultitaskRegressor
from deepchem.models.progressive_multitask import ProgressiveMultitaskRegressor, ProgressiveMultitaskClassifier
from deepchem.models.graph_models import WeaveModel, DTNNModel, DAGModel, GraphConvModel, MPNNModel
from deepchem.models.scscore import ScScoreModel
from deepchem.models.seqtoseq import SeqToSeq
from deepchem.models.gan import GAN, WGAN
from deepchem.models.molgan import BasicMolGANModel
from deepchem.models.cnn import CNN
from deepchem.models.text_cnn import TextCNNModel
from deepchem.models.atomic_conv import AtomicConvModel
from deepchem.models.chemnet_models import Smiles2Vec, ChemCeption
except ModuleNotFoundError:
pass
# scikit-learn model
from deepchem.models.sklearn_models import SklearnModel
from deepchem.models.gbdt_models import GBDTModel
# PyTorch models
try:
from deepchem.models.torch_models import TorchModel
from deepchem.models.torch_models import AttentiveFP, AttentiveFPModel
from deepchem.models.torch_models import CGCNN, CGCNNModel
from deepchem.models.torch_models import GAT, GATModel
from deepchem.models.torch_models import GCN, GCNModel
from deepchem.models.torch_models import LCNN, LCNNModel
from deepchem.models.torch_models import Pagtn, PagtnModel
from deepchem.models.fcnet import MultitaskRegressor, MultitaskClassifier, MultitaskFitTransformRegressor
except ModuleNotFoundError:
pass
#####################################################################################
# Compatibility imports for renamed XGBoost models. Remove below with DeepChem 3.0.
#####################################################################################
from deepchem.models.gbdt_models.gbdt_model import XGBoostModel
########################################################################################
# Compatibility imports for renamed TensorGraph models. Remove below with DeepChem 3.0.
########################################################################################
try:
from deepchem.models.text_cnn import TextCNNTensorGraph
from deepchem.models.graph_models import WeaveTensorGraph, DTNNTensorGraph, DAGTensorGraph, GraphConvTensorGraph, MPNNTensorGraph
from deepchem.models.IRV import TensorflowMultitaskIRVClassifier
except ModuleNotFoundError:
pass
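# With the re-exports above, model classes are importable directly from
# deepchem.models. A minimal sketch (the constructor arguments shown are
# illustrative only, not a complete signature):
#
#     from deepchem.models import GCNModel
#     model = GCNModel(mode='classification', n_tasks=1)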
| mit | 7,299,545,211,742,627,000 | 45 | 131 | 0.734923 | false |
82Flex/DCRM | suit/admin_filters.py | 1 | 1146 | from django.utils.translation import ugettext_lazy as _
from django.contrib.admin import FieldListFilter
class IsNullFieldListFilter(FieldListFilter):
notnull_label = _('Is present')
isnull_label = _('Is Null')
def __init__(self, field, request, params, model, model_admin, field_path):
self.lookup_kwarg = '%s__isnull' % field_path
self.lookup_val = request.GET.get(self.lookup_kwarg, None)
super(IsNullFieldListFilter, self).__init__(field,
request, params, model,
model_admin, field_path)
def expected_parameters(self):
return [self.lookup_kwarg]
def choices(self, cl):
for lookup, title in (
(None, _('All')),
('False', self.notnull_label),
('True', self.isnull_label),
):
yield {
'selected': self.lookup_val == lookup,
'query_string': cl.get_query_string({
self.lookup_kwarg: lookup,
}),
'display': title,
}
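# Usage sketch: Django admin attaches a FieldListFilter subclass through a
# (field name, filter class) pair in list_filter; the model and field name
# below are placeholders:
#
#     from django.contrib import admin
#
#     class MyModelAdmin(admin.ModelAdmin):
#         list_filter = [('approved_at', IsNullFieldListFilter)]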
| agpl-3.0 | 3,246,084,407,563,657,000 | 35.967742 | 79 | 0.514834 | false |
thepaul/wokkel | wokkel/test/test_xmppim.py | 1 | 16551 | # Copyright (c) Ralph Meijer.
# See LICENSE for details
"""
Tests for L{wokkel.xmppim}.
"""
from twisted.internet import defer
from twisted.trial import unittest
from twisted.words.protocols.jabber.jid import JID
from twisted.words.protocols.jabber.xmlstream import toResponse
from twisted.words.xish import domish, utility
from wokkel import xmppim
from wokkel.generic import ErrorStanza, parseXml
from wokkel.test.helpers import XmlStreamStub
NS_XML = 'http://www.w3.org/XML/1998/namespace'
NS_ROSTER = 'jabber:iq:roster'
class PresenceClientProtocolTest(unittest.TestCase):
def setUp(self):
self.output = []
self.protocol = xmppim.PresenceClientProtocol()
self.protocol.parent = self
def send(self, obj):
self.output.append(obj)
def test_unavailableDirected(self):
"""
Test sending of directed unavailable presence broadcast.
"""
self.protocol.unavailable(JID('[email protected]'))
presence = self.output[-1]
self.assertEquals("presence", presence.name)
self.assertEquals(None, presence.uri)
self.assertEquals("[email protected]", presence.getAttribute('to'))
self.assertEquals("unavailable", presence.getAttribute('type'))
def test_unavailableWithStatus(self):
"""
Test sending of directed unavailable presence broadcast with status.
"""
self.protocol.unavailable(JID('[email protected]'),
{None: 'Disconnected'})
presence = self.output[-1]
self.assertEquals("presence", presence.name)
self.assertEquals(None, presence.uri)
self.assertEquals("[email protected]", presence.getAttribute('to'))
self.assertEquals("unavailable", presence.getAttribute('type'))
self.assertEquals("Disconnected", unicode(presence.status))
def test_unavailableBroadcast(self):
"""
Test sending of unavailable presence broadcast.
"""
self.protocol.unavailable(None)
presence = self.output[-1]
self.assertEquals("presence", presence.name)
self.assertEquals(None, presence.uri)
self.assertEquals(None, presence.getAttribute('to'))
self.assertEquals("unavailable", presence.getAttribute('type'))
def test_unavailableBroadcastNoEntityParameter(self):
"""
Test sending of unavailable presence broadcast by not passing entity.
"""
self.protocol.unavailable()
presence = self.output[-1]
self.assertEquals("presence", presence.name)
self.assertEquals(None, presence.uri)
self.assertEquals(None, presence.getAttribute('to'))
self.assertEquals("unavailable", presence.getAttribute('type'))
class AvailabilityPresenceTest(unittest.TestCase):
def test_fromElement(self):
xml = """<presence from='[email protected]' to='[email protected]'>
<show>chat</show>
<status>Let's chat!</status>
<priority>50</priority>
</presence>
"""
presence = xmppim.AvailabilityPresence.fromElement(parseXml(xml))
self.assertEquals(JID('[email protected]'), presence.sender)
self.assertEquals(JID('[email protected]'), presence.recipient)
self.assertTrue(presence.available)
self.assertEquals('chat', presence.show)
self.assertEquals({None: "Let's chat!"}, presence.statuses)
self.assertEquals(50, presence.priority)
class PresenceProtocolTest(unittest.TestCase):
"""
Tests for L{xmppim.PresenceProtocol}
"""
def setUp(self):
self.output = []
self.protocol = xmppim.PresenceProtocol()
self.protocol.parent = self
self.protocol.xmlstream = utility.EventDispatcher()
self.protocol.connectionInitialized()
def send(self, obj):
self.output.append(obj)
def test_errorReceived(self):
"""
Incoming presence stanzas are parsed and dispatched.
"""
xml = """<presence type="error"/>"""
def errorReceived(error):
xmppim.PresenceProtocol.errorReceived(self.protocol, error)
try:
self.assertIsInstance(error, ErrorStanza)
except:
d.errback()
else:
d.callback(None)
d = defer.Deferred()
self.protocol.errorReceived = errorReceived
self.protocol.xmlstream.dispatch(parseXml(xml))
return d
def test_availableReceived(self):
"""
Incoming presence stanzas are parsed and dispatched.
"""
xml = """<presence/>"""
def availableReceived(presence):
xmppim.PresenceProtocol.availableReceived(self.protocol, presence)
try:
self.assertIsInstance(presence, xmppim.AvailabilityPresence)
except:
d.errback()
else:
d.callback(None)
d = defer.Deferred()
self.protocol.availableReceived = availableReceived
self.protocol.xmlstream.dispatch(parseXml(xml))
return d
def test_unavailableReceived(self):
"""
Incoming presence stanzas are parsed and dispatched.
"""
xml = """<presence type='unavailable'/>"""
def unavailableReceived(presence):
xmppim.PresenceProtocol.unavailableReceived(self.protocol, presence)
try:
self.assertIsInstance(presence, xmppim.AvailabilityPresence)
except:
d.errback()
else:
d.callback(None)
d = defer.Deferred()
self.protocol.unavailableReceived = unavailableReceived
self.protocol.xmlstream.dispatch(parseXml(xml))
return d
def test_subscribeReceived(self):
"""
Incoming presence stanzas are parsed and dispatched.
"""
xml = """<presence type='subscribe'/>"""
def subscribeReceived(presence):
xmppim.PresenceProtocol.subscribeReceived(self.protocol, presence)
try:
self.assertIsInstance(presence, xmppim.SubscriptionPresence)
except:
d.errback()
else:
d.callback(None)
d = defer.Deferred()
self.protocol.subscribeReceived = subscribeReceived
self.protocol.xmlstream.dispatch(parseXml(xml))
return d
def test_unsubscribeReceived(self):
"""
Incoming presence stanzas are parsed and dispatched.
"""
xml = """<presence type='unsubscribe'/>"""
def unsubscribeReceived(presence):
xmppim.PresenceProtocol.unsubscribeReceived(self.protocol, presence)
try:
self.assertIsInstance(presence, xmppim.SubscriptionPresence)
except:
d.errback()
else:
d.callback(None)
d = defer.Deferred()
self.protocol.unsubscribeReceived = unsubscribeReceived
self.protocol.xmlstream.dispatch(parseXml(xml))
return d
def test_subscribedReceived(self):
"""
Incoming presence stanzas are parsed and dispatched.
"""
xml = """<presence type='subscribed'/>"""
def subscribedReceived(presence):
xmppim.PresenceProtocol.subscribedReceived(self.protocol, presence)
try:
self.assertIsInstance(presence, xmppim.SubscriptionPresence)
except:
d.errback()
else:
d.callback(None)
d = defer.Deferred()
self.protocol.subscribedReceived = subscribedReceived
self.protocol.xmlstream.dispatch(parseXml(xml))
return d
def test_unsubscribedReceived(self):
"""
Incoming presence stanzas are parsed and dispatched.
"""
xml = """<presence type='unsubscribed'/>"""
def unsubscribedReceived(presence):
xmppim.PresenceProtocol.unsubscribedReceived(self.protocol,
presence)
try:
self.assertIsInstance(presence, xmppim.SubscriptionPresence)
except:
d.errback()
else:
d.callback(None)
d = defer.Deferred()
self.protocol.unsubscribedReceived = unsubscribedReceived
self.protocol.xmlstream.dispatch(parseXml(xml))
return d
def test_probeReceived(self):
"""
Incoming presence stanzas are parsed and dispatched.
"""
xml = """<presence type='probe'/>"""
def probeReceived(presence):
xmppim.PresenceProtocol.probeReceived(self.protocol, presence)
try:
self.assertIsInstance(presence, xmppim.ProbePresence)
except:
d.errback()
else:
d.callback(None)
d = defer.Deferred()
self.protocol.probeReceived = probeReceived
self.protocol.xmlstream.dispatch(parseXml(xml))
return d
def test_available(self):
"""
It should be possible to pass a sender address.
"""
self.protocol.available(JID('[email protected]'),
show=u'chat',
status=u'Talk to me!',
priority=50)
element = self.output[-1]
self.assertEquals("[email protected]", element.getAttribute('to'))
self.assertIdentical(None, element.getAttribute('type'))
self.assertEquals(u'chat', unicode(element.show))
self.assertEquals(u'Talk to me!', unicode(element.status))
self.assertEquals(u'50', unicode(element.priority))
def test_availableLanguages(self):
"""
It should be possible to pass a sender address.
"""
self.protocol.available(JID('[email protected]'),
show=u'chat',
statuses={None: u'Talk to me!',
'nl': u'Praat met me!'},
priority=50)
element = self.output[-1]
self.assertEquals("[email protected]", element.getAttribute('to'))
self.assertIdentical(None, element.getAttribute('type'))
self.assertEquals(u'chat', unicode(element.show))
statuses = {}
for status in element.elements():
if status.name == 'status':
lang = status.getAttribute((NS_XML, 'lang'))
statuses[lang] = unicode(status)
self.assertIn(None, statuses)
self.assertEquals(u'Talk to me!', statuses[None])
self.assertIn('nl', statuses)
self.assertEquals(u'Praat met me!', statuses['nl'])
self.assertEquals(u'50', unicode(element.priority))
def test_availableSender(self):
"""
It should be possible to pass a sender address.
"""
self.protocol.available(JID('[email protected]'),
sender=JID('[email protected]'))
element = self.output[-1]
self.assertEquals("[email protected]", element.getAttribute('from'))
def test_unavailableDirected(self):
"""
Test sending of directed unavailable presence broadcast.
"""
self.protocol.unavailable(JID('[email protected]'))
element = self.output[-1]
self.assertEquals("presence", element.name)
self.assertEquals(None, element.uri)
self.assertEquals("[email protected]", element.getAttribute('to'))
self.assertEquals("unavailable", element.getAttribute('type'))
def test_unavailableWithStatus(self):
"""
Test sending of directed unavailable presence broadcast with status.
"""
self.protocol.unavailable(JID('[email protected]'),
{None: 'Disconnected'})
element = self.output[-1]
self.assertEquals("presence", element.name)
self.assertEquals(None, element.uri)
self.assertEquals("[email protected]", element.getAttribute('to'))
self.assertEquals("unavailable", element.getAttribute('type'))
self.assertEquals("Disconnected", unicode(element.status))
def test_unavailableBroadcast(self):
"""
Test sending of unavailable presence broadcast.
"""
self.protocol.unavailable(None)
element = self.output[-1]
self.assertEquals("presence", element.name)
self.assertEquals(None, element.uri)
self.assertEquals(None, element.getAttribute('to'))
self.assertEquals("unavailable", element.getAttribute('type'))
def test_unavailableBroadcastNoRecipientParameter(self):
"""
Test sending of unavailable presence broadcast by not passing entity.
"""
self.protocol.unavailable()
element = self.output[-1]
self.assertEquals("presence", element.name)
self.assertEquals(None, element.uri)
self.assertEquals(None, element.getAttribute('to'))
self.assertEquals("unavailable", element.getAttribute('type'))
def test_unavailableSender(self):
"""
It should be possible to pass a sender address.
"""
self.protocol.unavailable(JID('[email protected]'),
sender=JID('[email protected]'))
element = self.output[-1]
self.assertEquals("[email protected]", element.getAttribute('from'))
def test_subscribeSender(self):
"""
It should be possible to pass a sender address.
"""
self.protocol.subscribe(JID('[email protected]'),
sender=JID('[email protected]'))
element = self.output[-1]
self.assertEquals("[email protected]", element.getAttribute('from'))
def test_unsubscribeSender(self):
"""
It should be possible to pass a sender address.
"""
self.protocol.unsubscribe(JID('[email protected]'),
sender=JID('[email protected]'))
element = self.output[-1]
self.assertEquals("[email protected]", element.getAttribute('from'))
def test_subscribedSender(self):
"""
It should be possible to pass a sender address.
"""
self.protocol.subscribed(JID('[email protected]'),
sender=JID('[email protected]'))
element = self.output[-1]
self.assertEquals("[email protected]", element.getAttribute('from'))
def test_unsubscribedSender(self):
"""
It should be possible to pass a sender address.
"""
self.protocol.unsubscribed(JID('[email protected]'),
sender=JID('[email protected]'))
element = self.output[-1]
self.assertEquals("[email protected]", element.getAttribute('from'))
def test_probeSender(self):
"""
It should be possible to pass a sender address.
"""
self.protocol.probe(JID('[email protected]'),
sender=JID('[email protected]'))
element = self.output[-1]
self.assertEquals("[email protected]", element.getAttribute('from'))
class RosterClientProtocolTest(unittest.TestCase):
"""
Tests for L{xmppim.RosterClientProtocol}.
"""
def setUp(self):
self.stub = XmlStreamStub()
self.protocol = xmppim.RosterClientProtocol()
self.protocol.xmlstream = self.stub.xmlstream
self.protocol.connectionInitialized()
def test_removeItem(self):
"""
Removing a roster item is setting an item with subscription C{remove}.
"""
d = self.protocol.removeItem(JID('[email protected]'))
# Inspect outgoing iq request
iq = self.stub.output[-1]
self.assertEquals('set', iq.getAttribute('type'))
self.assertNotIdentical(None, iq.query)
self.assertEquals(NS_ROSTER, iq.query.uri)
children = list(domish.generateElementsQNamed(iq.query.children,
'item', NS_ROSTER))
self.assertEquals(1, len(children))
child = children[0]
self.assertEquals('[email protected]', child['jid'])
self.assertEquals('remove', child['subscription'])
# Fake successful response
response = toResponse(iq, 'result')
self.stub.send(response)
return d
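# These test cases are built on twisted.trial and can be run with the trial
# runner (a sketch; assumes wokkel and its dependencies are importable):
#
#     trial wokkel.test.test_xmppim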
| mit | 6,473,633,536,992,437,000 | 32.915984 | 80 | 0.598937 | false |
sstoma/CellProfiler | contrib/tifffile.py | 3 | 53026 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# tifffile.py
# Copyright (c) 2008-2009, The Regents of the University of California
# Produced by the Laboratory for Fluorescence Dynamics
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holders nor the names of any
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Read TIFF, STK, LSM and FluoView files and access image data as Numpy array.
Only a subset of the TIFF specification is supported, mainly uncompressed and
losslessly compressed 1-32 bit integer as well as 32 and 64-bit float
images, which are commonly used in scientific imaging.
TIFF, the Tagged Image File Format, is under the control of Adobe Systems.
STK and LSM are TIFF files with custom extensions used by MetaMorph and
Carl Zeiss MicroImaging, respectively. Currently only primary info records
are read for the STK, LSM, FluoView, and NIH image formats.
For command line usage run ``python tifffile.py --help``
:Authors:
`Christoph Gohlke <http://www.lfd.uci.edu/~gohlke/>`__,
Laboratory for Fluorescence Dynamics, University of California, Irvine
:Version: 20090925
Requirements
------------
* `Python 2.6 <http://www.python.org>`__
* `Numpy 1.3 <http://numpy.scipy.org>`__
* `Matplotlib 0.99 <http://matplotlib.sourceforge.net>`__
(optional for plotting)
* `tifffile.c 20090925 <http://www.lfd.uci.edu/~gohlke/>`__
(optional for faster decoding of PackBits and LZW encoded strings)
References
----------
(1) TIFF 6.0 Specification and Supplements. Adobe Systems Incorporated.
http://partners.adobe.com/public/developer/tiff/
(2) TIFF File Format FAQ. http://www.awaresystems.be/imaging/tiff/faq.html
(3) MetaMorph Stack (STK) Image File Format.
http://support.meta.moleculardevices.com/docs/t10243.pdf
(4) File Format Description - LSM 5xx Release 2.0.
http://ibb.gsf.de/homepage/karsten.rodenacker/IDL/Lsmfile.doc
(5) BioFormats. http://www.openmicroscopy.org/site/support/bio-formats5/supported-formats.html
Examples
--------
>>> tif = TIFFfile('test.tif')
>>> images = tif.asarray()
>>> image0 = tif[0].asarray()
>>> for page in tif:
... image = page.asarray()
... if page.is_rgb: pass
... if page.is_reduced: pass
... if page.is_palette:
... t = page.color_map
... if page.is_stk:
... t = page.mm_uic_tags.number_planes
... if page.is_lsm:
... t = page.cz_lsm_info
>>> tif.close()
"""
from __future__ import division
import sys
import os
import math
import zlib
import time
import struct
import warnings
from contextlib import contextmanager
import numpy
class TIFFfile(object):
"""Read TIFF, STK, and LSM files. Return image data as NumPy array.
Attributes
----------
pages : tuple of TIFFpages.
Examples
--------
>>> tif = TIFFfile('test.tif')
... try:
... images = tif.asarray()
... except Exception, e:
... print e
... finally:
... tif.close()
"""
def __init__(self, filename):
"""Initialize object from file."""
        if isinstance(filename, basestring):
            self._fd = open(filename, 'rb')
            self.fname = filename
            self.fsize = os.fstat(self._fd.fileno())[6]
        else:
            # assume open file or StringIO
            self._fd = filename
            # file-like objects may not expose a name; fall back for __str__
            self.fname = getattr(filename, 'name', '<unknown>')
            curpos = self._fd.tell()
            self._fd.seek(0, 2)
            self.fsize = self._fd.tell() - curpos
            self._fd.seek(curpos)
try:
self._fromfile()
except Exception:
self._fd.close()
raise
def close(self):
"""Close the file object."""
self._fd.close()
self._fd = None
def _fromdata(self, data):
"""Create TIFF header, pages, and tags from numpy array."""
raise NotImplementedError()
def _fromfile(self):
"""Read TIFF header and all page records from file."""
try:
self.byte_order = TIFF_BYTE_ORDERS[self._fd.read(2)]
except KeyError:
raise ValueError("not a valid TIFF file")
if struct.unpack(self.byte_order+'H', self._fd.read(2))[0] != 42:
raise ValueError("not a TIFF file")
self.pages = []
while 1:
try:
self.pages.append(TIFFpage(self))
except StopIteration:
break
def asarray(self, key=None, skipreduced=True, squeeze=True,
colormapped=True, rgbonly=True):
"""Return image data of multiple TIFF pages as numpy array.
Raises ValueError if not all pages are of same shape in all but
first dimension.
Arguments
---------
key : int or slice
Defines which pages to return as array.
skipreduced : bool
If True any reduced images are skipped.
squeeze : bool
If True all length-1 dimensions are squeezed out from result.
colormapped : bool
If True color mapping is applied for palette-indexed images.
rgbonly : bool
If True return RGB(A) images without extra samples.
"""
pages = self.pages
        if skipreduced:
            # skip reduced images, keeping only full-resolution pages
            pages = [p for p in pages if not p.is_reduced]
if key:
pages = pages[key]
try:
pages[0]
except TypeError:
result = pages.asarray(False, colormapped, rgbonly)
else:
if colormapped and self.is_nih:
result = numpy.vstack(p.asarray(False, False) for p in pages)
if pages[0].is_palette:
result = pages[0].color_map[:, result]
result = numpy.swapaxes(result, 0, 1)
else:
try:
result = numpy.vstack(p.asarray(False, colormapped,
rgbonly) for p in pages)
except ValueError:
# dimensions of pages don't agree
result = pages[0].asarray(False, colormapped, rgbonly)
p = self.pages[0]
if p.is_lsm:
# adjust LSM data shape
lsmi = p.cz_lsm_info
order = CZ_SCAN_TYPES[lsmi.scan_type]
if p.is_rgb:
order = order.replace('C', '').replace('XY', 'XYC')
shape = []
for i in reversed(order):
shape.append(getattr(lsmi, CZ_DIMENSIONS[i]))
result.shape = shape
return result.squeeze() if squeeze else result
def __len__(self):
"""Return number of image pages in file."""
return len(self.pages)
def __getitem__(self, key):
"""Return specified page."""
return self.pages[key]
def __iter__(self):
"""Return iterator over pages."""
return iter(self.pages)
def __str__(self):
"""Return string containing information about file."""
fname = os.path.split(self.fname)[-1].capitalize()
return "%s, %.2f MB, %s, %i pages" % (fname, self.size/1048576,
{'<': 'little endian', '>': 'big endian'}[self.byte_order],
len(self.pages), )
def __getattr__(self, name):
"""Return special property."""
if name in ('is_rgb', 'is_palette', 'is_stk'):
return all(getattr(p, name) for p in self.pages)
if name in ('is_lsm', 'is_nih'):
return getattr(self.pages[0], name)
if name == 'is_fluoview':
return 'mm_header' in self.pages[0].tags
raise AttributeError(name)
@contextmanager
def tifffile(filename):
"""Support for 'with' statement.
>>> with tifffile('test.tif') as tif:
... image = tif.asarray()
"""
f = TIFFfile(filename)
try:
yield f
finally:
f.close()
class TIFFpage(object):
"""A TIFF image file directory (IDF).
Attributes
----------
shape : tuple of int
Dimensions of the image array in file.
dtype : str
Data type. One of TIFF_SAMPLE_DTYPES.
tags : TiffTags
Tag values are also directly accessible as attributes.
color_map : numpy array
Color look up table, palette, if existing.
mm_uic_tags : Record
Consolidated MetaMorph mm_uic# tags, if they exist.
is_rgb : bool
True if page contains a RGB image.
is_reduced : bool
True if page is a reduced image of another image.
is_palette : bool
True if page contains a palette-colored image.
is_stk : bool
True if page contains MM_UIC2 tag.
is_lsm : bool
True if page contains CZ_LSM_INFO tag.
is_fluoview : bool
True if page contains MM_STAMP tag.
is_nih : bool
True if page contains NIH image header.
"""
def __init__(self, parent):
"""Initialize object from file."""
self._parent = parent
self.shape = ()
self.tags = TiffTags()
self._fromfile()
self._process_tags()
def _fromfile(self):
"""Read TIFF IDF structure and its tags from file.
File cursor must be at storage position of IDF offset and is left at
offset to next IDF.
Raises StopIteration if offset (first two bytes read) are 0.
"""
fd = self._parent._fd
byte_order = self._parent.byte_order
offset = struct.unpack(byte_order+'I', fd.read(4))[0]
if offset == 0:
raise StopIteration()
# read standard tags
tags = self.tags
fd.seek(offset, 0)
numtags = struct.unpack(byte_order+'H', fd.read(2))[0]
for i in xrange(numtags):
tag = TIFFtag(fd, byte_order=byte_order)
tags[tag.name] = tag
# read custom tags
pos = fd.tell()
for name, readtag in CUSTOM_TAGS.values():
if name in tags and readtag:
value = readtag(fd, byte_order, tags[name])
if isinstance(value, dict): # numpy.core.records.record
value = Record(value)
tags[name].value = value
fd.seek(pos)
def _process_tags(self):
"""Validate standard tags and initialize attributes."""
tags = self.tags
for code, (name, default, dtype, count, validate) in TIFF_TAGS.items():
if not (name in tags or default is None):
tags[name] = TIFFtag(code, dtype=dtype, count=count,
value=default, name=name)
if name in tags and validate:
try:
if tags[name].count == 1:
setattr(self, name, validate[tags[name].value])
else:
setattr(self, name, tuple(validate[value]
for value in tags[name].value))
except KeyError:
raise ValueError('%s.value (%s) not supported' %
(name, tags[name].value))
tag = tags['bits_per_sample']
if tag.count != 1:
bps = tag.value[0]
if any(i - bps for i in tag.value):
raise ValueError("all samples must be of same type %s" %
str(tag))
self.bits_per_sample = bps
tag = tags['sample_format']
if tag.count != 1:
fmt = tag.value[0]
if any(i - fmt for i in tag.value):
raise ValueError("all samples must be of same format %s" %
str(tag))
self.sample_format = TIFF_SAMPLE_FORMATS[fmt]
self.strips_per_image = int(math.floor((self.image_length +
self.rows_per_strip - 1) / self.rows_per_strip))
key = (self.sample_format, self.bits_per_sample)
try:
self.dtype = TIFF_SAMPLE_DTYPES[key]
except KeyError:
raise KeyError("Unsupported sample dtype %s" % str(key))
if self.is_palette:
dtype = self.tags['color_map'].dtype[1]
self.color_map = numpy.array(self.color_map,
dtype).reshape((3, -1))
planes = 0
if self.is_stk:
planes = tags['mm_uic2'].count
# consolidate mm_uic tags
self.mm_uic_tags = Record(tags['mm_uic2'].value)
for t in ('mm_uic3', 'mm_uic4', 'mm_uic1'):
if t in tags:
self.mm_uic_tags.update(tags[t].value)
if planes:
if self.planar_configuration == 'contig':
self.shape = (planes, self.image_length,
self.image_width, self.samples_per_pixel)
else:
self.shape = (planes, self.samples_per_pixel,
self.image_length, self.image_width, 1)
else:
if self.planar_configuration == 'contig':
self.shape = (1, self.image_length, self.image_width,
self.samples_per_pixel)
else:
self.shape = (self.samples_per_pixel, self.image_length,
self.image_width, 1)
if not self.compression and 'strip_byte_counts' not in tags:
self.strip_byte_counts = numpy.product(self.shape) * (
self.bits_per_sample // 8)
def asarray(self, squeeze=True, colormapped=True, rgbonly=True):
"""Read image data and return as numpy array in native byte order.
Arguments
---------
squeeze : bool
If True all length-1 dimensions are squeezed out from result.
colormapped : bool
If True color mapping is applied for palette-indexed images.
rgbonly : bool
If True return RGB(A) image without extra samples.
"""
fd = self._parent._fd
if not fd:
raise IOError("TIFF file is not open")
strip_offsets = self.strip_offsets
strip_byte_counts = self.strip_byte_counts
try:
strip_offsets[0]
except TypeError:
strip_offsets = (self.strip_offsets, )
strip_byte_counts = (self.strip_byte_counts, )
byte_order = self._parent.byte_order
bytes_per_sample = self.bits_per_sample // 8
typecode = byte_order + self.dtype
if self.is_stk:
fd.seek(strip_offsets[0], 0)
result = numpy.fromfile(fd, typecode, numpy.product(self.shape))
else:
# try to speed up reading contiguous data by merging all strips
if not self.compression \
and self.bits_per_sample in (8, 16, 32) \
and all(strip_offsets[i] == \
strip_offsets[i+1]-strip_byte_counts[i]
for i in xrange(len(strip_offsets)-1)):
strip_byte_counts = (strip_offsets[-1] - strip_offsets[0] +
strip_byte_counts[-1], )
strip_offsets = (strip_offsets[0], )
result = numpy.empty(self.shape, self.dtype).reshape(-1)
index = 0
for offset, bytecount in zip(strip_offsets, strip_byte_counts):
fd.seek(offset, 0)
data = TIFF_DECOMPRESSORS[self.compression](fd.read(bytecount))
l = self.image_width
if self.planar_configuration == 'contig':
l *= self.samples_per_pixel
data = unpackbits(data, typecode, self.bits_per_sample, l)
result[index:index+data.size] = data
index += data.size
result.shape = self.shape[:]
if self.predictor == 'horizontal':
numpy.cumsum(result, axis=2, dtype=self.dtype, out=result)
if colormapped and self.photometric == 'palette':
if self.color_map.shape[1] >= 2**self.bits_per_sample:
result = self.color_map[:, result]
result = numpy.swapaxes(result, 0, 1)
if rgbonly and 'extra_samples' in self.tags:
# return only RGB and the first unassociated alpha channel, if any
extra_samples = self.tags['extra_samples'].value
if self.tags['extra_samples'].count == 1:
extra_samples = (extra_samples, )
for i, es in enumerate(extra_samples):
if es == 2: # unassociated alpha channel
if self.planar_configuration == 'contig':
result = result[..., [0, 1, 2, 3+i]]
else:
result = result[[0, 1, 2, 3+i]]
break
else:
if self.planar_configuration == 'contig':
result = result[..., :3]
else:
result = result[:3]
if result.shape[0] != 1:
result.shape = (1, ) + result.shape
return result.squeeze() if squeeze else result
def __getattr__(self, name):
"""Return tag value or special property."""
tags = self.tags
if name in tags:
return tags[name].value
if name == 'is_rgb':
return tags['photometric'].value == 2
if name == 'is_reduced':
return tags['new_subfile_type'].value & 1
if name == 'is_palette':
return 'color_map' in tags
if name == 'is_stk':
return 'mm_uic2' in tags
if name == 'is_lsm':
return 'cz_lsm_info' in tags
if name == 'is_fluoview':
return 'mm_stamp' in tags
if name == 'is_nih':
return 'nih_image_header' in tags
raise AttributeError(name)
def __str__(self):
"""Return string containing information about page."""
t = ','.join(t[3:] for t in (
'is_stk', 'is_lsm', 'is_nih', 'is_fluoview') if getattr(self, t))
s = ', '.join(str(i) for i in (
(' x '.join(str(i) for i in self.shape if i > 1),
numpy.dtype(self.dtype),
"%i bit" % self.bits_per_sample,
self.photometric,
self.compression if self.compression else 'raw')))
if t:
s = ', '.join((s, t))
return s
class TIFFtag(object):
"""A TIFF tag structure.
Attributes
----------
name : string
Attribute name of tag.
code : int
Decimal code of tag.
dtype : str
Datatype of tag data. One of TIFF_DATA_TYPES.
count : int
Number of values.
value : various types
Tag data. For codes in CUSTOM_TAGS the 4 bytes file content.
"""
__slots__ = ('code', 'name', 'count', 'dtype', 'value')
def __init__(self, arg, **kwargs):
"""Initialize tag from file or arguments."""
if isinstance(arg, file) or hasattr(arg, 'read'):
self._fromfile(arg, **kwargs)
else:
self._fromdata(arg, **kwargs)
def _fromdata(self, code, dtype, count, value, name=None):
"""Initialize tag from arguments."""
self.code = int(code)
self.name = name if name else str(code)
self.dtype = TIFF_DATA_TYPES[dtype]
self.count = int(count)
self.value = value
def _fromfile(self, fd, byte_order):
"""Read tag structure from open file. Advances file cursor 12 bytes."""
code, dtype, count, value = struct.unpack(byte_order+'HHI4s',
fd.read(12))
if code in TIFF_TAGS:
name = TIFF_TAGS[code][0]
elif code in CUSTOM_TAGS:
name = CUSTOM_TAGS[code][0]
else:
name = str(code)
try:
dtype = TIFF_DATA_TYPES[dtype]
except KeyError:
raise ValueError("unknown TIFF tag data type %i" % dtype)
if code not in CUSTOM_TAGS:
format = '%s%i%s' % (byte_order, count*int(dtype[0]), dtype[1])
size = struct.calcsize(format)
if size <= 4:
value = struct.unpack(format, value[:size])
else:
pos = fd.tell()
fd.seek(struct.unpack(byte_order+'I', value)[0])
value = struct.unpack(format, fd.read(size))
fd.seek(pos)
if len(value) == 1:
value = value[0]
if dtype == '1s':
value = stripnull(value)
self.code = code
self.name = name
self.dtype = dtype
self.count = count
self.value = value
def __str__(self):
"""Return string containing information about tag."""
return ' '.join(str(getattr(self, s)) for s in self.__slots__)
class Record(dict):
"""Dictionary with attribute access.
Can also be initialized with numpy.core.records.record.
"""
__slots__ = ()
def __init__(self, arg={}):
try:
dict.__init__(self, arg)
except Exception:
for i, name in enumerate(arg.dtype.names):
v = arg[i]
self[name] = v if v.dtype.char != 'S' else stripnull(v)
def __getattr__(self, name):
return self[name]
def __setattr__(self, name, value):
self.__setitem__(name, value)
def __str__(self):
return '\n'.join("* %s: %s" % (k, str(self[k]))
for k in sorted(self.keys()) if not k.startswith('_'))
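# A short usage sketch of Record (illustrative, not part of the original
# comments): keys double as attributes, and record arrays are unpacked
# field by field in __init__.
#
#   >>> r = Record({'width': 512})
#   >>> r.width
#   512
#   >>> r.height = 256  # equivalent to r['height'] = 256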
class TiffTags(Record):
"""Dictionary of TIFFtags with attribute access."""
def __str__(self):
"""Return string with information about all tags."""
sortbycode = lambda a, b: cmp(a[0], b[0])
s = []
for td in (TIFF_TAGS, CUSTOM_TAGS):
for i, t in sorted(td.iteritems(), sortbycode):
if t[0] in self:
t = self[t[0]]
typecode = "%i%s" % (t.count * int(t.dtype[0]), t.dtype[1])
s.append("* %i %s (%s) %s" % (t.code, t.name, typecode,
str(t.value).split('\n', 1)[0][:40]))
return '\n'.join(s)
def read_nih_image_header(fd, byte_order, tag):
"""Read NIH_IMAGE_HEADER tag from file and return as dictionary."""
fd.seek(12 + struct.unpack(byte_order+'I', tag.value)[0])
return {'version': struct.unpack(byte_order+'H', fd.read(2))[0]}
def read_cz_lsm_info(fd, byte_order, tag):
"""Read CS_LSM_INFO tag from file and return as numpy.rec.array."""
fd.seek(struct.unpack(byte_order+'I', tag.value)[0])
result = numpy.rec.fromfile(fd, CZ_LSM_INFO, 1, byteorder=byte_order)[0]
{50350412: '1.3', 67127628: '2.0'}[result.magic_number]  # raises KeyError on unknown version
return result
def read_mm_header(fd, byte_order, tag):
"""Read MM_HEADER tag from file and return as numpy.rec.array."""
fd.seek(struct.unpack(byte_order+'I', tag.value)[0])
return numpy.rec.fromfile(fd, MM_HEADER, 1, byteorder=byte_order)[0]
def read_mm_stamp(fd, byte_order, tag):
"""Read MM_STAMP tag from file and return as numpy.array."""
fd.seek(struct.unpack(byte_order+'I', tag.value)[0])
return numpy.fromfile(fd, byte_order+'8f8', 1)[0]
def read_mm_uic1(fd, byte_order, tag):
"""Read MM_UIC1 tag from file and return as dictionary."""
fd.seek(struct.unpack(byte_order+'I', tag.value)[0])
t = fd.read(8*tag.count)
t = struct.unpack('%s%iI' % (byte_order, 2*tag.count), t)
return dict((MM_TAG_IDS[k], v) for k, v in zip(t[::2], t[1::2])
if k in MM_TAG_IDS)
def read_mm_uic2(fd, byte_order, tag):
"""Read MM_UIC2 tag from file and return as dictionary."""
result = {'number_planes': tag.count}
fd.seek(struct.unpack(byte_order+'I', tag.value)[0])
values = numpy.fromfile(fd, byte_order+'I', 6*tag.count)
result['z_distance'] = values[0::6] / values[1::6]
#result['date_created'] = tuple(values[2::6])
#result['time_created'] = tuple(values[3::6])
#result['date_modified'] = tuple(values[4::6])
#result['time_modified'] = tuple(values[5::6])
return result
def read_mm_uic3(fd, byte_order, tag):
"""Read MM_UIC3 tag from file and return as dictionary."""
fd.seek(struct.unpack(byte_order+'I', tag.value)[0])
t = numpy.fromfile(fd, '%sI' % byte_order, 2*tag.count)
return {'wavelengths': t[0::2] / t[1::2]}
def read_mm_uic4(fd, byte_order, tag):
"""Read MM_UIC4 tag from file and return as dictionary."""
fd.seek(struct.unpack(byte_order+'I', tag.value)[0])
t = struct.unpack(byte_order + 'hI'*tag.count, fd.read(6*tag.count))
return dict((MM_TAG_IDS[k], v) for k, v in zip(t[::2], t[1::2])
if k in MM_TAG_IDS)
def _replace_by(module_function, warn=False):
"""Try replace decorated function by module.function."""
def decorate(func, module_function=module_function, warn=warn):
try:
module, function = module_function.split('.')
func, oldfunc = getattr(__import__(module), function), func
globals()['__old_' + func.__name__] = oldfunc
except Exception, e:
if warn:
warnings.warn("Failed to import %s" % module_function)
return func
return decorate
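# The decorator above is why the module docstring lists the _tifffile C
# extension as optional: when the import succeeds, the pure Python codecs
# below are swapped for the faster C implementations; otherwise the Python
# versions are used unchanged (a hedged reading of the code; the extension
# itself is not bundled here).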
def unpackbits(data, dtype, itemsize, length=0):
"""Decompress bit encoded byte string.
data : str
dtype : numpy.dtype or str
Only boolean or integer types are allowed.
itemsize : int
Number of bits per item.
length : int
Number of consecutive items, after which decoding restarts at the next byte boundary.
"""
dtype = numpy.dtype(dtype)
# shortcut numpy types
if itemsize in (8, 16, 32, 64) and length*itemsize % 8 == 0:
if itemsize != dtype.itemsize*8:
raise ValueError("dtype, itemsize mismatch")
return numpy.fromstring(data, dtype)
# shortcut bitarray
if itemsize == 1:
data = numpy.fromstring(data, '|B')
data = numpy.unpackbits(data)
if length % 8 != 0:
data = data.reshape(-1, length+(8-length%8))
data = data[:, :length].reshape(-1)
return data.astype(dtype)
if itemsize < 1 or itemsize > 64:
raise ValueError("itemsize out of range")
if dtype.kind not in "biu":
raise ValueError("invalid dtype")
if itemsize > dtype.itemsize * 8:
raise ValueError("dtype.itemsize too small")
for i in (8, 16, 32, 64):
if itemsize <= i:
itembytes = i // 8
break
if length == 0:
length = len(data) // itembytes
skipbits = (length*itemsize) % 8
if skipbits:
skipbits = 8 - skipbits
shrbits = itembytes*8 - itemsize
bitmask = int(itemsize*'1'+'0'*shrbits, 2)
if dtype.byteorder == '|':
dtypestr = '=' + dtype.char
else:
dtypestr = dtype.byteorder + dtype.char
unpack = struct.unpack
l = length * (len(data)*8 // (length*itemsize + skipbits))
result = numpy.empty((l,), dtype)
bitcount = 0
for i in xrange(len(result)):
start = bitcount // 8
s = data[start:start+itembytes]
try:
code = unpack(dtypestr, s)[0]
except Exception:
code = unpack(dtypestr, s + '\x00'*(itembytes-len(s)))[0]
code = code << (bitcount % 8)
code = code & bitmask
result[i] = code >> shrbits
bitcount += itemsize
if (i+1) % length == 0:
bitcount += skipbits
return result
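# A worked example (illustrative, assuming Python 2 byte strings): two
# packed 12-bit samples 0xABC and 0xDEF occupy three bytes. Passing
# length=2 tells the decoder both items belong to the same row, so no
# padding bits are skipped between them.
#
#   >>> unpackbits('\xab\xcd\xef', '>u2', 12, length=2)   # -> [0xabc, 0xdef]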
@_replace_by('_tifffile.packbitsdecode')
def packbitsdecode(encoded):
"""Decompress PackBits encoded byte string.
PackBits is a simple byte-oriented run-length compression scheme.
"""
result = []
i = 0
try:
while 1:
n = ord(encoded[i]) + 1
i += 1
if n < 129:
result.extend(encoded[i:i+n])
i += n
elif n > 129:
result.extend(encoded[i:i+1] * (258-n))
i += 1
except IndexError:
return ''.join(result)
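# A minimal sanity check (illustrative): '\x00' copies the next 1 literal
# byte, '\xfe' (n = 255) replicates the next byte 258-255 = 3 times.
#
#   >>> packbitsdecode('\x00A\xfeB')
#   'ABBB'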
@_replace_by('_tifffile.lzwdecode')
def lzwdecode(encoded):
"""Decompress LZW (Lempel-Ziv-Welch) encoded TIFF strip (byte string).
The strip must begin with a CLEAR code and end with an EOI code.
This is an implementation of the LZW decoding algorithm described in (1).
"""
unpack = struct.unpack
def next_code():
"""Return integer of `bitw` bits at `bitcount` position in encoded."""
start = bitcount // 8
s = encoded[start:start+4]
try:
code = unpack('>I', s)[0]
except Exception:
code = unpack('>I', s + '\x00'*(4-len(s)))[0]
code = code << (bitcount % 8)
code = code & mask
return code >> shr
if len(encoded) < 4:
raise ValueError("strip must be at least 4 characters long")
if 256 != unpack('>I', encoded[0:4])[0] >> 23:
raise ValueError("strip must begin with CLEAR code")
switchbitch = { # code, bit-width, shr-bits, bit-mask
255: (9, 23, int(9*'1'+'0'*23, 2)),
511: (10, 22, int(10*'1'+'0'*22, 2)),
1023: (11, 21, int(11*'1'+'0'*21, 2)),
2047: (12, 20, int(12*'1'+'0'*20, 2)), }
bitw, shr, mask = switchbitch[255]
bitcount = 0
result = []
while 1:
code = next_code() # ~5% faster when inlining this function
bitcount += bitw
if code == 257: # EOI
break
if code == 256: # CLEAR
table = [chr(i) for i in xrange(256)]
table.extend((0, 0))
lentable = 258
bitw, shr, mask = switchbitch[255]
code = next_code()
bitcount += bitw
if code == 257: # EOI
break
result.append(table[code])
else:
if code < lentable:
decoded = table[code]
newcode = table[oldcode] + decoded[0]
else:
newcode = table[oldcode]
newcode += newcode[0]
decoded = newcode
result.append(decoded)
table.append(newcode)
lentable += 1
oldcode = code
if lentable in switchbitch:
bitw, shr, mask = switchbitch[lentable]
return ''.join(result)
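# A hand-packed example (illustrative): the 9-bit codes CLEAR (256),
# 'A' (65) and EOI (257) concatenated MSB-first and zero-padded give the
# four bytes below, which decode to a single 'A'.
#
#   >>> lzwdecode('\x80\x10\x60\x20')
#   'A'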
def stripnull(string):
"""Return string truncated at first null character."""
i = string.find('\x00')
return string if (i < 0) else string[:i]
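# E.g. stripnull('abc\x00def') returns 'abc'; strings without a NUL byte
# are returned unchanged.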
def test_tifffile(directory='testimages', verbose=True):
"""Read all images in directory. Print error message on failure.
>>> test_tifffile(verbose=False)
"""
import glob
successful = 0
failed = 0
start = time.time()
for f in glob.glob(os.path.join(directory, '*.*')):
if verbose:
print "\n%s>" % f.lower(),
t0 = time.time()
try:
tif = TIFFfile(f)
except Exception, e:
if not verbose:
print f,
print "ERROR:", e
failed += 1
continue
try:
img = tif.asarray()
except ValueError:
try:
img = tif[0].asarray()
except Exception, e:
if not verbose:
print f,
print "ERROR:", e
finally:
tif.close()
successful += 1
if verbose:
print "%s, %s %s, %s, %.0f ms" % (str(tif), str(img.shape),
img.dtype, tif[0].compression, (time.time()-t0) * 1e3)
if verbose:
print "\nSuccessfully read %i of %i files in %.3f s\n" % (
successful, successful+failed, time.time()-start)
# TIFF tag structures. Cases that are irrelevant or not implemented are
# commented out.
class TIFF_SUBFILE_TYPES(object):
def __getitem__(self, key):
result = []
if key & 1:
result.append('reduced_image')
if key & 2:
result.append('page')
if key & 4:
result.append('mask')
return tuple(result)
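# The class above decodes the NewSubfileType bit mask, e.g. (illustrative):
#
#   >>> TIFF_SUBFILE_TYPES()[3]
#   ('reduced_image', 'page')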
TIFF_OSUBFILE_TYPES = {
0: 'undefined',
1: 'image', # full-resolution image data
2: 'reduced_image', # reduced-resolution image data
3: 'page', # a single page of a multi-page image
}
TIFF_PHOTOMETRICS = {
0: 'miniswhite',
1: 'minisblack',
2: 'rgb',
3: 'palette',
#4: 'mask',
#5: 'separated',
#6: 'cielab',
#7: 'icclab',
#8: 'itulab',
#32844: 'logl',
#32845: 'logluv',
}
TIFF_COMPRESSIONS = {
1: None,
#2: 'ccittrle',
#3: 'ccittfax3',
#4: 'cittfax4',
5: 'lzw',
#6: 'ojpeg',
#7: 'jpeg',
8: 'adobe_deflate',
#9: 't85',
#10: 't43',
#32766: 'next',
#32771: 'ccittrlew',
32773: 'packbits',
#32809: 'thunderscan',
#32895: 'it8ctpad',
#32896: 'it8lw',
#32897: 'it8mp',
#32898: 'it8bl',
#32908: 'pixarfilm',
#32909: 'pixarlog',
32946: 'deflate',
#32947: 'dcs',
#34661: 'jbig',
#34676: 'sgilog',
#34677: 'sgilog24',
#34712: 'jp2000',
}
TIFF_DECOMPRESSORS = {
None: lambda x: x,
'adobe_deflate': zlib.decompress,
'deflate': zlib.decompress,
'packbits': packbitsdecode,
'lzw': lzwdecode,
}
TIFF_DATA_TYPES = {
1: '1B', # BYTE 8-bit unsigned integer.
2: '1s', # ASCII 8-bit byte that contains a 7-bit ASCII code;
# the last byte must be NUL (binary zero).
3: '1H', # SHORT 16-bit (2-byte) unsigned integer
4: '1I', # LONG 32-bit (4-byte) unsigned integer.
5: '2I', # RATIONAL Two LONGs: the first represents the numerator of
# a fraction; the second, the denominator.
6: '1b', # SBYTE An 8-bit signed (twos-complement) integer.
7: '1B', # UNDEFINED An 8-bit byte that may contain anything,
# depending on the definition of the field.
8: '1h', # SSHORT A 16-bit (2-byte) signed (twos-complement) integer.
9: '1i', # SLONG A 32-bit (4-byte) signed (twos-complement) integer.
10: '2i', # SRATIONAL Two SLONGs: the first represents the numerator
# of a fraction, the second the denominator.
11: '1f', # FLOAT Single precision (4-byte) IEEE format.
12: '1d', # DOUBLE Double precision (8-byte) IEEE format.
}
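# TIFFtag._fromfile composes these entries into struct formats, e.g. a
# little-endian RATIONAL (type 5) with count=1 becomes '<2I': two unsigned
# 32-bit words read as numerator and denominator (illustrative).
#
#   >>> struct.calcsize('<2I')
#   8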
TIFF_BYTE_ORDERS = {
'II': '<', # little endian
'MM': '>', # big endian
}
TIFF_SAMPLE_FORMATS = {
1: 'uint',
2: 'int',
3: 'float',
#4: 'void',
#5: 'complex_int',
#6: 'complex',
}
TIFF_SAMPLE_DTYPES = {
('uint', 1): '?', # bitmap
('uint', 2): 'B',
('uint', 4): 'B',
('uint', 6): 'B',
('uint', 8): 'B',
('uint', 10): 'H',
('uint', 12): 'H',
('uint', 14): 'H',
('uint', 16): 'H',
('uint', 24): 'I',
('uint', 32): 'I',
('int', 8): 'b',
('int', 16): 'h',
('int', 32): 'i',
('float', 32): 'f',
('float', 64): 'd',
}
TIFF_PREDICTORS = {
1: None,
2: 'horizontal',
#3: 'floatingpoint',
}
TIFF_ORIENTATIONS = {
1: 'top_left',
2: 'top_right',
3: 'bottom_right',
4: 'bottom_left',
5: 'left_top',
6: 'right_top',
7: 'right_bottom',
8: 'left_bottom',
}
TIFF_FILLORDERS = {
1: 'msb2lsb',
2: 'lsb2msb',
}
TIFF_RESUNITS = {
1: 'none',
2: 'inch',
3: 'centimeter',
}
TIFF_PLANARCONFIGS = {
1: 'contig',
2: 'separate',
}
TIFF_EXTRA_SAMPLES = {
0: 'unspecified',
1: 'assocalpha',
2: 'unassalpha',
}
# MetaMorph STK tags
MM_TAG_IDS = {
0: 'auto_scale',
1: 'min_scale',
2: 'max_scale',
3: 'spatial_calibration',
#4: 'x_calibration',
#5: 'y_calibration',
#6: 'calibration_units',
#7: 'name',
8: 'thresh_state',
9: 'thresh_state_red',
11: 'thresh_state_green',
12: 'thresh_state_blue',
13: 'thresh_state_lo',
14: 'thresh_state_hi',
15: 'zoom',
#16: 'create_time',
#17: 'last_saved_time',
18: 'current_buffer',
19: 'gray_fit',
20: 'gray_point_count',
#21: 'gray_x',
#22: 'gray_y',
#23: 'gray_min',
#24: 'gray_max',
#25: 'gray_unit_name',
26: 'standard_lut',
27: 'wavelength',
#28: 'stage_position',
#29: 'camera_chip_offset',
#30: 'overlay_mask',
#31: 'overlay_compress',
#32: 'overlay',
#33: 'special_overlay_mask',
#34: 'special_overlay_compress',
#35: 'special_overlay',
36: 'image_property',
#37: 'stage_label',
#38: 'autoscale_lo_info',
#39: 'autoscale_hi_info',
#40: 'absolute_z',
#41: 'absolute_z_valid',
#42: 'gamma',
#43: 'gamma_red',
#44: 'gamma_green',
#45: 'gamma_blue',
#46: 'camera_bin',
47: 'new_lut',
#48: 'image_property_ex',
49: 'plane_property',
#50: 'user_lut_table',
51: 'red_autoscale_info',
#52: 'red_autoscale_lo_info',
#53: 'red_autoscale_hi_info',
54: 'red_minscale_info',
55: 'red_maxscale_info',
56: 'green_autoscale_info',
#57: 'green_autoscale_lo_info',
#58: 'green_autoscale_hi_info',
59: 'green_minscale_info',
60: 'green_maxscale_info',
61: 'blue_autoscale_info',
#62: 'blue_autoscale_lo_info',
#63: 'blue_autoscale_hi_info',
64: 'blue_min_scale_info',
65: 'blue_max_scale_info',
#66: 'overlay_plane_color',
}
# Olympus FluoView
MM_DIMENSION = [
('name', 'a16'),
('size', 'i4'),
('origin', 'f8'),
('resolution', 'f8'),
('unit', 'a64'),
]
MM_HEADER = [
('header_flag', 'i2'),
('image_type', 'u1'),
('image_name', 'a257'),
('offset_data', 'u4'),
('palette_size', 'i4'),
('offset_palette0', 'u4'),
('offset_palette1', 'u4'),
('comment_size', 'i4'),
('offset_comment', 'u4'),
('dimensions', MM_DIMENSION, 10),
('offset_position', 'u4'),
('map_type', 'i2'),
('map_min', 'f8'),
('map_max', 'f8'),
('min_value', 'f8'),
('max_value', 'f8'),
('offset_map', 'u4'),
('gamma', 'f8'),
('offset', 'f8'),
('gray_channel', MM_DIMENSION),
('offset_thumbnail', 'u4'),
('voice_field', 'i4'),
('offset_voice_field', 'u4'),
]
# Carl Zeiss LSM record
CZ_LSM_INFO = [
('magic_number', 'i4'),
('structure_size', 'i4'),
('dimension_x', 'i4'),
('dimension_y', 'i4'),
('dimension_z', 'i4'),
('dimension_channels', 'i4'),
('dimension_time', 'i4'),
('dimension_data_type', 'i4'),
('thumbnail_x', 'i4'),
('thumbnail_y', 'i4'),
('voxel_size_x', 'f8'),
('voxel_size_y', 'f8'),
('voxel_size_z', 'f8'),
('origin_x', 'f8'),
('origin_y', 'f8'),
('origin_z', 'f8'),
('scan_type', 'u2'),
('spectral_scan', 'u2'),
('data_type', 'u4'),
('offset_vector_overlay', 'u4'),
('offset_input_lut', 'u4'),
('offset_output_lut', 'u4'),
('offset_channel_colors', 'u4'),
('time_interval', 'u4'),
('', 'u4'),
('offset_scan_information', 'u4'),
('', 'u4'),
('offset_time_stamps', 'u4'),
('offset_event_list', 'u4'),
('offset_roi', 'u4'),
('offset_bleach_roi', 'u4'),
('', 'u8'),
('display_aspect_x', 'f8'),
('display_aspect_y', 'f8'),
('display_aspect_z', 'f8'),
('display_aspect_time', 'f8'),
('offset_mean_of_roi_overlay', 'u4'),
('offset_topo_isoline_overlay', 'u4'),
('offset_topo_profile_overlay', 'u4'),
('offset_linescan_overlay', 'u4'),
('offset_toolbar_flags', 'u4'),
]
# Map cz_lsm_info.scan_type to dimension order
CZ_SCAN_TYPES = {
0: 'XYZCT', # x-y-z scan
1: 'XYZCT', # z scan (x-z plane)
2: 'XYZCT', # line scan
3: 'XYTCZ', # time series x-y
4: 'XYZTC', # time series x-z
5: 'XYTCZ', # time series 'Mean of ROIs'
6: 'XYZTC', # time series x-y-z
7: 'XYCTZ', # spline scan
8: 'XYCZT', # spline scan x-z
9: 'XYTCZ', # time series spline plane x-z
10: 'XYZCT', # point mode
}
# Map dimension codes to cz_lsm_info attribute
CZ_DIMENSIONS = {
'X': 'dimension_x',
'Y': 'dimension_y',
'Z': 'dimension_z',
'C': 'dimension_channels',
'T': 'dimension_time',
}
# Descriptions of cz_lsm_info.data_type
CZ_DATA_TYPES = {
0: 'varying data types',
2: '12 bit unsigned integer',
5: '32 bit float',
#default: '8 bit unsigned integer',
}
# Map TIFF tag codes to attribute names, default value, type, count, validator
TIFF_TAGS = {
254: ('new_subfile_type', 0, 4, 1, TIFF_SUBFILE_TYPES()),
255: ('subfile_type', None, 3, 1, TIFF_OSUBFILE_TYPES),
256: ('image_width', None, 4, 1, None),
257: ('image_length', None, 4, 1, None),
258: ('bits_per_sample', 1, 3, 1, None),
259: ('compression', 1, 3, 1, TIFF_COMPRESSIONS),
262: ('photometric', None, 3, 1, TIFF_PHOTOMETRICS),
266: ('fill_order', 1, 3, 1, TIFF_FILLORDERS),
269: ('document_name', None, 2, None, None),
270: ('image_description', None, 2, None, None),
271: ('make', None, 2, None, None),
272: ('model', None, 2, None, None),
273: ('strip_offsets', None, 4, None, None),
274: ('orientation', 1, 3, 1, TIFF_ORIENTATIONS),
277: ('samples_per_pixel', 1, 3, 1, None),
278: ('rows_per_strip', 2**32-1, 4, 1, None),
279: ('strip_byte_counts', None, 4, None, None), # required
#280: ('min_sample_value', 0, 3, None, None),
#281: ('max_sample_value', None, 3, None, None), # 2**bits_per_sample
282: ('x_resolution', None, 5, 1, None),
283: ('y_resolution', None, 5, 1, None),
284: ('planar_configuration', 1, 3, 1, TIFF_PLANARCONFIGS),
285: ('page_name', None, 2, None, None),
296: ('resolution_unit', 2, 4, 1, TIFF_RESUNITS),
305: ('software', None, 2, None, None),
306: ('datetime', None, 2, None, None),
315: ('artist', None, 2, None, None),
316: ('host_computer', None, 2, None, None),
317: ('predictor', 1, 3, 1, TIFF_PREDICTORS),
320: ('color_map', None, 3, None, None),
338: ('extra_samples', None, 3, None, TIFF_EXTRA_SAMPLES),
339: ('sample_format', 1, 3, 1, TIFF_SAMPLE_FORMATS),
33432: ('copyright', None, 2, None, None),
}
# Map custom TIFF tag codes to attribute names and import functions
CUSTOM_TAGS = {
33628: ('mm_uic1', read_mm_uic1),
33629: ('mm_uic2', read_mm_uic2),
33630: ('mm_uic3', read_mm_uic3),
33631: ('mm_uic4', read_mm_uic4),
34361: ('mm_header', read_mm_header),
34362: ('mm_stamp', read_mm_stamp),
34386: ('mm_user_block', None),
34412: ('cz_lsm_info', read_cz_lsm_info),
43314: ('nih_image_header', read_nih_image_header),
}
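# Each reader above takes (fd, byte_order, tag) and returns the decoded
# value; entries with a None reader (e.g. mm_user_block) are skipped by
# TIFFpage._fromfile and keep their raw 4-byte file content as value.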
def imshow(data, title=None, isrgb=True, vmin=0, vmax=None,
cmap=None, miniswhite=False, interpolation='bilinear',
dpi=96, figure=None, subplot=111, maxdim=4096, **kwargs):
"""Plot n-dimensional images using matplotlib.pyplot.
Return figure, subplot and plot axis.
Requires pyplot already imported ``from matplotlib import pyplot``.
Arguments
---------
isrgb : bool
If True, data will be displayed as RGB(A) images if possible.
miniswhite : bool
If True, gray scale palette will be inverted.
title : str
Window and subplot title.
figure : a matplotlib.figure.Figure instance (optional).
subplot : int
A matplotlib.pyplot.subplot axis.
maxdim : int
Maximum image size in any dimension.
Other arguments are same as for matplotlib.pyplot.imshow.
"""
data = data.squeeze()
data = data[(slice(0, maxdim), ) * len(data.shape)]
dims = len(data.shape)
if dims < 2:
raise ValueError("not an image")
if dims == 2:
dims = 0
isrgb = False
else:
if (isrgb and data.shape[-3] in (3, 4)):
data = numpy.swapaxes(data, -3, -2)
data = numpy.swapaxes(data, -2, -1)
elif (not isrgb and data.shape[-1] in (3, 4)):
data = numpy.swapaxes(data, -3, -1)
data = numpy.swapaxes(data, -2, -1)
isrgb = isrgb and data.shape[-1] in (3, 4)
dims -= 3 if isrgb else 2
datamax = data.max()
if data.dtype in (numpy.int8, numpy.int16, numpy.int32,
numpy.uint8, numpy.uint16, numpy.uint32):
for bits in (1, 2, 4, 8, 10, 12, 14, 16, 24, 32):
if datamax <= 2**bits:
datamax = 2**bits
break
if isrgb:
data *= (255.0 / datamax) # better use digitize()
data = data.astype('B')
elif isrgb:
data /= datamax
if not isrgb and vmax is None:
vmax = datamax
pyplot = sys.modules['matplotlib.pyplot']
if figure is None:
pyplot.rc('font', family='sans-serif', weight='normal', size=8)
figure = pyplot.figure(dpi=dpi, figsize=(10.3, 6.3), frameon=True,
facecolor='1.0', edgecolor='w')
try:
figure.canvas.manager.window.title(title)
except Exception:
pass
pyplot.subplots_adjust(bottom=0.03*(dims+2), top=0.925,
left=0.1, right=0.95, hspace=0.05, wspace=0.0)
subplot = pyplot.subplot(subplot)
if title:
pyplot.title(title, size=11)
if cmap is None:
cmap = pyplot.cm.binary if miniswhite else pyplot.cm.gray
image = pyplot.imshow(data[(0, ) * dims].squeeze(), vmin=vmin, vmax=vmax,
cmap=cmap, interpolation=interpolation, **kwargs)
if not isrgb:
pyplot.colorbar()
def format_coord(x, y):
"""Callback to format coordinate display in toolbar."""
x = int(x + 0.5)
y = int(y + 0.5)
try:
if dims:
return "%s @ %s [%4i, %4i]" % (cur_ax_dat[1][y, x],
current, x, y)
else:
return "%s @ [%4i, %4i]" % (data[y, x], x, y)
except IndexError:
return ""
pyplot.gca().format_coord = format_coord
if dims:
current = list((0, ) * dims)
cur_ax_dat = [0, data[tuple(current)].squeeze()]
sliders = [pyplot.Slider(
pyplot.axes([0.125, 0.03*(axis+1), 0.725, 0.025]),
'Dimension %i' % axis, 0, data.shape[axis]-1, 0, facecolor='0.5',
valfmt='%%.0f of %i' % data.shape[axis]) for axis in range(dims)]
for slider in sliders:
slider.drawon = False
def set_image(current, sliders=sliders, data=data):
"""Change image and redraw canvas."""
cur_ax_dat[1] = data[tuple(current)].squeeze()
image.set_data(cur_ax_dat[1])
for ctrl, index in zip(sliders, current):
ctrl.eventson = False
ctrl.set_val(index)
ctrl.eventson = True
figure.canvas.draw()
def on_changed(index, axis, data=data, image=image, figure=figure,
current=current):
"""Callback for slider change event."""
index = int(round(index))
cur_ax_dat[0] = axis
if index == current[axis]:
return
if index >= data.shape[axis]:
index = 0
elif index < 0:
index = data.shape[axis] - 1
current[axis] = index
set_image(current)
def on_keypressed(event, data=data, current=current):
"""Callback for key press event."""
key = event.key
axis = cur_ax_dat[0]
if str(key) in '0123456789':
on_changed(key, axis)
elif key == 'right':
on_changed(current[axis] + 1, axis)
elif key == 'left':
on_changed(current[axis] - 1, axis)
elif key == 'up':
cur_ax_dat[0] = 0 if axis == len(data.shape)-1 else axis + 1
elif key == 'down':
cur_ax_dat[0] = len(data.shape)-1 if axis == 0 else axis - 1
elif key == 'end':
on_changed(data.shape[axis] - 1, axis)
elif key == 'home':
on_changed(0, axis)
figure.canvas.mpl_connect('key_press_event', on_keypressed)
for axis, ctrl in enumerate(sliders):
ctrl.on_changed(lambda k, a=axis: on_changed(k, a))
return figure, subplot, image
def main(argv=None):
"""Command line usage main function."""
if float(sys.version[0:3]) < 2.5:
print "This script requires Python version 2.5 or better."
print "This is Python version %s" % sys.version
return 0
if argv is None:
argv = sys.argv
import re
import optparse
import matplotlib
matplotlib.use('TkAgg')
from matplotlib import pyplot
search_doc = lambda r, d: re.search(r, __doc__).group(1) if __doc__ else d
parser = optparse.OptionParser(
usage="usage: %prog [options] path",
description=search_doc("\n\n([^|]*?)\n\n", ''),
version="%%prog %s" % search_doc(":Version: (.*)", "Unknown"))
opt = parser.add_option
opt('-p', '--page', dest='page', type='int', default=-1,
help="display single page")
opt('--norgb', dest='norgb', action='store_true', default=False,
help="don't try display as RGB(A) color images")
opt('--nocolmap', dest='nocolmap', action='store_true', default=False,
help="don't apply color mapping to paletted images")
opt('--interpol', dest='interpol', metavar='INTERPOL', default='bilinear',
help="image interpolation method")
opt('--dpi', dest='dpi', type='int', default=96,
help="set plot resolution")
opt('--test', dest='test', action='store_true', default=False,
help="try read all images in path")
opt('--doctest', dest='doctest', action='store_true', default=False,
help="runs the internal tests")
opt('-v', '--verbose', dest='verbose', action='store_true', default=True)
opt('-q', '--quiet', dest='verbose', action='store_false')
settings, path = parser.parse_args()
path = ' '.join(path)
if settings.doctest:
import doctest
doctest.testmod()
return 0
if not path:
parser.error("No file specified")
if settings.test:
test_tifffile(path, settings.verbose)
return 0
print "Reading file structure...",
start = time.time()
tif = TIFFfile(path)
print "%.3f ms" % ((time.time()-start) * 1e3)
print "Reading image data... ",
start = time.time()
try:
if settings.page < 0:
img = tif.asarray(colormapped=not settings.nocolmap,
rgbonly=not settings.norgb)
else:
img = tif[settings.page].asarray(colormapped=not settings.nocolmap,
rgbonly=not settings.norgb)
except Exception:
tif.close()
raise
print "%.3f ms" % ((time.time()-start) * 1e3)
tif.close()
print "\nTIFF file:", tif
page = 0 if settings.page < 0 else settings.page
print "\nPAGE %i:" % page, tif[page]
page = tif[page]
print page.tags
if page.is_palette:
print "\nColor Map:", page.color_map.shape, page.color_map.dtype
for test, attr in (('is_lsm', 'cz_lsm_info'),
('is_stk', 'mm_uic_tags'),
('is_fluoview', 'mm_header'),
('is_nih', 'nih_image_header'),):
if getattr(page, test):
print "\n", attr.upper(), "\n", Record(getattr(page, attr))
imshow(img, title=', '.join((str(tif), str(tif[0]))),
miniswhite=page.photometric=='miniswhite',
interpolation=settings.interpol,
dpi=settings.dpi, isrgb=not settings.norgb)
pyplot.show()
# Documentation in HTML format can be generated with Epydoc
__docformat__ = "restructuredtext en"
if __name__ == "__main__":
sys.exit(main())
| gpl-2.0 | -4,618,642,335,877,910,000 | 30.828331 | 94 | 0.547769 | false |
ajbouh/tfi | tests/broken/from_tensor_test.py | 1 | 1816 | import unittest
from collections import OrderedDict
# impls = OrderedDict()
# # try:
# from tfi.driver.pytorch.tensor_codec import from_tensor as pytorch_from_tensor
# impls['pytorch'] = pytorch_from_tensor
# # except ImportError:
# # pass
# # try:
# from tfi.driver.tf.tensor_codec import from_tensor as tf_from_tensor
# impls['tf'] = tf_from_tensor
# # except ImportError:
# # pass
from functools import partialmethod
import tfi.tensor.codec
class FromTensorTest(unittest.TestCase):
pass
import numpy as np
import numpy.testing as npt
mappings = [
(np.int8, (None, None, 3), lambda x: 'int8 image'),
(np.float32, (None, None, 3), lambda x: 'float32 image'),
]
_FROM_TENSOR_FIXTURES = [
# (name, (tensor, xfrm), score),
('nothing', (None, mappings), None),
('image_int8', (np.ones([8,8,3], np.int8), mappings), 'int8 image'),
('image_float32', (np.ones([8,8,3], np.float32), mappings), 'float32 image'),
('image_float64', (np.ones([8,8,3], np.float64), mappings), None),
('arr_image_float32', (np.ones([1,8,8,3], np.float32), mappings), np.array(['float32 image'])),
('arr2_image_float32', (np.ones([2,8,8,3], np.float32), mappings), np.array(['float32 image', 'float32 image'])),
('arr_2d_image_float32', (np.ones([1,1,8,8,3], np.float32), mappings), np.array([['float32 image']])),
('arr2_2d_image_float32', (np.ones([2,1,8,8,3], np.float32), mappings), np.array([['float32 image'], ['float32 image']])),
]
for (name, args, expect) in _FROM_TENSOR_FIXTURES:
def do_test(self, args, expect):
result = tfi.tensor.codec.encode(*args)
npt.assert_array_equal(expect, result)
setattr(FromTensorTest,
'test_%s' % name,
partialmethod(do_test, args, expect))
if __name__ == '__main__':
unittest.main()
| mit | 6,053,532,841,810,745,000 | 32.018182 | 126 | 0.636564 | false |
dencaval/swift | test/probe/test_object_handoff.py | 13 | 12947 | #!/usr/bin/python -u
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest import main
from uuid import uuid4
import random
from hashlib import md5
from collections import defaultdict
from swiftclient import client
from swift.common import direct_client
from swift.common.exceptions import ClientException
from swift.common.manager import Manager
from test.probe.common import (kill_server, start_server, ReplProbeTest,
ECProbeTest, Body)
class TestObjectHandoff(ReplProbeTest):
def test_main(self):
# Create container
container = 'container-%s' % uuid4()
client.put_container(self.url, self.token, container,
headers={'X-Storage-Policy':
self.policy.name})
# Kill one container/obj primary server
cpart, cnodes = self.container_ring.get_nodes(self.account, container)
cnode = cnodes[0]
obj = 'object-%s' % uuid4()
opart, onodes = self.object_ring.get_nodes(
self.account, container, obj)
onode = onodes[0]
kill_server((onode['ip'], onode['port']),
self.ipport2server, self.pids)
# Create container/obj (goes to two primary servers and one handoff)
client.put_object(self.url, self.token, container, obj, 'VERIFY')
odata = client.get_object(self.url, self.token, container, obj)[-1]
if odata != 'VERIFY':
raise Exception('Object GET did not return VERIFY, instead it '
'returned: %s' % repr(odata))
# Kill other two container/obj primary servers
# to ensure GET handoff works
for node in onodes[1:]:
kill_server((node['ip'], node['port']),
self.ipport2server, self.pids)
# Indirectly through proxy assert we can get container/obj
odata = client.get_object(self.url, self.token, container, obj)[-1]
if odata != 'VERIFY':
raise Exception('Object GET did not return VERIFY, instead it '
'returned: %s' % repr(odata))
# Restart those other two container/obj primary servers
for node in onodes[1:]:
start_server((node['ip'], node['port']),
self.ipport2server, self.pids)
# We've indirectly verified the handoff node has the container/object,
# but let's directly verify it.
another_onode = next(self.object_ring.get_more_nodes(opart))
odata = direct_client.direct_get_object(
another_onode, opart, self.account, container, obj, headers={
'X-Backend-Storage-Policy-Index': self.policy.idx})[-1]
if odata != 'VERIFY':
raise Exception('Direct object GET did not return VERIFY, instead '
'it returned: %s' % repr(odata))
# Assert container listing (via proxy and directly) has container/obj
objs = [o['name'] for o in
client.get_container(self.url, self.token, container)[1]]
if obj not in objs:
raise Exception('Container listing did not know about object')
for cnode in cnodes:
objs = [o['name'] for o in
direct_client.direct_get_container(
cnode, cpart, self.account, container)[1]]
if obj not in objs:
raise Exception(
'Container server %s:%s did not know about object' %
(cnode['ip'], cnode['port']))
# Bring the first container/obj primary server back up
start_server((onode['ip'], onode['port']),
self.ipport2server, self.pids)
# Assert that it doesn't have container/obj yet
try:
direct_client.direct_get_object(
onode, opart, self.account, container, obj, headers={
'X-Backend-Storage-Policy-Index': self.policy.idx})
except ClientException as err:
self.assertEqual(err.http_status, 404)
else:
self.fail("Expected ClientException but didn't get it")
# Run object replication, ensuring we run the handoff node last so it
# will remove its extra handoff partition
for node in onodes:
try:
port_num = node['replication_port']
except KeyError:
port_num = node['port']
node_id = (port_num - 6000) / 10
Manager(['object-replicator']).once(number=node_id)
try:
another_port_num = another_onode['replication_port']
except KeyError:
another_port_num = another_onode['port']
another_num = (another_port_num - 6000) / 10
Manager(['object-replicator']).once(number=another_num)
# Assert the first container/obj primary server now has container/obj
odata = direct_client.direct_get_object(
onode, opart, self.account, container, obj, headers={
'X-Backend-Storage-Policy-Index': self.policy.idx})[-1]
if odata != 'VERIFY':
raise Exception('Direct object GET did not return VERIFY, instead '
'it returned: %s' % repr(odata))
# Assert the handoff server no longer has container/obj
try:
direct_client.direct_get_object(
another_onode, opart, self.account, container, obj, headers={
'X-Backend-Storage-Policy-Index': self.policy.idx})
except ClientException as err:
self.assertEqual(err.http_status, 404)
else:
self.fail("Expected ClientException but didn't get it")
# Kill the first container/obj primary server again (we have two
# primaries and the handoff up now)
kill_server((onode['ip'], onode['port']),
self.ipport2server, self.pids)
# Delete container/obj
try:
client.delete_object(self.url, self.token, container, obj)
except client.ClientException as err:
if self.object_ring.replica_count > 2:
raise
# Object DELETE returning 503 for (404, 204)
# remove this with fix for
# https://bugs.launchpad.net/swift/+bug/1318375
self.assertEqual(503, err.http_status)
# Assert we can't head container/obj
try:
client.head_object(self.url, self.token, container, obj)
except client.ClientException as err:
self.assertEqual(err.http_status, 404)
else:
self.fail("Expected ClientException but didn't get it")
# Assert container/obj is not in the container listing, both indirectly
# and directly
objs = [o['name'] for o in
client.get_container(self.url, self.token, container)[1]]
if obj in objs:
raise Exception('Container listing still knew about object')
for cnode in cnodes:
objs = [o['name'] for o in
direct_client.direct_get_container(
cnode, cpart, self.account, container)[1]]
if obj in objs:
raise Exception(
'Container server %s:%s still knew about object' %
(cnode['ip'], cnode['port']))
# Restart the first container/obj primary server again
start_server((onode['ip'], onode['port']),
self.ipport2server, self.pids)
# Assert it still has container/obj
direct_client.direct_get_object(
onode, opart, self.account, container, obj, headers={
'X-Backend-Storage-Policy-Index': self.policy.idx})
# Run object replication, ensuring we run the handoff node last so it
# will remove its extra handoff partition
for node in onodes:
try:
port_num = node['replication_port']
except KeyError:
port_num = node['port']
node_id = (port_num - 6000) / 10
Manager(['object-replicator']).once(number=node_id)
another_node_id = (another_port_num - 6000) / 10
Manager(['object-replicator']).once(number=another_node_id)
# Assert the handoff node no longer has container/obj
try:
direct_client.direct_get_object(
another_onode, opart, self.account, container, obj, headers={
'X-Backend-Storage-Policy-Index': self.policy.idx})
except ClientException as err:
self.assertEqual(err.http_status, 404)
else:
self.fail("Expected ClientException but didn't get it")
class TestECObjectHandoffOverwrite(ECProbeTest):
def get_object(self, container_name, object_name):
headers, body = client.get_object(self.url, self.token,
container_name,
object_name,
resp_chunk_size=64 * 2 ** 10)
resp_checksum = md5()
for chunk in body:
resp_checksum.update(chunk)
return resp_checksum.hexdigest()
def test_ec_handoff_overwrite(self):
container_name = 'container-%s' % uuid4()
object_name = 'object-%s' % uuid4()
# create EC container
headers = {'X-Storage-Policy': self.policy.name}
client.put_container(self.url, self.token, container_name,
headers=headers)
# PUT object
old_contents = Body()
client.put_object(self.url, self.token, container_name,
object_name, contents=old_contents)
# get our node lists
opart, onodes = self.object_ring.get_nodes(
self.account, container_name, object_name)
# shutdown one of the primary data nodes
failed_primary = random.choice(onodes)
failed_primary_device_path = self.device_dir('object', failed_primary)
self.kill_drive(failed_primary_device_path)
# overwrite our object with some new data
new_contents = Body()
client.put_object(self.url, self.token, container_name,
object_name, contents=new_contents)
self.assertNotEqual(new_contents.etag, old_contents.etag)
# restore failed primary device
self.revive_drive(failed_primary_device_path)
# sanity - failed node has old contents
req_headers = {'X-Backend-Storage-Policy-Index': int(self.policy)}
headers = direct_client.direct_head_object(
failed_primary, opart, self.account, container_name,
object_name, headers=req_headers)
self.assertEqual(headers['X-Object-Sysmeta-EC-Etag'],
old_contents.etag)
# we have 1 primary with wrong old etag, and we should have 5 with
# new etag plus a handoff with the new etag, so killing 2 other
# primaries forces proxy to try to GET from all primaries plus handoff.
other_nodes = [n for n in onodes if n != failed_primary]
random.shuffle(other_nodes)
for node in other_nodes[:2]:
self.kill_drive(self.device_dir('object', node))
# sanity, after taking out two primaries we should be down to
# only four primaries, one of which has the old etag - but we
# also have a handoff with the new etag out there
found_frags = defaultdict(int)
req_headers = {'X-Backend-Storage-Policy-Index': int(self.policy)}
for node in onodes + list(self.object_ring.get_more_nodes(opart)):
try:
headers = direct_client.direct_head_object(
node, opart, self.account, container_name,
object_name, headers=req_headers)
except Exception:
continue
found_frags[headers['X-Object-Sysmeta-EC-Etag']] += 1
self.assertEqual(found_frags, {
new_contents.etag: 4, # this should be enough to rebuild!
old_contents.etag: 1,
})
# clear node error limiting
Manager(['proxy']).restart()
resp_etag = self.get_object(container_name, object_name)
self.assertEqual(resp_etag, new_contents.etag)
if __name__ == '__main__':
main()
| apache-2.0 | -4,243,140,290,677,483,500 | 41.729373 | 79 | 0.590175 | false |
Mhynlo/SickRage | sickbeard/nzbget.py | 6 | 6623 | # coding=utf-8
# Author: Nic Wolfe <[email protected]>
# URL: https://sickrage.github.io
# Git: https://github.com/SickRage/SickRage.git
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
import datetime
import httplib
import xmlrpclib
from base64 import standard_b64encode
import sickbeard
from sickbeard import logger
from sickbeard.common import Quality
from sickrage.helper.common import try_int
def sendNZB(nzb, proper=False): # pylint: disable=too-many-locals, too-many-statements, too-many-branches, too-many-return-statements
'''
Sends NZB to NZBGet client
:param nzb: nzb object
:param proper: True if this is a Proper download, False if not. Defaults to False
'''
if sickbeard.NZBGET_HOST is None:
logger.log('No NZBget host found in configuration. Please configure it.', logger.WARNING)
return False
addToTop = False
nzbgetprio = 0
category = sickbeard.NZBGET_CATEGORY
if nzb.show.is_anime:
category = sickbeard.NZBGET_CATEGORY_ANIME
url = 'http{0}://{1}:{2}@{3}/xmlrpc'.format(
's' if sickbeard.NZBGET_USE_HTTPS else '',
sickbeard.NZBGET_USERNAME,
sickbeard.NZBGET_PASSWORD,
sickbeard.NZBGET_HOST)
nzbGetRPC = xmlrpclib.ServerProxy(url)
try:
if nzbGetRPC.writelog('INFO', 'SickRage connected to drop off {0} any moment now.'.format(nzb.name + '.nzb')):
logger.log('Successful connected to NZBget', logger.DEBUG)
else:
logger.log('Successful connected to NZBget, but unable to send a message', logger.WARNING)
except httplib.socket.error:
logger.log(
'Please check your NZBget host and port (if it is running). NZBget is not responding to this combination',
logger.WARNING)
return False
except xmlrpclib.ProtocolError as e:
if e.errmsg == 'Unauthorized':
logger.log('NZBget username or password is incorrect.', logger.WARNING)
else:
logger.log('Protocol Error: ' + e.errmsg, logger.ERROR)
return False
dupekey = ''
dupescore = 0
# if it aired recently make it high priority and generate DupeKey/Score
for curEp in nzb.episodes:
if dupekey == '':
if curEp.show.indexer == 1:
dupekey = 'SickRage-' + str(curEp.show.indexerid)
elif curEp.show.indexer == 2:
dupekey = 'SickRage-tvr' + str(curEp.show.indexerid)
dupekey += '-' + str(curEp.season) + '.' + str(curEp.episode)
if datetime.date.today() - curEp.airdate <= datetime.timedelta(days=7):
addToTop = True
nzbgetprio = sickbeard.NZBGET_PRIORITY
else:
category = sickbeard.NZBGET_CATEGORY_BACKLOG
if nzb.show.is_anime:
category = sickbeard.NZBGET_CATEGORY_ANIME_BACKLOG
if nzb.quality != Quality.UNKNOWN:
dupescore = nzb.quality * 100
if proper:
dupescore += 10
nzbcontent64 = None
if nzb.resultType == 'nzbdata':
data = nzb.extraInfo[0]
nzbcontent64 = standard_b64encode(data)
logger.log('Sending NZB to NZBget')
logger.log('URL: ' + url, logger.DEBUG)
try:
# Find out if nzbget supports priority (version 9.0+); older 0.x versions use the old command
nzbget_version_str = nzbGetRPC.version()
nzbget_version = try_int(nzbget_version_str[:nzbget_version_str.find('.')])
if nzbget_version == 0:
if nzbcontent64:
nzbget_result = nzbGetRPC.append(nzb.name + '.nzb', category, addToTop, nzbcontent64)
else:
if nzb.resultType == 'nzb':
if not nzb.provider.login():
return False
data = nzb.provider.get_url(nzb.url, returns='content')
if data is None:
return False
nzbcontent64 = standard_b64encode(data)
nzbget_result = nzbGetRPC.append(nzb.name + '.nzb', category, addToTop, nzbcontent64)
elif nzbget_version == 12:
if nzbcontent64 is not None:
nzbget_result = nzbGetRPC.append(nzb.name + '.nzb', category, nzbgetprio, False,
nzbcontent64, False, dupekey, dupescore, 'score')
else:
nzbget_result = nzbGetRPC.appendurl(nzb.name + '.nzb', category, nzbgetprio, False,
nzb.url, False, dupekey, dupescore, 'score')
# v13+ has a new combined append method that accepts both (url and content)
# also the return value has changed from boolean to integer
# (Positive number representing NZBID of the queue item. 0 and negative numbers represent error codes.)
elif nzbget_version >= 13:
nzbget_result = nzbGetRPC.append(nzb.name + '.nzb', nzbcontent64 if nzbcontent64 is not None else nzb.url,
category, nzbgetprio, False, False, dupekey, dupescore,
'score') > 0
else:
if nzbcontent64 is not None:
nzbget_result = nzbGetRPC.append(nzb.name + '.nzb', category, nzbgetprio, False,
nzbcontent64)
else:
nzbget_result = nzbGetRPC.appendurl(nzb.name + '.nzb', category, nzbgetprio, False,
nzb.url)
if nzbget_result:
logger.log('NZB sent to NZBget successfully', logger.DEBUG)
return True
else:
logger.log('NZBget could not add {0} to the queue'.format(nzb.name + '.nzb'), logger.WARNING)
return False
except Exception:
logger.log('Connect Error to NZBget: could not add {0} to the queue'.format(nzb.name + '.nzb'), logger.WARNING)
return False
| gpl-3.0 | -8,730,518,280,707,319,000 | 41.184713 | 134 | 0.611958 | false |
santisiri/popego | envs/ALPHA-POPEGO/lib/python2.5/site-packages/twisted/mail/mail.py | 1 | 8791 | # -*- test-case-name: twisted.mail.test.test_mail -*-
# Copyright (c) 2001-2004 Twisted Matrix Laboratories.
# See LICENSE for details.
"""Mail support for twisted python.
"""
# Twisted imports
from twisted.internet import defer
from twisted.application import service, internet
from twisted.python import util
from twisted.python import log
from twisted import cred
import twisted.cred.portal
# Sibling imports
from twisted.mail import protocols, smtp
# System imports
import os
from zope.interface import implements, Interface
class DomainWithDefaultDict:
'''Simulate a dictionary with a default value for non-existing keys.
'''
def __init__(self, domains, default):
self.domains = domains
self.default = default
def setDefaultDomain(self, domain):
self.default = domain
def has_key(self, name):
return 1
def fromkeys(klass, keys, value=None):
d = klass({}, value)
for k in keys:
d[k] = value
return d
fromkeys = classmethod(fromkeys)
def __contains__(self, name):
return 1
def __getitem__(self, name):
return self.domains.get(name, self.default)
def __setitem__(self, name, value):
self.domains[name] = value
def __delitem__(self, name):
del self.domains[name]
def __iter__(self):
return iter(self.domains)
def __len__(self):
return len(self.domains)
def __str__(self):
return '<DomainWithDefaultDict %s>' % (self.domains,)
def __repr__(self):
return 'DomainWithDefaultDict(%s)' % (self.domains,)
def get(self, key, default=None):
return self.domains.get(key, default)
def copy(self):
return DomainWithDefaultDict(self.domains.copy(), self.default)
def iteritems(self):
return self.domains.iteritems()
def iterkeys(self):
return self.domains.iterkeys()
def itervalues(self):
return self.domains.itervalues()
def keys(self):
return self.domains.keys()
def values(self):
return self.domains.values()
def items(self):
return self.domains.items()
def popitem(self):
return self.domains.popitem()
def update(self, other):
return self.domains.update(other)
def clear(self):
return self.domains.clear()
def setdefault(self, key, default):
return self.domains.setdefault(key, default)
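# A short usage sketch (illustrative, names assumed): lookups for unknown
# domains fall back to the default, typically a BounceDomain.
#
#   domains = DomainWithDefaultDict({'example.com': someDomain}, BounceDomain())
#   domains['example.com']    # -> someDomain
#   domains['elsewhere.org']  # -> the BounceDomain instance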
class IDomain(Interface):
"""An email domain."""
def exists(user):
"""
Check whether or not the specified user exists in this domain.
@type user: C{twisted.protocols.smtp.User}
@param user: The user to check
@rtype: No-argument callable
@return: A C{Deferred} which becomes, or a callable which
takes no arguments and returns an object implementing C{IMessage}.
This will be called and the returned object used to deliver the
message when it arrives.
@raise twisted.protocols.smtp.SMTPBadRcpt: Raised if the given
user does not exist in this domain.
"""
def addUser(user, password):
"""Add a username/password to this domain."""
def startMessage(user):
"""Create and return a new message to be delivered to the given user.
DEPRECATED. Implement validateTo() correctly instead.
"""
def getCredentialsCheckers():
"""Return a list of ICredentialsChecker implementors for this domain.
"""
class IAliasableDomain(IDomain):
def setAliasGroup(aliases):
"""Set the group of defined aliases for this domain
@type aliases: C{dict}
@param aliases: Mapping of domain names to objects implementing
C{IAlias}
"""
def exists(user, memo=None):
"""
Check whether or not the specified user exists in this domain.
@type user: C{twisted.protocols.smtp.User}
@param user: The user to check
@type memo: C{dict}
@param memo: A record of the addresses already considered while
resolving aliases. The default value should be used by all
external code.
@rtype: No-argument callable
        @return: A no-argument callable which returns an object implementing
            C{IMessage}, or a C{Deferred} which fires with such a callable.
            This will be called and the returned object used to deliver the
            message when it arrives.
@raise twisted.protocols.smtp.SMTPBadRcpt: Raised if the given
user does not exist in this domain.
"""
class BounceDomain:
"""A domain in which no user exists.
This can be used to block off certain domains.
"""
implements(IDomain)
def exists(self, user):
raise smtp.SMTPBadRcpt(user)
def willRelay(self, user, protocol):
return False
def addUser(self, user, password):
pass
def startMessage(self, user):
raise AssertionError, "No code should ever call this method for any reason"
def getCredentialsCheckers(self):
return []
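# Illustrative sketch (not part of the original module; the domain name is
# hypothetical): an unknown key on a DomainWithDefaultDict resolves to the
# default, so a BounceDomain default rejects mail for unconfigured domains.
def _exampleDefaultDomainUsage():
    domains = DomainWithDefaultDict({}, BounceDomain())
    bouncer = domains['unconfigured.example.com']
    return bouncer  # bouncer.exists(user) would raise smtp.SMTPBadRcpt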
class FileMessage:
"""A file we can write an email too."""
implements(smtp.IMessage)
def __init__(self, fp, name, finalName):
self.fp = fp
self.name = name
self.finalName = finalName
def lineReceived(self, line):
self.fp.write(line+'\n')
def eomReceived(self):
self.fp.close()
os.rename(self.name, self.finalName)
return defer.succeed(self.finalName)
def connectionLost(self):
self.fp.close()
os.remove(self.name)
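# Minimal sketch (hypothetical file names, for illustration only): a message is
# spooled to a temporary file and only renamed into place by eomReceived(), so
# readers never observe a partially written message.
def _exampleFileMessage():
    fp = open('msg.tmp', 'w')
    message = FileMessage(fp, 'msg.tmp', 'msg.final')
    message.lineReceived('Subject: hello')
    return message.eomReceived()  # closes and renames msg.tmp -> msg.final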
class MailService(service.MultiService):
"""An email service."""
queue = None
domains = None
portals = None
aliases = None
smtpPortal = None
def __init__(self):
service.MultiService.__init__(self)
# Domains and portals for "client" protocols - POP3, IMAP4, etc
self.domains = DomainWithDefaultDict({}, BounceDomain())
self.portals = {}
self.monitor = FileMonitoringService()
self.monitor.setServiceParent(self)
self.smtpPortal = cred.portal.Portal(self)
def getPOP3Factory(self):
return protocols.POP3Factory(self)
def getSMTPFactory(self):
return protocols.SMTPFactory(self, self.smtpPortal)
def getESMTPFactory(self):
return protocols.ESMTPFactory(self, self.smtpPortal)
def addDomain(self, name, domain):
portal = cred.portal.Portal(domain)
map(portal.registerChecker, domain.getCredentialsCheckers())
self.domains[name] = domain
self.portals[name] = portal
if self.aliases and IAliasableDomain.providedBy(domain):
domain.setAliasGroup(self.aliases)
def setQueue(self, queue):
"""Set the queue for outgoing emails."""
self.queue = queue
def requestAvatar(self, avatarId, mind, *interfaces):
if smtp.IMessageDelivery in interfaces:
a = protocols.ESMTPDomainDelivery(self, avatarId)
return smtp.IMessageDelivery, a, lambda: None
raise NotImplementedError()
def lookupPortal(self, name):
return self.portals[name]
def defaultPortal(self):
return self.portals['']
class FileMonitoringService(internet.TimerService):
def __init__(self):
self.files = []
self.intervals = iter(util.IntervalDifferential([], 60))
def startService(self):
service.Service.startService(self)
self._setupMonitor()
def _setupMonitor(self):
from twisted.internet import reactor
t, self.index = self.intervals.next()
self._call = reactor.callLater(t, self._monitor)
def stopService(self):
service.Service.stopService(self)
if self._call:
self._call.cancel()
self._call = None
def monitorFile(self, name, callback, interval=10):
try:
mtime = os.path.getmtime(name)
        except OSError:
mtime = 0
self.files.append([interval, name, callback, mtime])
self.intervals.addInterval(interval)
def unmonitorFile(self, name):
for i in range(len(self.files)):
if name == self.files[i][1]:
self.intervals.removeInterval(self.files[i][0])
del self.files[i]
break
def _monitor(self):
self._call = None
if self.index is not None:
name, callback, mtime = self.files[self.index][1:]
try:
now = os.path.getmtime(name)
            except OSError:
now = 0
if now > mtime:
log.msg("%s changed, notifying listener" % (name,))
self.files[self.index][3] = now
callback(name)
self._setupMonitor()
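# Sketch (hypothetical path and callback): poll a file's mtime on an interval
# and invoke the callback with the file name whenever it changes.
def _exampleFileMonitoring():
    service = FileMonitoringService()
    service.monitorFile('/etc/aliases',
                        lambda name: log.msg('%s changed' % (name,)),
                        interval=30)
    return service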
| bsd-3-clause | 8,406,159,392,092,091,000 | 26.644654 | 83 | 0.625526 | false |
spaceninja/mltshp | test/functional/request_invitation_tests.py | 1 | 4896 | import test.base
from models import User, Shake, Notification, ShakeManager
from tornado.escape import json_decode
class RequestInvitationTests(test.base.BaseAsyncTestCase):
def setUp(self):
super(RequestInvitationTests, self).setUp()
self.user = User(name='joe', email='[email protected]', email_confirmed=1)
self.user.set_password('asdfasdf')
self.user.save()
self.sign_in("joe", "asdfasdf")
self.manager = User(name='admin', email='[email protected]', email_confirmed=1)
self.manager.set_password('asdfasdf')
self.manager.save()
self.shake = Shake(user_id=self.manager.id, type='group', title="derp", name='derp')
self.shake.save()
def test_posting_request_creates_request(self):
response = self.post_url('/shake/derp/request_invitation?json=1')
j_response = json_decode(response.body)
self.assertEqual(j_response['status'], 'ok')
no = Notification.all()[0]
self.assertEqual(no.sender_id, self.user.id)
self.assertEqual(no.receiver_id, self.manager.id)
self.assertEqual(no.action_id, self.shake.id)
def test_cannot_request_after_one_already_exists(self):
response = self.post_url('/shake/derp/request_invitation?json=1')
j_response = json_decode(response.body)
self.assertEqual(j_response['status'], 'ok')
response = self.post_url('/shake/derp/request_invitation?json=1')
j_response = json_decode(response.body)
self.assertEqual(j_response['status'], 'error')
def test_posting_request_doesnt_recreate_request(self):
response = self.post_url('/shake/derp/request_invitation?json=1')
j_response = json_decode(response.body)
self.assertEqual(j_response['status'], 'ok')
no = Notification.all()[0]
self.assertEqual(no.sender_id, self.user.id)
self.assertEqual(no.receiver_id, self.manager.id)
self.assertEqual(no.action_id, self.shake.id)
response = self.post_url('/shake/derp/request_invitation?json=1')
self.assertEqual(len(Notification.all()), 1)
def test_no_button_shows_when_request_has_been_made(self):
response = self.post_url('/shake/derp/request_invitation?json=1')
response = self.fetch_url('/derp')
self.assertTrue(response.body.find('/request_invitation') == -1)
def test_shake_manager_gets_notification_created(self):
response = self.post_url('/shake/derp/request_invitation?json=1')
n = Notification.get('receiver_id = %s', self.manager.id)
self.assertEqual(n.sender_id, self.user.id)
self.assertEqual(n.action_id, self.shake.id)
def test_shake_accept_request_creates_editor(self):
self.post_url('/shake/derp/request_invitation?json=1')
self.sign_in("admin", "asdfasdf")
response = self.post_url('/shake/derp/approve_invitation?json=1', arguments={'user_id':self.user.id})
manager = ShakeManager.get('user_id = %s', self.user.id)
self.assertTrue(manager)
def test_shake_accept_request_deletes_notification(self):
self.post_url('/shake/derp/request_invitation?json=1')
self.sign_in("admin", "asdfasdf")
response = self.post_url('/shake/derp/approve_invitation?json=1', arguments={'user_id' : self.user.id})
n = Notification.get('receiver_id = %s', self.manager.id)
self.assertTrue(n.deleted)
def test_shake_accept_request_creates_notification(self):
self.post_url('/shake/derp/request_invitation?json=1')
self.sign_in("admin", "asdfasdf")
response = self.post_url('/shake/derp/approve_invitation?json=1', arguments={'user_id' : self.user.id})
n = Notification.get('receiver_id = %s and type=%s', self.manager.id, "invitation_request")
self.assertTrue(n.deleted)
n = Notification.get('receiver_id = %s and type=%s', self.user.id, "invitation_approved")
self.assertTrue(n)
def test_shake_decline_request_deletes_notification(self):
self.post_url('/shake/derp/request_invitation?json=1')
self.sign_in("admin", "asdfasdf")
response = self.post_url('/shake/derp/decline_invitation?json=1', arguments={'user_id':self.user.id})
manager = ShakeManager.get('user_id = %s', self.user.id)
self.assertFalse(manager)
n = Notification.get('receiver_id = %s', self.manager.id)
self.assertTrue(n.deleted)
def test_already_a_member_do_not_see_request_button(self):
self.shake.add_manager(self.user)
response = self.fetch_url('/derp')
self.assertTrue(response.body.find('join this shake') == -1)
| mpl-2.0 | -2,256,360,406,225,291,000 | 41.947368 | 111 | 0.629289 | false |
Azure/azure-sdk-for-python | sdk/iothub/azure-mgmt-iothub/azure/mgmt/iothub/v2018_01_22/_iot_hub_client.py | 1 | 4433 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
from azure.mgmt.core import ARMPipelineClient
from msrest import Deserializer, Serializer
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Optional
from azure.core.credentials import TokenCredential
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from ._configuration import IotHubClientConfiguration
from .operations import Operations
from .operations import IotHubResourceOperations
from .operations import CertificatesOperations
from . import models
class IotHubClient(object):
"""Use this API to manage the IoT hubs in your Azure subscription.
:ivar operations: Operations operations
:vartype operations: azure.mgmt.iothub.v2018_01_22.operations.Operations
:ivar iot_hub_resource: IotHubResourceOperations operations
:vartype iot_hub_resource: azure.mgmt.iothub.v2018_01_22.operations.IotHubResourceOperations
:ivar certificates: CertificatesOperations operations
:vartype certificates: azure.mgmt.iothub.v2018_01_22.operations.CertificatesOperations
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials.TokenCredential
:param subscription_id: The subscription identifier.
:type subscription_id: str
:param str base_url: Service URL
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
"""
def __init__(
self,
credential, # type: "TokenCredential"
subscription_id, # type: str
base_url=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> None
if not base_url:
base_url = 'https://management.azure.com'
self._config = IotHubClientConfiguration(credential, subscription_id, **kwargs)
self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._serialize.client_side_validation = False
self._deserialize = Deserializer(client_models)
self.operations = Operations(
self._client, self._config, self._serialize, self._deserialize)
self.iot_hub_resource = IotHubResourceOperations(
self._client, self._config, self._serialize, self._deserialize)
self.certificates = CertificatesOperations(
self._client, self._config, self._serialize, self._deserialize)
def _send_request(self, http_request, **kwargs):
# type: (HttpRequest, Any) -> HttpResponse
"""Runs the network request through the client's chained policies.
:param http_request: The network request you want to make. Required.
:type http_request: ~azure.core.pipeline.transport.HttpRequest
:keyword bool stream: Whether the response payload will be streamed. Defaults to True.
:return: The response of your network call. Does not do error handling on your response.
:rtype: ~azure.core.pipeline.transport.HttpResponse
"""
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
http_request.url = self._client.format_url(http_request.url, **path_format_arguments)
stream = kwargs.pop("stream", True)
pipeline_response = self._client._pipeline.run(http_request, stream=stream, **kwargs)
return pipeline_response.http_response
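    # Illustrative call (a sketch, not generated code; the relative URL is
    # hypothetical and HttpRequest is azure.core.pipeline.transport.HttpRequest):
    #     request = HttpRequest("GET", "/providers/Microsoft.Devices/operations")
    #     response = client._send_request(request)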
def close(self):
# type: () -> None
self._client.close()
def __enter__(self):
# type: () -> IotHubClient
self._client.__enter__()
return self
def __exit__(self, *exc_details):
# type: (Any) -> None
self._client.__exit__(*exc_details)
| mit | 1,720,994,700,976,503,800 | 43.777778 | 129 | 0.671103 | false |
OmeGak/indico | indico/modules/events/abstracts/clone.py | 2 | 3771 | # This file is part of Indico.
# Copyright (C) 2002 - 2019 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from __future__ import unicode_literals
from indico.core.db import db
from indico.core.db.sqlalchemy.util.models import get_simple_column_attrs
from indico.core.db.sqlalchemy.util.session import no_autoflush
from indico.modules.events.abstracts.models.email_templates import AbstractEmailTemplate
from indico.modules.events.abstracts.models.review_questions import AbstractReviewQuestion
from indico.modules.events.abstracts.settings import abstracts_reviewing_settings, abstracts_settings, boa_settings
from indico.modules.events.cloning import EventCloner
from indico.modules.events.models.events import EventType
from indico.util.i18n import _
class AbstractSettingsCloner(EventCloner):
name = 'abstracts_settings'
friendly_name = _('Call for Abstracts (settings, email templates, review questions)')
requires = {'contribution_types', 'tracks'}
@property
def is_visible(self):
return self.old_event.type_ == EventType.conference
@no_autoflush
def run(self, new_event, cloners, shared_data):
self._contrib_type_id_map = {old.id: new.id
for old, new in shared_data['contribution_types']['contrib_type_map'].iteritems()}
self._track_id_map = {old.id: new.id for old, new in shared_data['tracks']['track_map'].iteritems()}
self._clone_settings(new_event)
self._clone_email_templates(new_event)
self._clone_review_questions(new_event)
db.session.flush()
def _clone_settings(self, new_event):
old_settings = abstracts_settings.get_all(self.old_event, no_defaults=True)
offset = new_event.start_dt - self.old_event.start_dt
for key in ('start_dt', 'end_dt', 'modification_end_dt'):
if not old_settings.get(key):
continue
old_settings[key] += offset
abstracts_settings.set_multi(new_event, old_settings)
abstracts_reviewing_settings.set_multi(new_event, abstracts_reviewing_settings.get_all(self.old_event,
no_defaults=True))
boa_settings.set_multi(new_event, boa_settings.get_all(self.old_event, no_defaults=True))
def _clone_email_templates(self, new_event):
attrs = get_simple_column_attrs(AbstractEmailTemplate) - {'rules'}
for old_tpl in self.old_event.abstract_email_templates:
tpl = AbstractEmailTemplate()
tpl.populate_from_attrs(old_tpl, attrs)
tpl.rules = filter(None, map(self._clone_email_template_rule, old_tpl.rules))
new_event.abstract_email_templates.append(tpl)
def _clone_email_template_rule(self, old_rule):
rule = {'state': old_rule['state']}
if 'track' in old_rule:
try:
rule['track'] = [self._track_id_map[t] for t in old_rule['track']]
except KeyError:
return None
if 'contribution_type' in old_rule:
try:
rule['contribution_type'] = [self._contrib_type_id_map[ct] for ct in old_rule['contribution_type']]
except KeyError:
return None
return rule
def _clone_review_questions(self, new_event):
attrs = get_simple_column_attrs(AbstractReviewQuestion)
for old_question in self.old_event.abstract_review_questions:
question = AbstractReviewQuestion()
question.populate_from_attrs(old_question, attrs)
new_event.abstract_review_questions.append(question)
| mit | -7,078,245,616,350,351,000 | 46.734177 | 119 | 0.655794 | false |
jeremiedecock/snippets | python/pyqt/pyqt5/widget_QPlainTextEdit.py | 1 | 1297 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# See http://doc.qt.io/qt-5/qtextedit.html#details
import sys
from PyQt5.QtWidgets import QApplication, QWidget, QPlainTextEdit, QPushButton, QVBoxLayout
class Window(QWidget):
def __init__(self):
super().__init__()
# Make widgets #################
self.edit = QPlainTextEdit()
self.btn = QPushButton("Print")
self.edit.setPlaceholderText("Type something here and press the 'Print' button")
# Set button slot ##############
self.btn.clicked.connect(self.printText)
# Set the layout ###############
vbox = QVBoxLayout()
vbox.addWidget(self.edit)
vbox.addWidget(self.btn)
self.setLayout(vbox)
def printText(self):
print(self.edit.toPlainText())
if __name__ == '__main__':
app = QApplication(sys.argv)
window = Window()
window.show()
# The mainloop of the application. The event handling starts from this point.
    # The exec_() method has a trailing underscore because exec is a Python keyword.
exit_code = app.exec_()
# The sys.exit() method ensures a clean exit.
    # The environment will be informed how the application ended.
sys.exit(exit_code)
| mit | 8,983,961,969,254,621,000 | 24.431373 | 123 | 0.615266 | false |
salimfadhley/jenkinsapi | jenkinsapi_tests/systests/test_credentials.py | 3 | 3949 | """
System tests for `jenkinsapi.jenkins` module.
"""
import logging
import pytest
from jenkinsapi_tests.test_utils.random_strings import random_string
from jenkinsapi.credentials import Credentials
from jenkinsapi.credentials import UsernamePasswordCredential
from jenkinsapi.credentials import SecretTextCredential
from jenkinsapi.credential import SSHKeyCredential
log = logging.getLogger(__name__)
def test_get_credentials(jenkins):
creds = jenkins.credentials
assert isinstance(creds, Credentials) is True
def test_delete_inexistant_credential(jenkins):
with pytest.raises(KeyError):
creds = jenkins.credentials
del creds[random_string()]
def test_create_user_pass_credential(jenkins):
creds = jenkins.credentials
cred_descr = random_string()
cred_dict = {
'description': cred_descr,
'userName': 'userName',
'password': 'password'
}
creds[cred_descr] = UsernamePasswordCredential(cred_dict)
assert cred_descr in creds
cred = creds[cred_descr]
assert isinstance(cred, UsernamePasswordCredential) is True
assert cred.password is None
assert cred.description == cred_descr
del creds[cred_descr]
def test_update_user_pass_credential(jenkins):
creds = jenkins.credentials
cred_descr = random_string()
cred_dict = {
'description': cred_descr,
'userName': 'userName',
'password': 'password'
}
creds[cred_descr] = UsernamePasswordCredential(cred_dict)
cred = creds[cred_descr]
cred.userName = 'anotheruser'
cred.password = 'password2'
cred = creds[cred_descr]
assert isinstance(cred, UsernamePasswordCredential) is True
assert cred.userName == 'anotheruser'
assert cred.password == 'password2'
def test_create_ssh_credential(jenkins):
creds = jenkins.credentials
cred_descr = random_string()
cred_dict = {
'description': cred_descr,
'userName': 'userName',
'passphrase': '',
'private_key': '-----BEGIN RSA PRIVATE KEY-----'
}
creds[cred_descr] = SSHKeyCredential(cred_dict)
assert cred_descr in creds
cred = creds[cred_descr]
assert isinstance(cred, SSHKeyCredential) is True
assert cred.description == cred_descr
del creds[cred_descr]
cred_dict = {
'description': cred_descr,
'userName': 'userName',
'passphrase': '',
'private_key': '/tmp/key'
}
with pytest.raises(ValueError):
creds[cred_descr] = SSHKeyCredential(cred_dict)
cred_dict = {
'description': cred_descr,
'userName': 'userName',
'passphrase': '',
'private_key': '~/.ssh/key'
}
with pytest.raises(ValueError):
creds[cred_descr] = SSHKeyCredential(cred_dict)
cred_dict = {
'description': cred_descr,
'userName': 'userName',
'passphrase': '',
'private_key': 'invalid'
}
with pytest.raises(ValueError):
creds[cred_descr] = SSHKeyCredential(cred_dict)
def test_delete_credential(jenkins):
creds = jenkins.credentials
cred_descr = random_string()
cred_dict = {
'description': cred_descr,
'userName': 'userName',
'password': 'password'
}
creds[cred_descr] = UsernamePasswordCredential(cred_dict)
assert cred_descr in creds
del creds[cred_descr]
assert cred_descr not in creds
def test_create_secret_text_credential(jenkins):
"""
Tests the creation of a secret text.
"""
creds = jenkins.credentials
cred_descr = random_string()
cred_dict = {
'description': cred_descr,
'secret': 'newsecret'
}
creds[cred_descr] = SecretTextCredential(cred_dict)
assert cred_descr in creds
cred = creds[cred_descr]
assert isinstance(cred, SecretTextCredential) is True
assert cred.secret is None
assert cred.description == cred_descr
del creds[cred_descr]
| mit | -9,153,293,860,681,182,000 | 24.980263 | 68 | 0.657128 | false |
dellis23/parsedatetime | parsedatetime/tests/TestContext.py | 4 | 1854 | # -*- coding: utf-8 -*-
"""
Test pdtContext
"""
import time
import unittest
import parsedatetime as pdt
from parsedatetime.context import pdtContext
class test(unittest.TestCase):
def setUp(self):
self.cal = pdt.Calendar(version=pdt.VERSION_CONTEXT_STYLE)
(self.yr, self.mth, self.dy, self.hr, self.mn,
self.sec, self.wd, self.yd, self.isdst) = time.localtime()
def testContext(self):
self.assertEqual(self.cal.parse('5 min from now')[1],
pdtContext(pdtContext.ACU_MIN | pdtContext.ACU_NOW))
self.assertEqual(self.cal.parse('5 min from now',
version=pdt.VERSION_FLAG_STYLE)[1], 2)
self.assertEqual(self.cal.parse('7/11/2015')[1],
pdtContext(pdtContext.ACU_YEAR |
pdtContext.ACU_MONTH | pdtContext.ACU_DAY))
self.assertEqual(self.cal.parse('7/11/2015',
version=pdt.VERSION_FLAG_STYLE)[1], 1)
self.assertEqual(self.cal.parse('14/32/2015')[1],
pdtContext(0))
self.assertEqual(self.cal.parse('25:23')[1],
pdtContext())
def testSources(self):
self.assertEqual(self.cal.parse('afternoon 5pm')[1],
pdtContext(pdtContext.ACU_HALFDAY |
pdtContext.ACU_HOUR))
self.assertEqual(self.cal.parse('morning')[1],
pdtContext(pdtContext.ACU_HALFDAY))
self.assertEqual(self.cal.parse('night', version=1)[1], 2)
def testThreadRun(self):
from threading import Thread
t = Thread(target=lambda: self.cal.evalRanges('4p-6p'))
        # should not raise an AttributeError
t.start()
if __name__ == "__main__":
unittest.main()
| apache-2.0 | -6,461,932,173,808,929,000 | 34.653846 | 79 | 0.558792 | false |
moopie/botologist | botologist/protocol/irc.py | 1 | 15721 | import logging
log = logging.getLogger(__name__)
import re
import signal
import socket
import ssl
import threading
import botologist.protocol
# https://github.com/myano/jenni/wiki/IRC-String-Formatting
irc_format_pattern = re.compile(r'(\x03\d{1,2}(,\d{1,2})?)|[\x02\x03\x0F\x16\x1D\x1F]')
def strip_irc_formatting(string):
return irc_format_pattern.sub('', string)
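# Quick sketch (illustrative only) of what the pattern strips: \x02 toggles
# bold, \x03<nn>[,<nn>] selects colours and a bare \x03 resets them.
def _example_strip_irc_formatting():
	assert strip_irc_formatting('\x02bold\x02 \x0304red\x03 plain') == 'bold red plain'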
def decode(bytestring):
try:
return bytestring.decode('utf-8').strip()
except UnicodeDecodeError:
try:
return bytestring.decode('iso-8859-1').strip()
		except Exception:
log.error('Could not decode string: %r', bytestring)
return None
def decode_lines(bytestring):
for substring in bytestring.split(b'\r\n'):
line = decode(substring)
if line:
yield line
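# Sketch of the fallback: valid UTF-8 decodes directly, while bytes that are
# not valid UTF-8 (e.g. latin-1 text such as b'caf\xe9') fall back to iso-8859-1.
def _example_decode():
	assert decode(b'hello\r\n') == 'hello'
	assert decode(b'caf\xe9') == 'café'  # 0xe9 is latin-1 for 'é'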
def get_client(config):
nick = config.get('nick', 'botologist')
def _make_server_obj(cfg):
if isinstance(cfg, dict):
if 'ssl' in cfg:
log.warning('"ssl" config is deprecated, rename to "use_ssl"')
cfg['use_ssl'] = cfg['ssl']
del cfg['ssl']
return Server(**cfg)
elif isinstance(cfg, str):
return Server(cfg)
else:
raise ValueError(
'server config must be dict or str, {} given'.format(type(cfg))
)
if 'servers' in config:
assert isinstance(config['servers'], list)
servers = (_make_server_obj(s) for s in config['servers'])
else:
servers = (_make_server_obj(config['server']),)
server_pool = ServerPool(servers)
return Client(
server_pool,
nick=nick,
username=config.get('username', nick),
realname=config.get('realname', nick),
)
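# Minimal config sketch (hypothetical host): 'server' may be a "host:port"
# string or a dict of Server() keyword arguments; a 'servers' list mixing both
# forms is also accepted and rotated through on reconnect.
def _example_get_client():
	return get_client({
		'server': {'address': 'irc.example.org:6697', 'use_ssl': True},
		'nick': 'botologist',
	})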
def _find_user(channel, host, nick):
if channel:
user = channel.find_user(identifier=host, name=nick)
if user:
return user
if host and nick:
return User(nick, host)
return None
class Client(botologist.protocol.Client):
MAX_MSG_CHARS = 500
PING_EVERY = 3 * 60 # seconds
PING_TIMEOUT = 20 # seconds
def __init__(self, server_pool, nick, username=None, realname=None):
super().__init__(nick)
self.server_pool = server_pool
self.server = None
self.username = username or nick
self.realname = realname or nick
self.irc_socket = None
self.quitting = False
self.reconnect_timer = False
self.ping_timer = None
self.ping_response_timer = None
self.connect_thread = None
def join_channels():
for channel in self.channels.values():
self.join_channel(channel)
self.on_connect.append(join_channels)
def run_forever(self):
log.info('Starting IRC client')
def sigterm_handler(signo, stack_frame): # pylint: disable=unused-argument
self.stop('Terminating, probably back soon!')
signal.signal(signal.SIGQUIT, sigterm_handler)
signal.signal(signal.SIGTERM, sigterm_handler)
signal.signal(signal.SIGINT, sigterm_handler)
try:
self.connect()
except (InterruptedError, SystemExit, KeyboardInterrupt):
self.stop('Terminating, probably back soon!')
except:
self.stop('An error occured!')
raise
def connect(self):
if self.irc_socket is not None:
self.disconnect()
if self.connect_thread is not None and self.connect_thread.isAlive():
log.warning('connect_thread is already alive, not doing anything')
return
self.connect_thread = threading.Thread(
target=self._wrap_error_handler(self._connect)
)
self.connect_thread.start()
def disconnect(self):
for callback in self.on_disconnect:
callback()
if self.connect_thread is None or not self.connect_thread.isAlive():
log.warning('connect_thread is not alive, not doing anything')
return
log.info('Disconnecting')
self.quitting = True
self.irc_socket.close()
self.irc_socket = None
def reconnect(self, time=None):
if self.irc_socket:
self.disconnect()
if self.connect_thread is not None and self.connect_thread.isAlive():
log.warning('connect_thread is already alive, not doing anything')
return
if time:
log.info('Reconnecting in %d seconds', time)
self.connect_thread = threading.Timer(time, self._connect)
self.reconnect_timer = self.connect_thread
else:
			self.connect_thread = threading.Thread(target=self._connect)
self.connect_thread.start()
def _connect(self):
self.quitting = False
if self.reconnect_timer:
self.reconnect_timer = None
self.server = self.server_pool.get()
log.info('Connecting to %s:%s', self.server.host, self.server.port)
self.irc_socket = IRCSocket(self.server)
self.irc_socket.connect()
log.info('Successfully connected to server!')
self.send('NICK ' + self.nick)
self.send('USER ' + self.username + ' 0 * :' + self.realname)
self.loop()
def loop(self):
handle_func = self._wrap_error_handler(self.handle_msg)
while self.irc_socket:
try:
data = self.irc_socket.recv()
except OSError:
if self.quitting:
log.info('socket.recv threw an OSError, but quitting, '
'so exiting loop', exc_info=True)
else:
log.exception('socket.recv threw an exception')
self.reconnect(5)
return
if data == b'':
if self.quitting:
log.info('received empty binary data, but quitting, so exiting loop')
return
else:
raise IRCSocketError('Received empty binary data')
for msg in decode_lines(data):
if not msg:
continue
log.debug('[recv] %r', msg)
if self.quitting and msg.startswith('ERROR :'):
log.info('received an IRC ERROR, but quitting, so exiting loop')
return
handle_func(msg)
def join_channel(self, channel):
assert isinstance(channel, Channel)
log.info('Joining channel: %s', channel.name)
self.channels[channel.name] = channel
self.send('JOIN ' + channel.name)
def handle_msg(self, msg):
words = msg.split()
if words[0] == 'PING':
self.reset_ping_timer()
self.send('PONG ' + words[1])
elif words[0] == 'ERROR':
if ':Your host is trying to (re)connect too fast -- throttled' in msg:
log.warning('Throttled for (re)connecting too fast')
self.reconnect(60)
else:
log.warning('Received error: %s', msg)
self.reconnect(10)
		elif '400' < words[0] < '600':
			# numeric replies in the 400-599 range are errors; IRC numerics
			# arrive as strings, so lexicographic comparison works for 3-digit codes
			log.warning('Received error reply: %s', msg)
elif len(words) > 1:
try:
nick, host, _ = User.split_ircformat(words[0])
			except Exception:
nick = host = None
# welcome message, lets us know that we're connected
if words[1] == '001':
for callback in self.on_connect:
callback()
elif words[1] == 'PONG':
self.reset_ping_timer()
elif words[1] == 'JOIN':
channel = words[2]
user = User.from_ircformat(words[0])
log.debug('User %s (%s @ %s) joined channel %s',
user.nick, user.ident, user.host, channel)
if user.nick == self.nick:
self.send('WHO '+channel)
else:
channel = words[2].lstrip(':')
self.channels[channel].add_user(user)
for callback in self.on_join:
callback(self.channels[channel], user)
# response to WHO command
elif words[1] == '352':
channel = self.channels[words[3].lstrip(':')]
host = words[5]
nick = words[7]
if not channel.find_user(identifier=host, name=nick):
ident = words[4]
user = User(nick, host, ident)
channel.add_user(user)
elif words[1] == 'NICK':
new_nick = words[2][1:]
log.debug('User %s changing nick: %s', host, new_nick)
for channel in self.channels.values():
channel_user = channel.find_user(identifier=host)
if channel_user:
log.debug('Updating nick for user %r in channel %s',
channel_user, channel.name)
channel_user.name = new_nick
elif words[1] == 'PART':
channel = self.channels[words[2].lstrip(':')]
log.debug('User %s parted from channel %s', host, channel)
channel.remove_user(name=nick, identifier=host)
elif words[1] == 'KICK':
channel = self.channels[words[2].lstrip(':')]
user = _find_user(channel, host, nick)
kicked_nick = words[3]
kicked_user = _find_user(channel, None, kicked_nick)
log.debug('User %s was kicked by %s from channel %s',
kicked_nick, user.nick, channel.name)
channel.remove_user(name=kicked_nick)
for callback in self.on_kick:
callback(channel, kicked_user, user)
if kicked_nick == self.nick:
self.join_channel(channel)
elif words[1] == 'QUIT':
log.debug('User %s!%s quit', nick, host)
for channel in self.channels.values():
channel.remove_user(name=nick, identifier=host)
elif words[1] == 'PRIVMSG':
channel = self.channels.get(words[2].lstrip(':'))
user = _find_user(channel, host, nick)
message = Message.from_privmsg(msg, user)
message.channel = channel
if not message.is_private:
message.channel = self.channels[message.target]
if not user:
log.debug('Unknown user %s (%s) added to channel %s',
user.nick, user.host, message.target)
self.channels[message.target].add_user(user)
for callback in self.on_privmsg:
callback(message)
def send_msg(self, target, message):
if isinstance(target, Channel):
target = target.name
if target in self.channels:
if not self.channels[target].allow_colors:
message = strip_irc_formatting(message)
messages = self._parse_messages(message)
for privmsg in messages:
self.send('PRIVMSG ' + target + ' :' + privmsg)
def send(self, msg):
if len(msg) > self.MAX_MSG_CHARS:
log.warning('Message too long (%d characters), upper limit %d',
len(msg), self.MAX_MSG_CHARS)
msg = msg[:(self.MAX_MSG_CHARS - 3)] + '...'
log.debug('[send] %s', repr(msg))
self.irc_socket.send(msg + '\r\n')
def stop(self, reason='Leaving'):
super().stop()
if self.reconnect_timer:
log.info('Aborting reconnect timer')
self.reconnect_timer.cancel()
self.reconnect_timer = None
return
if self.ping_timer:
self.ping_timer.cancel()
self.ping_timer = None
if self.ping_response_timer:
self.ping_response_timer.cancel()
self.ping_response_timer = None
if not self.irc_socket:
log.warning('Tried to quit, but irc_socket is None')
return
log.info('Quitting, reason: %s', reason)
self.quitting = True
self.send('QUIT :' + reason)
def reset_ping_timer(self):
if self.ping_response_timer:
self.ping_response_timer.cancel()
self.ping_response_timer = None
if self.ping_timer:
self.ping_timer.cancel()
self.ping_timer = None
self.ping_timer = threading.Timer(
self.PING_EVERY,
self._wrap_error_handler(self.send_ping),
)
self.ping_timer.start()
def send_ping(self):
if self.ping_response_timer:
log.warning('Already waiting for PONG, cannot send another PING')
return
self.send('PING ' + self.server.host)
self.ping_response_timer = threading.Timer(
self.PING_TIMEOUT,
self._wrap_error_handler(self.handle_ping_timeout),
)
self.ping_response_timer.start()
def handle_ping_timeout(self):
log.warning('Ping timeout')
self.ping_response_timer = None
self.reconnect()
class User(botologist.protocol.User):
def __init__(self, nick, host=None, ident=None):
if host and '@' in host:
ident, host = host.split('@')
self.host = host
if ident and ident[0] == '~':
ident = ident[1:]
self.ident = ident
super().__init__(nick, host)
@staticmethod
def split_ircformat(string):
if string[0] == ':':
string = string[1:]
parts = string.split('!')
nick = parts[0]
ident, host = parts[1].split('@')
return (nick, host, ident)
@classmethod
def from_ircformat(cls, string):
nick, host, ident = cls.split_ircformat(string)
return cls(nick, host, ident)
def __repr__(self):
return '<botologist.protocol.irc.User "{}!{}@{}" at {}>'.format(
self.name, self.ident, self.host, hex(id(self)))
class Message(botologist.protocol.Message):
def __init__(self, user, target, message):
if not isinstance(user, User):
user = User.from_ircformat(user)
super().__init__(message, user, target)
self.is_private = self.target[0] != '#'
@classmethod
def from_privmsg(cls, msg, user=None):
words = msg.split()
return cls(user or words[0][1:], words[2], ' '.join(words[3:])[1:])
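# Worked example (hypothetical raw line): the prefix becomes the user, the
# second word the target, and the trailing ':'-prefixed text the message body.
def _example_from_privmsg():
	message = Message.from_privmsg(':nick!ident@host PRIVMSG #chan :hi there')
	assert message.target == '#chan' and not message.is_private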
class Server:
def __init__(self, address, use_ssl=False):
parts = address.split(':')
self.host = parts[0]
if len(parts) > 1:
self.port = int(parts[1])
else:
self.port = 6667
self.use_ssl = use_ssl
class ServerPool:
def __init__(self, servers=None):
self.index = 0
self.servers = []
if servers:
for server in servers:
self.add_server(server)
def add_server(self, server):
assert isinstance(server, Server)
self.servers.append(server)
def get(self):
server = self.servers[self.index]
self.index += 1
if self.index >= len(self.servers):
self.index = 0
return server
class Channel(botologist.protocol.Channel):
def __init__(self, name):
if name[0] != '#':
name = '#' + name
super().__init__(name)
self.allow_colors = True
def find_nick_from_host(self, host):
if '@' in host:
host = host[host.index('@')+1:]
user = self.find_user(identifier=host)
if user:
return user.name
def find_host_from_nick(self, nick):
user = self.find_user(name=nick)
if user:
return user.host
def remove_user(self, user=None, name=None, identifier=None):
if not user and identifier and '@' in identifier:
identifier = identifier[identifier.index('@')+1:]
return super().remove_user(user=user, name=name, identifier=identifier)
class IRCSocketError(OSError):
pass
class IRCSocket:
def __init__(self, server):
self.server = server
self.socket = None
self.ssl_context = None
if self.server.use_ssl:
# https://docs.python.org/3/library/ssl.html#protocol-versions
self.ssl_context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
self.ssl_context.options |= ssl.OP_NO_SSLv2
self.ssl_context.options |= ssl.OP_NO_SSLv3
if hasattr(self.ssl_context, 'load_default_certs'):
self.ssl_context.verify_mode = ssl.CERT_REQUIRED
self.ssl_context.check_hostname = True
self.ssl_context.load_default_certs() # pylint: disable=no-member
else:
log.warning('TLS connections may not be secure in Python 3.3 - upgrade to 3.4 or newer!')
self.ssl_context.verify_mode = ssl.CERT_OPTIONAL
def connect(self):
log.debug('Looking up address info for %s:%s',
self.server.host, self.server.port)
addrinfo = socket.getaddrinfo(
self.server.host, self.server.port,
socket.AF_UNSPEC, socket.SOCK_STREAM
)
for res in addrinfo:
af, socktype, proto, canonname, address = res
try:
self.socket = socket.socket(af, socktype, proto)
except OSError:
log.warning('uncaught exception while initialising socket', exc_info=True)
self.socket = None
continue
if self.server.use_ssl:
log.debug('server is using SSL')
self.socket = self.ssl_context.wrap_socket(
self.socket, server_hostname=self.server.host)
try:
self.socket.settimeout(10)
log.debug('Trying to connect to %s:%s', address[0], address[1])
self.socket.connect(address)
except OSError:
log.warning('uncaught exception while connecting to socket', exc_info=True)
self.close()
continue
# if we reach this point, the socket has been successfully created,
# so break out of the loop
break
if self.socket is None:
raise IRCSocketError('Could not open socket')
self.socket.settimeout(None)
def recv(self, bufsize=4096):
data = self.socket.recv(bufsize)
		# keep reading until the buffer ends with CRLF (13 = \r, 10 = \n)
		while data != b'' and not data.endswith(b'\r\n'):
data += self.socket.recv(bufsize)
return data
def send(self, data):
if isinstance(data, str):
data = data.encode('utf-8')
self.socket.send(data)
def close(self):
try:
self.socket.shutdown(socket.SHUT_RDWR)
except OSError:
# shutdown will fail if the socket has already been closed by the
# server, which will happen if we get throttled for example
pass
self.socket.close()
self.socket = None
| mit | -1,352,838,157,569,138,200 | 25.919521 | 93 | 0.670377 | false |
markgw/jazzparser | lib/nltk/sem/boxer.py | 1 | 46967 | # Natural Language Toolkit: Interface to Boxer
# <http://svn.ask.it.usyd.edu.au/trac/candc/wiki/boxer>
#
# Author: Dan Garrette <[email protected]>
#
# Copyright (C) 2001-2010 NLTK Project
# URL: <http://www.nltk.org/>
# For license information, see LICENSE.TXT
from __future__ import with_statement
import os
import subprocess
from optparse import OptionParser
import tempfile
import nltk
from nltk.sem.logic import *
from nltk.sem.drt import *
"""
An interface to Boxer.
Usage:
Set the environment variable CANDCHOME to the bin directory of your CandC installation.
The models directory should be in the CandC root directory.
For example:
/path/to/candc/
bin/
candc
boxer
models/
boxer/
"""
class Boxer(object):
"""
This class is an interface to Johan Bos's program Boxer, a wide-coverage
semantic parser that produces Discourse Representation Structures (DRSs).
"""
def __init__(self, boxer_drs_interpreter=None, elimeq=False):
"""
@param boxer_drs_interpreter: A class that converts from the
C{AbstractBoxerDrs} object hierarchy to a different object. The
default is C{NltkDrtBoxerDrsInterpreter}, which converts to the NLTK
DRT hierarchy.
@param elimeq: When set to true, Boxer removes all equalities from the
DRSs and discourse referents standing in the equality relation are
unified, but only if this can be done in a meaning-preserving manner.
"""
if boxer_drs_interpreter is None:
boxer_drs_interpreter = NltkDrtBoxerDrsInterpreter()
self._boxer_drs_interpreter = boxer_drs_interpreter
self._elimeq = elimeq
self._boxer_bin = None
self._candc_bin = None
self._candc_models_path = None
def interpret(self, input, discourse_id=None, question=False, verbose=False):
"""
Use Boxer to give a first order representation.
@param input: C{str} Input sentence to parse
        @param question: C{bool} Is the sentence a question?
@param discourse_id: C{str} An identifier to be inserted to each occurrence-indexed predicate.
@return: C{drt.AbstractDrs}
"""
discourse_ids = [discourse_id] if discourse_id is not None else None
return self.batch_interpret_multisentence([[input]], discourse_ids, question, verbose)[0]
def interpret_multisentence(self, input, discourse_id=None, question=False, verbose=False):
"""
Use Boxer to give a first order representation.
@param input: C{list} of C{str} Input sentences to parse as a single discourse
        @param question: C{bool} Is the discourse a question?
@param discourse_id: C{str} An identifier to be inserted to each occurrence-indexed predicate.
@return: C{drt.AbstractDrs}
"""
discourse_ids = [discourse_id] if discourse_id is not None else None
return self.batch_interpret_multisentence([input], discourse_ids, question, verbose)[0]
def batch_interpret(self, inputs, discourse_ids=None, question=False, verbose=False):
"""
Use Boxer to give a first order representation.
@param inputs: C{list} of C{str} Input sentences to parse as individual discourses
        @param question: C{bool} Are the sentences questions?
@param discourse_ids: C{list} of C{str} Identifiers to be inserted to each occurrence-indexed predicate.
@return: C{list} of C{drt.AbstractDrs}
"""
return self.batch_interpret_multisentence([[input] for input in inputs], discourse_ids, question, verbose)
def batch_interpret_multisentence(self, inputs, discourse_ids=None, question=False, verbose=False):
"""
Use Boxer to give a first order representation.
@param inputs: C{list} of C{list} of C{str} Input discourses to parse
        @param question: C{bool} Are the discourses questions?
@param discourse_ids: C{list} of C{str} Identifiers to be inserted to each occurrence-indexed predicate.
        @return: C{list} of C{drt.AbstractDrs}
"""
        temp_fd, temp_filename = tempfile.mkstemp(prefix='boxer-', suffix='.in', text=True)
        os.close(temp_fd)  # candc writes the file by name; the open descriptor is not needed
if discourse_ids is not None:
assert len(inputs) == len(discourse_ids)
assert all(id is not None for id in discourse_ids)
use_disc_id = True
else:
discourse_ids = map(str, xrange(len(inputs)))
use_disc_id = False
candc_out = self._call_candc(inputs, discourse_ids, question, temp_filename, verbose=verbose)
boxer_out = self._call_boxer(temp_filename, verbose=verbose)
os.remove(temp_filename)
# if 'ERROR: input file contains no ccg/2 terms.' in boxer_out:
# raise UnparseableInputException('Could not parse with candc: "%s"' % input_str)
drs_dict = self._parse_to_drs_dict(boxer_out, use_disc_id)
return [drs_dict.get(id, None) for id in discourse_ids]
def _call_candc(self, inputs, discourse_ids, question, filename, verbose=False):
"""
Call the C{candc} binary with the given input.
@param inputs: C{list} of C{list} of C{str} Input discourses to parse
@param discourse_ids: C{list} of C{str} Identifiers to be inserted to each occurrence-indexed predicate.
@param filename: C{str} A filename for the output file
@return: stdout
"""
if self._candc_bin is None:
self._candc_bin = self._find_binary('candc', verbose)
if self._candc_models_path is None:
self._candc_models_path = os.path.normpath(os.path.join(self._candc_bin[:-5], '../models'))
args = ['--models', os.path.join(self._candc_models_path, 'questions' if question else 'boxer'),
'--output', filename]
return self._call('\n'.join(sum((["<META>'%s'" % id] + d for d,id in zip(inputs,discourse_ids)), [])), self._candc_bin, args, verbose)
def _call_boxer(self, filename, verbose=False):
"""
Call the C{boxer} binary with the given input.
@param filename: C{str} A filename for the input file
@return: stdout
"""
if self._boxer_bin is None:
self._boxer_bin = self._find_binary('boxer', verbose)
args = ['--box', 'false',
'--semantics', 'drs',
'--flat', 'false',
'--resolve', 'true',
'--elimeq', 'true' if self._elimeq else 'false',
'--format', 'prolog',
'--instantiate', 'true',
'--input', filename]
return self._call(None, self._boxer_bin, args, verbose)
def _find_binary(self, name, verbose=False):
return nltk.internals.find_binary(name,
env_vars=['CANDCHOME'],
url='http://svn.ask.it.usyd.edu.au/trac/candc/',
binary_names=[name, name + '.exe'],
verbose=verbose)
def _call(self, input_str, binary, args=[], verbose=False):
"""
Call the binary with the given input.
@param input_str: A string whose contents are used as stdin.
@param binary: The location of the binary to call
@param args: A list of command-line arguments.
@return: stdout
"""
if verbose:
print 'Calling:', binary
print 'Args:', args
print 'Input:', input_str
print 'Command:', binary + ' ' + ' '.join(args)
# Call via a subprocess
if input_str is None:
cmd = [binary] + args
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
else:
cmd = 'echo "%s" | %s %s' % (input_str, binary, ' '.join(args))
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
stdout, stderr = p.communicate()
if verbose:
print 'Return code:', p.returncode
if stdout: print 'stdout:\n', stdout, '\n'
if stderr: print 'stderr:\n', stderr, '\n'
if p.returncode != 0:
raise Exception('ERROR CALLING: %s %s\nReturncode: %d\n%s' % (binary, ' '.join(args), p.returncode, stderr))
return stdout
def _parse_to_drs_dict(self, boxer_out, use_disc_id):
lines = boxer_out.split('\n')
drs_dict = {}
i = 0
while i < len(lines):
line = lines[i]
if line.startswith('id('):
comma_idx = line.index(',')
discourse_id = line[3:comma_idx]
if discourse_id[0] == "'" and discourse_id[-1] == "'":
discourse_id = discourse_id[1:-1]
drs_id = line[comma_idx+1:line.index(')')]
line = lines[i+4]
assert line.startswith('sem(%s,' % drs_id)
line = lines[i+8]
assert line.endswith(').')
drs_input = line[:-2].strip()
parsed = self._parse_drs(drs_input, discourse_id, use_disc_id)
drs_dict[discourse_id] = self._boxer_drs_interpreter.interpret(parsed)
i += 8
i += 1
return drs_dict
def _parse_drs(self, drs_string, discourse_id, use_disc_id):
return BoxerOutputDrsParser(discourse_id if use_disc_id else None).parse(drs_string)
class BoxerOutputDrsParser(DrtParser):
def __init__(self, discourse_id=None):
"""
This class is used to parse the Prolog DRS output from Boxer into a
hierarchy of python objects.
"""
DrtParser.__init__(self)
self.discourse_id = discourse_id
self.sentence_id_offset = None
self.quote_chars = [("'", "'", "\\", False)]
self._label_counter = None
def parse(self, data, signature=None):
self._label_counter = Counter(-1)
return DrtParser.parse(self, data, signature)
def get_all_symbols(self):
return ['(', ')', ',', '[', ']',':']
def handle(self, tok, context):
return self.handle_drs(tok)
def attempt_adjuncts(self, expression, context):
return expression
def parse_condition(self, indices):
"""
Parse a DRS condition
@return: C{list} of C{AbstractDrs}
"""
tok = self.token()
accum = self.handle_condition(tok, indices)
if accum is None:
raise UnexpectedTokenException(tok)
return accum
def handle_drs(self, tok):
if tok == 'drs':
return self.parse_drs()
elif tok in ['merge', 'smerge']:
return self._handle_binary_expression(self._make_merge_expression)(None, [])
def handle_condition(self, tok, indices):
"""
Handle a DRS condition
@param indices: C{list} of C{int}
@return: C{list} of C{AbstractDrs}
"""
if tok == 'not':
return [self._handle_not()]
if tok == 'or':
conds = [self._handle_binary_expression(self._make_or_expression)]
elif tok == 'imp':
conds = [self._handle_binary_expression(self._make_imp_expression)]
elif tok == 'eq':
conds = [self._handle_eq()]
elif tok == 'prop':
conds = [self._handle_prop()]
elif tok == 'pred':
conds = [self._handle_pred()]
elif tok == 'named':
conds = [self._handle_named()]
elif tok == 'rel':
conds = [self._handle_rel()]
elif tok == 'timex':
conds = self._handle_timex()
elif tok == 'card':
conds = [self._handle_card()]
elif tok == 'whq':
conds = [self._handle_whq()]
else:
conds = []
return sum([[cond(sent_index, word_indices) for cond in conds] for sent_index, word_indices in self._sent_and_word_indices(indices)], [])
def _handle_not(self):
self.assertToken(self.token(), '(')
drs = self.parse_Expression(None)
self.assertToken(self.token(), ')')
return BoxerNot(drs)
def _handle_pred(self):
#pred(_G3943, dog, n, 0)
self.assertToken(self.token(), '(')
variable = self.parse_variable()
self.assertToken(self.token(), ',')
name = self.token()
self.assertToken(self.token(), ',')
pos = self.token()
self.assertToken(self.token(), ',')
sense = int(self.token())
self.assertToken(self.token(), ')')
def _handle_pred_f(sent_index, word_indices):
if name=='event' and sent_index is None and ((pos=='n' and sense==1) or (pos=='v' and sense==0)):
return BoxerEvent(variable)
else:
return BoxerPred(self.discourse_id, sent_index, word_indices, variable, name, pos, sense)
return _handle_pred_f
def _handle_named(self):
#named(x0, john, per, 0)
self.assertToken(self.token(), '(')
variable = self.parse_variable()
self.assertToken(self.token(), ',')
name = self.token()
self.assertToken(self.token(), ',')
type = self.token()
self.assertToken(self.token(), ',')
sense = int(self.token())
self.assertToken(self.token(), ')')
return lambda sent_index, word_indices: BoxerNamed(self.discourse_id, sent_index, word_indices, variable, name, type, sense)
def _handle_rel(self):
#rel(_G3993, _G3943, agent, 0)
self.assertToken(self.token(), '(')
var1 = self.parse_variable()
self.assertToken(self.token(), ',')
var2 = self.parse_variable()
self.assertToken(self.token(), ',')
rel = self.token()
self.assertToken(self.token(), ',')
sense = int(self.token())
self.assertToken(self.token(), ')')
return lambda sent_index, word_indices: BoxerRel(self.discourse_id, sent_index, word_indices, var1, var2, rel, sense)
def _handle_timex(self):
#timex(_G18322, date([]: +, []:'XXXX', [1004]:'04', []:'XX'))
self.assertToken(self.token(), '(')
arg = self.parse_variable()
self.assertToken(self.token(), ',')
new_conds = self._handle_time_expression(arg)
self.assertToken(self.token(), ')')
return new_conds
def _handle_time_expression(self, arg):
#date([]: +, []:'XXXX', [1004]:'04', []:'XX')
tok = self.token()
self.assertToken(self.token(), '(')
if tok == 'date':
conds = self._handle_date(arg)
elif tok == 'time':
conds = self._handle_time(arg)
else:
return None
self.assertToken(self.token(), ')')
        # bind each cond through a default argument; a plain closure would see
        # only the loop variable's final value
        return [lambda sent_index, word_indices: BoxerPred(self.discourse_id, sent_index, word_indices, arg, tok, 'n', 0)] + \
               [lambda sent_index, word_indices, cond=cond: cond for cond in conds]
def _handle_date(self, arg):
#[]: +, []:'XXXX', [1004]:'04', []:'XX'
conds = []
(sent_index, word_indices), = self._sent_and_word_indices(self._parse_index_list())
pol = self.token()
conds.append(BoxerPred(self.discourse_id, sent_index, word_indices, arg, 'date_pol_%s' % (pol), 'a', 0))
self.assertToken(self.token(), ',')
(sent_index, word_indices), = self._sent_and_word_indices(self._parse_index_list())
year = self.token()
if year != 'XXXX':
year = year.replace(':', '_')
conds.append(BoxerPred(self.discourse_id, sent_index, word_indices, arg, 'date_year_%s' % (year), 'a', 0))
self.assertToken(self.token(), ',')
(sent_index, word_indices), = self._sent_and_word_indices(self._parse_index_list())
month = self.token()
if month != 'XX':
conds.append(BoxerPred(self.discourse_id, sent_index, word_indices, arg, 'date_month_%s' % (month), 'a', 0))
self.assertToken(self.token(), ',')
(sent_index, word_indices), = self._sent_and_word_indices(self._parse_index_list())
day = self.token()
if day != 'XX':
conds.append(BoxerPred(self.discourse_id, sent_index, word_indices, arg, 'date_day_%s' % (day), 'a', 0))
return conds
def _handle_time(self, arg):
#time([1018]:'18', []:'XX', []:'XX')
conds = []
self._parse_index_list()
hour = self.token()
if hour != 'XX':
conds.append(self._make_atom('r_hour_2',arg,hour))
self.assertToken(self.token(), ',')
self._parse_index_list()
min = self.token()
if min != 'XX':
conds.append(self._make_atom('r_min_2',arg,min))
self.assertToken(self.token(), ',')
self._parse_index_list()
sec = self.token()
if sec != 'XX':
conds.append(self._make_atom('r_sec_2',arg,sec))
return conds
def _handle_card(self):
#card(_G18535, 28, ge)
self.assertToken(self.token(), '(')
variable = self.parse_variable()
self.assertToken(self.token(), ',')
value = self.token()
self.assertToken(self.token(), ',')
type = self.token()
self.assertToken(self.token(), ')')
return lambda sent_index, word_indices: BoxerCard(self.discourse_id, sent_index, word_indices, variable, value, type)
def _handle_prop(self):
#prop(_G15949, drs(...))
self.assertToken(self.token(), '(')
variable = self.parse_variable()
self.assertToken(self.token(), ',')
drs = self.parse_Expression(None)
self.assertToken(self.token(), ')')
return lambda sent_index, word_indices: BoxerProp(self.discourse_id, sent_index, word_indices, variable, drs)
def _parse_index_list(self):
#[1001,1002]:
indices = []
self.assertToken(self.token(), '[')
while self.token(0) != ']':
indices.append(self.parse_index())
if self.token(0) == ',':
self.token() #swallow ','
self.token() #swallow ']'
self.assertToken(self.token(), ':')
return indices
def parse_drs(self):
#drs([[1001]:_G3943],
# [[1002]:pred(_G3943, dog, n, 0)]
# )
label = self._label_counter.get()
self.assertToken(self.token(), '(')
self.assertToken(self.token(), '[')
refs = set()
while self.token(0) != ']':
indices = self._parse_index_list()
refs.add(self.parse_variable())
if self.token(0) == ',':
self.token() #swallow ','
self.token() #swallow ']'
self.assertToken(self.token(), ',')
self.assertToken(self.token(), '[')
conds = []
while self.token(0) != ']':
indices = self._parse_index_list()
conds.extend(self.parse_condition(indices))
if self.token(0) == ',':
self.token() #swallow ','
self.token() #swallow ']'
self.assertToken(self.token(), ')')
return BoxerDrs(label, list(refs), conds)
def _handle_binary_expression(self, make_callback):
self.assertToken(self.token(), '(')
drs1 = self.parse_Expression(None)
self.assertToken(self.token(), ',')
drs2 = self.parse_Expression(None)
self.assertToken(self.token(), ')')
return lambda sent_index, word_indices: make_callback(sent_index, word_indices, drs1, drs2)
def _handle_eq(self):
self.assertToken(self.token(), '(')
var1 = self.parse_variable()
self.assertToken(self.token(), ',')
var2 = self.parse_variable()
self.assertToken(self.token(), ')')
return lambda sent_index, word_indices: BoxerEq(self.discourse_id, sent_index, word_indices, var1, var2)
def _handle_whq(self):
self.assertToken(self.token(), '(')
self.assertToken(self.token(), '[')
ans_types = []
while self.token(0) != ']':
cat = self.token()
self.assertToken(self.token(), ':')
if cat == 'des':
ans_types.append(self.token())
elif cat == 'num':
ans_types.append('number')
typ = self.token()
if typ == 'cou':
ans_types.append('count')
else:
ans_types.append(typ)
else:
ans_types.append(self.token())
self.token() #swallow the ']'
self.assertToken(self.token(), ',')
d1 = self.parse_Expression(None)
self.assertToken(self.token(), ',')
ref = self.parse_variable()
self.assertToken(self.token(), ',')
d2 = self.parse_Expression(None)
self.assertToken(self.token(), ')')
return lambda sent_index, word_indices: BoxerWhq(self.discourse_id, sent_index, word_indices, ans_types, d1, ref, d2)
def _make_merge_expression(self, sent_index, word_indices, drs1, drs2):
return BoxerDrs(drs1.label, drs1.refs + drs2.refs, drs1.conds + drs2.conds)
def _make_or_expression(self, sent_index, word_indices, drs1, drs2):
return BoxerOr(self.discourse_id, sent_index, word_indices, drs1, drs2)
def _make_imp_expression(self, sent_index, word_indices, drs1, drs2):
return BoxerDrs(drs1.label, drs1.refs, drs1.conds, drs2)
def parse_variable(self):
var = self.token()
assert re.match('^x\d+$', var)
return int(var[1:])
def parse_index(self):
return int(self.token())
def _sent_and_word_indices(self, indices):
"""
@return: C{list} of (sent_index, word_indices) tuples
"""
sent_indices = set((i / 1000)-1 for i in indices if i>=0)
if sent_indices:
pairs = []
for sent_index in sent_indices:
word_indices = [(i % 1000)-1 for i in indices if sent_index == (i / 1000)-1]
pairs.append((sent_index, word_indices))
return pairs
else:
word_indices = [(i % 1000)-1 for i in indices]
return [(None, word_indices)]
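    def _example_sent_and_word_indices(self):
        # Worked example (illustrative only): Boxer encodes index 2003 as
        # sentence 2, word 3 (both 1-based), so (2003/1000)-1 == 1 and
        # (2003%1000)-1 == 2 give the 0-based pair (1, [2]).
        return self._sent_and_word_indices([2003])  # -> [(1, [2])]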
class BoxerDrsParser(DrtParser):
"""
Reparse the str form of subclasses of C{AbstractBoxerDrs}
"""
def __init__(self, discourse_id=None):
DrtParser.__init__(self)
self.discourse_id = discourse_id
def get_all_symbols(self):
return [DrtTokens.OPEN, DrtTokens.CLOSE, DrtTokens.COMMA, DrtTokens.OPEN_BRACKET, DrtTokens.CLOSE_BRACKET]
def attempt_adjuncts(self, expression, context):
return expression
def handle(self, tok, context):
try:
if tok == 'drs':
self.assertNextToken(DrtTokens.OPEN)
label = int(self.token())
self.assertNextToken(DrtTokens.COMMA)
refs = map(int, self.handle_refs())
self.assertNextToken(DrtTokens.COMMA)
conds = self.handle_conds(None)
self.assertNextToken(DrtTokens.CLOSE)
return BoxerDrs(label, refs, conds)
elif tok == 'pred':
self.assertNextToken(DrtTokens.OPEN)
disc_id = (self.token(), self.discourse_id)[self.discourse_id is not None]
self.assertNextToken(DrtTokens.COMMA)
sent_id = self.nullableIntToken()
self.assertNextToken(DrtTokens.COMMA)
word_ids = map(int, self.handle_refs())
self.assertNextToken(DrtTokens.COMMA)
variable = int(self.token())
self.assertNextToken(DrtTokens.COMMA)
name = self.token()
self.assertNextToken(DrtTokens.COMMA)
pos = self.token()
self.assertNextToken(DrtTokens.COMMA)
sense = int(self.token())
self.assertNextToken(DrtTokens.CLOSE)
return BoxerPred(disc_id, sent_id, word_ids, variable, name, pos, sense)
elif tok == 'named':
self.assertNextToken(DrtTokens.OPEN)
disc_id = (self.token(), self.discourse_id)[self.discourse_id is not None]
self.assertNextToken(DrtTokens.COMMA)
sent_id = int(self.token())
self.assertNextToken(DrtTokens.COMMA)
word_ids = map(int, self.handle_refs())
self.assertNextToken(DrtTokens.COMMA)
variable = int(self.token())
self.assertNextToken(DrtTokens.COMMA)
name = self.token()
self.assertNextToken(DrtTokens.COMMA)
type = self.token()
self.assertNextToken(DrtTokens.COMMA)
sense = int(self.token())
self.assertNextToken(DrtTokens.CLOSE)
return BoxerNamed(disc_id, sent_id, word_ids, variable, name, type, sense)
elif tok == 'rel':
self.assertNextToken(DrtTokens.OPEN)
disc_id = (self.token(), self.discourse_id)[self.discourse_id is not None]
self.assertNextToken(DrtTokens.COMMA)
sent_id = self.nullableIntToken()
self.assertNextToken(DrtTokens.COMMA)
word_ids = map(int, self.handle_refs())
self.assertNextToken(DrtTokens.COMMA)
var1 = int(self.token())
self.assertNextToken(DrtTokens.COMMA)
var2 = int(self.token())
self.assertNextToken(DrtTokens.COMMA)
rel = self.token()
self.assertNextToken(DrtTokens.COMMA)
sense = int(self.token())
self.assertNextToken(DrtTokens.CLOSE)
return BoxerRel(disc_id, sent_id, word_ids, var1, var2, rel, sense)
elif tok == 'event':
self.assertNextToken(DrtTokens.OPEN)
var = int(self.token())
self.assertNextToken(DrtTokens.CLOSE)
return BoxerEvent(var)
elif tok == 'prop':
self.assertNextToken(DrtTokens.OPEN)
disc_id = (self.token(), self.discourse_id)[self.discourse_id is not None]
self.assertNextToken(DrtTokens.COMMA)
sent_id = int(self.token())
self.assertNextToken(DrtTokens.COMMA)
word_ids = map(int, self.handle_refs())
self.assertNextToken(DrtTokens.COMMA)
variable = int(self.token())
self.assertNextToken(DrtTokens.COMMA)
drs = self.parse_Expression(None)
self.assertNextToken(DrtTokens.CLOSE)
return BoxerProp(disc_id, sent_id, word_ids, variable, drs)
elif tok == 'not':
self.assertNextToken(DrtTokens.OPEN)
drs = self.parse_Expression(None)
self.assertNextToken(DrtTokens.CLOSE)
return BoxerNot(drs)
elif tok == 'imp':
self.assertNextToken(DrtTokens.OPEN)
drs1 = self.parse_Expression(None)
self.assertNextToken(DrtTokens.COMMA)
drs2 = self.parse_Expression(None)
self.assertNextToken(DrtTokens.CLOSE)
return BoxerDrs(drs1.label, drs1.refs, drs1.conds, drs2)
elif tok == 'or':
self.assertNextToken(DrtTokens.OPEN)
disc_id = (self.token(), self.discourse_id)[self.discourse_id is not None]
self.assertNextToken(DrtTokens.COMMA)
sent_id = self.nullableIntToken()
self.assertNextToken(DrtTokens.COMMA)
word_ids = map(int, self.handle_refs())
self.assertNextToken(DrtTokens.COMMA)
drs1 = self.parse_Expression(None)
self.assertNextToken(DrtTokens.COMMA)
drs2 = self.parse_Expression(None)
self.assertNextToken(DrtTokens.CLOSE)
return BoxerOr(disc_id, sent_id, word_ids, drs1, drs2)
elif tok == 'eq':
self.assertNextToken(DrtTokens.OPEN)
disc_id = (self.token(), self.discourse_id)[self.discourse_id is not None]
self.assertNextToken(DrtTokens.COMMA)
sent_id = self.nullableIntToken()
self.assertNextToken(DrtTokens.COMMA)
word_ids = map(int, self.handle_refs())
self.assertNextToken(DrtTokens.COMMA)
var1 = int(self.token())
self.assertNextToken(DrtTokens.COMMA)
var2 = int(self.token())
self.assertNextToken(DrtTokens.CLOSE)
return BoxerEq(disc_id, sent_id, word_ids, var1, var2)
elif tok == 'card':
self.assertNextToken(DrtTokens.OPEN)
disc_id = (self.token(), self.discourse_id)[self.discourse_id is not None]
self.assertNextToken(DrtTokens.COMMA)
sent_id = self.nullableIntToken()
self.assertNextToken(DrtTokens.COMMA)
word_ids = map(int, self.handle_refs())
self.assertNextToken(DrtTokens.COMMA)
var = int(self.token())
self.assertNextToken(DrtTokens.COMMA)
value = self.token()
self.assertNextToken(DrtTokens.COMMA)
type = self.token()
self.assertNextToken(DrtTokens.CLOSE)
return BoxerCard(disc_id, sent_id, word_ids, var, value, type)
elif tok == 'whq':
self.assertNextToken(DrtTokens.OPEN)
disc_id = (self.token(), self.discourse_id)[self.discourse_id is not None]
self.assertNextToken(DrtTokens.COMMA)
sent_id = self.nullableIntToken()
self.assertNextToken(DrtTokens.COMMA)
word_ids = map(int, self.handle_refs())
self.assertNextToken(DrtTokens.COMMA)
ans_types = self.handle_refs()
self.assertNextToken(DrtTokens.COMMA)
drs1 = self.parse_Expression(None)
self.assertNextToken(DrtTokens.COMMA)
var = int(self.token())
self.assertNextToken(DrtTokens.COMMA)
drs2 = self.parse_Expression(None)
self.assertNextToken(DrtTokens.CLOSE)
return BoxerWhq(disc_id, sent_id, word_ids, ans_types, drs1, var, drs2)
except Exception, e:
raise ParseException(self._currentIndex, str(e))
assert False, repr(tok)
def nullableIntToken(self):
t = self.token()
return int(t) if t != 'None' else None
def get_next_token_variable(self, description):
try:
return self.token()
except ExpectedMoreTokensException, e:
raise ExpectedMoreTokensException(e.index, 'Variable expected.')
class AbstractBoxerDrs(object):
def variables(self):
"""
@return: (set<variables>, set<events>, set<propositions>)
"""
variables, events, propositions = self._variables()
return (variables - (events | propositions), events, propositions - events)
def variable_types(self):
vartypes = {}
for t,vars in zip(('z','e','p'), self.variables()):
for v in vars:
vartypes[v] = t
return vartypes
def _variables(self):
"""
@return: (set<variables>, set<events>, set<propositions>)
"""
return (set(), set(), set())
def atoms(self):
return set()
def clean(self):
return self
def _clean_name(self, name):
return name.replace('-','_').replace("'", "_")
def renumber_sentences(self, f):
return self
def __hash__(self):
return hash(str(self))
class BoxerDrs(AbstractBoxerDrs):
def __init__(self, label, refs, conds, consequent=None):
AbstractBoxerDrs.__init__(self)
self.label = label
self.refs = refs
self.conds = conds
self.consequent = consequent
def _variables(self):
variables = (set(), set(), set())
for cond in self.conds:
for s,v in zip(variables, cond._variables()):
s.update(v)
if self.consequent is not None:
for s,v in zip(variables, self.consequent._variables()):
s.update(v)
return variables
def atoms(self):
atoms = reduce(operator.or_, (cond.atoms() for cond in self.conds), set())
if self.consequent is not None:
atoms.update(self.consequent.atoms())
return atoms
def clean(self):
return BoxerDrs(self.label, self.refs, [c.clean() for c in self.conds], self.consequent.clean() if self.consequent else None)
def renumber_sentences(self, f):
return BoxerDrs(self.label, self.refs, [c.renumber_sentences(f) for c in self.conds], self.consequent.renumber_sentences(f) if self.consequent else None)
def __repr__(self):
s = 'drs(%s, [%s], [%s])' % (self.label,
', '.join(map(str, self.refs)),
', '.join(map(str, self.conds)))
if self.consequent is not None:
s = 'imp(%s, %s)' % (s, self.consequent)
return s
def __eq__(self, other):
return self.__class__ == other.__class__ and \
self.label == other.label and \
self.refs == other.refs and \
len(self.conds) == len(other.conds) and \
all(c1==c2 for c1,c2 in zip(self.conds, other.conds)) and \
self.consequent == other.consequent
class BoxerNot(AbstractBoxerDrs):
def __init__(self, drs):
AbstractBoxerDrs.__init__(self)
self.drs = drs
def _variables(self):
return self.drs._variables()
def atoms(self):
return self.drs.atoms()
def clean(self):
return BoxerNot(self.drs.clean())
def renumber_sentences(self, f):
return BoxerNot(self.drs.renumber_sentences(f))
def __repr__(self):
return 'not(%s)' % (self.drs)
def __eq__(self, other):
return self.__class__ == other.__class__ and self.drs == other.drs
class BoxerEvent(AbstractBoxerDrs):
def __init__(self, var):
AbstractBoxerDrs.__init__(self)
self.var = var
def _variables(self):
return (set(), set([self.var]), set())
def __repr__(self):
return 'event(%s)' % (self.var)
def __eq__(self, other):
return self.__class__ == other.__class__ and self.var == other.var
class BoxerIndexed(AbstractBoxerDrs):
def __init__(self, discourse_id, sent_index, word_indices):
AbstractBoxerDrs.__init__(self)
self.discourse_id = discourse_id
self.sent_index = sent_index
self.word_indices = word_indices
def atoms(self):
return set([self])
def __eq__(self, other):
return self.__class__ == other.__class__ and \
self.discourse_id == other.discourse_id and \
self.sent_index == other.sent_index and \
self.word_indices == other.word_indices and \
all(s==o for s,o in zip(self, other))
def __repr__(self):
s = '%s(%s, %s, [%s]' % (self._pred(), self.discourse_id, self.sent_index, ', '.join(map(str, self.word_indices)))
for v in self:
s += ', %s' % v
return s + ')'
class BoxerPred(BoxerIndexed):
def __init__(self, discourse_id, sent_index, word_indices, var, name, pos, sense):
BoxerIndexed.__init__(self, discourse_id, sent_index, word_indices)
self.var = var
self.name = name
self.pos = pos
self.sense = sense
def _variables(self):
return (set([self.var]), set(), set())
def change_var(self, var):
return BoxerPred(self.discourse_id, self.sent_index, self.word_indices, var, self.name, self.pos, self.sense)
def clean(self):
return BoxerPred(self.discourse_id, self.sent_index, self.word_indices, self.var, self._clean_name(self.name), self.pos, self.sense)
def renumber_sentences(self, f):
new_sent_index = f(self.sent_index)
return BoxerPred(self.discourse_id, new_sent_index, self.word_indices, self.var, self.name, self.pos, self.sense)
def __iter__(self):
return iter((self.var, self.name, self.pos, self.sense))
def _pred(self):
return 'pred'
class BoxerNamed(BoxerIndexed):
def __init__(self, discourse_id, sent_index, word_indices, var, name, type, sense):
BoxerIndexed.__init__(self, discourse_id, sent_index, word_indices)
self.var = var
self.name = name
self.type = type
self.sense = sense
def _variables(self):
return (set([self.var]), set(), set())
def change_var(self, var):
return BoxerNamed(self.discourse_id, self.sent_index, self.word_indices, var, self.name, self.type, self.sense)
def clean(self):
return BoxerNamed(self.discourse_id, self.sent_index, self.word_indices, self.var, self._clean_name(self.name), self.type, self.sense)
def renumber_sentences(self, f):
return BoxerNamed(self.discourse_id, f(self.sent_index), self.word_indices, self.var, self.name, self.type, self.sense)
def __iter__(self):
return iter((self.var, self.name, self.type, self.sense))
def _pred(self):
return 'named'
class BoxerRel(BoxerIndexed):
def __init__(self, discourse_id, sent_index, word_indices, var1, var2, rel, sense):
BoxerIndexed.__init__(self, discourse_id, sent_index, word_indices)
self.var1 = var1
self.var2 = var2
self.rel = rel
self.sense = sense
def _variables(self):
return (set([self.var1, self.var2]), set(), set())
def clean(self):
return BoxerRel(self.discourse_id, self.sent_index, self.word_indices, self.var1, self.var2, self._clean_name(self.rel), self.sense)
def renumber_sentences(self, f):
return BoxerRel(self.discourse_id, f(self.sent_index), self.word_indices, self.var1, self.var2, self.rel, self.sense)
def __iter__(self):
return iter((self.var1, self.var2, self.rel, self.sense))
def _pred(self):
return 'rel'
class BoxerProp(BoxerIndexed):
def __init__(self, discourse_id, sent_index, word_indices, var, drs):
BoxerIndexed.__init__(self, discourse_id, sent_index, word_indices)
self.var = var
self.drs = drs
def _variables(self):
return tuple(map(operator.or_, (set(), set(), set([self.var])), self.drs._variables()))
def referenced_labels(self):
return set([self.drs])
def atoms(self):
return self.drs.atoms()
def clean(self):
return BoxerProp(self.discourse_id, self.sent_index, self.word_indices, self.var, self.drs.clean())
def renumber_sentences(self, f):
return BoxerProp(self.discourse_id, f(self.sent_index), self.word_indices, self.var, self.drs.renumber_sentences(f))
def __iter__(self):
return iter((self.var, self.drs))
def _pred(self):
return 'prop'
class BoxerEq(BoxerIndexed):
def __init__(self, discourse_id, sent_index, word_indices, var1, var2):
BoxerIndexed.__init__(self, discourse_id, sent_index, word_indices)
self.var1 = var1
self.var2 = var2
def _variables(self):
return (set([self.var1, self.var2]), set(), set())
def atoms(self):
return set()
def renumber_sentences(self, f):
return BoxerEq(self.discourse_id, f(self.sent_index), self.word_indices, self.var1, self.var2)
def __iter__(self):
return iter((self.var1, self.var2))
def _pred(self):
return 'eq'
class BoxerCard(BoxerIndexed):
def __init__(self, discourse_id, sent_index, word_indices, var, value, type):
BoxerIndexed.__init__(self, discourse_id, sent_index, word_indices)
self.var = var
self.value = value
self.type = type
def _variables(self):
return (set([self.var]), set(), set())
def renumber_sentences(self, f):
return BoxerCard(self.discourse_id, f(self.sent_index), self.word_indices, self.var, self.value, self.type)
def __iter__(self):
return iter((self.var, self.value, self.type))
def _pred(self):
return 'card'
class BoxerOr(BoxerIndexed):
def __init__(self, discourse_id, sent_index, word_indices, drs1, drs2):
BoxerIndexed.__init__(self, discourse_id, sent_index, word_indices)
self.drs1 = drs1
self.drs2 = drs2
def _variables(self):
return tuple(map(operator.or_, self.drs1._variables(), self.drs2._variables()))
def atoms(self):
return self.drs1.atoms() | self.drs2.atoms()
def clean(self):
return BoxerOr(self.discourse_id, self.sent_index, self.word_indices, self.drs1.clean(), self.drs2.clean())
def renumber_sentences(self, f):
return BoxerOr(self.discourse_id, f(self.sent_index), self.word_indices, self.drs1, self.drs2)
def __iter__(self):
return iter((self.drs1, self.drs2))
def _pred(self):
return 'or'
class BoxerWhq(BoxerIndexed):
def __init__(self, discourse_id, sent_index, word_indices, ans_types, drs1, variable, drs2):
BoxerIndexed.__init__(self, discourse_id, sent_index, word_indices)
self.ans_types = ans_types
self.drs1 = drs1
self.variable = variable
self.drs2 = drs2
def _variables(self):
return tuple(map(operator.or_, (set([self.variable]), set(), set()), self.drs1._variables(), self.drs2._variables()))
def atoms(self):
return self.drs1.atoms() | self.drs2.atoms()
def clean(self):
return BoxerWhq(self.discourse_id, self.sent_index, self.word_indices, self.ans_types, self.drs1.clean(), self.variable, self.drs2.clean())
def renumber_sentences(self, f):
return BoxerWhq(self.discourse_id, f(self.sent_index), self.word_indices, self.ans_types, self.drs1, self.variable, self.drs2)
def __iter__(self):
return iter((self.ans_types, self.drs1, self.variable, self.drs2))
def _pred(self):
return 'whq'
class PassthroughBoxerDrsInterpreter(object):
def interpret(self, ex):
return ex
class NltkDrtBoxerDrsInterpreter(object):
def __init__(self, occur_index=False):
self._occur_index = occur_index
def interpret(self, ex):
"""
@param ex: C{AbstractBoxerDrs}
@return: C{AbstractDrs}
"""
if isinstance(ex, BoxerDrs):
drs = DRS([Variable('x%d' % r) for r in ex.refs], map(self.interpret, ex.conds))
if ex.label is not None:
drs.label = Variable('x%d' % ex.label)
if ex.consequent is not None:
drs.consequent = self.interpret(ex.consequent)
return drs
elif isinstance(ex, BoxerNot):
return DrtNegatedExpression(self.interpret(ex.drs))
elif isinstance(ex, BoxerEvent):
return self._make_atom('event', 'x%d' % ex.var)
elif isinstance(ex, BoxerPred):
pred = self._add_occur_indexing('%s_%s' % (ex.pos, ex.name), ex)
return self._make_atom(pred, 'x%d' % ex.var)
elif isinstance(ex, BoxerNamed):
pred = self._add_occur_indexing('ne_%s_%s' % (ex.type, ex.name), ex)
return self._make_atom(pred, 'x%d' % ex.var)
elif isinstance(ex, BoxerRel):
pred = self._add_occur_indexing('%s' % (ex.rel), ex)
return self._make_atom(pred, 'x%d' % ex.var1, 'x%d' % ex.var2)
elif isinstance(ex, BoxerProp):
return DrtProposition(Variable('x%d' % ex.var), self.interpret(ex.drs))
elif isinstance(ex, BoxerEq):
return DrtEqualityExpression(DrtVariableExpression(Variable('x%d' % ex.var1)),
DrtVariableExpression(Variable('x%d' % ex.var2)))
elif isinstance(ex, BoxerCard):
pred = self._add_occur_indexing('card_%s_%s' % (ex.type, ex.value), ex)
return self._make_atom(pred, 'x%d' % ex.var)
elif isinstance(ex, BoxerOr):
return DrtOrExpression(self.interpret(ex.drs1), self.interpret(ex.drs2))
elif isinstance(ex, BoxerWhq):
drs1 = self.interpret(ex.drs1)
drs2 = self.interpret(ex.drs2)
return DRS(drs1.refs + drs2.refs, drs1.conds + drs2.conds)
assert False, '%s: %s' % (ex.__class__.__name__, ex)
def _make_atom(self, pred, *args):
accum = DrtVariableExpression(Variable(pred))
for arg in args:
accum = DrtApplicationExpression(accum, DrtVariableExpression(Variable(arg)))
return accum
def _add_occur_indexing(self, base, ex):
if self._occur_index and ex.sent_index is not None:
if ex.discourse_id:
base += '_%s' % ex.discourse_id
base += '_s%s' % ex.sent_index
base += '_w%s' % sorted(ex.word_indices)[0]
return base
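# Worked example for the occurrence indexing above (illustrative values): a
# BoxerPred with pos 'v', name 'run', discourse_id 'd1', sent_index 2 and
# word_indices [5, 3] yields the predicate name 'v_run_d1_s2_w3' when
# occur_index is enabled, and plain 'v_run' otherwise.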
class UnparseableInputException(Exception):
pass
if __name__ == '__main__':
opts = OptionParser("usage: %prog TEXT [options]")
opts.add_option("--verbose", "-v", help="display verbose logs", action="store_true", default=False, dest="verbose")
opts.add_option("--fol", "-f", help="output FOL", action="store_true", default=False, dest="fol")
opts.add_option("--question", "-q", help="input is a question", action="store_true", default=False, dest="question")
opts.add_option("--occur", "-o", help="occurrence index", action="store_true", default=False, dest="occur_index")
(options, args) = opts.parse_args()
if len(args) != 1:
opts.error("incorrect number of arguments")
interpreter = NltkDrtBoxerDrsInterpreter(occur_index=options.occur_index)
drs = Boxer(interpreter).interpret_multisentence(args[0].split(r'\n'), question=options.question, verbose=options.verbose)
if drs is None:
print None
else:
drs = drs.simplify().eliminate_equality()
if options.fol:
print drs.fol().normalize()
else:
drs.normalize().pprint()
| gpl-3.0 | -80,528,639,685,986,600 | 38.836302 | 161 | 0.569698 | false |
jmsolano/picongpu | src/tools/bin/smooth.py | 10 | 6969 | #
# Copyright 2013-2014 Richard Pausch
#
# This file is part of PIConGPU.
#
# PIConGPU is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PIConGPU is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PIConGPU.
# If not, see <http://www.gnu.org/licenses/>.
#
import numpy
import sys
__doc__ = "This is the 'smooth' module which provides several functions that\n\
provide methods to smooth data from simulation or experiments.\n\
It can be applied to 1D and 2D data sets."
def __info__():
"""
This is the 'smooth' module which provides several functions that
provide methods to smooth data from simulation or experiments.
It can be applied to 1D and 2D data sets.
    If you are running this module as an executable program from your
    shell, you will now have a look at all manuals of the functions
    provided by this module.
    To continue press 'q'.
"""
def makeOddNumber(number, larger=True):
"""
This function takes a number and returns the next odd number.
By default, the next larger number will be returned, but by
setting larger=False the next smaller odd number will be
returned.
Example:
makeOddNumber(13) --> 13
makeOddNumber(6) --> 7
makeOddNumber(22, larger=False) --> 21
Parameters:
-----------
number int
number to which the next odd number is requested
    larger     bool (optional, default=True)
               select whether the next odd number should be larger (True)
               or smaller (False) than number
Return:
-------
returns next odd number
"""
    if number % 2 == 1:
        # in case number is odd
        return number
    elif number % 2 == 0:
        # in case number is even
        if larger:
            return number + 1
        else:
            return number - 1
    else:
        error_msg = ("ERROR: makeOddNumber -> number (= {}) is neither "
                     "odd nor even".format(number))
        raise Exception(error_msg)
def gaussWindow(N, sigma):
"""
This function returns N discrete points of a Gauss function
with a standard deviation of sigma (in units of discrete points).
    The return values are symmetric and stretch from 0 to N-1.
ATTENTION: this gauss function is NOT normalized.
Parameters:
-----------
N - int
number of sample and return points
sigma - float
        standard deviation in units of discrete points
Returns:
--------
    returns N symmetric samples of e^(-0.5*(x/sigma)^2)
"""
length = (N/float(sigma)) # +/- range bins to calculate
return numpy.exp(-0.5 * (numpy.linspace(-length, length, N))**2) # not normalized
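# Worked example: gaussWindow(5, 1.0) uses length = N / sigma = 5, samples
# x = numpy.linspace(-5, 5, 5) = [-5, -2.5, 0, 2.5, 5] and returns
# exp(-0.5 * x**2) at those points -- peak 1.0 at the center, not normalized.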
def smooth(x, sigma, window_len = 11, fkt=gaussWindow):
"""
A function that returns smoothed 1D-data from given data.
Parameters:
-----------
x - numpy.ndarray (1D)
original (noisy) data
sigma - float
standard deviation used by the window function 'fkt'
    window_len - int (optional)
number of bins used for the window function 'fkt'
default: 11 bins
fkt - function (optional)
window function used for smoothing
default: smooth.gaussWindow
Returns:
--------
    returns smoothed data with the same length as x
"""
    # check input:
    if type(x) != numpy.ndarray:
        error_msg = ("ERROR: smooth input needs to be a 1D numpy array. "
                     "Data type is {}".format(type(x)))
        raise Exception(error_msg)
    if len(x.shape) != 1:
        # not a 1D array
        error_msg = ("ERROR: smooth input needs to be a 1D numpy array. "
                     "Data shape is {}".format(x.shape))
        raise Exception(error_msg)
# extending the data at the beginning and at the end
# to apply the window at the borders
s = numpy.r_[x[window_len-1:0:-1], x, x[-1:-window_len:-1]]
w = fkt(window_len, sigma) # compute window values
# smooth data by convolution with window function
y = numpy.convolve(w/w.sum(), s, mode='valid') #smoothed data with borders
    overlap = window_len/2 # usually window_len is odd, and int-division is used
return y[overlap:len(y)-overlap] # smoothed data without added borders
def smooth2D(data, sigma_x = 10, len_x = 50, sigma_y = 10, len_y = 50, fkt=gaussWindow):
"""
This function smoothes the noisy data of a 2D array.
Parameters:
-----------
data - numpy.ndaray (2D)
original (noisy) data - needs to be a 2D array
    sigma_x - float (optional)
standard deviation of the window function 'fkt' in x-direction
default: 10 bins
len_x - int (optional)
number of bins used for the window function 'fkt' in x-direction
default: 50
    sigma_y - float (optional)
standard deviation of the window function 'fkt' in y-direction
default: 10 bins
    len_y - int (optional)
number of bins used for the window function 'fkt' in y-direction
default: 50
    fkt - function (optional)
window function
default: smooth.gaussWindow
Returns:
--------
smooth 2D-data with same dimensions as 'data'
"""
    # check input
    if type(data) != numpy.ndarray:
        error_msg = ("ERROR: smooth2D input needs to be a 2D numpy array. "
                     "Data type is {}".format(type(data)))
        raise Exception(error_msg)
data_cp = data.copy() # make a copy since python is handling arrays by reference
    if len(data.shape) != 2:
        # not a 2D array
        error_msg = ("ERROR: smooth2D input needs to be a 2D numpy array. "
                     "Data shape is {}".format(data.shape))
        raise Exception(error_msg)
    # make window bin counts odd (maximum value included)
len_x = makeOddNumber(len_x)
len_y = makeOddNumber(len_y)
# smooth x
for i in range(len(data_cp)):
data_cp[i] = smooth(data_cp[i], sigma_x, window_len=len_x, fkt=gaussWindow)
# smooth y
for j in range(len(data_cp[0])):
data_cp[:, j] = smooth(data_cp[:, j], sigma_y, window_len=len_y, fkt=gaussWindow)
# return smoothed copy
return data_cp
if __name__ == "__main__":
# call all function manuals
help(__info__)
help(makeOddNumber)
help(gaussWindow)
help(smooth)
help(smooth2D)
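    # Minimal smoke test, added for illustration (not part of the original
    # module); assumes only numpy, which is imported above.
    noisy = numpy.sin(numpy.linspace(0, 2 * numpy.pi, 200)) \
        + 0.1 * numpy.random.randn(200)
    smoothed = smooth(noisy, 3.0, window_len=15)
    print("smoothed %d samples; first value: %.3f"
          % (len(smoothed), smoothed[0]))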
| gpl-3.0 | -726,108,128,944,123,500 | 30.391892 | 100 | 0.610131 | false |
timvandermeij/unmanned-vehicle-tomography | tests/environment.py | 3 | 20734 | import math
from dronekit import LocationLocal, LocationGlobal
from mock import patch, MagicMock, PropertyMock
from ..bench.Method_Coverage import covers
from ..core.Import_Manager import Import_Manager
from ..core.Thread_Manager import Thread_Manager
from ..core.USB_Manager import USB_Manager
from ..distance.Distance_Sensor_Simulator import Distance_Sensor_Simulator
from ..environment.Environment import Environment
from ..environment.Environment_Simulator import Environment_Simulator
from ..geometry.Geometry_Spherical import Geometry_Spherical
from ..settings import Arguments
from ..trajectory.Servo import Servo
from ..vehicle.Mock_Vehicle import Mock_Vehicle, MockAttitude
from ..zigbee.Packet import Packet
from ..zigbee.RF_Sensor import RF_Sensor, RSSI_Validity_Request
from geometry import LocationTestCase
from settings import SettingsTestCase
from core_thread_manager import ThreadableTestCase
from core_usb_manager import USBManagerTestCase
from core_wiringpi import WiringPiTestCase
class EnvironmentTestCase(LocationTestCase, SettingsTestCase,
ThreadableTestCase, USBManagerTestCase,
WiringPiTestCase):
"""
Test case class for tests that make use of the `Environment` class,
including mission and distance sensor tests.
This class handles initializing the settings in a generic manner and
providing various means of simulating sensors and other modules.
"""
def __init__(self, *a, **kw):
super(EnvironmentTestCase, self).__init__(*a, **kw)
self._argv = []
self._modules = {}
self._simulated = True
def register_arguments(self, argv, simulated=True, distance_sensors=None,
use_infrared_sensor=True):
self._argv = argv
self._argv.extend([
"--rf-sensor-class", "RF_Sensor_Simulator", "--rf-sensor-id", "1"
])
self._simulated = simulated
# WiringPiTestCase provides a patcher for RPi.GPIO, which is necessary
# when not in simulation mode to be able to run those tests on PC.
# Additionally, it always handles the WiringPi setup singleton.
self.set_rpi_patch(rpi_patch=not simulated)
self._modules = {}
if use_infrared_sensor:
# We need to mock the Infrared_Sensor module as it is only
# available when LIRC is installed which is not a requirement for
# running tests.
package = __package__.split('.')[0]
self._modules[package + '.control.Infrared_Sensor'] = MagicMock()
self._argv.append("--infrared-sensor")
else:
self._argv.append("--no-infrared-sensor")
if distance_sensors is not None:
self._argv.append("--distance-sensors")
self._argv.extend([str(sensor) for sensor in distance_sensors])
def setUp(self):
super(EnvironmentTestCase, self).setUp()
self.arguments = Arguments("settings.json", self._argv)
if self._modules:
self._module_patcher = patch.dict('sys.modules', self._modules)
self._module_patcher.start()
self.environment = Environment.setup(self.arguments,
usb_manager=self.usb_manager,
simulated=self._simulated)
# Make the environment thread manager available to the tearDown method
# of the ThreadableTestCase.
self.thread_manager = self.environment.thread_manager
def location_valid(self, is_broadcast, **kw):
if is_broadcast:
specification = "rssi_broadcast"
else:
specification = "rssi_ground_station"
request = RSSI_Validity_Request(specification, **kw)
return self.environment.location_valid(request)[0]
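    # The helper above mirrors how Environment.location_valid() is driven in
    # production code: the keyword arguments are wrapped in an
    # RSSI_Validity_Request for either the "rssi_broadcast" or the
    # "rssi_ground_station" specification, and only the validity flag (first
    # element of the returned pair) is handed back to the test.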
def tearDown(self):
super(EnvironmentTestCase, self).tearDown()
if self._modules:
self._module_patcher.stop()
class TestEnvironment(EnvironmentTestCase):
def setUp(self):
self.register_arguments([
"--geometry-class", "Geometry_Spherical",
"--vehicle-class", "Mock_Vehicle", "--number-of-sensors", "3"
], distance_sensors=[0, 90], use_infrared_sensor=True)
super(TestEnvironment, self).setUp()
self.servos = []
for pin, value in [(6, 45), (7, 90)]:
methods = {
"get_pin.return_value": pin,
"get_value.return_value": value
}
self.servos.append(MagicMock(spec=Servo, **methods))
self.environment._servos = self.servos
def test_setup(self):
settings = self.arguments.get_settings("environment")
settings.set("rf_sensor_class", "")
environment = Environment.setup(self.arguments,
simulated=self._simulated)
self.assertIsInstance(environment.usb_manager, USB_Manager)
geometry = Geometry_Spherical()
import_manager = Import_Manager()
thread_manager = Thread_Manager()
usb_manager = USB_Manager()
vehicle = Mock_Vehicle(self.arguments, geometry, import_manager,
thread_manager, usb_manager)
environment = Environment.setup(self.arguments,
geometry_class="Geometry_Spherical",
vehicle=vehicle,
thread_manager=thread_manager,
usb_manager=usb_manager)
self.assertIsInstance(environment, Environment_Simulator)
self.assertIsInstance(environment.geometry, Geometry_Spherical)
self.assertEqual(environment.vehicle, vehicle)
self.assertEqual(environment.thread_manager, thread_manager)
self.assertEqual(environment.usb_manager, usb_manager)
self.assertIsNone(environment.get_rf_sensor())
self.assertEqual(environment._required_sensors, set())
for servo in environment.get_servos():
self.assertIsInstance(servo, Servo)
with self.assertRaises(ValueError):
environment = Environment.setup(self.arguments, vehicle=vehicle)
simulation_mock = PropertyMock(return_value=False)
with patch.object(Mock_Vehicle, 'use_simulation', new_callable=simulation_mock):
vehicle = Mock_Vehicle(self.arguments, geometry, import_manager,
thread_manager, usb_manager)
with self.assertRaises(ValueError):
environment = Environment.setup(self.arguments, vehicle=vehicle,
thread_manager=thread_manager,
usb_manager=usb_manager,
simulated=True)
@covers(["get_objects", "get_distance_sensors"])
def test_base_interface(self):
geometry = Geometry_Spherical()
import_manager = Import_Manager()
thread_manager = Thread_Manager()
usb_manager = USB_Manager()
vehicle = Mock_Vehicle(self.arguments, geometry, import_manager,
thread_manager, usb_manager)
environment = Environment(vehicle, geometry, self.arguments,
import_manager, thread_manager, usb_manager)
# Base class does not provide simulated objects or distance sensors.
self.assertEqual(environment.get_objects(), [])
with self.assertRaises(NotImplementedError):
environment.get_distance_sensors()
def test_initialization(self):
self.assertIsInstance(self.environment, Environment)
self.assertIsInstance(self.environment.vehicle, Mock_Vehicle)
self.assertIsInstance(self.environment.geometry, Geometry_Spherical)
self.assertIsInstance(self.environment.import_manager, Import_Manager)
self.assertIsInstance(self.environment.thread_manager, Thread_Manager)
self.assertEqual(self.environment.usb_manager, self.usb_manager)
self.assertEqual(self.environment.arguments, self.arguments)
self.assertTrue(self.environment.settings.get("infrared_sensor"))
@covers([
"get_vehicle", "get_arguments", "get_import_manager",
"get_thread_manager", "get_usb_manager", "get_distance_sensors",
"get_rf_sensor", "get_infrared_sensor", "get_servos"
])
def test_interface(self):
self.assertEqual(self.environment.get_vehicle(),
self.environment.vehicle)
self.assertEqual(self.environment.get_arguments(),
self.environment.arguments)
self.assertEqual(self.environment.get_import_manager(),
self.environment.import_manager)
self.assertEqual(self.environment.get_thread_manager(),
self.environment.thread_manager)
self.assertEqual(self.environment.get_usb_manager(),
self.environment.usb_manager)
distance_sensors = self.environment.get_distance_sensors()
expected_angles = [0, 90]
self.assertEqual(len(distance_sensors), len(expected_angles))
for i, expected_angle in enumerate(expected_angles):
self.assertIsInstance(distance_sensors[i], Distance_Sensor_Simulator)
self.assertEqual(distance_sensors[i].id, i)
self.assertEqual(distance_sensors[i].angle, expected_angle)
self.assertIsInstance(self.environment.get_rf_sensor(), RF_Sensor)
self.assertIsNotNone(self.environment.get_infrared_sensor())
self.assertEqual(self.environment.get_servos(), self.servos)
def test_on_servos(self):
pwms = {
6: 500,
7: 1000,
9: 1234,
"abc": 42
}
self.environment.on_servos(self.environment.vehicle, "servos", pwms)
self.servos[0].set_current_pwm.assert_called_once_with(500)
self.servos[1].set_current_pwm.assert_called_once_with(1000)
def test_on_home_location(self):
loc = LocationGlobal(1.0, 2.0, 3.0)
self.environment.on_home_location(self.environment.vehicle,
"home_location", loc)
self.assertEqual(self.environment.geometry.home_location, loc)
@covers(["add_packet_action", "receive_packet"])
def test_packet_action(self):
# Callback must be callable
with self.assertRaises(TypeError):
self.environment.add_packet_action("waypoint_add", "no_function")
mock_callback = MagicMock()
self.environment.add_packet_action("waypoint_add", mock_callback)
# Not allowed to add more than one callback for a packet specification.
with self.assertRaises(KeyError):
self.environment.add_packet_action("waypoint_add", MagicMock())
# Callback is called for correct specification.
packet = Packet()
packet.set("specification", "waypoint_add")
packet.set("latitude", 12.345)
packet.set("longitude", 32.109)
packet.set("to_id", 1)
self.environment.receive_packet(packet)
mock_callback.assert_called_once_with(packet)
# Callback is not called for another specification.
mock_callback.reset_mock()
packet = Packet()
packet.set("specification", "waypoint_clear")
packet.set("to_id", 1)
self.environment.receive_packet(packet)
mock_callback.assert_not_called()
@covers("get_raw_location")
def test_location(self):
vehicle = self.environment.vehicle
self.assertEqual(self.environment.location, vehicle.location)
location = self.environment.vehicle.location.global_relative_frame
self.assertEqual(location, self.environment.get_location())
# Raw location provides the correct return value corresponding to the
# real location.
raw_location, waypoint_index = self.environment.get_raw_location()
self.assertEqual(raw_location, (location.lat, location.lon))
self.assertEqual(waypoint_index, -1)
self.environment.invalidate_measurement(own_waypoint=42)
loc = LocationLocal(1.2, 3.4, -5.6)
with patch.object(vehicle, "_locations", new=loc):
raw_location, waypoint_index = self.environment.get_raw_location()
self.assertEqual(raw_location, (loc.north, loc.east))
self.assertEqual(waypoint_index, 42)
@covers([
"location_valid", "is_measurement_valid", "set_waypoint_valid",
"invalidate_measurement"
])
def test_valid_initialization(self):
rf_sensor = self.environment.get_rf_sensor()
other_id = rf_sensor.id + 1
required_sensors = set(range(1, rf_sensor.number_of_sensors + 1))
required_sensors.remove(rf_sensor.id)
# Check the initial state of internal member variables.
self.assertEqual(self.environment._valid_waypoints, {})
self.assertEqual(self.environment._valid_pairs, {
rf_sensor.id: True,
other_id: False,
other_id + 1: False
})
self.assertEqual(self.environment._required_sensors, required_sensors)
        # Initially, the location and measurement are invalid; the former
        # because the waypoint is not yet reached and the latter because the
        # required sensors have not yet triggered a location valid callback.
self.assertFalse(self.location_valid(True, other_id=other_id))
self.assertFalse(self.environment.is_measurement_valid())
@covers([
"location_valid", "is_measurement_valid", "set_waypoint_valid",
"invalidate_measurement"
])
@patch.object(Mock_Vehicle, "is_current_location_valid")
def test_valid_state(self, current_location_valid_mock):
rf_sensor = self.environment.get_rf_sensor()
other_id = rf_sensor.id + 1
# By default, we require all sensors to become valid once.
current_location_valid_mock.configure_mock(return_value=False)
self.environment.set_waypoint_valid()
self.assertFalse(self.location_valid(True, other_id=other_id))
current_location_valid_mock.configure_mock(return_value=True)
self.assertTrue(self.location_valid(True, other_id=other_id))
self.assertFalse(self.environment.is_measurement_valid())
self.assertEqual(self.environment._valid_waypoints, {rf_sensor.id: -1})
self.assertTrue(self.location_valid(False, other_id=other_id,
other_index=0, other_valid=True,
other_valid_pair=False))
self.assertFalse(self.environment.is_measurement_valid())
self.assertEqual(self.environment._valid_waypoints,
{rf_sensor.id: -1, other_id: 0})
self.assertEqual(self.environment._valid_pairs, {
rf_sensor.id: True,
other_id: False,
other_id + 1: False
})
self.assertTrue(self.location_valid(False, other_id=other_id,
other_index=0, other_valid=True,
other_valid_pair=True))
self.assertFalse(self.environment.is_measurement_valid())
self.assertEqual(self.environment._valid_pairs, {
rf_sensor.id: True,
other_id: True,
other_id + 1: False
})
self.assertTrue(self.location_valid(False, other_id=other_id + 1,
other_index=0, other_valid=True,
other_valid_pair=True))
self.assertFalse(self.environment.is_measurement_valid())
self.assertTrue(self.location_valid(True, other_id=other_id))
self.assertTrue(self.environment.is_measurement_valid())
# If another sensor's location becomes invalid again, then this does
# not invalidate the measurement.
self.assertTrue(self.location_valid(False, other_id=other_id + 1,
other_index=0, other_valid=False,
other_valid_pair=True))
self.assertTrue(self.environment.is_measurement_valid())
# Invalidate the measurement, and require a specific set of sensors
# and wait waypoint IDs.
self.environment.invalidate_measurement(required_sensors=[other_id],
own_waypoint=1, wait_waypoint=2)
self.assertFalse(self.location_valid(True, other_id=other_id))
self.assertFalse(self.environment.is_measurement_valid())
self.environment.set_waypoint_valid()
self.assertTrue(self.location_valid(True, other_id=other_id))
self.assertEqual(self.environment._valid_waypoints, {rf_sensor.id: 1})
self.assertTrue(self.location_valid(False, other_id=other_id,
other_index=1, other_valid=True,
other_valid_pair=True))
self.assertTrue(self.location_valid(True, other_id=other_id))
self.assertFalse(self.environment.is_measurement_valid())
self.assertTrue(self.location_valid(False, other_id=other_id,
other_index=2, other_valid=True,
other_valid_pair=True))
self.assertTrue(self.location_valid(True, other_id=other_id))
self.assertTrue(self.environment.is_measurement_valid())
# Check that receiving valid measurements in another order works as
# expected, i.e., it waits a full sweep. The other sensors need to
        # receive at least one measurement from the current vehicle.
self.environment.invalidate_measurement(wait_waypoint=3)
self.assertFalse(self.location_valid(False, other_id=other_id,
other_index=3, other_valid=True,
other_valid_pair=False))
self.assertFalse(self.location_valid(False, other_id=other_id + 1,
other_index=3, other_valid=True,
other_valid_pair=False))
self.assertFalse(self.environment.is_measurement_valid())
self.assertFalse(self.location_valid(True, other_id=other_id))
self.environment.set_waypoint_valid()
self.assertTrue(self.location_valid(True, other_id=other_id))
self.assertFalse(self.environment.is_measurement_valid())
self.assertTrue(self.location_valid(False, other_id=other_id,
other_index=3, other_valid=True,
other_valid_pair=True))
self.assertFalse(self.environment.is_measurement_valid())
self.assertTrue(self.location_valid(False, other_id=other_id + 1,
other_index=3, other_valid=True,
other_valid_pair=True))
self.assertTrue(self.location_valid(True, other_id=other_id))
self.assertTrue(self.environment.is_measurement_valid())
def test_get_distance(self):
loc = LocationLocal(12.0, 5.0, 0.0)
# 12**2 + 5**2 = 144 + 25 which is 13 squared.
self.assertEqual(self.environment.get_distance(loc), 13.0)
def test_get_yaw(self):
vehicle = self.environment.vehicle
vehicle.attitude = MockAttitude(0.0, 0.25*math.pi, 0.0, vehicle)
self.assertEqual(self.environment.get_yaw(), 0.25*math.pi)
def test_get_sensor_yaw(self):
vehicle = self.environment.vehicle
vehicle.attitude = MockAttitude(0.0, 0.5*math.pi, 0.0, vehicle)
self.assertEqual(self.environment.get_sensor_yaw(), 0.75*math.pi)
self.assertEqual(self.environment.get_sensor_yaw(id=1), math.pi)
# A sensor ID outside the scope of the number of servos means that we
# receive the vehicle's yaw.
self.assertEqual(self.environment.get_sensor_yaw(id=6), 0.5*math.pi)
def test_get_angle(self):
vehicle = self.environment.vehicle
vehicle.attitude = MockAttitude(0.0, 0.5*math.pi, 0.0, vehicle)
self.assertEqual(self.environment.get_angle(), 0.0)
def test_get_pitch(self):
vehicle = self.environment.vehicle
vehicle.attitude = MockAttitude(0.75*math.pi, 0.0, 0.0, vehicle)
self.assertEqual(self.environment.get_pitch(), 0.75*math.pi)
| gpl-3.0 | 4,419,482,963,407,597,000 | 45.28125 | 88 | 0.618887 | false |
apple/swift | utils/bug_reducer/bug_reducer/opt_bug_reducer.py | 12 | 7927 | from __future__ import print_function
import json
import md5
import subprocess
import func_bug_reducer
import list_reducer
from list_reducer import TESTRESULT_KEEPPREFIX
from list_reducer import TESTRESULT_KEEPSUFFIX
from list_reducer import TESTRESULT_NOFAILURE
import swift_tools
class ReduceMiscompilingPasses(list_reducer.ListReducer):
def __init__(self, lst, invoker):
list_reducer.ListReducer.__init__(self, lst)
self.invoker = invoker
def run_test(self, prefix, suffix):
# First, run the program with just the Suffix passes. If it is still
# broken with JUST the kept passes, discard the prefix passes.
suffix_joined = ' '.join(suffix)
suffix_hash = md5.md5(suffix_joined).hexdigest()
print("Checking to see if suffix '%s' compiles correctly" %
suffix_joined)
result = self.invoker.invoke_with_passlist(
suffix,
self.invoker.get_suffixed_filename(suffix_hash))
# Found a miscompile! Keep the suffix
if result['exit_code'] != 0:
print("Suffix crashes! Returning suffix")
return (TESTRESULT_KEEPSUFFIX, prefix, suffix)
if len(prefix) == 0:
print("Suffix passes and no prefix passes, returning nofailure")
return (TESTRESULT_NOFAILURE, prefix, suffix)
print("Suffix '' does not crash! Current reduced program compiles "
"without optimization!")
# Next see if the program is broken if we run the "prefix" passes
# first, then separately run the "kept" passes.
prefix_joined = ' '.join(prefix)
prefix_hash = md5.md5(prefix_joined).hexdigest()
print("Checking to see if '%s' compiles correctly" % prefix_joined)
# If it is not broken with the kept passes, it's possible that the
# prefix passes must be run before the kept passes to break it. If
# the program WORKS after the prefix passes, but then fails if running
# the prefix AND kept passes, we can update our bitcode file to
# include the result of the prefix passes, then discard the prefix
# passes.
prefix_path = self.invoker.get_suffixed_filename(prefix_hash)
result = self.invoker.invoke_with_passlist(
prefix,
prefix_path)
if result['exit_code'] != 0:
print("Prefix crashes the input by itself. Returning keep "
"prefix")
return (TESTRESULT_KEEPPREFIX, prefix, suffix)
# Ok, so now we know that the prefix passes work, first check if we
# actually have any suffix passes. If we don't, just return.
if len(suffix) == 0:
print("No suffix, and prefix passes, returning no failure")
return (TESTRESULT_NOFAILURE, prefix, suffix)
# Otherwise, treat the prefix as our new baseline and see if suffix on
# the new baseline finds the crash.
original_input_file = self.invoker.input_file
self.invoker.input_file = prefix_path
print("Checking to see if '%s' compiles correctly after the '%s' "
"passes" % (suffix_joined, prefix_joined))
result = self.invoker.invoke_with_passlist(
suffix,
self.invoker.get_suffixed_filename(suffix_hash))
# If we failed at this point, then the prefix is our new
# baseline. Return keep suffix.
if result['exit_code'] != 0:
print("Suffix failed. Keeping prefix as new baseline")
return (TESTRESULT_KEEPSUFFIX, prefix, suffix)
# Otherwise, we must not be running the bad pass anymore. Restore the
# original input_file and return NoFailure.
self.invoker.input_file = original_input_file
return (TESTRESULT_NOFAILURE, prefix, suffix)
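# Summary of the bisection outcomes above: KEEPSUFFIX either drops the prefix
# outright (the suffix alone crashes) or promotes the prefix output to the new
# baseline input before keeping the suffix; KEEPPREFIX keeps a prefix that
# crashes by itself; NOFAILURE means neither split reproduces the crash.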
def pass_bug_reducer(tools, config, passes, sil_opt_invoker, reduce_sil):
# Make sure that the base case /does/ crash.
filename = sil_opt_invoker.get_suffixed_filename('base_case')
result = sil_opt_invoker.invoke_with_passlist(passes, filename)
# If we succeed, there is no further work to do.
if result['exit_code'] == 0:
print("Does not crash on input passes!")
print("Base Case: {}. Passes: {}".format(filename, ' '.join(passes)))
return True
print("Crashes with initial pass list! First trying to reduce the pass "
"list!")
# Otherwise, reduce the list of passes that cause the optimizer to crash.
r = ReduceMiscompilingPasses(passes, sil_opt_invoker)
if not r.reduce_list():
print("Failed to find miscompiling pass list!")
cmdline = sil_opt_invoker.cmdline_with_passlist(r.target_list)
print("*** Found miscompiling passes!")
print("*** Final File: %s" % sil_opt_invoker.input_file)
print("*** Final Passes: %s" % (' '.join(r.target_list)))
print("*** Repro command line: %s" % (' '.join(cmdline)))
if not reduce_sil:
return False
print("*** User requested that we try to reduce SIL. Lets try.")
input_file = sil_opt_invoker.input_file
nm = swift_tools.SILNMInvoker(config, tools)
sil_extract_invoker = swift_tools.SILFuncExtractorInvoker(config,
tools,
input_file)
func_bug_reducer.function_bug_reducer(input_file, nm, sil_opt_invoker,
sil_extract_invoker,
r.target_list)
print("*** Final Passes: %s" % (' '.join(r.target_list)))
return False
def invoke_pass_bug_reducer(args):
"""Given a path to a sib file with canonical sil, attempt to find a
perturbed list of passes that the perf pipeline"""
tools = swift_tools.SwiftTools(args.swift_build_dir)
config = swift_tools.SILToolInvokerConfig(args)
passes = []
if args.pass_list is None:
json_data = json.loads(subprocess.check_output(
[tools.sil_passpipeline_dumper, '-Performance']))
passes = sum((p[1:] for p in json_data), [])
passes = ['-' + x[1] for x in passes]
else:
passes = ['-' + x for x in args.pass_list]
extra_args = []
if args.extra_args is not None:
extra_args = args.extra_args
sil_opt_invoker = swift_tools.SILOptInvoker(config, tools,
args.input_file,
extra_args)
pass_bug_reducer(tools, config, passes, sil_opt_invoker, args.reduce_sil)
def add_parser_arguments(parser):
"""Add parser arguments for opt_bug_reducer"""
parser.set_defaults(func=invoke_pass_bug_reducer)
parser.add_argument('input_file', help='The input file to optimize')
parser.add_argument('--module-cache', help='The module cache to use')
parser.add_argument('--sdk', help='The sdk to pass to sil-opt')
parser.add_argument('--target', help='The target to pass to sil-opt')
parser.add_argument('--resource-dir',
help='The resource-dir to pass to sil-opt')
parser.add_argument('--work-dir',
help='Working directory to use for temp files',
default='bug_reducer')
parser.add_argument('--module-name',
help='The name of the module we are optimizing')
parser.add_argument('--pass', help='pass to test', dest='pass_list',
action='append')
parser.add_argument('--extra-arg',
help='extra argument to pass to sil-opt',
dest='extra_args', action='append')
parser.add_argument('--reduce-sil', help='After finding the relevant '
'passes, try to reduce the SIL by eliminating '
'functions, blocks, etc',
action='store_true')
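# Illustrative invocation (hypothetical paths and subcommand name; assumes the
# surrounding bug_reducer driver wires up this parser and also registers the
# --swift-build-dir option consumed by invoke_pass_bug_reducer above):
#
#   bug_reducer opt --swift-build-dir /path/to/swift-build \
#       --pass SimplifyCFG --pass DCE --reduce-sil crash.sib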
| apache-2.0 | -4,833,672,586,130,806,000 | 42.79558 | 78 | 0.609815 | false |
mithrandir123/director | src/python/ddapp/tasks/taskmanagerwidget.py | 6 | 13144 | from ddapp.tasks.robottasks import *
from ddapp.tasks.descriptions import loadTaskDescription
import ddapp.applogic as app
def _splitCamelCase(name):
name = re.sub('(.)([A-Z][a-z]+)', r'\1 \2', name)
return re.sub('([a-z0-9])([A-Z])', r'\1 \2', name)
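# e.g. _splitCamelCase('walkPlanTask') -> 'walk Plan Task'; used below to turn
# camel-cased task description names into human-readable queue titles.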
class TaskItem(om.ObjectModelItem):
def __init__(self, task):
om.ObjectModelItem.__init__(self, task.properties.getProperty('Name'), properties=task.properties)
self.addProperty('Visible', False)
self.setPropertyAttribute('Visible', 'hidden', True)
self.task = task
class TaskLibraryWidget(object):
def __init__(self):
self.taskTree = TaskTree()
self.widget = QtGui.QWidget()
self.addButton = QtGui.QPushButton('add task')
l = QtGui.QGridLayout(self.widget)
l.addWidget(self.taskTree.treeWidget, 0, 0)
l.addWidget(self.taskTree.propertiesPanel, 1, 0)
l.addWidget(self.addButton, 2, 0)
self.widget.setWindowTitle('Task Library')
self.addButton.connect('clicked()', self.onAddTaskClicked)
self.callbacks = callbacks.CallbackRegistry(['OnAddTask'])
#p = self.treeWidget.palette
#p.setColor(QtGui.QPalette.Highlight, QtCore.Qt.green)
#p.setColor(QtGui.QPalette.Normal, QtGui.QPalette.Highlight, QtCore.Qt.red)
#self.treeWidget.setPalette(p)
#item.setBackground(0, QtGui.QBrush(QtCore.Qt.green))
def onAddTaskClicked(self):
task = self.taskTree.getSelectedTask()
if task:
self.callbacks.process('OnAddTask', task.task)
def addTask(self, task, parent=None):
self.taskTree.onAddTask(task, parent)
class TaskTree(object):
def __init__(self):
self.treeWidget = QtGui.QTreeWidget()
self.propertiesPanel = PythonQt.dd.ddPropertiesPanel()
self.objectModel = om.ObjectModelTree()
self.objectModel.init(self.treeWidget, self.propertiesPanel)
#self.treeWidget.setColumnCount(1)
def onSave(self):
def helper(obj, children):
if isinstance(obj, TaskItem):
obj = obj.task
elif obj:
obj = obj.getProperty('Name')
result = [obj, []]
for child in children:
result[1].append(helper(child, child.children()))
return result
state = helper(None, self.objectModel.getTopLevelObjects())
return pickle.dumps(state)
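    # The pickled payload is a nested [node, children] tree rooted at
    # [None, [...]]: containers are stored by name (str) and tasks as the
    # TaskItem's task object itself, which is exactly what onLoad() unpacks.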
def onLoad(self, serializedData):
state = pickle.loads(serializedData)
assert len(state) == 2
assert state[0] is None
def helper(parent, children):
for child in children:
child, grandChildren = child
if isinstance(child, str):
child = self.addGroup(child, parent)
else:
child = self.onAddTask(child, parent)
helper(child, grandChildren)
helper(state[0], state[1])
def loadTaskDescription(self, description):
taskQueue = self
def helper(obj, children):
for child in children:
assert isinstance(child, list)
assert len(child) == 2
if isinstance(child[0], str):
child, grandChildren = child
group = taskQueue.addGroup(child, obj)
helper(group, grandChildren)
self.objectModel.collapse(group)
else:
taskClass, args = child
assert isinstance(args, dict)
task = taskClass()
for propertyName, propertyValue in args.iteritems():
assert isinstance(propertyName, str)
task.properties.setProperty(propertyName, propertyValue)
taskQueue.onAddTask(task, obj)
helper(None, description)
def assureSelection(self):
objs = self.objectModel.getTopLevelObjects()
if len(objs) == 1:
self.objectModel.setActiveObject(objs[0])
def onAddTask(self, task, parent=None, copy=True):
if copy:
task = task.copy()
obj = TaskItem(task)
parent = parent or self.getSelectedFolder()
self.objectModel.addToObjectModel(obj, parentObj=parent)
self.assureSelection()
return obj
def addGroup(self, groupName, parent=None):
obj = self.objectModel.addContainer(groupName, parent)
obj.setProperty('Visible', False)
obj.setPropertyAttribute('Visible', 'hidden', True)
obj.setPropertyAttribute('Name', 'hidden', False)
self.assureSelection()
return obj
def removeAllTasks(self):
for obj in self.objectModel.getObjects():
self.objectModel.removeFromObjectModel(obj)
def findTaskItem(self, task):
for obj in self.objectModel.getObjects():
if isinstance(obj, TaskItem) and obj.task == task:
return obj
def selectTask(self, task):
obj = self.findTaskItem(task)
if obj:
self.objectModel.setActiveObject(obj)
def getSelectedTask(self):
if not isinstance(self.objectModel.getActiveObject(), TaskItem):
return None
return self.objectModel.getActiveObject()
def getSelectedFolder(self):
obj = self.objectModel.getActiveObject()
if obj is None:
return None
container = obj if isinstance(obj, om.ContainerItem) else obj.parent()
return container
def getSelectedTasks(self):
obj = self.objectModel.getActiveObject()
folder = self.getSelectedFolder()
tasks = self.getTasks(folder)
if obj != folder:
tasks = tasks[tasks.index(obj):]
return tasks
def getTasks(self, parent=None, fromSelected=False):
selected = self.objectModel.getActiveObject()
tasks = []
add = not fromSelected
queue = self.objectModel.getTopLevelObjects() if parent is None else parent.children()
while queue:
obj = queue.pop(0)
if obj == selected:
add = True
if isinstance(obj, om.ContainerItem):
queue = obj.children() + queue
continue
if add:
tasks.append(obj)
return tasks
class TaskQueueWidget(object):
def __del__(self):
self.timer.stop()
def __init__(self):
self.taskTree = TaskTree()
self.taskQueue = atq.AsyncTaskQueue()
self.taskQueue.connectTaskStarted(self.onTaskStarted)
self.taskQueue.connectTaskEnded(self.onTaskEnded)
self.completedTasks = []
self.timer = TimerCallback(targetFps=5)
self.timer.callback = self.updateDisplay
self.timer.start()
self.widget = QtGui.QWidget()
l = QtGui.QGridLayout(self.widget)
self.queueCombo = QtGui.QComboBox()
self.startButton = QtGui.QPushButton('start')
self.stepButton = QtGui.QPushButton('step')
self.clearButton = QtGui.QPushButton('clear')
self.currentTaskLabel = QtGui.QLabel('')
self.statusTaskLabel = QtGui.QLabel('')
self.statusTaskLabel.visible = False
l.addWidget(self.queueCombo, 0, 0)
l.addWidget(self.taskTree.treeWidget, 1, 0)
l.addWidget(self.taskTree.propertiesPanel, 2, 0)
l.addWidget(self.startButton, 3, 0)
l.addWidget(self.stepButton, 4, 0)
l.addWidget(self.clearButton, 5, 0)
l.addWidget(self.currentTaskLabel, 6, 0)
l.addWidget(self.statusTaskLabel, 7, 0)
self.widget.setWindowTitle('Task Queue')
self.descriptions = {}
self.queueCombo.insertSeparator(0)
self.queueCombo.addItem('Create new...')
self.queueCombo.connect('currentIndexChanged(const QString&)', self.onQueueComboChanged)
self.startButton.connect('clicked()', self.onStart)
self.stepButton.connect('clicked()', self.onStep)
self.clearButton.connect('clicked()', self.onClear)
def loadTaskDescription(self, name, description):
name = _splitCamelCase(name).capitalize()
self.descriptions[name] = description
insertIndex = self.queueCombo.count - 2
self.queueCombo.blockSignals(True)
self.queueCombo.insertItem(insertIndex, name)
self.queueCombo.blockSignals(False)
def onAddQueue(self):
inputDialog = QtGui.QInputDialog()
inputDialog.setInputMode(inputDialog.TextInput)
inputDialog.setLabelText('New queue name:')
inputDialog.setWindowTitle('Enter Name')
inputDialog.setTextValue('')
result = inputDialog.exec_()
if not result:
return
name = inputDialog.textValue()
if name in self.descriptions:
return
emptyDescription = [
['empty', [
]],
]
self.descriptions[name] = emptyDescription
insertIndex = self.queueCombo.count - 2
self.queueCombo.blockSignals(True)
self.queueCombo.insertItem(insertIndex, name)
self.queueCombo.blockSignals(False)
self.setCurrentQueue(name)
def setCurrentQueue(self, name):
assert name in self.descriptions
self.queueCombo.setCurrentIndex(self.queueCombo.findText(name))
def onQueueComboChanged(self, name):
assert len(name)
self.taskTree.removeAllTasks()
if name == 'Create new...':
self.onAddQueue()
else:
description = self.descriptions[name]
self.taskTree.loadTaskDescription(description)
def onClear(self):
assert not self.taskQueue.isRunning
self.taskQueue.reset()
self.taskTree.removeAllTasks()
self.updateDisplay()
def onTaskStarted(self, taskQueue, task):
print 'starting task:', task.properties.getProperty('Name')
self.taskTree.selectTask(task)
item = self.taskTree.findTaskItem(task)
if len(self.completedTasks) and item.getProperty('Visible'):
raise atq.AsyncTaskQueue.PauseException()
def onTaskEnded(self, taskQueue, task):
self.completedTasks.append(task)
self.taskTree.selectTask(self.completedTasks[0])
def updateStatusMessage(self):
currentTask = self.taskQueue.currentTask
if currentTask and currentTask.statusMessage:
text = vis.updateText(currentTask.statusMessage, 'task status message', parent='planning')
text.setProperty('Visible', True)
else:
text = om.findObjectByName('task status message')
if text:
text.setProperty('Visible', False)
def updateDisplay(self):
isRunning = self.taskQueue.isRunning
isEmpty = len(self.taskTree.objectModel._objects) == 0
if isRunning:
self.startButton.text = 'stop'
else:
self.startButton.text = 'start'
self.startButton.setEnabled(not isEmpty)
self.stepButton.setEnabled(not isRunning and not isEmpty)
self.clearButton.setEnabled(not isRunning and not isEmpty)
currentTask = self.taskQueue.currentTask
if currentTask:
self.currentTaskLabel.text = 'Task: <b>%s</b>' % currentTask.properties.getProperty('Name')
else:
self.currentTaskLabel.text = ''
self.updateStatusMessage()
def onStep(self):
assert not self.taskQueue.isRunning
task = self.taskTree.getSelectedTask()
if not task:
return
self.completedTasks = []
self.taskQueue.reset()
self.taskQueue.addTask(task.task)
self.taskQueue.start()
self.updateDisplay()
def onStart(self):
if self.taskQueue.isRunning:
currentTask = self.taskQueue.currentTask
self.taskQueue.stop()
if currentTask:
currentTask.stop()
else:
self.completedTasks = []
self.taskQueue.reset()
for obj in self.taskTree.getSelectedTasks():
#print 'adding task:', obj.task.properties.name
self.taskQueue.addTask(obj.task)
self.taskQueue.start()
self.updateDisplay()
class TaskWidgetManager(object):
def __init__(self):
self.taskLibraryWidget = TaskLibraryWidget()
self.taskQueueWidget = TaskQueueWidget()
self.taskLibraryWidget.callbacks.connect('OnAddTask', self.taskQueueWidget.taskTree.onAddTask)
self.addDefaultTasksToLibrary()
def addDefaultTasksToLibrary(self):
desc = loadTaskDescription('taskLibrary')
self.taskLibraryWidget.taskTree.loadTaskDescription(desc)
def init():
global panel
panel = TaskWidgetManager()
dock = app.addWidgetToDock(panel.taskQueueWidget.widget, action=app.getToolBarActions()['ActionTaskManagerPanel'])
dock.hide()
dock = app.addWidgetToDock(panel.taskLibraryWidget.widget)
dock.hide()
return panel
| bsd-3-clause | -3,741,546,199,701,315,600 | 29.146789 | 118 | 0.617848 | false |
sunrin92/LearnPython | 1-lpthw/ex35.py | 1 | 1857 | # -*- coding: utf-8 -*-
from sys import exit
def gold_room():
next = input("This room is full of gold. How much do you take?\n> ")
if next.isdigit():
how_much = int(next)
else:
dead("Man, learn to type a number.")
if how_much < 50:
print("Nice, you're not greedy. you win!")
exit(0)
else:
dead("You greedy bastard!")
def bear_room():
print("""
There is a bear here.
The bear has a bunch of honey.
the fat bear is in front of another door.
How are you going to move the bear?
""")
bear_moved = False
while True:
next = input(">")
if next == "take honey":
dead("The bear looks at you then slaps your face off.")
elif next == "taunt bear" and not bear_moved:
print("The bear has moved from the door. You can go through it now.")
bear_moved = True
elif next == "taunt bear" and bear_moved:
dead("The bear gets pissed off and chews your leg off")
elif next == "open door" and bear_moved:
gold_room()
else:
print("I got no idea what that means")
def cthulhu_room():
print("""
Here you see the great evil Cthulhu.
He, it , whatever stares at you and you go insane.
Do you flee for your life or eat your head?
""")
next = input("> ")
if "flee" in next:
start()
elif "head" in next:
dead("Well that was tasty!")
else:
cthulhu_room()
def dead(why):
print(why, "Good Job!")
exit(0)
def start():
print("""
You are in a dark room.
There is a door to your right and left.
which one do you take?
""")
next = input(">")
if next == "left":
bear_room()
if next == "right":
cthulhu_room()
else:
dead("You stumble around the room until you starve.")
start()
| mit | 8,081,625,006,594,868,000 | 21.373494 | 81 | 0.562736 | false |
thequbit/accident-mapper | mkjson.py | 1 | 3006 |
import csv
import utm
import json
QUERYID = 0
CASE_NUM = 1
CASE_YEAR = 2
REGN_CNTY_CDE = 3
COMP_MUNI = 4
MUNITYPE = 5
REF_MRKR = 6
ATINTERSECTION_IND = 7
COMPX = 8
COMPY = 9
ACC_DATE = 10
ACC_TIME = 11
DMV_ACCD_CLSF = 12
NUM_OF_INJURIES = 13
NUM_OF_FATALITIES = 14
NUM_OF_VEH = 15
ACCD_TYP = 16
COLLISION_TYP = 17
TRAF_CNTL = 18
LIGHT_COND = 19
WEATHER = 20
ROAD_SURF_COND = 21
def readdata(csvfile):
f = open(csvfile,'r')
reader = csv.reader(f)
count = 0
accidents = []
for row in reader:
#print row
#raise Exception('debug')
if count == 0:
count += 1
continue
#print "{0}: {1}".format(count,row)
#cells = line.split(',')
cells = row
event = {}
event['queryid'] = cells[QUERYID].strip()
event['casenum'] = cells[CASE_NUM].strip()
event['year'] = cells[CASE_YEAR].strip()
event['region'] = cells[REGN_CNTY_CDE].strip()
event['municipality'] = cells[REGN_CNTY_CDE].strip()
event['municipalitytype'] = cells[MUNITYPE].strip()
event['markerreference'] = cells[REF_MRKR].strip()
event['atintersection'] = cells[ATINTERSECTION_IND].strip()
event['compx'] = cells[COMPX].strip()
event['compy'] = cells[COMPY].strip()
event['date'] = cells[ACC_DATE].strip()
event['time'] = cells[ACC_TIME].strip()
event['class'] = cells[DMV_ACCD_CLSF].strip()
event['injuries'] = cells[NUM_OF_INJURIES].strip()
event['fatalities'] = cells[NUM_OF_FATALITIES].strip()
event['vehiclecount'] = cells[NUM_OF_VEH].strip()
event['accidenttype'] = cells[ACCD_TYP].strip()
event['collisiontype'] = cells[COLLISION_TYP].strip()
event['trafficcontrol'] = cells[TRAF_CNTL].strip()
event['lightcondition'] = cells[LIGHT_COND].strip()
event['weather'] = cells[WEATHER].strip()
event['roadconditions'] = cells[ROAD_SURF_COND].strip()
event['municipality'] = cells[COMP_MUNI].strip()
#easting = int(event['compx'])
#northing = int(event['compy'])
easting = int(cells[COMPX])
northing = int(cells[COMPY])
(lat,lng) = utm.to_latlon(easting,northing,18,'T')
event['lat'] = lat
event['lng'] = lng
accidents.append(event)
count += 1
if ( count % 100 == 0 ):
print "{0} ...".format(count)
#print "{0}: {1}".format(count,row)
f.close()
#for key in accidents.keys():
# with open('./web/static/accidents-{0}.json'.format(key.replace(' ','')),'w') as outfile:
# outfile.write(json.dumps(accidents[key]))
#
#with open('./web/static/places.json','w') as outfile:
# outfile.write(json.dumps(places))
#
#with open('./web/static/years.json','w') as outfile:
# outfile.write(json.dumps(years))
print 'done.'
return accidents;
if __name__ == '__main__':
readdata('traffic.csv')
| gpl-3.0 | -4,014,027,125,590,093,300 | 27.093458 | 97 | 0.571524 | false |
jbenden/ansible | lib/ansible/modules/cloud/cloudstack/cs_router.py | 18 | 10542 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# (c) 2016, René Moser <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['stableinterface'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cs_router
short_description: Manages routers on Apache CloudStack based clouds.
description:
- Start, restart, stop and destroy routers.
- C(state=present) is not able to create routers, use M(cs_network) instead.
version_added: "2.2"
author: "René Moser (@resmo)"
options:
name:
description:
- Name of the router.
required: true
service_offering:
description:
- Name or id of the service offering of the router.
required: false
default: null
domain:
description:
- Domain the router is related to.
required: false
default: null
account:
description:
- Account the router is related to.
required: false
default: null
project:
description:
- Name of the project the router is related to.
required: false
default: null
zone:
description:
- Name of the zone the router is deployed in.
- If not set, all zones are used.
required: false
default: null
version_added: "2.4"
state:
description:
- State of the router.
required: false
default: 'present'
choices: [ 'present', 'absent', 'started', 'stopped', 'restarted' ]
extends_documentation_fragment: cloudstack
'''
EXAMPLES = '''
# Ensure the router has the desired service offering, no matter if
# the router is running or not.
- local_action:
module: cs_router
name: r-40-VM
service_offering: System Offering for Software Router
# Ensure started
- local_action:
module: cs_router
name: r-40-VM
state: started
# Ensure started with desired service offering.
# If the service offerings changes, router will be rebooted.
- local_action:
module: cs_router
name: r-40-VM
service_offering: System Offering for Software Router
state: started
# Ensure stopped
- local_action:
module: cs_router
name: r-40-VM
state: stopped
# Remove a router
- local_action:
module: cs_router
name: r-40-VM
state: absent
'''
RETURN = '''
---
id:
description: UUID of the router.
returned: success
type: string
sample: 04589590-ac63-4ffc-93f5-b698b8ac38b6
name:
description: Name of the router.
returned: success
type: string
sample: r-40-VM
created:
description: Date of the router was created.
returned: success
type: string
sample: 2014-12-01T14:57:57+0100
template_version:
description: Version of the system VM template.
returned: success
type: string
sample: 4.5.1
requires_upgrade:
description: Whether the router needs to be upgraded to the new template.
returned: success
type: bool
sample: false
redundant_state:
description: Redundant state of the router.
returned: success
type: string
sample: UNKNOWN
role:
description: Role of the router.
returned: success
type: string
sample: VIRTUAL_ROUTER
zone:
description: Name of zone the router is in.
returned: success
type: string
sample: ch-gva-2
service_offering:
description: Name of the service offering the router has.
returned: success
type: string
sample: System Offering For Software Router
state:
description: State of the router.
returned: success
type: string
sample: Active
domain:
description: Domain the router is related to.
returned: success
type: string
sample: ROOT
account:
description: Account the router is related to.
returned: success
type: string
sample: admin
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.cloudstack import (
AnsibleCloudStack,
cs_argument_spec,
cs_required_together,
)
class AnsibleCloudStackRouter(AnsibleCloudStack):
def __init__(self, module):
super(AnsibleCloudStackRouter, self).__init__(module)
self.returns = {
'serviceofferingname': 'service_offering',
'version': 'template_version',
'requiresupgrade': 'requires_upgrade',
'redundantstate': 'redundant_state',
'role': 'role'
}
self.router = None
def get_service_offering_id(self):
service_offering = self.module.params.get('service_offering')
if not service_offering:
return None
args = {
'issystem': True
}
service_offerings = self.query_api('listServiceOfferings', **args)
if service_offerings:
for s in service_offerings['serviceoffering']:
if service_offering in [s['name'], s['id']]:
return s['id']
self.module.fail_json(msg="Service offering '%s' not found" % service_offering)
def get_router(self):
if not self.router:
router = self.module.params.get('name')
args = {
'projectid': self.get_project(key='id'),
'account': self.get_account(key='name'),
'domainid': self.get_domain(key='id'),
'listall': True
}
if self.module.params.get('zone'):
args['zoneid'] = self.get_zone(key='id')
routers = self.query_api('listRouters', **args)
if routers:
for r in routers['router']:
if router.lower() in [r['name'].lower(), r['id']]:
self.router = r
break
return self.router
def start_router(self):
router = self.get_router()
if not router:
self.module.fail_json(msg="Router not found")
if router['state'].lower() != "running":
self.result['changed'] = True
args = {
'id': router['id'],
}
if not self.module.check_mode:
res = self.query_api('startRouter', **args)
poll_async = self.module.params.get('poll_async')
if poll_async:
router = self.poll_job(res, 'router')
return router
def stop_router(self):
router = self.get_router()
if not router:
self.module.fail_json(msg="Router not found")
if router['state'].lower() != "stopped":
self.result['changed'] = True
args = {
'id': router['id'],
}
if not self.module.check_mode:
res = self.query_api('stopRouter', **args)
poll_async = self.module.params.get('poll_async')
if poll_async:
router = self.poll_job(res, 'router')
return router
def reboot_router(self):
router = self.get_router()
if not router:
self.module.fail_json(msg="Router not found")
self.result['changed'] = True
args = {
'id': router['id'],
}
if not self.module.check_mode:
res = self.query_api('rebootRouter', **args)
poll_async = self.module.params.get('poll_async')
if poll_async:
router = self.poll_job(res, 'router')
return router
def absent_router(self):
router = self.get_router()
if router:
self.result['changed'] = True
args = {
'id': router['id'],
}
if not self.module.check_mode:
res = self.query_api('destroyRouter', **args)
poll_async = self.module.params.get('poll_async')
if poll_async:
self.poll_job(res, 'router')
return router
def present_router(self):
router = self.get_router()
if not router:
self.module.fail_json(msg="Router can not be created using the API, see cs_network.")
args = {
'id': router['id'],
'serviceofferingid': self.get_service_offering_id(),
}
state = self.module.params.get('state')
if self.has_changed(args, router):
self.result['changed'] = True
if not self.module.check_mode:
current_state = router['state'].lower()
self.stop_router()
router = self.query_api('changeServiceForRouter', **args)
if state in ['restarted', 'started']:
router = self.start_router()
# if state=present we get to the state before the service
# offering change.
elif state == "present" and current_state == "running":
router = self.start_router()
elif state == "started":
router = self.start_router()
elif state == "stopped":
router = self.stop_router()
elif state == "restarted":
router = self.reboot_router()
return router
def main():
argument_spec = cs_argument_spec()
argument_spec.update(dict(
name=dict(required=True),
service_offering=dict(),
state=dict(choices=['present', 'started', 'stopped', 'restarted', 'absent'], default="present"),
domain=dict(),
account=dict(),
project=dict(),
zone=dict(),
poll_async=dict(type='bool', default=True),
))
module = AnsibleModule(
argument_spec=argument_spec,
required_together=cs_required_together(),
supports_check_mode=True
)
acs_router = AnsibleCloudStackRouter(module)
state = module.params.get('state')
if state in ['absent']:
router = acs_router.absent_router()
else:
router = acs_router.present_router()
result = acs_router.get_result(router)
module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 | 4,121,007,887,277,354,000 | 26.447917 | 104 | 0.594118 | false |
jumpserver/jumpserver | apps/assets/serializers/asset.py | 1 | 7375 | # -*- coding: utf-8 -*-
#
from rest_framework import serializers
from django.db.models import F
from django.core.validators import RegexValidator
from django.utils.translation import ugettext_lazy as _
from orgs.mixins.serializers import BulkOrgResourceModelSerializer
from ..models import Asset, Node, Platform
from .base import ConnectivitySerializer
__all__ = [
'AssetSerializer', 'AssetSimpleSerializer',
'AssetDisplaySerializer',
'ProtocolsField', 'PlatformSerializer',
'AssetDetailSerializer', 'AssetTaskSerializer',
]
class ProtocolField(serializers.RegexField):
protocols = '|'.join(dict(Asset.PROTOCOL_CHOICES).keys())
default_error_messages = {
'invalid': _('Protocol format should {}/{}'.format(protocols, '1-65535'))
}
regex = r'^(%s)/(\d{1,5})$' % protocols
def __init__(self, *args, **kwargs):
super().__init__(self.regex, **kwargs)
def validate_duplicate_protocols(values):
errors = []
names = []
for value in values:
if not value or '/' not in value:
continue
name = value.split('/')[0]
if name in names:
errors.append(_("Protocol duplicate: {}").format(name))
names.append(name)
errors.append('')
if any(errors):
raise serializers.ValidationError(errors)
class ProtocolsField(serializers.ListField):
default_validators = [validate_duplicate_protocols]
def __init__(self, *args, **kwargs):
kwargs['child'] = ProtocolField()
kwargs['allow_null'] = True
kwargs['allow_empty'] = True
kwargs['min_length'] = 1
kwargs['max_length'] = 4
super().__init__(*args, **kwargs)
def to_representation(self, value):
if not value:
return []
return value.split(' ')
class AssetSerializer(BulkOrgResourceModelSerializer):
platform = serializers.SlugRelatedField(
slug_field='name', queryset=Platform.objects.all(), label=_("Platform")
)
protocols = ProtocolsField(label=_('Protocols'), required=False, default=['ssh/22'])
domain_display = serializers.ReadOnlyField(source='domain.name', label=_('Domain name'))
admin_user_display = serializers.ReadOnlyField(source='admin_user.name', label=_('Admin user name'))
nodes_display = serializers.ListField(child=serializers.CharField(), label=_('Nodes name'), required=False)
"""
资产的数据结构
"""
class Meta:
model = Asset
fields_mini = ['id', 'hostname', 'ip']
fields_small = fields_mini + [
'protocol', 'port', 'protocols', 'is_active', 'public_ip',
'number', 'vendor', 'model', 'sn', 'cpu_model', 'cpu_count',
'cpu_cores', 'cpu_vcpus', 'memory', 'disk_total', 'disk_info',
'os', 'os_version', 'os_arch', 'hostname_raw', 'comment',
'created_by', 'date_created', 'hardware_info',
]
fields_fk = [
'admin_user', 'admin_user_display', 'domain', 'domain_display', 'platform'
]
fk_only_fields = {
'platform': ['name']
}
fields_m2m = [
'nodes', 'nodes_display', 'labels',
]
annotates_fields = {
# 'admin_user_display': 'admin_user__name'
}
fields_as = list(annotates_fields.keys())
fields = fields_small + fields_fk + fields_m2m + fields_as
read_only_fields = [
'created_by', 'date_created',
] + fields_as
extra_kwargs = {
'protocol': {'write_only': True},
'port': {'write_only': True},
'hardware_info': {'label': _('Hardware info')},
'org_name': {'label': _('Org name')}
}
@classmethod
def setup_eager_loading(cls, queryset):
""" Perform necessary eager loading of data. """
queryset = queryset.prefetch_related('admin_user', 'domain', 'platform')
queryset = queryset.prefetch_related('nodes', 'labels')
return queryset
def compatible_with_old_protocol(self, validated_data):
protocols_data = validated_data.pop("protocols", [])
# 兼容老的api
name = validated_data.get("protocol")
port = validated_data.get("port")
if not protocols_data and name and port:
protocols_data.insert(0, '/'.join([name, str(port)]))
elif not name and not port and protocols_data:
protocol = protocols_data[0].split('/')
validated_data["protocol"] = protocol[0]
validated_data["port"] = int(protocol[1])
if protocols_data:
validated_data["protocols"] = ' '.join(protocols_data)
def perform_nodes_display_create(self, instance, nodes_display):
if not nodes_display:
return
nodes_to_set = []
for full_value in nodes_display:
node = Node.objects.filter(full_value=full_value).first()
if node:
nodes_to_set.append(node)
else:
node = Node.create_node_by_full_value(full_value)
nodes_to_set.append(node)
instance.nodes.set(nodes_to_set)
def create(self, validated_data):
self.compatible_with_old_protocol(validated_data)
nodes_display = validated_data.pop('nodes_display', '')
instance = super().create(validated_data)
self.perform_nodes_display_create(instance, nodes_display)
return instance
def update(self, instance, validated_data):
nodes_display = validated_data.pop('nodes_display', '')
self.compatible_with_old_protocol(validated_data)
instance = super().update(instance, validated_data)
self.perform_nodes_display_create(instance, nodes_display)
return instance
class AssetDisplaySerializer(AssetSerializer):
connectivity = ConnectivitySerializer(read_only=True, label=_("Connectivity"))
class Meta(AssetSerializer.Meta):
fields = AssetSerializer.Meta.fields + [
'connectivity',
]
class PlatformSerializer(serializers.ModelSerializer):
meta = serializers.DictField(required=False, allow_null=True, label=_('Meta'))
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# TODO 修复 drf SlugField RegexValidator bug,之后记得删除
validators = self.fields['name'].validators
if isinstance(validators[-1], RegexValidator):
validators.pop()
class Meta:
model = Platform
fields = [
'id', 'name', 'base', 'charset',
'internal', 'meta', 'comment'
]
class AssetDetailSerializer(AssetSerializer):
platform = PlatformSerializer(read_only=True)
class AssetSimpleSerializer(serializers.ModelSerializer):
connectivity = ConnectivitySerializer(read_only=True, label=_("Connectivity"))
class Meta:
model = Asset
fields = ['id', 'hostname', 'ip', 'connectivity', 'port']
class AssetTaskSerializer(serializers.Serializer):
ACTION_CHOICES = (
('refresh', 'refresh'),
('test', 'test'),
)
task = serializers.CharField(read_only=True)
action = serializers.ChoiceField(choices=ACTION_CHOICES, write_only=True)
assets = serializers.PrimaryKeyRelatedField(
queryset=Asset.objects, required=False, allow_empty=True, many=True
)
| gpl-2.0 | 3,699,539,720,247,772,000 | 33.928571 | 111 | 0.611588 | false |
mtbc/openmicroscopy | components/tools/OmeroPy/test/integration/scriptstest/test_coverage.py | 4 | 1692 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Integration test demonstrating various script creation methods
Copyright 2010-2013 Glencoe Software, Inc. All rights reserved.
Use is subject to license terms supplied in LICENSE.txt
"""
import test.integration.library as lib
import pytest
import os, sys
import omero
class TestCoverage(lib.ITest):
def setup_method(self, method):
"""
getScripts returns official scripts, several of which are shipped with OMERO.
"""
lib.ITest.setup_method(self, method)
self.rs = self.root.sf.getScriptService()
self.us = self.client.sf.getScriptService()
assert len(self.rs.getScripts()) > 0
assert len(self.us.getScripts()) > 0
assert len(self.us.getUserScripts([])) == 0 # New user. No scripts
def testGetScriptWithDetails(self):
scriptList = self.us.getScripts()
script = scriptList[0]
scriptMap = self.us.getScriptWithDetails(script.id.val)
assert len(scriptMap) == 1
scriptText = scriptMap.keys()[0]
scriptObj = scriptMap.values()[0]
def testUploadAndScript(self):
scriptID = self.us.uploadScript("/OME/Foo.py", """if True:
import omero
import omero.grid as OG
import omero.rtypes as OR
import omero.scripts as OS
client = OS.client("testUploadScript")
print "done"
""")
return scriptID
def testUserCantUploadOfficalScript(self):
with pytest.raises(omero.SecurityViolation):
self.us.uploadOfficialScript( "/%s/fails.py" % self.uuid(),\
"""if True:
import omero
""")
| gpl-2.0 | 495,687,552,974,198,000 | 28.172414 | 85 | 0.631206 | false |
navrasio/mxnet | example/recommenders/negativesample.py | 19 | 7191 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""DataIter for negative sampling.
"""
import mxnet as mx
import numpy as np
class NegativeSamplingDataIter(mx.io.DataIter):
"""Wraps an existing DataIter to produce a new DataIter with negative samples.
Assumes that all the relevant inputs are in data, not labels.
Drops (replaces) any labels in the original DataIter.
It only shuffles one of the input data columns, specified in the
constructor as shuffle_data_idx. So if the original input data
has three columns, ('item_ids', 'item_words', 'users') and you want
to keep the two "item_*" together, then set `shuffle_data_idx=2`
and `users` will be shuffled for the negative samples.
Output batches will be larger than input batches by a factor
of (1+sample_ratio)
Negative samples are always drawn from the same minibatch.
So they're effectively sampled according to the frequency at
which they appear in the training data. (Other reasonable
samling strategies are not implemented here.)
The shuffling is checked to ensure that a true positive sample
isn't returned as a negative sample.
"""
def __init__(self, source_dataiter, sample_ratio=1, shuffle_data_idx=1,
positive_label=1, negative_label=0):
self._sourcedata = source_dataiter
source_dataiter.reset()
self.positive_label = positive_label # output shapes = input shapes
self.negative_label = negative_label
self.shuffle_data_idx = shuffle_data_idx
if sample_ratio == int(sample_ratio):
self.sample_ratio = int(sample_ratio)
else:
raise ValueError("sample_ratio must be an integer, not %s" % sample_ratio)
self._clear_queue()
self.provide_data = source_dataiter.provide_data
self.provide_label = source_dataiter.provide_label
self.batch_size = source_dataiter.batch_size
def _clear_queue(self):
self._sampled_queue = []
def _push_queue(self, data_list, labels):
"""Takes a list of numpy arrays for data,
and a numpy array for labels.
Converts to minibatches and puts it on the queue.
"""
num_minibatches = 1+self.sample_ratio
total_size = len(labels)
slice_size = total_size / num_minibatches
def slicer(x, s):
idx = range(int(s*slice_size), int((s+1)*slice_size))
return np.take(x,idx,0)
for i in range(1+self.sample_ratio):
nddata = [mx.nd.array(slicer(x,i)) for x in data_list]
ndlabels = mx.nd.array(slicer(labels,i))
batch = mx.io.DataBatch(nddata, [ndlabels], provide_data=self.provide_data,
provide_label=self.provide_label)
self._sampled_queue.append(batch)
def next(self):
if not self._sampled_queue:
self._refill_queue()
batch = self._sampled_queue.pop()
return batch
def reset(self):
self._sourcedata.reset()
self._clear_queue()
def _shuffle_batch(self, data):
# Takes a list of NDArrays. Returns a shuffled version as numpy
a = data[self.shuffle_data_idx].asnumpy()
# Come up with a shuffled index
batch_size = data[0].shape[0]
si = np.arange(batch_size)
np.random.shuffle(si)
matches = (si == np.arange(batch_size)) # everywhere it didn't shuffle
si -= matches # Shifts down by 1 when true, ensuring it differs
# Note shifting down by 1 works in python because -1 is a valid index.
#Q: Is this shifting introducing bias?
# Shuffle the data with the shuffle index
shuf_a = np.take(a,si,0) # like a[si,:] but general for ndarray's
# Return similar datastructure to what we got. Convert all to numpy
out = [d.asnumpy() for d in data]
out[self.shuffle_data_idx] = shuf_a
return out
def _refill_queue(self):
"""Fetch another batch from the source, and shuffle it to make
negative samples.
"""
original = self._sourcedata.next().data # List of NDArrays: one per input
batch_size = original[0].shape[0]
num_inputs = len(original)
#Start with positive examples, copied straight
outdata = [[o.asnumpy()] for o in original] # list of lists of numpy arrays
outlabels = [np.ones(batch_size) * self.positive_label] # list of numpy arrays
# The inner list of both is the set of samples. We'll recombine later.
# Construct negative samples.
for _ in range(self.sample_ratio):
shuffled = self._shuffle_batch(original)
for i,d in enumerate(shuffled):
outdata[i].append(d)
outlabels.append(np.ones(batch_size) * self.negative_label)
def stacker(x):
if len(x[0].shape)==1:
return np.hstack(x)
else:
return np.vstack(x)
outdata = [stacker(x) for x in outdata] # Single tall vectors
outlabels = stacker(outlabels)
# Record-level shuffle so the negatives are mixed in.
def shuffler(x, idx):
return np.take(x,idx,0)
shuf_idx = np.arange(len(outlabels))
np.random.shuffle(shuf_idx)
outdata = [shuffler(o,shuf_idx) for o in outdata]
outlabels = shuffler(outlabels,shuf_idx)
self._push_queue(outdata,outlabels)
if __name__ == "__main__":
print("Simple test of NegativeSamplingDataIter")
np.random.seed(123)
A = np.random.randint(-20,150,size=(100,5))
B = np.random.randint(-2,15,size=(100,2))
R = np.random.randint(1,5,size=(100,))
batch_size=3
oridi = mx.io.NDArrayIter(data={'a':A,'b':B},label=R, batch_size=batch_size)
oribatch = oridi.next()
oridi.reset()
for ratio in range(0,5):
nsdi = NegativeSamplingDataIter(oridi, sample_ratio=ratio)
# Check sizes of output
bat = nsdi.next()
for i in range(len(bat.data)):
assert bat.data[i].shape[0] == batch_size
assert bat.data[i].shape[1] == oribatch.data[i].shape[1]
assert bat.label.shape[0] == batch_size
# Check that we get more minibatches
oridi.reset()
ori_cnt = len(list(oridi))
nsdi.reset()
ns_cnt = len(list(nsdi))
assert ns_cnt == ori_cnt * (1+ratio)
print("Tests done")
| apache-2.0 | -3,277,792,955,521,485,300 | 38.95 | 87 | 0.635099 | false |
ahmetdaglarbas/e-commerce | oscar/apps/address/south_migrations/0011_auto__chg_field_useraddress_search_text.py | 6 | 7043 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
from oscar.core.compat import AUTH_USER_MODEL, AUTH_USER_MODEL_NAME
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'UserAddress.search_text'
db.alter_column(u'address_useraddress', 'search_text', self.gf('django.db.models.fields.TextField')())
def backwards(self, orm):
# Changing field 'UserAddress.search_text'
db.alter_column(u'address_useraddress', 'search_text', self.gf('django.db.models.fields.CharField')(max_length=1000))
models = {
u'address.country': {
'Meta': {'ordering': "('-display_order', 'name')", 'object_name': 'Country'},
'display_order': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0', 'db_index': 'True'}),
'is_shipping_country': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'iso_3166_1_a2': ('django.db.models.fields.CharField', [], {'max_length': '2', 'primary_key': 'True'}),
'iso_3166_1_a3': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '3', 'blank': 'True'}),
'iso_3166_1_numeric': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'printable_name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
u'address.useraddress': {
'Meta': {'ordering': "['-num_orders']", 'unique_together': "(('user', 'hash'),)", 'object_name': 'UserAddress'},
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['address.Country']"}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'hash': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_default_for_billing': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_default_for_shipping': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'line1': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'line2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'line3': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'line4': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'notes': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'num_orders': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'phone_number': ('oscar.models.fields.PhoneNumberField', [], {'max_length': '128', 'blank': 'True'}),
'postcode': ('oscar.models.fields.UppercaseCharField', [], {'max_length': '64', 'blank': 'True'}),
'search_text': ('django.db.models.fields.TextField', [], {}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'addresses'", 'to': u"orm['{0}']".format(AUTH_USER_MODEL)})
},
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
AUTH_USER_MODEL: {
'Meta': {'object_name': AUTH_USER_MODEL_NAME},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['address'] | bsd-3-clause | -6,425,532,738,500,882,000 | 73.93617 | 187 | 0.565242 | false |
eangelou/Uncooked | Virtio-serial-askisi/qemu-1.2.0/tests/qemu-iotests/qcow2.py | 29 | 6778 | #!/usr/bin/env python
import sys
import struct
import string
class QcowHeaderExtension:
def __init__(self, magic, length, data):
self.magic = magic
self.length = length
self.data = data
@classmethod
def create(cls, magic, data):
return QcowHeaderExtension(magic, len(data), data)
class QcowHeader:
uint32_t = 'I'
uint64_t = 'Q'
fields = [
# Version 2 header fields
[ uint32_t, '%#x', 'magic' ],
[ uint32_t, '%d', 'version' ],
[ uint64_t, '%#x', 'backing_file_offset' ],
[ uint32_t, '%#x', 'backing_file_size' ],
[ uint32_t, '%d', 'cluster_bits' ],
[ uint64_t, '%d', 'size' ],
[ uint32_t, '%d', 'crypt_method' ],
[ uint32_t, '%d', 'l1_size' ],
[ uint64_t, '%#x', 'l1_table_offset' ],
[ uint64_t, '%#x', 'refcount_table_offset' ],
[ uint32_t, '%d', 'refcount_table_clusters' ],
[ uint32_t, '%d', 'nb_snapshots' ],
[ uint64_t, '%#x', 'snapshot_offset' ],
# Version 3 header fields
[ uint64_t, '%#x', 'incompatible_features' ],
[ uint64_t, '%#x', 'compatible_features' ],
[ uint64_t, '%#x', 'autoclear_features' ],
[ uint32_t, '%d', 'refcount_order' ],
[ uint32_t, '%d', 'header_length' ],
];
fmt = '>' + ''.join(field[0] for field in fields)
def __init__(self, fd):
buf_size = struct.calcsize(QcowHeader.fmt)
fd.seek(0)
buf = fd.read(buf_size)
header = struct.unpack(QcowHeader.fmt, buf)
self.__dict__ = dict((field[2], header[i])
for i, field in enumerate(QcowHeader.fields))
self.set_defaults()
self.cluster_size = 1 << self.cluster_bits
fd.seek(self.header_length)
self.load_extensions(fd)
if self.backing_file_offset:
fd.seek(self.backing_file_offset)
self.backing_file = fd.read(self.backing_file_size)
else:
self.backing_file = None
def set_defaults(self):
if self.version == 2:
self.incompatible_features = 0
self.compatible_features = 0
self.autoclear_features = 0
self.refcount_order = 4
self.header_length = 72
def load_extensions(self, fd):
self.extensions = []
if self.backing_file_offset != 0:
end = min(self.cluster_size, self.backing_file_offset)
else:
end = self.cluster_size
while fd.tell() < end:
(magic, length) = struct.unpack('>II', fd.read(8))
if magic == 0:
break
else:
padded = (length + 7) & ~7
data = fd.read(padded)
self.extensions.append(QcowHeaderExtension(magic, length, data))
def update_extensions(self, fd):
fd.seek(self.header_length)
extensions = self.extensions
extensions.append(QcowHeaderExtension(0, 0, ""))
for ex in extensions:
buf = struct.pack('>II', ex.magic, ex.length)
fd.write(buf)
fd.write(ex.data)
if self.backing_file != None:
self.backing_file_offset = fd.tell()
fd.write(self.backing_file)
if fd.tell() > self.cluster_size:
raise Exception("I think I just broke the image...")
def update(self, fd):
header_bytes = self.header_length
self.update_extensions(fd)
fd.seek(0)
header = tuple(self.__dict__[f] for t, p, f in QcowHeader.fields)
buf = struct.pack(QcowHeader.fmt, *header)
buf = buf[0:header_bytes-1]
fd.write(buf)
def dump(self):
for f in QcowHeader.fields:
print "%-25s" % f[2], f[1] % self.__dict__[f[2]]
print ""
def dump_extensions(self):
for ex in self.extensions:
data = ex.data[:ex.length]
if all(c in string.printable for c in data):
data = "'%s'" % data
else:
data = "<binary>"
print "Header extension:"
print "%-25s %#x" % ("magic", ex.magic)
print "%-25s %d" % ("length", ex.length)
print "%-25s %s" % ("data", data)
print ""
def cmd_dump_header(fd):
h = QcowHeader(fd)
h.dump()
h.dump_extensions()
def cmd_add_header_ext(fd, magic, data):
try:
magic = int(magic, 0)
except:
print "'%s' is not a valid magic number" % magic
sys.exit(1)
h = QcowHeader(fd)
h.extensions.append(QcowHeaderExtension.create(magic, data))
h.update(fd)
def cmd_del_header_ext(fd, magic):
try:
magic = int(magic, 0)
except:
print "'%s' is not a valid magic number" % magic
sys.exit(1)
h = QcowHeader(fd)
found = False
for ex in h.extensions:
if ex.magic == magic:
found = True
h.extensions.remove(ex)
if not found:
print "No such header extension"
return
h.update(fd)
def cmd_set_feature_bit(fd, group, bit):
try:
bit = int(bit, 0)
if bit < 0 or bit >= 64:
raise ValueError
except:
print "'%s' is not a valid bit number in range [0, 64)" % bit
sys.exit(1)
h = QcowHeader(fd)
if group == 'incompatible':
h.incompatible_features |= 1 << bit
elif group == 'compatible':
h.compatible_features |= 1 << bit
elif group == 'autoclear':
h.autoclear_features |= 1 << bit
else:
print "'%s' is not a valid group, try 'incompatible', 'compatible', or 'autoclear'" % group
sys.exit(1)
h.update(fd)
cmds = [
[ 'dump-header', cmd_dump_header, 0, 'Dump image header and header extensions' ],
[ 'add-header-ext', cmd_add_header_ext, 2, 'Add a header extension' ],
[ 'del-header-ext', cmd_del_header_ext, 1, 'Delete a header extension' ],
[ 'set-feature-bit', cmd_set_feature_bit, 2, 'Set a feature bit'],
]
def main(filename, cmd, args):
fd = open(filename, "r+b")
try:
for name, handler, num_args, desc in cmds:
if name != cmd:
continue
elif len(args) != num_args:
usage()
return
else:
handler(fd, *args)
return
print "Unknown command '%s'" % cmd
finally:
fd.close()
def usage():
print "Usage: %s <file> <cmd> [<arg>, ...]" % sys.argv[0]
print ""
print "Supported commands:"
for name, handler, num_args, desc in cmds:
print " %-20s - %s" % (name, desc)
if len(sys.argv) < 3:
usage()
sys.exit(1)
main(sys.argv[1], sys.argv[2], sys.argv[3:])
| gpl-3.0 | 2,073,954,504,384,629,800 | 27.241667 | 99 | 0.524196 | false |
tedder/ansible | test/units/modules/network/f5/test_bigip_device_group_member.py | 21 | 2906 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2017 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
import sys
if sys.version_info < (2, 7):
pytestmark = pytest.mark.skip("F5 Ansible modules require Python >= 2.7")
from ansible.module_utils.basic import AnsibleModule
try:
from library.modules.bigip_device_group_member import Parameters
from library.modules.bigip_device_group_member import ModuleManager
from library.modules.bigip_device_group_member import ArgumentSpec
# In Ansible 2.8, Ansible changed import paths.
from test.units.compat import unittest
from test.units.compat.mock import Mock
from test.units.compat.mock import patch
from test.units.modules.utils import set_module_args
except ImportError:
from ansible.modules.network.f5.bigip_device_group_member import Parameters
from ansible.modules.network.f5.bigip_device_group_member import ModuleManager
from ansible.modules.network.f5.bigip_device_group_member import ArgumentSpec
# Ansible 2.8 imports
from units.compat import unittest
from units.compat.mock import Mock
from units.compat.mock import patch
from units.modules.utils import set_module_args
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}
def load_fixture(name):
path = os.path.join(fixture_path, name)
if path in fixture_data:
return fixture_data[path]
with open(path) as f:
data = f.read()
try:
data = json.loads(data)
except Exception:
pass
fixture_data[path] = data
return data
class TestParameters(unittest.TestCase):
def test_module_parameters(self):
args = dict(
name='bigip1',
device_group='dg1'
)
p = Parameters(params=args)
assert p.name == 'bigip1'
assert p.device_group == 'dg1'
class TestManager(unittest.TestCase):
def setUp(self):
self.spec = ArgumentSpec()
def test_create(self, *args):
set_module_args(
dict(
name="bigip1",
device_group="dg1",
state="present",
server='localhost',
user='admin',
password='password'
)
)
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
mm = ModuleManager(module=module)
# Override methods to force specific logic in the module to happen
mm.create_on_device = Mock(return_value=True)
mm.exists = Mock(return_value=False)
results = mm.exec_module()
assert results['changed'] is True
| gpl-3.0 | -4,019,801,443,828,458,000 | 26.942308 | 91 | 0.651411 | false |
nke001/attention-lvcsr | libs/Theano/theano/gof/callcache.py | 3 | 1196 | import logging
import six.moves.cPickle as pickle
_logger = logging.getLogger("theano.gof.callcache")
class CallCache(object):
def __init__(self, filename=None):
self.filename = filename
try:
if filename is None:
raise IOError('bad filename') # just goes to except
f = open(filename, 'r')
self.cache = pickle.load(f)
f.close()
except IOError:
self.cache = {}
def persist(self, filename=None):
if filename is None:
filename = self.filename
f = open(filename, 'w')
pickle.dump(self.cache, f)
f.close()
def call(self, fn, args=(), key=None):
if key is None:
key = (fn, tuple(args))
if key not in self.cache:
_logger.debug('cache miss %i', len(self.cache))
self.cache[key] = fn(*args)
else:
_logger.debug('cache hit %i', len(self.cache))
return self.cache[key]
def __del__(self):
try:
if self.filename:
self.persist()
except Exception as e:
_logger.error('persist failed %s %s', self.filename, e)
| mit | 8,521,262,109,235,789,000 | 28.170732 | 68 | 0.530936 | false |
stanmoore1/lammps | tools/python/pizza/log.py | 11 | 9803 | # Pizza.py toolkit, www.cs.sandia.gov/~sjplimp/pizza.html
# Steve Plimpton, [email protected], Sandia National Laboratories
#
# Copyright (2005) Sandia Corporation. Under the terms of Contract
# DE-AC04-94AL85000 with Sandia Corporation, the U.S. Government retains
# certain rights in this software. This software is distributed under
# the GNU General Public License.
# log tool
oneline = "Read LAMMPS log files and extract thermodynamic data"
docstr = """
l = log("file1") read in one or more log files
l = log("log1 log2.gz") can be gzipped
l = log("file*") wildcard expands to multiple files
l = log("log.lammps",0) two args = store filename, but don't read
incomplete and duplicate thermo entries are deleted
time = l.next() read new thermo info from file
used with 2-argument constructor to allow reading thermo incrementally
return time stamp of last thermo read
return -1 if no new thermo since last read
nvec = l.nvec # of vectors of thermo info
nlen = l.nlen length of each vectors
names = l.names list of vector names
t,pe,... = l.get("Time","KE",...) return one or more vectors of values
l.write("file.txt") write all vectors to a file
l.write("file.txt","Time","PE",...) write listed vectors to a file
get and write allow abbreviated (uniquely) vector names
"""
# History
# 8/05, Steve Plimpton (SNL): original version
# ToDo list
# Variables
# nvec = # of vectors
# nlen = length of each vector
# names = list of vector names
# ptr = dictionary, key = name, value = index into data for which column
# data[i][j] = 2d array of floats, i = 0 to # of entries, j = 0 to nvecs-1
# style = style of LAMMPS log file, 1 = multi, 2 = one, 3 = gran
# firststr = string that begins a thermo section in log file
# increment = 1 if log file being read incrementally
# eof = ptr into incremental file for where to start next read
# Imports and external programs
import sys, re, glob
from os import popen
try: tmp = PIZZA_GUNZIP
except: PIZZA_GUNZIP = "gunzip"
# Class definition
class log:
# --------------------------------------------------------------------
def __init__(self,*list):
self.nvec = 0
self.names = []
self.ptr = {}
self.data = []
# flist = list of all log file names
words = list[0].split()
self.flist = []
for word in words: self.flist += glob.glob(word)
if len(self.flist) == 0 and len(list) == 1:
raise StandardError,"no log file specified"
if len(list) == 1:
self.increment = 0
self.read_all()
else:
if len(self.flist) > 1:
raise StandardError,"can only incrementally read one log file"
self.increment = 1
self.eof = 0
# --------------------------------------------------------------------
# read all thermo from all files
def read_all(self):
self.read_header(self.flist[0])
if self.nvec == 0: raise StandardError,"log file has no values"
# read all files
for file in self.flist: self.read_one(file)
print
# sort entries by timestep, cull duplicates
self.data.sort(self.compare)
self.cull()
self.nlen = len(self.data)
print "read %d log entries" % self.nlen
# --------------------------------------------------------------------
def next(self):
if not self.increment: raise StandardError,"cannot read incrementally"
if self.nvec == 0:
try: open(self.flist[0],'r')
except: return -1
self.read_header(self.flist[0])
if self.nvec == 0: return -1
self.eof = self.read_one(self.flist[0],self.eof)
return int(self.data[-1][0])
# --------------------------------------------------------------------
def get(self,*keys):
if len(keys) == 0:
raise StandardError, "no log vectors specified"
map = []
for key in keys:
if self.ptr.has_key(key):
map.append(self.ptr[key])
else:
count = 0
for i in range(self.nvec):
if self.names[i].find(key) == 0:
count += 1
index = i
if count == 1:
map.append(index)
else:
raise StandardError, "unique log vector %s not found" % key
vecs = []
for i in range(len(keys)):
vecs.append(self.nlen * [0])
for j in xrange(self.nlen):
vecs[i][j] = self.data[j][map[i]]
if len(keys) == 1: return vecs[0]
else: return vecs
# --------------------------------------------------------------------
def write(self,filename,*keys):
if len(keys):
map = []
for key in keys:
if self.ptr.has_key(key):
map.append(self.ptr[key])
else:
count = 0
for i in range(self.nvec):
if self.names[i].find(key) == 0:
count += 1
index = i
if count == 1:
map.append(index)
else:
raise StandardError, "unique log vector %s not found" % key
else:
map = range(self.nvec)
f = open(filename,"w")
for i in xrange(self.nlen):
for j in xrange(len(map)):
print >>f,self.data[i][map[j]],
print >>f
f.close()
# --------------------------------------------------------------------
def compare(self,a,b):
if a[0] < b[0]:
return -1
elif a[0] > b[0]:
return 1
else:
return 0
# --------------------------------------------------------------------
def cull(self):
i = 1
while i < len(self.data):
if self.data[i][0] == self.data[i-1][0]: del self.data[i]
else: i += 1
# --------------------------------------------------------------------
def read_header(self,file):
str_multi = "----- Step"
str_one = "Step "
if file[-3:] == ".gz":
txt = popen("%s -c %s" % (PIZZA_GUNZIP,file),'r').read()
else:
txt = open(file).read()
if txt.find(str_multi) >= 0:
self.firststr = str_multi
self.style = 1
elif txt.find(str_one) >= 0:
self.firststr = str_one
self.style = 2
else:
return
if self.style == 1:
s1 = txt.find(self.firststr)
s2 = txt.find("\n--",s1)
if (s2 == -1):
s2 = txt.find("\nLoop time of",s1)
pattern = "\s(\S*)\s*="
keywords = re.findall(pattern,txt[s1:s2])
keywords.insert(0,"Step")
i = 0
for keyword in keywords:
self.names.append(keyword)
self.ptr[keyword] = i
i += 1
else:
s1 = txt.find(self.firststr)
s2 = txt.find("\n",s1)
line = txt[s1:s2]
words = line.split()
for i in range(len(words)):
self.names.append(words[i])
self.ptr[words[i]] = i
self.nvec = len(self.names)
# --------------------------------------------------------------------
def read_one(self,*list):
# if 2nd arg exists set file ptr to that value
# read entire (rest of) file into txt
file = list[0]
if file[-3:] == ".gz":
f = popen("%s -c %s" % (PIZZA_GUNZIP,file),'rb')
else:
f = open(file,'rb')
if len(list) == 2: f.seek(list[1])
txt = f.read()
if file[-3:] == ".gz": eof = 0
else: eof = f.tell()
f.close()
start = last = 0
while not last:
# chunk = contiguous set of thermo entries (line or multi-line)
# s1 = 1st char on 1st line of chunk
# s2 = 1st char on line after chunk
# set last = 1 if this is last chunk in file, leave 0 otherwise
# set start = position in file to start looking for next chunk
# rewind eof if final entry is incomplete
s1 = txt.find(self.firststr,start)
s2 = txt.find("Loop time of",start+1)
if s1 >= 0 and s2 >= 0 and s1 < s2: # found s1,s2 with s1 before s2
if self.style == 2:
s1 = txt.find("\n",s1) + 1
elif s1 >= 0 and s2 >= 0 and s2 < s1: # found s1,s2 with s2 before s1
s1 = 0
elif s1 == -1 and s2 >= 0: # found s2, but no s1
last = 1
s1 = 0
elif s1 >= 0 and s2 == -1: # found s1, but no s2
last = 1
if self.style == 1:
s2 = txt.rfind("\n--",s1) + 1
else:
s1 = txt.find("\n",s1) + 1
s2 = txt.rfind("\n",s1) + 1
eof -= len(txt) - s2
elif s1 == -1 and s2 == -1: # found neither
# could be end-of-file section
# or entire read was one chunk
if txt.find("Loop time of",start) == start: # end of file, so exit
eof -= len(txt) - start # reset eof to "Loop"
break
last = 1 # entire read is a chunk
s1 = 0
if self.style == 1:
s2 = txt.rfind("\n--",s1) + 1
else:
s2 = txt.rfind("\n",s1) + 1
eof -= len(txt) - s2
if s1 == s2: break
chunk = txt[s1:s2-1]
start = s2
# split chunk into entries
# parse each entry for numeric fields, append to data
if self.style == 1:
sections = chunk.split("\n--")
pat1 = re.compile("Step\s*(\S*)\s")
pat2 = re.compile("=\s*(\S*)")
for section in sections:
word1 = [re.search(pat1,section).group(1)]
word2 = re.findall(pat2,section)
words = word1 + word2
self.data.append(map(float,words))
else:
lines = chunk.split("\n")
for line in lines:
words = line.split()
self.data.append(map(float,words))
# print last timestep of chunk
print int(self.data[len(self.data)-1][0]),
sys.stdout.flush()
return eof
| gpl-2.0 | 8,573,096,235,446,023,000 | 28.175595 | 78 | 0.511272 | false |
zauonlok/luatable | luatable/parser.py | 1 | 17063 | """
luatable.parser
~~~~~~~~~~~~~~~
Implements a recursive descent Lua table parser (decoder)
"""
class Parser:
# end of source indicator
_NOMORE = ''
def __init__(self, source):
assert isinstance(source, str)
self._source = source
self._index = 0
self._current = source[0] if len(source) > 0 else self._NOMORE
def _peak_next(self):
"""
return the next character, leave the index unchanged
"""
if self._index + 1 < len(self._source):
return self._source[self._index + 1]
else:
return self._NOMORE
def _take_next(self):
"""
return the next character, advance the index
"""
if self._index + 1 < len(self._source):
self._index += 1
self._current = self._source[self._index]
else:
self._index = len(self._source)
self._current = self._NOMORE
return self._current
def _reset_index(self, index):
"""
        reset the index to a previously saved position
"""
assert index < len(self._source)
self._index = index
self._current = self._source[index]
def _in_sequence(self, char, sequence):
"""
check whether a character is in a sequence
"""
        return char != self._NOMORE and char in sequence
def _skip_comment(self):
"""
skip a short/long comment
"""
assert self._comment_coming()
self._take_next() # for the 1st '-'
self._take_next() # for the 2nd '-'
if self._current == '[':
level = self._parse_long_bracket(reset_if_fail=True,
reset_if_succeed=True)
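            # the probe resets the index on success because
            # _parse_long_string re-reads the opening bracket itself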
if level >= 0: # long comment
try:
self._parse_long_string()
except SyntaxError:
raise SyntaxError('bad long comment')
return
# short comment
while self._current != self._NOMORE:
if self._in_sequence(self._current, '\n\r'):
self._skip_newline()
break
else:
self._take_next()
def _skip_spaces(self):
"""
        skip whitespace and comments
"""
while True:
converged = True
while self._current.isspace():
converged = False
self._take_next()
if self._comment_coming():
converged = False
self._skip_comment()
if converged:
break
def _skip_newline(self):
"""
skip newline sequence (\\n, \\r, \\n\\r, or \\r\\n)
"""
assert self._in_sequence(self._current, '\n\r')
old = self._current
self._take_next() # \n or \r
if self._in_sequence(self._current, '\n\r') and self._current != old:
self._take_next() # \n\r or \r\n
def _number_coming(self):
"""
check whether a number is coming
"""
return self._current.isdigit() or (self._current == '.' and
self._peak_next().isdigit())
def _string_coming(self):
"""
check whether a short string is coming
"""
return self._in_sequence(self._current, ['"', "'"])
def _long_string_coming(self):
"""
check whether a long string is coming
"""
return (self._current == '[' and
self._in_sequence(self._peak_next(), '=['))
def _word_coming(self):
"""
check whether a word is coming
"""
return self._current.isalpha() or self._current == '_'
def _table_coming(self):
"""
check whether a table is coming
"""
return self._current == '{'
def _comment_coming(self):
"""
check whether a comment is coming
"""
return self._current == '-' and self._peak_next() == '-'
def _parse_number(self):
"""
        parse a numeric literal (decimal or hexadecimal) to a number
"""
assert self._number_coming()
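        # worked examples of the arithmetic below:
        #   '3.14'  -> (3 + 14/10**2) * 10**0 = 3.14
        #   '1e2'   -> (1 + 0) * 10**2        = 100
        #   '0x1p4' -> (1 + 0) * 2**4         = 16  (hex floats take a
        #                                            binary exponent)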
if self._current == '0' and self._in_sequence(self._peak_next(), 'xX'):
base = 16
e_symbols = 'pP'
e_base = 2
self._take_next() # '0'
self._take_next() # 'x' or 'X'
else:
base = 10
e_symbols = 'eE'
e_base = 10
# integer part
i_value, i_count = self._parse_digits(base)
# fraction part
f_value, f_count = 0, 0
if self._current == '.':
self._take_next()
f_value, f_count = self._parse_digits(base)
f_value = f_value / float(base ** f_count)
# exponent part
e_value, e_count = 0, 0
if self._in_sequence(self._current, e_symbols):
self._take_next()
e_sign = +1
if self._in_sequence(self._current, '-+'):
e_sign = -1 if self._current == '-' else +1
self._take_next()
            # exponent digits are decimal even for hexadecimal literals
            e_value, e_count = self._parse_digits(10)
e_value *= e_sign
if e_count == 0:
raise SyntaxError('bad number: empty exponent part')
if i_count == 0 and f_count == 0:
raise SyntaxError('bad number: empty integer and fraction part')
return (i_value + f_value) * (e_base ** e_value)
def _parse_digits(self, base, limit=None):
"""
        parse a (maybe empty) run of digits, return (integer value, digit count)
"""
assert base in (10, 16)
valid_digits = '0123456789abcdefABCDEF' if base == 16 else '0123456789'
value, count = 0, 0
while self._in_sequence(self._current, valid_digits):
count += 1
digit = int(self._current, base=base)
value = value * base + digit
self._take_next()
if limit is not None and count >= limit:
break
return value, count
def _parse_string(self):
"""
parse a literal short string
"""
assert self._string_coming()
delimiter = self._current
self._take_next()
string = ''
while self._current != self._NOMORE:
if self._current == delimiter:
self._take_next()
return string
elif self._current == '\\':
self._take_next()
string += self._parse_escapee()
elif self._in_sequence(self._current, '\n\r'):
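                # an unescaped real newline is illegal in a short string;
                # fall through to the 'unfinished string' error below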
break
else:
string += self._current
self._take_next()
raise SyntaxError('bad string: unfinished string')
_ESCAPEES = {'a': '\a', 'b': '\b', 'f': '\f', 'n': '\n', 'r': '\r',
't': '\t', 'v': '\v', '"': '"', "'": "'", '\\': '\\'}
def _parse_escapee(self):
"""
parse an escape sequence
"""
if self._current in self._ESCAPEES: # abfnrtv\"'
char = self._ESCAPEES[self._current]
self._take_next()
elif self._in_sequence(self._current, '\n\r'): # real newline
char = '\n'
self._skip_newline()
        elif self._current == 'z':              # zap following whitespace
            char = ''
            self._take_next()
            # skip whitespace only: unlike _skip_spaces, '\z' must not
            # swallow string text that merely looks like a comment
            while self._current.isspace():
                self._take_next()
elif self._current.isdigit(): # \ddd, up to 3 dec
d_value, d_count = self._parse_digits(10, 3)
if d_value > 255:
raise SyntaxError('bad string: esc: decimal value exceeds 255')
char = chr(d_value)
elif self._current == 'x': # \xXX, exactly 2 hex
self._take_next()
x_value, x_count = self._parse_digits(16, 2)
if x_count != 2:
raise SyntaxError('bad string: esc: need exactly 2 hex digits')
char = chr(x_value)
else: # whatever
raise SyntaxError('bad string: esc: invalid escape sequence')
return char
def _parse_long_string(self):
"""
parse a literal long string
"""
assert self._long_string_coming()
level = self._parse_long_bracket()
if level < 0:
raise SyntaxError('bad long string: invalid long string delimiter')
if self._in_sequence(self._current, '\n\r'): # starts with a newline
self._skip_newline()
string = ''
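        # a ']' terminates the string only when it opens a closing bracket
        # of the same level; otherwise it is kept as literal content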
while self._current != self._NOMORE:
if self._current == ']':
close_level = self._parse_long_bracket(level,
reset_if_fail=True)
if close_level < 0:
string += ']'
self._take_next()
else:
return string
elif self._in_sequence(self._current, '\n\r'): # real newline
string += '\n'
self._skip_newline()
else:
string += self._current
self._take_next()
raise SyntaxError('bad long string: unfinished long string')
def _parse_long_bracket(self, expected_level=None,
reset_if_fail=False, reset_if_succeed=False):
"""
try to find the level of a long bracket; return a negative level on failure
"""
assert self._in_sequence(self._current, '[]')
delimiter = self._current
old_index = self._index
level = 0
self._take_next()
while self._current == '=':
level += 1
self._take_next()
level_not_matched = (expected_level is not None and
level != expected_level)
delimiter_not_matched = self._current != delimiter
if level_not_matched or delimiter_not_matched:
if reset_if_fail:
self._reset_index(old_index)
return -1
else:
self._take_next()
if reset_if_succeed:
self._reset_index(old_index)
return level
_KWORDS = {
'and', 'break', 'do', 'else', 'elseif', 'end',
'false', 'for', 'function', 'goto', 'if', 'in',
'local', 'nil', 'not', 'or', 'repeat', 'return',
'then', 'true', 'until', 'while'
}
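# Lua reserved words: 'true', 'false' and 'nil' may be accepted as literal
# values when the caller allows it, but none of these may appear as a bare
# identifier or table key.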
def _parse_word(self, allow_bool=False, allow_nil=False):
"""
parse a word (nil, true, false, or identifier)
"""
assert self._word_coming()
word = self._current
self._take_next()
while self._current.isalnum() or self._current == '_':
word += self._current
self._take_next()
not_allowed = self._KWORDS.copy()
if allow_bool:
not_allowed -= {'true', 'false'}
if allow_nil:
not_allowed -= {'nil'}
if word in not_allowed:
raise SyntaxError("bad word: '%s' not allowed here" % word)
if word in {'true', 'false'}:
return word == 'true'
elif word == 'nil':
return None
else:
return word
def _equal_behind_word(self):
"""
check whether a '=' follows the current word
"""
assert self._word_coming()
old_index = self._index
self._parse_word(allow_bool=True, allow_nil=True)  # advances the cursor; value unused
self._skip_spaces()
equal_behind = self._current == '='
self._reset_index(old_index)
return equal_behind
def _parse_table(self):
"""
parse a table to a dict or a list
"""
assert self._table_coming()
self._take_next() # for '{'
table = {}
count = {'rec': 0, 'lst': 0} # number of record and list elements
self._skip_spaces()
while self._current != self._NOMORE:
if self._current == '}':
self._take_next()
return self._finalize_table(table, count)
else:
self._parse_field(table, count)
self._skip_spaces()
if self._current == '}':
continue # will finish in the next loop
elif self._in_sequence(self._current, ',;'):
self._take_next()
else:
raise SyntaxError("bad table: unexpected '%s'" %
self._current)
self._skip_spaces()
raise SyntaxError("bad table: expect '}'")
def _parse_field(self, table, count):
"""
parse a record-style field or a list-style field
recfield ::= [ exp ] = exp | Name = exp
lstfield ::= exp
"""
record_style1 = self._current == '[' and not self._long_string_coming()
record_style2 = self._word_coming() and self._equal_behind_word()
is_record_field = record_style1 or record_style2
if is_record_field:
key, value = self._parse_record_field()
# only support number or string as key
if not isinstance(key, (int, float, str)):
raise TypeError("bad table: unsupported key type '%s'" %
type(key))
if value is not None: # only insert not nil value
table[key] = value
count['rec'] += 1
else:
# nil may need further processing if the current table is a dict
value = self._parse_expression()
count['lst'] += 1
table[count['lst']] = value
def _parse_record_field(self):
"""
parse a record field
recfield ::= [ exp ] = exp | Name = exp
"""
if self._current == '[':
self._take_next()
self._skip_spaces()
key = self._parse_expression()
self._skip_spaces()
if self._current != ']':
raise SyntaxError("bad table: record filed expect ']'")
self._take_next()
else:
key = self._parse_word(allow_bool=False, allow_nil=False)
self._skip_spaces()
if self._current != '=':
raise SyntaxError("bad table: record filed expect '='")
self._take_next()
self._skip_spaces()
value = self._parse_expression()
return key, value
def _finalize_table(self, table, count):
"""
convert dict to list if no record field occurred
"""
if count['rec'] > 0: # a dict, filter out nil values
result = {}
for key, value in table.items():
if value is not None:
result[key] = value
return result
else: # list fields only, convert to a list
result = []
for i in range(count['lst']):
result.append(table[i + 1])
return result
def _parse_expression(self):
"""
parse an expression (nil, boolean, number, string, or table)
"""
assert not self._comment_coming()
if self._word_coming(): # [_a-zA-Z]
word = self._parse_word(allow_bool=True, allow_nil=True)
if word not in {None, True, False}:
raise SyntaxError("bad expression: unexpected word '%s'" % word)
return word
elif self._current == '-': # -, not comment, as asserted
self._take_next()
self._skip_spaces()
if self._number_coming(): # negative number
return -1 * self._parse_number()
else:
raise SyntaxError("bad expression: unexpected '-'")
elif self._number_coming(): # [0-9] or .[0-9]
return self._parse_number()
elif self._string_coming(): # ' or "
return self._parse_string()
elif self._long_string_coming(): # [= or [[
return self._parse_long_string()
elif self._table_coming(): # {
return self._parse_table()
else:
raise SyntaxError("bad expression: unexpected '%s'" % self._current)
def parse(self):
"""
parse a given Lua representation to a Python object
"""
self._skip_spaces()
value = self._parse_expression()
self._skip_spaces()
if self._current != self._NOMORE:
raise SyntaxError("unexpected '%s'" % self._current)
return value
def fromlua(src):
"""
return a reconstituted object from the given Lua representation
"""
if not isinstance(src, str):
raise TypeError('require a string to parse')
parser = Parser(src)
return parser.parse()
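# Minimal usage sketch (illustrative; relies on the Parser class defined
# earlier in this module):
#   fromlua('{10, 20, 30}')      -> [10, 20, 30]
#   fromlua('{1, 2, x = "y"}')   -> {1: 1, 2: 2, 'x': 'y'}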
| mit | -2,336,642,265,026,753,500 | 32.522593 | 80 | 0.483268 | false |
joshloyal/scikit-learn | sklearn/linear_model/stochastic_gradient.py | 16 | 50617 | # Authors: Peter Prettenhofer <[email protected]> (main author)
# Mathieu Blondel (partial_fit support)
#
# License: BSD 3 clause
"""Classification and regression using Stochastic Gradient Descent (SGD)."""
import numpy as np
from abc import ABCMeta, abstractmethod
from ..externals.joblib import Parallel, delayed
from .base import LinearClassifierMixin, SparseCoefMixin
from .base import make_dataset
from ..base import BaseEstimator, RegressorMixin
from ..utils import check_array, check_random_state, check_X_y
from ..utils.extmath import safe_sparse_dot
from ..utils.multiclass import _check_partial_fit_first_call
from ..utils.validation import check_is_fitted
from ..externals import six
from .sgd_fast import plain_sgd, average_sgd
from ..utils.fixes import astype
from ..utils import compute_class_weight
from ..utils import deprecated
from .sgd_fast import Hinge
from .sgd_fast import SquaredHinge
from .sgd_fast import Log
from .sgd_fast import ModifiedHuber
from .sgd_fast import SquaredLoss
from .sgd_fast import Huber
from .sgd_fast import EpsilonInsensitive
from .sgd_fast import SquaredEpsilonInsensitive
LEARNING_RATE_TYPES = {"constant": 1, "optimal": 2, "invscaling": 3,
"pa1": 4, "pa2": 5}
PENALTY_TYPES = {"none": 0, "l2": 2, "l1": 1, "elasticnet": 3}
DEFAULT_EPSILON = 0.1
# Default value of ``epsilon`` parameter.
class BaseSGD(six.with_metaclass(ABCMeta, BaseEstimator, SparseCoefMixin)):
"""Base class for SGD classification and regression."""
def __init__(self, loss, penalty='l2', alpha=0.0001, C=1.0,
l1_ratio=0.15, fit_intercept=True, n_iter=5, shuffle=True,
verbose=0, epsilon=0.1, random_state=None,
learning_rate="optimal", eta0=0.0, power_t=0.5,
warm_start=False, average=False):
self.loss = loss
self.penalty = penalty
self.learning_rate = learning_rate
self.epsilon = epsilon
self.alpha = alpha
self.C = C
self.l1_ratio = l1_ratio
self.fit_intercept = fit_intercept
self.n_iter = n_iter
self.shuffle = shuffle
self.random_state = random_state
self.verbose = verbose
self.eta0 = eta0
self.power_t = power_t
self.warm_start = warm_start
self.average = average
self._validate_params()
def set_params(self, *args, **kwargs):
super(BaseSGD, self).set_params(*args, **kwargs)
self._validate_params()
return self
@abstractmethod
def fit(self, X, y):
"""Fit model."""
def _validate_params(self):
"""Validate input params. """
if not isinstance(self.shuffle, bool):
raise ValueError("shuffle must be either True or False")
if self.n_iter <= 0:
raise ValueError("n_iter must be > zero")
if not (0.0 <= self.l1_ratio <= 1.0):
raise ValueError("l1_ratio must be in [0, 1]")
if self.alpha < 0.0:
raise ValueError("alpha must be >= 0")
if self.learning_rate in ("constant", "invscaling"):
if self.eta0 <= 0.0:
raise ValueError("eta0 must be > 0")
if self.learning_rate == "optimal" and self.alpha == 0:
raise ValueError("alpha must be > 0 since "
"learning_rate is 'optimal'. alpha is used "
"to compute the optimal learning rate.")
# raises ValueError if not registered
self._get_penalty_type(self.penalty)
self._get_learning_rate_type(self.learning_rate)
if self.loss not in self.loss_functions:
raise ValueError("The loss %s is not supported. " % self.loss)
def _get_loss_function(self, loss):
"""Get concrete ``LossFunction`` object for str ``loss``. """
try:
loss_ = self.loss_functions[loss]
loss_class, args = loss_[0], loss_[1:]
if loss in ('huber', 'epsilon_insensitive',
'squared_epsilon_insensitive'):
args = (self.epsilon, )
return loss_class(*args)
except KeyError:
raise ValueError("The loss %s is not supported. " % loss)
def _get_learning_rate_type(self, learning_rate):
try:
return LEARNING_RATE_TYPES[learning_rate]
except KeyError:
raise ValueError("learning rate %s "
"is not supported. " % learning_rate)
def _get_penalty_type(self, penalty):
penalty = str(penalty).lower()
try:
return PENALTY_TYPES[penalty]
except KeyError:
raise ValueError("Penalty %s is not supported. " % penalty)
def _validate_sample_weight(self, sample_weight, n_samples):
"""Set the sample weight array."""
if sample_weight is None:
# uniform sample weights
sample_weight = np.ones(n_samples, dtype=np.float64, order='C')
else:
# user-provided array
sample_weight = np.asarray(sample_weight, dtype=np.float64,
order="C")
if sample_weight.shape[0] != n_samples:
raise ValueError("Shapes of X and sample_weight do not match.")
return sample_weight
def _allocate_parameter_mem(self, n_classes, n_features, coef_init=None,
intercept_init=None):
"""Allocate mem for parameters; initialize if provided."""
if n_classes > 2:
# allocate coef_ for multi-class
if coef_init is not None:
coef_init = np.asarray(coef_init, order="C")
if coef_init.shape != (n_classes, n_features):
raise ValueError("Provided ``coef_`` does not match "
"dataset. ")
self.coef_ = coef_init
else:
self.coef_ = np.zeros((n_classes, n_features),
dtype=np.float64, order="C")
# allocate intercept_ for multi-class
if intercept_init is not None:
intercept_init = np.asarray(intercept_init, order="C")
if intercept_init.shape != (n_classes, ):
raise ValueError("Provided intercept_init "
"does not match dataset.")
self.intercept_ = intercept_init
else:
self.intercept_ = np.zeros(n_classes, dtype=np.float64,
order="C")
else:
# allocate coef_ for binary problem
if coef_init is not None:
coef_init = np.asarray(coef_init, dtype=np.float64,
order="C")
coef_init = coef_init.ravel()
if coef_init.shape != (n_features,):
raise ValueError("Provided coef_init does not "
"match dataset.")
self.coef_ = coef_init
else:
self.coef_ = np.zeros(n_features,
dtype=np.float64,
order="C")
# allocate intercept_ for binary problem
if intercept_init is not None:
intercept_init = np.asarray(intercept_init, dtype=np.float64)
if intercept_init.shape != (1,) and intercept_init.shape != ():
raise ValueError("Provided intercept_init "
"does not match dataset.")
self.intercept_ = intercept_init.reshape(1,)
else:
self.intercept_ = np.zeros(1, dtype=np.float64, order="C")
# initialize average parameters
if self.average > 0:
self.standard_coef_ = self.coef_
self.standard_intercept_ = self.intercept_
self.average_coef_ = np.zeros(self.coef_.shape,
dtype=np.float64,
order="C")
self.average_intercept_ = np.zeros(self.standard_intercept_.shape,
dtype=np.float64,
order="C")
def _prepare_fit_binary(est, y, i):
"""Initialization for fit_binary.
Returns y, coef, intercept, average_coef, average_intercept.
"""
y_i = np.ones(y.shape, dtype=np.float64, order="C")
y_i[y != est.classes_[i]] = -1.0
average_intercept = 0
average_coef = None
if len(est.classes_) == 2:
if not est.average:
coef = est.coef_.ravel()
intercept = est.intercept_[0]
else:
coef = est.standard_coef_.ravel()
intercept = est.standard_intercept_[0]
average_coef = est.average_coef_.ravel()
average_intercept = est.average_intercept_[0]
else:
if not est.average:
coef = est.coef_[i]
intercept = est.intercept_[i]
else:
coef = est.standard_coef_[i]
intercept = est.standard_intercept_[i]
average_coef = est.average_coef_[i]
average_intercept = est.average_intercept_[i]
return y_i, coef, intercept, average_coef, average_intercept
def fit_binary(est, i, X, y, alpha, C, learning_rate, n_iter,
pos_weight, neg_weight, sample_weight):
"""Fit a single binary classifier.
The i'th class is considered the "positive" class.
"""
# if average is not true, average_coef, and average_intercept will be
# unused
y_i, coef, intercept, average_coef, average_intercept = \
_prepare_fit_binary(est, y, i)
assert y_i.shape[0] == y.shape[0] == sample_weight.shape[0]
dataset, intercept_decay = make_dataset(X, y_i, sample_weight)
penalty_type = est._get_penalty_type(est.penalty)
learning_rate_type = est._get_learning_rate_type(learning_rate)
# XXX should have random_state_!
random_state = check_random_state(est.random_state)
# numpy mtrand expects a C long which is a signed 32 bit integer under
# Windows
seed = random_state.randint(0, np.iinfo(np.int32).max)
if not est.average:
return plain_sgd(coef, intercept, est.loss_function_,
penalty_type, alpha, C, est.l1_ratio,
dataset, n_iter, int(est.fit_intercept),
int(est.verbose), int(est.shuffle), seed,
pos_weight, neg_weight,
learning_rate_type, est.eta0,
est.power_t, est.t_, intercept_decay)
else:
standard_coef, standard_intercept, average_coef, \
average_intercept = average_sgd(coef, intercept, average_coef,
average_intercept,
est.loss_function_, penalty_type,
alpha, C, est.l1_ratio, dataset,
n_iter, int(est.fit_intercept),
int(est.verbose), int(est.shuffle),
seed, pos_weight, neg_weight,
learning_rate_type, est.eta0,
est.power_t, est.t_,
intercept_decay,
est.average)
if len(est.classes_) == 2:
est.average_intercept_[0] = average_intercept
else:
est.average_intercept_[i] = average_intercept
return standard_coef, standard_intercept
class BaseSGDClassifier(six.with_metaclass(ABCMeta, BaseSGD,
LinearClassifierMixin)):
loss_functions = {
"hinge": (Hinge, 1.0),
"squared_hinge": (SquaredHinge, 1.0),
"perceptron": (Hinge, 0.0),
"log": (Log, ),
"modified_huber": (ModifiedHuber, ),
"squared_loss": (SquaredLoss, ),
"huber": (Huber, DEFAULT_EPSILON),
"epsilon_insensitive": (EpsilonInsensitive, DEFAULT_EPSILON),
"squared_epsilon_insensitive": (SquaredEpsilonInsensitive,
DEFAULT_EPSILON),
}
@abstractmethod
def __init__(self, loss="hinge", penalty='l2', alpha=0.0001, l1_ratio=0.15,
fit_intercept=True, n_iter=5, shuffle=True, verbose=0,
epsilon=DEFAULT_EPSILON, n_jobs=1, random_state=None,
learning_rate="optimal", eta0=0.0, power_t=0.5,
class_weight=None, warm_start=False, average=False):
super(BaseSGDClassifier, self).__init__(loss=loss, penalty=penalty,
alpha=alpha, l1_ratio=l1_ratio,
fit_intercept=fit_intercept,
n_iter=n_iter, shuffle=shuffle,
verbose=verbose,
epsilon=epsilon,
random_state=random_state,
learning_rate=learning_rate,
eta0=eta0, power_t=power_t,
warm_start=warm_start,
average=average)
self.class_weight = class_weight
self.n_jobs = int(n_jobs)
@property
@deprecated("Attribute loss_function was deprecated in version 0.19 and "
"will be removed in 0.21. Use 'loss_function_' instead")
def loss_function(self):
return self.loss_function_
def _partial_fit(self, X, y, alpha, C,
loss, learning_rate, n_iter,
classes, sample_weight,
coef_init, intercept_init):
X, y = check_X_y(X, y, 'csr', dtype=np.float64, order="C")
n_samples, n_features = X.shape
self._validate_params()
_check_partial_fit_first_call(self, classes)
n_classes = self.classes_.shape[0]
# Allocate datastructures from input arguments
self._expanded_class_weight = compute_class_weight(self.class_weight,
self.classes_, y)
sample_weight = self._validate_sample_weight(sample_weight, n_samples)
if getattr(self, "coef_", None) is None or coef_init is not None:
self._allocate_parameter_mem(n_classes, n_features,
coef_init, intercept_init)
elif n_features != self.coef_.shape[-1]:
raise ValueError("Number of features %d does not match previous "
"data %d." % (n_features, self.coef_.shape[-1]))
self.loss_function_ = self._get_loss_function(loss)
if not hasattr(self, "t_"):
self.t_ = 1.0
# delegate to concrete training procedure
if n_classes > 2:
self._fit_multiclass(X, y, alpha=alpha, C=C,
learning_rate=learning_rate,
sample_weight=sample_weight, n_iter=n_iter)
elif n_classes == 2:
self._fit_binary(X, y, alpha=alpha, C=C,
learning_rate=learning_rate,
sample_weight=sample_weight, n_iter=n_iter)
else:
raise ValueError("The number of class labels must be "
"greater than one.")
return self
def _fit(self, X, y, alpha, C, loss, learning_rate, coef_init=None,
intercept_init=None, sample_weight=None):
if hasattr(self, "classes_"):
self.classes_ = None
X, y = check_X_y(X, y, 'csr', dtype=np.float64, order="C")
n_samples, n_features = X.shape
# labels can be encoded as float, int, or string literals
# np.unique sorts in asc order; largest class id is positive class
classes = np.unique(y)
if self.warm_start and hasattr(self, "coef_"):
if coef_init is None:
coef_init = self.coef_
if intercept_init is None:
intercept_init = self.intercept_
else:
self.coef_ = None
self.intercept_ = None
if self.average > 0:
self.standard_coef_ = self.coef_
self.standard_intercept_ = self.intercept_
self.average_coef_ = None
self.average_intercept_ = None
# Clear iteration count for multiple call to fit.
self.t_ = 1.0
self._partial_fit(X, y, alpha, C, loss, learning_rate, self.n_iter,
classes, sample_weight, coef_init, intercept_init)
return self
def _fit_binary(self, X, y, alpha, C, sample_weight,
learning_rate, n_iter):
"""Fit a binary classifier on X and y. """
coef, intercept = fit_binary(self, 1, X, y, alpha, C,
learning_rate, n_iter,
self._expanded_class_weight[1],
self._expanded_class_weight[0],
sample_weight)
self.t_ += n_iter * X.shape[0]
# need to be 2d
if self.average > 0:
if self.average <= self.t_ - 1:
self.coef_ = self.average_coef_.reshape(1, -1)
self.intercept_ = self.average_intercept_
else:
self.coef_ = self.standard_coef_.reshape(1, -1)
self.standard_intercept_ = np.atleast_1d(intercept)
self.intercept_ = self.standard_intercept_
else:
self.coef_ = coef.reshape(1, -1)
# intercept is a float, need to convert it to an array of length 1
self.intercept_ = np.atleast_1d(intercept)
def _fit_multiclass(self, X, y, alpha, C, learning_rate,
sample_weight, n_iter):
"""Fit a multi-class classifier by combining binary classifiers
Each binary classifier predicts one class versus all others. This
strategy is called OVA: One Versus All.
"""
# Use joblib to fit OvA in parallel.
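# (The threading backend is viable here because the compiled SGD loop in
# sgd_fast releases the GIL while iterating over the data.)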
result = Parallel(n_jobs=self.n_jobs, backend="threading",
verbose=self.verbose)(
delayed(fit_binary)(self, i, X, y, alpha, C, learning_rate,
n_iter, self._expanded_class_weight[i], 1.,
sample_weight)
for i in range(len(self.classes_)))
for i, (_, intercept) in enumerate(result):
self.intercept_[i] = intercept
self.t_ += n_iter * X.shape[0]
if self.average > 0:
if self.average <= self.t_ - 1.0:
self.coef_ = self.average_coef_
self.intercept_ = self.average_intercept_
else:
self.coef_ = self.standard_coef_
self.standard_intercept_ = np.atleast_1d(self.intercept_)
self.intercept_ = self.standard_intercept_
def partial_fit(self, X, y, classes=None, sample_weight=None):
"""Fit linear model with Stochastic Gradient Descent.
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
Subset of the training data
y : numpy array, shape (n_samples,)
Subset of the target values
classes : array, shape (n_classes,)
Classes across all calls to partial_fit.
Can be obtained via `np.unique(y_all)`, where y_all is the
target vector of the entire dataset.
This argument is required for the first call to partial_fit
and can be omitted in the subsequent calls.
Note that y doesn't need to contain all labels in `classes`.
sample_weight : array-like, shape (n_samples,), optional
Weights applied to individual samples.
If not provided, uniform weights are assumed.
Returns
-------
self : returns an instance of self.
"""
if self.class_weight in ['balanced']:
raise ValueError("class_weight '{0}' is not supported for "
"partial_fit. In order to use 'balanced' weights,"
" use compute_class_weight('{0}', classes, y). "
"In place of y you can us a large enough sample "
"of the full training set target to properly "
"estimate the class frequency distributions. "
"Pass the resulting weights as the class_weight "
"parameter.".format(self.class_weight))
return self._partial_fit(X, y, alpha=self.alpha, C=1.0, loss=self.loss,
learning_rate=self.learning_rate, n_iter=1,
classes=classes, sample_weight=sample_weight,
coef_init=None, intercept_init=None)
def fit(self, X, y, coef_init=None, intercept_init=None,
sample_weight=None):
"""Fit linear model with Stochastic Gradient Descent.
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
Training data
y : numpy array, shape (n_samples,)
Target values
coef_init : array, shape (n_classes, n_features)
The initial coefficients to warm-start the optimization.
intercept_init : array, shape (n_classes,)
The initial intercept to warm-start the optimization.
sample_weight : array-like, shape (n_samples,), optional
Weights applied to individual samples.
If not provided, uniform weights are assumed. These weights will
be multiplied with class_weight (passed through the
constructor) if class_weight is specified
Returns
-------
self : returns an instance of self.
"""
return self._fit(X, y, alpha=self.alpha, C=1.0,
loss=self.loss, learning_rate=self.learning_rate,
coef_init=coef_init, intercept_init=intercept_init,
sample_weight=sample_weight)
class SGDClassifier(BaseSGDClassifier):
"""Linear classifiers (SVM, logistic regression, a.o.) with SGD training.
This estimator implements regularized linear models with stochastic
gradient descent (SGD) learning: the gradient of the loss is estimated
each sample at a time and the model is updated along the way with a
decreasing strength schedule (aka learning rate). SGD allows minibatch
(online/out-of-core) learning, see the partial_fit method.
For best results using the default learning rate schedule, the data should
have zero mean and unit variance.
This implementation works with data represented as dense or sparse arrays
of floating point values for the features. The model it fits can be
controlled with the loss parameter; by default, it fits a linear support
vector machine (SVM).
The regularizer is a penalty added to the loss function that shrinks model
parameters towards the zero vector using either the squared euclidean norm
L2 or the absolute norm L1 or a combination of both (Elastic Net). If the
parameter update crosses the 0.0 value because of the regularizer, the
update is truncated to 0.0 to allow for learning sparse models and achieve
online feature selection.
Read more in the :ref:`User Guide <sgd>`.
Parameters
----------
loss : str, 'hinge', 'log', 'modified_huber', 'squared_hinge',\
'perceptron', or a regression loss: 'squared_loss', 'huber',\
'epsilon_insensitive', or 'squared_epsilon_insensitive'
The loss function to be used. Defaults to 'hinge', which gives a
linear SVM.
The 'log' loss gives logistic regression, a probabilistic classifier.
'modified_huber' is another smooth loss that brings tolerance to
outliers as well as probability estimates.
'squared_hinge' is like hinge but is quadratically penalized.
'perceptron' is the linear loss used by the perceptron algorithm.
The other losses are designed for regression but can be useful in
classification as well; see SGDRegressor for a description.
penalty : str, 'none', 'l2', 'l1', or 'elasticnet'
The penalty (aka regularization term) to be used. Defaults to 'l2'
which is the standard regularizer for linear SVM models. 'l1' and
'elasticnet' might bring sparsity to the model (feature selection)
not achievable with 'l2'.
alpha : float
Constant that multiplies the regularization term. Defaults to 0.0001
Also used to compute learning_rate when set to 'optimal'.
l1_ratio : float
The Elastic Net mixing parameter, with 0 <= l1_ratio <= 1.
l1_ratio=0 corresponds to L2 penalty, l1_ratio=1 to L1.
Defaults to 0.15.
fit_intercept : bool
Whether the intercept should be estimated or not. If False, the
data is assumed to be already centered. Defaults to True.
n_iter : int, optional
The number of passes over the training data (aka epochs). The number
of iterations is set to 1 if using partial_fit.
Defaults to 5.
shuffle : bool, optional
Whether or not the training data should be shuffled after each epoch.
Defaults to True.
random_state : int seed, RandomState instance, or None (default)
The seed of the pseudo random number generator to use when
shuffling the data.
verbose : integer, optional
The verbosity level
epsilon : float
Epsilon in the epsilon-insensitive loss functions; only if `loss` is
'huber', 'epsilon_insensitive', or 'squared_epsilon_insensitive'.
For 'huber', determines the threshold at which it becomes less
important to get the prediction exactly right.
For epsilon-insensitive, any differences between the current prediction
and the correct label are ignored if they are less than this threshold.
n_jobs : integer, optional
The number of CPUs to use to do the OVA (One Versus All, for
multi-class problems) computation. -1 means 'all CPUs'. Defaults
to 1.
learning_rate : string, optional
The learning rate schedule:
- 'constant': eta = eta0
- 'optimal': eta = 1.0 / (alpha * (t + t0)) [default]
- 'invscaling': eta = eta0 / pow(t, power_t)
where t0 is chosen by a heuristic proposed by Leon Bottou.
eta0 : double
The initial learning rate for the 'constant' or 'invscaling'
schedules. The default value is 0.0 as eta0 is not used by the
default schedule 'optimal'.
power_t : double
The exponent for inverse scaling learning rate [default 0.5].
class_weight : dict, {class_label: weight} or "balanced" or None, optional
Preset for the class_weight fit parameter.
Weights associated with classes. If not given, all classes
are supposed to have weight one.
The "balanced" mode uses the values of y to automatically adjust
weights inversely proportional to class frequencies in the input data
as ``n_samples / (n_classes * np.bincount(y))``
warm_start : bool, optional
When set to True, reuse the solution of the previous call to fit as
initialization, otherwise, just erase the previous solution.
average : bool or int, optional
When set to True, computes the averaged SGD weights and stores the
result in the ``coef_`` attribute. If set to an int greater than 1,
averaging will begin once the total number of samples seen reaches
average. So ``average=10`` will begin averaging after seeing 10
samples.
Attributes
----------
coef_ : array, shape (1, n_features) if n_classes == 2 else (n_classes,\
n_features)
Weights assigned to the features.
intercept_ : array, shape (1,) if n_classes == 2 else (n_classes,)
Constants in decision function.
loss_function_ : concrete ``LossFunction``
Examples
--------
>>> import numpy as np
>>> from sklearn import linear_model
>>> X = np.array([[-1, -1], [-2, -1], [1, 1], [2, 1]])
>>> Y = np.array([1, 1, 2, 2])
>>> clf = linear_model.SGDClassifier()
>>> clf.fit(X, Y)
... #doctest: +NORMALIZE_WHITESPACE
SGDClassifier(alpha=0.0001, average=False, class_weight=None, epsilon=0.1,
eta0=0.0, fit_intercept=True, l1_ratio=0.15,
learning_rate='optimal', loss='hinge', n_iter=5, n_jobs=1,
penalty='l2', power_t=0.5, random_state=None, shuffle=True,
verbose=0, warm_start=False)
>>> print(clf.predict([[-0.8, -1]]))
[1]
See also
--------
LinearSVC, LogisticRegression, Perceptron
"""
def __init__(self, loss="hinge", penalty='l2', alpha=0.0001, l1_ratio=0.15,
fit_intercept=True, n_iter=5, shuffle=True, verbose=0,
epsilon=DEFAULT_EPSILON, n_jobs=1, random_state=None,
learning_rate="optimal", eta0=0.0, power_t=0.5,
class_weight=None, warm_start=False, average=False):
super(SGDClassifier, self).__init__(
loss=loss, penalty=penalty, alpha=alpha, l1_ratio=l1_ratio,
fit_intercept=fit_intercept, n_iter=n_iter, shuffle=shuffle,
verbose=verbose, epsilon=epsilon, n_jobs=n_jobs,
random_state=random_state, learning_rate=learning_rate, eta0=eta0,
power_t=power_t, class_weight=class_weight, warm_start=warm_start,
average=average)
def _check_proba(self):
check_is_fitted(self, "t_")
if self.loss not in ("log", "modified_huber"):
raise AttributeError("probability estimates are not available for"
" loss=%r" % self.loss)
@property
def predict_proba(self):
"""Probability estimates.
This method is only available for log loss and modified Huber loss.
Multiclass probability estimates are derived from binary (one-vs.-rest)
estimates by simple normalization, as recommended by Zadrozny and
Elkan.
Binary probability estimates for loss="modified_huber" are given by
(clip(decision_function(X), -1, 1) + 1) / 2. For other loss functions
it is necessary to perform proper probability calibration by wrapping
the classifier with
:class:`sklearn.calibration.CalibratedClassifierCV` instead.
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
Returns
-------
array, shape (n_samples, n_classes)
Returns the probability of the sample for each class in the model,
where classes are ordered as they are in `self.classes_`.
References
----------
Zadrozny and Elkan, "Transforming classifier scores into multiclass
probability estimates", SIGKDD'02,
http://www.research.ibm.com/people/z/zadrozny/kdd2002-Transf.pdf
The justification for the formula in the loss="modified_huber"
case is in the appendix B in:
http://jmlr.csail.mit.edu/papers/volume2/zhang02c/zhang02c.pdf
"""
self._check_proba()
return self._predict_proba
def _predict_proba(self, X):
if self.loss == "log":
return self._predict_proba_lr(X)
elif self.loss == "modified_huber":
binary = (len(self.classes_) == 2)
scores = self.decision_function(X)
if binary:
prob2 = np.ones((scores.shape[0], 2))
prob = prob2[:, 1]
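# prob is a view into prob2's second column, so the in-place clip and
# arithmetic below write into prob2 directly.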
else:
prob = scores
np.clip(scores, -1, 1, prob)
prob += 1.
prob /= 2.
if binary:
prob2[:, 0] -= prob
prob = prob2
else:
# the above might assign zero to all classes, which doesn't
# normalize neatly; work around this to produce uniform
# probabilities
prob_sum = prob.sum(axis=1)
all_zero = (prob_sum == 0)
if np.any(all_zero):
prob[all_zero, :] = 1
prob_sum[all_zero] = len(self.classes_)
# normalize
prob /= prob_sum.reshape((prob.shape[0], -1))
return prob
else:
raise NotImplementedError("predict_(log_)proba only supported when"
" loss='log' or loss='modified_huber' "
"(%r given)" % self.loss)
@property
def predict_log_proba(self):
"""Log of probability estimates.
This method is only available for log loss and modified Huber loss.
When loss="modified_huber", probability estimates may be hard zeros
and ones, so taking the logarithm is not possible.
See ``predict_proba`` for details.
Parameters
----------
X : array-like, shape (n_samples, n_features)
Returns
-------
T : array-like, shape (n_samples, n_classes)
Returns the log-probability of the sample for each class in the
model, where classes are ordered as they are in
`self.classes_`.
"""
self._check_proba()
return self._predict_log_proba
def _predict_log_proba(self, X):
return np.log(self.predict_proba(X))
class BaseSGDRegressor(BaseSGD, RegressorMixin):
loss_functions = {
"squared_loss": (SquaredLoss, ),
"huber": (Huber, DEFAULT_EPSILON),
"epsilon_insensitive": (EpsilonInsensitive, DEFAULT_EPSILON),
"squared_epsilon_insensitive": (SquaredEpsilonInsensitive,
DEFAULT_EPSILON),
}
@abstractmethod
def __init__(self, loss="squared_loss", penalty="l2", alpha=0.0001,
l1_ratio=0.15, fit_intercept=True, n_iter=5, shuffle=True,
verbose=0, epsilon=DEFAULT_EPSILON, random_state=None,
learning_rate="invscaling", eta0=0.01, power_t=0.25,
warm_start=False, average=False):
super(BaseSGDRegressor, self).__init__(loss=loss, penalty=penalty,
alpha=alpha, l1_ratio=l1_ratio,
fit_intercept=fit_intercept,
n_iter=n_iter, shuffle=shuffle,
verbose=verbose,
epsilon=epsilon,
random_state=random_state,
learning_rate=learning_rate,
eta0=eta0, power_t=power_t,
warm_start=warm_start,
average=average)
def _partial_fit(self, X, y, alpha, C, loss, learning_rate,
n_iter, sample_weight,
coef_init, intercept_init):
X, y = check_X_y(X, y, "csr", copy=False, order='C', dtype=np.float64)
y = astype(y, np.float64, copy=False)
n_samples, n_features = X.shape
self._validate_params()
# Allocate datastructures from input arguments
sample_weight = self._validate_sample_weight(sample_weight, n_samples)
if getattr(self, "coef_", None) is None:
self._allocate_parameter_mem(1, n_features,
coef_init, intercept_init)
elif n_features != self.coef_.shape[-1]:
raise ValueError("Number of features %d does not match previous "
"data %d." % (n_features, self.coef_.shape[-1]))
if self.average > 0 and getattr(self, "average_coef_", None) is None:
self.average_coef_ = np.zeros(n_features,
dtype=np.float64,
order="C")
self.average_intercept_ = np.zeros(1,
dtype=np.float64,
order="C")
self._fit_regressor(X, y, alpha, C, loss, learning_rate,
sample_weight, n_iter)
return self
def partial_fit(self, X, y, sample_weight=None):
"""Fit linear model with Stochastic Gradient Descent.
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
Subset of training data
y : numpy array of shape (n_samples,)
Subset of target values
sample_weight : array-like, shape (n_samples,), optional
Weights applied to individual samples.
If not provided, uniform weights are assumed.
Returns
-------
self : returns an instance of self.
"""
return self._partial_fit(X, y, self.alpha, C=1.0,
loss=self.loss,
learning_rate=self.learning_rate, n_iter=1,
sample_weight=sample_weight,
coef_init=None, intercept_init=None)
def _fit(self, X, y, alpha, C, loss, learning_rate, coef_init=None,
intercept_init=None, sample_weight=None):
if self.warm_start and getattr(self, "coef_", None) is not None:
if coef_init is None:
coef_init = self.coef_
if intercept_init is None:
intercept_init = self.intercept_
else:
self.coef_ = None
self.intercept_ = None
if self.average > 0:
self.standard_intercept_ = self.intercept_
self.standard_coef_ = self.coef_
self.average_coef_ = None
self.average_intercept_ = None
# Clear iteration count for multiple call to fit.
self.t_ = 1.0
return self._partial_fit(X, y, alpha, C, loss, learning_rate,
self.n_iter, sample_weight,
coef_init, intercept_init)
def fit(self, X, y, coef_init=None, intercept_init=None,
sample_weight=None):
"""Fit linear model with Stochastic Gradient Descent.
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
Training data
y : numpy array, shape (n_samples,)
Target values
coef_init : array, shape (n_features,)
The initial coefficients to warm-start the optimization.
intercept_init : array, shape (1,)
The initial intercept to warm-start the optimization.
sample_weight : array-like, shape (n_samples,), optional
Weights applied to individual samples (1. for unweighted).
Returns
-------
self : returns an instance of self.
"""
return self._fit(X, y, alpha=self.alpha, C=1.0,
loss=self.loss, learning_rate=self.learning_rate,
coef_init=coef_init,
intercept_init=intercept_init,
sample_weight=sample_weight)
def _decision_function(self, X):
"""Predict using the linear model
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
Returns
-------
array, shape (n_samples,)
Predicted target values per element in X.
"""
check_is_fitted(self, ["t_", "coef_", "intercept_"], all_or_any=all)
X = check_array(X, accept_sparse='csr')
scores = safe_sparse_dot(X, self.coef_.T,
dense_output=True) + self.intercept_
return scores.ravel()
def predict(self, X):
"""Predict using the linear model
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
Returns
-------
array, shape (n_samples,)
Predicted target values per element in X.
"""
return self._decision_function(X)
def _fit_regressor(self, X, y, alpha, C, loss, learning_rate,
sample_weight, n_iter):
dataset, intercept_decay = make_dataset(X, y, sample_weight)
loss_function = self._get_loss_function(loss)
penalty_type = self._get_penalty_type(self.penalty)
learning_rate_type = self._get_learning_rate_type(learning_rate)
if not hasattr(self, "t_"):
self.t_ = 1.0
random_state = check_random_state(self.random_state)
# numpy mtrand expects a C long which is a signed 32 bit integer under
# Windows
seed = random_state.randint(0, np.iinfo(np.int32).max)
if self.average > 0:
self.standard_coef_, self.standard_intercept_, \
self.average_coef_, self.average_intercept_ =\
average_sgd(self.standard_coef_,
self.standard_intercept_[0],
self.average_coef_,
self.average_intercept_[0],
loss_function,
penalty_type,
alpha, C,
self.l1_ratio,
dataset,
n_iter,
int(self.fit_intercept),
int(self.verbose),
int(self.shuffle),
seed,
1.0, 1.0,
learning_rate_type,
self.eta0, self.power_t, self.t_,
intercept_decay, self.average)
self.average_intercept_ = np.atleast_1d(self.average_intercept_)
self.standard_intercept_ = np.atleast_1d(self.standard_intercept_)
self.t_ += n_iter * X.shape[0]
if self.average <= self.t_ - 1.0:
self.coef_ = self.average_coef_
self.intercept_ = self.average_intercept_
else:
self.coef_ = self.standard_coef_
self.intercept_ = self.standard_intercept_
else:
self.coef_, self.intercept_ = \
plain_sgd(self.coef_,
self.intercept_[0],
loss_function,
penalty_type,
alpha, C,
self.l1_ratio,
dataset,
n_iter,
int(self.fit_intercept),
int(self.verbose),
int(self.shuffle),
seed,
1.0, 1.0,
learning_rate_type,
self.eta0, self.power_t, self.t_,
intercept_decay)
self.t_ += n_iter * X.shape[0]
self.intercept_ = np.atleast_1d(self.intercept_)
class SGDRegressor(BaseSGDRegressor):
"""Linear model fitted by minimizing a regularized empirical loss with SGD
SGD stands for Stochastic Gradient Descent: the gradient of the loss is
estimated one sample at a time and the model is updated along the way with
a decreasing strength schedule (aka learning rate).
The regularizer is a penalty added to the loss function that shrinks model
parameters towards the zero vector using either the squared euclidean norm
L2 or the absolute norm L1 or a combination of both (Elastic Net). If the
parameter update crosses the 0.0 value because of the regularizer, the
update is truncated to 0.0 to allow for learning sparse models and achieve
online feature selection.
This implementation works with data represented as dense numpy arrays of
floating point values for the features.
Read more in the :ref:`User Guide <sgd>`.
Parameters
----------
loss : str, 'squared_loss', 'huber', 'epsilon_insensitive', \
or 'squared_epsilon_insensitive'
The loss function to be used. Defaults to 'squared_loss' which refers
to the ordinary least squares fit. 'huber' modifies 'squared_loss' to
focus less on getting outliers correct by switching from squared to
linear loss past a distance of epsilon. 'epsilon_insensitive' ignores
errors less than epsilon and is linear past that; this is the loss
function used in SVR. 'squared_epsilon_insensitive' is the same but
becomes squared loss past a tolerance of epsilon.
penalty : str, 'none', 'l2', 'l1', or 'elasticnet'
The penalty (aka regularization term) to be used. Defaults to 'l2'
which is the standard regularizer for linear SVM models. 'l1' and
'elasticnet' might bring sparsity to the model (feature selection)
not achievable with 'l2'.
alpha : float
Constant that multiplies the regularization term. Defaults to 0.0001
Also used to compute learning_rate when set to 'optimal'.
l1_ratio : float
The Elastic Net mixing parameter, with 0 <= l1_ratio <= 1.
l1_ratio=0 corresponds to L2 penalty, l1_ratio=1 to L1.
Defaults to 0.15.
fit_intercept : bool
Whether the intercept should be estimated or not. If False, the
data is assumed to be already centered. Defaults to True.
n_iter : int, optional
The number of passes over the training data (aka epochs). The number
of iterations is set to 1 if using partial_fit.
Defaults to 5.
shuffle : bool, optional
Whether or not the training data should be shuffled after each epoch.
Defaults to True.
random_state : int seed, RandomState instance, or None (default)
The seed of the pseudo random number generator to use when
shuffling the data.
verbose : integer, optional
The verbosity level.
epsilon : float
Epsilon in the epsilon-insensitive loss functions; only if `loss` is
'huber', 'epsilon_insensitive', or 'squared_epsilon_insensitive'.
For 'huber', determines the threshold at which it becomes less
important to get the prediction exactly right.
For epsilon-insensitive, any differences between the current prediction
and the correct label are ignored if they are less than this threshold.
learning_rate : string, optional
The learning rate schedule:
- 'constant': eta = eta0
- 'optimal': eta = 1.0 / (alpha * (t + t0)) [default]
- 'invscaling': eta = eta0 / pow(t, power_t)
where t0 is chosen by a heuristic proposed by Leon Bottou.
eta0 : double, optional
The initial learning rate [default 0.01].
power_t : double, optional
The exponent for inverse scaling learning rate [default 0.25].
warm_start : bool, optional
When set to True, reuse the solution of the previous call to fit as
initialization, otherwise, just erase the previous solution.
average : bool or int, optional
When set to True, computes the averaged SGD weights and stores the
result in the ``coef_`` attribute. If set to an int greater than 1,
averaging will begin once the total number of samples seen reaches
average. So ``average=10`` will begin averaging after seeing 10
samples.
Attributes
----------
coef_ : array, shape (n_features,)
Weights assigned to the features.
intercept_ : array, shape (1,)
The intercept term.
average_coef_ : array, shape (n_features,)
Averaged weights assigned to the features.
average_intercept_ : array, shape (1,)
The averaged intercept term.
Examples
--------
>>> import numpy as np
>>> from sklearn import linear_model
>>> n_samples, n_features = 10, 5
>>> np.random.seed(0)
>>> y = np.random.randn(n_samples)
>>> X = np.random.randn(n_samples, n_features)
>>> clf = linear_model.SGDRegressor()
>>> clf.fit(X, y)
... #doctest: +NORMALIZE_WHITESPACE
SGDRegressor(alpha=0.0001, average=False, epsilon=0.1, eta0=0.01,
fit_intercept=True, l1_ratio=0.15, learning_rate='invscaling',
loss='squared_loss', n_iter=5, penalty='l2', power_t=0.25,
random_state=None, shuffle=True, verbose=0, warm_start=False)
See also
--------
Ridge, ElasticNet, Lasso, SVR
"""
def __init__(self, loss="squared_loss", penalty="l2", alpha=0.0001,
l1_ratio=0.15, fit_intercept=True, n_iter=5, shuffle=True,
verbose=0, epsilon=DEFAULT_EPSILON, random_state=None,
learning_rate="invscaling", eta0=0.01, power_t=0.25,
warm_start=False, average=False):
super(SGDRegressor, self).__init__(loss=loss, penalty=penalty,
alpha=alpha, l1_ratio=l1_ratio,
fit_intercept=fit_intercept,
n_iter=n_iter, shuffle=shuffle,
verbose=verbose,
epsilon=epsilon,
random_state=random_state,
learning_rate=learning_rate,
eta0=eta0, power_t=power_t,
warm_start=warm_start,
average=average)
| bsd-3-clause | 3,911,030,700,560,456,000 | 40.219055 | 79 | 0.553648 | false |
Subarno/MachineLearningPracticePrograms | linear_reg.py | 1 | 1716 | from math import sqrt
# Calculate root mean squared error
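# RMSE = sqrt( (1/n) * sum((predicted_i - actual_i)**2) )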
def rmse_metric(actual, predicted):
sum_error = 0.0
for i in range(len(actual)):
prediction_error = predicted[i] - actual[i]
sum_error += (prediction_error ** 2)
mean_error = sum_error / float(len(actual))
return sqrt(mean_error)
# Evaluate regression algorithm on training dataset
def evaluate_algorithm(dataset, algorithm):
test_set = list()
for row in dataset:
row_copy = list(row)
row_copy[-1] = None
test_set.append(row_copy)
predicted = algorithm(dataset, test_set)
print(predicted)
actual = [row[-1] for row in dataset]
rmse = rmse_metric(actual, predicted)
return rmse
# Calculate the mean value of a list of numbers
def mean(values):
return sum(values) / float(len(values))
# Calculate the covariance between x and y (un-normalized: the 1/n factors
# cancel in b1 = covariance / variance)
def covariance(x, mean_x, y, mean_y):
covar = 0.0
for i in range(len(x)):
covar += (x[i] - mean_x) * (y[i] - mean_y)
return covar
# Calculate the variance of a list of numbers (un-normalized sum of squared
# deviations; see the note on covariance above)
def variance(values, mean):
return sum([(x-mean)**2 for x in values])
# Calculate coefficients
def coefficients(dataset):
x = [row[0] for row in dataset]
y = [row[1] for row in dataset]
x_mean, y_mean = mean(x), mean(y)
b1 = covariance(x, x_mean, y, y_mean) / variance(x, x_mean)
b0 = y_mean - b1 * x_mean
return [b0, b1]
# Simple linear regression algorithm
def simple_linear_regression(train, test):
predictions = list()
b0, b1 = coefficients(train)
for row in test:
yhat = b0 + b1 * row[0]
predictions.append(yhat)
return predictions
# Test simple linear regression
dataset = [[1, 1], [2, 3], [4, 3], [3, 2], [5, 5]]
rmse = evaluate_algorithm(dataset, simple_linear_regression)
print('RMSE: %.3f' % (rmse))
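# Worked by hand from the formulas above: mean_x=3, mean_y=2.8,
# b1 = 8.0/10 = 0.8, b0 = 0.4, predictions ~= [1.2, 2.0, 3.6, 2.8, 4.4],
# RMSE ~= 0.693.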
| gpl-3.0 | -8,269,938,085,554,947,000 | 27.131148 | 60 | 0.687646 | false |
RealTimeWeb/wikisite | MoinMoin/script/old/migration/12_to_13_mig11.py | 1 | 2642 | #!/usr/bin/env python
"""
migration from moin 1.2 to moin 1.3
For 1.3, the plugin module loader needs some __init__.py files.
Although we supply those files in the new "empty wiki template" in
wiki/data, many people forgot to update their plugin directories,
so we do that via this mig script now.
Steps for a successful migration:
1. Stop your wiki and make a backup of old data and code
2. Make a copy of the wiki's "data" directory to your working dir
3. Run this script from your working dir
4. If there was no error, you will find:
data.pre-mig11 - the script renames your data directory copy to that name
data - converted data dir
5. Copy additional files from data.pre-mig11 to data (maybe intermaps, logs,
etc.). Be aware that the file contents AND file names of wiki content
may have changed, so DO NOT copy the files inside the cache/ directory,
let the wiki refill it.
6. Replace the data directory your wiki uses with the data directory
you created by previous steps. DO NOT simply copy the converted stuff
into the original or you will duplicate pages and create chaos!
7. Test it - if something has gone wrong, you still have your backup.
@copyright: 2005 Thomas Waldmann
@license: GPL, see COPYING for details
"""
import os.path, sys, urllib
# Insert THIS moin dir first into sys path, or you would run another
# version of moin!
sys.path.insert(0, '../../../..')
from MoinMoin import wikiutil
from MoinMoin.script.migration.migutil import opj, listdir, copy_file, move_file, copy_dir, makedir
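# Helpers from migutil (assumed behavior, see MoinMoin.script.migration.migutil):
# opj joins path segments os.path.join-style; copy_dir and makedir operate on
# the working copy of the data directory.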
def migrate(destdir):
plugindir = opj(destdir, 'plugin')
makedir(plugindir)
fname = opj(plugindir, '__init__.py')
f = open(fname, 'w')
f.write('''\
# *** Do not remove this! ***
# Although being empty, the presence of this file is important for plugins
# working correctly.
''')
f.close()
for d in ['action', 'formatter', 'macro', 'parser', 'processor', 'theme', 'xmlrpc', ]:
thisdir = opj(plugindir, d)
makedir(thisdir)
fname = opj(thisdir, '__init__.py')
f = open(fname, 'w')
f.write('''\
# -*- coding: iso-8859-1 -*-
from MoinMoin.util import pysupport
modules = pysupport.getPackageModules(__file__)
''')
f.close()
origdir = 'data.pre-mig11'
destdir = 'data'
# Backup original dir and create new empty dir
try:
os.rename(destdir, origdir)
except OSError:
print "You need to be in the directory where your copy of the 'data' directory is located."
sys.exit(1)
copy_dir(origdir, destdir)
migrate(destdir)
| apache-2.0 | -5,731,366,479,976,064,000 | 31.219512 | 99 | 0.660863 | false |
Acehaidrey/incubator-airflow | tests/providers/google/cloud/operators/test_bigquery.py | 7 | 36678 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
from datetime import datetime
from unittest import mock
from unittest.mock import MagicMock
import pytest
from google.cloud.exceptions import Conflict
from parameterized import parameterized
from airflow import models
from airflow.exceptions import AirflowException
from airflow.models import DAG, TaskFail, TaskInstance, XCom
from airflow.providers.google.cloud.operators.bigquery import (
BigQueryCheckOperator,
BigQueryConsoleIndexableLink,
BigQueryConsoleLink,
BigQueryCreateEmptyDatasetOperator,
BigQueryCreateEmptyTableOperator,
BigQueryCreateExternalTableOperator,
BigQueryDeleteDatasetOperator,
BigQueryDeleteTableOperator,
BigQueryExecuteQueryOperator,
BigQueryGetDataOperator,
BigQueryGetDatasetOperator,
BigQueryGetDatasetTablesOperator,
BigQueryInsertJobOperator,
BigQueryIntervalCheckOperator,
BigQueryPatchDatasetOperator,
BigQueryUpdateDatasetOperator,
BigQueryUpsertTableOperator,
BigQueryValueCheckOperator,
)
from airflow.serialization.serialized_objects import SerializedDAG
from airflow.settings import Session
from airflow.utils.session import provide_session
TASK_ID = 'test-bq-generic-operator'
TEST_DATASET = 'test-dataset'
TEST_DATASET_LOCATION = 'EU'
TEST_GCP_PROJECT_ID = 'test-project'
TEST_DELETE_CONTENTS = True
TEST_TABLE_ID = 'test-table-id'
TEST_GCS_BUCKET = 'test-bucket'
TEST_GCS_DATA = ['dir1/*.csv']
TEST_SOURCE_FORMAT = 'CSV'
DEFAULT_DATE = datetime(2015, 1, 1)
TEST_DAG_ID = 'test-bigquery-operators'
TEST_TABLE_RESOURCES = {"tableReference": {"tableId": TEST_TABLE_ID}, "expirationTime": 1234567}
VIEW_DEFINITION = {
"query": f"SELECT * FROM `{TEST_DATASET}.{TEST_TABLE_ID}`",
"useLegacySql": False,
}
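# Test pattern used throughout this module: patch BigQueryHook so no real
# BigQuery calls happen, execute the operator with a stub context, and assert
# the exact arguments forwarded to the hook.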
class TestBigQueryCreateEmptyTableOperator(unittest.TestCase):
@mock.patch('airflow.providers.google.cloud.operators.bigquery.BigQueryHook')
def test_execute(self, mock_hook):
operator = BigQueryCreateEmptyTableOperator(
task_id=TASK_ID, dataset_id=TEST_DATASET, project_id=TEST_GCP_PROJECT_ID, table_id=TEST_TABLE_ID
)
operator.execute(None)
mock_hook.return_value.create_empty_table.assert_called_once_with(
dataset_id=TEST_DATASET,
project_id=TEST_GCP_PROJECT_ID,
table_id=TEST_TABLE_ID,
schema_fields=None,
time_partitioning={},
cluster_fields=None,
labels=None,
view=None,
encryption_configuration=None,
table_resource=None,
exists_ok=False,
)
@mock.patch('airflow.providers.google.cloud.operators.bigquery.BigQueryHook')
def test_create_view(self, mock_hook):
operator = BigQueryCreateEmptyTableOperator(
task_id=TASK_ID,
dataset_id=TEST_DATASET,
project_id=TEST_GCP_PROJECT_ID,
table_id=TEST_TABLE_ID,
view=VIEW_DEFINITION,
)
operator.execute(None)
mock_hook.return_value.create_empty_table.assert_called_once_with(
dataset_id=TEST_DATASET,
project_id=TEST_GCP_PROJECT_ID,
table_id=TEST_TABLE_ID,
schema_fields=None,
time_partitioning={},
cluster_fields=None,
labels=None,
view=VIEW_DEFINITION,
encryption_configuration=None,
table_resource=None,
exists_ok=False,
)
@mock.patch('airflow.providers.google.cloud.operators.bigquery.BigQueryHook')
def test_create_clustered_empty_table(self, mock_hook):
schema_fields = [
{"name": "emp_name", "type": "STRING", "mode": "REQUIRED"},
{"name": "date_hired", "type": "DATE", "mode": "REQUIRED"},
{"name": "date_birth", "type": "DATE", "mode": "NULLABLE"},
]
time_partitioning = {"type": "DAY", "field": "date_hired"}
cluster_fields = ["date_birth"]
operator = BigQueryCreateEmptyTableOperator(
task_id=TASK_ID,
dataset_id=TEST_DATASET,
project_id=TEST_GCP_PROJECT_ID,
table_id=TEST_TABLE_ID,
schema_fields=schema_fields,
time_partitioning=time_partitioning,
cluster_fields=cluster_fields,
)
operator.execute(None)
mock_hook.return_value.create_empty_table.assert_called_once_with(
dataset_id=TEST_DATASET,
project_id=TEST_GCP_PROJECT_ID,
table_id=TEST_TABLE_ID,
schema_fields=schema_fields,
time_partitioning=time_partitioning,
cluster_fields=cluster_fields,
labels=None,
view=None,
encryption_configuration=None,
table_resource=None,
exists_ok=False,
)
class TestBigQueryCreateExternalTableOperator(unittest.TestCase):
@mock.patch('airflow.providers.google.cloud.operators.bigquery.BigQueryHook')
def test_execute(self, mock_hook):
operator = BigQueryCreateExternalTableOperator(
task_id=TASK_ID,
destination_project_dataset_table=f'{TEST_DATASET}.{TEST_TABLE_ID}',
schema_fields=[],
bucket=TEST_GCS_BUCKET,
source_objects=TEST_GCS_DATA,
source_format=TEST_SOURCE_FORMAT,
)
operator.execute(None)
mock_hook.return_value.create_external_table.assert_called_once_with(
external_project_dataset_table=f'{TEST_DATASET}.{TEST_TABLE_ID}',
schema_fields=[],
source_uris=[f'gs://{TEST_GCS_BUCKET}/{source_object}' for source_object in TEST_GCS_DATA],
source_format=TEST_SOURCE_FORMAT,
compression='NONE',
skip_leading_rows=0,
field_delimiter=',',
max_bad_records=0,
quote_character=None,
allow_quoted_newlines=False,
allow_jagged_rows=False,
src_fmt_configs={},
labels=None,
encryption_configuration=None,
)
class TestBigQueryDeleteDatasetOperator(unittest.TestCase):
@mock.patch('airflow.providers.google.cloud.operators.bigquery.BigQueryHook')
def test_execute(self, mock_hook):
operator = BigQueryDeleteDatasetOperator(
task_id=TASK_ID,
dataset_id=TEST_DATASET,
project_id=TEST_GCP_PROJECT_ID,
delete_contents=TEST_DELETE_CONTENTS,
)
operator.execute(None)
mock_hook.return_value.delete_dataset.assert_called_once_with(
dataset_id=TEST_DATASET, project_id=TEST_GCP_PROJECT_ID, delete_contents=TEST_DELETE_CONTENTS
)
class TestBigQueryCreateEmptyDatasetOperator(unittest.TestCase):
@mock.patch('airflow.providers.google.cloud.operators.bigquery.BigQueryHook')
def test_execute(self, mock_hook):
operator = BigQueryCreateEmptyDatasetOperator(
task_id=TASK_ID,
dataset_id=TEST_DATASET,
project_id=TEST_GCP_PROJECT_ID,
location=TEST_DATASET_LOCATION,
)
operator.execute(None)
mock_hook.return_value.create_empty_dataset.assert_called_once_with(
dataset_id=TEST_DATASET,
project_id=TEST_GCP_PROJECT_ID,
location=TEST_DATASET_LOCATION,
dataset_reference={},
exists_ok=False,
)
class TestBigQueryGetDatasetOperator(unittest.TestCase):
@mock.patch('airflow.providers.google.cloud.operators.bigquery.BigQueryHook')
def test_execute(self, mock_hook):
operator = BigQueryGetDatasetOperator(
task_id=TASK_ID, dataset_id=TEST_DATASET, project_id=TEST_GCP_PROJECT_ID
)
operator.execute(None)
mock_hook.return_value.get_dataset.assert_called_once_with(
dataset_id=TEST_DATASET, project_id=TEST_GCP_PROJECT_ID
)
class TestBigQueryPatchDatasetOperator(unittest.TestCase):
@mock.patch('airflow.providers.google.cloud.operators.bigquery.BigQueryHook')
def test_execute(self, mock_hook):
dataset_resource = {"friendlyName": 'Test DS'}
operator = BigQueryPatchDatasetOperator(
dataset_resource=dataset_resource,
task_id=TASK_ID,
dataset_id=TEST_DATASET,
project_id=TEST_GCP_PROJECT_ID,
)
operator.execute(None)
mock_hook.return_value.patch_dataset.assert_called_once_with(
dataset_resource=dataset_resource, dataset_id=TEST_DATASET, project_id=TEST_GCP_PROJECT_ID
)
class TestBigQueryUpdateDatasetOperator(unittest.TestCase):
@mock.patch('airflow.providers.google.cloud.operators.bigquery.BigQueryHook')
def test_execute(self, mock_hook):
dataset_resource = {"friendlyName": 'Test DS'}
operator = BigQueryUpdateDatasetOperator(
dataset_resource=dataset_resource,
task_id=TASK_ID,
dataset_id=TEST_DATASET,
project_id=TEST_GCP_PROJECT_ID,
)
operator.execute(None)
mock_hook.return_value.update_dataset.assert_called_once_with(
dataset_resource=dataset_resource,
dataset_id=TEST_DATASET,
project_id=TEST_GCP_PROJECT_ID,
fields=list(dataset_resource.keys()),
)
class TestBigQueryOperator(unittest.TestCase):
def setUp(self):
self.dagbag = models.DagBag(dag_folder='/dev/null', include_examples=True)
self.args = {'owner': 'airflow', 'start_date': DEFAULT_DATE}
self.dag = DAG(TEST_DAG_ID, default_args=self.args)
def tearDown(self):
session = Session()
session.query(models.TaskInstance).filter_by(dag_id=TEST_DAG_ID).delete()
session.query(TaskFail).filter_by(dag_id=TEST_DAG_ID).delete()
session.commit()
session.close()
@mock.patch('airflow.providers.google.cloud.operators.bigquery.BigQueryHook')
def test_execute(self, mock_hook):
encryption_configuration = {'key': 'kk'}
operator = BigQueryExecuteQueryOperator(
task_id=TASK_ID,
sql='Select * from test_table',
destination_dataset_table=None,
write_disposition='WRITE_EMPTY',
allow_large_results=False,
flatten_results=None,
gcp_conn_id='google_cloud_default',
udf_config=None,
use_legacy_sql=True,
maximum_billing_tier=None,
maximum_bytes_billed=None,
create_disposition='CREATE_IF_NEEDED',
schema_update_options=(),
query_params=None,
labels=None,
priority='INTERACTIVE',
time_partitioning=None,
api_resource_configs=None,
cluster_fields=None,
encryption_configuration=encryption_configuration,
)
operator.execute(MagicMock())
mock_hook.return_value.run_query.assert_called_once_with(
sql='Select * from test_table',
destination_dataset_table=None,
write_disposition='WRITE_EMPTY',
allow_large_results=False,
flatten_results=None,
udf_config=None,
maximum_billing_tier=None,
maximum_bytes_billed=None,
create_disposition='CREATE_IF_NEEDED',
schema_update_options=(),
query_params=None,
labels=None,
priority='INTERACTIVE',
time_partitioning=None,
api_resource_configs=None,
cluster_fields=None,
encryption_configuration=encryption_configuration,
)
@mock.patch('airflow.providers.google.cloud.operators.bigquery.BigQueryHook')
def test_execute_list(self, mock_hook):
operator = BigQueryExecuteQueryOperator(
task_id=TASK_ID,
sql=[
'Select * from test_table',
'Select * from other_test_table',
],
destination_dataset_table=None,
write_disposition='WRITE_EMPTY',
allow_large_results=False,
flatten_results=None,
gcp_conn_id='google_cloud_default',
udf_config=None,
use_legacy_sql=True,
maximum_billing_tier=None,
maximum_bytes_billed=None,
create_disposition='CREATE_IF_NEEDED',
schema_update_options=(),
query_params=None,
labels=None,
priority='INTERACTIVE',
time_partitioning=None,
api_resource_configs=None,
cluster_fields=None,
encryption_configuration=None,
)
operator.execute(MagicMock())
mock_hook.return_value.run_query.assert_has_calls(
[
mock.call(
sql='Select * from test_table',
destination_dataset_table=None,
write_disposition='WRITE_EMPTY',
allow_large_results=False,
flatten_results=None,
udf_config=None,
maximum_billing_tier=None,
maximum_bytes_billed=None,
create_disposition='CREATE_IF_NEEDED',
schema_update_options=(),
query_params=None,
labels=None,
priority='INTERACTIVE',
time_partitioning=None,
api_resource_configs=None,
cluster_fields=None,
encryption_configuration=None,
),
mock.call(
sql='Select * from other_test_table',
destination_dataset_table=None,
write_disposition='WRITE_EMPTY',
allow_large_results=False,
flatten_results=None,
udf_config=None,
maximum_billing_tier=None,
maximum_bytes_billed=None,
create_disposition='CREATE_IF_NEEDED',
schema_update_options=(),
query_params=None,
labels=None,
priority='INTERACTIVE',
time_partitioning=None,
api_resource_configs=None,
cluster_fields=None,
encryption_configuration=None,
),
]
)
@mock.patch('airflow.providers.google.cloud.operators.bigquery.BigQueryHook')
def test_execute_bad_type(self, mock_hook):
operator = BigQueryExecuteQueryOperator(
task_id=TASK_ID,
sql=1,
destination_dataset_table=None,
write_disposition='WRITE_EMPTY',
allow_large_results=False,
flatten_results=None,
gcp_conn_id='google_cloud_default',
udf_config=None,
use_legacy_sql=True,
maximum_billing_tier=None,
maximum_bytes_billed=None,
create_disposition='CREATE_IF_NEEDED',
schema_update_options=(),
query_params=None,
labels=None,
priority='INTERACTIVE',
time_partitioning=None,
api_resource_configs=None,
cluster_fields=None,
)
with self.assertRaises(AirflowException):
operator.execute(MagicMock())
@mock.patch('airflow.providers.google.cloud.operators.bigquery.BigQueryHook')
def test_bigquery_operator_defaults(self, mock_hook):
operator = BigQueryExecuteQueryOperator(
task_id=TASK_ID,
sql='Select * from test_table',
dag=self.dag,
default_args=self.args,
schema_update_options=None,
)
operator.execute(MagicMock())
mock_hook.return_value.run_query.assert_called_once_with(
sql='Select * from test_table',
destination_dataset_table=None,
write_disposition='WRITE_EMPTY',
allow_large_results=False,
flatten_results=None,
udf_config=None,
maximum_billing_tier=None,
maximum_bytes_billed=None,
create_disposition='CREATE_IF_NEEDED',
schema_update_options=None,
query_params=None,
labels=None,
priority='INTERACTIVE',
time_partitioning=None,
api_resource_configs=None,
cluster_fields=None,
encryption_configuration=None,
)
self.assertTrue(isinstance(operator.sql, str))
ti = TaskInstance(task=operator, execution_date=DEFAULT_DATE)
ti.render_templates()
self.assertTrue(isinstance(ti.task.sql, str))
def test_bigquery_operator_extra_serialized_field_when_single_query(self):
with self.dag:
BigQueryExecuteQueryOperator(
task_id=TASK_ID,
sql='SELECT * FROM test_table',
)
serialized_dag = SerializedDAG.to_dict(self.dag)
self.assertIn("sql", serialized_dag["dag"]["tasks"][0])
dag = SerializedDAG.from_dict(serialized_dag)
simple_task = dag.task_dict[TASK_ID]
self.assertEqual(getattr(simple_task, "sql"), 'SELECT * FROM test_table')
#########################################################
# Verify Operator Links work with Serialized Operator
#########################################################
# Check Serialized version of operator link
self.assertEqual(
serialized_dag["dag"]["tasks"][0]["_operator_extra_links"],
[{'airflow.providers.google.cloud.operators.bigquery.BigQueryConsoleLink': {}}],
)
# Check DeSerialized version of operator link
self.assertIsInstance(list(simple_task.operator_extra_links)[0], BigQueryConsoleLink)
ti = TaskInstance(task=simple_task, execution_date=DEFAULT_DATE)
ti.xcom_push('job_id', 12345)
# check for positive case
url = simple_task.get_extra_links(DEFAULT_DATE, BigQueryConsoleLink.name)
self.assertEqual(url, 'https://console.cloud.google.com/bigquery?j=12345')
# check for negative case
url2 = simple_task.get_extra_links(datetime(2017, 1, 2), BigQueryConsoleLink.name)
self.assertEqual(url2, '')
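        # BigQueryConsoleLink builds its URL from the 'job_id' value pushed to
        # XCom for the matching execution_date, hence the empty string for a
        # date with no XCom entry.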
def test_bigquery_operator_extra_serialized_field_when_multiple_queries(self):
with self.dag:
BigQueryExecuteQueryOperator(
task_id=TASK_ID,
sql=['SELECT * FROM test_table', 'SELECT * FROM test_table2'],
)
serialized_dag = SerializedDAG.to_dict(self.dag)
self.assertIn("sql", serialized_dag["dag"]["tasks"][0])
dag = SerializedDAG.from_dict(serialized_dag)
simple_task = dag.task_dict[TASK_ID]
self.assertEqual(
getattr(simple_task, "sql"), ['SELECT * FROM test_table', 'SELECT * FROM test_table2']
)
#########################################################
# Verify Operator Links work with Serialized Operator
#########################################################
# Check Serialized version of operator link
self.assertEqual(
serialized_dag["dag"]["tasks"][0]["_operator_extra_links"],
[
{
'airflow.providers.google.cloud.operators.bigquery.BigQueryConsoleIndexableLink': {
'index': 0
}
},
{
'airflow.providers.google.cloud.operators.bigquery.BigQueryConsoleIndexableLink': {
'index': 1
}
},
],
)
# Check DeSerialized version of operator link
self.assertIsInstance(list(simple_task.operator_extra_links)[0], BigQueryConsoleIndexableLink)
ti = TaskInstance(task=simple_task, execution_date=DEFAULT_DATE)
job_id = ['123', '45']
ti.xcom_push(key='job_id', value=job_id)
self.assertEqual(
{'BigQuery Console #1', 'BigQuery Console #2'}, simple_task.operator_extra_link_dict.keys()
)
self.assertEqual(
'https://console.cloud.google.com/bigquery?j=123',
simple_task.get_extra_links(DEFAULT_DATE, 'BigQuery Console #1'),
)
self.assertEqual(
'https://console.cloud.google.com/bigquery?j=45',
simple_task.get_extra_links(DEFAULT_DATE, 'BigQuery Console #2'),
)
@provide_session
@mock.patch('airflow.providers.google.cloud.operators.bigquery.BigQueryHook')
def test_bigquery_operator_extra_link_when_missing_job_id(self, mock_hook, session):
bigquery_task = BigQueryExecuteQueryOperator(
task_id=TASK_ID,
sql='SELECT * FROM test_table',
dag=self.dag,
)
self.dag.clear()
session.query(XCom).delete()
self.assertEqual(
'',
bigquery_task.get_extra_links(DEFAULT_DATE, BigQueryConsoleLink.name),
)
@provide_session
@mock.patch('airflow.providers.google.cloud.operators.bigquery.BigQueryHook')
def test_bigquery_operator_extra_link_when_single_query(self, mock_hook, session):
bigquery_task = BigQueryExecuteQueryOperator(
task_id=TASK_ID,
sql='SELECT * FROM test_table',
dag=self.dag,
)
self.dag.clear()
session.query(XCom).delete()
ti = TaskInstance(
task=bigquery_task,
execution_date=DEFAULT_DATE,
)
job_id = '12345'
ti.xcom_push(key='job_id', value=job_id)
self.assertEqual(
f'https://console.cloud.google.com/bigquery?j={job_id}',
bigquery_task.get_extra_links(DEFAULT_DATE, BigQueryConsoleLink.name),
)
self.assertEqual(
'',
bigquery_task.get_extra_links(datetime(2019, 1, 1), BigQueryConsoleLink.name),
)
@provide_session
@mock.patch('airflow.providers.google.cloud.operators.bigquery.BigQueryHook')
def test_bigquery_operator_extra_link_when_multiple_query(self, mock_hook, session):
bigquery_task = BigQueryExecuteQueryOperator(
task_id=TASK_ID,
sql=['SELECT * FROM test_table', 'SELECT * FROM test_table2'],
dag=self.dag,
)
self.dag.clear()
session.query(XCom).delete()
ti = TaskInstance(
task=bigquery_task,
execution_date=DEFAULT_DATE,
)
job_id = ['123', '45']
ti.xcom_push(key='job_id', value=job_id)
self.assertEqual(
{'BigQuery Console #1', 'BigQuery Console #2'}, bigquery_task.operator_extra_link_dict.keys()
)
self.assertEqual(
'https://console.cloud.google.com/bigquery?j=123',
bigquery_task.get_extra_links(DEFAULT_DATE, 'BigQuery Console #1'),
)
self.assertEqual(
'https://console.cloud.google.com/bigquery?j=45',
bigquery_task.get_extra_links(DEFAULT_DATE, 'BigQuery Console #2'),
)
class TestBigQueryGetDataOperator(unittest.TestCase):
@mock.patch('airflow.providers.google.cloud.operators.bigquery.BigQueryHook')
def test_execute(self, mock_hook):
max_results = 100
selected_fields = 'DATE'
operator = BigQueryGetDataOperator(
task_id=TASK_ID,
dataset_id=TEST_DATASET,
table_id=TEST_TABLE_ID,
max_results=max_results,
selected_fields=selected_fields,
location=TEST_DATASET_LOCATION,
)
operator.execute(None)
mock_hook.return_value.list_rows.assert_called_once_with(
dataset_id=TEST_DATASET,
table_id=TEST_TABLE_ID,
max_results=max_results,
selected_fields=selected_fields,
location=TEST_DATASET_LOCATION,
)
class TestBigQueryTableDeleteOperator(unittest.TestCase):
@mock.patch('airflow.providers.google.cloud.operators.bigquery.BigQueryHook')
def test_execute(self, mock_hook):
ignore_if_missing = True
deletion_dataset_table = f'{TEST_DATASET}.{TEST_TABLE_ID}'
operator = BigQueryDeleteTableOperator(
task_id=TASK_ID,
deletion_dataset_table=deletion_dataset_table,
ignore_if_missing=ignore_if_missing,
)
operator.execute(None)
mock_hook.return_value.delete_table.assert_called_once_with(
table_id=deletion_dataset_table, not_found_ok=ignore_if_missing
)
class TestBigQueryGetDatasetTablesOperator(unittest.TestCase):
@mock.patch('airflow.providers.google.cloud.operators.bigquery.BigQueryHook')
def test_execute(self, mock_hook):
operator = BigQueryGetDatasetTablesOperator(
task_id=TASK_ID, dataset_id=TEST_DATASET, project_id=TEST_GCP_PROJECT_ID, max_results=2
)
operator.execute(None)
mock_hook.return_value.get_dataset_tables.assert_called_once_with(
dataset_id=TEST_DATASET,
project_id=TEST_GCP_PROJECT_ID,
max_results=2,
)
class TestBigQueryConnIdDeprecationWarning(unittest.TestCase):
@parameterized.expand(
[
(BigQueryCheckOperator, dict(sql='Select * from test_table', task_id=TASK_ID)),
(
BigQueryValueCheckOperator,
dict(sql='Select * from test_table', pass_value=95, task_id=TASK_ID),
),
(
BigQueryIntervalCheckOperator,
dict(table=TEST_TABLE_ID, metrics_thresholds={'COUNT(*)': 1.5}, task_id=TASK_ID),
),
(BigQueryGetDataOperator, dict(dataset_id=TEST_DATASET, table_id=TEST_TABLE_ID, task_id=TASK_ID)),
(BigQueryExecuteQueryOperator, dict(sql='Select * from test_table', task_id=TASK_ID)),
(BigQueryDeleteDatasetOperator, dict(dataset_id=TEST_DATASET, task_id=TASK_ID)),
(BigQueryCreateEmptyDatasetOperator, dict(dataset_id=TEST_DATASET, task_id=TASK_ID)),
(BigQueryDeleteTableOperator, dict(deletion_dataset_table=TEST_DATASET, task_id=TASK_ID)),
]
)
def test_bigquery_conn_id_deprecation_warning(self, operator_class, kwargs):
bigquery_conn_id = 'google_cloud_default'
with self.assertWarnsRegex(
DeprecationWarning,
"The bigquery_conn_id parameter has been deprecated. You should pass the gcp_conn_id parameter.",
):
operator = operator_class(bigquery_conn_id=bigquery_conn_id, **kwargs)
self.assertEqual(bigquery_conn_id, operator.gcp_conn_id)
class TestBigQueryUpsertTableOperator(unittest.TestCase):
@mock.patch('airflow.providers.google.cloud.operators.bigquery.BigQueryHook')
def test_execute(self, mock_hook):
operator = BigQueryUpsertTableOperator(
task_id=TASK_ID,
dataset_id=TEST_DATASET,
table_resource=TEST_TABLE_RESOURCES,
project_id=TEST_GCP_PROJECT_ID,
)
operator.execute(None)
mock_hook.return_value.run_table_upsert.assert_called_once_with(
dataset_id=TEST_DATASET, project_id=TEST_GCP_PROJECT_ID, table_resource=TEST_TABLE_RESOURCES
)
class TestBigQueryInsertJobOperator:
@mock.patch('airflow.providers.google.cloud.operators.bigquery.hashlib.md5')
@mock.patch('airflow.providers.google.cloud.operators.bigquery.BigQueryHook')
def test_execute_success(self, mock_hook, mock_md5):
job_id = "123456"
hash_ = "hash"
real_job_id = f"{job_id}_{hash_}"
mock_md5.return_value.hexdigest.return_value = hash_
configuration = {
"query": {
"query": "SELECT * FROM any",
"useLegacySql": False,
}
}
mock_hook.return_value.insert_job.return_value = MagicMock(job_id=real_job_id, error_result=False)
op = BigQueryInsertJobOperator(
task_id="insert_query_job",
configuration=configuration,
location=TEST_DATASET_LOCATION,
job_id=job_id,
project_id=TEST_GCP_PROJECT_ID,
)
result = op.execute({})
mock_hook.return_value.insert_job.assert_called_once_with(
configuration=configuration,
location=TEST_DATASET_LOCATION,
job_id=real_job_id,
project_id=TEST_GCP_PROJECT_ID,
)
assert result == real_job_id
@mock.patch('airflow.providers.google.cloud.operators.bigquery.hashlib.md5')
@mock.patch('airflow.providers.google.cloud.operators.bigquery.BigQueryHook')
def test_on_kill(self, mock_hook, mock_md5):
job_id = "123456"
hash_ = "hash"
real_job_id = f"{job_id}_{hash_}"
mock_md5.return_value.hexdigest.return_value = hash_
configuration = {
"query": {
"query": "SELECT * FROM any",
"useLegacySql": False,
}
}
mock_hook.return_value.insert_job.return_value = MagicMock(job_id=real_job_id, error_result=False)
op = BigQueryInsertJobOperator(
task_id="insert_query_job",
configuration=configuration,
location=TEST_DATASET_LOCATION,
job_id=job_id,
project_id=TEST_GCP_PROJECT_ID,
cancel_on_kill=False,
)
op.execute({})
op.on_kill()
mock_hook.return_value.cancel_job.assert_not_called()
op.cancel_on_kill = True
op.on_kill()
mock_hook.return_value.cancel_job.assert_called_once_with(
job_id=real_job_id,
location=TEST_DATASET_LOCATION,
project_id=TEST_GCP_PROJECT_ID,
)
@mock.patch('airflow.providers.google.cloud.operators.bigquery.hashlib.md5')
@mock.patch('airflow.providers.google.cloud.operators.bigquery.BigQueryHook')
def test_execute_failure(self, mock_hook, mock_md5):
job_id = "123456"
hash_ = "hash"
real_job_id = f"{job_id}_{hash_}"
mock_md5.return_value.hexdigest.return_value = hash_
configuration = {
"query": {
"query": "SELECT * FROM any",
"useLegacySql": False,
}
}
mock_hook.return_value.insert_job.return_value = MagicMock(job_id=real_job_id, error_result=True)
op = BigQueryInsertJobOperator(
task_id="insert_query_job",
configuration=configuration,
location=TEST_DATASET_LOCATION,
job_id=job_id,
project_id=TEST_GCP_PROJECT_ID,
)
with pytest.raises(AirflowException):
op.execute({})
@mock.patch('airflow.providers.google.cloud.operators.bigquery.hashlib.md5')
@mock.patch('airflow.providers.google.cloud.operators.bigquery.BigQueryHook')
def test_execute_reattach(self, mock_hook, mock_md5):
job_id = "123456"
hash_ = "hash"
real_job_id = f"{job_id}_{hash_}"
mock_md5.return_value.hexdigest.return_value = hash_
configuration = {
"query": {
"query": "SELECT * FROM any",
"useLegacySql": False,
}
}
mock_hook.return_value.insert_job.return_value.result.side_effect = Conflict("any")
job = MagicMock(
job_id=real_job_id,
error_result=False,
state="PENDING",
done=lambda: False,
)
mock_hook.return_value.get_job.return_value = job
op = BigQueryInsertJobOperator(
task_id="insert_query_job",
configuration=configuration,
location=TEST_DATASET_LOCATION,
job_id=job_id,
project_id=TEST_GCP_PROJECT_ID,
reattach_states={"PENDING"},
)
result = op.execute({})
mock_hook.return_value.get_job.assert_called_once_with(
location=TEST_DATASET_LOCATION,
job_id=real_job_id,
project_id=TEST_GCP_PROJECT_ID,
)
job.result.assert_called_once_with()
assert result == real_job_id
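        # Reattach path: insert_job raised Conflict, so the operator fetched the
        # existing job via get_job and waited on job.result() instead of failing.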
@mock.patch('airflow.providers.google.cloud.operators.bigquery.hashlib.md5')
@mock.patch('airflow.providers.google.cloud.operators.bigquery.uuid')
@mock.patch('airflow.providers.google.cloud.operators.bigquery.BigQueryHook')
def test_execute_force_rerun(self, mock_hook, mock_uuid, mock_md5):
job_id = "123456"
hash_ = mock_uuid.uuid4.return_value.encode.return_value
real_job_id = f"{job_id}_{hash_}"
mock_md5.return_value.hexdigest.return_value = hash_
configuration = {
"query": {
"query": "SELECT * FROM any",
"useLegacySql": False,
}
}
job = MagicMock(
job_id=real_job_id,
error_result=False,
)
mock_hook.return_value.insert_job.return_value = job
op = BigQueryInsertJobOperator(
task_id="insert_query_job",
configuration=configuration,
location=TEST_DATASET_LOCATION,
job_id=job_id,
project_id=TEST_GCP_PROJECT_ID,
force_rerun=True,
)
result = op.execute({})
mock_hook.return_value.insert_job.assert_called_once_with(
configuration=configuration,
location=TEST_DATASET_LOCATION,
job_id=real_job_id,
project_id=TEST_GCP_PROJECT_ID,
)
assert result == real_job_id
@mock.patch('airflow.providers.google.cloud.operators.bigquery.hashlib.md5')
@mock.patch('airflow.providers.google.cloud.operators.bigquery.BigQueryHook')
def test_execute_no_force_rerun(self, mock_hook, mock_md5):
job_id = "123456"
hash_ = "hash"
real_job_id = f"{job_id}_{hash_}"
mock_md5.return_value.hexdigest.return_value = hash_
configuration = {
"query": {
"query": "SELECT * FROM any",
"useLegacySql": False,
}
}
mock_hook.return_value.insert_job.return_value.result.side_effect = Conflict("any")
job = MagicMock(
job_id=real_job_id,
error_result=False,
state="DONE",
done=lambda: True,
)
mock_hook.return_value.get_job.return_value = job
op = BigQueryInsertJobOperator(
task_id="insert_query_job",
configuration=configuration,
location=TEST_DATASET_LOCATION,
job_id=job_id,
project_id=TEST_GCP_PROJECT_ID,
reattach_states={"PENDING"},
)
# No force rerun
with pytest.raises(AirflowException):
op.execute({})
@mock.patch('airflow.providers.google.cloud.operators.bigquery.hashlib.md5')
@pytest.mark.parametrize(
"test_dag_id, expected_job_id",
[("test-dag-id-1.1", "airflow_test_dag_id_1_1_test_job_id_2020_01_23T00_00_00_hash")],
)
def test_job_id_validity(self, mock_md5, test_dag_id, expected_job_id):
hash_ = "hash"
mock_md5.return_value.hexdigest.return_value = hash_
context = {"execution_date": datetime(2020, 1, 23)}
configuration = {
"query": {
"query": "SELECT * FROM any",
"useLegacySql": False,
}
}
with DAG(dag_id=test_dag_id, start_date=datetime(2020, 1, 23)):
op = BigQueryInsertJobOperator(
task_id="test_job_id", configuration=configuration, project_id=TEST_GCP_PROJECT_ID
)
assert op._job_id(context) == expected_job_id
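        # The generated id is "airflow_<dag_id>_<task_id>_<execution_date>_<hash>"
        # with characters BigQuery disallows in job ids replaced by "_",
        # e.g. "test-dag-id-1.1" becomes "test_dag_id_1_1".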
| apache-2.0 | 4,811,003,768,862,688,000 | 36.388379 | 110 | 0.598479 | false |
beni55/sentry | src/sentry/migrations/0100_auto__add_field_tagkey_label.py | 3 | 26393 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'TagKey.label'
db.add_column('sentry_filterkey', 'label',
self.gf('django.db.models.fields.CharField')(max_length=64, null=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'TagKey.label'
db.delete_column('sentry_filterkey', 'label')
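    # South runs forwards() on migrate and backwards() on rollback; the frozen
    # `models` dict below only reconstructs the ORM state for this migration.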
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'sentry.accessgroup': {
'Meta': {'unique_together': "(('team', 'name'),)", 'object_name': 'AccessGroup'},
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'managed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.User']", 'symmetrical': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'projects': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['sentry.Project']", 'symmetrical': 'False'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sentry.Team']"}),
'type': ('django.db.models.fields.IntegerField', [], {'default': '50'})
},
u'sentry.activity': {
'Meta': {'object_name': 'Activity'},
'data': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sentry.Event']", 'null': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sentry.Group']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sentry.Project']"}),
'type': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True'})
},
u'sentry.alert': {
'Meta': {'object_name': 'Alert'},
'data': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sentry.Group']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sentry.Project']"}),
'related_groups': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'related_alerts'", 'symmetrical': 'False', 'through': u"orm['sentry.AlertRelatedGroup']", 'to': u"orm['sentry.Group']"}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
},
u'sentry.alertrelatedgroup': {
'Meta': {'unique_together': "(('group', 'alert'),)", 'object_name': 'AlertRelatedGroup'},
'alert': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sentry.Alert']"}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sentry.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
u'sentry.event': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'Event', 'db_table': "'sentry_message'"},
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_column': "'view'", 'blank': 'True'}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'db_column': "'message_id'"}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'event_set'", 'null': 'True', 'to': u"orm['sentry.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
'logger': ('django.db.models.fields.CharField', [], {'default': "'root'", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'null': 'True'}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sentry.Project']", 'null': 'True'}),
'server_name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}),
'site': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}),
'time_spent': ('django.db.models.fields.FloatField', [], {'null': 'True'})
},
u'sentry.eventmapping': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'EventMapping'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sentry.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sentry.Project']"})
},
u'sentry.group': {
'Meta': {'unique_together': "(('project', 'checksum'),)", 'object_name': 'Group', 'db_table': "'sentry_groupedmessage'"},
'active_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}),
'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_column': "'view'", 'blank': 'True'}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
'logger': ('django.db.models.fields.CharField', [], {'default': "'root'", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'null': 'True'}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sentry.Project']", 'null': 'True'}),
'resolved_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'time_spent_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'time_spent_total': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1', 'db_index': 'True'})
},
u'sentry.groupbookmark': {
'Meta': {'unique_together': "(('project', 'user', 'group'),)", 'object_name': 'GroupBookmark'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'bookmark_set'", 'to': u"orm['sentry.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'bookmark_set'", 'to': u"orm['sentry.Project']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'sentry_bookmark_set'", 'to': u"orm['auth.User']"})
},
u'sentry.groupcountbyminute': {
'Meta': {'unique_together': "(('project', 'group', 'date'),)", 'object_name': 'GroupCountByMinute', 'db_table': "'sentry_messagecountbyminute'"},
'date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sentry.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sentry.Project']", 'null': 'True'}),
'time_spent_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'time_spent_total': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
u'sentry.groupmeta': {
'Meta': {'unique_together': "(('group', 'key'),)", 'object_name': 'GroupMeta'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sentry.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'value': ('django.db.models.fields.TextField', [], {})
},
u'sentry.grouptag': {
'Meta': {'unique_together': "(('project', 'key', 'value', 'group'),)", 'object_name': 'GroupTag', 'db_table': "'sentry_messagefiltervalue'"},
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sentry.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sentry.Project']", 'null': 'True'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'sentry.grouptagkey': {
'Meta': {'unique_together': "(('project', 'group', 'key'),)", 'object_name': 'GroupTagKey'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sentry.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sentry.Project']", 'null': 'True'}),
'values_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
u'sentry.lostpasswordhash': {
'Meta': {'object_name': 'LostPasswordHash'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'unique': 'True'})
},
u'sentry.option': {
'Meta': {'object_name': 'Option'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}),
'value': ('picklefield.fields.PickledObjectField', [], {})
},
u'sentry.pendingteammember': {
'Meta': {'unique_together': "(('team', 'email'),)", 'object_name': 'PendingTeamMember'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'pending_member_set'", 'to': u"orm['sentry.Team']"}),
'type': ('django.db.models.fields.IntegerField', [], {'default': '50'})
},
u'sentry.project': {
'Meta': {'unique_together': "(('team', 'slug'),)", 'object_name': 'Project'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'sentry_owned_project_set'", 'null': 'True', 'to': u"orm['auth.User']"}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'null': 'True'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sentry.Team']", 'null': 'True'})
},
u'sentry.projectcountbyminute': {
'Meta': {'unique_together': "(('project', 'date'),)", 'object_name': 'ProjectCountByMinute'},
'date': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sentry.Project']", 'null': 'True'}),
'time_spent_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'time_spent_total': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
u'sentry.projectkey': {
'Meta': {'object_name': 'ProjectKey'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'key_set'", 'to': u"orm['sentry.Project']"}),
'public_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'secret_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True'}),
'user_added': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'keys_added_set'", 'null': 'True', 'to': u"orm['auth.User']"})
},
u'sentry.projectoption': {
'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'ProjectOption', 'db_table': "'sentry_projectoptions'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sentry.Project']"}),
'value': ('picklefield.fields.PickledObjectField', [], {})
},
u'sentry.searchdocument': {
'Meta': {'unique_together': "(('project', 'group'),)", 'object_name': 'SearchDocument'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_changed': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sentry.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sentry.Project']"}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'total_events': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'})
},
u'sentry.searchtoken': {
'Meta': {'unique_together': "(('document', 'field', 'token'),)", 'object_name': 'SearchToken'},
'document': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'token_set'", 'to': u"orm['sentry.SearchDocument']"}),
'field': ('django.db.models.fields.CharField', [], {'default': "'text'", 'max_length': '64'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'token': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
u'sentry.tagkey': {
'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'TagKey', 'db_table': "'sentry_filterkey'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sentry.Project']"}),
'values_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
u'sentry.tagvalue': {
'Meta': {'unique_together': "(('project', 'key', 'value'),)", 'object_name': 'TagValue', 'db_table': "'sentry_filtervalue'"},
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sentry.Project']", 'null': 'True'}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'sentry.team': {
'Meta': {'object_name': 'Team'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'team_memberships'", 'symmetrical': 'False', 'through': u"orm['sentry.TeamMember']", 'to': u"orm['auth.User']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'})
},
u'sentry.teammember': {
'Meta': {'unique_together': "(('team', 'user'),)", 'object_name': 'TeamMember'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'member_set'", 'to': u"orm['sentry.Team']"}),
'type': ('django.db.models.fields.IntegerField', [], {'default': '50'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'sentry_teammember_set'", 'to': u"orm['auth.User']"})
},
u'sentry.useroption': {
'Meta': {'unique_together': "(('user', 'project', 'key'),)", 'object_name': 'UserOption'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sentry.Project']", 'null': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'value': ('picklefield.fields.PickledObjectField', [], {})
}
}
complete_apps = ['sentry'] | bsd-3-clause | 4,254,691,550,191,186,000 | 82.26183 | 225 | 0.548441 | false |
cevaris/pants | src/python/pants/ivy/bootstrapper.py | 12 | 7022 | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import hashlib
import logging
import os
import shutil
from pants.base.build_environment import get_buildroot
from pants.ivy.ivy import Ivy
from pants.ivy.ivy_subsystem import IvySubsystem
from pants.net.http.fetcher import Fetcher
from pants.util.contextutil import temporary_file
from pants.util.dirutil import safe_delete, touch
logger = logging.getLogger(__name__)
class Bootstrapper(object):
"""Bootstraps a working ivy resolver.
By default a working resolver will be bootstrapped from maven central and it will use standard
public jar repositories and a standard ivy local cache directory to execute resolve operations.
By default ivy will be bootstrapped from a stable ivy jar version found in maven central, but
  this can be overridden with the ``--ivy-bootstrap-jar-url`` option. Additionally, the
bootstrapping will use a connect/read timeout of 1 second by default, but this can be raised by
specifying a ``--ivy-bootstrap-fetch-timeout-secs`` option.
After bootstrapping, ivy will re-resolve itself. By default it does this via maven central, but
a custom ivy tool classpath can be specified by using the ``--ivy-ivy-profile`` option to point to
a custom ivy profile ivy.xml. This can be useful to upgrade ivy to a version released after pants
or else mix in auxiliary jars that provide ivy plugins.
Finally, by default the ivysettings.xml embedded in the ivy jar will be used in conjunction with
  the default ivy local cache directory of ~/.ivy2/cache. To specify custom values for these,
  provide the ``--ivy-ivy-settings`` and ``--ivy-cache-dir`` options.
"""
class Error(Exception):
"""Indicates an error bootstrapping an ivy classpath."""
_INSTANCE = None
@classmethod
def default_ivy(cls, bootstrap_workunit_factory=None):
"""Returns an Ivy instance using the default global bootstrapper.
By default runs ivy via a subprocess java executor. Callers of execute() on the returned
Ivy instance can provide their own executor.
:param bootstrap_workunit_factory: the optional workunit to bootstrap under.
:returns: an Ivy instance.
:raises: Bootstrapper.Error if the default ivy instance could not be bootstrapped
"""
return cls.instance().ivy(bootstrap_workunit_factory=bootstrap_workunit_factory)
def __init__(self, ivy_subsystem=None):
"""Creates an ivy bootstrapper."""
self._ivy_subsystem = ivy_subsystem or IvySubsystem.global_instance()
self._version_or_ivyxml = self._ivy_subsystem.get_options().ivy_profile
self._classpath = None
@classmethod
def instance(cls):
""":returns: the default global ivy bootstrapper.
:rtype: Bootstrapper
"""
if cls._INSTANCE is None:
cls._INSTANCE = Bootstrapper()
return cls._INSTANCE
@classmethod
def reset_instance(cls):
cls._INSTANCE = None
def ivy(self, bootstrap_workunit_factory=None):
"""Returns an ivy instance bootstrapped by this bootstrapper.
:param bootstrap_workunit_factory: the optional workunit to bootstrap under.
:raises: Bootstrapper.Error if ivy could not be bootstrapped
"""
return Ivy(self._get_classpath(bootstrap_workunit_factory),
ivy_settings=self._ivy_subsystem.get_options().ivy_settings,
ivy_cache_dir=self._ivy_subsystem.get_options().cache_dir,
extra_jvm_options=self._ivy_subsystem.extra_jvm_options())
def _get_classpath(self, workunit_factory):
"""Returns the bootstrapped ivy classpath as a list of jar paths.
:raises: Bootstrapper.Error if the classpath could not be bootstrapped
"""
if not self._classpath:
self._classpath = self._bootstrap_ivy_classpath(workunit_factory)
return self._classpath
def _bootstrap_ivy_classpath(self, workunit_factory, retry=True):
# TODO(John Sirois): Extract a ToolCache class to control the path structure:
# https://jira.twitter.biz/browse/DPB-283
ivy_bootstrap_dir = os.path.join(self._ivy_subsystem.get_options().pants_bootstrapdir,
'tools', 'jvm', 'ivy')
digest = hashlib.sha1()
if os.path.isfile(self._version_or_ivyxml):
with open(self._version_or_ivyxml) as fp:
digest.update(fp.read())
else:
digest.update(self._version_or_ivyxml)
classpath = os.path.join(ivy_bootstrap_dir, '{}.classpath'.format(digest.hexdigest()))
if not os.path.exists(classpath):
ivy = self._bootstrap_ivy(os.path.join(ivy_bootstrap_dir, 'bootstrap.jar'))
args = ['-confs', 'default', '-cachepath', classpath]
if os.path.isfile(self._version_or_ivyxml):
args.extend(['-ivy', self._version_or_ivyxml])
else:
args.extend(['-dependency', 'org.apache.ivy', 'ivy', self._version_or_ivyxml])
try:
ivy.execute(args=args, workunit_factory=workunit_factory, workunit_name='ivy-bootstrap')
except ivy.Error as e:
safe_delete(classpath)
raise self.Error('Failed to bootstrap an ivy classpath! {}'.format(e))
with open(classpath) as fp:
cp = fp.read().strip().split(os.pathsep)
if not all(map(os.path.exists, cp)):
safe_delete(classpath)
if retry:
return self._bootstrap_ivy_classpath(workunit_factory, retry=False)
raise self.Error('Ivy bootstrapping failed - invalid classpath: {}'.format(':'.join(cp)))
return cp
def _bootstrap_ivy(self, bootstrap_jar_path):
options = self._ivy_subsystem.get_options()
if not os.path.exists(bootstrap_jar_path):
with temporary_file() as bootstrap_jar:
fetcher = Fetcher(get_buildroot())
checksummer = fetcher.ChecksumListener(digest=hashlib.sha1())
try:
logger.info('\nDownloading {}'.format(options.bootstrap_jar_url))
# TODO: Capture the stdout of the fetcher, instead of letting it output
# to the console directly.
fetcher.download(options.bootstrap_jar_url,
listener=fetcher.ProgressListener().wrap(checksummer),
path_or_fd=bootstrap_jar,
timeout_secs=options.bootstrap_fetch_timeout_secs)
logger.info('sha1: {}'.format(checksummer.checksum))
bootstrap_jar.close()
touch(bootstrap_jar_path)
shutil.move(bootstrap_jar.name, bootstrap_jar_path)
except fetcher.Error as e:
raise self.Error('Problem fetching the ivy bootstrap jar! {}'.format(e))
return Ivy(bootstrap_jar_path,
ivy_settings=options.bootstrap_ivy_settings or options.ivy_settings,
ivy_cache_dir=options.cache_dir,
extra_jvm_options=self._ivy_subsystem.extra_jvm_options())
| apache-2.0 | -6,217,470,393,869,496,000 | 42.079755 | 100 | 0.691114 | false |
davebshow/ipython-gremlin | gremlin/utils.py | 1 | 1441 | """Utility functions to support GremlinMagic operations"""
import asyncio
import json
import re
from gremlin_python.driver import request
from gremlin import registry, resultset
def parse(connection_str):
"""Parse connection string passed by user"""
if ' as ' in connection_str.lower():
descriptors = re.split(' as ', connection_str, flags=re.IGNORECASE)
else:
descriptors = (connection_str, None)
return descriptors
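# Example: "ws://localhost:8182/gremlin as g" parses to the pair
# ('ws://localhost:8182/gremlin', 'g'); without an " as " clause the alias
# is None.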
def _sanitize_namespace(user_ns):
bindings = {}
for k, v in user_ns.items():
try:
json.dumps(v)
        except (TypeError, ValueError):
            # Skip values that json can't serialize as Gremlin bindings.
            pass
else:
bindings[k] = v
return bindings
def submit(gremlin, user_ns, aliases, conn):
"""
    Submit a script to the Gremlin Server, using the IPython namespace to
    pass bindings, the Magics configuration to resolve aliases, and a
    connection registered with
:py:class:`ConnectionRegistry<gremlin.registry.ConnectionRegistry>`
"""
bindings = _sanitize_namespace(user_ns)
message = request.RequestMessage(
processor='', op='eval',
args={'gremlin': gremlin, 'aliases': aliases, 'bindings': bindings})
return asyncio.run_coroutine_threadsafe(_submit(conn, message), registry.LOOP).result()
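# Note: registry.LOOP is assumed to run on a background thread, so submit()
# bridges from IPython's synchronous thread via run_coroutine_threadsafe and
# blocks on .result() until the server replies.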
async def _submit(conn, message):
result_set = await conn.write(message)
results = await result_set.all()
return resultset.ResultSet(results, message)
| mit | 4,764,980,737,259,181,000 | 29.020833 | 91 | 0.675226 | false |
rdegraw/numbers-py | change_machine.py | 1 | 1060 | #--------------------------------------------------
#
# Change Machine
# - user enters cost and amount given
# - we give the proper change back
#
#--------------------------------------------------

cost = float( raw_input( "How much does it cost? ") )
paid = float( raw_input( "How much did you pay? ") )

# Work in whole cents: repeated floor/mod arithmetic on binary floats
# can lose pennies to rounding error (0.01 has no exact float form).
remaining = int( round( (paid - cost) * 100 ) )

dollar_change, remaining = divmod( remaining, 100 )
quarter_change, remaining = divmod( remaining, 25 )
dime_change, remaining = divmod( remaining, 10 )
nickel_change, penny_change = divmod( remaining, 5 )

if dollar_change > 0:
	print dollar_change, " dollars"
if quarter_change > 0:
	print quarter_change, " quarters"
if dime_change > 0:
	print dime_change, " dimes"
if nickel_change > 0:
	print nickel_change, " nickels"
if penny_change > 0:
	print penny_change, " pennies"
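# Worked example: cost 3.89, paid 5.00 -> remaining = 111 cents
# -> 1 dollar, 0 quarters, 1 dime, 0 nickels, 1 penny.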
| unlicense | -3,052,821,139,406,254,000 | 21.083333 | 53 | 0.627358 | false |
masashi-y/myccg | src/py/cat.py | 1 | 10714 |
from py.py_utils import find_closing_bracket, \
find_non_nested_char, drop_brackets
import re
reg_non_punct = re.compile(r"[A-Za-z]+")
WILDCARD = "X"
bracket_and_quote_cat = ["LRB", "RRB", "LQU", "RQU"]
num_cats = 0
cache = {}
class Slash(object):
FwdApp = 0
BwdApp = 1
EitherApp = 2
def __init__(self, string):
if string == "/":
self._slash = Slash.FwdApp
elif string == "\\":
self._slash = Slash.BwdApp
elif string == "|":
self._slash = Slash.EitherApp
else:
raise RuntimeError("Invalid slash: " + string)
def __str__(self):
return "/\\|"[self._slash]
def __eq__(self, other):
if isinstance(other, Slash):
return self._slash == other._slash
elif isinstance(other, int):
return self._slash == other
else:
return False
@staticmethod
def Fwd():
return Slash("/")
@staticmethod
def Bwd():
return Slash("\\")
@staticmethod
def Either():
return Slash("|")
def matches(self, other):
return self._slash == Slash.EitherApp or \
self._slash == other._slash
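    # e.g. Slash("|") matches both "/" and "\\", while Slash("/") only
    # matches another "/".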
class Cat(object):
def __init__(self, string, semantics):
self.string = string + ("" if semantics == None \
else "{{{0}}}".format(semantics))
global num_cats
self.id = num_cats
num_cats += 1
def __hash__(self):
return self.id
def __eq__(self, other):
return self.id == other.id
def __str__(self):
return self.string
def __repr__(self):
return self.string
def hashcode(self):
return self.id
def substitute(self, sub):
if sub is None:
return self
        return parse(self.string.replace(WILDCARD, sub))
    def has_functor_at_left(self, order):
        # True iff the left spine consists of functors down to depth `order`.
        if not self.is_functor:
            return False
        return order == 0 or self.left.has_functor_at_left(order - 1)
def get_left(self, order):
if order == 0: return self
return self.left.get_left(order-1)
class Functor(Cat):
def __init__(self, left, slash, right, semantics):
base = left.with_brackets + str(slash) + right.with_brackets
super(Functor, self).__init__(
base if semantics is None else "({})".format(base),
semantics)
self.left = left
self.slash = slash
self.right = right
self.semantics = semantics
@property
def without_feat(self):
return self.left.without_feat + \
str(self.slash) + self.right.without_feat
@property
def without_semantics(self):
base = self.left.without_semantics + \
str(self.slash) + self.right.without_semantics
return base if self.semantics is None else "({})".format(base)
@property
def with_brackets(self):
return "({})".format(self.string)
@property
def is_modifier(self):
return self.left == self.right
@property
def is_modifier_without_feat(self):
return self.left.without_feat == self.right.without_feat
@property
def is_type_raised(self):
"""
X|(X|Y)
"""
return self.right.is_functor and \
self.right.left == self.left
@property
def is_type_raised_without_feat(self):
return self.right.is_functor and \
self.right.left.without_feat == self.left.without_feat
@property
def is_forward_type_raised(self):
"""
X/(X\Y)
"""
return self.is_type_raised and \
            self.slash == Slash.FwdApp
@property
def is_backward_type_raised(self):
"""
X\(X/Y)
"""
return self.is_type_raised and \
            self.slash == Slash.BwdApp
@property
def is_functor(self):
return True
@property
def is_punct(self):
return False
@property
def is_N_or_NP(self):
return False
@property
def n_args(self):
return 1 + self.left.n_args
@property
def feat(self):
raise NotImplementedError()
@property
def type(self):
raise NotImplementedError()
def get_substitution(self, other):
res = self.right.get_substitution(other.right)
if res is None:
res = self.left.get_substitution(other.left)
return res
def matches(self, other):
return other.is_functor and \
self.left.matches(other.left) and \
self.right.matches(other.right) and \
self.slash.matches(other.slash)
def replace_arg(self, argn, new_cat):
if argn == self.n_args:
            return make(self.left, self.slash, new_cat)
        else:
            return make(
                self.left.replace_arg(argn, new_cat), self.slash, self.right)
def arg(self, argn):
if argn == self.n_args:
return self.right
else:
return self.left.arg(argn)
@property
def head_cat(self):
return self.left.head_cat
def is_function_into(self, cat):
return cat.matches(self) or \
self.left.is_function_into(cat)
def is_function_into_modifier(self):
return self.is_modifier or \
self.left.is_modifier
def drop_PP_and_PR_feat(self):
        return make(self.left.drop_PP_and_PR_feat(),
                    self.slash,
                    self.right.drop_PP_and_PR_feat())
class Atomic(Cat):
def __init__(self, base, feat, semantics):
super(Atomic, self).__init__(
base + ("" if feat is None else "[{}]".format(feat)),
semantics)
self.type = base
self.feat = feat
self.semantics = semantics
@property
def without_feat(self):
return self.type
@property
def without_semantics(self):
return self.type + ("" if self.feat is None else "[{}]".format(self.feat))
@property
def with_brackets(self):
return self.string
@property
def is_modifier(self):
return False
@property
def is_modifier_without_feat(self):
return False
@property
def is_type_raised(self):
return False
@property
def is_type_raised_without_feat(self):
return False
@property
def is_forward_type_raised(self):
return False
@property
def is_backward_type_raised(self):
return False
@property
def is_functor(self):
return False
@property
def is_punct(self):
return not reg_non_punct.match(self.type) or \
self.type in bracket_and_quote_cat
@property
def is_N_or_NP(self):
return self.type == "N" or self.type == "NP"
@property
def n_args(self):
return 0
def get_substitution(self, other):
if self.feat == WILDCARD:
return other.feat
elif other.feat == WILDCARD:
return self.feat
return None
def matches(self, other):
return not other.is_functor and \
self.type == other.type and \
            (self.feat is None or \
self.feat == other.feat or \
WILDCARD == self.feat or \
WILDCARD == other.feat or \
self.feat == "nb")
def replace_arg(self, argn, new_cat):
if argn == 0: return new_cat
raise RuntimeError("Error replacing argument of category")
def arg(self, argn):
if argn == 0: return self
raise RuntimeError("Error getting argument of category")
@property
def head_cat(self):
return self
def is_function_into(self, cat):
return cat.matches(self)
def is_function_into_modifier(self):
return False
def add_feat(self, new_feat):
if self.feat is not None:
raise RuntimeError("Only one feat is allowed")
new_feat = new_feat.replace("/", "")
new_feat = new_feat.replace("\\", "")
return parse("{}[{}]".format(self.type, new_feat))
def drop_PP_and_PR_feat(self):
if self.type == "PP" or self.type == "PR":
return parse(self.type)
else:
return self
def parse(cat):
global cache
if cat in cache:
return cache[cat]
else:
name = drop_brackets(cat)
if name in cache:
res = cache[name]
else:
res = parse_uncached(name)
if name != cat:
cache[name] = res
cache[cat] = res
return res
def parse_uncached(cat):
new_cat = cat
if new_cat.endswith("}"):
open_idx = new_cat.rfind("{")
semantics = new_cat[open_idx + 1:-1]
new_cat = new_cat[0:open_idx]
else:
semantics = None
new_cat = drop_brackets(new_cat)
# if new_cat.startswith("("):
# close_idx = find_closing_bracket(new_cat, 0)
#
# if not any(slash in new_cat for slash in "/\\|"):
# new_cat = new_cat[1:close_idx]
# res = parse_uncached(new_cat)
# return res
end_idx = len(new_cat)
op_idx = find_non_nested_char(new_cat, "/\\|")
if op_idx == -1:
# atomic category
feat_idx = new_cat.find("[")
feats = []
base = new_cat if feat_idx == -1 else new_cat[0:feat_idx]
while feat_idx > -1:
feats.append(new_cat[feat_idx + 1:new_cat.find("]", feat_idx)])
feat_idx = new_cat.find("[", feat_idx + 1)
if len(feats) > 1:
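            # Multiple features are tolerated here; only the first one is kept below.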
pass
# raise RuntimeError("Can only handle single features: " + cat)
feat = None if len(feats) == 0 else feats[0]
return Atomic(base, feat, semantics)
else:
# functor category
left = parse(new_cat[:op_idx])
slash = Slash(new_cat[op_idx:op_idx + 1])
right = parse(new_cat[op_idx + 1:end_idx])
return Functor(left, slash, right, semantics)
def make(left, op, right):
return parse(left.with_brackets + str(op) + right.with_brackets)
def compose(order, head, slash, tail):
    if order == 0:
        return make(head, slash, tail.right)
    target = tail.get_left(order).right
    return compose(order-1, make(head, slash, target),
        tail.get_left(order-1).slash, tail)
COMMA = parse(",")
SEMICOLON = parse(";")
CONJ = parse("conj")
N = parse("N")
LQU = parse("LQU")
LRB = parse("LRB")
NP = parse("NP")
PP = parse("PP")
PREPOSITION = parse("PP/NP")
PR = parse("PR")
| mit | 1,595,455,010,654,495,200 | 24.754808 | 82 | 0.543588 | false |
AMOboxTV/AMOBox.LegoBuild | plugin.video.titan/resources/lib/resolvers/streamcloud.py | 3 | 1630 | # -*- coding: utf-8 -*-
'''
Genesis Add-on
Copyright (C) 2015 lambda
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re, urllib
from resources.lib.libraries import client
def resolve(url):
try:
headers = '|%s' % urllib.urlencode({'User-Agent': client.agent(), 'Referer': url})
url = re.compile('//.+?/([\w]+)').findall(url)[0]
url = 'http://streamcloud.eu/%s' % url
result = client.request(url)
post = {}
f = client.parseDOM(result, 'form', attrs = {'class': 'proform'})[0]
k = client.parseDOM(f, 'input', ret='name', attrs = {'type': 'hidden'})
for i in k: post.update({i: client.parseDOM(f, 'input', ret='value', attrs = {'name': i})[0]})
post = urllib.urlencode(post)
post = post.replace('op=download1', 'op=download2')
result = client.request(url, post=post)
url = re.compile('file *: *"(http.+?)"').findall(result)[-1]
url += headers
return url
except:
return
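# Usage sketch (hypothetical stream id; the function returns None on failure):
#   link = resolve('http://streamcloud.eu/abcd1234wxyz/movie.mp4.html')
#   # link -> direct video URL with '|User-Agent=...&Referer=...' appended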
| gpl-2.0 | 7,135,748,257,723,958,000 | 30.960784 | 102 | 0.625767 | false |
infosiftr/openxenmanager | oxcSERVER_newvm.py | 2 | 9710 | # -----------------------------------------------------------------------
# OpenXenManager
#
# Copyright (C) 2009 Alberto Gonzalez Rodriguez [email protected]
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MER-
# CHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# -----------------------------------------------------------------------
import xmlrpclib, urllib
import asyncore, socket
import select
import gtk
from os import chdir
import platform
import sys, shutil
import datetime
from threading import Thread
from configobj import ConfigObj
import xml.dom.minidom
from operator import itemgetter
import pdb
import rrdinfo
import time
import gobject
from messages import messages, messages_header
class oxcSERVERnewvm:
def get_path_available_host(self):
path = 0
i = 0
for host in self.all_hosts.keys():
if self.all_hosts[host]['enabled']:
path = i
i = i + 1
return path
def first_network(self):
for network in self.all_network:
return self.all_network[network]['name_label'].replace('Pool-wide network associated with eth','Network ')
def first_network_ref(self):
for network in self.all_network:
return network
def fill_listnewvmstorage(self, list, vm, host, ref):
list.clear()
if "disks" in self.all_vms[vm]['other_config']:
dom = xml.dom.minidom.parseString(self.all_vms[vm]['other_config']['disks'])
nodes = dom.getElementsByTagName("disk")
for node in nodes:
if self.default_sr == "OpaqueRef:NULL" or self.default_sr not in self.all_storage:
self.default_sr = self.all_storage.keys()[0]
list.append(["%0.2f" % (float(node.attributes.getNamedItem("size").value)/1024/1024/1024),
self.all_storage[self.default_sr]['name_label'] + " on " +
self.all_hosts[host]['name_label'],
str(self.all_storage[self.default_sr]['shared']),ref])
else:
for vbd in self.all_vbd:
if self.all_vbd[vbd]['VM'] == vm:
if self.all_vbd[vbd]["type"] == "Disk":
vdi = self.all_vbd[vbd]["VDI"]
list.append(["%0.2f" % (float(self.all_vdi[vdi]["virtual_size"])/1024/1024/1024),
self.all_storage[self.default_sr]['name_label'] + " on " +
self.all_hosts[host]['name_label'],
str(self.all_storage[self.default_sr]['shared']),ref])
def fill_listnewvmdisk(self, list, host):
list.clear()
i = 0
default_sr = 0
for sr in self.all_storage.keys():
storage = self.all_storage[sr]
if storage['type'] != "iso" and storage['type'] != "udev":
if self.default_sr == sr:
default_sr = i
try:
list.append([storage['name_label'],
storage['name_description'],
self.convert_bytes(storage['physical_size']),
self.convert_bytes(int(storage['physical_size'])-int(storage['virtual_allocation'])), sr])
except:
pass
i = i + 1
return default_sr
def create_newvm(self, data):
res = self.connection.VM.clone(self.session_uuid, data['ref'], data['name'])
if not "Value" in res:
self.wine.show_error_dlg(str(res["ErrorDescription"]))
return
vm_uuid = res['Value']
if data["startvm"]:
self.autostart[vm_uuid] = data['host']
self.connection.VM.set_name_description(self.session_uuid, vm_uuid, data['description'])
other_config = self.all_vms[data['ref']]['other_config']
other_config["default_template"] = "false"
selection = self.wine.builder.get_object("treenewvmstorage").get_selection()
selection.set_mode(gtk.SELECTION_MULTIPLE)
selection.select_all()
model, selected = selection.get_selected_rows()
iters = [model.get_iter(path) for path in selected]
i = 0
disk = "<provision>"
for iter_ref in iters:
size = int(float(self.wine.builder.get_object("listnewvmstorage").get_value(iter_ref, 0))*1024*1024*1024)
sr = self.all_storage[self.wine.builder.get_object("listnewvmstorage").get_value(iter_ref, 3)]["uuid"]
if "postinstall" not in other_config and data["location"] != "radiobutton1":
disk += '<disk device="%d" size="%d" sr="%s" bootable="false" type="system" ionice="0" readonly="False" />' % (i, size, sr)
else:
if i == 0:
disk += '<disk device="%d" size="%d" sr="%s" bootable="true" type="system" ionice="0" readonly="False" />' % (i, size, sr)
else:
disk += '<disk device="%d" size="%d" sr="%s" bootable="false" type="system" ionice="0" readonly="False" />' % (i, size, sr)
i = i + 1
disk += "</provision>"
setdisks = True
for vbd in self.all_vbd:
if self.all_vbd[vbd]['VM'] == data["ref"]:
if self.all_vbd[vbd]["type"] == "Disk":
setdisks = False
if setdisks:
other_config['disks'] = disk
selection.unselect_all()
        self.connection.VM.set_affinity(self.session_uuid, vm_uuid, data['host'])
if "postinstall" not in other_config:
if data["location"] == "radiobutton1":
other_config["install-repository"] = data['location_url']
else:
other_config["install-repository"] = "cdrom"
from uuid import uuid1 as uuid
other_config["mac_seed"] = str(uuid())
self.connection.VM.set_other_config(self.session_uuid, vm_uuid, other_config)
ref = self.connection.VM.provision(
self.session_uuid, vm_uuid)
self.track_tasks[ref['Value']] = vm_uuid
vif_cfg = {
'uuid': '',
'allowed_operations': [],
'current_operations': {},
'device': '0',
'network': '',
'VM': '',
'MAC': '',
'MTU': '0',
"other_config": {},
'currently_attached': False,
'status_code': "0",
'status_detail': "",
"runtime_properties": {},
"qos_algorithm_type": "",
"qos_algorithm_params": {},
"metrics": "",
'MAC_autogenerated': False
}
vbd_cfg = {
'VM': vm_uuid,
'VDI': data['vdi'],
'userdevice': str(len(iters)+1),
'bootable': True,
'mode': "RO",
'type': "CD",
            'unpluggable': "0",
'storage_lock': "0",
'empty': False,
'currently_attached': "0",
'status_code': "0",
'other_config': {},
'qos_algorithm_type': "",
'qos_algorithm_params': {},
}
if data['vdi']:
res = self.connection.VBD.create(self.session_uuid, vbd_cfg)
self.connection.VBD.insert(self.session_uuid, res['Value'], data['vdi'])
selection = self.wine.builder.get_object("treenewvmnetwork").get_selection()
selection.set_mode(gtk.SELECTION_MULTIPLE)
selection.select_all()
model, selected = selection.get_selected_rows()
iters = [model.get_iter(path) for path in selected]
i = 0
memory = int(data['memorymb'])
        res = self.connection.VM.set_memory_limits(self.session_uuid, vm_uuid, str(16777216), str(int(memory*1024*1024)), str(int(memory*1024*1024)), str(int(memory*1024*1024)))
        if "Value" in res:
            self.track_tasks[res['Value']] = vm_uuid
else:
if res["ErrorDescription"][0] == "MESSAGE_METHOD_UNKNOWN":
self.connection.VM.set_memory_static_min(self.session_uuid, vm_uuid, str(memory*1024*1024))
self.connection.VM.set_memory_dynamic_min(self.session_uuid, vm_uuid, str(memory*1024*1024))
self.connection.VM.set_memory_static_max(self.session_uuid, vm_uuid, str(memory*1024*1024))
self.connection.VM.set_memory_dynamic_max(self.session_uuid, vm_uuid, str(memory*1024*1024))
self.connection.VM.set_VCPUs_max (self.session_uuid, vm_uuid, str(int(data['numberofvcpus'])))
self.connection.VM.set_VCPUs_at_startup(self.session_uuid, vm_uuid, str(int(data['numberofvcpus'])))
self.connection.VM.set_PV_args(self.session_uuid, vm_uuid, data['entrybootparameters'])
for iter_ref in iters:
vif_cfg['device'] = str(i)
vif_cfg['network'] = self.wine.builder.get_object("listnewvmnetworks").get_value(iter_ref, 3)
vif_cfg['VM'] = vm_uuid
self.connection.VIF.create(self.session_uuid, vif_cfg)
i = i +1
| gpl-2.0 | -2,012,403,232,222,493,700 | 43.953704 | 174 | 0.551596 | false |
codedsk/hubcheck | hubcheck/pageobjects/widgets/resources_new_tags_form.py | 1 | 1607 | from hubcheck.pageobjects.widgets.form_base import FormBase
from hubcheck.pageobjects.basepageelement import TextAC
class ResourcesNewTagsForm(FormBase):
def __init__(self, owner, locatordict={}):
super(ResourcesNewTagsForm,self).__init__(owner,locatordict)
# load hub's classes
ResourcesNewTagsForm_Locators = self.load_class('ResourcesNewTagsForm_Locators')
# update this object's locator
self.locators.update(ResourcesNewTagsForm_Locators.locators)
# update the locators with those from the owner
self.update_locators_from_owner()
# setup page object's components
self.tags = TextAC(self,{'base':'tags',
'aclocatorid':'tagsac',
'choicelocatorid':'tagsacchoices',
'tokenlocatorid':'tagsactoken',
'deletelocatorid':'tagsacdelete'})
self.fields = ['tags']
# update the component's locators with this objects overrides
self._updateLocators()
class ResourcesNewTagsForm_Locators_Base(object):
"""locators for ResourcesNewTagsForm object"""
locators = {
'base' : "css=#hubForm",
'tags' : "css=#actags",
'tagsac' : "css=#token-input-actags",
'tagsacchoices' : "css=.token-input-dropdown-act",
'tagsactoken' : "css=.token-input-token-act",
'tagsacdelete' : "css=.token-input-delete-token-act",
'submit' : "css=#hubForm [type='submit']",
}
| mit | -283,961,049,155,576,580 | 38.195122 | 88 | 0.58743 | false |
solintegra/addons | sale_order_allowed_product_template/models/sale_order.py | 17 | 1156 | # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, fields, api
class SaleOrder(models.Model):
_inherit = "sale.order"
allowed_templates = fields.Many2many(
comodel_name='product.template', string='Allowed product templates')
@api.one
@api.onchange('only_allowed_products')
def onchange_only_allowed_products(self):
template_obj = self.env['product.template']
self.allowed_templates = template_obj.search([('sale_ok', '=', True)])
if self.only_allowed_products and self.partner_id:
supplierinfos = self.env['product.supplierinfo'].search(
[('type', '=', 'customer'),
('name', 'in', (self.partner_id.commercial_partner_id.id,
self.partner_id.id))])
self.allowed_templates = template_obj.search(
[('id', 'in', [x.product_tmpl_id.id for x in supplierinfos])])
| agpl-3.0 | -88,227,393,594,731,650 | 45.24 | 78 | 0.525952 | false |
dwadler/QGIS | tests/src/python/test_authmanager_ogr.py | 21 | 4558 | # -*- coding: utf-8 -*-
"""
Tests for auth manager Basic Auth OGR connection credentials injection
From build dir, run: ctest -R PyQgsAuthManagerOgr -V
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
from qgis.core import (
QgsApplication,
QgsAuthManager,
QgsAuthMethodConfig,
QgsDataSourceUri,
QgsProviderRegistry,
)
from qgis.testing import (
start_app,
unittest,
)
__author__ = 'Alessandro Pasotti'
__date__ = '14/11/2017'
__copyright__ = 'Copyright 2017, The QGIS Project'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
qgis_app = start_app()
# Note: value is checked with "in" because some drivers may need additional arguments,
# like temporary paths with rootcerts for PG
TEST_URIS = {
"http://mysite.com/geojson authcfg='%s'": "http://username:[email protected]/geojson",
"PG:\"dbname='databasename' host='addr' port='5432' authcfg='%s'\"": "PG:\"dbname='databasename' host='addr' port='5432' user='username' password='password'",
'SDE:127.0.0.1,12345,dbname, authcfg=\'%s\'': 'SDE:127.0.0.1,12345,dbname,username,password',
'IDB:"server=demo_on user=informix dbname=frames authcfg=\'%s\'"': 'IDB:"server=demo_on user=informix dbname=frames user=username pass=password"',
'@driver=ingres,dbname=test,tables=usa/canada authcfg=\'%s\'': '@driver=ingres,dbname=test,tables=usa/canada,userid=username,password=password',
'MySQL:westholland,port=3306,tables=bedrijven authcfg=\'%s\'': 'MySQL:westholland,port=3306,tables=bedrijven,user=username,password=password',
'MSSQL:server=.\MSSQLSERVER2008;database=dbname;trusted_connection=yes authcfg=\'%s\'': 'MSSQL:server=.\MSSQLSERVER2008;database=dbname;uid=username;pwd=password',
'OCI:/@database_instance:table,table authcfg=\'%s\'': 'OCI:username/password@database_instance:table,table',
'ODBC:database_instance authcfg=\'%s\'': 'ODBC:username/password@database_instance',
'couchdb://myconnection authcfg=\'%s\'': 'couchdb://username:password@myconnection',
'http://www.myconnection.com/geojson authcfg=\'%s\'': 'http://username:[email protected]/geojson',
'https://www.myconnection.com/geojson authcfg=\'%s\'': 'https://username:[email protected]/geojson',
'ftp://www.myconnection.com/geojson authcfg=\'%s\'': 'ftp://username:[email protected]/geojson',
'DODS://www.myconnection.com/geojson authcfg=\'%s\'': 'DODS://username:[email protected]/geojson',
}
class TestAuthManager(unittest.TestCase):
@classmethod
def setUpAuth(cls):
"""Run before all tests and set up authentication"""
authm = QgsApplication.authManager()
assert (authm.setMasterPassword('masterpassword', True))
# Client side
cls.auth_config = QgsAuthMethodConfig("Basic")
cls.auth_config.setConfig('username', cls.username)
cls.auth_config.setConfig('password', cls.password)
cls.auth_config.setName('test_basic_auth_config')
assert (authm.storeAuthenticationConfig(cls.auth_config)[0])
assert cls.auth_config.isValid()
cls.authcfg = cls.auth_config.id()
@classmethod
def setUpClass(cls):
"""Run before all tests:
Creates an auth configuration"""
cls.username = 'username'
cls.password = 'password'
cls.dbname = 'test_basic'
cls.hostname = 'localhost'
cls.setUpAuth()
@classmethod
def tearDownClass(cls):
"""Run after all tests"""
pass
def setUp(self):
"""Run before each test."""
pass
def tearDown(self):
"""Run after each test."""
pass
def testConnections(self):
"""
Test credentials injection
"""
pr = QgsProviderRegistry.instance().createProvider('ogr', '')
for uri, expanded in TEST_URIS.items():
pr.setDataSourceUri(uri % self.authcfg)
self.assertTrue(expanded in pr.dataSourceUri(True), "%s != %s" % (expanded, pr.dataSourceUri(True)))
# Test sublayers
for uri, expanded in TEST_URIS.items():
pr.setDataSourceUri((uri + '|sublayer1') % self.authcfg)
self.assertEqual(pr.dataSourceUri(True).split('|')[1], "sublayer1", pr.dataSourceUri(True))
if __name__ == '__main__':
unittest.main()
| gpl-2.0 | 6,350,448,525,569,748,000 | 39.696429 | 167 | 0.672883 | false |
Seldaiendil/meyeOS | devtools/qooxdoo-1.5-sdk/tool/pylib/misc/copytool.py | 1 | 7378 | #! /usr/bin/env python
################################################################################
#
# qooxdoo - the new era of web development
#
# http://qooxdoo.org
#
# Copyright:
# 2006-2010 1&1 Internet AG, Germany, http://www.1und1.de
#
# License:
# LGPL: http://www.gnu.org/licenses/lgpl.html
# EPL: http://www.eclipse.org/org/documents/epl-v10.php
# See the LICENSE file in the project's top-level directory for details.
#
# Authors:
# * Daniel Wagner (d_wagner)
#
################################################################################
import os, sys
import optparse
import shutil
import filecmp
import stat
sys.path.append(os.path.abspath(os.pardir))
from misc.ExtendAction import ExtendAction
class DummyConsole(object):
def debug(self, msg):
pass
def error(self, msg):
print msg
class CopyTool(object):
def __init__(self, console=DummyConsole()):
self.__console = console
def do_work(self):
if not os.path.exists(self.__source):
raise IOError(2, "No such file: '%s'" %self.__source)
if os.path.exists(self.__targetDir):
if os.path.isfile(self.__targetDir):
raise Exception("Expected a directory but '%s' is a file." %self.__targetDir)
if os.path.isfile(self.__source):
self.__copyFileToDir(self.__source, self.__targetDir)
if os.path.isdir(self.__source):
self.__copyDirToDir(self.__source, self.__targetDir)
def __copyFileToDir(self, sourceFile, targetDir):
self.__console.debug("Copying file %s to directory %s." %(sourceFile, targetDir))
sourceFileName = os.path.basename(sourceFile)
if sourceFileName in self.__exclude:
return
if not os.path.isdir(targetDir):
if self.__create:
self.__console.debug("Creating directory %s." %targetDir)
os.makedirs(targetDir)
else:
raise IOError(2, "No such directory: '%s'" %targetDir)
targetPath = os.path.join(targetDir, sourceFileName)
if os.path.exists(targetPath):
if os.path.isfile(targetPath):
if self.__update:
if not self.__isNewerThan(sourceFile, targetPath):
self.__console.debug("Existing file %s is newer than source file %s, ignoring it." %(targetPath, sourceFile))
return
if not os.access(targetPath, os.W_OK):
self.__console.debug("Removing write-protected target File %s prior to copy." %targetPath)
try:
os.remove(targetPath)
except OSError:
try:
os.chmod(targetPath, stat.S_IWUSR)
except Exception, e:
self.__console.error("Unable to overwrite read-only file %s: %s" %(str(e), targetPath))
try:
shutil.copy(sourceFile, targetPath)
except (IOError, OSError), e:
self.__console.error("Error copying file %s to dir %s: %s" %(sourceFile, targetPath, str(e)))
def __isNewerThan(self, sourceFile, targetFile):
sourceStat = os.stat(sourceFile)
sourceCreat = sourceStat.st_ctime
sourceMod = sourceStat.st_mtime
targetMod = os.stat(targetFile).st_mtime
return (sourceMod > targetMod) or (sourceCreat > targetMod)
def __copyDirToDir(self, sourceDir, targetDir, recursive=False):
self.__console.debug("Copying directory %s to %s." %(sourceDir, targetDir))
sourceDirName = os.path.basename(sourceDir)
if sourceDirName in self.__exclude:
self.__console.debug("Skipping excluded directory %s." %sourceDir)
return
if self.__synchronize and not recursive:
targetPath = targetDir
else:
targetPath = os.path.join(targetDir, sourceDirName)
if not os.path.isdir(targetPath):
if self.__create:
self.__console.debug("Creating directory %s." %targetDir)
os.makedirs(targetPath)
else:
raise IOError(2, "No such directory: '%s'" %targetPath)
compare = filecmp.dircmp(sourceDir, targetPath)
for entry in compare.left_only:
entryPath = os.path.join(sourceDir, entry)
if entry in self.__exclude:
self.__console.debug("Skipping excluded item %s." %entryPath)
continue
if os.path.isfile(entryPath):
self.__copyFileToDir(entryPath, targetPath)
if os.path.isdir(entryPath):
self.__copyDirToDir(entryPath, targetPath, True)
for entry in compare.common_dirs:
entryPath = os.path.join(sourceDir, entry)
if entry in self.__exclude:
self.__console.debug("Skipping excluded directory %s." %entryPath)
continue
self.__copyDirToDir(entryPath, targetPath, True)
for entry in compare.common_files:
entryPath = os.path.join(sourceDir, entry)
if entry in self.__exclude:
self.__console.debug("Skipping excluded file %s." %entryPath)
continue
self.__copyFileToDir(entryPath, targetPath)
def parse_args(self, argumentList=sys.argv[1:]):
parser = optparse.OptionParser(option_class=ExtendAction)
usage_str = '''%prog [options] SOURCE TARGET
copy file or directory SOURCE to directory TARGET'''
parser.set_usage(usage_str)
parser.add_option(
"-s", "--synchronize", dest="synchronize", action="store_true", default=False,
help="synchronize the contents of the source and target directories"
)
parser.add_option(
"-u", "--update-only", dest="update", action="store_true", default=False,
help="only overwrite existing files if the source file is newer"
)
parser.add_option(
"-n", "--no-new-dirs", dest="create", action="store_false", default=True,
help="do not create any source directories that don't already exist in the target path"
)
parser.add_option(
"-x", "--exclude", dest="exclude", default=[], action="extend", type="string",
help="list of file or directory names that should not be copied"
)
(options, args) = parser.parse_args(argumentList)
if not len(args) == 2:
raise RuntimeError( "Missing argument, use -h for help.")
self.__source = os.path.abspath(args[0])
self.__targetDir = os.path.abspath(args[1])
self.__synchronize = options.synchronize
self.__exclude = options.exclude
self.__create = options.create
self.__update = options.update
def main():
copier = CopyTool()
copier.parse_args()
copier.do_work()
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
print
print " * Keyboard Interrupt"
sys.exit(1)
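# Command-line sketch (flags as defined in parse_args above):
#   python copytool.py --synchronize --update-only -x .svn -x .git src/ dest/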
| agpl-3.0 | -6,723,039,339,172,752,000 | 34.990244 | 133 | 0.555435 | false |
Alwnikrotikz/tuxedo | src/config.py | 2 | 13411 | #
# Copyright 2008 Charles Connell <[email protected]>
#
# This file is part of Tuxedo.
#
# Tuxedo is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# Tuxedo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Tuxedo; if not, see <http://gnu.org/licenses/.>
#
import os
import ConfigParser
import task
import statusconfig
# Constant for path of configuration file
CONFIGPATH = os.path.join(os.path.expanduser('~'), '.tuxedo_config')
class Config:
"""Abstraction of config file, accessed through ConfigParser. This is a singleton class; initizalize to use it"""
# storage for the instance reference
__instance = None
def __init__(self):
""" Create singleton instance """
# Check whether we already have an instance
if Config.__instance is None:
# Create and remember instance
Config.__instance = Config.__impl()
# Store instance reference as the only member in the handle
self.__dict__['_Config__instance'] = Config.__instance
def __getattr__(self, attr):
""" Delegate access to implementation """
return getattr(self.__instance, attr)
def __setattr__(self, attr, value):
""" Delegate access to implementation """
return setattr(self.__instance, attr, value)
class __impl:
""" Implementation of the singleton interface """
def __init__(self):
"""Initializes default values and reads from config file the real values."""
self.config = ConfigParser.ConfigParser()
self.config.add_section('priority_colors')
self.config.set('priority_colors', 'lowest', '#9999FF')
self.config.set('priority_colors', 'low', '#CCCCFF')
self.config.set('priority_colors', 'normal', '#FFFFFF')
self.config.set('priority_colors', 'high', '#FFAAAA')
self.config.set('priority_colors', 'highest', '#FF4444')
self.config.add_section('text_colors')
self.config.set('text_colors', 'lowest', '#000000')
self.config.set('text_colors', 'low', '#000000')
self.config.set('text_colors', 'normal', '#000000')
self.config.set('text_colors', 'high', '#000000')
self.config.set('text_colors', 'highest', '#000000')
self.config.add_section('priority_names')
self.config.set('priority_names', 'lowest', 'Lowest')
self.config.set('priority_names', 'low', 'Low')
self.config.set('priority_names', 'normal', 'Normal')
self.config.set('priority_names', 'high', 'High')
self.config.set('priority_names', 'highest', 'Highest')
self.config.add_section('status_names')
self.config.set('status_names', 'not_started', 'Not Started')
self.config.set('status_names', 'in_progress', 'In Progress')
self.config.set('status_names', 'completed', 'Completed')
self.config.add_section('status_options_not_started')
self.config.set('status_options_not_started', 'bold', 'no')
self.config.set('status_options_not_started', 'italic', 'no')
self.config.set('status_options_not_started', 'underline', 'no')
self.config.set('status_options_not_started', 'strikethrough', 'no')
self.config.add_section('status_options_in_progress')
self.config.set('status_options_in_progress', 'bold', 'no')
self.config.set('status_options_in_progress', 'italic', 'yes')
self.config.set('status_options_in_progress', 'underline', 'no')
self.config.set('status_options_in_progress', 'strikethrough', 'no')
self.config.add_section('status_options_completed')
self.config.set('status_options_completed', 'bold', 'no')
self.config.set('status_options_completed', 'italic', 'no')
self.config.set('status_options_completed', 'underline', 'no')
self.config.set('status_options_completed', 'strikethrough', 'yes')
self.config.read(CONFIGPATH)
def priorityColors(self):
"""Make list of colors from config backend and return it."""
prioritycolors = [None]*5
prioritycolors[task.TASK_PRIORITY_LOWEST] = self.config.get('priority_colors', 'lowest')
prioritycolors[task.TASK_PRIORITY_LOW] = self.config.get('priority_colors', 'low')
prioritycolors[task.TASK_PRIORITY_NORMAL] = self.config.get('priority_colors', 'normal')
prioritycolors[task.TASK_PRIORITY_HIGH] = self.config.get('priority_colors', 'high')
prioritycolors[task.TASK_PRIORITY_HIGHEST] = self.config.get('priority_colors', 'highest')
return prioritycolors
def setPriorityColors(self, prioritycolors):
"""Copy colors into config backend."""
self.config.set('priority_colors', 'lowest', prioritycolors[task.TASK_PRIORITY_LOWEST])
self.config.set('priority_colors', 'low', prioritycolors[task.TASK_PRIORITY_LOW])
self.config.set('priority_colors', 'normal', prioritycolors[task.TASK_PRIORITY_NORMAL])
self.config.set('priority_colors', 'high', prioritycolors[task.TASK_PRIORITY_HIGH])
self.config.set('priority_colors', 'highest', prioritycolors[task.TASK_PRIORITY_HIGHEST])
def textColors(self):
"""Make list of colors from config backend and return it."""
textcolors = [None]*5
textcolors[task.TASK_PRIORITY_LOWEST] = self.config.get('text_colors', 'lowest')
textcolors[task.TASK_PRIORITY_LOW] = self.config.get('text_colors', 'low')
textcolors[task.TASK_PRIORITY_NORMAL] = self.config.get('text_colors', 'normal')
textcolors[task.TASK_PRIORITY_HIGH] = self.config.get('text_colors', 'high')
textcolors[task.TASK_PRIORITY_HIGHEST] = self.config.get('text_colors', 'highest')
return textcolors
def setTextColors(self, textcolors):
"""Copy colors into config backend."""
self.config.set('text_colors', 'lowest', textcolors[task.TASK_PRIORITY_LOWEST])
self.config.set('text_colors', 'low', textcolors[task.TASK_PRIORITY_LOW])
self.config.set('text_colors', 'normal', textcolors[task.TASK_PRIORITY_NORMAL])
self.config.set('text_colors', 'high', textcolors[task.TASK_PRIORITY_HIGH])
self.config.set('text_colors', 'highest', textcolors[task.TASK_PRIORITY_HIGHEST])
def priorityNames(self):
"""Make list of names from config backend and return it."""
prioritynames = [None]*5
prioritynames[task.TASK_PRIORITY_LOWEST] = self.config.get('priority_names', 'lowest')
prioritynames[task.TASK_PRIORITY_LOW] = self.config.get('priority_names', 'low')
prioritynames[task.TASK_PRIORITY_NORMAL] = self.config.get('priority_names', 'normal')
prioritynames[task.TASK_PRIORITY_HIGH] = self.config.get('priority_names', 'high')
prioritynames[task.TASK_PRIORITY_HIGHEST] = self.config.get('priority_names', 'highest')
return prioritynames
def setPriorityNames(self, prioritynames):
"""Copy names into config backend."""
self.config.set('priority_names', 'lowest', prioritynames[task.TASK_PRIORITY_LOWEST])
self.config.set('priority_names', 'low', prioritynames[task.TASK_PRIORITY_LOW])
self.config.set('priority_names', 'normal', prioritynames[task.TASK_PRIORITY_NORMAL])
self.config.set('priority_names', 'high', prioritynames[task.TASK_PRIORITY_HIGH])
self.config.set('priority_names', 'highest', prioritynames[task.TASK_PRIORITY_HIGHEST])
def statusNames(self):
"""Make list of names from config backend and return it."""
statusnames = [None]*3
statusnames[task.TASK_NOTSTARTED] = self.config.get('status_names', 'not_started')
statusnames[task.TASK_INPROGRESS] = self.config.get('status_names', 'in_progress')
statusnames[task.TASK_COMPLETED] = self.config.get('status_names', 'completed')
return statusnames
def setStatusNames(self, statusnames):
"""Copy names into config backend."""
self.config.set('status_names', 'not_started', statusnames[task.TASK_NOTSTARTED])
self.config.set('status_names', 'in_progress', statusnames[task.TASK_INPROGRESS])
self.config.set('status_names', 'completed', statusnames[task.TASK_COMPLETED])
def setStatusOptions(self, statusoptions):
"""Copy options into config backend."""
# Make local copy of statusoptions
options = []
for l in statusoptions:
options.append(list(l))
# Scan through each item in 2D array options. Convert each item from boolean to string
for i, l in enumerate(options):
for j, o in enumerate(l):
if l[j] == True:
l[j] = 'yes'
else:
l[j] = 'no'
# Set options in config backend
self.config.set('status_options_not_started', 'bold', options[task.TASK_NOTSTARTED][statusconfig.STATUS_BOLD])
self.config.set('status_options_not_started', 'italic', options[task.TASK_NOTSTARTED][statusconfig.STATUS_ITALIC])
self.config.set('status_options_not_started', 'underline', options[task.TASK_NOTSTARTED][statusconfig.STATUS_UNDERLINE])
self.config.set('status_options_not_started', 'strikethrough', options[task.TASK_NOTSTARTED][statusconfig.STATUS_STRIKETHROUGH])
self.config.set('status_options_in_progress', 'bold', options[task.TASK_INPROGRESS][statusconfig.STATUS_BOLD])
self.config.set('status_options_in_progress', 'italic', options[task.TASK_INPROGRESS][statusconfig.STATUS_ITALIC])
self.config.set('status_options_in_progress', 'underline', options[task.TASK_INPROGRESS][statusconfig.STATUS_UNDERLINE])
self.config.set('status_options_in_progress', 'strikethrough', options[task.TASK_INPROGRESS][statusconfig.STATUS_STRIKETHROUGH])
self.config.set('status_options_completed', 'bold', options[task.TASK_COMPLETED][statusconfig.STATUS_BOLD])
self.config.set('status_options_completed', 'italic', options[task.TASK_COMPLETED][statusconfig.STATUS_ITALIC])
self.config.set('status_options_completed', 'underline', options[task.TASK_COMPLETED][statusconfig.STATUS_UNDERLINE])
self.config.set('status_options_completed', 'strikethrough', options[task.TASK_COMPLETED][statusconfig.STATUS_STRIKETHROUGH])
def statusOptions(self):
"""Make list of options from config backend and return it."""
statusoptions = [[bool, bool, bool, bool], [bool, bool, bool, bool], [bool, bool, bool, bool]]
statusoptions[task.TASK_NOTSTARTED][statusconfig.STATUS_BOLD] = self.config.getboolean('status_options_not_started', 'bold')
statusoptions[task.TASK_NOTSTARTED][statusconfig.STATUS_ITALIC] = self.config.getboolean('status_options_not_started', 'italic')
statusoptions[task.TASK_NOTSTARTED][statusconfig.STATUS_UNDERLINE] = self.config.getboolean('status_options_not_started', 'underline')
statusoptions[task.TASK_NOTSTARTED][statusconfig.STATUS_STRIKETHROUGH] = self.config.getboolean('status_options_not_started', 'strikethrough')
statusoptions[task.TASK_INPROGRESS][statusconfig.STATUS_BOLD] = self.config.getboolean('status_options_in_progress', 'bold')
statusoptions[task.TASK_INPROGRESS][statusconfig.STATUS_ITALIC] = self.config.getboolean('status_options_in_progress', 'italic')
statusoptions[task.TASK_INPROGRESS][statusconfig.STATUS_UNDERLINE] = self.config.getboolean('status_options_in_progress', 'underline')
statusoptions[task.TASK_INPROGRESS][statusconfig.STATUS_STRIKETHROUGH] = self.config.getboolean('status_options_in_progress', 'strikethrough')
statusoptions[task.TASK_COMPLETED][statusconfig.STATUS_BOLD] = self.config.getboolean('status_options_completed', 'bold')
statusoptions[task.TASK_COMPLETED][statusconfig.STATUS_ITALIC] = self.config.getboolean('status_options_completed', 'italic')
statusoptions[task.TASK_COMPLETED][statusconfig.STATUS_UNDERLINE] = self.config.getboolean('status_options_completed', 'underline')
statusoptions[task.TASK_COMPLETED][statusconfig.STATUS_STRIKETHROUGH] = self.config.getboolean('status_options_completed', 'strikethrough')
return statusoptions
def save(self):
"""Open config file and save config options to it."""
fp = open(CONFIGPATH, 'w')
self.config.write(fp)
fp.close()
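# Usage sketch: Config() always delegates to one shared __impl instance, so
# repeated construction is cheap and state is global (list order follows the
# task.TASK_* constants used above):
#   cfg = Config()
#   names = cfg.priorityNames()     # e.g. ['Lowest', 'Low', 'Normal', ...]
#   cfg.setStatusNames(['Todo', 'Doing', 'Done'])
#   cfg.save()                      # writes ~/.tuxedo_config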
| gpl-2.0 | -7,962,570,878,012,165,000 | 57.820175 | 154 | 0.650884 | false |
ListFranz/torngas | torngas/cache/backends/localcache.py | 3 | 3889 | "Thread-safe in-memory cache backend."
import time
from base import BaseCache, DEFAULT_TIMEOUT
from torngas.utils import RWLock
# Global in-memory store of cache data. Keyed by name, to provide
# multiple named local memory caches.
_caches = {}
_expire_info = {}
_locks = {}
class LocMemCache(BaseCache):
def __init__(self, name, params):
BaseCache.__init__(self, params)
self._cache = _caches.setdefault(name, {})
self._expire_info = _expire_info.setdefault(name, {})
self._lock = _locks.setdefault(name, RWLock())
def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
key = self.make_key(key, version=version)
self.validate_key(key)
with self._lock.writer():
if self._has_expired(key):
self._set(key, value, timeout)
return True
return False
def get(self, key, default=None, version=None):
key = self.make_key(key, version=version)
self.validate_key(key)
value = None
with self._lock.reader():
if not self._has_expired(key):
value = self._cache[key]
if value is not None:
return value
with self._lock.writer():
try:
del self._cache[key]
del self._expire_info[key]
except KeyError:
pass
return default
def _set(self, key, value, timeout=DEFAULT_TIMEOUT):
if len(self._cache) >= self._max_entries:
self._cull()
self._cache[key] = value
self._expire_info[key] = self.get_backend_timeout(timeout)
def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
key = self.make_key(key, version=version)
self.validate_key(key)
with self._lock.writer():
self._set(key, value, timeout)
def incr(self, key, delta=1, version=None):
value = self.get(key, version=version)
if value is None:
raise ValueError("Key '%s' not found" % key)
new_value = value + delta
key = self.make_key(key, version=version)
with self._lock.writer():
self._cache[key] = new_value
return new_value
def has_key(self, key, version=None):
key = self.make_key(key, version=version)
self.validate_key(key)
with self._lock.reader():
if not self._has_expired(key):
return True
with self._lock.writer():
try:
del self._cache[key]
del self._expire_info[key]
except KeyError:
pass
return False
def _has_expired(self, key):
exp = self._expire_info.get(key, -1)
if exp is None or exp > time.time():
return False
return True
def _cull(self):
if self._cull_frequency == 0:
self.clear()
else:
doomed = [k for (i, k) in enumerate(self._cache) if i % self._cull_frequency == 0]
for k in doomed:
self._delete(k)
def _delete(self, key):
try:
del self._cache[key]
except KeyError:
pass
try:
del self._expire_info[key]
except KeyError:
pass
def delete(self, key, version=None):
key = self.make_key(key, version=version)
self.validate_key(key)
with self._lock.writer():
self._delete(key)
def clear(self):
self._cache.clear()
self._expire_info.clear()
def clear_expires(self, **kwargs):
for key in self._cache.keys():
with self._lock.writer():
exp = self._expire_info.get(key)
if exp and exp < time.time():
self._cache.pop(key)
self._expire_info.pop(key)
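# Usage sketch (hedged: assumes torngas' BaseCache mirrors Django's and
# accepts a params dict, here left empty for the defaults):
#   cache = LocMemCache('default', {})
#   cache.set('answer', 42, timeout=30)
#   cache.get('answer')             # -> 42 until the timeout expires
#   cache.incr('answer')            # -> 43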
| bsd-3-clause | 178,260,659,626,336,350 | 29.147287 | 94 | 0.541013 | false |
trachelr/mne-python | mne/inverse_sparse/tests/test_gamma_map.py | 16 | 2430 | # Author: Martin Luessi <[email protected]>
#
# License: Simplified BSD
import os.path as op
import numpy as np
from nose.tools import assert_true
from numpy.testing import assert_array_almost_equal
from mne.datasets import testing
from mne import read_cov, read_forward_solution, read_evokeds
from mne.cov import regularize
from mne.inverse_sparse import gamma_map
from mne import pick_types_forward
from mne.utils import run_tests_if_main, slow_test
data_path = testing.data_path(download=False)
fname_evoked = op.join(data_path, 'MEG', 'sample',
'sample_audvis-ave.fif')
fname_cov = op.join(data_path, 'MEG', 'sample', 'sample_audvis-cov.fif')
fname_fwd = op.join(data_path, 'MEG', 'sample',
'sample_audvis_trunc-meg-eeg-oct-6-fwd.fif')
subjects_dir = op.join(data_path, 'subjects')
@slow_test
@testing.requires_testing_data
def test_gamma_map():
"""Test Gamma MAP inverse"""
forward = read_forward_solution(fname_fwd, force_fixed=False,
surf_ori=True)
forward = pick_types_forward(forward, meg=False, eeg=True)
evoked = read_evokeds(fname_evoked, condition=0, baseline=(None, 0))
evoked.resample(50)
evoked.crop(tmin=0, tmax=0.3)
cov = read_cov(fname_cov)
cov = regularize(cov, evoked.info)
alpha = 0.2
stc = gamma_map(evoked, forward, cov, alpha, tol=1e-5,
xyz_same_gamma=True, update_mode=1, verbose=False)
assert_array_almost_equal(stc.times, evoked.times, 5)
idx = np.argmax(np.sum(stc.data ** 2, axis=1))
assert_true(np.concatenate(stc.vertices)[idx] == 96397)
stc = gamma_map(evoked, forward, cov, alpha, tol=1e-5,
xyz_same_gamma=False, update_mode=1, verbose=False)
assert_array_almost_equal(stc.times, evoked.times, 5)
idx = np.argmax(np.sum(stc.data ** 2, axis=1))
assert_true(np.concatenate(stc.vertices)[idx] == 82010)
# force fixed orientation
stc, res = gamma_map(evoked, forward, cov, alpha, tol=1e-5,
xyz_same_gamma=False, update_mode=2,
loose=None, return_residual=True, verbose=False)
assert_array_almost_equal(stc.times, evoked.times, 5)
idx = np.argmax(np.sum(stc.data ** 2, axis=1))
# assert_true(np.concatenate(stc.vertices)[idx] == 83398) # XXX FIX
assert_array_almost_equal(evoked.times, res.times)
run_tests_if_main()
| bsd-3-clause | -6,271,904,186,429,369,000 | 36.96875 | 73 | 0.659259 | false |
azonenberg/yosys | backends/edif/runtest.py | 5 | 4333 | #!/usr/bin/env python3
import os
import numpy as np
enable_upto = True
enable_offset = True
enable_hierarchy = True
enable_logic = True
def make_module(f, modname, width, subs):
print("module %s (A, B, C, X, Y, Z);" % modname, file=f)
inbits = list()
outbits = list()
for p in "ABC":
offset = np.random.randint(10) if enable_offset else 0
if enable_upto and np.random.randint(2):
print(" input [%d:%d] %s;" % (offset, offset+width-1, p), file=f)
else:
print(" input [%d:%d] %s;" % (offset+width-1, offset, p), file=f)
for i in range(offset, offset+width):
inbits.append("%s[%d]" % (p, i))
for p in "XYZ":
offset = np.random.randint(10) if enable_offset else 0
if enable_upto and np.random.randint(2):
print(" output [%d:%d] %s;" % (offset, offset+width-1, p), file=f)
else:
print(" output [%d:%d] %s;" % (offset+width-1, offset, p), file=f)
for i in range(offset, offset+width):
outbits.append("%s[%d]" % (p, i))
instidx = 0
subcandidates = list(subs.keys())
while len(outbits) > 0:
submod = None
if len(subcandidates):
submod = np.random.choice(subcandidates)
subcandidates.remove(submod)
if submod is None or 3*subs[submod] >= len(outbits):
for bit in outbits:
if enable_logic:
print(" assign %s = %s & ~%s;" % (bit, np.random.choice(inbits), np.random.choice(inbits)), file=f)
else:
print(" assign %s = %s;" % (bit, np.random.choice(inbits)), file=f)
break
instidx += 1
print(" %s inst%d (" % (submod, instidx), file=f)
for p in "ABC":
print(" .%s({%s})," % (p, ",".join(np.random.choice(inbits, subs[submod]))), file=f)
for p in "XYZ":
bits = list(np.random.choice(outbits, subs[submod], False))
for bit in bits:
outbits.remove(bit)
print(" .%s({%s})%s" % (p, ",".join(bits), "," if p != "Z" else ""), file=f)
print(" );", file=f);
print("endmodule", file=f)
with open("test_top.v", "w") as f:
if enable_hierarchy:
make_module(f, "sub1", 2, {})
make_module(f, "sub2", 3, {})
make_module(f, "sub3", 4, {})
make_module(f, "sub4", 8, {"sub1": 2, "sub2": 3, "sub3": 4})
make_module(f, "sub5", 8, {"sub1": 2, "sub2": 3, "sub3": 4})
make_module(f, "sub6", 8, {"sub1": 2, "sub2": 3, "sub3": 4})
make_module(f, "top", 32, {"sub4": 8, "sub5": 8, "sub6": 8})
else:
make_module(f, "top", 32, {})
os.system("set -x; ../../yosys -p 'synth_xilinx -top top; write_edif -pvector par test_syn.edif' test_top.v")
with open("test_syn.tcl", "w") as f:
print("read_edif test_syn.edif", file=f)
print("link_design", file=f)
print("write_verilog -force test_syn.v", file=f)
os.system("set -x; vivado -nojournal -nolog -mode batch -source test_syn.tcl")
with open("test_tb.v", "w") as f:
print("module tb;", file=f)
print(" reg [31:0] A, B, C;", file=f)
print(" wire [31:0] X, Y, Z;", file=f)
print("", file=f)
print(" top uut (", file=f)
print(" .A(A),", file=f)
print(" .B(B),", file=f)
print(" .C(C),", file=f)
print(" .X(X),", file=f)
print(" .Y(Y),", file=f)
print(" .Z(Z)", file=f)
print(" );", file=f)
print("", file=f)
print(" initial begin", file=f)
for i in range(100):
print(" A = 32'h%08x;" % np.random.randint(2**32), file=f)
print(" B = 32'h%08x;" % np.random.randint(2**32), file=f)
print(" C = 32'h%08x;" % np.random.randint(2**32), file=f)
print(" #10;", file=f)
print(" $display(\"%x %x %x\", X, Y, Z);", file=f)
print(" #10;", file=f)
print(" $finish;", file=f)
print(" end", file=f)
print("endmodule", file=f)
os.system("set -x; iverilog -o test_gold test_tb.v test_top.v")
os.system("set -x; iverilog -o test_gate test_tb.v test_syn.v ../../techlibs/xilinx/cells_sim.v")
os.system("set -x; ./test_gold > test_gold.out")
os.system("set -x; ./test_gate > test_gate.out")
os.system("set -x; md5sum test_gold.out test_gate.out")
| isc | 6,386,841,779,757,902,000 | 34.809917 | 121 | 0.518809 | false |
tboyce1/home-assistant | homeassistant/components/notify/gntp.py | 13 | 2803 | """
GNTP (aka Growl) notification service.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/notify.gntp/
"""
import logging
import os
import voluptuous as vol
from homeassistant.components.notify import (
ATTR_TITLE, ATTR_TITLE_DEFAULT, PLATFORM_SCHEMA, BaseNotificationService)
from homeassistant.const import CONF_PASSWORD, CONF_PORT
import homeassistant.helpers.config_validation as cv
REQUIREMENTS = ['gntp==1.0.3']
_LOGGER = logging.getLogger(__name__)
_GNTP_LOGGER = logging.getLogger('gntp')
_GNTP_LOGGER.setLevel(logging.ERROR)
CONF_APP_NAME = 'app_name'
CONF_APP_ICON = 'app_icon'
CONF_HOSTNAME = 'hostname'
DEFAULT_APP_NAME = 'HomeAssistant'
DEFAULT_HOST = 'localhost'
DEFAULT_PORT = 23053
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_APP_NAME, default=DEFAULT_APP_NAME): cv.string,
vol.Optional(CONF_APP_ICON): vol.Url,
vol.Optional(CONF_HOSTNAME, default=DEFAULT_HOST): cv.string,
vol.Optional(CONF_PASSWORD): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
})
def get_service(hass, config, discovery_info=None):
"""Get the GNTP notification service."""
if config.get(CONF_APP_ICON) is None:
icon_file = os.path.join(os.path.dirname(__file__), "..", "frontend",
"www_static", "icons", "favicon-192x192.png")
with open(icon_file, 'rb') as file:
app_icon = file.read()
else:
app_icon = config.get(CONF_APP_ICON)
return GNTPNotificationService(config.get(CONF_APP_NAME),
app_icon,
config.get(CONF_HOSTNAME),
config.get(CONF_PASSWORD),
config.get(CONF_PORT))
class GNTPNotificationService(BaseNotificationService):
"""Implement the notification service for GNTP."""
def __init__(self, app_name, app_icon, hostname, password, port):
"""Initialize the service."""
import gntp.notifier
import gntp.errors
self.gntp = gntp.notifier.GrowlNotifier(
applicationName=app_name,
notifications=["Notification"],
applicationIcon=app_icon,
hostname=hostname,
password=password,
port=port
)
try:
self.gntp.register()
except gntp.errors.NetworkError:
_LOGGER.error("Unable to register with the GNTP host")
return
def send_message(self, message="", **kwargs):
"""Send a message to a user."""
self.gntp.notify(noteType="Notification",
title=kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT),
description=message)
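# Example configuration.yaml entry (a sketch; keys follow PLATFORM_SCHEMA
# above, and 'name' comes from the base notify platform):
#
# notify:
#   - platform: gntp
#     name: growl
#     hostname: 192.168.0.10
#     port: 23053
#     password: YOUR_PASSWORD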
| apache-2.0 | -5,882,129,304,215,527,000 | 32.369048 | 78 | 0.625045 | false |
rodelrod/putsches | askbot/management/commands/send_accept_answer_reminders.py | 2 | 3654 | import datetime
from django.core.management.base import NoArgsCommand
from django.conf import settings as django_settings
from django.template.loader import get_template
from askbot import models
from askbot import const
from askbot.conf import settings as askbot_settings
from django.utils.translation import ugettext as _
from django.utils.translation import ungettext
from askbot import mail
from askbot.utils.classes import ReminderSchedule
from django.template import Context
DEBUG_THIS_COMMAND = False
class Command(NoArgsCommand):
def handle_noargs(self, **options):
if askbot_settings.ENABLE_EMAIL_ALERTS == False:
return
if askbot_settings.ENABLE_ACCEPT_ANSWER_REMINDERS == False:
return
#get questions without answers, excluding closed and deleted
#order it by descending added_at date
schedule = ReminderSchedule(
askbot_settings.DAYS_BEFORE_SENDING_ACCEPT_ANSWER_REMINDER,
askbot_settings.ACCEPT_ANSWER_REMINDER_FREQUENCY,
askbot_settings.MAX_ACCEPT_ANSWER_REMINDERS
)
questions = models.Post.objects.get_questions().exclude(
deleted = True
).added_between(
start = schedule.start_cutoff_date,
end = schedule.end_cutoff_date
).filter(
thread__answer_count__gt = 0
).filter(
thread__accepted_answer__isnull=True #answer_accepted = False
).order_by('-added_at')
#for all users, excluding blocked
#for each user, select a tag filtered subset
#format the email reminder and send it
for user in models.User.objects.exclude(status = 'b'):
user_questions = questions.filter(author = user)
final_question_list = user_questions.get_questions_needing_reminder(
activity_type = const.TYPE_ACTIVITY_ACCEPT_ANSWER_REMINDER_SENT,
user = user,
recurrence_delay = schedule.recurrence_delay
)
#todo: rewrite using query set filter
#may be a lot more efficient
question_count = len(final_question_list)
if question_count == 0:
continue
subject_line = _(
'Accept the best answer for %(question_count)d of your questions'
) % {'question_count': question_count}
#todo - make a template for these
if question_count == 1:
reminder_phrase = _('Please accept the best answer for this question:')
else:
reminder_phrase = _('Please accept the best answer for these questions:')
data = {
'site_url': askbot_settings.APP_URL,
'questions': final_question_list,
'reminder_phrase': reminder_phrase
}
template = get_template('email/accept_answer_reminder.html')
body_text = template.render(Context(data))#todo: set lang
if DEBUG_THIS_COMMAND:
print "User: %s<br>\nSubject:%s<br>\nText: %s<br>\n" % \
(user.email, subject_line, body_text)
else:
mail.send_mail(
subject_line = subject_line,
body_text = body_text,
recipient_list = (user.email,)
)
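# Invocation sketch: as a Django management command this runs as
#   python manage.py send_accept_answer_reminders
# (typically from a daily cron job); it exits early unless both
# ENABLE_EMAIL_ALERTS and ENABLE_ACCEPT_ANSWER_REMINDERS are enabled.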
| gpl-3.0 | 2,633,401,880,730,399,000 | 41.488372 | 101 | 0.559113 | false |
spatialdev/onadata | onadata/libs/tests/utils/test_csv_builder.py | 5 | 17220 | import csv
import os
from tempfile import NamedTemporaryFile
from django.utils.dateparse import parse_datetime
from onadata.apps.main.tests.test_base import TestBase
from onadata.apps.logger.models.xform import XForm
from onadata.apps.logger.xform_instance_parser import xform_instance_to_dict
from onadata.libs.utils.csv_builder import AbstractDataFrameBuilder,\
CSVDataFrameBuilder, get_prefix_from_xpath,\
remove_dups_from_list_maintain_order, write_to_csv
from onadata.libs.utils.common_tags import NA_REP
def xls_filepath_from_fixture_name(fixture_name):
"""
Return an xls file path at tests/fixtures/[fixture]/fixture.xls
"""
# TODO currently this only works for fixtures in this app because of
# __file__
return os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"fixtures", fixture_name, fixture_name + ".xls"
)
def xml_inst_filepath_from_fixture_name(fixture_name, instance_name):
return os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"fixtures", fixture_name, "instances",
fixture_name + "_" + instance_name + ".xml"
)
class TestCSVDataFrameBuilder(TestBase):
def setUp(self):
self._create_user_and_login()
self._submission_time = parse_datetime('2013-02-18 15:54:01Z')
def _publish_xls_fixture_set_xform(self, fixture):
"""
Publish an xls file at tests/fixtures/[fixture]/fixture.xls
"""
xls_file_path = xls_filepath_from_fixture_name(fixture)
count = XForm.objects.count()
self._publish_xls_file(xls_file_path)
self.assertEqual(XForm.objects.count(), count + 1)
self.xform = XForm.objects.all().reverse()[0]
def _submit_fixture_instance(
self, fixture, instance, submission_time=None):
"""
Submit an instance at
tests/fixtures/[fixture]/instances/[fixture]_[instance].xml
"""
xml_submission_file_path = xml_inst_filepath_from_fixture_name(
fixture, instance)
self._make_submission(
xml_submission_file_path, forced_submission_time=submission_time)
self.assertEqual(self.response.status_code, 201)
def _publish_single_level_repeat_form(self):
self._publish_xls_fixture_set_xform("new_repeats")
self.survey_name = u"new_repeats"
def _publish_nested_repeats_form(self):
self._publish_xls_fixture_set_xform("nested_repeats")
self.survey_name = u"nested_repeats"
def _publish_grouped_gps_form(self):
self._publish_xls_fixture_set_xform("grouped_gps")
self.survey_name = u"grouped_gps"
def _csv_data_for_dataframe(self):
csv_df_builder = CSVDataFrameBuilder(self.user.username,
self.xform.id_string)
cursor = csv_df_builder._query_data()
return csv_df_builder._format_for_dataframe(cursor)
def test_csv_dataframe_export_to(self):
self._publish_nested_repeats_form()
self._submit_fixture_instance(
"nested_repeats", "01", submission_time=self._submission_time)
self._submit_fixture_instance(
"nested_repeats", "02", submission_time=self._submission_time)
csv_df_builder = CSVDataFrameBuilder(self.user.username,
self.xform.id_string)
temp_file = NamedTemporaryFile(suffix=".csv", delete=False)
csv_df_builder.export_to(temp_file.name)
csv_fixture_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"fixtures", "nested_repeats", "nested_repeats.csv"
)
temp_file.close()
fixture, output = '', ''
with open(csv_fixture_path) as f:
fixture = f.read()
with open(temp_file.name) as f:
output = f.read()
os.unlink(temp_file.name)
self.assertEqual(fixture, output)
def test_csv_columns_for_gps_within_groups(self):
self._publish_grouped_gps_form()
self._submit_fixture_instance("grouped_gps", "01")
data = self._csv_data_for_dataframe()
columns = data[0].keys()
expected_columns = [
u'gps_group/gps',
u'gps_group/_gps_latitude',
u'gps_group/_gps_longitude',
u'gps_group/_gps_altitude',
u'gps_group/_gps_precision',
u'web_browsers/firefox',
u'web_browsers/chrome',
u'web_browsers/ie',
u'web_browsers/safari',
] + AbstractDataFrameBuilder.ADDITIONAL_COLUMNS +\
AbstractDataFrameBuilder.IGNORED_COLUMNS
try:
expected_columns.remove(u'_deleted_at')
except ValueError:
pass
self.maxDiff = None
self.assertEqual(sorted(expected_columns), sorted(columns))
def test_format_mongo_data_for_csv(self):
self.maxDiff = None
self._publish_single_level_repeat_form()
self._submit_fixture_instance("new_repeats", "01")
self.xform.data_dictionary()
data_0 = self._csv_data_for_dataframe()[0]
# remove AbstractDataFrameBuilder.INTERNAL_FIELDS
for key in AbstractDataFrameBuilder.IGNORED_COLUMNS:
if key in data_0:
data_0.pop(key)
for key in AbstractDataFrameBuilder.ADDITIONAL_COLUMNS:
if key in data_0:
data_0.pop(key)
expected_data_0 = {
u'gps': u'-1.2627557 36.7926442 0.0 30.0',
u'_gps_latitude': u'-1.2627557',
u'_gps_longitude': u'36.7926442',
u'_gps_altitude': u'0.0',
u'_gps_precision': u'30.0',
u'kids/has_kids': u'1',
u'info/age': 80,
u'kids/kids_details[1]/kids_name': u'Abel',
u'kids/kids_details[1]/kids_age': 50,
u'kids/kids_details[2]/kids_name': u'Cain',
u'kids/kids_details[2]/kids_age': 76,
u'web_browsers/chrome': True,
u'web_browsers/ie': True,
u'web_browsers/safari': False,
u'web_browsers/firefox': False,
u'info/name': u'Adam',
}
self.assertEqual(expected_data_0, data_0)
def test_split_select_multiples(self):
self._publish_nested_repeats_form()
dd = self.xform.data_dictionary()
self._submit_fixture_instance("nested_repeats", "01")
csv_df_builder = CSVDataFrameBuilder(self.user.username,
self.xform.id_string)
cursor = [k for k in csv_df_builder._query_data()]
record = cursor[0]
select_multiples = CSVDataFrameBuilder._collect_select_multiples(dd)
result = CSVDataFrameBuilder._split_select_multiples(record,
select_multiples)
expected_result = {
u'web_browsers/ie': True,
u'web_browsers/safari': True,
u'web_browsers/firefox': False,
u'web_browsers/chrome': False
}
# build a new dictionary only composed of the keys we want to use in
# the comparison
result = dict([(key, result[key]) for key in result.keys() if key in
expected_result.keys()])
self.assertEqual(expected_result, result)
csv_df_builder = CSVDataFrameBuilder(self.user.username,
self.xform.id_string,
binary_select_multiples=True)
result = csv_df_builder._split_select_multiples(record,
select_multiples)
expected_result = {
u'web_browsers/ie': 1,
u'web_browsers/safari': 1,
u'web_browsers/firefox': 0,
u'web_browsers/chrome': 0
}
# build a new dictionary only composed of the keys we want to use in
# the comparison
result = dict([(key, result[key]) for key in result.keys() if key in
expected_result.keys()])
self.assertEqual(expected_result, result)
def test_split_select_multiples_within_repeats(self):
self.maxDiff = None
record = {
'name': 'Tom',
'age': 23,
'browser_use': [
{
'browser_use/year': '2010',
'browser_use/browsers': 'firefox safari'
},
{
'browser_use/year': '2011',
'browser_use/browsers': 'firefox chrome'
}
]
}
expected_result = {
'name': 'Tom',
'age': 23,
'browser_use': [
{
'browser_use/year': '2010',
'browser_use/browsers/firefox': True,
'browser_use/browsers/safari': True,
'browser_use/browsers/ie': False,
'browser_use/browsers/chrome': False
},
{
'browser_use/year': '2011',
'browser_use/browsers/firefox': True,
'browser_use/browsers/safari': False,
'browser_use/browsers/ie': False,
'browser_use/browsers/chrome': True
}
]
}
select_multiples = {
'browser_use/browsers': [
'browser_use/browsers/firefox',
'browser_use/browsers/safari',
'browser_use/browsers/ie',
'browser_use/browsers/chrome']}
result = CSVDataFrameBuilder._split_select_multiples(record,
select_multiples)
self.assertEqual(expected_result, result)
def test_split_gps_fields(self):
record = {
'gps': '5 6 7 8'
}
gps_fields = ['gps']
expected_result = {
'gps': '5 6 7 8',
'_gps_latitude': '5',
'_gps_longitude': '6',
'_gps_altitude': '7',
'_gps_precision': '8',
}
AbstractDataFrameBuilder._split_gps_fields(record, gps_fields)
self.assertEqual(expected_result, record)
def test_split_gps_fields_within_repeats(self):
record = {
'a_repeat': [
{
'a_repeat/gps': '1 2 3 4'
},
{
'a_repeat/gps': '5 6 7 8'
}
]
}
gps_fields = ['a_repeat/gps']
expected_result = {
'a_repeat': [
{
'a_repeat/gps': '1 2 3 4',
'a_repeat/_gps_latitude': '1',
'a_repeat/_gps_longitude': '2',
'a_repeat/_gps_altitude': '3',
'a_repeat/_gps_precision': '4',
},
{
'a_repeat/gps': '5 6 7 8',
'a_repeat/_gps_latitude': '5',
'a_repeat/_gps_longitude': '6',
'a_repeat/_gps_altitude': '7',
'a_repeat/_gps_precision': '8',
}
]
}
AbstractDataFrameBuilder._split_gps_fields(record, gps_fields)
self.assertEqual(expected_result, record)
def test_unicode_export(self):
unicode_char = unichr(40960)
# fake data
data = [{"key": unicode_char}]
columns = ["key"]
# test csv
        passed = False
        temp_file = NamedTemporaryFile(suffix=".csv")
        try:
            write_to_csv(temp_file.name, data, columns)
            passed = True
        except UnicodeEncodeError:
            pass
        finally:
            temp_file.close()
self.assertTrue(passed)
def test_repeat_child_name_matches_repeat(self):
"""
ParsedInstance.to_dict creates a list within a repeat if a child has
        the same name as the repeat. This test makes sure that doesn't happen.
"""
self.maxDiff = None
fixture = "repeat_child_name_matches_repeat"
# publish form so we have a dd to pass to xform inst. parser
self._publish_xls_fixture_set_xform(fixture)
submission_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"fixtures", fixture, fixture + ".xml"
)
# get submission xml str
with open(submission_path, "r") as f:
xml_str = f.read()
        instance_dict = xform_instance_to_dict(xml_str, self.xform.data_dictionary())
expected_dict = {
u'test_item_name_matches_repeat': {
u'formhub': {
u'uuid': u'c911d71ce1ac48478e5f8bac99addc4e'
},
u'gps': [
{
u'info': u'Yo',
u'gps': u'-1.2625149 36.7924478 0.0 30.0'
},
{
u'info': u'What',
u'gps': u'-1.2625072 36.7924328 0.0 30.0'
}
]
}
}
        self.assertEqual(instance_dict, expected_dict)
def test_remove_dups_from_list_maintain_order(self):
        lst = ["a", "z", "b", "y", "c", "b", "x"]
        result = remove_dups_from_list_maintain_order(lst)
expected_result = ["a", "z", "b", "y", "c", "x"]
self.assertEqual(result, expected_result)
def test_prefix_from_xpath(self):
xpath = "parent/child/grandhild"
prefix = get_prefix_from_xpath(xpath)
self.assertEqual(prefix, 'parent/child/')
xpath = "parent/child"
prefix = get_prefix_from_xpath(xpath)
self.assertEqual(prefix, 'parent/')
xpath = "parent"
prefix = get_prefix_from_xpath(xpath)
self.assertTrue(prefix is None)
def test_csv_export(self):
self._publish_single_level_repeat_form()
# submit 7 instances
for i in range(4):
self._submit_fixture_instance("new_repeats", "01")
self._submit_fixture_instance("new_repeats", "02")
for i in range(2):
self._submit_fixture_instance("new_repeats", "01")
csv_df_builder = CSVDataFrameBuilder(self.user.username,
self.xform.id_string)
record_count = csv_df_builder._query_data(count=True)
self.assertEqual(record_count, 7)
temp_file = NamedTemporaryFile(suffix=".csv", delete=False)
csv_df_builder.export_to(temp_file.name)
csv_file = open(temp_file.name)
csv_reader = csv.reader(csv_file)
header = csv_reader.next()
self.assertEqual(
len(header), 17 + len(AbstractDataFrameBuilder.ADDITIONAL_COLUMNS))
rows = []
for row in csv_reader:
rows.append(row)
self.assertEqual(len(rows), 7)
self.assertEqual(rows[4][5], NA_REP)
# close and delete file
csv_file.close()
os.unlink(temp_file.name)
def test_csv_column_indices_in_groups_within_repeats(self):
self._publish_xls_fixture_set_xform("groups_in_repeats")
self._submit_fixture_instance("groups_in_repeats", "01")
dd = self.xform.data_dictionary()
dd.get_keys()
data_0 = self._csv_data_for_dataframe()[0]
# remove dynamic fields
ignore_list = [
'_uuid', 'meta/instanceID', 'formhub/uuid', '_submission_time',
'_id', '_bamboo_dataset_id']
for item in ignore_list:
data_0.pop(item)
expected_data_0 = {
u'_xform_id_string': u'groups_in_repeats',
u'_status': u'submitted_via_web',
u'_tags': u'',
u'_notes': u'',
u'_version': self.xform.version,
u"_submitted_by": u'bob',
u'name': u'Abe',
u'age': 88,
u'has_children': u'1',
u'_attachments': [],
u'children[1]/childs_info/name': u'Cain',
u'children[2]/childs_info/name': u'Abel',
u'children[1]/childs_info/age': 56,
u'children[2]/childs_info/age': 48,
u'children[1]/immunization/immunization_received/polio_1': True,
u'children[1]/immunization/immunization_received/polio_2': False,
u'children[2]/immunization/immunization_received/polio_1': True,
u'children[2]/immunization/immunization_received/polio_2': True,
u'web_browsers/chrome': True,
u'web_browsers/firefox': False,
u'web_browsers/ie': False,
u'web_browsers/safari': False,
u'gps': u'-1.2626156 36.7923571 0.0 30.0',
u'_geolocation': [-1.2626156, 36.7923571],
u'_duration': '',
u'_gps_latitude': u'-1.2626156',
u'_gps_longitude': u'36.7923571',
u'_gps_altitude': u'0.0',
u'_gps_precision': u'30.0',
}
self.maxDiff = None
self.assertEqual(data_0, expected_data_0)
| bsd-2-clause | -4,380,878,115,132,721,000 | 38.225513 | 79 | 0.534901 | false |
c-o-m-m-a-n-d-e-r/CouchPotatoServer | couchpotato/core/event.py | 65 | 5458 | import threading
import traceback
from axl.axel import Event
from couchpotato.core.helpers.variable import mergeDicts, natsortKey
from couchpotato.core.logger import CPLog
log = CPLog(__name__)
events = {}
def runHandler(name, handler, *args, **kwargs):
try:
return handler(*args, **kwargs)
except:
from couchpotato.environment import Env
log.error('Error in event "%s", that wasn\'t caught: %s%s', (name, traceback.format_exc(), Env.all() if not Env.get('dev') else ''))
def addEvent(name, handler, priority = 100):
if not events.get(name):
events[name] = []
def createHandle(*args, **kwargs):
h = None
try:
# Open handler
has_parent = hasattr(handler, 'im_self')
parent = None
if has_parent:
parent = handler.__self__
bc = hasattr(parent, 'beforeCall')
if bc: parent.beforeCall(handler)
# Main event
h = runHandler(name, handler, *args, **kwargs)
# Close handler
if parent and has_parent:
ac = hasattr(parent, 'afterCall')
if ac: parent.afterCall(handler)
except:
log.error('Failed creating handler %s %s: %s', (name, handler, traceback.format_exc()))
return h
events[name].append({
'handler': createHandle,
'priority': priority,
})
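# Illustrative usage sketch (the handler and event names below are examples,
# not part of this module):
#
#   def onAppLoad():
#       log.debug('App has loaded')
#
#   addEvent('app.load', onAppLoad, priority = 50)
#   fireEvent('app.load')
#   first_result = fireEvent('app.load', single = True)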
def fireEvent(name, *args, **kwargs):
if name not in events: return
#log.debug('Firing event %s', name)
try:
options = {
'is_after_event': False, # Fire after event
'on_complete': False, # onComplete event
'single': False, # Return single handler
'merge': False, # Merge items
'in_order': False, # Fire them in specific order, waits for the other to finish
}
# Do options
for x in options:
try:
val = kwargs[x]
del kwargs[x]
options[x] = val
except: pass
if len(events[name]) == 1:
single = None
try:
single = events[name][0]['handler'](*args, **kwargs)
except:
log.error('Failed running single event: %s', traceback.format_exc())
# Don't load thread for single event
result = {
'single': (single is not None, single),
}
else:
e = Event(name = name, threads = 10, exc_info = True, traceback = True)
for event in events[name]:
e.handle(event['handler'], priority = event['priority'])
# Make sure only 1 event is fired at a time when order is wanted
kwargs['event_order_lock'] = threading.RLock() if options['in_order'] or options['single'] else None
kwargs['event_return_on_result'] = options['single']
# Fire
result = e(*args, **kwargs)
result_keys = result.keys()
result_keys.sort(key = natsortKey)
if options['single'] and not options['merge']:
results = None
# Loop over results, stop when first not None result is found.
for r_key in result_keys:
r = result[r_key]
if r[0] is True and r[1] is not None:
results = r[1]
break
elif r[1]:
errorHandler(r[1])
else:
log.debug('Assume disabled eventhandler for: %s', name)
else:
results = []
for r_key in result_keys:
r = result[r_key]
                if r[0] is True and r[1]:
results.append(r[1])
elif r[1]:
errorHandler(r[1])
# Merge
if options['merge'] and len(results) > 0:
# Dict
if isinstance(results[0], dict):
results.reverse()
merged = {}
for result in results:
merged = mergeDicts(merged, result, prepend_list = True)
results = merged
# Lists
elif isinstance(results[0], list):
merged = []
for result in results:
if result not in merged:
merged += result
results = merged
modified_results = fireEvent('result.modify.%s' % name, results, single = True)
if modified_results:
log.debug('Return modified results for %s', name)
results = modified_results
if not options['is_after_event']:
fireEvent('%s.after' % name, is_after_event = True)
if options['on_complete']:
options['on_complete']()
return results
except Exception:
log.error('%s: %s', (name, traceback.format_exc()))
def fireEventAsync(*args, **kwargs):
try:
t = threading.Thread(target = fireEvent, args = args, kwargs = kwargs)
t.setDaemon(True)
t.start()
return True
except Exception as e:
log.error('%s: %s', (args[0], e))
def errorHandler(error):
etype, value, tb = error
log.error(''.join(traceback.format_exception(etype, value, tb)))
def getEvent(name):
return events[name]
| gpl-3.0 | -5,605,307,557,551,446,000 | 28.663043 | 140 | 0.509527 | false |
keunwoochoi/kapre | docs/conf.py | 1 | 2949 |
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('../'))
import sphinx_rtd_theme
autodoc_mock_imports = ['tensorflow', 'librosa', 'numpy']
autodoc_member_order = 'bysource'
# -- Project information -----------------------------------------------------
project = 'Kapre'
copyright = '2020, Keunwoo Choi, Deokjin Joo and Juho Kim'
author = 'Keunwoo Choi, Deokjin Joo and Juho Kim'
# The full version, including alpha/beta/rc tags
release = '2017'
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    "sphinx.ext.autodoc",
    "sphinx_rtd_theme",
    "sphinx.ext.viewcode",  # source linkage
    "sphinx.ext.napoleon",
    # "sphinx.ext.autosummary",
    "sphinxcontrib.inlinesyntaxhighlight"  # inline code highlight
]
# https://stackoverflow.com/questions/21591107/sphinx-inline-code-highlight
# use language set by highlight directive if no language is set by role
inline_highlight_respect_highlight = True
# use language set by highlight directive if no role is set
inline_highlight_literals = True
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# autosummary_generate = True
# autoapi_type = 'python'
# autoapi_dirs = ['../kapre']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
html_css_files = [
'css/custom.css',
]
def setup(app):
app.add_stylesheet("css/custom.css")
master_doc = 'index'
| mit | 7,498,530,462,489,980,000 | 31.054348 | 79 | 0.678535 | false |
edx/edx-platform | common/djangoapps/third_party_auth/samlproviderdata/tests/test_samlproviderdata.py | 3 | 7778 | import copy
import pytz
from uuid import uuid4
from datetime import datetime
from django.contrib.sites.models import Site
from django.contrib.auth.models import User
from django.urls import reverse
from django.utils.http import urlencode
from rest_framework import status
from rest_framework.test import APITestCase
from enterprise.models import EnterpriseCustomer, EnterpriseCustomerIdentityProvider
from enterprise.constants import ENTERPRISE_ADMIN_ROLE, ENTERPRISE_LEARNER_ROLE
from common.djangoapps.third_party_auth.models import SAMLProviderData, SAMLProviderConfig
from common.djangoapps.third_party_auth.tests.samlutils import set_jwt_cookie
from common.djangoapps.third_party_auth.tests.utils import skip_unless_thirdpartyauth
from common.djangoapps.third_party_auth.utils import convert_saml_slug_provider_id
SINGLE_PROVIDER_CONFIG = {
'entity_id': 'http://entity-id-1',
'metadata_source': 'http://test.url',
'name': 'name-of-config',
'enabled': 'true',
'slug': 'test-slug'
}
# The entity_id here intentionally matches that of the provider config,
# which allows this data entity to be found
SINGLE_PROVIDER_DATA = {
'entity_id': 'http://entity-id-1',
'sso_url': 'http://test.url',
'public_key': 'a-key0Aid98',
'fetched_at': datetime.now(pytz.UTC).replace(microsecond=0)
}
SINGLE_PROVIDER_DATA_2 = copy.copy(SINGLE_PROVIDER_DATA)
SINGLE_PROVIDER_DATA_2['entity_id'] = 'http://entity-id-2'
SINGLE_PROVIDER_DATA_2['sso_url'] = 'http://test2.url'
ENTERPRISE_ID = str(uuid4())
BAD_ENTERPRISE_ID = str(uuid4())
@skip_unless_thirdpartyauth()
class SAMLProviderDataTests(APITestCase):
"""
API Tests for SAMLProviderConfig REST endpoints
"""
@classmethod
def setUpTestData(cls):
super().setUpTestData()
cls.user = User.objects.create_user(username='testuser', password='testpwd')
cls.site, _ = Site.objects.get_or_create(domain='example.com')
cls.enterprise_customer = EnterpriseCustomer.objects.create(
uuid=ENTERPRISE_ID,
name='test-ep',
slug='test-ep',
site=cls.site)
cls.saml_provider_config, _ = SAMLProviderConfig.objects.get_or_create(
entity_id=SINGLE_PROVIDER_CONFIG['entity_id'],
metadata_source=SINGLE_PROVIDER_CONFIG['metadata_source']
)
# the entity_id here must match that of the saml_provider_config
cls.saml_provider_data, _ = SAMLProviderData.objects.get_or_create(
entity_id=SINGLE_PROVIDER_DATA['entity_id'],
sso_url=SINGLE_PROVIDER_DATA['sso_url'],
fetched_at=SINGLE_PROVIDER_DATA['fetched_at']
)
cls.enterprise_customer_idp, _ = EnterpriseCustomerIdentityProvider.objects.get_or_create(
provider_id=convert_saml_slug_provider_id(cls.saml_provider_config.slug),
enterprise_customer_id=ENTERPRISE_ID
)
def setUp(self):
# a cookie with roles: [{enterprise_admin_role: ent_id}] will be
# needed to rbac to authorize access for this view
set_jwt_cookie(self.client, self.user, [(ENTERPRISE_ADMIN_ROLE, ENTERPRISE_ID)])
self.client.force_authenticate(user=self.user)
def test_get_one_provider_data_success(self):
# GET auth/saml/v0/providerdata/?enterprise_customer_uuid=id
url_base = reverse('saml_provider_data-list')
query_kwargs = {'enterprise_customer_uuid': ENTERPRISE_ID}
url = f'{url_base}?{urlencode(query_kwargs)}'
response = self.client.get(url, format='json')
assert response.status_code == status.HTTP_200_OK
results = response.data['results']
assert len(results) == 1
assert results[0]['sso_url'] == SINGLE_PROVIDER_DATA['sso_url']
def test_create_one_provider_data_success(self):
# POST auth/saml/v0/providerdata/ -d data
url = reverse('saml_provider_data-list')
data = copy.copy(SINGLE_PROVIDER_DATA_2)
data['enterprise_customer_uuid'] = ENTERPRISE_ID
orig_count = SAMLProviderData.objects.count()
response = self.client.post(url, data)
assert response.status_code == status.HTTP_201_CREATED
assert SAMLProviderData.objects.count() == (orig_count + 1)
assert SAMLProviderData.objects.get(entity_id=SINGLE_PROVIDER_DATA_2['entity_id']).sso_url == SINGLE_PROVIDER_DATA_2['sso_url']
def test_create_one_data_with_absent_enterprise_uuid(self):
"""
POST auth/saml/v0/provider_data/ -d data
"""
url = reverse('saml_provider_data-list')
data = copy.copy(SINGLE_PROVIDER_DATA_2)
orig_count = SAMLProviderData.objects.count()
response = self.client.post(url, data)
assert response.status_code == status.HTTP_400_BAD_REQUEST
assert SAMLProviderData.objects.count() == orig_count
def test_patch_one_provider_data(self):
# PATCH auth/saml/v0/providerdata/ -d data
url = reverse('saml_provider_data-detail', kwargs={'pk': self.saml_provider_data.id})
data = {
'sso_url': 'http://new.url'
}
data['enterprise_customer_uuid'] = ENTERPRISE_ID
orig_count = SAMLProviderData.objects.count()
response = self.client.patch(url, data)
assert response.status_code == status.HTTP_200_OK
assert SAMLProviderData.objects.count() == orig_count
# ensure only the sso_url was updated
fetched_provider_data = SAMLProviderData.objects.get(pk=self.saml_provider_data.id)
assert fetched_provider_data.sso_url == 'http://new.url'
assert fetched_provider_data.fetched_at == SINGLE_PROVIDER_DATA['fetched_at']
assert fetched_provider_data.entity_id == SINGLE_PROVIDER_DATA['entity_id']
def test_delete_one_provider_data(self):
# DELETE auth/saml/v0/providerdata/ -d data
url_base = reverse('saml_provider_data-detail', kwargs={'pk': self.saml_provider_data.id})
query_kwargs = {'enterprise_customer_uuid': ENTERPRISE_ID}
url = f'{url_base}?{urlencode(query_kwargs)}'
orig_count = SAMLProviderData.objects.count()
response = self.client.delete(url)
assert response.status_code == status.HTTP_204_NO_CONTENT
assert SAMLProviderData.objects.count() == (orig_count - 1)
# ensure only the sso_url was updated
query_set_count = SAMLProviderData.objects.filter(pk=self.saml_provider_data.id).count()
assert query_set_count == 0
def test_get_one_provider_data_failure(self):
set_jwt_cookie(self.client, self.user, [(ENTERPRISE_ADMIN_ROLE, BAD_ENTERPRISE_ID)])
self.client.force_authenticate(user=self.user)
url_base = reverse('saml_provider_data-list')
query_kwargs = {'enterprise_customer_uuid': BAD_ENTERPRISE_ID}
url = f'{url_base}?{urlencode(query_kwargs)}'
response = self.client.get(url, format='json')
assert response.status_code == status.HTTP_404_NOT_FOUND
def test_unauthenticated_request_is_forbidden(self):
self.client.logout()
urlbase = reverse('saml_provider_data-list')
query_kwargs = {'enterprise_customer_uuid': ENTERPRISE_ID}
url = f'{urlbase}?{urlencode(query_kwargs)}'
set_jwt_cookie(self.client, self.user, [(ENTERPRISE_LEARNER_ROLE, ENTERPRISE_ID)])
response = self.client.get(url, format='json')
assert response.status_code == status.HTTP_403_FORBIDDEN
# manually running second case as DDT is having issues.
self.client.logout()
set_jwt_cookie(self.client, self.user, [(ENTERPRISE_ADMIN_ROLE, BAD_ENTERPRISE_ID)])
response = self.client.get(url, format='json')
assert response.status_code == status.HTTP_403_FORBIDDEN
| agpl-3.0 | -1,832,257,543,057,153,500 | 42.452514 | 135 | 0.673824 | false |
Sbalbp/Xchat_Translator_Plugin | src/translator_plugin.py | 1 | 14334 | #
# XChat Translator Plugin.
#
# Copyright (C) 2014 Sergio Balbuena <[email protected]>.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#!/usr/bin/env python
__module_name__ = "Message translator"
__module_version__ = "0.1.0"
__module_description__ = "Translates incoming messages using Apertium"
__module_author__ = "Sergio Balbuena <[email protected]>"
import xchat
import sys
import os
sys.path.append(os.environ.get('PYTHONPATH'))
import apertiumpluginutils.apertiumInterfaceAPY as iface
import apertiumpluginutils.apertiumFiles as files
pyVersion = sys.version_info[0]
printMode = 'print'
displayMode = 'compressed'
custom_emit = False
def notify(text, info = True):
if(info):
if(printMode == 'dialog'):
xchat.command('GUI MSGBOX \"'+text+'\"')
elif(printMode == 'print'):
print('Translator plugin information:\n'+text)
else:
if(printMode == 'dialog'):
xchat.command('GUI MSGBOX \"'+text+'\"')
elif(printMode == 'print'):
print('Translator plugin error: '+text)
def userBlocked(user):
blocked = files.getKey('blocked')
if(blocked != None and getFullChannel() in blocked.keys() and user in blocked[getFullChannel()]):
return True
return False
def getFullChannel():
fullChannel = ''
list = xchat.get_list('channels')
if(list):
for i in list:
fullChannel = fullChannel+i.channel+'.'
return fullChannel
def parseBindArguments(args):
numArgs = len(args)
if(numArgs < 3):
notify('Not enough arguments provided', info=False)
return False
if(numArgs > 3):
user = 1
else:
user = 0
if(not args[0] in ['incoming','outgoing']):
notify('First argument must be either \'incoming\' or \'outgoing\'', info=False)
return False
else:
isPair = iface.pairExists(args[1+user],args[2+user])
if(isPair['ok']):
if(not isPair['result']):
notify('Pair '+args[1+user]+' - '+args[2+user]+' does not exist', info=False)
return False
else:
notify(isPair['errorMsg'], info=False)
return False
if(user == 1 and args[0] == 'outgoing'):
notify('Cannot bind a language pair to outgoing messages for a particular user', info=False)
return False
return True
def translate(text, user, direction):
result = None
source = None
target = None
    # Return the same shape as a successful lookup so callers can index safely
    if(userBlocked(user)):
        return [None, [None, None]]
    if(direction != 'incoming' and direction != 'outgoing'):
        return [None, [None, None]]
dictionary = files.getDictionary()[direction]
for key in [user, getFullChannel(), 'default']:
if(key in dictionary.keys()):
result = iface.translate(text, dictionary[key]['source'], dictionary[key]['target'])
source = dictionary[key]['source']
target = dictionary[key]['target']
break
if(result != None):
if(result['ok']):
result = result['result']
else:
notify(result['errorMsg'], info=False)
result = None
return [result, [source, target]]
def apertium_apy_cb(word, word_eol, userdata):
if(len(word) <= 1):
text = 'APY addresses:\n'
for i in range(iface.getAPYListSize()):
text = text+'\n'+iface.getAPYAddress(i)
notify(text)
elif(len(word) == 2):
if(iface.getAPYListSize() > int(word[1])):
notify('APY address number '+word[1]+':\n\n'+iface.getAPYAddress(int(word[1])))
else:
notify('Error: Only '+str(iface.getAPYListSize())+' APY addresses available',info=False)
else:
if(iface.setAPYAddress(word[2],order=int(word[1])) == None):
notify('Couldn\'t change APY address\nNo response from given server',info=False)
else:
files.setKey('apyAddress',iface.getAPYList())
notify('Successfully added the APY address: '+word[2])
return xchat.EAT_ALL
def apertium_removeapy_cb(word, word_eol, userdata):
if(len(word) <= 1):
iface.setAPYList([])
files.setKey('apyAddress',[])
notify('All APY addresses removed')
else:
if(iface.removeAPYAddress(int(word[1]))):
files.setKey('apyAddress',iface.getAPYList())
notify('Successfully removed APY address '+word[1])
else:
notify('Couldn\'t remove APY address '+word[1],info=False)
return xchat.EAT_ALL
def apertium_pairs_cb(word, word_eol, userdata):
result = iface.getAllPairs()
it = 2
if(result['ok']):
resultText = 'Available pairs:\n'
result = result['result']
for pair in result:
if(it == 0):
it = 2
resultText = resultText+pair[0]+' - '+pair[1]+'\n'
else:
resultText = resultText+(pair[0]+' - '+pair[1]).ljust(25)
it = it-1
notify(resultText)
else:
notify(result['errorMsg'],info=False)
return xchat.EAT_ALL
def apertium_check_cb(word, word_eol, userdata):
incoming = files.getKey('incoming')
outgoing = files.getKey('outgoing')
channel = getFullChannel()
text = ''
if(len(word) < 2):
text = text+'Default language settings:\n\n incoming: '
if 'default' in incoming.keys():
text = text+incoming['default']['source']+' - '+incoming['default']['target']+'\n'
else:
text = text+'None\n'
text = text+' outgoing: '
if 'default' in outgoing.keys():
text = text+outgoing['default']['source']+' - '+outgoing['default']['target']+'\n\n'
else:
text = text+'None\n\n'
text = text+'Language settings for this channel:\n\n incoming: '
if channel in incoming.keys():
text = text+incoming[channel]['source']+' - '+incoming[channel]['target']+'\n'
else:
text = text+'None\n'
text = text+' outgoing: '
if channel in outgoing.keys():
text = text+outgoing[channel]['source']+' - '+outgoing[channel]['target']+''
else:
text = text+'None'
else:
text = text+'Language settings for user '+word[1]+':\n\n incoming: '
if word[1] in incoming.keys():
text = text+incoming[word[1]]['source']+' - '+incoming[word[1]]['target']+'\n'
else:
text = text+'None\n'
notify(text)
return xchat.EAT_ALL
def apertium_bind_cb(word, word_eol, userdata):
if(parseBindArguments(word[1:])):
if(len(word) > 4):
user = 1
username = word[2]
else:
user = 0
username = getFullChannel()
if(files.setLangPair(word[1],username,word[2+user],word[3+user])):
notify('Successfully set '+word[2+user]+' - '+word[3+user]+' as the '+word[1]+' language pair for '+username)
else:
notify('An error occurred while binding the language pair')
return xchat.EAT_ALL
def apertium_unbind_cb(word, word_eol, userdata):
if(len(word) > 1):
key = word[1]
else:
key = getFullChannel()
success = False
if(files.unsetLangPair('incoming',key)):
success = True
if(files.unsetLangPair('outgoing',key)):
success = True
if(success):
notify('Successfully removed bindings for '+key)
return xchat.EAT_ALL
def apertium_default_cb(word, word_eol, userdata):
if(parseBindArguments(word[1:])):
if(files.setLangPair(word[1],'default',word[2],word[3])):
notify('Successfully set '+word[2]+' - '+word[3]+' as the '+word[1]+' default language pair')
else:
notify('An error occurred while binding the language pair')
return xchat.EAT_ALL
def apertium_block_cb(word, word_eol, userdata):
if(len(word) < 2):
notify('Not enough arguments provided', info=False)
return xchat.EAT_ALL
blocked = files.getKey('blocked')
if(blocked == None):
blocked = {}
if(not(getFullChannel() in blocked.keys())):
blocked[getFullChannel()] = []
blocked[getFullChannel()].append(word[1])
files.setKey('blocked',blocked)
return xchat.EAT_ALL
def apertium_unblock_cb(word, word_eol, userdata):
if(len(word) < 2):
notify('Not enough arguments provided', info=False)
return xchat.EAT_ALL
if(userBlocked(word[1])):
blocked = files.getKey('blocked')
blocked[getFullChannel()].remove(word[1])
files.setKey('blocked',blocked)
return xchat.EAT_ALL
def apertium_display_cb(word, word_eol, userdata):
global displayMode
if(len(word) < 2):
text = ''
if(displayMode == 'both'):
text = '"Both"\nBoth the original message and its translation are displayed'
elif(displayMode == 'replace'):
text = '"Replace"\nOnly the translated message is displayed'
elif(displayMode == 'compressed'):
text = '"Compressed"\nBoth the original message and its translation are displayed in a compressed way'
notify('Current display mode:\n'+text, info=True)
return xchat.EAT_ALL
if(not word[1] in ['both','replace','compressed']):
notify('Display mode argument must be \'both\', \'replace\' or \'compressed\'', info=False)
return xchat.EAT_ALL
displayMode = word[1]
files.setKey('displayMode',displayMode)
notify('Successfully set display mode to '+displayMode)
return xchat.EAT_ALL
def apertium_errordisplay_cb(word, word_eol, userdata):
global printMode
if(len(word) < 2):
notify('Not enough arguments provided', info=False)
return xchat.EAT_ALL
if(not word[1] in ['dialog','print','none']):
notify('Display mode argument must be \'dialog\', \'print\' or \'none\'', info=False)
return xchat.EAT_ALL
printMode = word[1]
return xchat.EAT_ALL
def translate_cm_cb(word, word_eol, userdata):
global custom_emit
if(custom_emit):
return xchat.EAT_NONE
result = translate(word[1],word[0],'incoming')
translation = result[0]
if(translation != None):
if(pyVersion >= 3):
translation = translation.decode('utf-8')
if(displayMode == 'both'):
text = '--- Original ---\n'+word[1]+'\n--- Translation ---\n'+translation
elif(displayMode == 'replace'):
text = translation
elif(displayMode == 'compressed'):
text = word[1]+'\napertium '+result[1][0]+'-'+result[1][1]+': '+translation
else:
text = word[1]
custom_emit = True
xchat.emit_print('Channel Message', word[0], text.replace('\t',' '))
custom_emit = False
return xchat.EAT_ALL
def translate_ym_cb(word, word_eol, userdata):
result = translate(word[1],'default','outgoing')
translation = result[0]
if(translation != None):
text = translation
else:
text = word[1]
xchat.command("msg #channel %s" % text.replace('\t',' '))
return xchat.EAT_ALL
def unload_cb(userdata):
files.save()
files.setFile('apertium_xchat_plugin_preferences.pkl')
files.read()
iface.setAPYList(files.getKey('apyAddress'))
if(files.getKey('displayMode') != None):
displayMode = files.getKey('displayMode')
else:
displayMode = 'compressed'
files.setKey('displayMode',displayMode)
xchat.hook_unload(unload_cb)
xchat.hook_command('apertium_apy', apertium_apy_cb, help='/apertium_apy <position> <address>\nAdds a new APY address in a given position of the APY addresses list.\n If no arguments are passed, it just shows the list of addresses. If only the position argument is passed, it shows the APY address at that position.')
xchat.hook_command('apertium_removeapy', apertium_removeapy_cb, help='/apertium_removeapy <position>\nRemoves the APY address at the given position from the APY list.\nIf no arguments are given, all the APYs are removed.')
xchat.hook_command('apertium_pairs', apertium_pairs_cb, help='/apertium_pairs\nShows all the available Apertium language pairs that can be used.')
xchat.hook_command('apertium_check', apertium_check_cb, help='/apertium_check <user>\nShows the current language pair bindings for the given user.\nIf no argument is passed, shows the default and current channel language pair bindings.')
xchat.hook_command('apertium_bind', apertium_bind_cb, help='/apertium_bind <direction> <user> <source> <target>\nBinds a given language pair to a user or channel.\ndirection must be either \'incoming\' or \'outgoing\'.\nuser (optional) is the name of the user whose messages are translated using the given language pair. If omitted, the language pair is bound to the channel itself.\nsource and target are the codes for the source and target languages from the language pair, respectively. \'outgoing\' messages will only be translated if a language pair has been assigned to either \'default\' (with apertium_default outgoing) or the channel (for example, the language pair eng-spa to translate your messages in english to spanish in a spanish channel). This means you can\'t bind an outgoing language pair to a user.')
xchat.hook_command('apertium_unbind', apertium_unbind_cb, help='/apertium_unbind <user>\nUnbinds the langugage pair associated to a user or channel.\nuser (optional) is the name of the user whose language pairs are to be unbound. If omitted, the language pairs are unbound from the channel itself.')
xchat.hook_command('apertium_default', apertium_default_cb, help='/apertium_default <direction> <source> <target>\nSets a given language pair as default when no bindings exist for users or channels.\ndirection must be either \'incoming\' or \'outgoing\'.\nsource and target are the codes for the source and target languages from the language pair, respectively.')
xchat.hook_command('apertium_block', apertium_block_cb, help='/apertium_block <user>\nBlocks the given user so that their messages are not translated in the current channel.')
xchat.hook_command('apertium_unblock', apertium_unblock_cb, help='/apertium_unblock <user>\nUnblocks the given user so that their messages are translated again in the current channel.')
xchat.hook_command('apertium_display', apertium_display_cb, help='/apertium_display <display_mode>\nSelects how translated messages should be displayed.\n display_mode must be one of the following:\n\'both\' Displays both the original message and its translation.\n\'replace\' Only the translated message is displayed.\n\'compressed\' Shows both the original text and its translation, in a compressed 2-line way.')
xchat.hook_command('apertium_infodisplay', apertium_errordisplay_cb, help='/apertium_infodisplay <info_display_mode>\nSelects how plugin information should be displayed.\n info_display_mode must be one of the following:\n\'dialog\' Shows a dialog box with the information.\n\'print\' Prints the information in the xchat history.\n\'none\' Information is not displayed')
xchat.hook_print('Channel Message', translate_cm_cb)
xchat.hook_print('Your Message', translate_ym_cb)
| gpl-3.0 | 6,040,304,919,373,699,000 | 33.791262 | 820 | 0.707479 | false |
blckshrk/Weboob | weboob/capabilities/parcel.py | 3 | 1678 | # -*- coding: utf-8 -*-
# Copyright(C) 2013 Romain Bignon
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from .base import IBaseCap, CapBaseObject, Field, StringField, DateField
class Event(CapBaseObject):
date = DateField('Date')
activity = StringField('Activity')
location = StringField('Location')
def __repr__(self):
return u'<Event date=%r activity=%r location=%r>' % (self.date, self.activity, self.location)
class Parcel(CapBaseObject):
STATUS_UNKNOWN = 0
STATUS_PLANNED = 1
STATUS_IN_TRANSIT = 2
STATUS_ARRIVED = 3
arrival = DateField('Scheduled arrival date')
status = Field('Status of parcel', int, default=STATUS_UNKNOWN)
info = StringField('Information about parcel status')
history = Field('History', list)
class ICapParcel(IBaseCap):
def get_parcel_tracking(self, id):
"""
        Get information about a parcel.
:param id: ID of the parcel
:type id: :class:`str`
:rtype: :class:`Parcel`
"""
raise NotImplementedError()
| agpl-3.0 | 3,472,126,745,498,578,000 | 30.074074 | 101 | 0.691895 | false |
pratikmallya/hue | desktop/core/ext-py/Paste-2.0.1/paste/util/mimeparse.py | 50 | 6604 | """MIME-Type Parser
This module provides basic functions for handling mime-types. It can handle
matching mime-types against a list of media-ranges. See section 14.1 of
the HTTP specification [RFC 2616] for a complete explanation.
http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.1
Based on mimeparse 0.1.2 by Joe Gregorio:
http://code.google.com/p/mimeparse/
Contents:
- parse_mime_type(): Parses a mime-type into its component parts.
- parse_media_range(): Media-ranges are mime-types with wild-cards and a 'q' quality parameter.
- quality(): Determines the quality ('q') of a mime-type when compared against a list of media-ranges.
- quality_parsed(): Just like quality() except the second parameter must be pre-parsed.
- best_match(): Choose the mime-type with the highest quality ('q') from a list of candidates.
- desired_matches(): Filter against a list of desired mime-types in the order the server prefers.
"""
def parse_mime_type(mime_type):
"""Carves up a mime-type and returns a tuple of the
(type, subtype, params) where 'params' is a dictionary
of all the parameters for the media range.
For example, the media range 'application/xhtml;q=0.5' would
get parsed into:
    ('application', 'xhtml', {'q': '0.5'})
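
    For example (doctest, derived from the parsing code below):

    >>> parse_mime_type('application/xhtml;q=0.5')
    ('application', 'xhtml', {'q': '0.5'})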
"""
type = mime_type.split(';')
type, plist = type[0], type[1:]
try:
type, subtype = type.split('/', 1)
except ValueError:
type, subtype = type.strip() or '*', '*'
else:
type = type.strip() or '*'
subtype = subtype.strip() or '*'
params = {}
for param in plist:
param = param.split('=', 1)
if len(param) == 2:
key, value = param[0].strip(), param[1].strip()
if key and value:
params[key] = value
return type, subtype, params
def parse_media_range(range):
"""Carves up a media range and returns a tuple of the
(type, subtype, params) where 'params' is a dictionary
of all the parameters for the media range.
For example, the media range 'application/*;q=0.5' would
get parsed into:
    ('application', '*', {'q': '0.5'})
In addition this function also guarantees that there
is a value for 'q' in the params dictionary, filling it
in with a proper default if necessary.
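
    Examples (doctests, derived from the behaviour described above):

    >>> parse_media_range('application/*;q=0.5')
    ('application', '*', {'q': '0.5'})
    >>> parse_media_range('text/html')[2]['q']
    '1'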
"""
type, subtype, params = parse_mime_type(range)
try:
if not 0 <= float(params['q']) <= 1:
raise ValueError
except (KeyError, ValueError):
params['q'] = '1'
return type, subtype, params
def fitness_and_quality_parsed(mime_type, parsed_ranges):
"""Find the best match for a given mime-type against
a list of media_ranges that have already been
parsed by parse_media_range(). Returns a tuple of
the fitness value and the value of the 'q' quality
parameter of the best match, or (-1, 0) if no match
was found. Just as for quality_parsed(), 'parsed_ranges'
must be a list of parsed media ranges."""
best_fitness, best_fit_q = -1, 0
target_type, target_subtype, target_params = parse_media_range(mime_type)
for type, subtype, params in parsed_ranges:
if (type == target_type
or type == '*' or target_type == '*') and (
subtype == target_subtype
or subtype == '*' or target_subtype == '*'):
fitness = 0
if type == target_type:
fitness += 100
if subtype == target_subtype:
fitness += 10
for key in target_params:
if key != 'q' and key in params:
if params[key] == target_params[key]:
fitness += 1
if fitness > best_fitness:
best_fitness = fitness
best_fit_q = params['q']
return best_fitness, float(best_fit_q)
def quality_parsed(mime_type, parsed_ranges):
"""Find the best match for a given mime-type against
a list of media_ranges that have already been
parsed by parse_media_range(). Returns the
'q' quality parameter of the best match, 0 if no
match was found. This function behaves the same as quality()
except that 'parsed_ranges' must be a list of
parsed media ranges."""
return fitness_and_quality_parsed(mime_type, parsed_ranges)[1]
def quality(mime_type, ranges):
"""Returns the quality 'q' of a mime-type when compared
against the media-ranges in ranges. For example:
>>> quality('text/html','text/*;q=0.3, text/html;q=0.7, text/html;level=1, text/html;level=2;q=0.4, */*;q=0.5')
0.7
"""
parsed_ranges = map(parse_media_range, ranges.split(','))
return quality_parsed(mime_type, parsed_ranges)
def best_match(supported, header):
"""Takes a list of supported mime-types and finds the best
match for all the media-ranges listed in header. In case of
ambiguity, whatever comes first in the list will be chosen.
The value of header must be a string that conforms to the format
of the HTTP Accept: header. The value of 'supported' is a list
of mime-types.
>>> best_match(['application/xbel+xml', 'text/xml'], 'text/*;q=0.5,*/*; q=0.1')
'text/xml'
"""
if not supported:
return ''
parsed_header = list(map(parse_media_range, header.split(',')))
best_type = max([
(fitness_and_quality_parsed(mime_type, parsed_header), -n)
for n, mime_type in enumerate(supported)])
return best_type[0][1] and supported[-best_type[1]] or ''
def desired_matches(desired, header):
"""Takes a list of desired mime-types in the order the server prefers to
send them regardless of the browsers preference.
Browsers (such as Firefox) technically want XML over HTML depending on how
one reads the specification. This function is provided for a server to
declare a set of desired mime-types it supports, and returns a subset of
the desired list in the same order should each one be Accepted by the
browser.
>>> desired_matches(['text/html', 'application/xml'], \
... 'text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png')
['text/html', 'application/xml']
>>> desired_matches(['text/html', 'application/xml'], 'application/xml,application/json')
['application/xml']
"""
parsed_ranges = list(map(parse_media_range, header.split(',')))
return [mimetype for mimetype in desired
if quality_parsed(mimetype, parsed_ranges)]
| apache-2.0 | 3,668,627,749,946,179,000 | 40.275 | 116 | 0.630073 | false |
SAP/PyRFC | setup.py | 1 | 7547 | # SPDX-FileCopyrightText: 2013 SAP SE Srdjan Boskovic <[email protected]>
#
# SPDX-License-Identifier: Apache-2.0
import inspect
import os
import sys
import subprocess
from codecs import open
from setuptools import setup, find_packages, Extension
MODULE_NAME = "pyrfc"
PYPIPACKAGE = "pyrfc"
HERE = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(HERE, "VERSION"), "rb", "utf-8") as version_file:
VERSION = version_file.read().strip()
with open(os.path.join(HERE, "README.md"), "rb", "utf-8") as readme_file:
LONG_DESCRIPTION = readme_file.read().strip()
BUILD_CYTHON = sys.platform.startswith("linux") or bool(os.getenv("PYRFC_BUILD_CYTHON"))
CMDCLASS = {}
if BUILD_CYTHON:
try:
from Cython.Distutils import build_ext
from Cython.Build import cythonize
except ImportError:
sys.exit(
"Cython not installed: https://cython.readthedocs.io/en/latest/src/quickstart/install.html"
)
CMDCLASS = {"build_ext": build_ext}
# Check if SAP NWRFC SDK configured
SAPNWRFC_HOME = os.environ.get("SAPNWRFC_HOME")
if not SAPNWRFC_HOME:
sys.exit(
"Environment variable SAPNWRFC_HOME not set.\nPlease specify this variable with the root directory of the SAP NWRFC Library."
)
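# Typical build sketch (illustrative path; both environment variables are read
# by this script):
#   export SAPNWRFC_HOME=/usr/local/sap/nwrfcsdk
#   PYRFC_BUILD_CYTHON=yes python setup.py bdist_wheel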
# https://launchpad.support.sap.com/#/notes/2573953
if sys.platform.startswith("linux"):
subprocess.call("./ci/utils/nwrfcsdk-version-linux.sh", shell=True)
LIBS = ["sapnwrfc", "sapucum"]
MACROS = [
("NDEBUG", None),
("_LARGEFILE_SOURCE", None),
("_CONSOLE", None),
("_FILE_OFFSET_BITS", 64),
("SAPonUNIX", None),
("SAPwithUNICODE", None),
("SAPwithTHREADS", None),
("SAPonLIN", None),
]
COMPILE_ARGS = [
"-Wall",
"-O2",
"-fexceptions",
"-funsigned-char",
"-fno-strict-aliasing",
"-Wall",
"-Wno-uninitialized",
"-Wno-deprecated-declarations",
"-Wno-unused-function",
"-Wcast-align",
"-fPIC",
"-pthread",
"-minline-all-stringops",
"-I{}/include".format(SAPNWRFC_HOME),
]
LINK_ARGS = ["-L{}/lib".format(SAPNWRFC_HOME)]
elif sys.platform.startswith("win"):
# https://docs.microsoft.com/en-us/cpp/build/reference/compiler-options-listed-alphabetically
# Python sources
PYTHONSOURCE = os.environ.get("PYTHONSOURCE")
if not PYTHONSOURCE:
PYTHONSOURCE = inspect.getfile(inspect).split("/inspect.py")[0]
# sys.exit('Environment variable PYTHONSOURCE not set. Please specify this variable with the root directory of the PYTHONSOURCE Library.')
subprocess.call("ci\\utils\\nwrfcsdk-version.bat", shell=True)
LIBS = ["sapnwrfc", "libsapucum"]
MACROS = [
("SAPonNT", None),
("_CRT_NON_CONFORMING_SWPRINTFS", None),
("_CRT_SECURE_NO_DEPRECATES", None),
("_CRT_NONSTDC_NO_DEPRECATE", None),
("_AFXDLL", None),
("WIN32", None),
("_WIN32_WINNT", "0x0502"),
("WIN64", None),
("_AMD64_", None),
("NDEBUG", None),
("SAPwithUNICODE", None),
("UNICODE", None),
("_UNICODE", None),
("SAPwithTHREADS", None),
("_ATL_ALLOW_CHAR_UNSIGNED", None),
("_LARGEFILE_SOURCE", None),
("_CONSOLE", None),
("SAP_PLATFORM_MAKENAME", "ntintel"),
]
COMPILE_ARGS = [
"-I{}\\include".format(SAPNWRFC_HOME),
"-I{}\\Include".format(PYTHONSOURCE),
"-I{}\\Include\\PC".format(PYTHONSOURCE),
"/EHs",
"/Gy",
"/J",
"/MD",
"/nologo",
"/W3",
"/Z7",
"/GL",
"/O2",
"/Oy-",
"/we4552",
"/we4700",
"/we4789",
]
LINK_ARGS = [
"-LIBPATH:{}\\lib".format(SAPNWRFC_HOME),
"-LIBPATH:{}\\PCbuild".format(PYTHONSOURCE),
"/NXCOMPAT",
"/STACK:0x2000000",
"/SWAPRUN:NET",
"/DEBUG",
"/OPT:REF",
"/DEBUGTYPE:CV,FIXUP",
"/MACHINE:amd64",
"/nologo",
"/LTCG",
]
elif sys.platform.startswith("darwin"):
subprocess.call("./ci/utils/nwrfcsdk-version-darwin.sh", shell=True)
MACOS_VERSION_MIN = "10.15"
LIBS = ["sapnwrfc", "sapucum"]
MACROS = [
("NDEBUG", None),
("_LARGEFILE_SOURCE", None),
("_CONSOLE", None),
("_FILE_OFFSET_BITS", 64),
("SAPonUNIX", None),
("SAPwithUNICODE", None),
("SAPwithTHREADS", None),
("SAPonDARW", None),
]
COMPILE_ARGS = [
"-Wall",
"-O2",
"-fexceptions",
"-funsigned-char",
"-fno-strict-aliasing",
"-Wno-uninitialized",
"-Wcast-align",
"-fPIC",
"-pthread",
"-minline-all-stringops",
"-isystem",
"-std=c++11",
"-mmacosx-version-min={}".format(MACOS_VERSION_MIN),
"-I{}/include".format(SAPNWRFC_HOME),
"-Wno-cast-align",
"-Wno-deprecated-declarations",
"-Wno-unused-function",
]
LINK_ARGS = [
"-L{}/lib".format(SAPNWRFC_HOME),
"-stdlib=libc++",
"-mmacosx-version-min={}".format(MACOS_VERSION_MIN),
# https://stackoverflow.com/questions/6638500/how-to-specify-rpath-in-a-makefile
"-Wl,-rpath,{}/lib".format(SAPNWRFC_HOME),
]
else:
sys.exit("Platform not supported: {}.".format(sys.platform))
# https://docs.python.org/2/distutils/apiref.html
PYRFC_EXT = Extension(
language="c++",
# https://stackoverflow.com/questions/8024805/cython-compiled-c-extension-importerror-dynamic-module-does-not-define-init-fu
name=f"{MODULE_NAME}.{MODULE_NAME}",
sources=[f"src/{MODULE_NAME}/_{MODULE_NAME}.pyx"],
define_macros=MACROS,
extra_compile_args=COMPILE_ARGS,
extra_link_args=LINK_ARGS,
libraries=LIBS,
)
# cf. http://docs.python.org/distutils/setupscript.html#additional-meta-data
setup(
name=PYPIPACKAGE,
version=VERSION,
description=("Python bindings for SAP NetWeaver RFC SDK"),
long_description=LONG_DESCRIPTION,
long_description_content_type="text/markdown",
download_url="https://github.com/SAP/PyRFC/tarball/master",
classifiers=[ # cf. http://pypi.python.org/pypi?%3Aaction=list_classifiers
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Natural Language :: English",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Cython",
"Programming Language :: Python",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
],
keywords=f"{MODULE_NAME} {PYPIPACKAGE} pyrfc sap rfc nwrfc sapnwrfc",
author="SAP SE",
url="https://github.com/SAP/pyrfc",
license="OSI Approved :: Apache Software License",
maintainer="Srdjan Boskovic",
maintainer_email="[email protected]",
packages=find_packages(where="src", exclude=["*.cpp", "*.pxd", "*.html"]),
package_dir={"": "src"},
# http://packages.python.org/distribute/setuptools.html#setting-the-zip-safe-flag
zip_safe=False,
install_requires=["setuptools"],
setup_requires=["setuptools-git"],
cmdclass=CMDCLASS,
ext_modules=cythonize(PYRFC_EXT, annotate=True, language_level="3")
if BUILD_CYTHON
else [PYRFC_EXT],
test_suite=MODULE_NAME,
)
| apache-2.0 | 3,132,951,688,388,854,000 | 31.670996 | 146 | 0.592951 | false |
Eficent/sale-workflow | sale_dropshipping/__openerp__.py | 34 | 2664 | # -*- coding: utf-8 -*-
#
#
# OpenERP, Open Source Management Solution
# Copyright (C) 20010 Akretion LDTA (<http://www.akretion.com>).
# @author Raphaël Valyi
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
{"name": "Sale Dropshipping",
"version": "1.1.1",
"author": "Akretion,Odoo Community Association (OCA)",
"website": "http://www.openerp.com",
"category": "Generic Modules/Purchase",
"depends": ["purchase",
"sale_stock"],
"description": """
Makes it easier to deal with purchases tied to known sale schemes, especially
the following cases:
1) normal
2) direct delivery (also called drop shipping)
3) direct invoice
4) direct delivery and direct invoice
See the attached diagram in images/purchase_to_sale.png to see the difference
between those flows.
In all those specific MTO (as opposed to MTS) cases,
it will link the sale order line and the purchase order lines together.
A good idea might be to use this module with the mrp_jit module
if you want MTO flows to be dealt with automatically
right at sale order validation.
You can also tell if product suppliers accept drop shipping or not.
If they accept it and the sale order line has more products than the virtual
quantity available, drop shipping is selected by default.
In the outgoing product list view, you can filter drop shipping pickings in
or out.
TODO: eventually it might be interesting to do a chained move from supplier to
internal location and from internal location to customer, instead of supplier
to customer directly.
This would enable moves to properly generate accounting moves
in the stock journal for better tracking.
""",
"init_xml": [],
"demo_xml": [],
"test": ['test/test_sale_policy_procurement.yml',
],
"update_xml": [
"purchase_view.xml",
"sale_view.xml",
"product_view.xml",
"stock_view.xml"],
'images': ['images/purchase_to_sale.png'],
'installable': False,
'certificate': None,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -2,053,913,907,080,414,500 | 35.479452 | 78 | 0.726249 | false |
daniaki/pyPPI | pyppi/predict/__init__.py | 1 | 18388 | """
This module contains exports the two main functions required to make
predictions on a list of UniProt edges or :class:`Interaction` instances,
and to parse an edge list of UniProt accessions into
:class:`Interaction` instances. There are also submodules containing
utility functions for plotting, loading datasets, models and so on.
"""
import logging
import numpy as np
import pandas as pd
from joblib import Parallel, delayed
from sqlalchemy.orm import scoped_session
from ..base.constants import SOURCE, TARGET
from ..base.io import load_classifier
from ..base.utilities import remove_duplicates
from ..database import db_session
from ..database.exceptions import ObjectAlreadyExists
from ..database.models import Protein, Interaction
from ..database.utilities import (
create_interaction, get_upid_to_protein_map,
get_source_taget_to_interactions_map
)
from ..data_mining.features import compute_interaction_features
from ..data_mining.uniprot import (
parse_record_into_protein, parallel_download,
recent_accession
)
from .utilities import VALID_SELECTION, interactions_to_Xy_format
from sklearn.feature_extraction.text import VectorizerMixin
from sklearn.multiclass import OneVsRestClassifier
from sklearn.pipeline import Pipeline
logger = logging.getLogger("pyppi")
__all__ = [
"plotting",
"utilities",
"get_or_create_interactions",
"classify_interactions"
]
def _check_classifier_and_selection(classifier=None, selection=None):
mlb = None
if classifier is None:
try:
clf, selection, mlb = load_classifier()
except IOError:
logger.exception(
"Default classifier not found. Run build_data.py script first."
)
raise
else:
clf = classifier
if selection is None or not selection:
raise ValueError(
"If supplying your own classifier, please specify a list "
"of the feature databases it was trained on. "
"Choose from {}.".format(', '.join(VALID_SELECTION))
)
for val in selection:
if val not in VALID_SELECTION:
raise ValueError(
"Invalid selection '{}'. Please select from: {}.".format(
val, ', '.join(VALID_SELECTION)
)
)
return clf, selection, mlb
def _update_missing_protein_map(ppis, session, verbose=False, n_jobs=1,
taxon_id=9606):
to_download = set()
uniprot_ids = [upid for tup in ppis for upid in tup if upid is not None]
# Keep taxon_id as None. Why? Someone has passed in a mouse protein
# by accident. The protein map will return a None entry because
# it doesn't have the same taxon_id. The next step will try to download
# the record for the mouse protein thinking it doesn't exist in the
# database. Keeping the taxon_id None will stop needless downloads.
# Instead just do a filter step afterwards to remove non-matching ids.
protein_map = get_upid_to_protein_map(uniprot_ids, taxon_id=None)
items = list(protein_map.items())
for (upid, protein) in items:
if protein is None:
to_download.add(upid)
# Download missing records (matching taxon_id if supplied), and create
# new protein instances if necessary. Two things to fix later on:
#
# (1) Records might be downloaded that are actually more recent than
# what's in the database. For now it's ok to ignore the newer one.
    # (2) Records may be downloaded that are already in the database
# (which *should* be caught by elif statement no.2).
#
# So really, there's only one thing to fix. TODO: Fix (1).
records = parallel_download(
to_download, n_jobs=n_jobs, verbose=verbose, taxon_id=taxon_id
)
assert len(records) == len(to_download)
for uniprot, record in zip(to_download, records):
if record is None:
protein_map[uniprot] = None
continue
# Check to see if the record downloaded matches a database
# entry with a different UniProt ID. Can happen if the
# accession provided are outdated.
most_recent = recent_accession(record)
existing_entry = Protein.get_by_uniprot_id(most_recent)
if existing_entry is not None:
protein_map[uniprot] = existing_entry
if verbose:
logger.info(
"UniProt '{}' already exists in the database "
"as '{}'".format(uniprot, most_recent)
)
else:
protein = parse_record_into_protein(record)
assert uniprot == protein.uniprot_id
protein.save(session, commit=False)
protein_map[uniprot] = protein
# Check for any non-matching taxon_ids
for upid, protein in protein_map.items():
if (protein is not None) and (taxon_id is not None) and \
(protein.taxon_id != taxon_id):
if verbose:
logger.warning(
"{} has a non-matching taxonomy id {}. "
"Expected {}. Adding associated interactions to "
"invalid.".format(upid, protein.taxon_id, taxon_id)
)
protein_map[upid] = None
try:
session.commit()
return protein_map
except:
session.rollback()
raise
def _create_missing_interactions(ppis, protein_map, session, verbose=False,
taxon_id=9606, n_jobs=1):
valid = []
invalid = []
id_ppis = []
id_protein_map = {p.id: p for _, p in protein_map.items() if p is not None}
for a, b in ppis:
if a not in protein_map:
invalid.append((a, b))
elif b not in protein_map:
invalid.append((a, b))
elif protein_map[a] is None:
invalid.append((a, b))
elif protein_map[b] is None:
invalid.append((a, b))
elif (taxon_id is not None) and (protein_map[a].taxon_id != taxon_id):
if verbose:
logger.warning(
"{} in ({},{}) has a non-matching taxonomy {}. "
"Expected {}. Adding to invalid.".format(
a, a, b, protein_map[a].taxon_id, taxon_id
))
invalid.append((a, b))
elif (taxon_id is not None) and (protein_map[b].taxon_id != taxon_id):
if verbose:
logger.warning(
"{} in ({},{}) has a non-matching taxonomy {}. "
"Expected {}. Adding to invalid.".format(
b, a, b, protein_map[b].taxon_id, taxon_id
))
invalid.append((a, b))
else:
id_ppis.append((protein_map.get(a).id, protein_map.get(b).id))
if id_ppis:
interactions = get_source_taget_to_interactions_map(id_ppis, taxon_id)
new_interactions = {k: v for k, v in interactions.items() if v is None}
feature_map = {}
# The new interaction will need to have their features computed
# Do this in parallel to speed things up.
if new_interactions:
if verbose:
logger.info("Computing features for new interactions.")
features = Parallel(n_jobs=n_jobs)(
delayed(compute_interaction_features)(
id_protein_map[source], id_protein_map[target]
)
for (source, target) in new_interactions
)
            for (source, target), feats in zip(new_interactions, features):
                feature_map[(source, target)] = feats
for (a, b), instance in interactions.items():
if instance is None:
source = id_protein_map[a]
target = id_protein_map[b]
class_kwargs = feature_map[(a, b)]
class_kwargs['is_training'] = False
class_kwargs['is_interactome'] = False
class_kwargs['is_holdout'] = False
interaction = create_interaction(
source, target, labels=None,
verbose=verbose, **class_kwargs
)
if verbose:
logger.info("Creating new interaction ({},{})".format(
source.uniprot_id, target.uniprot_id
))
valid.append(interaction)
else:
valid.append(instance)
try:
session.add_all(valid)
session.commit()
except:
session.rollback()
raise
return valid, invalid
def get_or_create_interactions(ppis, session=None, taxon_id=9606,
verbose=False, n_jobs=1):
"""Parse an iterable of interactions in valid and invalid.
Parse an iterable of either :py:class:`Interaction` instances or
edge `tuple` of UniProt string identifiers into :py:class:`Interaction`
instances that are valid. Invalid interactions are those that do not match
`taxon_id` if provided or those for which UniProt entries cannot be
created or downloaded. This method will treat (A, B) the same as
    (B, A) since, feature-wise, they are identical. New interactions will be
constructed with (A, B) sorted in increasing alpha-numeric order.
**Example:** (Q1234, P1234) will create an :py:class:`Interaction` instance
with `source` being P1234 and `target` being Q1234.
Parameters
----------
ppis : `list` or :py:class:`pd.DataFrame`
List of uppercase UniProt identifiers `tuples`, list of
:py:class:`Interaction` objects or a :py:class:`pd.DataFrame`
with columns `source` and `target` containing uppercase UniProt
identifiers.
taxon_id : `int`, Default: 9606
A `UniProt` taxonomy identifier to indicate which organism your
interactions are from. Interactions supplied with proteins not matching
this code will be treated as invalid. If None, the taxonomy identifier
        will be ignored. It is strongly recommended that you only make
predictions on interactions with the same identifier as those
used during training.
verbose : `boolean`, Default: True
Logs intermediate messages to your console. Useful for debugging.
    session : :py:func:`scoped_session`
A database session object that connects to a SQLite3 database file.
Only supply this if you have experience with `SQLAlchemy` and you
know what you are doing. Leave as `None` to use this package's
database.
n_jobs : `int`, Default: 1
Number of processes to use when downloading new records and
computing features for new interactions. This can provide a nice speed
boost for large input.
Returns
-------
`tuple` : (`list`, `list`, `dict`)
        A tuple where the first element is a list of :py:class:`Interaction`
        instances for each valid and unique interaction in `ppis`.
        The second element is a list of invalid interactions. The last
        element is a dictionary mapping input UniProt identifiers to the most
        recent UniProt identifiers. A change may occur when input proteins are mapped
to newer accessions by the UniProt mapping service. Provided for your
own record keeping.
"""
if session is None:
session = db_session
if isinstance(ppis, pd.DataFrame):
ppis = list(zip(ppis[SOURCE], ppis[TARGET]))
else:
ppis = list(ppis)
if isinstance(ppis[0], Interaction):
invalid = []
interactions = []
for entry in ppis:
if not isinstance(entry, Interaction):
invalid.append(entry)
elif entry.taxon_id != taxon_id:
invalid.append(entry)
else:
interactions.append(entry)
return interactions, invalid, {}
elif isinstance(ppis[0], (tuple, list)):
invalid = []
for ppi in ppis:
if not isinstance(ppi, (tuple, list)):
invalid.append(ppi)
if verbose:
logger.warning(
"Invalid: '{}' is not list or tuple.".format(ppi)
)
elif not len(ppi) == 2:
invalid.append(ppi)
if verbose:
logger.warning(
"Invalid: '{}' is not length 2.".format(ppi)
)
elif any(not isinstance(elem, str) for elem in ppi):
invalid.append(ppi)
if verbose:
logger.warning(
"Invalid: '{}' has non string members.".format(ppi)
)
for invalid_ppi in invalid:
ppis.remove(invalid_ppi)
unique_ppis = remove_duplicates(
(tuple(sorted([str(a), str(b)])) for (a, b) in ppis)
)
if len(unique_ppis) == 0:
return [], invalid, {} # They're all invalid
# Create new proteins not in the database and add them to the map.
# Using a map because it's quicker than asking the database for each
# instance. Could also do this with raw SQL to avoid having the whole
# database in memory, but this is fine for now.
protein_map = _update_missing_protein_map(
ppis=unique_ppis, session=session, n_jobs=n_jobs,
verbose=verbose, taxon_id=taxon_id
)
# Parse the interactions creating missing ones where required.
interactions, invalid = _create_missing_interactions(
ppis=unique_ppis, protein_map=protein_map, n_jobs=n_jobs,
session=session, verbose=verbose, taxon_id=taxon_id
)
# _update_missing_protein_map and _create_missing_interactions
# will add new instances to the current session. Commit these if
# requested making sure to rollback changes if there's an error.
try:
session.commit()
upid_new_upid = {
k: None if v is None else v.uniprot_id
for k, v in protein_map.items()
}
return interactions, invalid, upid_new_upid
except:
session.rollback()
raise
else:
t = type(ppis[0]).__name__
raise TypeError("Unexpected type %s at index 0 in ppis." % t)
def classify_interactions(ppis, proba=True, classifier=None, selection=None,
taxon_id=9606, verbose=True, session=None,
n_jobs=1):
"""Predict the labels of a list of interactions.
Parameters
----------
ppis : `list` or :py:class:`pd.DataFrame`
List of uppercase UniProt identifiers `tuples`, list of
:py:class:`Interaction` objects or a :py:class:`pd.DataFrame`
with columns `source` and `target` containing uppercase UniProt
identifiers.
proba : `boolean`, default: `True`
If true, predict label membership probabilities. Otherwise make
binary predictions.
classifier : `object`, Optional.
Classifier object used to make predictions. If None, loads the default
classifier if it exists throwing an `IOError` if it cannot be found.
The classifier must be a either a :py:class:`Pipeline` where the
first step subclasses :py:class:`VectorizerMixin`, or be a meta
classifier such as a :py:class:`OneVsRestClassifier` where the base
estimator is a :py:class:`Pipeline` as before.
selection : `list`, optional
list of strings indicating feature databases the classifier has been
trained on. Must be supplied if a custom classifier is supplied.
        If the wrong selection is supplied for your classifier, unreliable
predictions will be returned.
taxon_id : `int`, Default: 9606
A `UniProt` taxonomy identifier to indicate which organism your
interactions are from. Interactions supplied with proteins not matching
this code will be treated as invalid. If None, the taxonomy identifier
        will be ignored. It is strongly recommended that you only make
predictions on interactions with the same identifier as those
used during training.
verbose : `boolean`, Default: True
Logs intermediate messages to your console. Useful for debugging.
    session : :py:func:`scoped_session`
A database session object that connects to a SQLite3 database file.
Only supply this if you have experience with `SQLAlchemy` and you
know what you are doing. Leave as `None` to use this package's
database.
n_jobs : `int`, Default: 1
Number of processes to use when downloading new records and
computing features for new interactions. This can provide a nice speed
boost for large input.
Returns
-------
`tuple`
Tuple of array-like (n_ppis, n_labels), `list`, `dict`, `list` or None
        A tuple where the first element is a numpy array of predictions
        for each valid and unique interaction in `ppis`. The second element
        is a list of invalid PPIs. The third element is a dictionary mapping
        input UniProt identifiers to the most recent UniProt identifiers. A
change may occur when input proteins are mapped to newer accessions
by the UniProt mapping service. Provided for your own record keeping.
The final element is the ordering of labels of predictions if the
default classifier is used, otherwise it is `None`.
"""
clf, selection, mlb = _check_classifier_and_selection(
classifier, selection)
valid, invalid, mapping = get_or_create_interactions(
ppis, session, taxon_id, verbose, n_jobs
)
if mlb is not None:
labels = mlb.classes
else:
labels = None
# Make predictions with the saved/supplied model.
if len(valid):
X, _ = interactions_to_Xy_format(valid, selection)
if proba:
return clf.predict_proba(X), invalid, mapping, labels
else:
return clf.predict(X), invalid, mapping, labels
else:
return [], invalid, {}, labels
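# A minimal usage sketch (illustrative only): it assumes the bundled default
# classifier built by build_data.py is available, so `classifier` and
# `selection` are left as None and the label ordering comes from `mlb`.
def _example_classify_interactions():  # pragma: no cover
    predictions, invalid, mapping, labels = classify_interactions(
        [("P38398", "Q06609")], proba=True
    )
    if labels is not None:
        logger.info("Label order: {}".format(list(labels)))
    return predictions, invalid, mapping, labels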
| mit | 1,748,405,542,420,904,700 | 38.973913 | 79 | 0.605993 | false |
jameszhan/oh-my-zsh | plugins/shell-proxy/proxy.py | 26 | 1923 | #!/usr/bin/env python3
import os
import sys
from subprocess import check_output, list2cmdline
cwd = os.path.dirname(__file__)
ssh_agent = os.path.join(cwd, "ssh-agent.py")
user_proxy = os.environ.get("CONFIG_PROXY", os.path.expandvars("$HOME/.config/proxy"))
def get_http_proxy():
    # Prefer an explicit DEFAULT_PROXY environment variable; otherwise run
    # the user's executable proxy config script and use its output.
    default_proxy = os.environ.get("DEFAULT_PROXY")
    if default_proxy:
        return default_proxy
    if os.path.isfile(user_proxy):
        return check_output(user_proxy).decode("utf-8").strip()
    raise Exception("Proxy configuration not found")
def make_proxies(url: str):
proxies = {"%s_PROXY" % _: url for _ in ("HTTP", "HTTPS", "FTP", "RSYNC", "ALL")}
proxies.update({name.lower(): value for (name, value) in proxies.items()})
proxies["GIT_SSH"] = ssh_agent
return proxies
def merge(mapping: dict):
return ("%s=%s" % _ for _ in mapping.items())
class CommandSet:
proxies = make_proxies(get_http_proxy())
aliases = {
_: "env __SSH_PROGRAM_NAME__=%s %s" % (_, ssh_agent)
for _ in ("ssh", "sftp", "scp", "slogin", "ssh-copy-id")
}
def enable(self):
cmdline("export", *merge(self.proxies))
cmdline("alias", *merge(self.aliases))
def disable(self):
cmdline("unset", *self.proxies.keys())
cmdline("unalias", *self.aliases.keys())
def status(self):
proxies = (
"%11s = %s" % (name, os.environ[name])
for name in self.proxies.keys()
if name in os.environ
)
for _ in proxies:
cmdline("echo", _)
def usage(self):
cmdline("echo", "usage: proxy {enable,disable,status}")
self.status()
def cmdline(*items):
print(list2cmdline(items))
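# Example output (hypothetical proxy URL): running `proxy.py enable` prints
# shell statements for the calling shell to eval, roughly:
#   export HTTP_PROXY=http://127.0.0.1:1080 http_proxy=http://127.0.0.1:1080 ...
#   alias ssh="env __SSH_PROGRAM_NAME__=ssh /path/to/ssh-agent.py" ...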
def main():
command = CommandSet()
if len(sys.argv) == 1:
command.usage()
sys.exit(-1)
getattr(command, sys.argv[1], command.usage)()
if __name__ == "__main__":
main()
| mit | -3,728,528,678,885,157,000 | 25.342466 | 86 | 0.586063 | false |
vicnet/weboob | weboob/applications/qbooblyrics/main_window.py | 1 | 11785 | # -*- coding: utf-8 -*-
# Copyright(C) 2016 Julien Veyssier
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import os
from PyQt5.QtCore import pyqtSlot as Slot, Qt
from PyQt5.QtGui import QKeySequence
from PyQt5.QtWidgets import QApplication, QFrame, QShortcut
from weboob.capabilities.lyrics import CapLyrics
from weboob.tools.application.qt5 import QtMainWindow, QtDo
from weboob.tools.application.qt5.backendcfg import BackendCfg
from weboob.tools.application.qt5.models import BackendListModel
from weboob.tools.application.qt5.search_history import HistoryCompleter
from weboob.applications.qbooblyrics.ui.main_window_ui import Ui_MainWindow
from weboob.applications.qbooblyrics.ui.result_ui import Ui_Result
from .minisonglyrics import MiniSonglyrics
from .songlyrics import Songlyrics
MAX_TAB_TEXT_LENGTH = 30
class Result(QFrame):
def __init__(self, weboob, app, parent=None):
super(Result, self).__init__(parent)
self.ui = Ui_Result()
self.ui.setupUi(self)
self.parent = parent
self.weboob = weboob
self.app = app
self.minis = []
self.current_info_widget = None
# action history is composed by the last action and the action list
# An action is a function, a list of arguments and a description string
self.action_history = {'last_action': None, 'action_list': []}
self.ui.backButton.clicked.connect(self.doBack)
self.ui.backButton.setShortcut(QKeySequence('Alt+Left'))
self.ui.backButton.hide()
def doAction(self, description, fun, args):
''' Call fun with args as arguments
and save it in the action history
'''
self.ui.currentActionLabel.setText(description)
if self.action_history['last_action'] is not None:
self.action_history['action_list'].append(self.action_history['last_action'])
self.ui.backButton.setToolTip('%s (Alt+Left)'%self.action_history['last_action']['description'])
self.ui.backButton.show()
self.action_history['last_action'] = {'function': fun, 'args': args, 'description': description}
# manage tab text
mytabindex = self.parent.ui.resultsTab.indexOf(self)
tabtxt = description
if len(tabtxt) > MAX_TAB_TEXT_LENGTH:
tabtxt = '%s...'%tabtxt[:MAX_TAB_TEXT_LENGTH]
self.parent.ui.resultsTab.setTabText(mytabindex, tabtxt)
self.parent.ui.resultsTab.setTabToolTip(mytabindex, description)
return fun(*args)
@Slot()
def doBack(self):
''' Go back in action history
Basically call previous function and update history
'''
if len(self.action_history['action_list']) > 0:
todo = self.action_history['action_list'].pop()
self.ui.currentActionLabel.setText(todo['description'])
self.action_history['last_action'] = todo
if len(self.action_history['action_list']) == 0:
self.ui.backButton.hide()
else:
self.ui.backButton.setToolTip(self.action_history['action_list'][-1]['description'])
# manage tab text
mytabindex = self.parent.ui.resultsTab.indexOf(self)
tabtxt = todo['description']
if len(tabtxt) > MAX_TAB_TEXT_LENGTH:
tabtxt = '%s...'%tabtxt[:MAX_TAB_TEXT_LENGTH]
self.parent.ui.resultsTab.setTabText(mytabindex, tabtxt)
self.parent.ui.resultsTab.setTabToolTip(mytabindex, todo['description'])
return todo['function'](*todo['args'])
def processFinished(self):
self.parent.ui.searchEdit.setEnabled(True)
QApplication.restoreOverrideCursor()
self.process = None
self.parent.ui.stopButton.hide()
@Slot()
def stopProcess(self):
if self.process is not None:
self.process.stop()
def searchSonglyrics(self,pattern):
if not pattern:
return
self.doAction(u'Search lyrics "%s"' % pattern, self.searchSonglyricsAction, [pattern])
def searchSonglyricsAction(self, pattern):
self.ui.stackedWidget.setCurrentWidget(self.ui.list_page)
for mini in self.minis:
self.ui.list_content.layout().removeWidget(mini)
mini.hide()
mini.deleteLater()
self.minis = []
self.parent.ui.searchEdit.setEnabled(False)
QApplication.setOverrideCursor(Qt.WaitCursor)
backend_name = self.parent.ui.backendEdit.itemData(self.parent.ui.backendEdit.currentIndex())
self.process = QtDo(self.weboob, self.addSonglyrics, fb=self.processFinished)
        self.process.do(self.app._do_complete, self.parent.getCount(), ('title',), 'iter_lyrics',
                        self.parent.ui.typeCombo.currentText(), pattern, backends=backend_name, caps=CapLyrics)
self.parent.ui.stopButton.show()
def addSonglyrics(self, songlyrics):
minisonglyrics = MiniSonglyrics(self.weboob, self.weboob[songlyrics.backend], songlyrics, self)
self.ui.list_content.layout().insertWidget(self.ui.list_content.layout().count()-1,minisonglyrics)
self.minis.append(minisonglyrics)
def displaySonglyrics(self, songlyrics, backend):
self.ui.stackedWidget.setCurrentWidget(self.ui.info_page)
if self.current_info_widget is not None:
self.ui.info_content.layout().removeWidget(self.current_info_widget)
self.current_info_widget.hide()
self.current_info_widget.deleteLater()
wsonglyrics = Songlyrics(songlyrics, backend, self)
self.ui.info_content.layout().addWidget(wsonglyrics)
self.current_info_widget = wsonglyrics
QApplication.restoreOverrideCursor()
def searchId(self, id):
QApplication.setOverrideCursor(Qt.WaitCursor)
if '@' in id:
backend_name = id.split('@')[1]
id = id.split('@')[0]
else:
backend_name = None
for backend in self.weboob.iter_backends():
if (backend_name and backend.name == backend_name) or not backend_name:
songlyrics = backend.get_lyrics(id)
if songlyrics:
self.doAction('Lyrics of "%s" (%s)' % (songlyrics.title, songlyrics.artist), self.displaySonglyrics, [songlyrics, backend])
QApplication.restoreOverrideCursor()
class MainWindow(QtMainWindow):
def __init__(self, config, weboob, app, parent=None):
super(MainWindow, self).__init__(parent)
self.ui = Ui_MainWindow()
self.ui.setupUi(self)
self.config = config
self.weboob = weboob
self.app = app
self.minis = []
self.current_info_widget = None
# search history is a list of patterns which have been searched
history_path = os.path.join(self.weboob.workdir, 'qbooblyrics_history')
qc = HistoryCompleter(history_path, self)
qc.load()
qc.setCaseSensitivity(Qt.CaseInsensitive)
self.ui.searchEdit.setCompleter(qc)
self.ui.typeCombo.addItem('song')
self.ui.typeCombo.addItem('artist')
self.ui.searchEdit.returnPressed.connect(self.search)
self.ui.idEdit.returnPressed.connect(self.searchId)
count = self.config.get('settings', 'maxresultsnumber')
self.ui.countSpin.setValue(int(count))
showT = self.config.get('settings', 'showthumbnails')
self.ui.showTCheck.setChecked(showT == '1')
self.ui.stopButton.hide()
self.ui.actionBackends.triggered.connect(self.backendsConfig)
q = QShortcut(QKeySequence(Qt.CTRL + Qt.Key_Q), self)
q.activated.connect(self.close)
n = QShortcut(QKeySequence(Qt.CTRL + Qt.Key_PageDown), self)
n.activated.connect(self.nextTab)
p = QShortcut(QKeySequence(Qt.CTRL + Qt.Key_PageUp), self)
p.activated.connect(self.prevTab)
w = QShortcut(QKeySequence(Qt.CTRL + Qt.Key_W), self)
w.activated.connect(self.closeCurrentTab)
l = QShortcut(QKeySequence(Qt.CTRL + Qt.Key_L), self)
l.activated.connect(self.ui.searchEdit.setFocus)
l.activated.connect(self.ui.searchEdit.selectAll)
self.ui.resultsTab.tabCloseRequested.connect(self.closeTab)
self.loadBackendsList()
if self.ui.backendEdit.count() == 0:
self.backendsConfig()
@Slot()
def backendsConfig(self):
bckndcfg = BackendCfg(self.weboob, (CapLyrics, ), self)
if bckndcfg.run():
self.loadBackendsList()
def loadBackendsList(self):
model = BackendListModel(self.weboob)
model.addBackends()
self.ui.backendEdit.setModel(model)
current_backend = self.config.get('settings', 'backend')
idx = self.ui.backendEdit.findData(current_backend)
if idx >= 0:
self.ui.backendEdit.setCurrentIndex(idx)
if self.ui.backendEdit.count() == 0:
self.ui.searchEdit.setEnabled(False)
else:
self.ui.searchEdit.setEnabled(True)
def getCount(self):
num = self.ui.countSpin.value()
if num == 0:
return None
else:
return num
@Slot(int)
def closeTab(self, index):
        if self.ui.resultsTab.widget(index) is not None:
self.ui.resultsTab.removeTab(index)
@Slot()
def closeCurrentTab(self):
self.closeTab(self.ui.resultsTab.currentIndex())
@Slot()
def prevTab(self):
index = self.ui.resultsTab.currentIndex() - 1
size = self.ui.resultsTab.count()
if size != 0:
self.ui.resultsTab.setCurrentIndex(index % size)
@Slot()
def nextTab(self):
index = self.ui.resultsTab.currentIndex() + 1
size = self.ui.resultsTab.count()
if size != 0:
self.ui.resultsTab.setCurrentIndex(index % size)
def newTab(self, txt, backend, songlyrics=None):
id = ''
if songlyrics is not None:
id = songlyrics.id
new_res = Result(self.weboob, self.app, self)
self.ui.resultsTab.addTab(new_res, txt)
new_res.searchId('%s@%s'%(id,backend.NAME))
self.ui.stopButton.clicked.connect(new_res.stopProcess)
@Slot()
def search(self):
pattern = self.ui.searchEdit.text()
self.ui.searchEdit.completer().addString(pattern)
new_res = Result(self.weboob, self.app, self)
self.ui.resultsTab.addTab(new_res, pattern)
self.ui.resultsTab.setCurrentWidget(new_res)
new_res.searchSonglyrics(pattern)
@Slot()
def searchId(self):
id = self.ui.idEdit.text()
new_res = Result(self.weboob, self.app, self)
self.ui.resultsTab.addTab(new_res, id)
self.ui.resultsTab.setCurrentWidget(new_res)
new_res.searchId(id)
def closeEvent(self, ev):
self.config.set('settings', 'backend', self.ui.backendEdit.itemData(
self.ui.backendEdit.currentIndex()))
self.ui.searchEdit.completer().save()
self.config.set('settings', 'maxresultsnumber', self.ui.countSpin.value())
self.config.save()
ev.accept()
| lgpl-3.0 | -8,720,971,795,465,523,000 | 37.639344 | 143 | 0.648027 | false |
googleapis/python-aiplatform | samples/snippets/create_data_labeling_job_images_sample_test.py | 1 | 1874 | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from uuid import uuid4
import pytest
import create_data_labeling_job_images_sample
import helpers
PROJECT_ID = os.getenv("BUILD_SPECIFIC_GCLOUD_PROJECT")
API_ENDPOINT = os.getenv("DATA_LABELING_API_ENDPOINT")
LOCATION = "us-central1"
DATASET_ID = "1905673553261363200"
DISPLAY_NAME = f"temp_create_data_labeling_job_test_{uuid4()}"
INSTRUCTIONS_GCS_URI = (
"gs://ucaip-sample-resources/images/datalabeling_instructions.pdf"
)
ANNOTATION_SPEC = "daisy"
@pytest.fixture(scope="function", autouse=True)
def teardown(teardown_data_labeling_job):
yield
# Creating a data labeling job for images
@pytest.mark.skip(reason="Flaky job state.")
def test_ucaip_generated_create_data_labeling_job_sample(capsys, shared_state):
dataset_name = f"projects/{PROJECT_ID}/locations/{LOCATION}/datasets/{DATASET_ID}"
create_data_labeling_job_images_sample.create_data_labeling_job_images_sample(
project=PROJECT_ID,
display_name=DISPLAY_NAME,
instruction_uri=INSTRUCTIONS_GCS_URI,
dataset=dataset_name,
annotation_spec=ANNOTATION_SPEC,
api_endpoint=API_ENDPOINT,
)
out, _ = capsys.readouterr()
    # Save resource name of the newly created data labeling job
shared_state["data_labeling_job_name"] = helpers.get_name(out)
| apache-2.0 | 3,811,565,020,640,605,700 | 31.877193 | 86 | 0.739594 | false |
davidvon/pipa-pay-server | site-packages/flask_restful/utils/cors.py | 2 | 1696 | from datetime import timedelta
from flask import make_response, request, current_app
from functools import update_wrapper
def crossdomain(origin=None, methods=None, headers=None,
max_age=21600, attach_to_all=True,
automatic_options=True):
"""
http://flask.pocoo.org/snippets/56/
"""
if methods is not None:
methods = ', '.join(sorted(x.upper() for x in methods))
if headers is not None and not isinstance(headers, str):
headers = ', '.join(x.upper() for x in headers)
if not isinstance(origin, str):
origin = ', '.join(origin)
if isinstance(max_age, timedelta):
max_age = max_age.total_seconds()
def get_methods():
if methods is not None:
return methods
options_resp = current_app.make_default_options_response()
return options_resp.headers['allow']
def decorator(f):
def wrapped_function(*args, **kwargs):
if automatic_options and request.method == 'OPTIONS':
resp = current_app.make_default_options_response()
else:
resp = make_response(f(*args, **kwargs))
if not attach_to_all and request.method != 'OPTIONS':
return resp
h = resp.headers
h['Access-Control-Allow-Origin'] = origin
h['Access-Control-Allow-Methods'] = get_methods()
h['Access-Control-Max-Age'] = str(max_age)
if headers is not None:
h['Access-Control-Allow-Headers'] = headers
return resp
f.provide_automatic_options = False
return update_wrapper(wrapped_function, f)
return decorator
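# A minimal usage sketch (the app and route below are illustrative, not part
# of this module):
#
#     from flask import Flask
#     app = Flask(__name__)
#
#     @app.route('/ping', methods=['GET', 'OPTIONS'])
#     @crossdomain(origin='*', headers=['Content-Type'])
#     def ping():
#         return 'pong'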
| apache-2.0 | -6,062,920,469,910,089,000 | 35.085106 | 66 | 0.596108 | false |
r-kan/BUFFY | util/config.py | 1 | 9794 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import re
import os
import sys
from logging import debug, info
from media.s3 import S3_HEAD
from util.global_def import DIR_DELIM, warning, error
def show_list(list_entry, name):
if not list_entry:
return
if 1 == len(list_entry):
debug("%s: %s" % (name, list_entry[0]))
else:
debug("%s:" % name)
for item in list_entry:
debug("\t%s" % item)
ROOT_KEY = "root"
FILE_KEY = "file"
EXT_KEY = "ext"
RE_KEY = "re"
DYNAMIC_KEY = "dyn"
EXCLUDE_KEY = "exclude"
class Source(object):
def __init__(self, data, root="", is_exclude=False):
self.is_exclude = is_exclude
if not type(data) in [str, dict]:
error("[config] entry 'src' shall contain 'str' or 'dict' value instead of %s, program exit..."
% type(data))
sys.exit()
simple_spec = type(data) is str
self.root = data[ROOT_KEY] if not simple_spec and ROOT_KEY in data else root
assert type(self.root) is str
# file: specify files by give accurate filename/dirname
file_or_dir = data if simple_spec else data[FILE_KEY] if FILE_KEY in data else None
assert not file_or_dir or type(file_or_dir) in [str, list]
self.file_or_dir = file_or_dir if not file_or_dir or type(file_or_dir) is list else [file_or_dir]
# ext: specify files by extension name
ext = data[EXT_KEY] if not simple_spec and EXT_KEY in data else None
assert not ext or type(ext) in [str, list]
self.ext = ext if not ext or type(ext) is list else [ext]
# re: specify files by regular expression matching
re_data = data[RE_KEY] if not simple_spec and RE_KEY in data else None
assert not re_data or type(re_data) in [str, list]
self.re = re_data if not re_data or type(re_data) is list else [re_data]
# dyn: specify files by re + custom code snippets
dynamic = data[DYNAMIC_KEY] if not simple_spec and DYNAMIC_KEY in data else None
assert not dynamic or type(dynamic) is list
# dynamic shall be either a dyn-item(re-str, import-str, eval-str) list, or a list of dyn-items
assert not dynamic or 0 == len(dynamic) or \
(type(dynamic[0]) is list or (type(dynamic[0]) is str and len(dynamic) == 3))
self.dynamic = dynamic if not dynamic or type(dynamic[0]) is list else [dynamic]
assert self.file_or_dir or self.ext or self.re or self.dynamic
if "" == self.root and self.file_or_dir and len(self.file_or_dir) == 1:
dirname, basename = os.path.split(self.file_or_dir[0])
self.root = dirname
if len(basename):
self.file_or_dir = [basename]
else:
self.file_or_dir = None
self.re = [".*"]
if "" is not self.root and not self.is_exclude:
debug("root: %s" % self.root)
self.show_sources()
if len(self.root) > 0 and self.root[-1] != DIR_DELIM:
self.root += DIR_DELIM
# exclude: sources that need not backup (kept by a child 'Source' instance)
assert not self.is_exclude or EXCLUDE_KEY not in data # nested 'exclude' entry is not supported
self.exclude = Source(data[EXCLUDE_KEY], self.root, True) if EXCLUDE_KEY in data else None
def show_sources(self):
prefix = "exclude " if self.is_exclude else ""
show_list(self.file_or_dir, prefix + "file")
show_list(self.ext, prefix + "ext")
show_list(self.re, prefix + "re")
show_list(self.dynamic, prefix + "dyn")
@staticmethod
def get_dir_files(dirname):
assert os.path.isdir(dirname)
ret = []
for root, _, files in os.walk(dirname):
assert len(root) >= 1
if root[-1] != DIR_DELIM:
root += DIR_DELIM
ret += [root + file for file in files]
return ret
@staticmethod
def get_files(file_or_dir):
return Source.get_dir_files(file_or_dir) if os.path.isdir(file_or_dir) else [file_or_dir]
@staticmethod
def get_re_files(root, raw_patterns):
patterns = [re.compile(root + item) for item in raw_patterns]
sources = []
for root, dirs, files in os.walk(root):
assert len(root) >= 1
if root[-1] != DIR_DELIM:
root += DIR_DELIM
for src in (files + dirs):
file_or_dir = root + src
for pattern in patterns:
if re.match(pattern, file_or_dir):
sources += Source.get_files(file_or_dir)
return sources
def get_sources(self):
sources = []
if self.file_or_dir:
for file_or_dir in self.file_or_dir:
src = self.root + file_or_dir
if not os.path.exists(src):
warning("[config] the specified source '%s' does not exist" % src)
continue
sources += Source.get_files(src)
if self.ext or self.re or self.dynamic:
assert "" != self.root
if self.ext:
for root, _, files in os.walk(self.root):
assert len(root) >= 1
if root[-1] != DIR_DELIM:
root += DIR_DELIM
for file in files:
basename, ext = os.path.splitext(file)
if ext.replace(".", "") in self.ext:
sources.append(root + file)
if self.re:
sources += Source.get_re_files(self.root, self.re)
if self.dynamic:
patterns = []
for dyn_item in self.dynamic:
[re_str, import_str, eval_str] = dyn_item
dynamic_alias = "$dyn$"
if dynamic_alias not in re_str:
warning("[config] '%s' does not appear in '%s', dynamic filename mechanism will not apply"
% (dynamic_alias, re_str))
if "" != import_str:
exec("import %s" % import_str)
dyn_str = eval(eval_str)
patterns.append(re_str.replace(dynamic_alias, dyn_str))
sources += Source.get_re_files(self.root, patterns)
exclude_sources = self.exclude.get_sources() if self.exclude else []
# 'set' to remove duplication
return sorted([src for src in list(set(sources)) if src not in exclude_sources and not os.path.islink(src)])
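# A hypothetical 'src' entry illustrating the schema parsed above (paths and
# patterns are illustrative, not shipped defaults):
#
#   "src": {
#       "root": "/home/user",
#       "file": ["notes.txt", "projects"],
#       "ext": ["py", "json"],
#       "re": ["docs/.*\\.md"],
#       "dyn": [["backup-$dyn$\\.log", "datetime",
#                "datetime.date.today().isoformat()"]],
#       "exclude": {"ext": ["pyc"]}
#   }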
def get_bool_value(data, key, default_value):
    # 'data' may be None or a plain string (e.g. a bare path); only a dict
    # can carry boolean flags such as "compress" or "detail".
    return True if isinstance(data, dict) and key in data and data[key] in ["yes", "y"] else default_value
PATH_KEY = "path"
DETAIL_KEY = "detail"
DEFAULT_DETAIL = False
class Report(object):
def __init__(self, data):
self.path = None
self.detail = DEFAULT_DETAIL
if not data:
return
if not type(data) in [str, dict]:
error("[config] entry 'rpt' shall contain 'str' or 'dict' value instead of %s" % type(data))
return
path = data[PATH_KEY] if type(data) is not str and PATH_KEY in data else data
if "" == path:
return
assert type(path) is str and "" != path
if 0 == path.find(S3_HEAD):
info("[config] report to aws s3 (%s) is not supported" % path)
return
if path[-1] != DIR_DELIM:
path += DIR_DELIM
self.path = path
self.detail = get_bool_value(data, DETAIL_KEY, self.detail)
debug("report path: %s" % self.path)
debug("report detail: %s" % ("yes" if self.detail else "no"))
NAME_KEY = "name"
DST_KEY = "dst" # destination
SRC_KEY = "src" # source
RPT_KEY = "rpt" # report
COMPRESS_KEY = "compress"
ENCODING_KEY = "encoding"
DEFAULT_COMPRESS = False
DEFAULT_ENCODING = False
class Config(object):
def __init__(self, config_file=None, src=None, dst=None, name=None, compress=None, encoding=None, rpt=None):
data = None
if config_file:
if not os.path.exists(config_file):
error("[BUFFY] config file \"%s\" does not exist, program exit..." % config_file)
sys.exit()
info("[BUFFY] reading config file \"%s\"..." % config_file)
with open(config_file) as config_fp:
import json
data = json.load(config_fp)
if not dst and DST_KEY not in data:
error("[config] no \'dst\' specified, program exit...")
sys.exit()
dst = data[DST_KEY] if not dst else dst
        if not type(dst) in [str, list]:
            error("[config] entry 'dst' shall contain 'str' or 'list' value instead of %s, program exit..."
                  % type(dst))
sys.exit()
if not src and SRC_KEY not in data:
error("[config] no \'src\' specified, program exit...")
sys.exit()
self.dst = [dst] if type(dst) is str else dst
self.name = name if name else data[NAME_KEY] if data and NAME_KEY in data else ""
assert type(self.name) is str
self.compress = compress if None is not compress else get_bool_value(data, COMPRESS_KEY, DEFAULT_COMPRESS)
self.encoding = encoding if None is not encoding else get_bool_value(data, ENCODING_KEY, DEFAULT_ENCODING)
debug("------------------------")
if "" != self.name:
debug("name: %s" % self.name)
show_list(self.dst, "dst")
self.src = Source(src if src else data[SRC_KEY])
debug("compress: %s" % ("yes" if self.compress else "no"))
debug("encoding: %s" % ("yes" if self.encoding else "no"))
self.rpt = Report(rpt if rpt else data[RPT_KEY] if data and RPT_KEY in data else None)
debug("------------------------")
| mit | 7,461,023,288,640,396,000 | 37.865079 | 116 | 0.559935 | false |
Makki1/old-svn | avr/sketchbook/GiraRM_Debug/freebus/freebus_ets/software/freebus-ets/src/build/XML/FB_XMLConverter.py | 1 | 9568 | #!/usr/bin/
#-*- coding: iso-8859-1 -*-
#===============================================================================
# __________ ________________ __ _______
# / ____/ __ \/ ____/ ____/ __ )/ / / / ___/
# / /_ / /_/ / __/ / __/ / __ / / / /\__ \
# / __/ / _, _/ /___/ /___/ /_/ / /_/ /___/ /
# /_/ /_/ |_/_____/_____/_____/\____//____/
#
#Source File: FB_XMLConverter.py
#Version: V0.1 , 04.11.2007
#Author: Jerome Leisner
#email: [email protected]
#===============================================================================
import os
import codecs
import locale
from re import *
##This class contains all methods to convert an original *.vd_ file to its
##corresponding XML file.
class FB_XMLConverter:
data=""
read_caption=False
read_record=False
read_table=False
read_value=False
atmp = []
recordname=""
captions = []
val_count=0
__LogObj = None
lineCounter = 0
ByteCount = 0
## The constructor.
#
# @param self : The object pointer
    #  @param InFile : path and filename of the input file (*.vd_)
    #  @param OutFile : path and filename of the output file (*.xml)
    #  @param LogObj : an object of the logging class
def __init__(self,InFile,OutFile,LogObj):
self.vd_datei = InFile
self.xml_datei = OutFile
self.__LogObj = LogObj
    ## Base method for the *.vd_ file to FB-XML file conversion
#
# @param self The object pointer.
def convertToXML(self):
parse=False
line=""
next=""
OutFileObj = None
InFileObj = None
try:
if(self.vd_datei != ""):
OutFileObj = open(self.xml_datei,"w")
InFileObj = open(self.vd_datei,"r")
#LOG File
                self.__LogObj.NewLog(self.vd_datei+" opened",0)
                self.__LogObj.NewLog(self.xml_datei+" opened",0)
#write first two lines
OutFileObj.write("<?xml version=\"1.0\" encoding=\"ISO-8859-1\" ?>\n")
OutFileObj.write("<eib-products>\n")
                #read first line and check the signature
line = InFileObj.readline()
line = line.split("\n")
if(line[0] != "EX-IM"):
#LOG File
self.__LogObj.NewLog("Falsches Format der *vd_ Datei gefunden",1)
return
line = InFileObj.readline()
line = line.split("\n")
#normal parse-run
while(True):
next = InFileObj.readline()
next = next.split("\n")
if(line[0] == "XXX"):
self.parseLine(line[0],OutFileObj)
                    #stop the main loop at the final "XXX" record
break
#parse Symbol-Information
re = compile('^\\\\')
while(re.match(next[0])):
                    #strip the leading "\\" continuation marker and append the line
line[0] = line[0] + next[0].strip("\\\\")
next = InFileObj.readline()
next = next.split("\n")
if(next[0] == ""):
if(line[0] != ""):
self.parseLine(line[0],OutFileObj)
                    #parse lines after the symbol information
self.parseLine(line[0],OutFileObj);
line=next
OutFileObj.flush()
OutFileObj.close()
InFileObj.close()
#LOG File
            self.__LogObj.NewLog(self.vd_datei+" closed",0)
            self.__LogObj.NewLog(self.xml_datei+" closed",0)
except IOError:
#LOG File
self.__LogObj.NewLog("Fehler beim Öffnen der vd_ Datei",2)
if(OutFileObj != None):
OutFileObj.close()
if(InFileObj != None):
InFileObj.close()
#LOG File
            self.__LogObj.NewLog(self.vd_datei+" closed",0)
            self.__LogObj.NewLog(self.xml_datei+" closed",0)
except:
#LOG File
self.__LogObj.NewLog("Allgemeiner Fehler beim Parsen der vd_ Datei" \
+ " in Zeile: " + str(self.lineCounter) \
+" " + line[0],2)
if(OutFileObj != None):
OutFileObj.close()
#LOG File
                self.__LogObj.NewLog(self.xml_datei+" closed",0)
elif(InFileObj != None):
InFileObj.close()
#LOG File
                self.__LogObj.NewLog(self.vd_datei+" closed",0)
def parseLine(self,_line,_OutFileObj):
tmp=""
try:
self.lineCounter = self.lineCounter + 1
#line empty ?
if(len(_line) > 0):
                #load the first character
start=_line[0:1]
#Start with "T " ? ->
if(_line[0:2] == "T "):
start=' '
else:
_line=""
if(_line[0:2] == "N " ):
                    #path of the vd_ file
leer = 0
elif(_line[0:2] == "K " ):
                    #manufacturer tool, comment
leer = 0
elif(_line[0:2] == "D " ):
#Date of file
leer = 0
elif(_line[0:2] == "V " ):
#Version
leer = 0
elif(_line[0:2] == "H " ):
                    #type of device
leer = 0
elif(_line[0:1] == "-" ):
                    #end of block
self.read_caption=False #C-Block
self.read_record=False #R-Block
self.read_table=False #T-Block
self.read_value=False #Values
#start with "T " followed by a number -> new table
elif(_line[0:2] == "T " and _line[2:3].isdigit() == True):
linedata = _line.split()
try:
#extract Table-Number
parseInt = int(linedata[1])
#delete Caption-Array
del self.captions[0:len(self.captions)]
#found new Table
self.read_table=True
except TypeError:
# not a number
self.read_value=True
self.read_record=False
#start with "C " followed by a number -> new Caption
elif(_line[0:1] == "C" and _line[1:2].isdigit() == True):
if(self.read_table == True and self.read_caption == False):
self.read_caption=True
#start with "R " followed by a number -> new Record
elif(_line[0:2] == "R " and _line[2:3].isdigit() == True):
self.read_value=False
self.read_caption=False
self.read_record=True
self.read_table=False
self.__ValueCount = len(self.captions)
self.val_count = 0
#last entry
elif(_line[0:3] == "XXX"):
_OutFileObj.write("\t</"+self.recordname+">")
_OutFileObj.write("</eib-products>")
_OutFileObj.flush()
return
#else: read Values
else:
self.read_value=True
self.read_record=False
#Do we read a table ?
if(self.read_table == True):
tunix=0
#Do we read a Record ?
if(self.read_record == True):
self.atmp=_line.split(" ")
self.val_count=0
if(self.recordname != ""):
#record end
_OutFileObj.write("\t</"+self.recordname+">\n")
                #e.g.: R 1 T 3 manufacturer
self.recordname=self.atmp[4]
_OutFileObj.write("\t<"+self.recordname+">\n")
#Do we read a caption ?
if(self.read_caption == True):
self.atmp=_line.split(" ")
#add new caption-element
self.captions.append(self.atmp[5])
#Do we read a Value ?
if(self.read_value == True and self.val_count <= len(self.captions)-1):
if(_line != ""):
_line = _line.replace("<","<")
_line = _line.replace(">",">")
_line = _line.replace("&", "&")
#write Values to file
_OutFileObj.write("\t\t<"+self.captions[self.val_count]+">"+_line+"</"+self.captions[self.val_count]+">\n")
#LOG File
#self.__LogObj.NewLog("\t\t<"+self.captions[self.val_count]+">"+_line+"</"+self.captions[self.val_count]+">\n",0)
else:
self.__ValueCount = 0
                    self.read_value = False
self.val_count = self.val_count + 1
_line=""
except(IOError):
print "Fehler"
| gpl-3.0 | 3,064,554,125,490,663,400 | 28.182965 | 133 | 0.413148 | false |
srikanthmaturu/webpage-classifier | classification/classificationSVM.py | 2 | 3043 | from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.preprocessing import LabelEncoder
from sklearn.cross_validation import train_test_split
from sklearn.svm import SVC
from sklearn.ensemble import RandomForestClassifier, GradientBoostingClassifier, AdaBoostClassifier
from sklearn.naive_bayes import GaussianNB
from sklearn.neighbors import KNeighborsClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import confusion_matrix
from sklearn.calibration import calibration_curve
import matplotlib.pyplot as plt
import matplotlib.mlab as mlab
import os
import numpy as np
from nltk.corpus import stopwords
import time
from datetime import datetime
label_fn = []
for root, subdirs, files in os.walk('data'):
if root != 'data':
label = root.split('/')[-1]
for fn in files:
label_fn.append((label, root + '/' + fn))
labels = [t[0] for t in label_fn]
filenames = [t[1] for t in label_fn]
print "Initializing vectorization"
tf = TfidfVectorizer(input='filename', stop_words=stopwords.words('english'),
decode_error='ignore', max_df=0.95, min_df=0.05)
X = tf.fit_transform(filenames).todense()
print('Vectorization Done')
print('Number of features = %d' % X.shape[1])
print('Initializing label encoding')
le = LabelEncoder()
y_str = labels
y = le.fit_transform(y_str)
print('Label encoding done')
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.33, random_state=13)
print('Initializing classifier')
#clf = SVC(C=1000.0) #88.3 0:00:03.069055
print('Classifier initialized')
#clf = RandomForestClassifier() #91.84 0:00:00.019845
#clf = GradientBoostingClassifier() #94.07 0:00:00.108704
#clf = GaussianNB() #69.78 0:00:00.106093
#clf = KNeighborsClassifier(3) #71.90 0:00:16.034729
#clf = AdaBoostClassifier() #85.55 0:00:00.142586
clf = LogisticRegression() #87.42 0:00:00.062891
print('Initializing learning')
clf.fit(X_train, y_train)
print('Learning complete')
print('Initializing classification')
start_time = datetime.now()
y_pred = clf.predict(X_test)
print('Classification complete')
print ('Total classification time = %s' % format(datetime.now() - start_time))
print('Testing Samples = %d' % len(y_test))
print('Correctly classified Samples = %d' % np.sum(y_pred == y_test))
print('Percentage Classified Correctly = %f' % (np.sum(y_pred == y_test)*100.0/len(y_test)))
####Plotting graph####
#plt.gca().set_color_cycle(['red', 'green', 'blue', 'yellow'])
# Bar chart of the classification times (in seconds) noted beside the
# classifier alternatives above; bars A-G apparently correspond to SVC,
# RandomForest, GradientBoosting, GaussianNB, KNeighbors, AdaBoost and
# LogisticRegression, in that order.
n_groups = 7
classification_times = (3, 0.01, 0.1, 0.1, 16, 0.14, 0.06)
fig, ax = plt.subplots()
index = np.arange(n_groups)
bar_width = 0.35
opacity = 0.4
error_config = {'ecolor': '0.3'}
rects1 = plt.bar(index, classification_times, bar_width,
                 color='r',
                 label='Classification time')
plt.xlabel('Classifiers')
plt.ylabel('Time (s)')
plt.title('Classifiers comparison')
plt.xticks(index + bar_width, ('A', 'B', 'C', 'D', 'E', 'F', 'G'))
plt.legend()
plt.tight_layout()
plt.show()
| gpl-2.0 | 5,727,933,664,605,338,000 | 28.833333 | 99 | 0.702267 | false |
redcanari/canari3 | src/canari/easygui.py | 1 | 89323 | """
@version: 0.96(2010-08-29)
@note:
ABOUT EASYGUI
EasyGui provides an easy-to-use interface for simple GUI interaction
with a user. It does not require the programmer to know anything about
tkinter, frames, widgets, callbacks or lambda. All GUI interactions are
invoked by simple function calls that return results.
@note:
WARNING about using EasyGui with IDLE
You may encounter problems using IDLE to run programs that use EasyGui. Try it
and find out. EasyGui is a collection of Tkinter routines that run their own
event loops. IDLE is also a Tkinter application, with its own event loop. The
two may conflict, with unpredictable results. If you find that you have
problems, try running your EasyGui program outside of IDLE.
Note that EasyGui requires Tk release 8.0 or greater.
@note:
LICENSE INFORMATION
EasyGui version 0.96
Copyright (c) 2010, Stephen Raymond Ferg
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.
3. The name of the author may not be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
@note:
ABOUT THE EASYGUI LICENSE
This license is what is generally known as the "modified BSD license",
aka "revised BSD", "new BSD", "3-clause BSD".
See http://www.opensource.org/licenses/bsd-license.php
This license is GPL-compatible.
See http://en.wikipedia.org/wiki/License_compatibility
See http://www.gnu.org/licenses/license-list.html#GPLCompatibleLicenses
The BSD License is less restrictive than GPL.
It allows software released under the license to be incorporated into proprietary products.
Works based on the software may be released under a proprietary license or as closed source software.
http://en.wikipedia.org/wiki/BSD_licenses#3-clause_license_.28.22New_BSD_License.22.29
"""
egversion = __doc__.split()[1]
__all__ = ['ynbox'
, 'ccbox'
, 'boolbox'
, 'indexbox'
, 'msgbox'
, 'buttonbox'
, 'integerbox'
, 'multenterbox'
, 'enterbox'
, 'exceptionbox'
, 'choicebox'
, 'codebox'
, 'textbox'
, 'diropenbox'
, 'fileopenbox'
, 'filesavebox'
, 'passwordbox'
, 'multpasswordbox'
, 'multchoicebox'
# , 'abouteasygui'
, 'egversion'
# , 'egdemo'
# , 'EgStore'
]
from past.builtins import cmp
import sys, os
import string
import pickle
import traceback
#--------------------------------------------------
# check python version and take appropriate action
#--------------------------------------------------
"""
From the python documentation:
sys.hexversion contains the version number encoded as a single integer. This is
guaranteed to increase with each version, including proper support for non-
production releases. For example, to test that the Python interpreter is at
least version 1.5.2, use:
if sys.hexversion >= 0x010502F0:
# use some advanced feature
...
else:
# use an alternative implementation or warn the user
...
"""
if sys.hexversion >= 0x020600F0:
runningPython26 = True
else:
runningPython26 = False
if sys.hexversion >= 0x030000F0:
runningPython3 = True
else:
runningPython3 = False
try:
from PIL import Image as PILImage
from PIL import ImageTk as PILImageTk
PILisLoaded = True
except:
PILisLoaded = False
if runningPython3:
from tkinter import *
import tkinter.filedialog as tk_FileDialog
from io import StringIO
else:
from Tkinter import *
import tkFileDialog as tk_FileDialog
from StringIO import StringIO
def write(*args):
args = [str(arg) for arg in args]
args = " ".join(args)
sys.stdout.write(args)
def writeln(*args):
write(*args)
sys.stdout.write("\n")
say = writeln
if TkVersion < 8.0 :
stars = "*"*75
writeln("""\n\n\n""" + stars + """
You are running Tk version: """ + str(TkVersion) + """
You must be using Tk version 8.0 or greater to use EasyGui.
Terminating.
""" + stars + """\n\n\n""")
sys.exit(0)
def dq(s):
return '"%s"' % s
rootWindowPosition = "+300+200"
PROPORTIONAL_FONT_FAMILY = ("MS", "Sans", "Serif")
MONOSPACE_FONT_FAMILY = ("Courier")
PROPORTIONAL_FONT_SIZE = 10
MONOSPACE_FONT_SIZE = 9 #a little smaller, because it is more legible at a smaller size
TEXT_ENTRY_FONT_SIZE = 12 # a little larger makes it easier to see
#STANDARD_SELECTION_EVENTS = ["Return", "Button-1"]
STANDARD_SELECTION_EVENTS = ["Return", "Button-1", "space"]
# Initialize some global variables that will be reset later
__choiceboxMultipleSelect = None
__widgetTexts = None
__replyButtonText = None
__choiceboxResults = None
__firstWidget = None
__enterboxText = None
__enterboxDefaultText=""
__multenterboxText = ""
choiceboxChoices = None
choiceboxWidget = None
entryWidget = None
boxRoot = None
ImageErrorMsg = (
"\n\n---------------------------------------------\n"
"Error: %s\n%s")
def _bring_to_front():
from subprocess import Popen
Tk().destroy()
Popen([
'osascript',
'-e', 'tell application "System Events"',
'-e', 'set vprocs to every process whose unix id is %s' % os.getpid(),
'-e', 'repeat with proc in vprocs',
'-e', 'set the frontmost of proc to true',
'-e', 'end repeat',
'-e', 'end tell'
])
#-------------------------------------------------------------------
# various boxes built on top of the basic buttonbox
#-----------------------------------------------------------------------
#-----------------------------------------------------------------------
# ynbox
#-----------------------------------------------------------------------
def ynbox(msg="Shall I continue?"
, title=" "
, choices=("Yes", "No")
, image=None
):
"""
Display a msgbox with choices of Yes and No.
The default is "Yes".
The returned value is calculated this way::
if the first choice ("Yes") is chosen, or if the dialog is cancelled:
return 1
else:
return 0
If invoked without a msg argument, displays a generic request for a confirmation
that the user wishes to continue. So it can be used this way::
if ynbox(): pass # continue
else: sys.exit(0) # exit the program
@arg msg: the msg to be displayed.
@arg title: the window title
@arg choices: a list or tuple of the choices to be displayed
"""
return boolbox(msg, title, choices, image=image)
#-----------------------------------------------------------------------
# ccbox
#-----------------------------------------------------------------------
def ccbox(msg="Shall I continue?"
, title=" "
, choices=("Continue", "Cancel")
, image=None
):
"""
Display a msgbox with choices of Continue and Cancel.
The default is "Continue".
The returned value is calculated this way::
if the first choice ("Continue") is chosen, or if the dialog is cancelled:
return 1
else:
return 0
If invoked without a msg argument, displays a generic request for a confirmation
that the user wishes to continue. So it can be used this way::
if ccbox():
pass # continue
else:
sys.exit(0) # exit the program
@arg msg: the msg to be displayed.
@arg title: the window title
@arg choices: a list or tuple of the choices to be displayed
"""
return boolbox(msg, title, choices, image=image)
#-----------------------------------------------------------------------
# boolbox
#-----------------------------------------------------------------------
def boolbox(msg="Shall I continue?"
, title=" "
, choices=("Yes","No")
, image=None
):
"""
Display a boolean msgbox.
The default is the first choice.
The returned value is calculated this way::
if the first choice is chosen, or if the dialog is cancelled:
returns 1
else:
returns 0
"""
reply = buttonbox(msg=msg, choices=choices, title=title, image=image)
if reply == choices[0]: return 1
else: return 0
#-----------------------------------------------------------------------
# indexbox
#-----------------------------------------------------------------------
def indexbox(msg="Shall I continue?"
, title=" "
, choices=("Yes","No")
, image=None
):
"""
Display a buttonbox with the specified choices.
Return the index of the choice selected.
"""
reply = buttonbox(msg=msg, choices=choices, title=title, image=image)
index = -1
for choice in choices:
index = index + 1
if reply == choice: return index
raise AssertionError(
"There is a program logic error in the EasyGui code for indexbox.")
#-----------------------------------------------------------------------
# msgbox
#-----------------------------------------------------------------------
def msgbox(msg="(Your message goes here)", title=" ", ok_button="OK",image=None,root=None):
"""
Display a messagebox
"""
if type(ok_button) != type("OK"):
raise AssertionError("The 'ok_button' argument to msgbox must be a string.")
return buttonbox(msg=msg, title=title, choices=[ok_button], image=image,root=root)
#-------------------------------------------------------------------
# buttonbox
#-------------------------------------------------------------------
def buttonbox(msg="",title=" "
,choices=("Button1", "Button2", "Button3")
, image=None
, root=None
):
"""
Display a msg, a title, and a set of buttons.
The buttons are defined by the members of the choices list.
Return the text of the button that the user selected.
@arg msg: the msg to be displayed.
@arg title: the window title
@arg choices: a list or tuple of the choices to be displayed
"""
if sys.platform == 'darwin':
_bring_to_front()
global boxRoot, __replyButtonText, __widgetTexts, buttonsFrame
# Initialize __replyButtonText to the first choice.
# This is what will be used if the window is closed by the close button.
__replyButtonText = choices[0]
if root:
root.withdraw()
boxRoot = Toplevel(master=root)
boxRoot.withdraw()
else:
boxRoot = Tk()
boxRoot.withdraw()
boxRoot.protocol('WM_DELETE_WINDOW', denyWindowManagerClose )
boxRoot.title(title)
boxRoot.iconname('Dialog')
boxRoot.geometry(rootWindowPosition)
boxRoot.minsize(400, 100)
# ------------- define the messageFrame ---------------------------------
messageFrame = Frame(master=boxRoot)
messageFrame.pack(side=TOP, fill=BOTH)
# ------------- define the imageFrame ---------------------------------
tk_Image = None
if image:
imageFilename = os.path.normpath(image)
junk,ext = os.path.splitext(imageFilename)
if os.path.exists(imageFilename):
if ext.lower() in [".gif", ".pgm", ".ppm"]:
tk_Image = PhotoImage(master=boxRoot, file=imageFilename)
else:
if PILisLoaded:
try:
pil_Image = PILImage.open(imageFilename)
tk_Image = PILImageTk.PhotoImage(pil_Image, master=boxRoot)
except:
msg += ImageErrorMsg % (imageFilename,
"\nThe Python Imaging Library (PIL) could not convert this file to a displayable image."
"\n\nPIL reports:\n" + exception_format())
else: # PIL is not loaded
msg += ImageErrorMsg % (imageFilename,
"\nI could not import the Python Imaging Library (PIL) to display the image.\n\n"
"You may need to install PIL\n"
"(http://www.pythonware.com/products/pil/)\n"
"to display " + ext + " image files.")
else:
msg += ImageErrorMsg % (imageFilename, "\nImage file not found.")
if tk_Image:
imageFrame = Frame(master=boxRoot)
imageFrame.pack(side=TOP, fill=BOTH)
label = Label(imageFrame,image=tk_Image)
label.image = tk_Image # keep a reference!
label.pack(side=TOP, expand=YES, fill=X, padx='1m', pady='1m')
# ------------- define the buttonsFrame ---------------------------------
buttonsFrame = Frame(master=boxRoot)
buttonsFrame.pack(side=TOP, fill=BOTH)
# -------------------- place the widgets in the frames -----------------------
messageWidget = Message(messageFrame, text=msg, width=400)
messageWidget.configure(font=(PROPORTIONAL_FONT_FAMILY,PROPORTIONAL_FONT_SIZE))
messageWidget.pack(side=TOP, expand=YES, fill=X, padx='3m', pady='3m')
__put_buttons_in_buttonframe(choices)
# -------------- the action begins -----------
# put the focus on the first button
__firstWidget.focus_force()
boxRoot.deiconify()
boxRoot.mainloop()
boxRoot.destroy()
if root: root.deiconify()
return __replyButtonText
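# A minimal usage sketch for buttonbox (the message and choices below are
# illustrative):
#
#     reply = buttonbox(msg="Save changes before exiting?",
#                       title="Confirm",
#                       choices=("Save", "Discard", "Cancel"))
#     if reply == "Save":
#         ...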
#-------------------------------------------------------------------
# integerbox
#-------------------------------------------------------------------
def integerbox(msg=""
, title=" "
, default=""
, lowerbound=0
, upperbound=99
, image = None
, root = None
, **invalidKeywordArguments
):
"""
Show a box in which a user can enter an integer.
In addition to arguments for msg and title, this function accepts
integer arguments for "default", "lowerbound", and "upperbound".
The default argument may be None.
When the user enters some text, the text is checked to verify that it
can be converted to an integer between the lowerbound and upperbound.
If it can be, the integer (not the text) is returned.
If it cannot, then an error msg is displayed, and the integerbox is
redisplayed.
If the user cancels the operation, None is returned.
NOTE that the "argLowerBound" and "argUpperBound" arguments are no longer
supported. They have been replaced by "upperbound" and "lowerbound".
"""
if sys.platform == 'darwin':
_bring_to_front()
if "argLowerBound" in invalidKeywordArguments:
raise AssertionError(
"\nintegerbox no longer supports the 'argLowerBound' argument.\n"
+ "Use 'lowerbound' instead.\n\n")
if "argUpperBound" in invalidKeywordArguments:
raise AssertionError(
"\nintegerbox no longer supports the 'argUpperBound' argument.\n"
+ "Use 'upperbound' instead.\n\n")
if default != "":
if type(default) != type(1):
raise AssertionError(
"integerbox received a non-integer value for "
+ "default of " + dq(str(default)) , "Error")
if type(lowerbound) != type(1):
raise AssertionError(
"integerbox received a non-integer value for "
+ "lowerbound of " + dq(str(lowerbound)) , "Error")
if type(upperbound) != type(1):
raise AssertionError(
"integerbox received a non-integer value for "
+ "upperbound of " + dq(str(upperbound)) , "Error")
if msg == "":
msg = ("Enter an integer between " + str(lowerbound)
+ " and "
+ str(upperbound)
)
while 1:
reply = enterbox(msg, title, str(default), image=image, root=root)
if reply is None:
return None
        try:
            reply = int(reply)
        except ValueError:
            msgbox("The value that you entered:\n\t%s\nis not an integer." % dq(str(reply))
                , "Error")
            continue
if reply < lowerbound:
msgbox ("The value that you entered is less than the lower bound of "
+ str(lowerbound) + ".", "Error")
continue
if reply > upperbound:
msgbox ("The value that you entered is greater than the upper bound of "
+ str(upperbound) + ".", "Error")
continue
# reply has passed all validation checks.
# It is an integer between the specified bounds.
return reply
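# A minimal usage sketch for integerbox (kept as a comment so it does not run
# at import time; the prompt, title, and bounds below are hypothetical):
#
#   age = integerbox("How old are you?", "Age Check",
#                    default=21, lowerbound=0, upperbound=120)
#   if age is None:
#       writeln("User cancelled.")          # cancel returns None
#   else:
#       writeln("Age entered: %s" % age)    # always an int within the bounds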
#-------------------------------------------------------------------
# multenterbox
#-------------------------------------------------------------------
def multenterbox(msg="Fill in values for the fields."
, title=" "
, fields=()
, values=()
):
r"""
    Show a screen with multiple data entry fields.
If there are fewer values than names, the list of values is padded with
empty strings until the number of values is the same as the number of names.
If there are more values than names, the list of values
is truncated so that there are as many values as names.
Returns a list of the values of the fields,
or None if the user cancels the operation.
    Here is some example code that shows how values returned from
    multenterbox can be checked for validity before they are accepted::
----------------------------------------------------------------------
msg = "Enter your personal information"
title = "Credit Card Application"
fieldNames = ["Name","Street Address","City","State","ZipCode"]
fieldValues = [] # we start with blanks for the values
fieldValues = multenterbox(msg,title, fieldNames)
# make sure that none of the fields was left blank
while 1:
        if fieldValues is None: break
errmsg = ""
for i in range(len(fieldNames)):
if fieldValues[i].strip() == "":
errmsg += ('"%s" is a required field.\n\n' % fieldNames[i])
if errmsg == "":
break # no problems found
fieldValues = multenterbox(errmsg, title, fieldNames, fieldValues)
writeln("Reply was: %s" % str(fieldValues))
----------------------------------------------------------------------
@arg msg: the msg to be displayed.
@arg title: the window title
@arg fields: a list of fieldnames.
@arg values: a list of field values
"""
return __multfillablebox(msg,title,fields,values,None)
#-----------------------------------------------------------------------
# multpasswordbox
#-----------------------------------------------------------------------
def multpasswordbox(msg="Fill in values for the fields."
, title=" "
, fields=tuple()
,values=tuple()
):
r"""
    Same interface as multenterbox, but in multpasswordbox
    the last of the fields is assumed to be a password and
    is masked with asterisks.
Example
=======
    Here is some example code that shows how values returned from
    multpasswordbox can be checked for validity before they are accepted::
msg = "Enter logon information"
title = "Demo of multpasswordbox"
fieldNames = ["Server ID", "User ID", "Password"]
fieldValues = [] # we start with blanks for the values
fieldValues = multpasswordbox(msg,title, fieldNames)
# make sure that none of the fields was left blank
while 1:
        if fieldValues is None: break
errmsg = ""
for i in range(len(fieldNames)):
if fieldValues[i].strip() == "":
errmsg = errmsg + ('"%s" is a required field.\n\n' % fieldNames[i])
if errmsg == "": break # no problems found
fieldValues = multpasswordbox(errmsg, title, fieldNames, fieldValues)
writeln("Reply was: %s" % str(fieldValues))
"""
return __multfillablebox(msg,title,fields,values,"*")
def bindArrows(widget):
widget.bind("<Down>", tabRight)
widget.bind("<Up>" , tabLeft)
widget.bind("<Right>",tabRight)
widget.bind("<Left>" , tabLeft)
def tabRight(event):
boxRoot.event_generate("<Tab>")
def tabLeft(event):
boxRoot.event_generate("<Shift-Tab>")
#-----------------------------------------------------------------------
# __multfillablebox
#-----------------------------------------------------------------------
def __multfillablebox(msg="Fill in values for the fields."
, title=" "
, fields=()
, values=()
, mask = None
):
if sys.platform == 'darwin':
_bring_to_front()
global boxRoot, __multenterboxText, __multenterboxDefaultText, cancelButton, entryWidget, okButton
choices = ["OK", "Cancel"]
if len(fields) == 0: return None
fields = list(fields[:]) # convert possible tuples to a list
values = list(values[:]) # convert possible tuples to a list
if len(values) == len(fields): pass
elif len(values) > len(fields):
fields = fields[0:len(values)]
else:
while len(values) < len(fields):
values.append("")
boxRoot = Tk()
boxRoot.protocol('WM_DELETE_WINDOW', denyWindowManagerClose )
boxRoot.title(title)
boxRoot.iconname('Dialog')
boxRoot.geometry(rootWindowPosition)
boxRoot.bind("<Escape>", __multenterboxCancel)
# -------------------- put subframes in the boxRoot --------------------
messageFrame = Frame(master=boxRoot)
messageFrame.pack(side=TOP, fill=BOTH)
#-------------------- the msg widget ----------------------------
messageWidget = Message(messageFrame, width="4.5i", text=msg)
messageWidget.configure(font=(PROPORTIONAL_FONT_FAMILY,PROPORTIONAL_FONT_SIZE))
messageWidget.pack(side=RIGHT, expand=1, fill=BOTH, padx='3m', pady='3m')
global entryWidgets
entryWidgets = []
lastWidgetIndex = len(fields) - 1
for widgetIndex in range(len(fields)):
argFieldName = fields[widgetIndex]
argFieldValue = values[widgetIndex]
entryFrame = Frame(master=boxRoot)
entryFrame.pack(side=TOP, fill=BOTH)
# --------- entryWidget ----------------------------------------------
labelWidget = Label(entryFrame, text=argFieldName)
labelWidget.pack(side=LEFT)
entryWidget = Entry(entryFrame, width=40,highlightthickness=2)
entryWidgets.append(entryWidget)
entryWidget.configure(font=(PROPORTIONAL_FONT_FAMILY,TEXT_ENTRY_FONT_SIZE))
entryWidget.pack(side=RIGHT, padx="3m")
bindArrows(entryWidget)
entryWidget.bind("<Return>", __multenterboxGetText)
entryWidget.bind("<Escape>", __multenterboxCancel)
# for the last entryWidget, if this is a multpasswordbox,
# show the contents as just asterisks
if widgetIndex == lastWidgetIndex:
if mask:
entryWidgets[widgetIndex].configure(show=mask)
# put text into the entryWidget
entryWidgets[widgetIndex].insert(0,argFieldValue)
# ------------------ ok button -------------------------------
buttonsFrame = Frame(master=boxRoot)
buttonsFrame.pack(side=BOTTOM, fill=BOTH)
okButton = Button(buttonsFrame, takefocus=1, text="OK")
bindArrows(okButton)
okButton.pack(expand=1, side=LEFT, padx='3m', pady='3m', ipadx='2m', ipady='1m')
# for the commandButton, bind activation events to the activation event handler
commandButton = okButton
handler = __multenterboxGetText
for selectionEvent in STANDARD_SELECTION_EVENTS:
commandButton.bind("<%s>" % selectionEvent, handler)
# ------------------ cancel button -------------------------------
cancelButton = Button(buttonsFrame, takefocus=1, text="Cancel")
bindArrows(cancelButton)
cancelButton.pack(expand=1, side=RIGHT, padx='3m', pady='3m', ipadx='2m', ipady='1m')
# for the commandButton, bind activation events to the activation event handler
commandButton = cancelButton
handler = __multenterboxCancel
for selectionEvent in STANDARD_SELECTION_EVENTS:
commandButton.bind("<%s>" % selectionEvent, handler)
# ------------------- time for action! -----------------
entryWidgets[0].focus_force() # put the focus on the entryWidget
boxRoot.mainloop() # run it!
# -------- after the run has completed ----------------------------------
boxRoot.destroy() # button_click didn't destroy boxRoot, so we do it now
return __multenterboxText
#-----------------------------------------------------------------------
# __multenterboxGetText
#-----------------------------------------------------------------------
def __multenterboxGetText(event):
global __multenterboxText
__multenterboxText = []
for entryWidget in entryWidgets:
__multenterboxText.append(entryWidget.get())
boxRoot.quit()
def __multenterboxCancel(event):
global __multenterboxText
__multenterboxText = None
boxRoot.quit()
#-------------------------------------------------------------------
# enterbox
#-------------------------------------------------------------------
def enterbox(msg="Enter something."
, title=" "
, default=""
, strip=True
, image=None
, root=None
):
"""
Show a box in which a user can enter some text.
You may optionally specify some default text, which will appear in the
enterbox when it is displayed.
Returns the text that the user entered, or None if he cancels the operation.
By default, enterbox strips its result (i.e. removes leading and trailing
whitespace). (If you want it not to strip, use keyword argument: strip=False.)
This makes it easier to test the results of the call::
reply = enterbox(....)
if reply:
...
else:
...
"""
result = __fillablebox(msg, title, default=default, mask=None,image=image,root=root)
if result and strip:
result = result.strip()
return result
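# A minimal sketch of the strip behavior (kept as a comment; the prompt and
# default are hypothetical). If the user accepts the default unchanged:
#
#   name = enterbox("Who are you?", "Login", "  guest  ")               # -> "guest"
#   raw  = enterbox("Who are you?", "Login", "  guest  ", strip=False)  # -> "  guest  "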
def passwordbox(msg="Enter your password."
, title=" "
, default=""
, image=None
, root=None
):
"""
Show a box in which a user can enter a password.
The text is masked with asterisks, so the password is not displayed.
Returns the text that the user entered, or None if he cancels the operation.
"""
return __fillablebox(msg, title, default, mask="*",image=image,root=root)
def __fillablebox(msg
, title=""
, default=""
, mask=None
, image=None
, root=None
):
"""
Show a box in which a user can enter some text.
You may optionally specify some default text, which will appear in the
enterbox when it is displayed.
Returns the text that the user entered, or None if he cancels the operation.
"""
if sys.platform == 'darwin':
_bring_to_front()
global boxRoot, __enterboxText, __enterboxDefaultText
global cancelButton, entryWidget, okButton
if title is None:
title = ""
if default is None:
default = ""
__enterboxDefaultText = default
__enterboxText = __enterboxDefaultText
if root:
root.withdraw()
boxRoot = Toplevel(master=root)
boxRoot.withdraw()
else:
boxRoot = Tk()
boxRoot.withdraw()
boxRoot.protocol('WM_DELETE_WINDOW', denyWindowManagerClose )
boxRoot.title(title)
boxRoot.iconname('Dialog')
boxRoot.geometry(rootWindowPosition)
boxRoot.bind("<Escape>", __enterboxCancel)
# ------------- define the messageFrame ---------------------------------
messageFrame = Frame(master=boxRoot)
messageFrame.pack(side=TOP, fill=BOTH)
# ------------- define the imageFrame ---------------------------------
tk_Image = None
if image:
imageFilename = os.path.normpath(image)
junk,ext = os.path.splitext(imageFilename)
if os.path.exists(imageFilename):
if ext.lower() in [".gif", ".pgm", ".ppm"]:
tk_Image = PhotoImage(master=boxRoot, file=imageFilename)
else:
if PILisLoaded:
try:
pil_Image = PILImage.open(imageFilename)
tk_Image = PILImageTk.PhotoImage(pil_Image, master=boxRoot)
                    except Exception:
msg += ImageErrorMsg % (imageFilename,
"\nThe Python Imaging Library (PIL) could not convert this file to a displayable image."
"\n\nPIL reports:\n" + exception_format())
else: # PIL is not loaded
msg += ImageErrorMsg % (imageFilename,
"\nI could not import the Python Imaging Library (PIL) to display the image.\n\n"
"You may need to install PIL\n"
"(http://www.pythonware.com/products/pil/)\n"
"to display " + ext + " image files.")
else:
msg += ImageErrorMsg % (imageFilename, "\nImage file not found.")
if tk_Image:
imageFrame = Frame(master=boxRoot)
imageFrame.pack(side=TOP, fill=BOTH)
label = Label(imageFrame,image=tk_Image)
label.image = tk_Image # keep a reference!
label.pack(side=TOP, expand=YES, fill=X, padx='1m', pady='1m')
    # ------------- define the entryFrame ---------------------------------
    entryFrame = Frame(master=boxRoot)
    entryFrame.pack(side=TOP, fill=BOTH)
    # ------------- define the buttonsFrame ---------------------------------
    buttonsFrame = Frame(master=boxRoot)
    buttonsFrame.pack(side=TOP, fill=BOTH)
#-------------------- the msg widget ----------------------------
messageWidget = Message(messageFrame, width="4.5i", text=msg)
messageWidget.configure(font=(PROPORTIONAL_FONT_FAMILY,PROPORTIONAL_FONT_SIZE))
messageWidget.pack(side=RIGHT, expand=1, fill=BOTH, padx='3m', pady='3m')
# --------- entryWidget ----------------------------------------------
entryWidget = Entry(entryFrame, width=40)
bindArrows(entryWidget)
entryWidget.configure(font=(PROPORTIONAL_FONT_FAMILY,TEXT_ENTRY_FONT_SIZE))
if mask:
entryWidget.configure(show=mask)
entryWidget.pack(side=LEFT, padx="3m")
entryWidget.bind("<Return>", __enterboxGetText)
entryWidget.bind("<Escape>", __enterboxCancel)
# put text into the entryWidget
entryWidget.insert(0,__enterboxDefaultText)
# ------------------ ok button -------------------------------
okButton = Button(buttonsFrame, takefocus=1, text="OK")
bindArrows(okButton)
okButton.pack(expand=1, side=LEFT, padx='3m', pady='3m', ipadx='2m', ipady='1m')
# for the commandButton, bind activation events to the activation event handler
commandButton = okButton
handler = __enterboxGetText
for selectionEvent in STANDARD_SELECTION_EVENTS:
commandButton.bind("<%s>" % selectionEvent, handler)
# ------------------ cancel button -------------------------------
cancelButton = Button(buttonsFrame, takefocus=1, text="Cancel")
bindArrows(cancelButton)
cancelButton.pack(expand=1, side=RIGHT, padx='3m', pady='3m', ipadx='2m', ipady='1m')
# for the commandButton, bind activation events to the activation event handler
commandButton = cancelButton
handler = __enterboxCancel
for selectionEvent in STANDARD_SELECTION_EVENTS:
commandButton.bind("<%s>" % selectionEvent, handler)
# ------------------- time for action! -----------------
entryWidget.focus_force() # put the focus on the entryWidget
boxRoot.deiconify()
boxRoot.mainloop() # run it!
# -------- after the run has completed ----------------------------------
if root: root.deiconify()
boxRoot.destroy() # button_click didn't destroy boxRoot, so we do it now
return __enterboxText
def __enterboxGetText(event):
global __enterboxText
__enterboxText = entryWidget.get()
boxRoot.quit()
def __enterboxRestore(event):
global entryWidget
entryWidget.delete(0,len(entryWidget.get()))
entryWidget.insert(0, __enterboxDefaultText)
def __enterboxCancel(event):
global __enterboxText
__enterboxText = None
boxRoot.quit()
def denyWindowManagerClose():
""" don't allow WindowManager close
"""
x = Tk()
x.withdraw()
x.bell()
x.destroy()
#-------------------------------------------------------------------
# multchoicebox
#-------------------------------------------------------------------
def multchoicebox(msg="Pick as many items as you like."
, title=" "
, choices=()
, **kwargs
):
"""
Present the user with a list of choices.
allow him to select multiple items and return them in a list.
if the user doesn't choose anything from the list, return the empty list.
return None if he cancelled selection.
@arg msg: the msg to be displayed.
@arg title: the window title
@arg choices: a list or tuple of the choices to be displayed
"""
if len(choices) == 0: choices = ["Program logic error - no choices were specified."]
global __choiceboxMultipleSelect
__choiceboxMultipleSelect = 1
return __choicebox(msg, title, choices)
#-----------------------------------------------------------------------
# choicebox
#-----------------------------------------------------------------------
def choicebox(msg="Pick something."
, title=" "
, choices=()
):
"""
Present the user with a list of choices.
return the choice that he selects.
return None if he cancels the selection selection.
@arg msg: the msg to be displayed.
@arg title: the window title
@arg choices: a list or tuple of the choices to be displayed
"""
if len(choices) == 0: choices = ["Program logic error - no choices were specified."]
global __choiceboxMultipleSelect
__choiceboxMultipleSelect = 0
return __choicebox(msg,title,choices)
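# A minimal sketch contrasting choicebox and multchoicebox (kept as a comment;
# the choice strings are hypothetical):
#
#   flavors = ["vanilla", "chocolate", "strawberry"]
#   one  = choicebox("Pick one flavor.", "Flavors", flavors)       # a string, or None
#   many = multchoicebox("Pick any flavors.", "Flavors", flavors)  # a list, [] or None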
#-----------------------------------------------------------------------
# __choicebox
#-----------------------------------------------------------------------
def __choicebox(msg
, title
, choices
):
"""
internal routine to support choicebox() and multchoicebox()
"""
if sys.platform == 'darwin':
_bring_to_front()
global boxRoot, __choiceboxResults, choiceboxWidget, defaultText
global choiceboxWidget, choiceboxChoices
#-------------------------------------------------------------------
# If choices is a tuple, we make it a list so we can sort it.
# If choices is already a list, we make a new list, so that when
# we sort the choices, we don't affect the list object that we
# were given.
#-------------------------------------------------------------------
choices = list(choices[:])
if len(choices) == 0:
choices = ["Program logic error - no choices were specified."]
defaultButtons = ["OK", "Cancel"]
# make sure all choices are strings
for index in range(len(choices)):
choices[index] = str(choices[index])
    lines_to_show = min(len(choices), 20)
if title is None:
title = ""
# Initialize __choiceboxResults
# This is the value that will be returned if the user clicks the close icon
__choiceboxResults = None
boxRoot = Tk()
boxRoot.protocol('WM_DELETE_WINDOW', denyWindowManagerClose )
screen_width = boxRoot.winfo_screenwidth()
screen_height = boxRoot.winfo_screenheight()
root_width = int((screen_width * 0.8))
root_height = int((screen_height * 0.5))
root_xpos = int((screen_width * 0.1))
root_ypos = int((screen_height * 0.05))
boxRoot.title(title)
boxRoot.iconname('Dialog')
rootWindowPosition = "+0+0"
boxRoot.geometry(rootWindowPosition)
boxRoot.expand=NO
boxRoot.minsize(root_width, root_height)
rootWindowPosition = "+" + str(root_xpos) + "+" + str(root_ypos)
boxRoot.geometry(rootWindowPosition)
# ---------------- put the frames in the window -----------------------------------------
message_and_buttonsFrame = Frame(master=boxRoot)
message_and_buttonsFrame.pack(side=TOP, fill=X, expand=NO)
messageFrame = Frame(message_and_buttonsFrame)
messageFrame.pack(side=LEFT, fill=X, expand=YES)
#messageFrame.pack(side=TOP, fill=X, expand=YES)
buttonsFrame = Frame(message_and_buttonsFrame)
buttonsFrame.pack(side=RIGHT, expand=NO, pady=0)
#buttonsFrame.pack(side=TOP, expand=YES, pady=0)
choiceboxFrame = Frame(master=boxRoot)
choiceboxFrame.pack(side=BOTTOM, fill=BOTH, expand=YES)
# -------------------------- put the widgets in the frames ------------------------------
# ---------- put a msg widget in the msg frame-------------------
messageWidget = Message(messageFrame, anchor=NW, text=msg, width=int(root_width * 0.9))
messageWidget.configure(font=(PROPORTIONAL_FONT_FAMILY,PROPORTIONAL_FONT_SIZE))
messageWidget.pack(side=LEFT, expand=YES, fill=BOTH, padx='1m', pady='1m')
# -------- put the choiceboxWidget in the choiceboxFrame ---------------------------
choiceboxWidget = Listbox(choiceboxFrame
, height=lines_to_show
, borderwidth="1m"
, relief="flat"
, bg="white"
)
if __choiceboxMultipleSelect:
choiceboxWidget.configure(selectmode=MULTIPLE)
choiceboxWidget.configure(font=(PROPORTIONAL_FONT_FAMILY,PROPORTIONAL_FONT_SIZE))
# add a vertical scrollbar to the frame
rightScrollbar = Scrollbar(choiceboxFrame, orient=VERTICAL, command=choiceboxWidget.yview)
choiceboxWidget.configure(yscrollcommand = rightScrollbar.set)
# add a horizontal scrollbar to the frame
bottomScrollbar = Scrollbar(choiceboxFrame, orient=HORIZONTAL, command=choiceboxWidget.xview)
choiceboxWidget.configure(xscrollcommand = bottomScrollbar.set)
# pack the Listbox and the scrollbars. Note that although we must define
# the textArea first, we must pack it last, so that the bottomScrollbar will
# be located properly.
bottomScrollbar.pack(side=BOTTOM, fill = X)
rightScrollbar.pack(side=RIGHT, fill = Y)
choiceboxWidget.pack(side=LEFT, padx="1m", pady="1m", expand=YES, fill=BOTH)
#---------------------------------------------------
# sort the choices
# eliminate duplicates
# put the choices into the choiceboxWidget
#---------------------------------------------------
for index in range(len(choices)):
choices[index] = str(choices[index])
if runningPython3:
choices.sort(key=str.lower)
else:
choices.sort( lambda x,y: cmp(x.lower(), y.lower())) # case-insensitive sort
    lastInserted = None
    choiceboxChoices = []
    for choice in choices:
        if choice != lastInserted:
            choiceboxWidget.insert(END, choice)
            choiceboxChoices.append(choice)
            lastInserted = choice
boxRoot.bind('<Any-Key>', KeyboardListener)
# put the buttons in the buttonsFrame
if len(choices) > 0:
okButton = Button(buttonsFrame, takefocus=YES, text="OK", height=1, width=6)
bindArrows(okButton)
okButton.pack(expand=NO, side=TOP, padx='2m', pady='1m', ipady="1m", ipadx="2m")
# for the commandButton, bind activation events to the activation event handler
commandButton = okButton
handler = __choiceboxGetChoice
for selectionEvent in STANDARD_SELECTION_EVENTS:
commandButton.bind("<%s>" % selectionEvent, handler)
# now bind the keyboard events
choiceboxWidget.bind("<Return>", __choiceboxGetChoice)
choiceboxWidget.bind("<Double-Button-1>", __choiceboxGetChoice)
else:
# now bind the keyboard events
choiceboxWidget.bind("<Return>", __choiceboxCancel)
choiceboxWidget.bind("<Double-Button-1>", __choiceboxCancel)
cancelButton = Button(buttonsFrame, takefocus=YES, text="Cancel", height=1, width=6)
bindArrows(cancelButton)
cancelButton.pack(expand=NO, side=BOTTOM, padx='2m', pady='1m', ipady="1m", ipadx="2m")
# for the commandButton, bind activation events to the activation event handler
commandButton = cancelButton
handler = __choiceboxCancel
for selectionEvent in STANDARD_SELECTION_EVENTS:
commandButton.bind("<%s>" % selectionEvent, handler)
# add special buttons for multiple select features
if len(choices) > 0 and __choiceboxMultipleSelect:
selectionButtonsFrame = Frame(messageFrame)
selectionButtonsFrame.pack(side=RIGHT, fill=Y, expand=NO)
selectAllButton = Button(selectionButtonsFrame, text="Select All", height=1, width=6)
bindArrows(selectAllButton)
selectAllButton.bind("<Button-1>",__choiceboxSelectAll)
selectAllButton.pack(expand=NO, side=TOP, padx='2m', pady='1m', ipady="1m", ipadx="2m")
clearAllButton = Button(selectionButtonsFrame, text="Clear All", height=1, width=6)
bindArrows(clearAllButton)
clearAllButton.bind("<Button-1>",__choiceboxClearAll)
clearAllButton.pack(expand=NO, side=TOP, padx='2m', pady='1m', ipady="1m", ipadx="2m")
# -------------------- bind some keyboard events ----------------------------
boxRoot.bind("<Escape>", __choiceboxCancel)
# --------------------- the action begins -----------------------------------
# put the focus on the choiceboxWidget, and the select highlight on the first item
choiceboxWidget.select_set(0)
choiceboxWidget.focus_force()
# --- run it! -----
boxRoot.mainloop()
boxRoot.destroy()
return __choiceboxResults
def __choiceboxGetChoice(event):
global boxRoot, __choiceboxResults, choiceboxWidget
if __choiceboxMultipleSelect:
__choiceboxResults = [choiceboxWidget.get(index) for index in choiceboxWidget.curselection()]
else:
choice_index = choiceboxWidget.curselection()
__choiceboxResults = choiceboxWidget.get(choice_index)
# writeln("Debugging> mouse-event=", event, " event.type=", event.type)
# writeln("Debugging> choice=", choice_index, __choiceboxResults)
boxRoot.quit()
def __choiceboxSelectAll(event):
global choiceboxWidget, choiceboxChoices
choiceboxWidget.selection_set(0, len(choiceboxChoices)-1)
def __choiceboxClearAll(event):
global choiceboxWidget, choiceboxChoices
choiceboxWidget.selection_clear(0, len(choiceboxChoices)-1)
def __choiceboxCancel(event):
global boxRoot, __choiceboxResults
__choiceboxResults = None
boxRoot.quit()
def KeyboardListener(event):
global choiceboxChoices, choiceboxWidget
key = event.keysym
if len(key) <= 1:
if key in string.printable:
# Find the key in the list.
# before we clear the list, remember the selected member
try:
start_n = int(choiceboxWidget.curselection()[0])
except IndexError:
start_n = -1
## clear the selection.
choiceboxWidget.selection_clear(0, 'end')
## start from previous selection +1
for n in range(start_n+1, len(choiceboxChoices)):
item = choiceboxChoices[n]
if item[0].lower() == key.lower():
choiceboxWidget.selection_set(first=n)
choiceboxWidget.see(n)
return
else:
# has not found it so loop from top
for n in range(len(choiceboxChoices)):
item = choiceboxChoices[n]
if item[0].lower() == key.lower():
choiceboxWidget.selection_set(first = n)
choiceboxWidget.see(n)
return
# nothing matched -- we'll look for the next logical choice
for n in range(len(choiceboxChoices)):
item = choiceboxChoices[n]
if item[0].lower() > key.lower():
if n > 0:
choiceboxWidget.selection_set(first = (n-1))
else:
choiceboxWidget.selection_set(first = 0)
choiceboxWidget.see(n)
return
# still no match (nothing was greater than the key)
# we set the selection to the first item in the list
lastIndex = len(choiceboxChoices)-1
choiceboxWidget.selection_set(first = lastIndex)
choiceboxWidget.see(lastIndex)
return
#-----------------------------------------------------------------------
# exception_format
#-----------------------------------------------------------------------
def exception_format():
"""
Convert exception info into a string suitable for display.
"""
return "".join(traceback.format_exception(
sys.exc_info()[0]
, sys.exc_info()[1]
, sys.exc_info()[2]
))
#-----------------------------------------------------------------------
# exceptionbox
#-----------------------------------------------------------------------
def exceptionbox(msg=None, title=None):
"""
Display a box that gives information about
an exception that has just been raised.
The caller may optionally pass in a title for the window, or a
msg to accompany the error information.
Note that you do not need to (and cannot) pass an exception object
as an argument. The latest exception will automatically be used.
"""
if title is None:
title = "Error Report"
if msg is None:
msg = "An error (exception) has occurred in the program."
codebox(msg, title, exception_format())
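# A minimal sketch of the intended call pattern (kept as a comment).
# exceptionbox reads the current exception via sys.exc_info(), so it should be
# called from inside an except block:
#
#   try:
#       result = 1 / 0
#   except Exception:
#       exceptionbox("Something went wrong in the demo calculation.")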
#-------------------------------------------------------------------
# codebox
#-------------------------------------------------------------------
def codebox(msg=""
, title=" "
, text=""
):
"""
Display some text in a monospaced font, with no line wrapping.
This function is suitable for displaying code and text that is
formatted using spaces.
The text parameter should be a string, or a list or tuple of lines to be
displayed in the textbox.
"""
return textbox(msg, title, text, codebox=1 )
#-------------------------------------------------------------------
# textbox
#-------------------------------------------------------------------
def textbox(msg=""
, title=" "
, text=""
, codebox=0
):
"""
Display some text in a proportional font with line wrapping at word breaks.
This function is suitable for displaying general written text.
The text parameter should be a string, or a list or tuple of lines to be
displayed in the textbox.
"""
if sys.platform == 'darwin':
_bring_to_front()
if msg is None:
msg = ""
if title is None:
title = ""
global boxRoot, __replyButtonText, __widgetTexts, buttonsFrame
global rootWindowPosition
choices = ["OK"]
__replyButtonText = choices[0]
boxRoot = Tk()
boxRoot.protocol('WM_DELETE_WINDOW', denyWindowManagerClose )
screen_width = boxRoot.winfo_screenwidth()
screen_height = boxRoot.winfo_screenheight()
root_width = int((screen_width * 0.8))
root_height = int((screen_height * 0.5))
root_xpos = int((screen_width * 0.1))
root_ypos = int((screen_height * 0.05))
boxRoot.title(title)
boxRoot.iconname('Dialog')
rootWindowPosition = "+0+0"
boxRoot.geometry(rootWindowPosition)
boxRoot.expand=NO
boxRoot.minsize(root_width, root_height)
rootWindowPosition = "+" + str(root_xpos) + "+" + str(root_ypos)
boxRoot.geometry(rootWindowPosition)
mainframe = Frame(master=boxRoot)
mainframe.pack(side=TOP, fill=BOTH, expand=YES)
# ---- put frames in the window -----------------------------------
# we pack the textboxFrame first, so it will expand first
textboxFrame = Frame(mainframe, borderwidth=3)
textboxFrame.pack(side=BOTTOM , fill=BOTH, expand=YES)
message_and_buttonsFrame = Frame(mainframe)
message_and_buttonsFrame.pack(side=TOP, fill=X, expand=NO)
messageFrame = Frame(message_and_buttonsFrame)
messageFrame.pack(side=LEFT, fill=X, expand=YES)
buttonsFrame = Frame(message_and_buttonsFrame)
buttonsFrame.pack(side=RIGHT, expand=NO)
# -------------------- put widgets in the frames --------------------
# put a textArea in the top frame
if codebox:
character_width = int((root_width * 0.6) / MONOSPACE_FONT_SIZE)
textArea = Text(textboxFrame,height=25,width=character_width, padx="2m", pady="1m")
textArea.configure(wrap=NONE)
textArea.configure(font=(MONOSPACE_FONT_FAMILY, MONOSPACE_FONT_SIZE))
else:
character_width = int((root_width * 0.6) / MONOSPACE_FONT_SIZE)
textArea = Text(
textboxFrame
, height=25
, width=character_width
, padx="2m"
, pady="1m"
)
textArea.configure(wrap=WORD)
textArea.configure(font=(PROPORTIONAL_FONT_FAMILY,PROPORTIONAL_FONT_SIZE))
# some simple keybindings for scrolling
mainframe.bind("<Next>" , textArea.yview_scroll( 1,PAGES))
mainframe.bind("<Prior>", textArea.yview_scroll(-1,PAGES))
mainframe.bind("<Right>", textArea.xview_scroll( 1,PAGES))
mainframe.bind("<Left>" , textArea.xview_scroll(-1,PAGES))
mainframe.bind("<Down>", textArea.yview_scroll( 1,UNITS))
mainframe.bind("<Up>" , textArea.yview_scroll(-1,UNITS))
# add a vertical scrollbar to the frame
rightScrollbar = Scrollbar(textboxFrame, orient=VERTICAL, command=textArea.yview)
textArea.configure(yscrollcommand = rightScrollbar.set)
# add a horizontal scrollbar to the frame
bottomScrollbar = Scrollbar(textboxFrame, orient=HORIZONTAL, command=textArea.xview)
textArea.configure(xscrollcommand = bottomScrollbar.set)
# pack the textArea and the scrollbars. Note that although we must define
# the textArea first, we must pack it last, so that the bottomScrollbar will
# be located properly.
# Note that we need a bottom scrollbar only for code.
# Text will be displayed with wordwrap, so we don't need to have a horizontal
# scroll for it.
if codebox:
bottomScrollbar.pack(side=BOTTOM, fill=X)
rightScrollbar.pack(side=RIGHT, fill=Y)
textArea.pack(side=LEFT, fill=BOTH, expand=YES)
# ---------- put a msg widget in the msg frame-------------------
messageWidget = Message(messageFrame, anchor=NW, text=msg, width=int(root_width * 0.9))
messageWidget.configure(font=(PROPORTIONAL_FONT_FAMILY,PROPORTIONAL_FONT_SIZE))
messageWidget.pack(side=LEFT, expand=YES, fill=BOTH, padx='1m', pady='1m')
# put the buttons in the buttonsFrame
okButton = Button(buttonsFrame, takefocus=YES, text="OK", height=1, width=6)
okButton.pack(expand=NO, side=TOP, padx='2m', pady='1m', ipady="1m", ipadx="2m")
# for the commandButton, bind activation events to the activation event handler
commandButton = okButton
handler = __textboxOK
for selectionEvent in ["Return","Button-1","Escape"]:
commandButton.bind("<%s>" % selectionEvent, handler)
# ----------------- the action begins ----------------------------------------
    try:
        # load the text into the textArea
        if not isinstance(text, str):
            try:
                text = "".join(text)  # convert a list or a tuple to a string
            except TypeError:
                msgbox("Exception when trying to convert "
                       + str(type(text)) + " to text in textArea")
                sys.exit(16)
        textArea.insert(END, text, "normal")
    except Exception:
        msgbox("Exception when trying to load the textArea.")
        sys.exit(16)
    try:
        okButton.focus_force()
    except Exception:
        msgbox("Exception when trying to put focus on okButton.")
        sys.exit(16)
boxRoot.mainloop()
# this line MUST go before the line that destroys boxRoot
    areaText = textArea.get("1.0", END)
boxRoot.destroy()
return areaText # return __replyButtonText
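# A minimal sketch (kept as a comment; "example.py" is a hypothetical file).
# codebox shows its text in a monospaced font with no wrapping, textbox wraps
# at word breaks, and both return the final contents of the text area:
#
#   source = open("example.py").read()
#   codebox("Here is the source:", "Viewer", source)
#   textbox("Here is some prose:", "Viewer", "a long wrappable sentence " * 40)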
#-------------------------------------------------------------------
# __textboxOK
#-------------------------------------------------------------------
def __textboxOK(event):
global boxRoot
boxRoot.quit()
#-------------------------------------------------------------------
# diropenbox
#-------------------------------------------------------------------
def diropenbox(msg=None
, title=None
, default=None
):
"""
A dialog to get a directory name.
Note that the msg argument, if specified, is ignored.
Returns the name of a directory, or None if user chose to cancel.
If the "default" argument specifies a directory name, and that
directory exists, then the dialog box will start with that directory.
"""
if sys.platform == 'darwin':
_bring_to_front()
title=getFileDialogTitle(msg,title)
localRoot = Tk()
localRoot.withdraw()
if not default: default = None
f = tk_FileDialog.askdirectory(
parent=localRoot
, title=title
, initialdir=default
, initialfile=None
)
localRoot.destroy()
if not f: return None
return os.path.normpath(f)
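# A minimal usage sketch (kept as a comment; the default path is hypothetical):
#
#   folder = diropenbox("Pick a working directory", "Choose Folder",
#                       default="./data")
#   # folder is a normalized pathname, or None if the user cancelled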
#-------------------------------------------------------------------
# getFileDialogTitle
#-------------------------------------------------------------------
def getFileDialogTitle(msg
, title
):
if msg and title: return "%s - %s" % (title,msg)
if msg and not title: return str(msg)
if title and not msg: return str(title)
return None # no message and no title
#-------------------------------------------------------------------
# class FileTypeObject for use with fileopenbox
#-------------------------------------------------------------------
class FileTypeObject:
def __init__(self,filemask):
if len(filemask) == 0:
raise AssertionError('Filetype argument is empty.')
self.masks = []
if type(filemask) == type("abc"): # a string
self.initializeFromString(filemask)
elif type(filemask) == type([]): # a list
if len(filemask) < 2:
raise AssertionError('Invalid filemask.\n'
+'List contains less than 2 members: "%s"' % filemask)
else:
self.name = filemask[-1]
self.masks = list(filemask[:-1] )
else:
raise AssertionError('Invalid filemask: "%s"' % filemask)
    def __eq__(self,other):
        return self.name == other.name
    def add(self,other):
        for mask in other.masks:
            if mask not in self.masks:
                self.masks.append(mask)
    def toTuple(self):
        return (self.name,tuple(self.masks))
    def isAll(self):
        return self.name == "All files"
def initializeFromString(self, filemask):
# remove everything except the extension from the filemask
self.ext = os.path.splitext(filemask)[1]
if self.ext == "" : self.ext = ".*"
if self.ext == ".": self.ext = ".*"
self.name = self.getName()
self.masks = ["*" + self.ext]
def getName(self):
e = self.ext
if e == ".*" : return "All files"
if e == ".txt": return "Text files"
if e == ".py" : return "Python files"
if e == ".pyc" : return "Python files"
if e == ".xls": return "Excel files"
if e.startswith("."):
return e[1:].upper() + " files"
return e.upper() + " files"
#-------------------------------------------------------------------
# fileopenbox
#-------------------------------------------------------------------
def fileopenbox(msg=None
, title=None
, default="*"
, filetypes=None
):
"""
A dialog to get a file name.
About the "default" argument
============================
The "default" argument specifies a filepath that (normally)
contains one or more wildcards.
fileopenbox will display only files that match the default filepath.
If omitted, defaults to "*" (all files in the current directory).
WINDOWS EXAMPLE::
...default="c:/myjunk/*.py"
will open in directory c:\myjunk\ and show all Python files.
WINDOWS EXAMPLE::
...default="c:/myjunk/test*.py"
will open in directory c:\myjunk\ and show all Python files
whose names begin with "test".
Note that on Windows, fileopenbox automatically changes the path
separator to the Windows path separator (backslash).
About the "filetypes" argument
==============================
If specified, it should contain a list of items,
where each item is either::
- a string containing a filemask # e.g. "*.txt"
- a list of strings, where all of the strings except the last one
are filemasks (each beginning with "*.",
such as "*.txt" for text files, "*.py" for Python files, etc.).
and the last string contains a filetype description
EXAMPLE::
filetypes = ["*.css", ["*.htm", "*.html", "HTML files"] ]
NOTE THAT
=========
If the filetypes list does not contain ("All files","*"),
it will be added.
If the filetypes list does not contain a filemask that includes
the extension of the "default" argument, it will be added.
For example, if default="*abc.py"
and no filetypes argument was specified, then
"*.py" will automatically be added to the filetypes argument.
@rtype: string or None
@return: the name of a file, or None if user chose to cancel
@arg msg: the msg to be displayed.
@arg title: the window title
@arg default: filepath with wildcards
@arg filetypes: filemasks that a user can choose, e.g. "*.txt"
"""
if sys.platform == 'darwin':
_bring_to_front()
localRoot = Tk()
localRoot.withdraw()
initialbase, initialfile, initialdir, filetypes = fileboxSetup(default,filetypes)
#------------------------------------------------------------
    # If initialfile contains no wildcards, we don't want an
    # initial file. It won't be used anyway.
    # Also: if initialbase is simply "*", we don't want an
    # initialfile; it is not doing any useful work.
#------------------------------------------------------------
if (initialfile.find("*") < 0) and (initialfile.find("?") < 0):
initialfile = None
elif initialbase == "*":
initialfile = None
f = tk_FileDialog.askopenfilename(parent=localRoot
, title=getFileDialogTitle(msg,title)
, initialdir=initialdir
, initialfile=initialfile
, filetypes=filetypes
)
localRoot.destroy()
if not f: return None
return os.path.normpath(f)
#-------------------------------------------------------------------
# filesavebox
#-------------------------------------------------------------------
def filesavebox(msg=None
, title=None
, default=""
, filetypes=None
):
"""
    A dialog to get the name of a file to save.
Returns the name of a file, or None if user chose to cancel.
The "default" argument should contain a filename (i.e. the
current name of the file to be saved). It may also be empty,
or contain a filemask that includes wildcards.
The "filetypes" argument works like the "filetypes" argument to
fileopenbox.
"""
if sys.platform == 'darwin':
_bring_to_front()
localRoot = Tk()
localRoot.withdraw()
initialbase, initialfile, initialdir, filetypes = fileboxSetup(default,filetypes)
f = tk_FileDialog.asksaveasfilename(parent=localRoot
, title=getFileDialogTitle(msg,title)
, initialfile=initialfile
, initialdir=initialdir
, filetypes=filetypes
)
localRoot.destroy()
if not f: return None
return os.path.normpath(f)
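# A minimal usage sketch (kept as a comment; the filename is hypothetical):
#
#   target = filesavebox("Save the report as:", "Save Report",
#                        default="report.txt")
#   # target is the chosen pathname, or None if the user cancelled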
#-------------------------------------------------------------------
#
# fileboxSetup
#
#-------------------------------------------------------------------
def fileboxSetup(default,filetypes):
if not default: default = os.path.join(".","*")
initialdir, initialfile = os.path.split(default)
if not initialdir : initialdir = "."
if not initialfile: initialfile = "*"
initialbase, initialext = os.path.splitext(initialfile)
initialFileTypeObject = FileTypeObject(initialfile)
allFileTypeObject = FileTypeObject("*")
ALL_filetypes_was_specified = False
if not filetypes: filetypes= []
filetypeObjects = []
for filemask in filetypes:
fto = FileTypeObject(filemask)
if fto.isAll():
ALL_filetypes_was_specified = True # remember this
if fto == initialFileTypeObject:
initialFileTypeObject.add(fto) # add fto to initialFileTypeObject
else:
filetypeObjects.append(fto)
#------------------------------------------------------------------
# make sure that the list of filetypes includes the ALL FILES type.
#------------------------------------------------------------------
if ALL_filetypes_was_specified:
pass
elif allFileTypeObject == initialFileTypeObject:
pass
else:
filetypeObjects.insert(0,allFileTypeObject)
#------------------------------------------------------------------
# Make sure that the list includes the initialFileTypeObject
# in the position in the list that will make it the default.
# This changed between Python version 2.5 and 2.6
#------------------------------------------------------------------
if len(filetypeObjects) == 0:
filetypeObjects.append(initialFileTypeObject)
if initialFileTypeObject in (filetypeObjects[0], filetypeObjects[-1]):
pass
else:
if runningPython26:
filetypeObjects.append(initialFileTypeObject)
else:
filetypeObjects.insert(0,initialFileTypeObject)
filetypes = [fto.toTuple() for fto in filetypeObjects]
return initialbase, initialfile, initialdir, filetypes
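# A small sketch of what fileboxSetup returns (kept as a comment; the path is
# hypothetical):
#
#   base, initfile, initdir, types = fileboxSetup("c:/myjunk/*.py", None)
#   # base     -> "*"           (the filename portion without its extension)
#   # initfile -> "*.py"
#   # initdir  -> "c:/myjunk"
#   # types    -> [("All files", ("*.*",)), ("Python files", ("*.py",))]
#   #             (the order of the two entries depends on the runningPython26 check)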
#-------------------------------------------------------------------
# utility routines
#-------------------------------------------------------------------
# These routines are used by several other functions in the EasyGui module.
def __buttonEvent(event):
"""
Handle an event that is generated by a person clicking a button.
"""
global boxRoot, __widgetTexts, __replyButtonText
__replyButtonText = __widgetTexts[event.widget]
boxRoot.quit() # quit the main loop
def __put_buttons_in_buttonframe(choices):
"""Put the buttons in the buttons frame
"""
global __widgetTexts, __firstWidget, buttonsFrame
    __firstWidget = None
    __widgetTexts = {}
    for buttonText in choices:
        tempButton = Button(buttonsFrame, takefocus=1, text=buttonText)
        bindArrows(tempButton)
        tempButton.pack(expand=YES, side=LEFT, padx='1m', pady='1m', ipadx='2m', ipady='1m')
        # remember the text associated with this widget
        __widgetTexts[tempButton] = buttonText
        # remember the first widget, so we can put the focus there
        if __firstWidget is None:
            __firstWidget = tempButton
# for the commandButton, bind activation events to the activation event handler
commandButton = tempButton
handler = __buttonEvent
for selectionEvent in STANDARD_SELECTION_EVENTS:
commandButton.bind("<%s>" % selectionEvent, handler)
##-----------------------------------------------------------------------
##
## class EgStore
##
##-----------------------------------------------------------------------
#class EgStore:
# r"""
#A class to support persistent storage.
#
#You can use EgStore to support the storage and retrieval
#of user settings for an EasyGui application.
#
#
## Example A
##-----------------------------------------------------------------------
## define a class named Settings as a subclass of EgStore
##-----------------------------------------------------------------------
#class Settings(EgStore):
#::
# def __init__(self, filename): # filename is required
# #-------------------------------------------------
# # Specify default/initial values for variables that
# # this particular application wants to remember.
# #-------------------------------------------------
# self.userId = ""
# self.targetServer = ""
#
# #-------------------------------------------------
# # For subclasses of EgStore, these must be
# # the last two statements in __init__
# #-------------------------------------------------
# self.filename = filename # this is required
# self.restore() # restore values from the storage file if possible
#
#
#
## Example B
##-----------------------------------------------------------------------
## create settings, a persistent Settings object
##-----------------------------------------------------------------------
#settingsFile = "myApp_settings.txt"
#settings = Settings(settingsFile)
#
#user = "obama_barak"
#server = "whitehouse1"
#settings.userId = user
#settings.targetServer = server
#settings.store() # persist the settings
#
## run code that gets a new value for userId, and persist the settings
#user = "biden_joe"
#settings.userId = user
#settings.store()
#
#
## Example C
##-----------------------------------------------------------------------
## recover the Settings instance, change an attribute, and store it again.
##-----------------------------------------------------------------------
#settings = Settings(settingsFile)
#settings.userId = "vanrossum_g"
#settings.store()
#
#"""
# def __init__(self, filename): # obtaining filename is required
# self.filename = None
# raise NotImplementedError()
#
# def restore(self):
# """
# Set the values of whatever attributes are recoverable
# from the pickle file.
#
# Populate the attributes (the __dict__) of the EgStore object
# from the attributes (the __dict__) of the pickled object.
#
# If the pickled object has attributes that have been initialized
# in the EgStore object, then those attributes of the EgStore object
# will be replaced by the values of the corresponding attributes
# in the pickled object.
#
# If the pickled object is missing some attributes that have
# been initialized in the EgStore object, then those attributes
# of the EgStore object will retain the values that they were
# initialized with.
#
# If the pickled object has some attributes that were not
# initialized in the EgStore object, then those attributes
# will be ignored.
#
# IN SUMMARY:
#
# After the restore() operation, the EgStore object will have all,
# and only, the attributes that it had when it was initialized.
#
# Where possible, those attributes will have values recovered
# from the pickled object.
# """
# if not os.path.exists(self.filename): return self
# if not os.path.isfile(self.filename): return self
#
# try:
# f = open(self.filename,"rb")
# unpickledObject = pickle.load(f)
# f.close()
#
# for key in list(self.__dict__.keys()):
# default = self.__dict__[key]
# self.__dict__[key] = unpickledObject.__dict__.get(key,default)
# except:
# pass
#
# return self
#
# def store(self):
# """
# Save the attributes of the EgStore object to a pickle file.
# Note that if the directory for the pickle file does not already exist,
# the store operation will fail.
# """
# f = open(self.filename, "wb")
# pickle.dump(self, f)
# f.close()
#
#
# def kill(self):
# """
# Delete my persistent file (i.e. pickle file), if it exists.
# """
# if os.path.isfile(self.filename):
# os.remove(self.filename)
# return
#
# def __str__(self):
# """
# return my contents as a string in an easy-to-read format.
# """
# # find the length of the longest attribute name
# longest_key_length = 0
# keys = []
# for key in self.__dict__.keys():
# keys.append(key)
# longest_key_length = max(longest_key_length, len(key))
#
# keys.sort() # sort the attribute names
# lines = []
# for key in keys:
# value = self.__dict__[key]
# key = key.ljust(longest_key_length)
# lines.append("%s : %s\n" % (key,repr(value)) )
# return "".join(lines) # return a string showing the attributes
##-----------------------------------------------------------------------
##
## test/demo easygui
##
##-----------------------------------------------------------------------
#def egdemo():
# """
# Run the EasyGui demo.
# """
# # clear the console
# writeln("\n" * 100)
#
# intro_message = ("Pick the kind of box that you wish to demo.\n"
# + "\n * Python version " + sys.version
# + "\n * EasyGui version " + egversion
# + "\n * Tk version " + str(TkVersion)
# )
#
# #========================================== END DEMONSTRATION DATA
#
#
# while 1: # do forever
# choices = [
# "msgbox",
# "buttonbox",
# "buttonbox(image) -- a buttonbox that displays an image",
# "choicebox",
# "multchoicebox",
# "textbox",
# "ynbox",
# "ccbox",
# "enterbox",
# "enterbox(image) -- an enterbox that displays an image",
# "exceptionbox",
# "codebox",
# "integerbox",
# "boolbox",
# "indexbox",
# "filesavebox",
# "fileopenbox",
# "passwordbox",
# "multenterbox",
# "multpasswordbox",
# "diropenbox",
# "About EasyGui",
# " Help"
# ]
# choice = choicebox(msg=intro_message
# , title="EasyGui " + egversion
# , choices=choices)
#
# if not choice: return
#
# reply = choice.split()
#
# if reply[0] == "msgbox":
# reply = msgbox("short msg", "This is a long title")
# writeln("Reply was: %s" % repr(reply))
#
# elif reply[0] == "About":
# reply = abouteasygui()
#
# elif reply[0] == "Help":
# _demo_help()
#
# elif reply[0] == "buttonbox":
# reply = buttonbox()
# writeln("Reply was: %s" % repr(reply))
#
# title = "Demo of Buttonbox with many, many buttons!"
# msg = "This buttonbox shows what happens when you specify too many buttons."
# reply = buttonbox(msg=msg, title=title, choices=choices)
# writeln("Reply was: %s" % repr(reply))
#
# elif reply[0] == "buttonbox(image)":
# _demo_buttonbox_with_image()
#
# elif reply[0] == "boolbox":
# reply = boolbox()
# writeln("Reply was: %s" % repr(reply))
#
# elif reply[0] == "enterbox":
# image = "python_and_check_logo.gif"
# message = "Enter the name of your best friend."\
# "\n(Result will be stripped.)"
# reply = enterbox(message, "Love!", " Suzy Smith ")
# writeln("Reply was: %s" % repr(reply))
#
# message = "Enter the name of your best friend."\
# "\n(Result will NOT be stripped.)"
# reply = enterbox(message, "Love!", " Suzy Smith ",strip=False)
# writeln("Reply was: %s" % repr(reply))
#
# reply = enterbox("Enter the name of your worst enemy:", "Hate!")
# writeln("Reply was: %s" % repr(reply))
#
# elif reply[0] == "enterbox(image)":
# image = "python_and_check_logo.gif"
# message = "What kind of snake is this?"
# reply = enterbox(message, "Quiz",image=image)
# writeln("Reply was: %s" % repr(reply))
#
# elif reply[0] == "exceptionbox":
# try:
# thisWillCauseADivideByZeroException = 1/0
# except:
# exceptionbox()
#
# elif reply[0] == "integerbox":
# reply = integerbox(
# "Enter a number between 3 and 333",
# "Demo: integerbox WITH a default value",
# 222, 3, 333)
# writeln("Reply was: %s" % repr(reply))
#
# reply = integerbox(
# "Enter a number between 0 and 99",
# "Demo: integerbox WITHOUT a default value"
# )
# writeln("Reply was: %s" % repr(reply))
#
# elif reply[0] == "diropenbox" : _demo_diropenbox()
# elif reply[0] == "fileopenbox": _demo_fileopenbox()
# elif reply[0] == "filesavebox": _demo_filesavebox()
#
# elif reply[0] == "indexbox":
# title = reply[0]
# msg = "Demo of " + reply[0]
# choices = ["Choice1", "Choice2", "Choice3", "Choice4"]
# reply = indexbox(msg, title, choices)
# writeln("Reply was: %s" % repr(reply))
#
# elif reply[0] == "passwordbox":
# reply = passwordbox("Demo of password box WITHOUT default"
# + "\n\nEnter your secret password", "Member Logon")
# writeln("Reply was: %s" % str(reply))
#
# reply = passwordbox("Demo of password box WITH default"
# + "\n\nEnter your secret password", "Member Logon", "alfie")
# writeln("Reply was: %s" % str(reply))
#
# elif reply[0] == "multenterbox":
# msg = "Enter your personal information"
# title = "Credit Card Application"
# fieldNames = ["Name","Street Address","City","State","ZipCode"]
# fieldValues = [] # we start with blanks for the values
# fieldValues = multenterbox(msg,title, fieldNames)
#
# # make sure that none of the fields was left blank
# while 1:
# if fieldValues == None: break
# errmsg = ""
# for i in range(len(fieldNames)):
# if fieldValues[i].strip() == "":
# errmsg = errmsg + ('"%s" is a required field.\n\n' % fieldNames[i])
# if errmsg == "": break # no problems found
# fieldValues = multenterbox(errmsg, title, fieldNames, fieldValues)
#
# writeln("Reply was: %s" % str(fieldValues))
#
# elif reply[0] == "multpasswordbox":
# msg = "Enter logon information"
# title = "Demo of multpasswordbox"
# fieldNames = ["Server ID", "User ID", "Password"]
# fieldValues = [] # we start with blanks for the values
# fieldValues = multpasswordbox(msg,title, fieldNames)
#
# # make sure that none of the fields was left blank
# while 1:
# if fieldValues == None: break
# errmsg = ""
# for i in range(len(fieldNames)):
# if fieldValues[i].strip() == "":
# errmsg = errmsg + ('"%s" is a required field.\n\n' % fieldNames[i])
# if errmsg == "": break # no problems found
# fieldValues = multpasswordbox(errmsg, title, fieldNames, fieldValues)
#
# writeln("Reply was: %s" % str(fieldValues))
#
# elif reply[0] == "ynbox":
# title = "Demo of ynbox"
# msg = "Were you expecting the Spanish Inquisition?"
# reply = ynbox(msg, title)
# writeln("Reply was: %s" % repr(reply))
# if reply:
# msgbox("NOBODY expects the Spanish Inquisition!", "Wrong!")
#
# elif reply[0] == "ccbox":
# title = "Demo of ccbox"
# reply = ccbox(msg,title)
# writeln("Reply was: %s" % repr(reply))
#
# elif reply[0] == "choicebox":
# title = "Demo of choicebox"
# longchoice = "This is an example of a very long option which you may or may not wish to choose."*2
# listChoices = ["nnn", "ddd", "eee", "fff", "aaa", longchoice
# , "aaa", "bbb", "ccc", "ggg", "hhh", "iii", "jjj", "kkk", "LLL", "mmm" , "nnn", "ooo", "ppp", "qqq", "rrr", "sss", "ttt", "uuu", "vvv"]
#
# msg = "Pick something. " + ("A wrapable sentence of text ?! "*30) + "\nA separate line of text."*6
# reply = choicebox(msg=msg, choices=listChoices)
# writeln("Reply was: %s" % repr(reply))
#
# msg = "Pick something. "
# reply = choicebox(msg=msg, title=title, choices=listChoices)
# writeln("Reply was: %s" % repr(reply))
#
# msg = "Pick something. "
# reply = choicebox(msg="The list of choices is empty!", choices=[])
# writeln("Reply was: %s" % repr(reply))
#
# elif reply[0] == "multchoicebox":
# listChoices = ["aaa", "bbb", "ccc", "ggg", "hhh", "iii", "jjj", "kkk"
# , "LLL", "mmm" , "nnn", "ooo", "ppp", "qqq"
# , "rrr", "sss", "ttt", "uuu", "vvv"]
#
# msg = "Pick as many choices as you wish."
# reply = multchoicebox(msg,"Demo of multchoicebox", listChoices)
# writeln("Reply was: %s" % repr(reply))
#
# elif reply[0] == "textbox": _demo_textbox(reply[0])
# elif reply[0] == "codebox": _demo_codebox(reply[0])
#
# else:
# msgbox("Choice\n\n" + choice + "\n\nis not recognized", "Program Logic Error")
# return
#
#
#def _demo_textbox(reply):
# text_snippet = ((\
#"""It was the best of times, and it was the worst of times. The rich ate cake, and the poor had cake recommended to them, but wished only for enough cash to buy bread. The time was ripe for revolution! """ \
#*5)+"\n\n")*10
# title = "Demo of textbox"
# msg = "Here is some sample text. " * 16
# reply = textbox(msg, title, text_snippet)
# writeln("Reply was: %s" % str(reply))
#
#def _demo_codebox(reply):
# code_snippet = ("dafsdfa dasflkj pp[oadsij asdfp;ij asdfpjkop asdfpok asdfpok asdfpok"*3) +"\n"+\
#"""# here is some dummy Python code
#for someItem in myListOfStuff:
# do something(someItem)
# do something()
# do something()
# if somethingElse(someItem):
# doSomethingEvenMoreInteresting()
#
#"""*16
# msg = "Here is some sample code. " * 16
# reply = codebox(msg, "Code Sample", code_snippet)
# writeln("Reply was: %s" % repr(reply))
#
#
#def _demo_buttonbox_with_image():
#
# msg = "Do you like this picture?\nIt is "
# choices = ["Yes","No","No opinion"]
#
# for image in [
# "python_and_check_logo.gif"
# ,"python_and_check_logo.jpg"
# ,"python_and_check_logo.png"
# ,"zzzzz.gif"]:
#
# reply=buttonbox(msg + image,image=image,choices=choices)
# writeln("Reply was: %s" % repr(reply))
#
#
#def _demo_help():
# savedStdout = sys.stdout # save the sys.stdout file object
# sys.stdout = capturedOutput = StringIO()
# help("easygui")
# sys.stdout = savedStdout # restore the sys.stdout file object
# codebox("EasyGui Help",text=capturedOutput.getvalue())
#
#def _demo_filesavebox():
# filename = "myNewFile.txt"
# title = "File SaveAs"
# msg ="Save file as:"
#
# f = filesavebox(msg,title,default=filename)
# writeln("You chose to save file: %s" % f)
#
#def _demo_diropenbox():
# title = "Demo of diropenbox"
# msg = "Pick the directory that you wish to open."
# d = diropenbox(msg, title)
# writeln("You chose directory...: %s" % d)
#
# d = diropenbox(msg, title,default="./")
# writeln("You chose directory...: %s" % d)
#
# d = diropenbox(msg, title,default="c:/")
# writeln("You chose directory...: %s" % d)
#
#
#def _demo_fileopenbox():
# msg = "Python files"
# title = "Open files"
# default="*.py"
# f = fileopenbox(msg,title,default=default)
# writeln("You chose to open file: %s" % f)
#
# default="./*.gif"
# filetypes = ["*.jpg",["*.zip","*.tgs","*.gz", "Archive files"],["*.htm", "*.html","HTML files"]]
# f = fileopenbox(msg,title,default=default,filetypes=filetypes)
# writeln("You chose to open file: %s" % f)
#
# """#deadcode -- testing ----------------------------------------
# f = fileopenbox(None,None,default=default)
# writeln("You chose to open file: %s" % f)
#
# f = fileopenbox(None,title,default=default)
# writeln("You chose to open file: %s" % f)
#
# f = fileopenbox(msg,None,default=default)
# writeln("You chose to open file: %s" % f)
#
# f = fileopenbox(default=default)
# writeln("You chose to open file: %s" % f)
#
# f = fileopenbox(default=None)
# writeln("You chose to open file: %s" % f)
# #----------------------------------------------------deadcode """
#
#
#def _dummy():
# pass
#
#EASYGUI_ABOUT_INFORMATION = '''
#========================================================================
#0.96(2010-08-29)
#========================================================================
#This version fixes some problems with version independence.
#
#BUG FIXES
#------------------------------------------------------
# * A statement with Python 2.x-style exception-handling syntax raised
# a syntax error when running under Python 3.x.
# Thanks to David Williams for reporting this problem.
#
# * Under some circumstances, PIL was unable to display non-gif images
# that it should have been able to display.
# The cause appears to be non-version-independent import syntax.
# PIL modules are now imported with a version-independent syntax.
# Thanks to Horst Jens for reporting this problem.
#
#LICENSE CHANGE
#------------------------------------------------------
#Starting with this version, EasyGui is licensed under what is generally known as
#the "modified BSD license" (aka "revised BSD", "new BSD", "3-clause BSD").
#This license is GPL-compatible but less restrictive than GPL.
#Earlier versions were licensed under the Creative Commons Attribution License 2.0.
#
#
#========================================================================
#0.95(2010-06-12)
#========================================================================
#
#ENHANCEMENTS
#------------------------------------------------------
# * Previous versions of EasyGui could display only .gif image files using the
# msgbox "image" argument. This version can now display all image-file formats
#   supported by PIL (the Python Imaging Library) if PIL is installed.
# If msgbox is asked to open a non-gif image file, it attempts to import
# PIL and to use PIL to convert the image file to a displayable format.
# If PIL cannot be imported (probably because PIL is not installed)
# EasyGui displays an error message saying that PIL must be installed in order
# to display the image file.
#
# Note that
# http://www.pythonware.com/products/pil/
# says that PIL doesn't yet support Python 3.x.
#
#
#========================================================================
#0.94(2010-06-06)
#========================================================================
#
#ENHANCEMENTS
#------------------------------------------------------
# * The codebox and textbox functions now return the contents of the box, rather
# than simply the name of the button ("Yes"). This makes it possible to use
# codebox and textbox as data-entry widgets. A big "thank you!" to Dominic
# Comtois for requesting this feature, patiently explaining his requirement,
# and helping to discover the tkinter techniques to implement it.
#
# NOTE THAT in theory this change breaks backward compatibility. But because
# (in previous versions of EasyGui) the value returned by codebox and textbox
# was meaningless, no application should have been checking it. So in actual
# practice, this change should not break backward compatibility.
#
# * Added support for SPACEBAR to command buttons. Now, when keyboard
# focus is on a command button, a press of the SPACEBAR will act like
# a press of the ENTER key; it will activate the command button.
#
# * Added support for keyboard navigation with the arrow keys (up,down,left,right)
# to the fields and buttons in enterbox, multenterbox and multpasswordbox,
# and to the buttons in choicebox and all buttonboxes.
#
# * added highlightthickness=2 to entry fields in multenterbox and
# multpasswordbox. Now it is easier to tell which entry field has
# keyboard focus.
#
#
#BUG FIXES
#------------------------------------------------------
# * In EgStore, the pickle file is now opened with "rb" and "wb" rather than
# with "r" and "w". This change is necessary for compatibility with Python 3+.
# Thanks to Marshall Mattingly for reporting this problem and providing the fix.
#
# * In integerbox, the actual argument names did not match the names described
#   in the docstring. Thanks to Daniel Zingaro at the University of Toronto for
# reporting this problem.
#
# * In integerbox, the "argLowerBound" and "argUpperBound" arguments have been
# renamed to "lowerbound" and "upperbound" and the docstring has been corrected.
#
# NOTE THAT THIS CHANGE TO THE ARGUMENT-NAMES BREAKS BACKWARD COMPATIBILITY.
# If argLowerBound or argUpperBound are used, an AssertionError with an
# explanatory error message is raised.
#
# * In choicebox, the signature to choicebox incorrectly showed choicebox as
# accepting a "buttons" argument. The signature has been fixed.
#
#
#========================================================================
#0.93(2009-07-07)
#========================================================================
#
#ENHANCEMENTS
#------------------------------------------------------
#
# * Added exceptionbox to display stack trace of exceptions
#
# * modified names of some font-related constants to make it
# easier to customize them
#
#
#========================================================================
#0.92(2009-06-22)
#========================================================================
#
#ENHANCEMENTS
#------------------------------------------------------
#
# * Added EgStore class to provide basic easy-to-use persistence.
#
#BUG FIXES
#------------------------------------------------------
#
# * Fixed a bug that was preventing Linux users from copying text out of
# a textbox and a codebox. This was not a problem for Windows users.
#
#'''
#
#def abouteasygui():
# """
# shows the easygui revision history
# """
# codebox("About EasyGui\n"+egversion,"EasyGui",EASYGUI_ABOUT_INFORMATION)
# return None
#
#
#
#if __name__ == '__main__':
# if True:
# egdemo()
# else:
# # test the new root feature
# root = Tk()
# msg = """This is a test of a main Tk() window in which we will place an easygui msgbox.
# It will be an interesting experiment.\n\n"""
# messageWidget = Message(root, text=msg, width=1000)
# messageWidget.pack(side=TOP, expand=YES, fill=X, padx='3m', pady='3m')
# messageWidget = Message(root, text=msg, width=1000)
# messageWidget.pack(side=TOP, expand=YES, fill=X, padx='3m', pady='3m')
#
#
# msgbox("this is a test of passing in boxRoot", root=root)
# msgbox("this is a second test of passing in boxRoot", root=root)
#
# reply = enterbox("Enter something", root=root)
# writeln("You wrote:", reply)
#
# reply = enterbox("Enter something else", root=root)
# writeln("You wrote:", reply)
# root.destroy()
| gpl-3.0 | -8,555,048,554,977,324,000 | 34.305534 | 210 | 0.565633 | false |
karllessard/tensorflow | tensorflow/python/distribute/mirrored_run.py | 3 | 19404 | # Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Class MirroredStrategy implementing tf.distribute.Strategy."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import contextlib
import functools
import threading
import weakref
from tensorflow.python import pywrap_tfe
from tensorflow.python.autograph.core import ag_ctx as autograph_ctx
from tensorflow.python.autograph.impl import api as autograph
from tensorflow.python.distribute import distribute_lib
from tensorflow.python.distribute import distribute_utils
from tensorflow.python.distribute import shared_variable_creator
from tensorflow.python.eager import context
from tensorflow.python.eager import def_function
from tensorflow.python.framework import device as tf_device
from tensorflow.python.framework import ops
from tensorflow.python.ops import summary_ops_v2
from tensorflow.python.ops import variable_scope
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import coordinator
def call_for_each_replica(strategy, fn, args=None, kwargs=None):
"""Call `fn` on each worker devices(replica).
It's highly recommended to wrap the call to this function inside a
`tf.function`, otherwise the performance is poor.
Args:
strategy: `tf.distribute.Strategy`.
    fn: function to call on each worker device.
args: positional arguments to `fn`.
kwargs: keyword arguments to `fn`.
Returns:
Wrapped returned value of `fn` from all replicas.
"""
if args is None:
args = ()
if kwargs is None:
kwargs = {}
if isinstance(fn, def_function.Function):
if strategy not in _cfer_fn_cache:
_cfer_fn_cache[strategy] = weakref.WeakKeyDictionary()
wrapped = _cfer_fn_cache[strategy].get(fn)
if wrapped is None:
# We need to wrap fn such that it triggers _call_for_each_replica inside
# the tf.function. We use _clone() instead of @tf.function wrapped
# call_for_each_replica() because we would like to retain the arguments to
# the @tf.function decorator of fn.
wrapped = fn._clone( # pylint: disable=protected-access
python_function=functools.partial(call_for_each_replica, strategy,
fn.python_function))
_cfer_fn_cache[strategy][fn] = wrapped
return wrapped(args, kwargs)
if context.executing_eagerly():
logging.log_first_n(
logging.WARN, "Using %s eagerly has significant "
"overhead currently. We will be working on improving "
"this in the future, but for now please wrap "
"`call_for_each_replica` or `experimental_run` or "
"`run` inside a tf.function to get "
"the best performance." % strategy.__class__.__name__, 5)
else:
# When a tf.function is wrapped to trigger _call_for_each_replica (see
# the other branch above), AutoGraph stops conversion at
# _call_for_each_replica itself (TF library functions are allowlisted).
# This makes sure that the Python function that originally passed to
# the tf.function is still converted.
fn = autograph.tf_convert(fn, autograph_ctx.control_status_ctx())
return _call_for_each_replica(strategy, fn, args, kwargs)
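# Illustrative usage sketch (not part of this module; `train_step` and
# `dataset` are assumed placeholder names). Per the docstring above, wrapping
# the distributed call in a `tf.function` avoids the eager-mode overhead:
#
#   strategy = tf.distribute.MirroredStrategy()
#
#   @tf.function
#   def distributed_step(inputs):
#     return strategy.run(train_step, args=(inputs,))
#
#   for inputs in dataset:
#     distributed_step(inputs)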
# Per strategy cache for call_for_each_replica def_function.Function objects.
_cfer_fn_cache = weakref.WeakKeyDictionary()
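# `_cfer_fn_cache` maps each tf.distribute.Strategy to a WeakKeyDictionary
# that maps an original `def_function.Function` to its wrapped clone, so each
# (strategy, fn) pair is only re-wrapped once. Weak keys let entries disappear
# together with their strategy or function objects.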
@contextlib.contextmanager
def _enter_graph(g, eager, creator_stack=None):
"""Context manager for selecting a graph and maybe eager mode."""
if eager:
with g.as_default(), context.eager_mode():
if creator_stack is not None:
g._variable_creator_stack = creator_stack # pylint: disable=protected-access
yield
else:
with g.as_default():
if creator_stack is not None:
g._variable_creator_stack = creator_stack # pylint: disable=protected-access
yield
def _cpu_device(device):
cpu_device = tf_device.DeviceSpec.from_string(device)
cpu_device = cpu_device.replace(device_type="CPU", device_index=0)
return cpu_device.to_string()
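# For example (illustrative device string):
#   _cpu_device("/job:worker/replica:0/task:0/device:GPU:1")
#   returns "/job:worker/replica:0/task:0/device:CPU:0"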
class _RequestedStop(Exception): # pylint: disable=g-bad-exception-name
pass
def _call_for_each_replica(distribution, fn, args, kwargs):
"""Run `fn` in separate threads, once per replica/worker device.
Args:
distribution: the DistributionStrategy object.
fn: function to run (will be run once per replica, each in its own thread).
args: positional arguments for `fn`
kwargs: keyword arguments for `fn`.
Returns:
Merged return value of `fn` across all replicas.
Raises:
    RuntimeError: If fn() calls get_replica_context().merge_call() a different
      number of times on different replicas.
"""
# TODO(josh11b): Add this option once we add synchronization to variable
# creation. Until then, this is pretty unsafe to use.
run_concurrently = False
if not context.executing_eagerly():
# Needed for per-thread device, etc. contexts in graph mode.
ops.get_default_graph().switch_to_thread_local()
coord = coordinator.Coordinator(clean_stop_exception_types=(_RequestedStop,))
shared_variable_store = {}
devices = distribution.extended.worker_devices
# TODO(isaprykin): Create these threads once instead of during every call.
threads = []
for index in range(len(devices)):
variable_creator_fn = shared_variable_creator.make_fn(
shared_variable_store, index)
t = _MirroredReplicaThread(
distribution, coord, index, devices, variable_creator_fn, fn,
distribute_utils.select_replica(index, args),
distribute_utils.select_replica(index, kwargs))
threads.append(t)
for t in threads:
t.start()
# When `fn` starts `should_run` event is set on _MirroredReplicaThread
# (`MRT`) threads. The execution waits until
# `MRT.has_paused` is set, which indicates that either `fn` is
# complete or a `get_replica_context().merge_call()` is called. If `fn` is
# complete, then `MRT.done` is set to True. Otherwise, arguments
# of `get_replica_context().merge_call` from all paused threads are grouped
# and the `merge_fn` is performed. Results of the
# `get_replica_context().merge_call` are then set to `MRT.merge_result`.
# Each such `get_replica_context().merge_call` call returns the
# `MRT.merge_result` for that thread when `MRT.should_run` event
# is reset again. Execution of `fn` resumes.
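  #
  # Rough per-step handshake (illustrative summary of the above):
  #   main thread:    t.should_run.set()      # let the replica thread run
  #   replica thread: runs `fn` until merge_call() or completion
  #   replica thread: t.has_paused.set()      # hand control back
  #   main thread:    checks t.done, or runs merge_fn and sets t.merge_result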
try:
with coord.stop_on_exception():
all_done = False
while not all_done and not coord.should_stop():
done = []
if run_concurrently:
for t in threads:
t.should_run.set()
for t in threads:
t.has_paused.wait()
t.has_paused.clear()
if coord.should_stop():
return None
done.append(t.done)
else:
for t in threads:
t.should_run.set()
t.has_paused.wait()
t.has_paused.clear()
if coord.should_stop():
return None
done.append(t.done)
if coord.should_stop():
return None
all_done = all(done)
if not all_done:
if any(done):
raise RuntimeError("Some replicas made a different number of "
"replica_context().merge_call() calls.")
# get_replica_context().merge_call() case
merge_args = distribute_utils.regroup(
tuple(t.merge_args for t in threads))
merge_kwargs = distribute_utils.regroup(
tuple(t.merge_kwargs for t in threads))
# We capture the name_scope of the MRT when we call merge_fn
# to ensure that if we have opened a name scope in the MRT,
# it will be respected when executing the merge function. We only
# capture the name_scope from the first MRT and assume it is
# the same for all other MRTs.
mtt_captured_name_scope = threads[0].captured_name_scope
mtt_captured_var_scope = threads[0].captured_var_scope
# Capture and merge the control dependencies from all the threads.
mtt_captured_control_deps = set()
for t in threads:
mtt_captured_control_deps.update(t.captured_control_deps)
with ops.name_scope(mtt_captured_name_scope),\
ops.control_dependencies(mtt_captured_control_deps), \
variable_scope.variable_scope(mtt_captured_var_scope):
merge_result = threads[0].merge_fn(distribution, *merge_args,
**merge_kwargs)
for r, t in enumerate(threads):
t.merge_result = distribute_utils.select_replica(r, merge_result)
finally:
for t in threads:
t.should_run.set()
coord.join(threads)
return distribute_utils.regroup(tuple(t.main_result for t in threads))
class _MirroredReplicaThread(threading.Thread):
"""A thread that runs() a function on a device."""
def __init__(self, dist, coord, replica_id, devices, variable_creator_fn,
fn, args, kwargs):
super(_MirroredReplicaThread, self).__init__()
self.coord = coord
self.distribution = dist
self.devices = devices
self.replica_id = replica_id
self.variable_creator_fn = variable_creator_fn
# State needed to run and return the results of `fn`.
self.main_fn = fn
self.main_args = args
self.main_kwargs = kwargs
self.main_result = None
self.done = False
# State needed to run the next merge_call() (if any) requested via
# ReplicaContext.
self.merge_fn = None
self.merge_args = None
self.merge_kwargs = None
self.merge_result = None
self.captured_name_scope = None
self.captured_var_scope = None
# We use a thread.Event for the main thread to signal when this
# thread should start running (`should_run`), and another for
# this thread to transfer control back to the main thread
# (`has_paused`, either when it gets to a
# `get_replica_context().merge_call` or when `fn` returns). In
# either case the event starts cleared, is signaled by calling
# set(). The receiving thread waits for the signal by calling
# wait() and then immediately clearing the event using clear().
self.should_run = threading.Event()
self.has_paused = threading.Event()
# These fields have to do with inheriting various contexts from the
# parent thread:
context.ensure_initialized()
ctx = context.context()
self.in_eager = ctx.executing_eagerly()
self.record_thread_local_summary_state()
self.record_thread_local_eager_context_state()
self.context_device_policy = (
pywrap_tfe.TFE_ContextGetDevicePlacementPolicy(
ctx._context_handle)) # pylint: disable=protected-access
self.graph = ops.get_default_graph()
with ops.init_scope():
self._init_in_eager = context.executing_eagerly()
self._init_graph = ops.get_default_graph()
self._variable_creator_stack = self.graph._variable_creator_stack[:] # pylint: disable=protected-access
self._var_scope = variable_scope.get_variable_scope()
# Adding a "/" at end lets us re-enter this scope later.
self._name_scope = self.graph.get_name_scope()
if self._name_scope:
self._name_scope += "/"
if self.replica_id > 0:
if not self._name_scope:
self._name_scope = ""
self._name_scope += "replica_%d/" % self.replica_id
def run(self):
self.should_run.wait()
self.should_run.clear()
try:
if self.coord.should_stop():
return
self.restore_thread_local_summary_state()
self.restore_thread_local_eager_context_state()
# TODO(josh11b): Use current logical device instead of 0 here.
with self.coord.stop_on_exception(), \
_enter_graph(self._init_graph, self._init_in_eager), \
_enter_graph(self.graph, self.in_eager,
self._variable_creator_stack), \
context.device_policy(self.context_device_policy), \
_MirroredReplicaContext(self.distribution, self.replica_id), \
ops.device(self.devices[self.replica_id]), \
ops.name_scope(self._name_scope), \
variable_scope.variable_scope(
self._var_scope, reuse=self.replica_id > 0), \
variable_scope.variable_creator_scope(self.variable_creator_fn):
self.main_result = self.main_fn(*self.main_args, **self.main_kwargs)
self.done = True
finally:
self.has_paused.set()
def record_thread_local_summary_state(self):
"""Record the thread local summary state in self."""
# TODO(slebedev): is this still relevant? the referenced bug is closed.
summary_state = summary_ops_v2._summary_state # pylint: disable=protected-access
self._summary_step = summary_state.step
self._summary_writer = summary_state.writer
self._summary_recording = summary_state.is_recording
self._summary_recording_distribution_strategy = (
summary_state.is_recording_distribution_strategy)
def restore_thread_local_summary_state(self):
"""Restore thread local summary state from self."""
# TODO(slebedev): is this still relevant? the referenced bug is closed.
summary_state = summary_ops_v2._summary_state # pylint: disable=protected-access
summary_state.step = self._summary_step
summary_state.writer = self._summary_writer
summary_state.is_recording = self._summary_recording
summary_state.is_recording_distribution_strategy = (
self._summary_recording_distribution_strategy)
def record_thread_local_eager_context_state(self):
ctx = context.context()
eager_context_state = ctx._thread_local_data # pylint: disable=protected-access
self._eager_context_op_callbacks = eager_context_state.op_callbacks
# TODO(b/125892694): record other fields in EagerContext.
def restore_thread_local_eager_context_state(self):
ctx = context.context()
eager_context_state = ctx._thread_local_data # pylint: disable=protected-access
eager_context_state.op_callbacks = self._eager_context_op_callbacks
# TODO(b/125892694): record other fields in EagerContext.
class _MirroredReplicaContext(distribute_lib.ReplicaContext):
"""ReplicaContext for synchronized replica."""
def _merge_call(self, fn, args, kwargs):
"""`merge_call()` implementation for synchronized replica.
This pauses the current replica thread and passes `fn` and its arguments to
the main thread. The main thread will wait until all replicas pause, then
invoke `fn` with grouped arguments. The current replica thread will continue
after `fn` completes.
See `_call_for_each_replica` for the logic in the main thread.
Args:
      fn: a function that is called in cross replica context with grouped
        arguments from each replica. `fn` should return grouped values.
      args: positional arguments to `fn`.
      kwargs: keyword arguments to `fn`.
Returns:
Return value of `fn` for the current replica.
Raises:
RuntimeError: when merge_call happens in a different graph, e.g. in a
different tf.function, which is not supported now.
_RequestedStop: when stop is requested.
"""
t = threading.current_thread()
assert isinstance(t, _MirroredReplicaThread)
t.merge_fn = fn
t.merge_args = args
t.merge_kwargs = kwargs
t.captured_name_scope = t.graph.get_name_scope()
# Adding a "/" at end lets us re-enter this scope later.
if t.captured_name_scope:
t.captured_name_scope += "/"
t.captured_var_scope = variable_scope.get_variable_scope()
t.captured_control_deps = t.graph._current_control_dependencies() # pylint: disable=protected-access
# It is problematic if `merge_call` is called under a different graph other
# than the one that `_call_for_each_replica` is called under, there are
# 3 cases this can happen:
#
# 1. The `fn` passed to `_call_for_each_replica` is decorated with
# `tf.function` and there is a `merge_call` in `fn`. Since
# MirroredStrategy traces a separate function per thread (per device),
# and each trace takes a shared lock, the lock is never released by the
# first thread and subsequent replica threads cannot proceed to trace
# their own functions. This issue is addressed by always converting
# `_call_for_each_replica(tf.function(f))` to
    #      `tf.function(_call_for_each_replica(f))` in
# `MirroredStrategy._call_for_each_replica`.
#
# 2. The `fn` passed to `_call_for_each_replica` contains a nested
# `tf.function`, and there is a `merge_call` in the nested `tf.function`.
# In this case each thread can successfully trace its own function, but
# since the `merge_fn` passed to `merge_call` is executed in the main
# thread (where `_call_for_each_replica` is executed), it can't access
# the tensors that come from different graphs.
#
# 3. The `fn` passed to `_call_for_each_replica` contains a control-flow
# statement, and there is a `merge_call` inside the control-flow body,
# `fn` or `_call_for_each_replica` is decorated with `tf.function`.
# Control flow statement creates a separate graph for its body, similar
# to #2, `merge_fn` executed in the main thread can't access the
# tensors that come from different graphs.
#
# We raise an error for #2 and #3.
if ops.get_default_graph() != t.graph:
raise RuntimeError(
"`merge_call` called while defining a new graph or a tf.function."
" This can often happen if the function `fn` passed to"
" `strategy.run()` contains a nested `@tf.function`, and the nested "
"`@tf.function` contains a synchronization point, such as aggregating"
" gradients (e.g, optimizer.apply_gradients), or if the function `fn`"
" uses a control flow statement which contains a synchronization"
" point in the body. Such behaviors are not yet supported. Instead,"
" please avoid nested `tf.function`s or control flow statements that"
" may potentially cross a synchronization boundary, for example,"
" wrap the `fn` passed to `strategy.run` or the entire `strategy.run`"
" inside a `tf.function` or move the control flow out of `fn`")
t.has_paused.set()
t.should_run.wait()
t.should_run.clear()
if t.coord.should_stop():
raise _RequestedStop()
return t.merge_result
@property
def devices(self):
distribute_lib.require_replica_context(self)
return [
self._strategy.extended.worker_devices_by_replica[
self._replica_id_in_sync_group]
]
| apache-2.0 | -2,522,490,858,203,456,500 | 41.740088 | 108 | 0.673727 | false |
darjeeling/django | tests/template_tests/syntax_tests/test_width_ratio.py | 56 | 5956 | from django.template import TemplateSyntaxError
from django.test import SimpleTestCase
from ..utils import setup
class WidthRatioTagTests(SimpleTestCase):
libraries = {'custom': 'template_tests.templatetags.custom'}
@setup({'widthratio01': '{% widthratio a b 0 %}'})
def test_widthratio01(self):
output = self.engine.render_to_string('widthratio01', {'a': 50, 'b': 100})
self.assertEqual(output, '0')
@setup({'widthratio02': '{% widthratio a b 100 %}'})
def test_widthratio02(self):
output = self.engine.render_to_string('widthratio02', {'a': 0, 'b': 0})
self.assertEqual(output, '0')
@setup({'widthratio03': '{% widthratio a b 100 %}'})
def test_widthratio03(self):
output = self.engine.render_to_string('widthratio03', {'a': 0, 'b': 100})
self.assertEqual(output, '0')
@setup({'widthratio04': '{% widthratio a b 100 %}'})
def test_widthratio04(self):
output = self.engine.render_to_string('widthratio04', {'a': 50, 'b': 100})
self.assertEqual(output, '50')
@setup({'widthratio05': '{% widthratio a b 100 %}'})
def test_widthratio05(self):
output = self.engine.render_to_string('widthratio05', {'a': 100, 'b': 100})
self.assertEqual(output, '100')
@setup({'widthratio06': '{% widthratio a b 100 %}'})
def test_widthratio06(self):
"""
62.5 should round to 62
"""
output = self.engine.render_to_string('widthratio06', {'a': 50, 'b': 80})
self.assertEqual(output, '62')
@setup({'widthratio07': '{% widthratio a b 100 %}'})
def test_widthratio07(self):
"""
71.4 should round to 71
"""
output = self.engine.render_to_string('widthratio07', {'a': 50, 'b': 70})
self.assertEqual(output, '71')
# Raise exception if we don't have 3 args, last one an integer
@setup({'widthratio08': '{% widthratio %}'})
def test_widthratio08(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('widthratio08')
@setup({'widthratio09': '{% widthratio a b %}'})
def test_widthratio09(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.render_to_string('widthratio09', {'a': 50, 'b': 100})
@setup({'widthratio10': '{% widthratio a b 100.0 %}'})
def test_widthratio10(self):
output = self.engine.render_to_string('widthratio10', {'a': 50, 'b': 100})
self.assertEqual(output, '50')
@setup({'widthratio11': '{% widthratio a b c %}'})
def test_widthratio11(self):
"""
#10043: widthratio should allow max_width to be a variable
"""
output = self.engine.render_to_string('widthratio11', {'a': 50, 'c': 100, 'b': 100})
self.assertEqual(output, '50')
# #18739: widthratio should handle None args consistently with
# non-numerics
@setup({'widthratio12a': '{% widthratio a b c %}'})
def test_widthratio12a(self):
output = self.engine.render_to_string('widthratio12a', {'a': 'a', 'c': 100, 'b': 100})
self.assertEqual(output, '')
@setup({'widthratio12b': '{% widthratio a b c %}'})
def test_widthratio12b(self):
output = self.engine.render_to_string('widthratio12b', {'a': None, 'c': 100, 'b': 100})
self.assertEqual(output, '')
@setup({'widthratio13a': '{% widthratio a b c %}'})
def test_widthratio13a(self):
output = self.engine.render_to_string('widthratio13a', {'a': 0, 'c': 100, 'b': 'b'})
self.assertEqual(output, '')
@setup({'widthratio13b': '{% widthratio a b c %}'})
def test_widthratio13b(self):
output = self.engine.render_to_string('widthratio13b', {'a': 0, 'c': 100, 'b': None})
self.assertEqual(output, '')
@setup({'widthratio14a': '{% widthratio a b c %}'})
def test_widthratio14a(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.render_to_string('widthratio14a', {'a': 0, 'c': 'c', 'b': 100})
@setup({'widthratio14b': '{% widthratio a b c %}'})
def test_widthratio14b(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.render_to_string('widthratio14b', {'a': 0, 'c': None, 'b': 100})
@setup({'widthratio15': '{% load custom %}{% widthratio a|noop:"x y" b 0 %}'})
def test_widthratio15(self):
"""
Test whitespace in filter argument
"""
output = self.engine.render_to_string('widthratio15', {'a': 50, 'b': 100})
self.assertEqual(output, '0')
# Widthratio with variable assignment
@setup({'widthratio16': '{% widthratio a b 100 as variable %}-{{ variable }}-'})
def test_widthratio16(self):
output = self.engine.render_to_string('widthratio16', {'a': 50, 'b': 100})
self.assertEqual(output, '-50-')
@setup({'widthratio17': '{% widthratio a b 100 as variable %}-{{ variable }}-'})
def test_widthratio17(self):
output = self.engine.render_to_string('widthratio17', {'a': 100, 'b': 100})
self.assertEqual(output, '-100-')
@setup({'widthratio18': '{% widthratio a b 100 as %}'})
def test_widthratio18(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('widthratio18')
@setup({'widthratio19': '{% widthratio a b 100 not_as variable %}'})
def test_widthratio19(self):
with self.assertRaises(TemplateSyntaxError):
self.engine.get_template('widthratio19')
@setup({'widthratio20': '{% widthratio a b 100 %}'})
def test_widthratio20(self):
output = self.engine.render_to_string('widthratio20', {'a': float('inf'), 'b': float('inf')})
self.assertEqual(output, '')
@setup({'widthratio21': '{% widthratio a b 100 %}'})
def test_widthratio21(self):
output = self.engine.render_to_string('widthratio21', {'a': float('inf'), 'b': 2})
self.assertEqual(output, '')
| bsd-3-clause | 7,121,642,453,124,679,000 | 40.361111 | 101 | 0.603089 | false |
krafczyk/spack | var/spack/repos/builtin/packages/perl-dbd-mysql/package.py | 5 | 1729 | ##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class PerlDbdMysql(PerlPackage):
"""MySQL driver for the Perl5 Database Interface (DBI)"""
homepage = "http://search.cpan.org/~michielb/DBD-mysql-4.043/lib/DBD/mysql.pm"
url = "http://search.cpan.org/CPAN/authors/id/M/MI/MICHIELB/DBD-mysql-4.043.tar.gz"
version('4.043', '4a00dd7f1c057931147c65dfc4901c36')
depends_on('perl-test-deep', type=('build', 'run'))
depends_on('perl-dbi', type=('build', 'run'))
depends_on('mariadb@:10.1.23')
| lgpl-2.1 | 8,481,777,820,896,529,000 | 44.5 | 92 | 0.67033 | false |
peterbe/airmozilla | vendor-local/lib/python/south/creator/changes.py | 20 | 23267 | """
Contains things to detect changes - either using options passed in on the
commandline, or by using autodetection, etc.
"""
from django.db import models
from django.contrib.contenttypes.generic import GenericRelation
from django.utils.datastructures import SortedDict
from south.creator.freezer import remove_useless_attributes, freeze_apps, model_key
from south.utils import auto_through
class BaseChanges(object):
"""
Base changes class.
"""
def suggest_name(self):
return ''
def split_model_def(self, model, model_def):
"""
Given a model and its model def (a dict of field: triple), returns three
items: the real fields dict, the Meta dict, and the M2M fields dict.
"""
real_fields = SortedDict()
meta = SortedDict()
m2m_fields = SortedDict()
for name, triple in model_def.items():
if name == "Meta":
meta = triple
elif isinstance(model._meta.get_field_by_name(name)[0], models.ManyToManyField):
m2m_fields[name] = triple
else:
real_fields[name] = triple
return real_fields, meta, m2m_fields
def current_model_from_key(self, key):
app_label, model_name = key.split(".")
return models.get_model(app_label, model_name)
def current_field_from_key(self, key, fieldname):
app_label, model_name = key.split(".")
# Special, for the magical field from order_with_respect_to
if fieldname == "_order":
field = models.IntegerField()
field.name = "_order"
field.attname = "_order"
field.column = "_order"
field.default = 0
return field
# Otherwise, normal.
return models.get_model(app_label, model_name)._meta.get_field_by_name(fieldname)[0]
class AutoChanges(BaseChanges):
"""
Detects changes by 'diffing' two sets of frozen model definitions.
"""
# Field types we don't generate add/remove field changes for.
IGNORED_FIELD_TYPES = [
GenericRelation,
]
def __init__(self, migrations, old_defs, old_orm, new_defs):
self.migrations = migrations
self.old_defs = old_defs
self.old_orm = old_orm
self.new_defs = new_defs
def suggest_name(self):
parts = ["auto"]
for change_name, params in self.get_changes():
if change_name == "AddModel":
parts.append("add_%s" % params['model']._meta.object_name.lower())
elif change_name == "DeleteModel":
parts.append("del_%s" % params['model']._meta.object_name.lower())
elif change_name == "AddField":
parts.append("add_field_%s_%s" % (
params['model']._meta.object_name.lower(),
params['field'].name,
))
elif change_name == "DeleteField":
parts.append("del_field_%s_%s" % (
params['model']._meta.object_name.lower(),
params['field'].name,
))
elif change_name == "ChangeField":
parts.append("chg_field_%s_%s" % (
params['model']._meta.object_name.lower(),
params['new_field'].name,
))
elif change_name == "AddUnique":
parts.append("add_unique_%s_%s" % (
params['model']._meta.object_name.lower(),
"_".join([x.name for x in params['fields']]),
))
elif change_name == "DeleteUnique":
parts.append("del_unique_%s_%s" % (
params['model']._meta.object_name.lower(),
"_".join([x.name for x in params['fields']]),
))
return ("__".join(parts))[:70]
def get_changes(self):
"""
        Yields the differences between the old and new sets of models as
        (action_name, params) pairs (e.g. "AddModel", "DeleteField",
        "ChangeField", "AddUnique", "AddM2M" and their counterparts).
"""
deleted_models = set()
# See if anything's vanished
for key in self.old_defs:
if key not in self.new_defs:
# We shouldn't delete it if it was managed=False
old_fields, old_meta, old_m2ms = self.split_model_def(self.old_orm[key], self.old_defs[key])
if old_meta.get("managed", "True") != "False":
# Alright, delete it.
yield ("DeleteModel", {
"model": self.old_orm[key],
"model_def": old_fields,
})
# Also make sure we delete any M2Ms it had.
for fieldname in old_m2ms:
# Only delete its stuff if it wasn't a through=.
field = self.old_orm[key + ":" + fieldname]
if auto_through(field):
yield ("DeleteM2M", {"model": self.old_orm[key], "field": field})
# And any unique constraints it had
unique_together = eval(old_meta.get("unique_together", "[]"))
if unique_together:
# If it's only a single tuple, make it into the longer one
if isinstance(unique_together[0], basestring):
unique_together = [unique_together]
# For each combination, make an action for it
for fields in unique_together:
yield ("DeleteUnique", {
"model": self.old_orm[key],
"fields": [self.old_orm[key]._meta.get_field_by_name(x)[0] for x in fields],
})
# We always add it in here so we ignore it later
deleted_models.add(key)
# Or appeared
for key in self.new_defs:
if key not in self.old_defs:
# We shouldn't add it if it's managed=False
new_fields, new_meta, new_m2ms = self.split_model_def(self.current_model_from_key(key), self.new_defs[key])
if new_meta.get("managed", "True") != "False":
yield ("AddModel", {
"model": self.current_model_from_key(key),
"model_def": new_fields,
})
# Also make sure we add any M2Ms it has.
for fieldname in new_m2ms:
# Only create its stuff if it wasn't a through=.
field = self.current_field_from_key(key, fieldname)
if auto_through(field):
yield ("AddM2M", {"model": self.current_model_from_key(key), "field": field})
# And any unique constraints it has
unique_together = eval(new_meta.get("unique_together", "[]"))
if unique_together:
# If it's only a single tuple, make it into the longer one
if isinstance(unique_together[0], basestring):
unique_together = [unique_together]
# For each combination, make an action for it
for fields in unique_together:
yield ("AddUnique", {
"model": self.current_model_from_key(key),
"fields": [self.current_model_from_key(key)._meta.get_field_by_name(x)[0] for x in fields],
})
# Now, for every model that's stayed the same, check its fields.
for key in self.old_defs:
if key not in deleted_models:
old_fields, old_meta, old_m2ms = self.split_model_def(self.old_orm[key], self.old_defs[key])
new_fields, new_meta, new_m2ms = self.split_model_def(self.current_model_from_key(key), self.new_defs[key])
# Do nothing for models which are now not managed.
if new_meta.get("managed", "True") == "False":
continue
# Find fields that have vanished.
for fieldname in old_fields:
if fieldname not in new_fields:
# Don't do it for any fields we're ignoring
field = self.old_orm[key + ":" + fieldname]
field_allowed = True
for field_type in self.IGNORED_FIELD_TYPES:
if isinstance(field, field_type):
field_allowed = False
if field_allowed:
# Looks alright.
yield ("DeleteField", {
"model": self.old_orm[key],
"field": field,
"field_def": old_fields[fieldname],
})
# And ones that have appeared
for fieldname in new_fields:
if fieldname not in old_fields:
# Don't do it for any fields we're ignoring
field = self.current_field_from_key(key, fieldname)
field_allowed = True
for field_type in self.IGNORED_FIELD_TYPES:
if isinstance(field, field_type):
field_allowed = False
if field_allowed:
# Looks alright.
yield ("AddField", {
"model": self.current_model_from_key(key),
"field": field,
"field_def": new_fields[fieldname],
})
# Find M2Ms that have vanished
for fieldname in old_m2ms:
if fieldname not in new_m2ms:
# Only delete its stuff if it wasn't a through=.
field = self.old_orm[key + ":" + fieldname]
if auto_through(field):
yield ("DeleteM2M", {"model": self.old_orm[key], "field": field})
# Find M2Ms that have appeared
for fieldname in new_m2ms:
if fieldname not in old_m2ms:
# Only create its stuff if it wasn't a through=.
field = self.current_field_from_key(key, fieldname)
if auto_through(field):
yield ("AddM2M", {"model": self.current_model_from_key(key), "field": field})
# For the ones that exist in both models, see if they were changed
for fieldname in set(old_fields).intersection(set(new_fields)):
# Non-index changes
if self.different_attributes(
remove_useless_attributes(old_fields[fieldname], True, True),
remove_useless_attributes(new_fields[fieldname], True, True)):
yield ("ChangeField", {
"model": self.current_model_from_key(key),
"old_field": self.old_orm[key + ":" + fieldname],
"new_field": self.current_field_from_key(key, fieldname),
"old_def": old_fields[fieldname],
"new_def": new_fields[fieldname],
})
# Index changes
old_field = self.old_orm[key + ":" + fieldname]
new_field = self.current_field_from_key(key, fieldname)
if not old_field.db_index and new_field.db_index:
# They've added an index.
yield ("AddIndex", {
"model": self.current_model_from_key(key),
"fields": [new_field],
})
if old_field.db_index and not new_field.db_index:
# They've removed an index.
yield ("DeleteIndex", {
"model": self.old_orm[key],
"fields": [old_field],
})
# See if their uniques have changed
if old_field.unique != new_field.unique:
# Make sure we look at the one explicitly given to see what happened
if new_field.unique:
yield ("AddUnique", {
"model": self.current_model_from_key(key),
"fields": [new_field],
})
else:
yield ("DeleteUnique", {
"model": self.old_orm[key],
"fields": [old_field],
})
# See if there's any M2Ms that have changed.
for fieldname in set(old_m2ms).intersection(set(new_m2ms)):
old_field = self.old_orm[key + ":" + fieldname]
new_field = self.current_field_from_key(key, fieldname)
# Have they _added_ a through= ?
if auto_through(old_field) and not auto_through(new_field):
yield ("DeleteM2M", {"model": self.old_orm[key], "field": old_field})
# Have they _removed_ a through= ?
if not auto_through(old_field) and auto_through(new_field):
yield ("AddM2M", {"model": self.current_model_from_key(key), "field": new_field})
## See if the unique_togethers have changed
# First, normalise them into lists of sets.
old_unique_together = eval(old_meta.get("unique_together", "[]"))
new_unique_together = eval(new_meta.get("unique_together", "[]"))
if old_unique_together and isinstance(old_unique_together[0], basestring):
old_unique_together = [old_unique_together]
if new_unique_together and isinstance(new_unique_together[0], basestring):
new_unique_together = [new_unique_together]
old_unique_together = map(set, old_unique_together)
new_unique_together = map(set, new_unique_together)
# See if any appeared or disappeared
for item in old_unique_together:
if item not in new_unique_together:
yield ("DeleteUnique", {
"model": self.old_orm[key],
"fields": [self.old_orm[key + ":" + x] for x in item],
})
for item in new_unique_together:
if item not in old_unique_together:
yield ("AddUnique", {
"model": self.current_model_from_key(key),
"fields": [self.current_field_from_key(key, x) for x in item],
})
@classmethod
def is_triple(cls, triple):
"Returns whether the argument is a triple."
return isinstance(triple, (list, tuple)) and len(triple) == 3 and \
isinstance(triple[0], (str, unicode)) and \
isinstance(triple[1], (list, tuple)) and \
isinstance(triple[2], dict)
@classmethod
def different_attributes(cls, old, new):
"""
Backwards-compat comparison that ignores orm. on the RHS and not the left
and which knows django.db.models.fields.CharField = models.CharField.
Has a whole load of tests in tests/autodetection.py.
"""
# If they're not triples, just do normal comparison
if not cls.is_triple(old) or not cls.is_triple(new):
return old != new
# Expand them out into parts
old_field, old_pos, old_kwd = old
new_field, new_pos, new_kwd = new
# Copy the positional and keyword arguments so we can compare them and pop off things
old_pos, new_pos = old_pos[:], new_pos[:]
old_kwd = dict(old_kwd.items())
new_kwd = dict(new_kwd.items())
# Remove comparison of the existence of 'unique', that's done elsewhere.
# TODO: Make this work for custom fields where unique= means something else?
if "unique" in old_kwd:
del old_kwd['unique']
if "unique" in new_kwd:
del new_kwd['unique']
# If the first bit is different, check it's not by dj.db.models...
if old_field != new_field:
if old_field.startswith("models.") and (new_field.startswith("django.db.models") \
or new_field.startswith("django.contrib.gis")):
if old_field.split(".")[-1] != new_field.split(".")[-1]:
return True
else:
# Remove those fields from the final comparison
old_field = new_field = ""
# If there's a positional argument in the first, and a 'to' in the second,
# see if they're actually comparable.
if (old_pos and "to" in new_kwd) and ("orm" in new_kwd['to'] and "orm" not in old_pos[0]):
# Do special comparison to fix #153
try:
if old_pos[0] != new_kwd['to'].split("'")[1].split(".")[1]:
return True
except IndexError:
pass # Fall back to next comparison
# Remove those attrs from the final comparison
old_pos = old_pos[1:]
del new_kwd['to']
return old_field != new_field or old_pos != new_pos or old_kwd != new_kwd
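    # Illustrative example (assumed triples): these compare as equal, since
    # only the module path of the field class differs:
    #   ('models.CharField', [], {'max_length': '10'})
    #   ('django.db.models.fields.CharField', [], {'max_length': '10'})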
class ManualChanges(BaseChanges):
"""
Detects changes by reading the command line.
"""
def __init__(self, migrations, added_models, added_fields, added_indexes):
self.migrations = migrations
self.added_models = added_models
self.added_fields = added_fields
self.added_indexes = added_indexes
def suggest_name(self):
bits = []
for model_name in self.added_models:
bits.append('add_model_%s' % model_name)
for field_name in self.added_fields:
bits.append('add_field_%s' % field_name)
for index_name in self.added_indexes:
bits.append('add_index_%s' % index_name)
return '_'.join(bits).replace('.', '_')
def get_changes(self):
# Get the model defs so we can use them for the yield later
model_defs = freeze_apps([self.migrations.app_label()])
# Make the model changes
for model_name in self.added_models:
model = models.get_model(self.migrations.app_label(), model_name)
real_fields, meta, m2m_fields = self.split_model_def(model, model_defs[model_key(model)])
yield ("AddModel", {
"model": model,
"model_def": real_fields,
})
# And the field changes
for field_desc in self.added_fields:
try:
model_name, field_name = field_desc.split(".")
except (TypeError, ValueError):
raise ValueError("%r is not a valid field description." % field_desc)
model = models.get_model(self.migrations.app_label(), model_name)
real_fields, meta, m2m_fields = self.split_model_def(model, model_defs[model_key(model)])
yield ("AddField", {
"model": model,
"field": model._meta.get_field_by_name(field_name)[0],
"field_def": real_fields[field_name],
})
# And the indexes
for field_desc in self.added_indexes:
try:
model_name, field_name = field_desc.split(".")
except (TypeError, ValueError):
print "%r is not a valid field description." % field_desc
model = models.get_model(self.migrations.app_label(), model_name)
yield ("AddIndex", {
"model": model,
"fields": [model._meta.get_field_by_name(field_name)[0]],
})
class InitialChanges(BaseChanges):
"""
Creates all models; handles --initial.
"""
def suggest_name(self):
return 'initial'
def __init__(self, migrations):
self.migrations = migrations
def get_changes(self):
# Get the frozen models for this app
model_defs = freeze_apps([self.migrations.app_label()])
for model in models.get_models(models.get_app(self.migrations.app_label())):
# Don't do anything for unmanaged, abstract or proxy models
if model._meta.abstract or getattr(model._meta, "proxy", False) or not getattr(model._meta, "managed", True):
continue
real_fields, meta, m2m_fields = self.split_model_def(model, model_defs[model_key(model)])
# Firstly, add the main table and fields
yield ("AddModel", {
"model": model,
"model_def": real_fields,
})
# Then, add any uniqueness that's around
if meta:
unique_together = eval(meta.get("unique_together", "[]"))
if unique_together:
# If it's only a single tuple, make it into the longer one
if isinstance(unique_together[0], basestring):
unique_together = [unique_together]
# For each combination, make an action for it
for fields in unique_together:
yield ("AddUnique", {
"model": model,
"fields": [model._meta.get_field_by_name(x)[0] for x in fields],
})
# Finally, see if there's some M2M action
for name, triple in m2m_fields.items():
field = model._meta.get_field_by_name(name)[0]
# But only if it's not through=foo (#120)
if field.rel.through:
try:
# Django 1.1 and below
through_model = field.rel.through_model
except AttributeError:
# Django 1.2
through_model = field.rel.through
if (not field.rel.through) or getattr(through_model._meta, "auto_created", False):
yield ("AddM2M", {
"model": model,
"field": field,
})
| bsd-3-clause | 4,041,765,190,620,965,400 | 46.580777 | 123 | 0.491168 | false |
boos/cppcheck | addons/y2038.py | 2 | 7108 | #!/usr/bin/env python3
#
# cppcheck addon for Y2038 safeness detection
#
# Detects:
#
# 1. _TIME_BITS being defined to something else than 64 bits
# 2. _USE_TIME_BITS64 being defined when _TIME_BITS is not
# 3. Any Y2038-unsafe symbol when _USE_TIME_BITS64 is not defined.
#
# Example usage:
# $ cppcheck --dump path-to-src/test.c
# $ y2038.py path-to-src/test.c.dump
#
# y2038.py will walk the source tree for .dump files.
from __future__ import print_function
import cppcheckdata
import sys
import os
import re
# --------------------------------------------
# #define/#undef detection regular expressions
# --------------------------------------------
# test for '#define _TIME_BITS 64'
re_define_time_bits_64 = re.compile(r'^\s*#\s*define\s+_TIME_BITS\s+64\s*$')
# test for '#define _TIME_BITS ...' (combine w/ above to test for 'not 64')
re_define_time_bits = re.compile(r'^\s*#\s*define\s+_TIME_BITS\s+.*$')
# test for '#undef _TIME_BITS' (if it ever happens)
re_undef_time_bits = re.compile(r'^\s*#\s*undef\s+_TIME_BITS\s*$')
# test for '#define _USE_TIME_BITS64'
re_define_use_time_bits64 = re.compile(r'^\s*#\s*define\s+_USE_TIME_BITS64\s*$')
# test for '#undef _USE_TIME_BITS64' (if it ever happens)
re_undef_use_time_bits64 = re.compile(r'^\s*#\s*undef\s+_USE_TIME_BITS64\s*$')
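# For illustration (assumed inputs), the regexes above classify directives as:
#   re_define_time_bits_64.match('#define _TIME_BITS 64')        -> match
#   re_define_time_bits.match('#define _TIME_BITS 32')           -> match (not 64)
#   re_define_use_time_bits64.match('#define _USE_TIME_BITS64')  -> match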
# --------------------------------
# List of Y2038-unsafe identifiers
# --------------------------------
# This is WIP. Eventually it should contain all identifiers (types
# and functions) which would be affected by the Y2038 bug.
id_Y2038 = {
# Y2038-unsafe types by definition
    'time_t',
# Types using Y2038-unsafe types
'lastlog',
'msqid_ds',
'semid_ds',
'timeb',
'timespec',
'timeval',
'utimbuf',
'itimerspec',
'stat',
'clnt_ops',
'elf_prstatus',
'itimerval',
'ntptimeval',
'rusage',
'timex',
'utmp',
'utmpx',
# APIs using 2038-unsafe types
'ctime',
'ctime_r',
'difftime',
'gmtime',
'gmtime_r',
'localtime',
'localtime_r',
'mktime',
'stime',
'timegm',
'timelocal',
'time',
'msgctl',
'ftime',
'aio_suspend',
'clock_getres',
'clock_gettime',
'clock_nanosleep',
'clock_settime',
'futimens',
'mq_timedreceive',
'mq_timedsend',
'nanosleep',
'pselect',
'pthread_cond_timedwait',
'pthread_mutex_timedlock',
'pthread_rwlock_timedrdlock',
'pthread_rwlock_timedwrlock',
'sched_rr_get_interval',
'sem_timedwait',
'sigtimedwait',
'timespec_get',
'utimensat',
'adjtime',
'pmap_rmtcall',
'clntudp_bufcreate',
'clntudp_create',
'futimes',
'gettimeofday',
'lutimes',
'select',
'settimeofday',
'utimes',
'utime',
'timerfd_gettime',
'timerfd_settime',
'timer_gettime',
'timer_settime',
'fstatat',
'fstat',
'__fxstatat',
'__fxstat',
'lstat',
'__lxstat',
'stat',
'__xstat',
'struct itimerval',
'setitimer',
'getitimer',
'ntp_gettime',
'getrusage',
'wait3',
'wait4',
'adjtimex',
'ntp_adjtime',
'getutent_r',
'getutent',
'getutid_r',
'getutid',
'getutline_r',
'getutline',
'login',
'pututline',
'updwtmp',
'getutxent',
'getutxid',
'getutxline',
'pututxline'
}
def check_y2038_safe(dumpfile, quiet=False):
# Assume that the code is Y2038 safe until proven otherwise
y2038safe = True
# load XML from .dump file
data = cppcheckdata.CppcheckData(dumpfile)
    # Convert dump file path to source file in format generated by cppcheck.
    # For example after the following call:
    #     cppcheck ./src/my-src.c --dump
    # we get 'src/my-src.c' as the value of the 'file' field in cppcheckdata.
    # Note: str.rstrip() strips a set of characters rather than a suffix, so
    # the '.dump' extension has to be removed explicitly.
    if dumpfile.endswith('.dump'):
        srcfile = dumpfile[:-len('.dump')]
    else:
        srcfile = dumpfile
srcfile = os.path.expanduser(srcfile)
srcfile = os.path.normpath(srcfile)
# go through each configuration
for cfg in data.iterconfigurations():
if not quiet:
print('Checking %s, config %s...' % (srcfile, cfg.name))
safe_ranges = []
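        # Each (start, end) pair in safe_ranges marks a span of source lines
        # in which _USE_TIME_BITS64 was defined; Y2038-unsafe identifiers
        # found inside such a span are not reported.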
safe = -1
time_bits_defined = False
srclinenr = '0'
for directive in cfg.directives:
# track source line number
if directive.file == srcfile:
srclinenr = directive.linenr
# check for correct _TIME_BITS if present
if re_define_time_bits_64.match(directive.str):
time_bits_defined = True
elif re_define_time_bits.match(directive.str):
cppcheckdata.reportError(directive, 'error',
'_TIME_BITS must be defined equal to 64',
'y2038',
'type-bits-not-64')
time_bits_defined = False
y2038safe = False
elif re_undef_time_bits.match(directive.str):
time_bits_defined = False
# check for _USE_TIME_BITS64 (un)definition
if re_define_use_time_bits64.match(directive.str):
safe = int(srclinenr)
# warn about _TIME_BITS not being defined
if not time_bits_defined:
cppcheckdata.reportError(directive, 'warning',
'_USE_TIME_BITS64 is defined but _TIME_BITS was not',
'y2038',
'type-bits-undef')
elif re_undef_use_time_bits64.match(directive.str):
unsafe = int(srclinenr)
# do we have a safe..unsafe area?
if unsafe > safe > 0:
safe_ranges.append((safe, unsafe))
safe = -1
# check end of source beyond last directive
if len(cfg.tokenlist) > 0:
unsafe = int(cfg.tokenlist[-1].linenr)
if unsafe > safe > 0:
safe_ranges.append((safe, unsafe))
# go through all tokens
for token in cfg.tokenlist:
if token.str in id_Y2038:
if not any(lower <= int(token.linenr) <= upper
for (lower, upper) in safe_ranges):
cppcheckdata.reportError(token, 'warning',
token.str + ' is Y2038-unsafe',
'y2038',
'unsafe-call')
y2038safe = False
return y2038safe
def get_args():
parser = cppcheckdata.ArgumentParser()
return parser.parse_args()
if __name__ == '__main__':
args = get_args()
quiet = not any((args.quiet, args.cli))
if not args.dumpfile:
if not args.quiet:
print("no input files.")
sys.exit(0)
for dumpfile in args.dumpfile:
if not args.quiet:
print('Checking ' + dumpfile + '...')
check_y2038_safe(dumpfile, quiet)
sys.exit(cppcheckdata.EXIT_CODE)
| gpl-3.0 | 4,043,944,046,647,444,000 | 27.318725 | 98 | 0.539252 | false |
christiangalsterer/execbeat | vendor/github.com/elastic/beats/packetbeat/tests/system/test_0019_hide_params.py | 5 | 2020 | from packetbeat import BaseTest
"""
Tests for checking the hide_keywords options.
"""
class Test(BaseTest):
def test_http_hide_post(self):
"""
Should be able to strip the password from
a POST request.
"""
self.render_config_template(
http_hide_keywords=["pass", "password"]
)
self.run_packetbeat(pcap="hide_secret_POST.pcap",
debug_selectors=["http", "httpdetailed"])
objs = self.read_output()
assert len(objs) == 1
o = objs[0]
assert o["type"] == "http"
assert o["http.request.params"] == "pass=xxxxx&user=monica"
assert o["path"] == "/login"
for _, val in o.items():
if isinstance(val, basestring):
assert "secret" not in val
def test_http_hide_get(self):
"""
Should be able to strip the password from
a GET request.
"""
self.render_config_template(
http_hide_keywords=["pass", "password"]
)
self.run_packetbeat(pcap="hide_secret_GET.pcap",
debug_selectors=["http", "httpdetailed"])
objs = self.read_output()
assert len(objs) == 1
o = objs[0]
assert o["type"] == "http"
assert o["http.request.params"] == "pass=xxxxx&user=monica"
assert o["path"] == "/login"
for _, val in o.items():
if isinstance(val, basestring):
assert "secret" not in val
def test_http_hide_post_default(self):
"""
By default nothing is stripped.
"""
self.render_config_template()
self.run_packetbeat(pcap="hide_secret_POST.pcap",
debug_selectors=["http", "httpdetailed"])
objs = self.read_output()
assert len(objs) == 1
o = objs[0]
assert o["type"] == "http"
assert o["http.request.params"] == "pass=secret&user=monica"
assert o["path"] == "/login"
| apache-2.0 | 5,050,577,057,077,931,000 | 30.076923 | 69 | 0.526733 | false |
claudio-unipv/pvcheck | test/test_pvcheck.py | 1 | 1695 | import unittest
import sys
sys.path.insert(0, '../src')
import io
from pvcheck import *
from testdata import *
import formatter
import executor
class TestPVCheck(unittest.TestCase):
def test_exec_single_test(self):
dst = io.StringIO()
fmt = formatter.TextFormatter(destination=dst)
pv = PvCheck(executor.Executor(), fmt)
test = TestCase("echo", [
Section(".ARGS", ["[OUT]\nfoo"]),
Section("OUT", ["foo"])
])
ok = pv.exec_single_test(test, ["echo"])
exp = """TEST: echo
COMMAND LINE:
echo [OUT]
foo
OUT: OK
"""
self.assertTrue(ok)
self.assertEqual(dst.getvalue(), exp)
def test_exec_suite(self):
dst = io.StringIO()
verb = formatter.TextFormatter.SUCCESS
fmt = formatter.TextFormatter(destination=dst,
verbosity=verb)
pv = PvCheck(executor.Executor(), fmt)
sections = [
Section(".TEST", ["echo1"]),
Section(".ARGS", ["[OUT]\nfoo"]),
Section("OUT", ["foo"]),
Section(".TEST", ["echo2"]),
Section(".ARGS", ["[OUT]\nbar"]),
Section("OUT", ["foo"]),
Section(".TEST", ["echo3"]),
Section(".ARGS", ["[OUT]\nfoo"]),
Section("OUT", ["foo"]),
Section("NOTFOUND", ["notfound"])
]
failures = pv.exec_suite(TestSuite(sections), ["echo"])
exp = """OUT: OK
OUT: line 1 is wrong (expected 'foo', got 'bar')
OUT: OK
NOTFOUND: missing section
"""
self.assertEqual(failures, 2)
self.assertEqual(dst.getvalue(), exp)
if __name__ == '__main__':
unittest.main()
| mit | -774,937,732,231,544,200 | 26.33871 | 63 | 0.533333 | false |
ObsidianBlk/GemRB--Unofficial- | gemrb/GUIScripts/iwd/Autodetect.py | 9 | 1320 | # -*-python-*-
# vim: set ts=4 sw=4 expandtab:
# GemRB - Infinity Engine Emulator
# Copyright (C) 2010 The GemRB Project
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
import GemRB
from ie_restype import *
from AutodetectCommon import CheckFiles
files = (
    ("START", "CHU", RES_CHU),
    ("STARTPOS", "2DA", RES_2DA),
    ("STARTARE", "2DA", RES_2DA),
    ("EXPTABLE", "2DA", RES_2DA),
    ("MUSIC", "2DA", RES_2DA),
)

files_how = (
    ("CHMB1A1", "BAM", RES_BAM),
    ("TRACKING", "2DA", RES_2DA),
)

if CheckFiles(files):
    if CheckFiles(files_how):
        GemRB.AddGameTypeHint ("how", 95)
    else:
        GemRB.AddGameTypeHint ("iwd", 90)
| gpl-2.0 | -7,405,696,586,030,289,000 | 28.333333 | 80 | 0.687879 | false |
danielperna84/hass-poc-configurator | configurator.py | 2 | 227435 | #!/usr/bin/python3
# -*- coding: utf-8 -*-
# pylint: disable=too-many-lines
"""
Configurator for Home Assistant.
https://github.com/danielperna84/hass-configurator
"""
import os
import sys
import argparse
import json
import ssl
import socket
import socketserver
import base64
import ipaddress
import signal
import cgi
import shlex
import subprocess
import logging
import fnmatch
import hashlib
import mimetypes
from string import Template
from http.server import BaseHTTPRequestHandler
import urllib.request
from urllib.parse import urlparse, parse_qs, unquote
### Some options for you to change
LISTENIP = "0.0.0.0"
PORT = 3218
# Set BASEPATH to something like "/home/hass/.homeassistant/" if you're not
# running the configurator from that path
BASEPATH = None
# Set ENFORCE_BASEPATH to True to lock the configurator into the basepath and
# thereby prevent it from opening files outside of the BASEPATH
ENFORCE_BASEPATH = False
# Set the paths to a certificate and the key if you're using SSL,
# e.g. "/etc/ssl/certs/mycert.pem"
SSL_CERTIFICATE = None
SSL_KEY = None
# Set the destination where the HASS API is reachable
HASS_API = "http://127.0.0.1:8123/api/"
# Set the destination where the websocket API is reachable (if different
# from HASS_API, e.g. wss://hass.example.com/api/websocket)
HASS_WS_API = None
# If a password is required to access the API, set it in the form of "password".
# If HA ignores SSL locally and the configurator runs on the same machine,
# this is not needed.
HASS_API_PASSWORD = None
# Using the CREDENTIALS variable is deprecated.
# It will still work though if USERNAME and PASSWORD are not set.
CREDENTIALS = None
# Set the username used for basic authentication.
USERNAME = None
# Set the password used for basic authentication.
PASSWORD = None
# Limit access to the configurator by adding allowed IP addresses / networks to
# the list, e.g. ALLOWED_NETWORKS = ["192.168.0.0/24", "172.16.47.23"]
ALLOWED_NETWORKS = []
# Allow access to the configurator for client IP addresses that match the
# result of DNS lookups for the specified domains.
ALLOWED_DOMAINS = []
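# A minimal sketch of what this could look like (the domain below is a
# hypothetical placeholder, not a default):
#   ALLOWED_DOMAINS = ["myhome.duckdns.org"]
# A client is then allowed if its IP matches a DNS lookup of such a domain.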
# List of statically banned IP addresses, e.g. ["1.1.1.1", "2.2.2.2"]
BANNED_IPS = []
# Ban IPs after n failed login attempts. Restart service to reset banning.
# The default of `0` disables this feature.
BANLIMIT = 0
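# For example, BANLIMIT = 3 would ban a client IP after its third failed
# login attempt.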
# Enable git integration.
# GitPython (https://gitpython.readthedocs.io/en/stable/) has to be installed.
GIT = False
# Files to ignore in the UI. A good example list that cleans up the UI is
# [".*", "*.log", "deps", "icloud", "*.conf", "*.json", "certs", "__pycache__"]
IGNORE_PATTERN = []
# If DIRSFIRST is set to `true`, directories will be displayed at the top.
DIRSFIRST = False
# Sesame token. Browse to the configurator URL + /secrettoken to unban your
# client IP and add it to the list of allowed IPs.
SESAME = None
# Instead of a static SESAME token you may also use a TOTP based token that
# changes every 30 seconds. The value needs to be a base 32 encoded string.
SESAME_TOTP_SECRET = None
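# A minimal sketch (standard library only, not part of the configurator) of
# generating a suitable base 32 secret:
#   import os, base64
#   print(base64.b32encode(os.urandom(10)).decode())
# The printed value could then be assigned to SESAME_TOTP_SECRET.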
# Verify the hostname used in the request. Block access if it doesn't match
# this value
VERIFY_HOSTNAME = None
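# For example, VERIFY_HOSTNAME = "hass.example.com" (a hypothetical name)
# would block requests that arrive under a different hostname.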
# Prefix for environment variables
ENV_PREFIX = "HC_"
# Ignore SSL errors when connecting to the HASS API
IGNORE_SSL = False
# Notification service like `notify.mytelegram`. Default is `persistent_notification.create`
NOTIFY_SERVICE_DEFAULT = "persistent_notification.create"
NOTIFY_SERVICE = NOTIFY_SERVICE_DEFAULT
### End of options
LOGLEVEL_MAPPING = {
"critical": logging.CRITICAL,
"error": logging.ERROR,
"warning": logging.WARNING,
"info": logging.INFO,
"debug": logging.DEBUG
}
DEFAULT_LOGLEVEL = "info"
LOGLEVEL = LOGLEVEL_MAPPING.get(os.environ.get("HC_LOGLEVEL", DEFAULT_LOGLEVEL))
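# For example (assuming the script is started from a plain shell), logging
# could be raised to debug level like this:
#   HC_LOGLEVEL=debug python3 configurator.py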
LOG = logging.getLogger(__name__)
LOG.setLevel(LOGLEVEL)
SO = logging.StreamHandler(sys.stdout)
SO.setLevel(LOGLEVEL)
SO.setFormatter(
    logging.Formatter('%(levelname)s:%(asctime)s:%(name)s:%(message)s'))
LOG.addHandler(SO)
RELEASEURL = "https://api.github.com/repos/danielperna84/hass-configurator/releases/latest"
VERSION = "0.3.5"
BASEDIR = "."
DEV = False
LISTENPORT = None
TOTP = None
HTTPD = None
FAIL2BAN_IPS = {}
REPO = None
INDEX = Template(r"""<!DOCTYPE html>
<html lang="en">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1.0" />
<title>HASS Configurator</title>
<link href="https://fonts.googleapis.com/icon?family=Material+Icons" rel="stylesheet">
<link href="https://cdnjs.cloudflare.com/ajax/libs/MaterialDesign-Webfont/3.4.93/css/materialdesignicons.min.css" rel="stylesheet">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/materialize/0.100.2/css/materialize.min.css">
<style type="text/css" media="screen">
body {
margin: 0;
padding: 0;
background-color: #fafafa;
display: flex;
min-height: 100vh;
flex-direction: column;
}
main {
flex: 1 0 auto;
}
#editor {
position: fixed;
top: 135px;
right: 0;
bottom: 0;
}
@media only screen and (max-width: 600px) {
#editor {
top: 125px;
}
.toolbar_mobile {
margin-bottom: 0;
}
}
.leftellipsis {
overflow: hidden;
direction: rtl;
text-overflow: ellipsis;
white-space: nowrap;
}
.select-wrapper input.select-dropdown {
width: 96%;
overflow: hidden;
direction: ltr;
text-overflow: ellipsis;
white-space: nowrap;
}
#edit_float {
z-index: 10;
}
#filebrowser {
background-color: #fff;
}
#fbheader {
display: block;
cursor: initial;
pointer-events: none;
color: #424242 !important;
font-weight: 400;
font-size: .9em;
min-height: 64px;
padding-top: 8px;
margin-left: -5px;
max-width: 250px;
}
#fbheaderbranch {
padding: 5px 10px !important;
display: none;
color: #757575 !important;
}
#branchselector {
font-weight: 400;
}
a.branch_select.active {
color: white !important;
}
#fbelements {
margin: 0;
position: relative;
}
a.collection-item {
color: #616161 !important;
}
.fbtoolbarbutton {
color: #757575 !important;
min-height: 64px !important;
}
.fbmenubutton {
color: #616161 !important;
display: inline-block;
float: right;
min-height: 64px;
padding-top: 8px !important;
padding-left: 20px !important;
}
.filename {
color: #616161 !important;
font-weight: 400;
display: inline-block;
width: 182px;
white-space: nowrap;
text-overflow: ellipsis;
cursor: pointer;
}
.nowrap {
white-space: nowrap;
}
.text_darkgreen {
color: #1b5e20 !important;
}
.text_darkred {
color: #b71c1c !important;
}
span.stats {
margin: -10px 0 0 0;
padding: 0;
font-size: 0.5em;
color: #616161 !important;
line-height: 16px;
display: inherit;
}
.collection-item #uplink {
background-color: #f5f5f5;
width: 323px !important;
margin-left: -3px !important;
}
input.currentfile_input {
margin-bottom: 0;
margin-top: 0;
padding-left: 5px;
border-bottom: 0;
}
.side_tools {
vertical-align: middle;
}
.fbtoolbarbutton_icon {
margin-top: 20px;
}
.collection {
margin: 0;
background-color: #fff;
}
li.collection-item {
border-bottom: 1px solid #eeeeee !important;
}
.side-nav {
width: 337px !important;
height: 100% !important;
}
.fb_side-nav li {
line-height: 36px;
}
.fb_side-nav a {
padding: 0 0 0 16px;
display: inline-block !important;
}
.fb_side-nav li>a>i {
margin-right: 16px !important;
cursor: pointer;
}
.green {
color: #fff;
}
.red {
color: #fff;
}
#dropdown_menu, #dropdown_menu_mobile {
min-width: 235px;
}
#dropdown_gitmenu {
min-width: 140px !important;
}
.dropdown-content li>a,
.dropdown-content li>span {
color: #616161 !important;
}
.fb_dd {
margin-left: -15px !important;
}
.blue_check:checked+label:before {
border-right: 2px solid #03a9f4;
border-bottom: 2px solid #03a9f4;
}
.input-field input:focus+label {
color: #03a9f4 !important;
}
.input-field input[type=text].valid {
border-bottom: 1px solid #03a9f4 !important;
box-shadow: 0 1px 0 0 #03a9f4 !important;
}
.input-field input[type=text]:focus {
border-bottom: 1px solid #03a9f4 !important;
box-shadow: 0 1px 0 0 #03a9f4 !important;
}
.input-field input:focus+label {
color: #03a9f4 !important;
}
.input-field input[type=password].valid {
border-bottom: 1px solid #03a9f4 !important;
box-shadow: 0 1px 0 0 #03a9f4 !important;
}
.input-field input[type=password]:focus {
border-bottom: 1px solid #03a9f4 !important;
box-shadow: 0 1px 0 0 #03a9f4 !important;
}
.input-field textarea:focus+label {
color: #03a9f4 !important;
}
.input-field textarea:focus {
border-bottom: 1px solid #03a9f4 !important;
box-shadow: 0 1px 0 0 #03a9f4 !important;
}
#modal_acekeyboard {
top: auto;
width: 96%;
min-height: 96%;
border-radius: 0;
margin: auto;
}
.modal .modal-content_nopad {
padding: 0;
}
.waves-effect.waves-blue .waves-ripple {
background-color: #03a9f4;
}
.preloader-background {
display: flex;
align-items: center;
justify-content: center;
background-color: #eee;
position: fixed;
z-index: 10000;
top: 0;
left: 0;
right: 0;
bottom: 0;
}
.modal-content_nopad {
position: relative;
}
.modal-content_nopad .modal_btn {
position: absolute;
top: 2px;
right:0;
}
footer {
z-index: 10;
}
.shadow {
height: 25px;
margin: -26px;
min-width: 320px;
background-color: transparent;
}
.ace_optionsMenuEntry input {
position: relative !important;
left: 0 !important;
opacity: 1 !important;
}
.ace_optionsMenuEntry select {
position: relative !important;
left: 0 !important;
opacity: 1 !important;
display: block !important;
}
.ace_search {
background-color: #eeeeee !important;
border-radius: 0 !important;
border: 0 !important;
box-shadow: 0 6px 10px 0 rgba(0, 0, 0, 0.14), 0 1px 18px 0 rgba(0, 0, 0, 0.12), 0 3px 5px -1px rgba(0, 0, 0, 0.3);
}
.ace_search_form {
background-color: #fafafa;
width: 300px;
border: 0 !important;
border-radius: 0 !important;
outline: none !important;
box-shadow: 0 2px 2px 0 rgba(0, 0, 0, 0.14), 0 1px 5px 0 rgba(0, 0, 0, 0.12), 0 2px 1px -2px rgba(0, 0, 0, 0.2);
margin-bottom: 15px !important;
margin-left: 8px !important;
color: #424242 !important;
}
.ace_search_field {
padding-left: 4px !important;
margin-left: 10px !important;
max-width: 275px !important;
font-family: 'Roboto', sans-serif !important;
border-bottom: 1px solid #03a9f4 !important;
color: #424242 !important;
}
.ace_replace_form {
background-color: #fafafa;
width: 300px;
border: 0 !important;
border-radius: 0 !important;
outline: none !important;
box-shadow: 0 2px 2px 0 rgba(0, 0, 0, 0.14), 0 1px 5px 0 rgba(0, 0, 0, 0.12), 0 2px 1px -2px rgba(0, 0, 0, 0.2);
margin-bottom: 15px !important;
margin-left: 8px !important;
}
.ace_search_options {
background-color: #eeeeee;
text-align: left !important;
letter-spacing: .5px !important;
transition: .2s ease-out;
font-family: 'Roboto', sans-serif !important;
font-size: 130%;
top: 0 !important;
}
.ace_searchbtn {
text-decoration: none !important;
min-width: 40px !important;
min-height: 30px !important;
color: #424242 !important;
text-align: center !important;
letter-spacing: .5px !important;
transition: .2s ease-out;
cursor: pointer;
font-family: 'Roboto', sans-serif !important;
}
.ace_searchbtn:hover {
background-color: #03a9f4;
}
.ace_replacebtn {
text-decoration: none !important;
min-width: 40px !important;
min-height: 30px !important;
color: #424242 !important;
text-align: center !important;
letter-spacing: .5px !important;
transition: .2s ease-out;
cursor: pointer;
font-family: 'Roboto', sans-serif !important;
}
.ace_replacebtn:hover {
background-color: #03a9f4;
}
.ace_button {
text-decoration: none !important;
min-width: 40px !important;
min-height: 30px !important;
border-radius: 0 !important;
outline: none !important;
color: #424242 !important;
background-color: #fafafa;
text-align: center;
letter-spacing: .5px;
transition: .2s ease-out;
cursor: pointer;
font-family: 'Roboto', sans-serif !important;
}
.ace_button:hover {
background-color: #03a9f4 !important;
}
.ace_invisible {
color: rgba(191, 191, 191, 0.5) !important;
}
.fbicon_pad {
min-height: 64px !important;
}
.fbmenuicon_pad {
min-height: 64px;
margin-top: 6px !important;
margin-right: 18px !important;
color: #616161 !important;
}
.no-padding {
padding: 0 !important;
}
.branch_select {
min-width: 300px !important;
font-size: 14px !important;
font-weight: 400 !important;
}
a.branch_hover:hover {
background-color: #e0e0e0 !important;
}
.hidesave {
opacity: 0;
-webkit-transition: all 0.5s ease-in-out;
-moz-transition: all 0.5s ease-in-out;
-ms-transition: all 0.5s ease-in-out;
-o-transition: all 0.5s ease-in-out;
transition: all 0.5s ease-in-out;
}
.pathtip_color {
-webkit-animation: fadeinout 1.75s linear 1 forwards;
animation: fadeinout 1.75s linear 1 forwards;
}
@-webkit-keyframes fadeinout {
0% { background-color: #f5f5f5; }
50% { background-color: #ff8a80; }
100% { background-color: #f5f5f5; }
}
@keyframes fadeinout {
0% { background-color: #f5f5f5; }
50% { background-color: #ff8a80; }
100% { background-color: #f5f5f5; }
}
#lint-status {
position: absolute;
top: 0.75rem;
right: 10px;
}
.cursor-pointer {
cursor: pointer;
}
#modal_lint.modal {
width: 80%;
}
#modal_lint textarea {
resize: none;
height: auto;
}
</style>
<script src="https://cdnjs.cloudflare.com/ajax/libs/ace/1.4.2/ace.js" type="text/javascript" charset="utf-8"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/ace/1.4.2/ext-modelist.js" type="text/javascript" charset="utf-8"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/ace/1.4.2/ext-language_tools.js" type="text/javascript" charset="utf-8"></script>
</head>
<body>
<div class="preloader-background">
<div class="preloader-wrapper big active">
<div class="spinner-layer spinner-blue">
<div class="circle-clipper left">
<div class="circle"></div>
</div>
<div class="gap-patch">
<div class="circle"></div>
</div>
<div class="circle-clipper right">
<div class="circle"></div>
</div>
</div>
<div class="spinner-layer spinner-red">
<div class="circle-clipper left">
<div class="circle"></div>
</div>
<div class="gap-patch">
<div class="circle"></div>
</div>
<div class="circle-clipper right">
<div class="circle"></div>
</div>
</div>
<div class="spinner-layer spinner-yellow">
<div class="circle-clipper left">
<div class="circle"></div>
</div>
<div class="gap-patch">
<div class="circle"></div>
</div>
<div class="circle-clipper right">
<div class="circle"></div>
</div>
</div>
<div class="spinner-layer spinner-green">
<div class="circle-clipper left">
<div class="circle"></div>
</div>
<div class="gap-patch">
<div class="circle"></div>
</div>
<div class="circle-clipper right">
<div class="circle"></div>
</div>
</div>
</div>
</div>
<header>
<div class="navbar-fixed">
<nav class="light-blue">
<div class="nav-wrapper">
<ul class="left">
<li><a class="waves-effect waves-light tooltipped files-collapse hide-on-small-only" data-activates="slide-out" data-position="bottom" data-delay="500" data-tooltip="Browse Filesystem" style="padding-left: 25px; padding-right: 25px;"><i class="material-icons">folder</i></a></li>
<li><a class="waves-effect waves-light files-collapse hide-on-med-and-up" data-activates="slide-out" style="padding-left: 25px; padding-right: 25px;"><i class="material-icons">folder</i></a></li>
</ul>
<ul class="right">
<li><a class="waves-effect waves-light tooltipped hide-on-small-only markdirty hidesave" data-position="bottom" data-delay="500" data-tooltip="Save" onclick="save_check()"><i class="material-icons">save</i></a></li>
<li><a class="waves-effect waves-light tooltipped hide-on-small-only modal-trigger" data-position="bottom" data-delay="500" data-tooltip="Close" href="#modal_close"><i class="material-icons">close</i></a></li>
<li><a class="waves-effect waves-light tooltipped hide-on-small-only" data-position="bottom" data-delay="500" data-tooltip="Search" onclick="editor.execCommand('replace')"><i class="material-icons">search</i></a></li>
<li><a class="waves-effect waves-light dropdown-button hide-on-small-only $versionclass" data-activates="dropdown_menu" data-beloworigin="true"><i class="material-icons right">settings</i></a></li>
<li><a class="waves-effect waves-light hide-on-med-and-up markdirty hidesave" onclick="save_check()"><i class="material-icons">save</i></a></li>
<li><a class="waves-effect waves-light hide-on-med-and-up modal-trigger" href="#modal_close"><i class="material-icons">close</i></a></li>
<li><a class="waves-effect waves-light hide-on-med-and-up" onclick="editor.execCommand('replace')"><i class="material-icons">search</i></a></li>
<li><a class="waves-effect waves-light dropdown-button hide-on-med-and-up $versionclass" data-activates="dropdown_menu_mobile" data-beloworigin="true"><i class="material-icons right">settings</i></a></li>
</ul>
</div>
</nav>
</div>
</header>
<main>
<ul id="dropdown_menu" class="dropdown-content z-depth-4">
<li><a onclick="localStorage.setItem('new_tab', true);window.open(window.location.origin+window.location.pathname, '_blank');">New tab</a></li>
<li class="divider"></li>
<li><a target="_blank" href="https://home-assistant.io/components/">Components</a></li>
<li><a target="_blank" href="https://materialdesignicons.com/">Material Icons</a></li>
<li><a href="#" data-activates="ace_settings" class="ace_settings-collapse">Editor Settings</a></li>
<li><a class="modal-trigger" href="#modal_netstat" onclick="get_netstat()">Network status</a></li>
<li><a class="modal-trigger" href="#modal_about">About HASS-Configurator</a></li>
<li class="divider"></li>
<!--<li><a href="#modal_check_config">Check HASS Configuration</a></li>-->
<li><a class="modal-trigger" href="#modal_events">Observe events</a></li>
<li><a class="modal-trigger" href="#modal_reload_automations">Reload automations</a></li>
<li><a class="modal-trigger" href="#modal_reload_scripts">Reload scripts</a></li>
<li><a class="modal-trigger" href="#modal_reload_groups">Reload groups</a></li>
<li><a class="modal-trigger" href="#modal_reload_core">Reload core</a></li>
<li><a class="modal-trigger" href="#modal_restart">Restart HASS</a></li>
<li class="divider"></li>
<li><a class="modal-trigger" href="#modal_exec_command">Execute shell command</a></li>
</ul>
<ul id="dropdown_menu_mobile" class="dropdown-content z-depth-4">
<li><a onclick="localStorage.setItem('new_tab', true);window.open(window.location.origin+window.location.pathname, '_blank');">New tab</a></li>
<li class="divider"></li>
<li><a target="_blank" href="https://home-assistant.io/help/">Help</a></li>
<li><a target="_blank" href="https://home-assistant.io/components/">Components</a></li>
<li><a target="_blank" href="https://materialdesignicons.com/">Material Icons</a></li>
<li><a href="#" data-activates="ace_settings" class="ace_settings-collapse">Editor Settings</a></li>
<li><a class="modal-trigger" href="#modal_netstat" onclick="get_netstat()">Network status</a></li>
<li><a class="modal-trigger" href="#modal_about">About HASS-Configurator</a></li>
<li class="divider"></li>
<!--<li><a href="#modal_check_config">Check HASS Configuration</a></li>-->
<li><a class="modal-trigger" href="#modal_events">Observe events</a></li>
<li><a class="modal-trigger" href="#modal_reload_automations">Reload automations</a></li>
<li><a class="modal-trigger" href="#modal_reload_scripts">Reload scripts</a></li>
<li><a class="modal-trigger" href="#modal_reload_groups">Reload groups</a></li>
<li><a class="modal-trigger" href="#modal_reload_core">Reload core</a></li>
<li><a class="modal-trigger" href="#modal_restart">Restart HASS</a></li>
<li class="divider"></li>
<li><a class="modal-trigger" href="#modal_exec_command">Execute shell command</a></li>
</ul>
<ul id="dropdown_gitmenu" class="dropdown-content z-depth-4">
<li><a class="modal-trigger" href="#modal_init" class="nowrap waves-effect">git init</a></li>
<li><a class="modal-trigger" href="#modal_commit" class="nowrap waves-effect">git commit</a></li>
<li><a class="modal-trigger" href="#modal_push" class="nowrap waves-effect">git push</a></li>
<li><a class="modal-trigger" href="#modal_stash" class="nowrap waves-effect">git stash</a></li>
</ul>
<ul id="dropdown_gitmenu_mobile" class="dropdown-content z-depth-4">
<li><a class="modal-trigger" href="#modal_init" class="nowrap waves-effect">git init</a></li>
<li><a class="modal-trigger" href="#modal_commit" class="nowrap waves-effect">git commit</a></li>
<li><a class="modal-trigger" href="#modal_push" class="nowrap waves-effect">git push</a></li>
<li><a class="modal-trigger" href="#modal_stash" class="nowrap waves-effect">git stash</a></li>
</ul>
<div id="modal_acekeyboard" class="modal bottom-sheet modal-fixed-footer">
<div class="modal-content centered">
<h4 class="grey-text text-darken-3">Ace Keyboard Shortcuts<i class="mdi mdi-keyboard right grey-text text-darken-3" style="font-size: 2rem;"></i></h4>
<br>
<ul class="collapsible popout" data-collapsible="expandable">
<li>
<div class="collapsible-header"><i class="material-icons">view_headline</i>Line Operations</div>
<div class="collapsible-body">
<table class="bordered highlight centered">
<thead>
<tr>
<th>Windows/Linux</th>
<th>Mac</th>
<th>Action</th>
</tr>
</thead>
<tbody>
<tr>
<td>Ctrl-D</td>
<td>Command-D</td>
<td>Remove line</td>
</tr>
<tr>
<td>Alt-Shift-Down</td>
<td>Command-Option-Down</td>
<td>Copy lines down</td>
</tr>
<tr>
<td>Alt-Shift-Up</td>
<td>Command-Option-Up</td>
<td>Copy lines up</td>
</tr>
<tr>
<td>Alt-Down</td>
<td>Option-Down</td>
<td>Move lines down</td>
</tr>
<tr>
<td>Alt-Up</td>
<td>Option-Up</td>
<td>Move lines up</td>
</tr>
<tr>
<td>Alt-Delete</td>
<td>Ctrl-K</td>
<td>Remove to line end</td>
</tr>
<tr>
<td>Alt-Backspace</td>
<td>Command-Backspace</td>
<td>Remove to linestart</td>
</tr>
<tr>
<td>Ctrl-Backspace</td>
<td>Option-Backspace, Ctrl-Option-Backspace</td>
<td>Remove word left</td>
</tr>
<tr>
<td>Ctrl-Delete</td>
<td>Option-Delete</td>
<td>Remove word right</td>
</tr>
<tr>
<td>---</td>
<td>Ctrl-O</td>
<td>Split line</td>
</tr>
</tbody>
</table>
</div>
</li>
<li>
<div class="collapsible-header"><i class="material-icons">photo_size_select_small</i>Selection</div>
<div class="collapsible-body">
<table class="bordered highlight centered">
<thead>
<tr>
<th >Windows/Linux</th>
<th >Mac</th>
<th >Action</th>
</tr>
</thead>
<tbody>
<tr>
<td >Ctrl-A</td>
<td >Command-A</td>
<td >Select all</td>
</tr>
<tr>
<td >Shift-Left</td>
<td >Shift-Left</td>
<td >Select left</td>
</tr>
<tr>
<td >Shift-Right</td>
<td >Shift-Right</td>
<td >Select right</td>
</tr>
<tr>
<td >Ctrl-Shift-Left</td>
<td >Option-Shift-Left</td>
<td >Select word left</td>
</tr>
<tr>
<td >Ctrl-Shift-Right</td>
<td >Option-Shift-Right</td>
<td >Select word right</td>
</tr>
<tr>
<td >Shift-Home</td>
<td >Shift-Home</td>
<td >Select line start</td>
</tr>
<tr>
<td >Shift-End</td>
<td >Shift-End</td>
<td >Select line end</td>
</tr>
<tr>
<td >Alt-Shift-Right</td>
<td >Command-Shift-Right</td>
<td >Select to line end</td>
</tr>
<tr>
<td >Alt-Shift-Left</td>
<td >Command-Shift-Left</td>
<td >Select to line start</td>
</tr>
<tr>
<td >Shift-Up</td>
<td >Shift-Up</td>
<td >Select up</td>
</tr>
<tr>
<td >Shift-Down</td>
<td >Shift-Down</td>
<td >Select down</td>
</tr>
<tr>
<td >Shift-PageUp</td>
<td >Shift-PageUp</td>
<td >Select page up</td>
</tr>
<tr>
<td >Shift-PageDown</td>
<td >Shift-PageDown</td>
<td >Select page down</td>
</tr>
<tr>
<td >Ctrl-Shift-Home</td>
<td >Command-Shift-Up</td>
<td >Select to start</td>
</tr>
<tr>
<td >Ctrl-Shift-End</td>
<td >Command-Shift-Down</td>
<td >Select to end</td>
</tr>
<tr>
<td >Ctrl-Shift-D</td>
<td >Command-Shift-D</td>
<td >Duplicate selection</td>
</tr>
<tr>
<td >Ctrl-Shift-P</td>
<td >---</td>
<td >Select to matching bracket</td>
</tr>
</tbody>
</table>
</div>
</li>
<li>
<div class="collapsible-header"><i class="material-icons">multiline_chart</i>Multicursor</div>
<div class="collapsible-body">
<table class="bordered highlight centered">
<thead>
<tr>
<th>Windows/Linux</th>
<th>Mac</th>
<th>Action</th>
</tr>
</thead>
<tbody>
<tr>
<td>Ctrl-Alt-Up</td>
<td>Ctrl-Option-Up</td>
<td>Add multi-cursor above</td>
</tr>
<tr>
<td>Ctrl-Alt-Down</td>
<td>Ctrl-Option-Down</td>
<td>Add multi-cursor below</td>
</tr>
<tr>
<td>Ctrl-Alt-Right</td>
<td>Ctrl-Option-Right</td>
<td>Add next occurrence to multi-selection</td>
</tr>
<tr>
<td>Ctrl-Alt-Left</td>
<td>Ctrl-Option-Left</td>
<td>Add previous occurrence to multi-selection</td>
</tr>
<tr>
<td>Ctrl-Alt-Shift-Up</td>
<td>Ctrl-Option-Shift-Up</td>
<td>Move multicursor from current line to the line above</td>
</tr>
<tr>
<td>Ctrl-Alt-Shift-Down</td>
<td>Ctrl-Option-Shift-Down</td>
<td>Move multicursor from current line to the line below</td>
</tr>
<tr>
<td>Ctrl-Alt-Shift-Right</td>
<td>Ctrl-Option-Shift-Right</td>
<td>Remove current occurrence from multi-selection and move to next</td>
</tr>
<tr>
<td>Ctrl-Alt-Shift-Left</td>
<td>Ctrl-Option-Shift-Left</td>
<td>Remove current occurrence from multi-selection and move to previous</td>
</tr>
<tr>
<td>Ctrl-Shift-L</td>
<td>Ctrl-Shift-L</td>
<td>Select all from multi-selection</td>
</tr>
</tbody>
</table>
</div>
</li>
<li>
<div class="collapsible-header"><i class="material-icons">call_missed_outgoing</i>Go To</div>
<div class="collapsible-body">
<table class="bordered highlight centered">
<thead>
<tr>
<th>Windows/Linux</th>
<th>Mac</th>
<th>Action</th>
</tr>
</thead>
<tbody>
<tr>
<td>Left</td>
<td>Left, Ctrl-B</td>
<td>Go to left</td>
</tr>
<tr>
<td>Right</td>
<td>Right, Ctrl-F</td>
<td>Go to right</td>
</tr>
<tr>
<td>Ctrl-Left</td>
<td>Option-Left</td>
<td>Go to word left</td>
</tr>
<tr>
<td>Ctrl-Right</td>
<td>Option-Right</td>
<td>Go to word right</td>
</tr>
<tr>
<td>Up</td>
<td>Up, Ctrl-P</td>
<td>Go line up</td>
</tr>
<tr>
<td>Down</td>
<td>Down, Ctrl-N</td>
<td>Go line down</td>
</tr>
<tr>
<td>Alt-Left, Home</td>
<td>Command-Left, Home, Ctrl-A</td>
<td>Go to line start</td>
</tr>
<tr>
<td>Alt-Right, End</td>
<td>Command-Right, End, Ctrl-E</td>
<td>Go to line end</td>
</tr>
<tr>
<td>PageUp</td>
<td>Option-PageUp</td>
<td>Go to page up</td>
</tr>
<tr>
<td>PageDown</td>
<td>Option-PageDown, Ctrl-V</td>
<td>Go to page down</td>
</tr>
<tr>
<td>Ctrl-Home</td>
<td>Command-Home, Command-Up</td>
<td>Go to start</td>
</tr>
<tr>
<td>Ctrl-End</td>
<td>Command-End, Command-Down</td>
<td>Go to end</td>
</tr>
<tr>
<td>Ctrl-L</td>
<td>Command-L</td>
<td>Go to line</td>
</tr>
<tr>
<td>Ctrl-Down</td>
<td>Command-Down</td>
<td>Scroll line down</td>
</tr>
<tr>
<td>Ctrl-Up</td>
<td>---</td>
<td>Scroll line up</td>
</tr>
<tr>
<td>Ctrl-P</td>
<td>---</td>
<td>Go to matching bracket</td>
</tr>
<tr>
<td>---</td>
<td>Option-PageDown</td>
<td>Scroll page down</td>
</tr>
<tr>
<td>---</td>
<td>Option-PageUp</td>
<td>Scroll page up</td>
</tr>
</tbody>
</table>
</div>
</li>
<li>
<div class="collapsible-header"><i class="material-icons">find_replace</i>Find/Replace</div>
<div class="collapsible-body">
<table class="bordered highlight centered">
<thead>
<tr>
<th>Windows/Linux</th>
<th>Mac</th>
<th>Action</th>
</tr>
</thead>
<tbody>
<tr>
<td>Ctrl-F</td>
<td>Command-F</td>
<td>Find</td>
</tr>
<tr>
<td>Ctrl-H</td>
<td>Command-Option-F</td>
<td>Replace</td>
</tr>
<tr>
<td>Ctrl-K</td>
<td>Command-G</td>
<td>Find next</td>
</tr>
<tr>
<td>Ctrl-Shift-K</td>
<td>Command-Shift-G</td>
<td>Find previous</td>
</tr>
</tbody>
</table>
</div>
</li>
<li>
<div class="collapsible-header"><i class="material-icons">all_out</i>Folding</div>
<div class="collapsible-body">
<table class="bordered highlight centered">
<thead>
<tr>
<th>Windows/Linux</th>
<th>Mac</th>
<th>Action</th>
</tr>
</thead>
<tbody>
<tr>
<td>Alt-L, Ctrl-F1</td>
<td>Command-Option-L, Command-F1</td>
<td>Fold selection</td>
</tr>
<tr>
<td>Alt-Shift-L, Ctrl-Shift-F1</td>
<td>Command-Option-Shift-L, Command-Shift-F1</td>
<td>Unfold</td>
</tr>
<tr>
<td>Alt-0</td>
<td>Command-Option-0</td>
<td>Fold all</td>
</tr>
<tr>
<td>Alt-Shift-0</td>
<td>Command-Option-Shift-0</td>
<td>Unfold all</td>
</tr>
</tbody>
</table>
</div>
</li>
<li>
<div class="collapsible-header"><i class="material-icons">devices_other</i>Other</div>
<div class="collapsible-body">
<table class="bordered highlight centered">
<thead>
<tr>
<th>Windows/Linux</th>
<th>Mac</th>
<th>Action</th>
</tr>
</thead>
<tbody>
<tr>
<td>Tab</td>
<td>Tab</td>
<td>Indent</td>
</tr>
<tr>
<td>Shift-Tab</td>
<td>Shift-Tab</td>
<td>Outdent</td>
</tr>
<tr>
<td>Ctrl-Z</td>
<td>Command-Z</td>
<td>Undo</td>
</tr>
<tr>
<td>Ctrl-Shift-Z, Ctrl-Y</td>
<td>Command-Shift-Z, Command-Y</td>
<td>Redo</td>
</tr>
<tr>
<td>Ctrl-,</td>
<td>Command-,</td>
<td>Show the settings menu</td>
</tr>
<tr>
<td>Ctrl-/</td>
<td>Command-/</td>
<td>Toggle comment</td>
</tr>
<tr>
<td>Ctrl-T</td>
<td>Ctrl-T</td>
<td>Transpose letters</td>
</tr>
<tr>
<td>Ctrl-Enter</td>
<td>Command-Enter</td>
<td>Enter full screen</td>
</tr>
<tr>
<td>Ctrl-Shift-U</td>
<td>Ctrl-Shift-U</td>
<td>Change to lower case</td>
</tr>
<tr>
<td>Ctrl-U</td>
<td>Ctrl-U</td>
<td>Change to upper case</td>
</tr>
<tr>
<td>Insert</td>
<td>Insert</td>
<td>Overwrite</td>
</tr>
<tr>
<td>Ctrl-Shift-E</td>
<td>Command-Shift-E</td>
<td>Macros replay</td>
</tr>
<tr>
<td>Ctrl-Alt-E</td>
<td>---</td>
<td>Macros recording</td>
</tr>
<tr>
<td>Delete</td>
<td>---</td>
<td>Delete</td>
</tr>
<tr>
<td>---</td>
<td>Ctrl-L</td>
<td>Center selection</td>
</tr>
</tbody>
</table>
</div>
</li>
</ul>
</div>
<div class="modal-footer">
<a class="modal-action modal-close waves-effect btn-flat light-blue-text">Close</a>
</div>
</div>
<div id="modal_events" class="modal modal-fixed-footer">
<div class="modal-content">
<h4 class="grey-text text-darken-3">Event Observer<i class="grey-text text-darken-3 material-icons right" style="font-size: 2rem;">error_outline</i></h4>
<br />
<div class="row">
<form class="col s12">
<div class="row">
<div class="input-field col s12">
<input type="text" id="ws_uri" placeholder="ws://127.0.0.1:8123/api/websocket" value="$hass_ws_address"/>
<label for="ws_uri">Websocket URI</label>
</div>
</div>
<div class="row">
<div class="input-field col s12">
<input type="password" id="ws_password" value="$api_password"/>
<label for="ws_password">API password</label>
</div>
</div>
<div class="row">
<div class="input-field col s12">
<textarea id="ws_events" class="materialize-textarea"></textarea>
</div>
</div>
</form>
</div>
</div>
<div class="modal-footer">
<a onclick="ws_connect()" id="ws_b_c" class="modal-action waves-effect waves-green btn-flat light-blue-text">Connect</a>
<a onclick="ws_disconnect()" id="ws_b_d" class="modal-action waves-effect waves-green btn-flat light-blue-text disabled">Disconnect</a>
<a onclick="ws_disconnect()" class="modal-action modal-close waves-effect waves-red btn-flat light-blue-text">Close</a>
</div>
</div>
<div id="modal_save" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">Save<i class="grey-text text-darken-3 material-icons right" style="font-size: 2rem;">save</i></h4>
<p>Do you really want to save?</p>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">No</a>
<a onclick="save()" class=" modal-action modal-close waves-effect waves-green btn-flat light-blue-text">Yes</a>
</div>
</div>
<div id="modal_upload" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">Upload File<i class="grey-text text-darken-3 material-icons right" style="font-size: 2.28rem;">file_upload</i></h4>
<p>Please choose a file to upload</p>
<form action="#" id="uploadform">
<div class="file-field input-field">
<div class="btn light-blue waves-effect">
<span>File</span>
<input type="file" id="uploadfile" />
</div>
<div class="file-path-wrapper">
<input class="file-path validate" type="text">
</div>
</div>
</form>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">Cancel</a>
<a onclick="upload()" class="modal-action modal-close waves-effect waves-green btn-flat light-blue-text">OK</a>
</div>
</div>
<div id="modal_init" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">git init<i class="mdi mdi-git right grey-text text-darken-3" style="font-size: 2.48rem;"></i></h4>
<p>Are you sure you want to initialize a repository at the current path?</p>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">Cancel</a>
<a onclick="gitinit()" class=" modal-action modal-close waves-effect waves-green btn-flat light-blue-text">OK</a>
</div>
</div>
<div id="modal_commit" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">git commit<i class="mdi mdi-git right grey-text text-darken-3" style="font-size: 2.48rem;"></i></h4>
<div class="row">
<div class="input-field col s12">
<input type="text" id="commitmessage">
<label class="active" for="commitmessage">Commit message</label>
</div>
</div>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">Cancel</a>
<a onclick="commit(document.getElementById('commitmessage').value)" class=" modal-action modal-close waves-effect waves-green btn-flat light-blue-text">OK</a>
</div>
</div>
<div id="modal_push" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">git push<i class="mdi mdi-git right grey-text text-darken-3" style="font-size: 2.48rem;"></i></h4>
<p>Are you sure you want to push your committed changes to the configured remote / origin?</p>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">Cancel</a>
<a onclick="gitpush()" class=" modal-action modal-close waves-effect waves-green btn-flat light-blue-text">OK</a>
</div>
</div>
<div id="modal_stash" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">git stash<i class="mdi mdi-git right grey-text text-darken-3" style="font-size: 2.48rem;"></i></h4>
<p>Are you sure you want to stash your changes?</p>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">Cancel</a>
<a onclick="gitstash()" class=" modal-action modal-close waves-effect waves-green btn-flat light-blue-text">OK</a>
</div>
</div>
<div id="modal_close" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">Close File<i class="grey-text text-darken-3 material-icons right" style="font-size: 2.28rem;">close</i></h4>
<p>Are you sure you want to close the current file? Unsaved changes will be lost.</p>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">No</a>
<a onclick="closefile()" class="modal-action modal-close waves-effect waves-green btn-flat light-blue-text">Yes</a>
</div>
</div>
<div id="modal_delete" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">Delete</h4>
<p>Are you sure you want to delete <span class="fb_currentfile"></span>?</p>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">No</a>
<a onclick="delete_element()" class="modal-action modal-close waves-effect waves-green btn-flat light-blue-text">Yes</a>
</div>
</div>
<div id="modal_gitadd" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">git add<i class="mdi mdi-git right grey-text text-darken-3" style="font-size: 2.48rem;"></i></h4>
<p>Are you sure you want to add <span class="fb_currentfile"></span> to the index?</p>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">No</a>
<a onclick="gitadd()" class="modal-action modal-close waves-effect waves-green btn-flat light-blue-text">Yes</a>
</div>
</div>
<div id="modal_check_config" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">Check configuration<i class="mdi mdi-settings right grey-text text-darken-3" style="font-size: 2rem;"></i></h4>
<p>Do you want to check the configuration?</p>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">No</a>
<a onclick="check_config()" class=" modal-action modal-close waves-effect waves-green btn-flat light-blue-text">Yes</a>
</div>
</div>
<div id="modal_reload_automations" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">Reload automations<i class="mdi mdi-settings right grey-text text-darken-3" style="font-size: 2rem;"></i></h4>
<p>Do you want to reload the automations?</p>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">No</a>
<a onclick="reload_automations()" class=" modal-action modal-close waves-effect waves-green btn-flat light-blue-text">Yes</a>
</div>
</div>
<div id="modal_reload_scripts" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">Reload scripts<i class="mdi mdi-settings right grey-text text-darken-3" style="font-size: 2rem;"></i></h4>
<p>Do you want to reload the scripts?</p>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">No</a>
<a onclick="reload_scripts()" class=" modal-action modal-close waves-effect waves-green btn-flat light-blue-text">Yes</a>
</div>
</div>
<div id="modal_reload_groups" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">Reload groups<i class="mdi mdi-settings right grey-text text-darken-3" style="font-size: 2rem;"></i></h4>
<p>Do you want to reload the groups?</p>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">No</a>
<a onclick="reload_groups()" class=" modal-action modal-close waves-effect waves-green btn-flat light-blue-text">Yes</a>
</div>
</div>
<div id="modal_reload_core" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">Reload core<i class="mdi mdi-settings right grey-text text-darken-3" style="font-size: 2rem;"></i></h4>
<p>Do you want to reload the core?</p>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">No</a>
<a onclick="reload_core()" class=" modal-action modal-close waves-effect waves-green btn-flat light-blue-text">Yes</a>
</div>
</div>
<div id="modal_restart" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">Restart<i class="mdi mdi-restart right grey-text text-darken-3" style="font-size: 2rem;"></i></h4>
<p>Do you really want to restart Home Assistant?</p>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">No</a>
<a onclick="restart()" class=" modal-action modal-close waves-effect waves-green btn-flat light-blue-text">Yes</a>
</div>
</div>
<div id="modal_a_net_remove" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">Remove allowed network / IP<i class="mdi mdi-settings right grey-text text-darken-3" style="font-size: 2rem;"></i></h4>
<p>Do you really want to remove the network / IP <b><span id="removenet"></span></b> from the list of allowed networks?</p>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">No</a>
<a onclick="a_net_remove()" class=" modal-action modal-close waves-effect waves-green btn-flat light-blue-text">Yes</a>
</div>
</div>
<div id="modal_a_net_add" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">Add allowed network / IP<i class="mdi mdi-settings right grey-text text-darken-3" style="font-size: 2rem;"></i></h4>
<p>Do you really want to add the network / IP <b><span id="addnet"></span></b> to the list of allowed networks?</p>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">No</a>
<a onclick="a_net_add()" class=" modal-action modal-close waves-effect waves-green btn-flat light-blue-text">Yes</a>
</div>
</div>
<div id="modal_unban" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">Unban IP<i class="mdi mdi-settings right grey-text text-darken-3" style="font-size: 2rem;"></i></h4>
<p>Do you really want to unban the IP <b><span id="unbanip"></span></b>?</p>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">No</a>
<a onclick="banned_unban()" class=" modal-action modal-close waves-effect waves-green btn-flat light-blue-text">Yes</a>
</div>
</div>
<div id="modal_ban" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">Ban IP<i class="mdi mdi-settings right grey-text text-darken-3" style="font-size: 2rem;"></i></h4>
<p>Do you really want to ban the IP <b><span id="banip"></span></b>?</p>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">No</a>
<a onclick="banned_ban()" class=" modal-action modal-close waves-effect waves-green btn-flat light-blue-text">Yes</a>
</div>
</div>
<div id="modal_exec_command" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">Execute shell command<i class="mdi mdi-laptop right grey-text text-darken-3" style="font-size: 2rem;"></i></h4>
<pre class="col s6" id="command_history"></pre>
<br>
<div class="row">
<div class="input-field col s12">
<input placeholder="/bin/ls -l /var/log" id="commandline" type="text">
<label for="commandline">Command</label>
</div>
</div>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">Close</a>
<a onclick="document.getElementById('command_history').innerText='';" class=" modal-action waves-effect waves-green btn-flat light-blue-text">Clear</a>
<a onclick="exec_command()" class=" modal-action waves-effect waves-green btn-flat light-blue-text">Execute</a>
</div>
</div>
<div id="modal_markdirty" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">Unsaved Changes<i class="grey-text text-darken-3 material-icons right" style="font-size: 2rem;">save</i></h4>
<p>You have unsaved changes in the current file. Please save the changes or close the file before opening a new one.</p>
</div>
<div class="modal-footer">
<a class="modal-action modal-close waves-effect waves-red btn-flat light-blue-text">Abort</a>
<a onclick="document.getElementById('currentfile').value='';editor.getSession().setValue('');$('.markdirty').each(function(i, o){o.classList.remove('red');});" class="modal-action modal-close waves-effect waves-green btn-flat light-blue-text">Close file</a>
<a onclick="save()" class="modal-action modal-close waves-effect waves-green btn-flat light-blue-text">Save changes</a>
</div>
</div>
<div id="modal_newfolder" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">New Folder<i class="grey-text text-darken-3 material-icons right" style="font-size: 2rem;">create_new_folder</i></h4>
<br>
<div class="row">
<div class="input-field col s12">
<input type="text" id="newfoldername">
<label class="active" for="newfoldername">New Folder Name</label>
</div>
</div>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">Cancel</a>
<a onclick="newfolder(document.getElementById('newfoldername').value)" class=" modal-action modal-close waves-effect waves-green btn-flat light-blue-text">OK</a>
</div>
</div>
<div id="modal_newfile" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">New File<i class="grey-text text-darken-3 material-icons right" style="font-size: 2rem;">note_add</i></h4>
<br>
<div class="row">
<div class="input-field col s12">
<input type="text" id="newfilename">
<label class="active" for="newfilename">New File Name</label>
</div>
</div>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">Cancel</a>
<a onclick="newfile(document.getElementById('newfilename').value)" class=" modal-action modal-close waves-effect waves-green btn-flat light-blue-text">OK</a>
</div>
</div>
<div id="modal_newbranch" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">New Branch<i class="mdi mdi-git right grey-text text-darken-3" style="font-size: 2.48rem;"></i></h4>
<div class="row">
<div class="input-field col s12">
<input type="text" id="newbranch">
<label class="active" for="newbranch">New Branch Name</label>
</div>
</div>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">Cancel</a>
<a onclick="newbranch(document.getElementById('newbranch').value)" class=" modal-action modal-close waves-effect waves-green btn-flat light-blue-text">OK</a>
</div>
</div>
<div id="modal_netstat" class="modal">
<div class="modal-content">
<h4 class="grey-text text-darken-3">Network status<i class="mdi mdi-network right grey-text text-darken-3" style="font-size: 2.48rem;"></i></h4>
<p><label for="your_address">Your address: </label><span id="your_address">$your_address</span></p>
<p><label for="listening_address">Listening address: </label><span id="listening_address">$listening_address</span></p>
<p><label for="hass_api_address">HASS API address: </label><span id="hass_api_address">$hass_api_address</span></p>
<p>Modifying the following lists is not persistent. To statically control access please use the configuration file.</p>
<p>
<ul id="allowed_networks" class="collection with-header"></ul>
<br />
<div class="input-field">
<a href="#" class="prefix" onclick="helper_a_net_add()"><i class="mdi mdi-plus-circle prefix light-blue-text"></i></a></i>
<input placeholder="192.168.0.0/16" id="add_net_ip" type="text">
<label for="add_net_ip">Add network / IP</label>
</div>
</p>
<p>
<ul id="banned_ips" class="collection with-header"></ul>
<br />
<div class="input-field">
<a href="#" class="prefix" onclick="helper_banned_ban()"><i class="mdi mdi-plus-circle prefix light-blue-text"></i></a></i>
<input placeholder="1.2.3.4" id="add_banned_ip" type="text">
<label for="add_banned_ip">Ban IP</label>
</div>
</p>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect waves-red btn-flat light-blue-text">Cancel</a>
</div>
</div>
<div id="modal_about" class="modal modal-fixed-footer">
<div class="modal-content">
<h4 class="grey-text text-darken-3"><a class="black-text" href="https://github.com/danielperna84/hass-configurator/" target="_blank">HASS Configurator</a></h4>
<p>Version: <a class="$versionclass" href="https://github.com/danielperna84/hass-configurator/releases/" target="_blank">$current</a></p>
<p>Web-based file editor designed to modify configuration files of <a class="light-blue-text" href="https://home-assistant.io/" target="_blank">Home Assistant</a> or other textual files. Use at your own risk.</p>
<p>Published under the MIT license</p>
<p>Developed by:</p>
<ul>
<li>
<div class="chip"> <img src="https://avatars3.githubusercontent.com/u/7396998?v=4&s=400" alt="Contact Person"> <a class="black-text" href="https://github.com/danielperna84" target="_blank">Daniel Perna</a> </div>
</li>
<li>
<div class="chip"> <img src="https://avatars2.githubusercontent.com/u/1509640?v=4&s=400" alt="Contact Person"> <a class="black-text" href="https://github.com/jmart518" target="_blank">JT Martinez</a> </div>
</li>
<li>
<div class="chip"> <img src="https://avatars0.githubusercontent.com/u/1525413?v=4&s=400" alt="Contact Person"> <a class="black-text" href="https://github.com/AtoxIO" target="_blank">AtoxIO</a> </div>
</li>
<li>
<div class="chip"> <img src="https://avatars0.githubusercontent.com/u/646513?s=400&v=4" alt="Contact Person"> <a class="black-text" href="https://github.com/Munsio" target="_blank">Martin Treml</a> </div>
</li>
<li>
<div class="chip"> <img src="https://avatars2.githubusercontent.com/u/1399443?s=460&v=4" alt="Contact Person"> <a class="black-text" href="https://github.com/sytone" target="_blank">Sytone</a> </div>
</li>
<li>
<div class="chip"> <img src="https://avatars3.githubusercontent.com/u/1561226?s=400&v=4" alt="Contact Person"> <a class="black-text" href="https://github.com/dimagoltsman" target="_blank">Dima Goltsman</a> </div>
</li>
</ul>
<p>Libraries used:</p>
<div class="row">
<div class="col s6 m3 l3">
<a href="https://ace.c9.io/" target="_blank">
<div class="card grey lighten-3 hoverable waves-effect">
<div class="card-image">
<img src="https://drive.google.com/uc?export=view&id=0B6wTGzSOtvNBeld4U09LQkV0c2M">
</div>
<div class="card-content">
<p class="grey-text text-darken-2">Ace Editor</p>
</div>
</div>
</a>
</div>
<div class="col s6 m3 l3">
<a class="light-blue-text" href="http://materializecss.com/" target="_blank">
<div class="card grey lighten-3 hoverable">
<div class="card-image">
<img src="https://evwilkin.github.io/images/materializecss.png">
</div>
<div class="card-content">
<p class="grey-text text-darken-2">Materialize</p>
</div>
</div>
</a>
</div>
<div class="col s6 m3 l3">
<a class="light-blue-text" href="https://jquery.com/" target="_blank">
<div class="card grey lighten-3 hoverable">
<div class="card-image">
<img src="https://drive.google.com/uc?export=view&id=0B6wTGzSOtvNBdFI0ZXRGb01xNzQ">
</div>
<div class="card-content">
<p class="grey-text text-darken-2">JQuery</p>
</div>
</div>
</a>
</div>
<div class="col s6 m3 l3">
<a class="light-blue-text" href="https://gitpython.readthedocs.io" target="_blank">
<div class="card grey lighten-3 hoverable">
<div class="card-image">
<img src="https://drive.google.com/uc?export=view&id=0B6wTGzSOtvNBakk4ek1uRGxqYVE">
</div>
<div class="card-content">
<p class="grey-text text-darken-2">GitPython</p>
</div>
</div>
</a>
</div>
<div class="col s6 m3 l3">
<a class="light-blue-text" href="https://github.com/nodeca/js-yaml" target="_blank">
<div class="card grey lighten-3 hoverable">
<div class="card-image">
</div>
<div class="card-content">
<p class="grey-text text-darken-2">js-yaml</p>
</div>
</div>
</a>
</div>
</div>
</div>
<div class="modal-footer">
<a class=" modal-action modal-close waves-effect btn-flat light-blue-text">OK</a>
</div>
</div>
<div id="modal_lint" class="modal">
<div class="modal-content">
<textarea rows="8" readonly></textarea>
</div>
<div class="modal-footer">
<a class="modal-action modal-close waves-effect btn-flat light-blue-text">OK</a>
</div>
</div>
<!-- Main Editor Area -->
<div class="row">
<div id="hass_menu_l" class="col m4 l3 hide-on-small-only">
<br>
<div class="input-field col s12">
<select onchange="insert(this.value)">
<option value="" disabled selected>Select trigger platform</option>
<option value="event">Event</option>
<option value="homeassistant">Home Assistant</option>
<option value="mqtt">MQTT</option>
<option value="numeric_state">Numeric State</option>
<option value="state">State</option>
<option value="sun">Sun</option>
<option value="template">Template</option>
<option value="time">Time</option>
<option value="zone">Zone</option>
</select>
<label>Trigger platforms</label>
</div>
<div class="input-field col s12">
<select id="events" onchange="insert(this.value)"></select>
<label>Events</label>
</div>
<div class="input-field col s12">
<input type="text" id="entities-search" class="autocomplete" placeholder="sensor.example">
<label>Search entity</label>
</div>
<div class="input-field col s12">
<select id="entities" onchange="insert(this.value)"></select>
<label>Entities</label>
</div>
<div class="input-field col s12">
<select onchange="insert(this.value)">
<option value="" disabled selected>Select condition</option>
<option value="numeric_state">Numeric state</option>
<option value="state">State</option>
<option value="sun">Sun</option>
<option value="template">Template</option>
<option value="time">Time</option>
<option value="zone">Zone</option>
</select>
<label>Conditions</label>
</div>
<div class="input-field col s12">
<select id="services" onchange="insert(this.value)"> </select>
<label>Services</label>
</div>
</div>
<div id="filename_row" class="col s12 m8 l9">
<div class="card input-field col s12 grey lighten-4 hoverable pathtip">
<input class="currentfile_input" value="" id="currentfile" type="text">
<i class="material-icons" id="lint-status" onclick="show_lint_error()"></i>
</div>
</div>
<div class="col s12 m8 l9 z-depth-2" id="editor"></div>
<div id="edit_float" class="fixed-action-btn vertical click-to-toggle">
<a class="btn-floating btn-large red accent-2 hoverable">
<i class="material-icons">edit</i>
</a>
<ul>
<li><a class="btn-floating yellow tooltipped" data-position="left" data-delay="50" data-tooltip="Undo" onclick="editor.execCommand('undo')"><i class="material-icons">undo</i></a></li>
<li><a class="btn-floating green tooltipped" data-position="left" data-delay="50" data-tooltip="Redo" onclick="editor.execCommand('redo')"><i class="material-icons">redo</i></a></li>
<li><a class="btn-floating blue tooltipped" data-position="left" data-delay="50" data-tooltip="Indent" onclick="editor.execCommand('indent')"><i class="material-icons">format_indent_increase</i></a></li>
<li><a class="btn-floating orange tooltipped" data-position="left" data-delay="50" data-tooltip="Outdent" onclick="editor.execCommand('outdent')"><i class="material-icons">format_indent_decrease</i></a></li>
<li><a class="btn-floating brown tooltipped" data-position="left" data-delay="50" data-tooltip="Fold" onclick="toggle_fold()"><i class="material-icons">all_out</i></a></li>
<li><a class="btn-floating grey tooltipped" data-position="left" data-delay="50" data-tooltip="(Un)comment" onclick="editor.execCommand('togglecomment')">#</a></li>
</ul>
</div>
</div>
<!-- Left filebrowser sidenav -->
<div class="row">
<ul id="slide-out" class="side-nav grey lighten-4">
<li class="no-padding">
<ul class="row no-padding center hide-on-small-only grey lighten-4" style="margin-bottom: 0;">
<a class="col s3 waves-effect fbtoolbarbutton tooltipped modal-trigger" href="#modal_newfile" data-position="bottom" data-delay="500" data-tooltip="New File"><i class="grey-text text-darken-2 material-icons fbtoolbarbutton_icon">note_add</i></a>
<a class="col s3 waves-effect fbtoolbarbutton tooltipped modal-trigger" href="#modal_newfolder" data-position="bottom" data-delay="500" data-tooltip="New Folder"><i class="grey-text text-darken-2 material-icons fbtoolbarbutton_icon">create_new_folder</i></a>
<a class="col s3 waves-effect fbtoolbarbutton tooltipped modal-trigger" href="#modal_upload" data-position="bottom" data-delay="500" data-tooltip="Upload File"><i class="grey-text text-darken-2 material-icons fbtoolbarbutton_icon">file_upload</i></a>
<a class="col s3 waves-effect fbtoolbarbutton tooltipped dropdown-button $githidden" data-activates="dropdown_gitmenu" data-alignment='right' data-beloworigin='true' data-delay='500' data-position="bottom" data-tooltip="Git"><i class="mdi mdi-git grey-text text-darken-2 material-icons" style="padding-top: 17px;"></i></a>
</ul>
<ul class="row center toolbar_mobile hide-on-med-and-up grey lighten-4" style="margin-bottom: 0;">
<a class="col s3 waves-effect fbtoolbarbutton modal-trigger" href="#modal_newfile"><i class="grey-text text-darken-2 material-icons fbtoolbarbutton_icon">note_add</i></a>
<a class="col s3 waves-effect fbtoolbarbutton modal-trigger" href="#modal_newfolder"><i class="grey-text text-darken-2 material-icons fbtoolbarbutton_icon">create_new_folder</i></a>
<a class="col s3 waves-effect fbtoolbarbutton modal-trigger" href="#modal_upload"><i class="grey-text text-darken-2 material-icons fbtoolbarbutton_icon">file_upload</i></a>
<a class="col s3 waves-effect fbtoolbarbutton dropdown-button $githidden" data-activates="dropdown_gitmenu_mobile" data-alignment='right' data-beloworigin='true'><i class="mdi mdi-git grey-text text-darken-2 material-icons" style="padding-top: 17px;"></i></a>
</ul>
</li>
<li>
<div class="col s2 no-padding" style="min-height: 64px">
<a id="uplink" class="col s12 waves-effect" style="min-height: 64px; padding-top: 15px; cursor: pointer;"><i class="arrow grey-text text-darken-2 material-icons">arrow_back</i></a>
</div>
<div class="col s10 " style="white-space: nowrap; overflow: auto; min-height: 64px">
<div id="fbheader" class="leftellipsis"></div>
</div>
</li>
<ul id='branches' class="dropdown-content branch_select z-depth-2 grey lighten-4">
<ul id="branchlist"></ul>
</ul>
<li>
<ul class="row no-padding" style="margin-bottom: 0;">
<a id="branchselector" class="col s10 dropdown-button waves-effect truncate grey-text text-darken-2" data-beloworigin="true" data-activates='branches'><i class="grey-text text-darken-2 left material-icons" style="margin-left: 0; margin-right: 0; padding-top: 12px; padding-right: 8px;">arrow_drop_down</i>Branch:<span id="fbheaderbranch"></span></a>
<a id="newbranchbutton" class="waves-effect col s2 center modal-trigger" href="#modal_newbranch"><i class="grey-text text-darken-2 center material-icons" style="padding-top: 12px;">add</i></a>
</ul>
<div class="divider" style="margin-top: 0;"></div>
</li>
<li>
<ul id="fbelements"></ul>
</li>
<div class="row col s12 shadow"></div>
<div id="hass_menu_s" class="z-depth-3 hide-on-med-and-up">
<div class="input-field col s12" style="margin-top: 30px;">
<select onchange="insert(this.value)">
<option value="" disabled selected>Select trigger platform</option>
<option value="event">Event</option>
<option value="mqtt">MQTT</option>
<option value="numeric_state">Numeric State</option>
<option value="state">State</option>
<option value="sun">Sun</option>
<option value="template">Template</option>
<option value="time">Time</option>
<option value="zone">Zone</option>
</select>
<label>Trigger Platforms</label>
</div>
<div class="input-field col s12">
<select id="events_side" onchange="insert(this.value)"></select>
<label>Events</label>
</div>
<div class="input-field col s12">
<input type="text" id="entities-search_side" class="autocomplete" placeholder="sensor.example">
<label>Search entity</label>
</div>
<div class="input-field col s12">
<select id="entities_side" onchange="insert(this.value)"></select>
<label>Entities</label>
</div>
<div class="input-field col s12">
<select onchange="insert(this.value)">
<option value="" disabled selected>Select condition</option>
<option value="numeric_state">Numeric state</option>
<option value="state">State</option>
<option value="sun">Sun</option>
<option value="template">Template</option>
<option value="time">Time</option>
<option value="zone">Zone</option>
</select>
<label>Conditions</label>
</div>
<div class="input-field col s12">
<select id="services_side" onchange="insert(this.value)"></select>
<label>Services</label>
</div>
</div>
</ul>
</div>
<!-- Ace Editor SideNav -->
<div class="row">
<ul id="ace_settings" class="side-nav">
<li class="center s12 grey lighten-3 z-depth-1 subheader">Editor Settings</li>
<div class="row col s12">
<p class="col s12"> <a class="waves-effect waves-light btn light-blue modal-trigger" href="#modal_acekeyboard">Keyboard Shortcuts</a> </p>
<p class="col s12">
<input type="checkbox" class="blue_check" onclick="set_save_prompt(this.checked)" id="savePrompt" />
<Label for="savePrompt">Prompt before save</label>
</p>
<p class="col s12">
<input type="checkbox" class="blue_check" onclick="set_hide_filedetails(this.checked)" id="hideDetails" />
<Label for="hideDetails">Hide details in browser</label>
</p>
<p class="col s12">
<input type="checkbox" class="blue_check" onclick="editor.setOption('animatedScroll', !editor.getOptions().animatedScroll)" id="animatedScroll" />
<Label for="animatedScroll">Animated Scroll</label>
</p>
<p class="col s12">
<input type="checkbox" class="blue_check" onclick="editor.setOption('behavioursEnabled', !editor.getOptions().behavioursEnabled)" id="behavioursEnabled" />
<Label for="behavioursEnabled">Behaviour Enabled</label>
</p>
<p class="col s12">
<input type="checkbox" class="blue_check" onclick="editor.setOption('displayIndentGuides', !editor.getOptions().displayIndentGuides)" id="displayIndentGuides" />
<Label for="displayIndentGuides">Display Indent Guides</label>
</p>
<p class="col s12">
<input type="checkbox" class="blue_check" onclick="editor.setOption('fadeFoldWidgets', !editor.getOptions().fadeFoldWidgets)" id="fadeFoldWidgets" />
<Label for="fadeFoldWidgets">Fade Fold Widgets</label>
</p>
<div class="input-field col s12">
<input type="number" onchange="editor.setOption('fontSize', parseInt(this.value))" min="6" id="fontSize">
<label class="active" for="fontSize">Font Size</label>
</div>
<p class="col s12">
<input type="checkbox" class="blue_check" onclick="editor.setOption('highlightActiveLine', !editor.getOptions().highlightActiveLine)" id="highlightActiveLine" />
<Label for="highlightActiveLine">Hightlight Active Line</label>
</p>
<p class="col s12">
<input type="checkbox" class="blue_check" onclick="editor.setOption('highlightGutterLine', !editor.getOptions().highlightGutterLine)" id="highlightGutterLine" />
<Label for="highlightGutterLine">Hightlight Gutter Line</label>
</p>
<p class="col s12">
<input type="checkbox" class="blue_check" onclick="editor.setOption('highlightSelectedWord', !editor.getOptions().highlightSelectedWord)" id="highlightSelectedWord" />
<Label for="highlightSelectedWord">Hightlight Selected Word</label>
</p>
<p class="col s12">
<input type="checkbox" class="blue_check" onclick="editor.setOption('hScrollBarAlwaysVisible', !editor.getOptions().hScrollBarAlwaysVisible)" id="hScrollBarAlwaysVisible" />
<Label for="hScrollBarAlwaysVisible">H Scroll Bar Always Visible</label>
</p>
<div class="input-field col s12">
<select onchange="editor.setKeyboardHandler(this.value)" id="setKeyboardHandler">
<option value="">ace</option>
<option value="ace/keyboard/vim">vim</option>
<option value="ace/keyboard/emacs">emacs</option>
</select>
<label for="setKeyboardHandler">Keyboard Handler</label>
</div>
<div class="input-field col s12">
<select onchange="editor.setOption('mode', this.value)" id="mode">
<option value="ace/mode/abap">abap</option>
<option value="ace/mode/abc">abc</option>
<option value="ace/mode/actionscript">actionscript</option>
<option value="ace/mode/ada">ada</option>
<option value="ace/mode/apache_conf">apache_conf</option>
<option value="ace/mode/asciidoc">asciidoc</option>
<option value="ace/mode/assembly_x86">assembly_x86</option>
<option value="ace/mode/autohotkey">autohotkey</option>
<option value="ace/mode/batchfile">batchfile</option>
<option value="ace/mode/bro">bro</option>
<option value="ace/mode/c_cpp">c_cpp</option>
<option value="ace/mode/c9search">c9search</option>
<option value="ace/mode/cirru">cirru</option>
<option value="ace/mode/clojure">clojure</option>
<option value="ace/mode/cobol">cobol</option>
<option value="ace/mode/coffee">coffee</option>
<option value="ace/mode/coldfusion">coldfusion</option>
<option value="ace/mode/csharp">csharp</option>
<option value="ace/mode/css">css</option>
<option value="ace/mode/curly">curly</option>
<option value="ace/mode/d">d</option>
<option value="ace/mode/dart">dart</option>
<option value="ace/mode/diff">diff</option>
<option value="ace/mode/django">django</option>
<option value="ace/mode/dockerfile">dockerfile</option>
<option value="ace/mode/dot">dot</option>
<option value="ace/mode/drools">drools</option>
<option value="ace/mode/dummy">dummy</option>
<option value="ace/mode/dummysyntax">dummysyntax</option>
<option value="ace/mode/eiffel">eiffel</option>
<option value="ace/mode/ejs">ejs</option>
<option value="ace/mode/elixir">elixir</option>
<option value="ace/mode/elm">elm</option>
<option value="ace/mode/erlang">erlang</option>
<option value="ace/mode/forth">forth</option>
<option value="ace/mode/fortran">fortran</option>
<option value="ace/mode/ftl">ftl</option>
<option value="ace/mode/gcode">gcode</option>
<option value="ace/mode/gherkin">gherkin</option>
<option value="ace/mode/gitignore">gitignore</option>
<option value="ace/mode/glsl">glsl</option>
<option value="ace/mode/gobstones">gobstones</option>
<option value="ace/mode/golang">golang</option>
<option value="ace/mode/groovy">groovy</option>
<option value="ace/mode/haml">haml</option>
<option value="ace/mode/handlebars">handlebars</option>
<option value="ace/mode/haskell">haskell</option>
<option value="ace/mode/haskell_cabal">haskell_cabal</option>
<option value="ace/mode/haxe">haxe</option>
<option value="ace/mode/hjson">hjson</option>
<option value="ace/mode/html">html</option>
<option value="ace/mode/html_elixir">html_elixir</option>
<option value="ace/mode/html_ruby">html_ruby</option>
<option value="ace/mode/ini">ini</option>
<option value="ace/mode/io">io</option>
<option value="ace/mode/jack">jack</option>
<option value="ace/mode/jade">jade</option>
<option value="ace/mode/java">java</option>
<option value="ace/mode/javascript">javascript</option>
<option value="ace/mode/json">json</option>
<option value="ace/mode/jsoniq">jsoniq</option>
<option value="ace/mode/jsp">jsp</option>
<option value="ace/mode/jsx">jsx</option>
<option value="ace/mode/julia">julia</option>
<option value="ace/mode/kotlin">kotlin</option>
<option value="ace/mode/latex">latex</option>
<option value="ace/mode/less">less</option>
<option value="ace/mode/liquid">liquid</option>
<option value="ace/mode/lisp">lisp</option>
<option value="ace/mode/livescript">livescript</option>
<option value="ace/mode/logiql">logiql</option>
<option value="ace/mode/lsl">lsl</option>
<option value="ace/mode/lua">lua</option>
<option value="ace/mode/luapage">luapage</option>
<option value="ace/mode/lucene">lucene</option>
<option value="ace/mode/makefile">makefile</option>
<option value="ace/mode/markdown">markdown</option>
<option value="ace/mode/mask">mask</option>
<option value="ace/mode/matlab">matlab</option>
<option value="ace/mode/maze">maze</option>
<option value="ace/mode/mel">mel</option>
<option value="ace/mode/mushcode">mushcode</option>
<option value="ace/mode/mysql">mysql</option>
<option value="ace/mode/nix">nix</option>
<option value="ace/mode/nsis">nsis</option>
<option value="ace/mode/objectivec">objectivec</option>
<option value="ace/mode/ocaml">ocaml</option>
<option value="ace/mode/pascal">pascal</option>
<option value="ace/mode/perl">perl</option>
<option value="ace/mode/pgsql">pgsql</option>
<option value="ace/mode/php">php</option>
<option value="ace/mode/powershell">powershell</option>
<option value="ace/mode/praat">praat</option>
<option value="ace/mode/prolog">prolog</option>
<option value="ace/mode/properties">properties</option>
<option value="ace/mode/protobuf">protobuf</option>
<option value="ace/mode/python">python</option>
<option value="ace/mode/r">r</option>
<option value="ace/mode/razor">razor</option>
<option value="ace/mode/rdoc">rdoc</option>
<option value="ace/mode/rhtml">rhtml</option>
<option value="ace/mode/rst">rst</option>
<option value="ace/mode/ruby">ruby</option>
<option value="ace/mode/rust">rust</option>
<option value="ace/mode/sass">sass</option>
<option value="ace/mode/scad">scad</option>
<option value="ace/mode/scala">scala</option>
<option value="ace/mode/scheme">scheme</option>
<option value="ace/mode/scss">scss</option>
<option value="ace/mode/sh">sh</option>
<option value="ace/mode/sjs">sjs</option>
<option value="ace/mode/smarty">smarty</option>
<option value="ace/mode/snippets">snippets</option>
<option value="ace/mode/soy_template">soy_template</option>
<option value="ace/mode/space">space</option>
<option value="ace/mode/sql">sql</option>
<option value="ace/mode/sqlserver">sqlserver</option>
<option value="ace/mode/stylus">stylus</option>
<option value="ace/mode/svg">svg</option>
<option value="ace/mode/swift">swift</option>
<option value="ace/mode/tcl">tcl</option>
<option value="ace/mode/tex">tex</option>
<option value="ace/mode/text">text</option>
<option value="ace/mode/textile">textile</option>
<option value="ace/mode/toml">toml</option>
<option value="ace/mode/tsx">tsx</option>
<option value="ace/mode/twig">twig</option>
<option value="ace/mode/typescript">typescript</option>
<option value="ace/mode/vala">vala</option>
<option value="ace/mode/vbscript">vbscript</option>
<option value="ace/mode/velocity">velocity</option>
<option value="ace/mode/verilog">verilog</option>
<option value="ace/mode/vhdl">vhdl</option>
<option value="ace/mode/wollok">wollok</option>
<option value="ace/mode/xml">xml</option>
<option value="ace/mode/xquery">xquery</option>
<option value="ace/mode/yaml">yaml</option>
</select>
<label for="mode">Mode</label>
</div>
<div class="input-field col s12">
<select onchange="editor.setOption('newLineMode', this.value)" id="newLineMode">
<option value="auto">Auto</option>
<option value="windows">Windows</option>
<option value="unix">Unix</option>
</select>
<label for="newLineMode">New Line Mode</label>
</div>
<p class="col s12">
<input type="checkbox" class="blue_check" onclick="editor.setOption('overwrite', !editor.getOptions().overwrite)" id="overwrite" />
<Label for="overwrite">Overwrite</label>
</p>
<p class="col s12">
<input type="checkbox" class="blue_check" onclick="editor.setOption('readOnly', !editor.getOptions().readOnly)" id="readOnly" />
<Label for="readOnly">Read Only</label>
</p>
<div class="input-field col s12">
<input value="2" type="number" onchange="editor.setOption('scrollSpeed', parseInt(this.value))" id="scrollSpeed">
<label class="active" for="scrollSpeed">Scroll Speed</label>
</div>
<p class="col s12">
<input type="checkbox" class="blue_check" onclick="editor.setOption('showFoldWidgets', !editor.getOptions().showFoldWidgets)" id="showFoldWidgets" />
<Label for="showFoldWidgets">Show Fold Widgets</label>
</p>
<p class="col s12">
<input type="checkbox" class="blue_check" onclick="editor.setOption('showGutter', !editor.getOptions().showGutter)" id="showGutter" />
<Label for="showGutter">Show Gutter</label>
</p>
<p class="col s12">
<input type="checkbox" class="blue_check" onclick="editor.setOption('showInvisibles', !editor.getOptions().showInvisibles)" id="showInvisibles" />
<Label for="showInvisibles">Show Invisibles</label>
</p>
<p class="col s12">
<input type="checkbox" class="blue_check" onclick="editor.setOption('showPrintMargin', !editor.getOptions().showPrintMargin)" id="showPrintMargin" />
<Label for="showPrintMargin">Show Print Margin</label>
</p>
<p class="col s12">
<input type="checkbox" class="blue_check" onclick="editor.setOption('showLineNumbers', !editor.getOptions().showLineNumbers)" id="showLineNumbers" />
<Label for="showLineNumbers">Show Line Numbers</label>
</p>
<div class="input-field col s12">
<input type="number" onchange="editor.setOption('tabSize', parseInt(this.value))" min="1" id="tabSize">
<label class="active" for="tabSize">Tab Size</label>
</div>
<div class="input-field col s12">
<select onchange="editor.setTheme(this.value)" id="theme">
<optgroup label="Light Themes">
<option value="ace/theme/chrome">Chrome</option>
<option value="ace/theme/clouds">Clouds</option>
<option value="ace/theme/crimson_editor">Crimson Editor</option>
<option value="ace/theme/dawn">Dawn</option>
<option value="ace/theme/dreamweaver">Dreamweaver</option>
<option value="ace/theme/eclipse">Eclipse</option>
<option value="ace/theme/github">GitHub</option>
<option value="ace/theme/iplastic">IPlastic</option>
<option value="ace/theme/solarized_light">Solarized Light</option>
<option value="ace/theme/textmate">TextMate</option>
<option value="ace/theme/tomorrow">Tomorrow</option>
<option value="ace/theme/xcode">XCode</option>
<option value="ace/theme/kuroir">Kuroir</option>
<option value="ace/theme/katzenmilch">KatzenMilch</option>
<option value="ace/theme/sqlserver">SQL Server</option>
</optgroup>
<optgroup label="Dark Themes">
<option value="ace/theme/ambiance">Ambiance</option>
<option value="ace/theme/chaos">Chaos</option>
<option value="ace/theme/clouds_midnight">Clouds Midnight</option>
<option value="ace/theme/cobalt">Cobalt</option>
<option value="ace/theme/gruvbox">Gruvbox</option>
<option value="ace/theme/idle_fingers">idle Fingers</option>
<option value="ace/theme/kr_theme">krTheme</option>
<option value="ace/theme/merbivore">Merbivore</option>
<option value="ace/theme/merbivore_soft">Merbivore Soft</option>
<option value="ace/theme/mono_industrial">Mono Industrial</option>
<option value="ace/theme/monokai">Monokai</option>
<option value="ace/theme/pastel_on_dark">Pastel on dark</option>
<option value="ace/theme/solarized_dark">Solarized Dark</option>
<option value="ace/theme/terminal">Terminal</option>
<option value="ace/theme/tomorrow_night">Tomorrow Night</option>
<option value="ace/theme/tomorrow_night_blue">Tomorrow Night Blue</option>
<option value="ace/theme/tomorrow_night_bright">Tomorrow Night Bright</option>
<option value="ace/theme/tomorrow_night_eighties">Tomorrow Night 80s</option>
<option value="ace/theme/twilight">Twilight</option>
<option value="ace/theme/vibrant_ink">Vibrant Ink</option>
</optgroup>
</select>
<label for="theme">Theme</label>
</div>
<p class="col s12">
<input type="checkbox" class="blue_check" onclick="editor.setOption('useSoftTabs', !editor.getOptions().useSoftTabs)" id="useSoftTabs" />
<Label for="useSoftTabs">Use Soft Tabs</label>
</p>
<p class="col s12">
<input type="checkbox" class="blue_check" onclick="editor.setOption('useWorker', !editor.getOptions().useWorker)" id="useWorker" />
<Label for="useWorker">Use Worker</label>
</p>
<p class="col s12">
<input type="checkbox" class="blue_check" onclick="editor.setOption('vScrollBarAlwaysVisible', !editor.getOptions().vScrollBarAlwaysVisible)" id="vScrollBarAlwaysVisible" />
<Label for="vScrollBarAlwaysVisible">V Scroll Bar Always Visible</label>
</p>
<p class="col s12">
<input type="checkbox" class="blue_check" onclick="editor.setOption('wrapBehavioursEnabled', !editor.getOptions().wrapBehavioursEnabled)" id="wrapBehavioursEnabled" />
<Label for="wrapBehavioursEnabled">Wrap Behaviours Enabled</label>
</p>
<p class="col s12">
<input type="checkbox" class="blue_check" onclick="editor.getSession().setUseWrapMode(!editor.getSession().getUseWrapMode());if(editor.getSession().getUseWrapMode()){document.getElementById('wrap_limit').focus();document.getElementById('wrap_limit').onchange();}" id="wrap" />
<Label for="wrap">Wrap Mode</label>
</p>
<div class="input-field col s12">
<input id="wrap_limit" type="number" onchange="editor.setOption('wrap', parseInt(this.value))" min="1" value="80">
<label class="active" for="wrap_limit">Wrap Limit</label>
</div> <a class="waves-effect waves-light btn light-blue" onclick="save_ace_settings()">Save Settings Locally</a>
<p class="center col s12"> Ace Editor 1.4.2 </p>
</div>
</ul>
</div>
</main>
<input type="hidden" id="fb_currentfile" value="" />
<!-- Scripts -->
<script src="https://code.jquery.com/jquery-3.3.1.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/materialize/0.100.2/js/materialize.min.js"></script>
<script>
function ws_connect() {
function msg(str) {
document.getElementById("ws_events").value = str + "\n\n" + document.getElementById("ws_events").value;
$('#ws_events').trigger('autoresize');
}
try {
ws = new WebSocket(document.getElementById("ws_uri").value);
ws.addEventListener("open", function(event) {
if (document.getElementById("ws_password").value.split(".").length == 3) {
var auth = {
type: "auth",
access_token: document.getElementById("ws_password").value
};
}
else {
var auth = {
type: "auth",
api_password: document.getElementById("ws_password").value
};
}
var data = {
id: 1,
type: "subscribe_events"
};
if (document.getElementById("ws_password").value) {
ws.send(JSON.stringify(auth));
}
ws.send(JSON.stringify(data));
});
ws.onmessage = function(event) {
msg(event.data);
}
ws.onclose = function() {
msg('Socket closed');
document.getElementById('ws_b_c').classList.remove('disabled');
document.getElementById('ws_b_d').classList.add('disabled');
};
ws.onopen = function() {
msg('Socket connected');
document.getElementById('ws_b_c').classList.add('disabled');
document.getElementById('ws_b_d').classList.remove('disabled');
};
}
catch(err) {
console.log("Error: " + err.message);
}
}
function ws_disconnect() {
try {
ws.close();
}
catch(err) {
console.log("Error: " + err.message);
}
}
</script>
<script type="text/javascript">
var init_loadfile = $loadfile;
var global_current_filepath = null;
var global_current_filename = null;
function toggle_hass_panels() {
if (document.getElementById("hass_menu_l").style.display == "none") {
document.getElementById("hass_menu_l").style.display = "";
document.getElementById("editor").classList.remove("l12");
document.getElementById("editor").classList.add("l9");
document.getElementById("filename_row").classList.remove("l12");
document.getElementById("filename_row").classList.add("l9");
}
else {
document.getElementById("hass_menu_l").style.display = "none";
document.getElementById("editor").classList.remove("l9");
document.getElementById("editor").classList.add("l12");
document.getElementById("filename_row").classList.remove("l9");
document.getElementById("filename_row").classList.add("l12");
}
if (document.getElementById("hass_menu_s").style.display == "none") {
document.getElementById("hass_menu_s").style.display = "";
document.getElementById("editor").classList.remove("l12");
document.getElementById("editor").classList.add("l9");
document.getElementById("filename_row").classList.remove("l12");
document.getElementById("filename_row").classList.add("l9");
}
else {
document.getElementById("hass_menu_s").style.display = "none";
document.getElementById("editor").classList.remove("l9");
document.getElementById("editor").classList.add("l12");
document.getElementById("filename_row").classList.remove("l9");
document.getElementById("filename_row").classList.add("l12");
}
}
function got_focus_or_visibility() {
if (global_current_filename && global_current_filepath) {
// The globals are set, set the localStorage to those values
var current_file = {current_filepath: global_current_filepath,
current_filename: global_current_filename}
localStorage.setItem('current_file', JSON.stringify(current_file));
}
else {
// This tab had no prior file opened, clearing from localStorage
localStorage.removeItem('current_file');
}
}
window.onfocus = function() {
got_focus_or_visibility();
}
//window.onblur = function() {
// console.log("lost focus");
//}
// Got this from here: https://developer.mozilla.org/en-US/docs/Web/API/Page_Visibility_API
// Set the name of the hidden property and the change event for visibility
var hidden, visibilityChange;
if (typeof document.hidden !== "undefined") { // Opera 12.10 and Firefox 18 and later support
hidden = "hidden";
visibilityChange = "visibilitychange";
}
else if (typeof document.msHidden !== "undefined") {
hidden = "msHidden";
visibilityChange = "msvisibilitychange";
}
else if (typeof document.webkitHidden !== "undefined") {
hidden = "webkitHidden";
visibilityChange = "webkitvisibilitychange";
}
function handleVisibilityChange() {
if (document[hidden]) {
// We're doing nothing when the tab gets out of vision
}
else {
// We're doing this if the tab becomes visible
got_focus_or_visibility();
}
}
// Warn if the browser doesn't support addEventListener or the Page Visibility API
if (typeof document.addEventListener === "undefined" || typeof document.hidden === "undefined") {
console.log("This requires a browser, such as Google Chrome or Firefox, that supports the Page Visibility API.");
}
else {
// Handle page visibility change
document.addEventListener(visibilityChange, handleVisibilityChange, false);
}
$(document).keydown(function(e) {
if ((e.key == 's' || e.key == 'S' ) && (e.ctrlKey || e.metaKey)) {
e.preventDefault();
save_check();
return false;
}
return true;
});
$(document).ready(function () {
$('select').material_select();
$('.modal').modal();
$('ul.tabs').tabs();
$('.collapsible').collapsible({
onOpen: function(el) {
$('#branch_tab').click();
},
});
$('.dropdown-button').dropdown({
inDuration: 300,
outDuration: 225,
constrainWidth: false,
hover: false,
gutter: 0,
belowOrigin: true,
alignment: 'right',
stopPropagation: false
});
$('.files-collapse').sideNav({
menuWidth: 320,
edge: 'left',
closeOnClick: false,
draggable: true
});
$('.ace_settings-collapse').sideNav({
menuWidth: 300,
edge: 'right',
closeOnClick: true,
draggable: false
});
// This fixes the dead spaces when trying to close the file browser
$(document).on('click', '.drag-target', function(){$('.button-collapse').sideNav('hide');})
listdir('.');
document.getElementById('savePrompt').checked = get_save_prompt();
document.getElementById('hideDetails').checked = get_hide_filedetails();
var entities_search = new Object();
if (states_list) {
for (var i = 0; i < states_list.length; i++) {
entities_search[states_list[i].attributes.friendly_name + ' (' + states_list[i].entity_id + ')'] = null;
}
}
$('#entities-search').autocomplete({
data: entities_search,
limit: 40,
onAutocomplete: function(val) {
insert(val.split("(")[1].split(")")[0]);
},
minLength: 1,
});
$('#entities-search_side').autocomplete({
data: entities_search,
limit: 40,
onAutocomplete: function(val) {
insert(val.split("(")[1].split(")")[0]);
},
minLength: 1,
});
$standalone
});
</script>
<script type="text/javascript">
document.addEventListener("DOMContentLoaded", function() {
$('.preloader-background').delay(800).fadeOut('slow');
$('.preloader-wrapper').delay(800).fadeOut('slow');
if (init_loadfile) {
init_loadfile_name = init_loadfile.split('/').pop();
loadfile(init_loadfile, init_loadfile_name);
}
else {
if (!localStorage.getItem("new_tab")) {
var old_file = localStorage.getItem("current_file");
if (old_file) {
old_file = JSON.parse(old_file);
loadfile(old_file.current_filepath, old_file.current_filename);
}
}
else {
localStorage.removeItem("current_file");
}
localStorage.removeItem("new_tab");
}
});
</script>
<script>
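// Map file extensions to Ace editor modes so files open with matching syntax highlighting.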
var modemapping = new Object();
modemapping['c'] = 'ace/mode/c_cpp';
modemapping['cpp'] = 'ace/mode/c_cpp';
modemapping['css'] = 'ace/mode/css';
modemapping['diff'] = 'ace/mode/diff'; // used by gitdiff() below
modemapping['gitignore'] = 'ace/mode/gitignore';
modemapping['htm'] = 'ace/mode/html';
modemapping['html'] = 'ace/mode/html';
modemapping['js'] = 'ace/mode/javascript';
modemapping['json'] = 'ace/mode/json';
modemapping['php'] = 'ace/mode/php';
modemapping['py'] = 'ace/mode/python';
modemapping['sh'] = 'ace/mode/sh';
modemapping['sql'] = 'ace/mode/sql';
modemapping['txt'] = 'ace/mode/text';
modemapping['xml'] = 'ace/mode/xml';
modemapping['yaml'] = 'ace/mode/yaml';
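// Sort the <option> elements of a <select> alphabetically (case-insensitive) in place.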
function sort_select(id) {
var options = $('#' + id + ' option');
var arr = options.map(function (_, o) {
return {
t: $(o).text(), v: o.value
};
}).get();
arr.sort(function (o1, o2) {
var t1 = o1.t.toLowerCase(), t2 = o2.t.toLowerCase();
return t1 > t2 ? 1 : t1 < t2 ? -1 : 0;
});
options.each(function (i, o) {
o.value = arr[i].v;
$(o).text(arr[i].t);
});
}
var separator = '$separator';
var services_list = $services;
var events_list = $events;
var states_list = $states;
if (events_list) {
var events = document.getElementById("events");
for (var i = 0; i < events_list.length; i++) {
var option = document.createElement("option");
option.value = events_list[i].event;
option.text = events_list[i].event;
events.add(option);
}
var events = document.getElementById("events_side");
for (var i = 0; i < events_list.length; i++) {
var option = document.createElement("option");
option.value = events_list[i].event;
option.text = events_list[i].event;
events.add(option);
}
sort_select('events');
sort_select('events_side');
}
if (states_list) {
var entities = document.getElementById("entities");
for (var i = 0; i < states_list.length; i++) {
var option = document.createElement("option");
option.value = states_list[i].entity_id;
option.text = states_list[i].attributes.friendly_name + ' (' + states_list[i].entity_id + ')';
entities.add(option);
}
var entities = document.getElementById("entities_side");
for (var i = 0; i < states_list.length; i++) {
var option = document.createElement("option");
option.value = states_list[i].entity_id;
option.text = states_list[i].attributes.friendly_name + ' (' + states_list[i].entity_id + ')';
entities.add(option);
}
sort_select('entities');
sort_select('entities_side');
}
if (services_list) {
var services = document.getElementById("services");
for (var i = 0; i < services_list.length; i++) {
for (var k in services_list[i].services) {
var option = document.createElement("option");
option.value = services_list[i].domain + '.' + k;
option.text = services_list[i].domain + '.' + k;
services.add(option);
}
}
var services = document.getElementById("services_side");
for (var i = 0; i < services_list.length; i++) {
for (var k in services_list[i].services) {
var option = document.createElement("option");
option.value = services_list[i].domain + '.' + k;
option.text = services_list[i].domain + '.' + k;
services.add(option);
}
}
sort_select('services');
sort_select('services_side');
}
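// Fetch a directory listing from the backend and render it into the file browser side-nav.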
function listdir(path) {
$.get(encodeURI("api/listdir?path=" + path), function(data) {
if (!data.error) {
renderpath(data);
}
else {
console.log("Permission denied.");
}
});
document.getElementById("slide-out").scrollTop = 0;
}
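// Build one file-browser entry: icon by file type, git status coloring, modification stats and a per-item dropdown menu.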
function renderitem(itemdata, index) {
var li = document.createElement('li');
li.classList.add("collection-item", "fbicon_pad", "col", "s12", "no-padding", "white");
var item = document.createElement('a');
item.classList.add("waves-effect", "col", "s10", "fbicon_pad");
var iicon = document.createElement('i');
iicon.classList.add("material-icons", "fbmenuicon_pad");
var stats = document.createElement('span');
var date = new Date(itemdata.modified * 1000);
stats.classList.add('stats');
if (itemdata.type == 'dir') {
iicon.innerHTML = 'folder';
item.setAttribute("onclick", "listdir('" + encodeURI(itemdata.fullpath) + "')");
stats.innerHTML = "Mod.: " + date.toUTCString();
}
else {
var nameparts = itemdata.name.split('.');
var extension = nameparts[nameparts.length - 1];
if (['c', 'cpp', 'css', 'htm', 'html', 'js', 'json', 'php', 'py', 'sh', 'sql', 'xml', 'yaml'].indexOf(extension.toLocaleLowerCase()) > -1 ) {
iicon.classList.add('mdi', 'mdi-file-xml');
}
else if (['txt', 'doc', 'docx'].indexOf(extension.toLocaleLowerCase()) > -1 ) {
iicon.classList.add('mdi', 'mdi-file-document');
}
else if (['bmp', 'gif', 'jpg', 'jpeg', 'png', 'tif', 'webp'].indexOf(extension.toLocaleLowerCase()) > -1 ) {
iicon.classList.add('mdi', 'mdi-file-image');
}
else if (['mp3', 'ogg', 'wav'].indexOf(extension.toLocaleLowerCase()) > -1 ) {
iicon.classList.add('mdi', 'mdi-file-music');
}
else if (['avi', 'flv', 'mkv', 'mp4', 'mpg', 'mpeg', 'webm'].indexOf(extension.toLocaleLowerCase()) > -1 ) {
iicon.classList.add('mdi', 'mdi-file-video');
}
else if (['pdf'].indexOf(extension.toLocaleLowerCase()) > -1 ) {
iicon.classList.add('mdi', 'mdi-file-pdf');
}
else {
iicon.classList.add('mdi', 'mdi-file');
}
item.setAttribute("onclick", "loadfile('" + encodeURI(itemdata.fullpath) + "', '" + itemdata.name + "')");
stats.innerHTML = "Mod.: " + date.toUTCString() + " Size: " + (itemdata.size/1024).toFixed(1) + " KiB";
}
item.appendChild(iicon);
var itext = document.createElement('div');
itext.innerHTML = itemdata.name;
itext.classList.add("filename");
var hasgitadd = false;
if (itemdata.gitstatus) {
if (itemdata.gittracked == 'untracked') {
itext.classList.add('text_darkred');
hasgitadd = true;
}
else {
if(itemdata.gitstatus == 'unstaged') {
itext.classList.add('text_darkred');
hasgitadd = true;
}
else if (itemdata.gitstatus == 'staged') {
itext.classList.add('text_darkgreen');
}
}
}
item.appendChild(itext);
if (!get_hide_filedetails()) {
item.appendChild(stats);
}
var dropdown = document.createElement('ul');
dropdown.id = 'fb_dropdown_' + index;
dropdown.classList.add('dropdown-content');
dropdown.classList.add("z-depth-4");
// Download button
var dd_download = document.createElement('li');
var dd_download_a = document.createElement('a');
dd_download_a.classList.add("waves-effect", "fb_dd");
dd_download_a.setAttribute('onclick', "download_file('" + encodeURI(itemdata.fullpath) + "')");
dd_download_a.innerHTML = "Download";
dd_download.appendChild(dd_download_a);
dropdown.appendChild(dd_download);
// Delete button
var dd_delete = document.createElement('li');
dd_delete.classList.add("waves-effect", "fb_dd");
var dd_delete_a = document.createElement('a');
dd_delete_a.setAttribute('href', "#modal_delete");
dd_delete_a.classList.add("modal-trigger");
dd_delete_a.innerHTML = "Delete";
dd_delete.appendChild(dd_delete_a);
dropdown.appendChild(dd_delete);
if (itemdata.gitstatus) {
if (hasgitadd) {
var divider = document.createElement('li');
divider.classList.add('divider');
dropdown.appendChild(divider);
// git add button
var dd_gitadd = document.createElement('li');
var dd_gitadd_a = document.createElement('a');
dd_gitadd_a.classList.add('waves-effect', 'fb_dd', 'modal-trigger');
dd_gitadd_a.setAttribute('href', "#modal_gitadd");
dd_gitadd_a.innerHTML = "git add";
dd_gitadd.appendChild(dd_gitadd_a);
dropdown.appendChild(dd_gitadd);
// git diff button
var dd_gitdiff = document.createElement('li');
var dd_gitdiff_a = document.createElement('a');
dd_gitdiff_a.classList.add('waves-effect', 'fb_dd', 'modal-trigger');
dd_gitdiff_a.setAttribute('onclick', "gitdiff()");
dd_gitdiff_a.innerHTML = "git diff";
dd_gitdiff.appendChild(dd_gitdiff_a);
dropdown.appendChild(dd_gitdiff);
}
}
var menubutton = document.createElement('a');
menubutton.classList.add("fbmenubutton", "waves-effect", "dropdown-button", "col", "s2", "fbicon_pad", "material-icons", "right");
menubutton.setAttribute('data-activates', dropdown.id);
menubutton.setAttribute('data-alignment', 'right');
menubutton.innerHTML = 'more_vert';
menubutton.setAttribute('onclick', "document.getElementById('fb_currentfile').value='" + encodeURI(itemdata.fullpath) + "';$('span.fb_currentfile').html('" + itemdata.name + "')");
li.appendChild(item);
li.appendChild(menubutton);
li.setAttribute("title", itemdata.name)
li.appendChild(dropdown);
return li;
}
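// Render a directory listing: reset the element list, update the path header and the git branch selector.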
function renderpath(dirdata) {
var newbranchbutton = document.getElementById('newbranchbutton');
newbranchbutton.style.cssText = "display: none !important"
var fbelements = document.getElementById("fbelements");
while (fbelements.firstChild) {
fbelements.removeChild(fbelements.firstChild);
}
var fbheader = document.getElementById('fbheader');
fbheader.innerHTML = dirdata.abspath;
var branchselector = document.getElementById('branchselector');
var fbheaderbranch = document.getElementById('fbheaderbranch');
var branchlist = document.getElementById('branchlist');
while (branchlist.firstChild) {
branchlist.removeChild(branchlist.firstChild);
}
if (dirdata.activebranch) {
newbranchbutton.style.display = "inline-block";
fbheaderbranch.innerHTML = dirdata.activebranch;
fbheaderbranch.style.display = "inline";
branchselector.style.display = "block";
for (var i = 0; i < dirdata.branches.length; i++) {
var branch = document.createElement('li');
var link = document.createElement('a');
link.classList.add("branch_select", "truncate");
link.innerHTML = dirdata.branches[i];
link.href = '#';
link.setAttribute('onclick', 'checkout("' + dirdata.branches[i] + '");collapseAll()')
branch.appendChild(link);
if (dirdata.branches[i] == dirdata.activebranch) {
link.classList.add("active", "grey", "darken-1");
}
else {
link.classList.add("grey-text", "text-darken-3", "branch_hover", "waves-effect", "grey", "lighten-4");
}
branchlist.appendChild(branch);
}
}
else {
fbheaderbranch.innerHTML = "";
fbheaderbranch.style.display = "";
branchselector.style.display = "none";
}
var uplink = document.getElementById('uplink');
uplink.setAttribute("onclick", "listdir('" + encodeURI(dirdata.parent) + "')")
for (var i = 0; i < dirdata.content.length; i++) {
fbelements.appendChild(renderitem(dirdata.content[i], i));
}
$(".dropdown-button").dropdown();
}
function collapseAll() {
$(".collapsible-header").removeClass(function() { return "active"; });
$(".collapsible").collapsible({accordion: true});
$(".collapsible").collapsible({accordion: false});
}
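// Load a file into the editor, or open binary/image types in a new browser tab.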
function loadfile(filepath, filenameonly) {
if ($('.markdirty.red').length) {
$('#modal_markdirty').modal('open');
}
else {
url = "api/file?filename=" + filepath;
fileparts = filepath.split('.');
extension = fileparts[fileparts.length -1];
raw_open = [
"jpg",
"jpeg",
"png",
"svg",
"bmp",
"webp",
"gif"
]
if (raw_open.indexOf(extension) > -1) {
window.open(url, '_blank');
}
else {
$.get(url, function(data) {
if (modemapping.hasOwnProperty(extension)) {
editor.setOption('mode', modemapping[extension]);
}
else {
editor.setOption('mode', "ace/mode/text");
}
editor.getSession().setValue(data, -1);
document.getElementById('currentfile').value = decodeURI(filepath);
editor.session.getUndoManager().markClean();
$('.markdirty').each(function(i, o){o.classList.remove('red');});
$('.hidesave').css('opacity', 0);
document.title = filenameonly + " - HASS Configurator";
global_current_filepath = filepath;
global_current_filename = filenameonly;
var current_file = {current_filepath: global_current_filepath,
current_filename: global_current_filename}
localStorage.setItem('current_file', JSON.stringify(current_file));
check_lint();
});
}
}
}
function closefile() {
document.getElementById('currentfile').value='';
editor.getSession().setValue('');
$('.markdirty').each(function(i, o) {
o.classList.remove('red');
});
localStorage.removeItem('current_file');
global_current_filepath = null;
global_current_filename = null;
document.title = 'HASS Configurator';
}
function check_config() {
$.get("api/check_config", function (resp) {
if (resp.length == 0) {
var $toastContent = $("<div><pre>Configuration seems valid.</pre></div>");
Materialize.toast($toastContent, 2000);
}
else {
var $toastContent = $("<div><pre>" + resp[0].state + "</pre></div>");
Materialize.toast($toastContent, 2000);
}
});
}
function reload_automations() {
$.get("api/reload_automations", function (resp) {
var $toastContent = $("<div>Automations reloaded</div>");
Materialize.toast($toastContent, 2000);
});
}
function reload_scripts() {
$.get("api/reload_scripts", function (resp) {
var $toastContent = $("<div>Scripts reloaded</div>");
Materialize.toast($toastContent, 2000);
});
}
function reload_groups() {
$.get("api/reload_groups", function (resp) {
var $toastContent = $("<div><pre>Groups reloaded</pre></div>");
Materialize.toast($toastContent, 2000);
});
}
function reload_core() {
$.get("api/reload_core", function (resp) {
var $toastContent = $("<div><pre>Core reloaded</pre></div>");
Materialize.toast($toastContent, 2000);
});
}
function restart() {
$.get("api/restart", function (resp) {
if (resp.length == 0) {
var $toastContent = $("<div><pre>Restarting HASS</pre></div>");
Materialize.toast($toastContent, 2000);
}
else {
var $toastContent = $("<div><pre>" + resp + "</pre></div>");
Materialize.toast($toastContent, 2000);
}
});
}
function get_netstat() {
$.get("api/netstat", function (resp) {
if (resp.hasOwnProperty("allowed_networks")) {
var allowed_list = document.getElementById("allowed_networks");
while (allowed_list.firstChild) {
allowed_list.removeChild(allowed_list.firstChild);
}
var header = document.createElement("li");
header.classList.add("collection-header");
var header_h4 = document.createElement("h4");
header_h4.innerText = "Allowed networks";
header_h4.classList.add("grey-text");
header_h4.classList.add("text-darken-3");
header.appendChild(header_h4);
allowed_list.appendChild(header);
for (var i = 0; i < resp.allowed_networks.length; i++) {
var li = document.createElement("li");
li.classList.add("collection-item");
var li_div = document.createElement("div");
var address = document.createElement("span");
address.innerText = resp.allowed_networks[i];
li_div.appendChild(address);
var li_a = document.createElement("a");
li_a.classList.add("light-blue-text");
li_a.href = "#!";
li_a.classList.add("secondary-content");
var li_a_i = document.createElement("i");
li_a_i.classList.add("mdi");
li_a_i.classList.add("mdi-delete");
li_a_i.innerText = "Remove";
li_a.appendChild(li_a_i);
li_a.setAttribute("onclick", "helper_a_net_remove('" + resp.allowed_networks[i] + "')");
li_div.appendChild(li_a);
li.appendChild(li_div);
allowed_list.appendChild(li);
}
}
if (resp.hasOwnProperty("banned_ips")) {
var banlist = document.getElementById("banned_ips");
while (banlist.firstChild) {
banlist.removeChild(banlist.firstChild);
}
var header = document.createElement("li");
header.classList.add("collection-header");
var header_h4 = document.createElement("h4");
header_h4.innerText = "Banned IPs";
header_h4.classList.add("grey-text");
header_h4.classList.add("text-darken-3");
header.appendChild(header_h4);
banlist.appendChild(header);
for (var i = 0; i < resp.banned_ips.length; i++) {
var li = document.createElement("li");
li.classList.add("collection-item");
var li_div = document.createElement("div");
var address = document.createElement("span");
address.innerText = resp.banned_ips[i];
li_div.appendChild(address);
var li_a = document.createElement("a");
li_a.classList.add("light-blue-text");
li_a.href = "#!";
li_a.classList.add("secondary-content");
var li_a_i = document.createElement("i");
li_a_i.classList.add("mdi");
li_a_i.classList.add("mdi-delete");
li_a_i.innerText = "Unban";
li_a.appendChild(li_a_i);
li_a.setAttribute("onclick", "helper_banned_unban('" + resp.banned_ips[i] + "')");
li_div.appendChild(li_a);
li.appendChild(li_div);
banlist.appendChild(li);
}
}
});
}
function helper_a_net_remove(network) {
document.getElementById("removenet").innerText = network;
$('#modal_netstat').modal('close');
$('#modal_a_net_remove').modal('open');
}
function a_net_remove() {
var network = document.getElementById("removenet").innerText
data = new Object();
data.network = network;
data.method = 'remove';
$.post("api/allowed_networks", data).done(function(resp) {
if (resp.error) {
var $toastContent = $("<div><pre>" + resp.message + "</pre></div>");
Materialize.toast($toastContent, 5000);
}
else {
var $toastContent = $("<div><pre>" + resp.message + "</pre></div>");
Materialize.toast($toastContent, 2000);
}
});
}
function helper_a_net_add() {
document.getElementById("addnet").innerText = document.getElementById("add_net_ip").value;
document.getElementById("add_net_ip").value = "";
$('#modal_netstat').modal('close');
$('#modal_a_net_add').modal('open');
}
function a_net_add() {
var network = document.getElementById("addnet").innerText
data = new Object();
data.network = network;
data.method = 'add';
$.post("api/allowed_networks", data).done(function(resp) {
if (resp.error) {
var $toastContent = $("<div><pre>" + resp.message + "</pre></div>");
Materialize.toast($toastContent, 5000);
}
else {
var $toastContent = $("<div><pre>" + resp.message + "</pre></div>");
Materialize.toast($toastContent, 2000);
}
});
}
function helper_banned_unban(ip) {
document.getElementById("unbanip").innerText = ip;
$('#modal_netstat').modal('close');
$('#modal_unban').modal('open');
}
function banned_unban() {
var ip = document.getElementById("unbanip").innerText
data = new Object();
data.ip = ip;
data.method = 'unban';
$.post("api/banned_ips", data).done(function(resp) {
if (resp.error) {
var $toastContent = $("<div><pre>" + resp.message + "</pre></div>");
Materialize.toast($toastContent, 5000);
}
else {
var $toastContent = $("<div><pre>" + resp.message + "</pre></div>");
Materialize.toast($toastContent, 2000);
}
});
}
function helper_banned_ban() {
document.getElementById("banip").innerText = document.getElementById("add_banned_ip").value;
document.getElementById("add_banned_ip").value = "";
$('#modal_netstat').modal('close');
$('#modal_ban').modal('open');
}
function banned_ban() {
var ip = document.getElementById("banip").innerText
data = new Object();
data.ip = ip;
data.method = 'ban';
$.post("api/banned_ips", data).done(function(resp) {
if (resp.error) {
var $toastContent = $("<div><pre>" + resp.message + "</pre></div>");
Materialize.toast($toastContent, 5000);
}
else {
var $toastContent = $("<div><pre>" + resp.message + "</pre></div>");
Materialize.toast($toastContent, 2000);
}
});
}
function save() {
var filepath = document.getElementById('currentfile').value;
if (filepath.length > 0) {
data = new Object();
data.filename = filepath;
data.text = editor.getValue()
$.post("api/save", data).done(function(resp) {
if (resp.error) {
var $toastContent = $("<div><pre>" + resp.message + "\n" + resp.path + "</pre></div>");
Materialize.toast($toastContent, 5000);
}
else {
var $toastContent = $("<div><pre>" + resp.message + "</pre></div>");
Materialize.toast($toastContent, 2000);
listdir(document.getElementById('fbheader').innerHTML);
$('.markdirty').each(function(i, o){o.classList.remove('red');});
$('.hidesave').css('opacity', 0);
editor.session.getUndoManager().markClean();
}
});
}
else {
Materialize.toast('Error: Please provide a filename', 5000);
}
}
function save_check() {
var filepath = document.getElementById('currentfile').value;
if (filepath.length > 0) {
if (get_save_prompt()) {
$('#modal_save').modal('open');
}
else {
save();
}
}
else {
Materialize.toast('Error: Please provide a filename', 5000);
$(".pathtip").bind("animationend webkitAnimationEnd oAnimationEnd MSAnimationEnd", function(){
$(this).removeClass("pathtip_color");
}).addClass("pathtip_color");
}
}
function download_file(filepath) {
window.open("api/download?filename="+encodeURI(filepath));
}
function delete_file() {
var path = document.getElementById('currentfile').value;
if (path.length > 0) {
data = new Object();
data.path= path;
$.post("api/delete", data).done(function(resp) {
if (resp.error) {
var $toastContent = $("<div><pre>" + resp.message + "\n" + resp.path + "</pre></div>");
Materialize.toast($toastContent, 5000);
}
else {
var $toastContent = $("<div><pre>" + resp.message + "</pre></div>");
Materialize.toast($toastContent, 2000);
listdir(document.getElementById('fbheader').innerHTML)
document.getElementById('currentfile').value='';
editor.setValue('');
}
});
}
}
function exec_command() {
var command = document.getElementById('commandline').value;
if (command.length > 0) {
data = new Object();
data.command = command;
data.timeout = 15;
$.post("api/exec_command", data).done(function(resp) {
if (resp.error) {
var $toastContent = $("<div><pre>" + resp.message + "</pre></div>");
Materialize.toast($toastContent, 5000);
}
else {
var history = document.getElementById('command_history');
history.innerText += resp.message + ': ' + resp.returncode + "\n";
if (resp.stdout) {
history.innerText += resp.stdout;
}
if (resp.stderr) {
history.innerText += resp.stderr;
}
}
});
}
}
function delete_element() {
var path = document.getElementById('fb_currentfile').value;
if (path.length > 0) {
data = new Object();
data.path= path;
$.post("api/delete", data).done(function(resp) {
if (resp.error) {
var $toastContent = $("<div><pre>" + resp.message + "\n" + resp.path + "</pre></div>");
Materialize.toast($toastContent, 5000);
}
else {
var $toastContent = $("<div><pre>" + resp.message + "</pre></div>");
Materialize.toast($toastContent, 2000);
listdir(document.getElementById('fbheader').innerHTML);
if (document.getElementById('currentfile').value == path) {
document.getElementById('currentfile').value='';
editor.setValue('');
}
}
});
}
}
function gitadd() {
var path = document.getElementById('fb_currentfile').value;
if (path.length > 0) {
data = new Object();
data.path = path;
$.post("api/gitadd", data).done(function(resp) {
if (resp.error) {
var $toastContent = $("<div><pre>" + resp.message + "\n" + resp.path + "</pre></div>");
Materialize.toast($toastContent, 5000);
}
else {
var $toastContent = $("<div><pre>" + resp.message + "</pre></div>");
Materialize.toast($toastContent, 2000);
listdir(document.getElementById('fbheader').innerHTML);
}
});
}
}
function gitdiff() {
var path = document.getElementById('fb_currentfile').value;
closefile();
if (path.length > 0) {
data = new Object();
data.path = path;
$.post("api/gitdiff", data).done(function(resp) {
if (resp.error) {
var $toastContent = $("<div><pre>" + resp.message + "\n" + resp.path + "</pre></div>");
Materialize.toast($toastContent, 5000);
}
else {
editor.setOption('mode', modemapping['diff']);
editor.getSession().setValue(resp.message, -1);
editor.session.getUndoManager().markClean();
}
});
}
}
function gitinit() {
var path = document.getElementById("fbheader").innerHTML;
if (path.length > 0) {
data = new Object();
data.path = path;
$.post("api/init", data).done(function(resp) {
if (resp.error) {
var $toastContent = $("<div><pre>" + resp.message + "\n" + resp.path + "</pre></div>");
Materialize.toast($toastContent, 5000);
}
else {
var $toastContent = $("<div><pre>" + resp.message + "</pre></div>");
Materialize.toast($toastContent, 2000);
listdir(document.getElementById('fbheader').innerHTML);
}
});
}
}
function commit(message) {
var path = document.getElementById("fbheader").innerHTML;
if (path.length > 0) {
data = new Object();
data.path = path;
data.message = message;
$.post("api/commit", data).done(function(resp) {
if (resp.error) {
var $toastContent = $("<div><pre>" + resp.message + "\n" + resp.path + "</pre></div>");
Materialize.toast($toastContent, 5000);
}
else {
var $toastContent = $("<div><pre>" + resp.message + "</pre></div>");
Materialize.toast($toastContent, 2000);
listdir(document.getElementById('fbheader').innerHTML);
document.getElementById('commitmessage').value = "";
}
});
}
}
function gitpush() {
var path = document.getElementById("fbheader").innerHTML;
if (path.length > 0) {
data = new Object();
data.path = path;
$.post("api/push", data).done(function(resp) {
if (resp.error) {
var $toastContent = $("<div><pre>" + resp.message + "\n" + resp.path + "</pre></div>");
Materialize.toast($toastContent, 5000);
}
else {
var $toastContent = $("<div><pre>" + resp.message + "</pre></div>");
Materialize.toast($toastContent, 2000);
listdir(document.getElementById('fbheader').innerHTML);
}
});
}
}
function gitstash() {
var path = document.getElementById("fbheader").innerHTML;
if (path.length > 0) {
data = new Object();
data.path = path;
$.post("api/stash", data).done(function(resp) {
if (resp.error) {
var $toastContent = $("<div><pre>" + resp.message + "\n" + resp.path + "</pre></div>");
Materialize.toast($toastContent, 5000);
}
else {
var $toastContent = $("<div><pre>" + resp.message + "</pre></div>");
Materialize.toast($toastContent, 5000);
listdir(document.getElementById('fbheader').innerHTML);
}
});
}
}
function checkout(branch) {
var path = document.getElementById("fbheader").innerHTML;
if (path.length > 0) {
data = new Object();
data.path = path;
data.branch = branch;
$.post("api/checkout", data).done(function(resp) {
if (resp.error) {
var $toastContent = $("<div><pre>" + resp.message + "\n" + resp.path + "</pre></div>");
Materialize.toast($toastContent, 5000);
}
else {
var $toastContent = $("<div><pre>" + resp.message + "</pre></div>");
Materialize.toast($toastContent, 2000);
listdir(document.getElementById('fbheader').innerHTML);
}
});
}
}
function newbranch(branch) {
var path = document.getElementById("fbheader").innerHTML;
if (path.length > 0) {
data = new Object();
data.path = path;
data.branch = branch;
$.post("api/newbranch", data).done(function(resp) {
if (resp.error) {
var $toastContent = $("<div><pre>" + resp.message + "\n" + resp.path + "</pre></div>");
Materialize.toast($toastContent, 5000);
}
else {
var $toastContent = $("<div><pre>" + resp.message + "</pre></div>");
Materialize.toast($toastContent, 2000);
listdir(document.getElementById('fbheader').innerHTML);
}
});
}
}
function newfolder(foldername) {
var path = document.getElementById('fbheader').innerHTML;
if (path.length > 0 && foldername.length > 0) {
data = new Object();
data.path = path;
data.name = foldername;
$.post("api/newfolder", data).done(function(resp) {
if (resp.error) {
var $toastContent = $("<div><pre>" + resp.message + "\n" + resp.path + "</pre></div>");
Materialize.toast($toastContent, 5000);
}
else {
var $toastContent = $("<div><pre>" + resp.message + "</pre></div>");
Materialize.toast($toastContent, 2000);
}
listdir(document.getElementById('fbheader').innerHTML);
document.getElementById('newfoldername').value = '';
});
}
}
function newfile(filename) {
var path = document.getElementById('fbheader').innerHTML;
if (path.length > 0 && filename.length > 0) {
data = new Object();
data.path = path;
data.name = filename;
$.post("api/newfile", data).done(function(resp) {
if (resp.error) {
var $toastContent = $("<div><pre>" + resp.message + "\n" + resp.path + "</pre></div>");
Materialize.toast($toastContent, 5000);
}
else {
var $toastContent = $("<div><pre>" + resp.message + "</pre></div>");
Materialize.toast($toastContent, 2000);
}
listdir(document.getElementById('fbheader').innerHTML);
document.getElementById('newfilename').value = '';
});
}
}
function upload() {
var file_data = $('#uploadfile').prop('files')[0];
var form_data = new FormData();
form_data.append('file', file_data);
form_data.append('path', document.getElementById('fbheader').innerHTML);
$.ajax({
url: 'api/upload',
dataType: 'json',
cache: false,
contentType: false,
processData: false,
data: form_data,
type: 'post',
success: function(resp){
if (resp.error) {
var $toastContent = $("<div><pre>Error: " + resp.message + "</pre></div>");
Materialize.toast($toastContent, 2000);
}
else {
var $toastContent = $("<div><pre>Upload succesful</pre></div>");
Materialize.toast($toastContent, 2000);
listdir(document.getElementById('fbheader').innerHTML);
document.getElementById('uploadform').reset();
}
}
});
}
</script>
<script>
ace.require("ace/ext/language_tools");
var editor = ace.edit("editor");
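// Toggle the "dirty" indicators whenever the undo manager's clean state changes.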
editor.on("input", function() {
if (editor.session.getUndoManager().isClean()) {
$('.markdirty').each(function(i, o){o.classList.remove('red');});
$('.hidesave').css('opacity', 0);
}
else {
$('.markdirty').each(function(i, o){o.classList.add('red');});
$('.hidesave').css('opacity', 1);
}
});
if (localStorage.hasOwnProperty("pochass")) {
editor.setOptions(JSON.parse(localStorage.pochass));
editor.setOptions({
enableBasicAutocompletion: true,
enableSnippets: true
})
editor.$blockScrolling = Infinity;
}
else {
editor.getSession().setMode("ace/mode/yaml");
editor.setOptions({
showInvisibles: true,
useSoftTabs: true,
displayIndentGuides: true,
highlightSelectedWord: true,
enableBasicAutocompletion: true,
enableSnippets: true
})
editor.$blockScrolling = Infinity;
}
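// UI preferences are persisted in localStorage as small JSON objects.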
function set_save_prompt(checked) {
localStorage.setItem('save_prompt', JSON.stringify({save_prompt: checked}));
}
function get_save_prompt() {
if (localStorage.getItem('save_prompt')) {
var save_prompt = JSON.parse(localStorage.getItem('save_prompt'));
return save_prompt.save_prompt;
}
return false;
}
function set_hide_filedetails(checked) {
localStorage.setItem('hide_filedetails', JSON.stringify({hide_filedetails: checked}));
}
function get_hide_filedetails() {
if (localStorage.getItem('hide_filedetails')) {
var hide_filedetails = JSON.parse(localStorage.getItem('hide_filedetails'));
return hide_filedetails.hide_filedetails;
}
return false;
}
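// Mirror the editor's current options into the settings side-nav widgets.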
function apply_settings() {
var options = editor.getOptions();
for (var key in options) {
if (options.hasOwnProperty(key)) {
var target = document.getElementById(key);
if (target) {
if (typeof(options[key]) == "boolean" && target.type === 'checkbox') {
target.checked = options[key];
target.setAttribute("checked", options[key]);
}
else if (typeof(options[key]) == "number" && target.type === 'number') {
target.value = options[key];
}
else if (typeof(options[key]) == "string" && target.tagName == 'SELECT') {
target.value = options[key];
}
}
}
}
}
apply_settings();
function save_ace_settings() {
localStorage.pochass = JSON.stringify(editor.getOptions())
Materialize.toast("Ace Settings Saved", 2000);
}
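// Insert text at the current cursor position, select the inserted range and refocus the editor.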
function insert(text) {
var pos = editor.selection.getCursor();
var end = editor.session.insert(pos, text);
editor.selection.setRange({
start: pos,
end: end
});
editor.focus();
}
var foldstatus = true;
function toggle_fold() {
if (foldstatus) {
editor.getSession().foldAll();
}
else {
editor.getSession().unfold();
}
foldstatus = !foldstatus;
}
</script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/js-yaml/3.12.1/js-yaml.js" type="text/javascript" charset="utf-8"></script>
<script type="text/javascript">
var lint_timeout;
var lint_status = $('#lint-status'); // speed optimization
var lint_error = "";
function check_lint() {
if (document.getElementById('currentfile').value.match(/\.yaml$/)) {
try {
var text = editor.getValue().replace(/!(include|secret|env_var)/g,".$1"); // hack because js-yaml does not like !include/!secret
jsyaml.safeLoad(text);
lint_status.text("check_circle");
lint_status.removeClass("cursor-pointer red-text grey-text");
lint_status.addClass("green-text");
lint_error = "";
} catch (err) {
lint_status.text("error");
lint_status.removeClass("green-text grey-text");
lint_status.addClass("cursor-pointer red-text");
lint_error = err.message;
}
} else {
lint_status.empty();
}
}
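// Debounce linting: schedule a re-check 500 ms after the last change and grey the status icon meanwhile.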
function queue_lint(e) {
if (document.getElementById('currentfile').value.match(/\.yaml$/)) {
clearTimeout(lint_timeout);
lint_timeout = setTimeout(check_lint, 500);
if (lint_status.text() != "cached") {
lint_status.text("cached");
lint_status.removeClass("cursor-pointer red-text green-text");
lint_status.addClass("grey-text");
}
} else {
lint_status.empty();
}
}
function show_lint_error() {
if(lint_error) {
$("#modal_lint textarea").val(lint_error);
$("#modal_lint").modal('open');
}
}
editor.on('change', queue_lint);
</script>
</body>
</html>""")
# pylint: disable=unused-argument
def signal_handler(sig, frame):
"""Handle signal to shut down server."""
global HTTPD
LOG.info("Got signal: %s. Shutting down server", str(sig))
HTTPD.server_close()
sys.exit(0)
def load_settings(args):
"""Load settings from file and environment."""
global LISTENIP, LISTENPORT, BASEPATH, SSL_CERTIFICATE, SSL_KEY, HASS_API, \
HASS_API_PASSWORD, CREDENTIALS, ALLOWED_NETWORKS, BANNED_IPS, BANLIMIT, \
DEV, IGNORE_PATTERN, DIRSFIRST, SESAME, VERIFY_HOSTNAME, ENFORCE_BASEPATH, \
ENV_PREFIX, NOTIFY_SERVICE, USERNAME, PASSWORD, SESAME_TOTP_SECRET, TOTP, \
GIT, REPO, PORT, IGNORE_SSL, HASS_WS_API, ALLOWED_DOMAINS
settings = {}
settingsfile = args.settings
if settingsfile:
try:
if os.path.isfile(settingsfile):
with open(settingsfile) as fptr:
settings = json.loads(fptr.read())
LOG.debug("Settings from file:")
LOG.debug(settings)
else:
LOG.warning("File not found: %s", settingsfile)
except Exception as err:
LOG.warning(err)
LOG.warning("Not loading settings from file")
ENV_PREFIX = settings.get('ENV_PREFIX', ENV_PREFIX)
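    # Example (hypothetical values): with ENV_PREFIX "HC_", the environment
    # variable HC_ALLOWED_NETWORKS="192.168.0.0/16,127.0.0.1" ends up as
    # settings["ALLOWED_NETWORKS"] = ["192.168.0.0/16", "127.0.0.1"], and
    # HC_DEV="true" becomes settings["DEV"] = True.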
for key, value in os.environ.items():
if key.startswith(ENV_PREFIX):
# Convert booleans
if value in ['true', 'false', 'True', 'False']:
value = value in ['true', 'True']
# Convert None / null
elif value in ['none', 'None', 'null']:
value = None
# Convert plain numbers
elif value.isnumeric():
value = int(value)
# Make lists out of comma separated values for list-settings
elif key[len(ENV_PREFIX):] in ["ALLOWED_NETWORKS", "BANNED_IPS", "IGNORE_PATTERN"]:
value = value.split(',')
settings[key[len(ENV_PREFIX):]] = value
LOG.debug("Settings after looking at environment:")
LOG.debug(settings)
if args.git:
GIT = args.git
else:
GIT = settings.get("GIT", GIT)
if GIT:
try:
# pylint: disable=redefined-outer-name
from git import Repo as REPO
except ImportError:
LOG.warning("Unable to import Git module")
if args.listen:
LISTENIP = args.listen
else:
LISTENIP = settings.get("LISTENIP", LISTENIP)
if args.port is not None:
PORT = args.port
else:
LISTENPORT = settings.get("LISTENPORT", None)
PORT = settings.get("PORT", PORT)
if LISTENPORT is not None:
PORT = LISTENPORT
if args.basepath:
BASEPATH = args.basepath
else:
BASEPATH = settings.get("BASEPATH", BASEPATH)
if args.enforce:
ENFORCE_BASEPATH = True
else:
ENFORCE_BASEPATH = settings.get("ENFORCE_BASEPATH", ENFORCE_BASEPATH)
SSL_CERTIFICATE = settings.get("SSL_CERTIFICATE", SSL_CERTIFICATE)
SSL_KEY = settings.get("SSL_KEY", SSL_KEY)
if args.standalone:
HASS_API = None
else:
HASS_API = settings.get("HASS_API", HASS_API)
HASS_WS_API = settings.get("HASS_WS_API", HASS_WS_API)
HASS_API_PASSWORD = settings.get("HASS_API_PASSWORD", HASS_API_PASSWORD)
CREDENTIALS = settings.get("CREDENTIALS", CREDENTIALS)
ALLOWED_NETWORKS = settings.get("ALLOWED_NETWORKS", ALLOWED_NETWORKS)
if ALLOWED_NETWORKS and not all(ALLOWED_NETWORKS):
LOG.warning("Invalid value for ALLOWED_NETWORKS. Using empty list.")
ALLOWED_NETWORKS = []
    # Iterate over a copy so that removing invalid entries is safe.
    for net in list(ALLOWED_NETWORKS):
try:
ipaddress.ip_network(net)
except Exception:
LOG.warning("Invalid network in ALLOWED_NETWORKS: %s", net)
ALLOWED_NETWORKS.remove(net)
ALLOWED_DOMAINS = settings.get("ALLOWED_DOMAINS", ALLOWED_DOMAINS)
if ALLOWED_DOMAINS and not all(ALLOWED_DOMAINS):
LOG.warning("Invalid value for ALLOWED_DOMAINS. Using empty list.")
ALLOWED_DOMAINS = []
BANNED_IPS = settings.get("BANNED_IPS", BANNED_IPS)
if BANNED_IPS and not all(BANNED_IPS):
LOG.warning("Invalid value for BANNED_IPS. Using empty list.")
BANNED_IPS = []
    # Iterate over a copy so that removing invalid entries is safe.
    for banned_ip in list(BANNED_IPS):
try:
ipaddress.ip_address(banned_ip)
except Exception:
LOG.warning("Invalid IP address in BANNED_IPS: %s", banned_ip)
BANNED_IPS.remove(banned_ip)
BANLIMIT = settings.get("BANLIMIT", BANLIMIT)
if args.dev:
DEV = True
else:
DEV = settings.get("DEV", DEV)
IGNORE_PATTERN = settings.get("IGNORE_PATTERN", IGNORE_PATTERN)
if IGNORE_PATTERN and not all(IGNORE_PATTERN):
LOG.warning("Invalid value for IGNORE_PATTERN. Using empty list.")
IGNORE_PATTERN = []
if args.dirsfirst:
DIRSFIRST = args.dirsfirst
else:
DIRSFIRST = settings.get("DIRSFIRST", DIRSFIRST)
SESAME = settings.get("SESAME", SESAME)
SESAME_TOTP_SECRET = settings.get("SESAME_TOTP_SECRET", SESAME_TOTP_SECRET)
VERIFY_HOSTNAME = settings.get("VERIFY_HOSTNAME", VERIFY_HOSTNAME)
NOTIFY_SERVICE = settings.get("NOTIFY_SERVICE", NOTIFY_SERVICE_DEFAULT)
IGNORE_SSL = settings.get("IGNORE_SSL", IGNORE_SSL)
if IGNORE_SSL:
# pylint: disable=protected-access
ssl._create_default_https_context = ssl._create_unverified_context
if args.username and args.password:
USERNAME = args.username
PASSWORD = args.password
else:
USERNAME = settings.get("USERNAME", USERNAME)
PASSWORD = settings.get("PASSWORD", PASSWORD)
PASSWORD = str(PASSWORD) if PASSWORD else None
if CREDENTIALS and (USERNAME is None or PASSWORD is None):
USERNAME = CREDENTIALS.split(":")[0]
PASSWORD = ":".join(CREDENTIALS.split(":")[1:])
if PASSWORD and PASSWORD.startswith("{sha256}"):
PASSWORD = PASSWORD.lower()
if SESAME_TOTP_SECRET:
try:
import pyotp
TOTP = pyotp.TOTP(SESAME_TOTP_SECRET)
except ImportError:
LOG.warning("Unable to import pyotp module")
except Exception as err:
LOG.warning("Unable to create TOTP object: %s", err)
def is_jwt(token):
"""Perform basic check if token is a JWT token."""
return len(token.split('.')) == 3
def is_safe_path(basedir, path, follow_symlinks=True):
"""Check path for malicious traversal."""
if basedir is None:
return True
if follow_symlinks:
return os.path.realpath(path).startswith(basedir.encode('utf-8'))
return os.path.abspath(path).startswith(basedir.encode('utf-8'))
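# Example (hypothetical paths): with basedir="/config",
# is_safe_path("/config", b"/config/automations.yaml") is True, while
# is_safe_path("/config", b"/config/../etc/passwd") is False, because
# os.path.realpath collapses the traversal before the prefix check.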
def get_dircontent(path, repo=None):
"""Get content of directory."""
dircontent = []
if repo:
untracked = [
"%s%s%s"%(repo.working_dir, os.sep, e) for e in \
["%s"%os.sep.join(f.split('/')) for f in repo.untracked_files]
]
staged = {}
unstaged = {}
try:
for element in repo.index.diff("HEAD"):
staged["%s%s%s" % (repo.working_dir,
os.sep,
"%s"%os.sep.join(
element.b_path.split('/')))] = element.change_type
except Exception as err:
LOG.warning("Exception: %s", str(err))
for element in repo.index.diff(None):
unstaged["%s%s%s" % (repo.working_dir,
os.sep,
"%s"%os.sep.join(
element.b_path.split('/')))] = element.change_type
else:
untracked = []
staged = {}
unstaged = {}
def sorted_file_list():
"""Sort list of files / directories."""
dirlist = [x for x in os.listdir(path) if os.path.isdir(os.path.join(path, x))]
filelist = [x for x in os.listdir(path) if not os.path.isdir(os.path.join(path, x))]
if DIRSFIRST:
return sorted(dirlist, key=lambda x: x.lower()) + \
sorted(filelist, key=lambda x: x.lower())
return sorted(dirlist + filelist, key=lambda x: x.lower())
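    # Illustrative ordering: with directories ["zeta"] and files ["alpha"],
    # DIRSFIRST=True yields ["zeta", "alpha"], otherwise ["alpha", "zeta"].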
for elem in sorted_file_list():
edata = {}
edata['name'] = elem
edata['dir'] = path
edata['fullpath'] = os.path.abspath(os.path.join(path, elem))
edata['type'] = 'dir' if os.path.isdir(edata['fullpath']) else 'file'
try:
stats = os.stat(os.path.join(path, elem))
edata['size'] = stats.st_size
edata['modified'] = stats.st_mtime
edata['created'] = stats.st_ctime
except Exception:
edata['size'] = 0
edata['modified'] = 0
edata['created'] = 0
edata['changetype'] = None
edata['gitstatus'] = bool(repo)
edata['gittracked'] = 'untracked' if edata['fullpath'] in untracked else 'tracked'
if edata['fullpath'] in unstaged:
edata['gitstatus'] = 'unstaged'
            edata['changetype'] = unstaged.get(edata['fullpath'], None)
elif edata['fullpath'] in staged:
edata['gitstatus'] = 'staged'
            edata['changetype'] = staged.get(edata['fullpath'], None)
hidden = False
if IGNORE_PATTERN is not None:
for file_pattern in IGNORE_PATTERN:
if fnmatch.fnmatch(edata['name'], file_pattern):
hidden = True
if not hidden:
dircontent.append(edata)
return dircontent
def get_html():
"""Load the HTML from file in dev-mode, otherwise embedded."""
if DEV:
try:
with open(os.path.join(os.path.dirname(os.path.realpath(__file__)),
"dev.html")) as fptr:
html = Template(fptr.read())
return html
except Exception as err:
LOG.warning(err)
LOG.warning("Delivering embedded HTML")
return INDEX
def password_problems(password, name="UNKNOWN"):
"""Rudimentary checks for password strength."""
problems = 0
    if password is None:
        return problems
    password = str(password)
if len(password) < 8:
LOG.warning("Password %s is too short", name)
problems += 1
if password.isalpha():
LOG.warning("Password %s does not contain digits", name)
problems += 2
if password.isdigit():
LOG.warning("Password %s does not contain alphabetic characters", name)
problems += 4
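    # Rough uniqueness heuristic (illustrative, not a formal entropy measure):
    # for "abc12345" (8 characters, all unique) quota = 8/8 = 1.0,
    # exp = 8**8 = 16777216 and score = 16777216 / 1.0 / 8 = 2097152,
    # comfortably above the 65536 threshold checked below.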
quota = len(set(password)) / len(password)
exp = len(password) ** len(set(password))
score = exp / quota / 8
if score < 65536:
LOG.warning("Password %s does not contain enough unique characters (%i)",
name, len(set(password)))
problems += 8
return problems
def check_access(clientip):
"""Check if IP is allowed to access the configurator / API."""
global BANNED_IPS
if clientip in BANNED_IPS:
LOG.warning("Client IP banned.")
return False
if not ALLOWED_NETWORKS:
return True
for net in ALLOWED_NETWORKS:
ipobject = ipaddress.ip_address(clientip)
if ipobject in ipaddress.ip_network(net):
return True
LOG.warning("Client IP not within allowed networks.")
if ALLOWED_DOMAINS:
for domain in ALLOWED_DOMAINS:
try:
domain_data = socket.getaddrinfo(domain, None)
except Exception as err:
LOG.warning("Unable to lookup domain data: %s", err)
continue
for res in domain_data:
if res[0] in [socket.AF_INET, socket.AF_INET6]:
if res[4][0] == clientip:
return True
LOG.warning("Client IP not within allowed domains.")
BANNED_IPS.append(clientip)
return False
def verify_hostname(request_hostname):
"""Verify the provided host header is correct."""
if VERIFY_HOSTNAME:
if VERIFY_HOSTNAME not in request_hostname:
return False
return True
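# Note that the check above is a substring match: with VERIFY_HOSTNAME set to
# "configurator.local" (illustrative value), a Host header of
# "configurator.local:3218" is accepted while unrelated hostnames are rejected.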
class RequestHandler(BaseHTTPRequestHandler):
"""Request handler."""
# pylint: disable=redefined-builtin
def log_message(self, format, *args):
LOG.info("%s - %s", self.client_address[0], format % args)
# pylint: disable=invalid-name
def do_BLOCK(self, status=420, reason="Policy not fulfilled"):
"""Customized do_BLOCK method."""
self.send_response(status)
self.end_headers()
self.wfile.write(bytes(reason, "utf8"))
# pylint: disable=invalid-name
def do_GET(self):
"""Customized do_GET method."""
if not verify_hostname(self.headers.get('Host', '')):
self.do_BLOCK(403, "Forbidden")
return
req = urlparse(self.path)
if SESAME or TOTP:
chunk = req.path.split("/")[-1]
if SESAME and chunk == SESAME:
if self.client_address[0] not in ALLOWED_NETWORKS:
ALLOWED_NETWORKS.append(self.client_address[0])
if self.client_address[0] in BANNED_IPS:
BANNED_IPS.remove(self.client_address[0])
url = req.path[:req.path.rfind(chunk)]
self.send_response(302)
self.send_header('Location', url)
self.end_headers()
data = {
"title": "HASS Configurator - SESAME access",
"message": "Your SESAME token has been used to whitelist " \
"the IP address %s." % self.client_address[0]
}
notify(**data)
return
if TOTP and TOTP.verify(chunk):
if self.client_address[0] not in ALLOWED_NETWORKS:
ALLOWED_NETWORKS.append(self.client_address[0])
if self.client_address[0] in BANNED_IPS:
BANNED_IPS.remove(self.client_address[0])
url = req.path[:req.path.rfind(chunk)]
self.send_response(302)
self.send_header('Location', url)
self.end_headers()
data = {
"title": "HASS Configurator - SESAME access",
"message": "Your SESAME token has been used to whitelist " \
"the IP address %s." % self.client_address[0]
}
notify(**data)
return
if not check_access(self.client_address[0]):
self.do_BLOCK()
return
query = parse_qs(req.query)
self.send_response(200)
# pylint: disable=no-else-return
if req.path.endswith('/api/file'):
content = ""
filename = query.get('filename', None)
try:
if filename:
is_raw = False
filename = unquote(filename[0]).encode('utf-8')
if ENFORCE_BASEPATH and not is_safe_path(BASEPATH, filename):
raise OSError('Access denied.')
filepath = os.path.join(BASEDIR.encode('utf-8'), filename)
if os.path.isfile(filepath):
mimetype = mimetypes.guess_type(filepath.decode('utf-8'))
if mimetype[0] is not None:
if mimetype[0].split('/')[0] == 'image':
is_raw = True
if is_raw:
with open(filepath, 'rb') as fptr:
content = fptr.read()
self.send_header('Content-type', mimetype[0])
else:
with open(filepath, 'rb') as fptr:
content += fptr.read().decode('utf-8')
self.send_header('Content-type', 'text/text')
else:
self.send_header('Content-type', 'text/text')
content = "File not found"
except Exception as err:
LOG.warning(err)
self.send_header('Content-type', 'text/text')
content = str(err)
self.end_headers()
if is_raw:
self.wfile.write(content)
else:
self.wfile.write(bytes(content, "utf8"))
return
elif req.path.endswith('/api/download'):
content = ""
filename = query.get('filename', None)
try:
if filename:
filename = unquote(filename[0]).encode('utf-8')
if ENFORCE_BASEPATH and not is_safe_path(BASEPATH, filename):
raise OSError('Access denied.')
LOG.info(filename)
if os.path.isfile(os.path.join(BASEDIR.encode('utf-8'), filename)):
with open(os.path.join(BASEDIR.encode('utf-8'), filename), 'rb') as fptr:
filecontent = fptr.read()
self.send_header(
'Content-Disposition',
'attachment; filename=%s' % filename.decode('utf-8').split(os.sep)[-1])
self.end_headers()
self.wfile.write(filecontent)
return
content = "File not found"
except Exception as err:
LOG.warning(err)
content = str(err)
            self.send_header('Content-type', 'text/text')
            self.end_headers()
            self.wfile.write(bytes(content, "utf8"))
return
elif req.path.endswith('/api/listdir'):
content = {'error': None}
self.send_header('Content-type', 'text/json')
self.end_headers()
dirpath = query.get('path', None)
try:
if dirpath:
dirpath = unquote(dirpath[0]).encode('utf-8')
if os.path.isdir(dirpath):
if ENFORCE_BASEPATH and not is_safe_path(BASEPATH,
dirpath):
raise OSError('Access denied.')
repo = None
activebranch = None
dirty = False
branches = []
if REPO:
try:
# pylint: disable=not-callable
repo = REPO(dirpath.decode('utf-8'),
search_parent_directories=True)
activebranch = repo.active_branch.name
dirty = repo.is_dirty()
for branch in repo.branches:
branches.append(branch.name)
except Exception as err:
LOG.debug("Exception (no repo): %s", str(err))
dircontent = get_dircontent(dirpath.decode('utf-8'), repo)
filedata = {
'content': dircontent,
'abspath': os.path.abspath(dirpath).decode('utf-8'),
'parent': os.path.dirname(os.path.abspath(dirpath)).decode('utf-8'),
'branches': branches,
'activebranch': activebranch,
'dirty': dirty,
'error': None
}
self.wfile.write(bytes(json.dumps(filedata), "utf8"))
except Exception as err:
LOG.warning(err)
content['error'] = str(err)
self.wfile.write(bytes(json.dumps(content), "utf8"))
return
elif req.path.endswith('/api/abspath'):
content = ""
self.send_header('Content-type', 'text/text')
self.end_headers()
dirpath = query.get('path', None)
if dirpath:
dirpath = unquote(dirpath[0]).encode('utf-8')
LOG.debug(dirpath)
                absp = os.path.abspath(dirpath)
                LOG.debug(absp)
                if os.path.isdir(dirpath):
                    self.wfile.write(absp)
return
elif req.path.endswith('/api/parent'):
content = ""
self.send_header('Content-type', 'text/text')
self.end_headers()
dirpath = query.get('path', None)
if dirpath:
dirpath = unquote(dirpath[0]).encode('utf-8')
LOG.debug(dirpath)
absp = os.path.abspath(dirpath)
LOG.debug(absp)
if os.path.isdir(dirpath):
self.wfile.write(os.path.abspath(os.path.dirname(dirpath)))
return
elif req.path.endswith('/api/netstat'):
content = ""
self.send_header('Content-type', 'text/json')
self.end_headers()
res = {
"allowed_networks": ALLOWED_NETWORKS,
"banned_ips": BANNED_IPS
}
self.wfile.write(bytes(json.dumps(res), "utf8"))
return
elif req.path.endswith('/api/restart'):
LOG.info("/api/restart")
self.send_header('Content-type', 'text/json')
self.end_headers()
res = {"restart": False}
try:
headers = {
"Content-Type": "application/json"
}
if HASS_API_PASSWORD:
if is_jwt(HASS_API_PASSWORD):
headers["Authorization"] = "Bearer %s" % HASS_API_PASSWORD
else:
headers["x-ha-access"] = HASS_API_PASSWORD
req = urllib.request.Request(
"%sservices/homeassistant/restart" % HASS_API,
headers=headers, method='POST')
with urllib.request.urlopen(req) as response:
res = json.loads(response.read().decode('utf-8'))
LOG.debug(res)
except Exception as err:
LOG.warning(err)
res['restart'] = str(err)
self.wfile.write(bytes(json.dumps(res), "utf8"))
return
elif req.path.endswith('/api/check_config'):
LOG.info("/api/check_config")
self.send_header('Content-type', 'text/json')
self.end_headers()
res = {"check_config": False}
try:
headers = {
"Content-Type": "application/json"
}
if HASS_API_PASSWORD:
if is_jwt(HASS_API_PASSWORD):
headers["Authorization"] = "Bearer %s" % HASS_API_PASSWORD
else:
headers["x-ha-access"] = HASS_API_PASSWORD
                req = urllib.request.Request(
                    "%sservices/homeassistant/check_config" % HASS_API,
                    headers=headers, method='POST')
                # Call the service and capture the result.
                with urllib.request.urlopen(req) as response:
                    res = json.loads(response.read().decode('utf-8'))
                    LOG.debug(res)
            except Exception as err:
                LOG.warning(err)
                res['check_config'] = str(err)
self.wfile.write(bytes(json.dumps(res), "utf8"))
return
elif req.path.endswith('/api/reload_automations'):
LOG.info("/api/reload_automations")
self.send_header('Content-type', 'text/json')
self.end_headers()
res = {"reload_automations": False}
try:
headers = {
"Content-Type": "application/json"
}
if HASS_API_PASSWORD:
if is_jwt(HASS_API_PASSWORD):
headers["Authorization"] = "Bearer %s" % HASS_API_PASSWORD
else:
headers["x-ha-access"] = HASS_API_PASSWORD
req = urllib.request.Request(
"%sservices/automation/reload" % HASS_API,
headers=headers, method='POST')
with urllib.request.urlopen(req) as response:
LOG.debug(json.loads(response.read().decode('utf-8')))
res['service'] = "called successfully"
except Exception as err:
LOG.warning(err)
                res['reload_automations'] = str(err)
self.wfile.write(bytes(json.dumps(res), "utf8"))
return
elif req.path.endswith('/api/reload_scripts'):
LOG.info("/api/reload_scripts")
self.send_header('Content-type', 'text/json')
self.end_headers()
res = {"reload_scripts": False}
try:
headers = {
"Content-Type": "application/json"
}
if HASS_API_PASSWORD:
if is_jwt(HASS_API_PASSWORD):
headers["Authorization"] = "Bearer %s" % HASS_API_PASSWORD
else:
headers["x-ha-access"] = HASS_API_PASSWORD
req = urllib.request.Request(
"%sservices/script/reload" % HASS_API,
headers=headers, method='POST')
with urllib.request.urlopen(req) as response:
LOG.debug(json.loads(response.read().decode('utf-8')))
res['service'] = "called successfully"
except Exception as err:
LOG.warning(err)
                res['reload_scripts'] = str(err)
self.wfile.write(bytes(json.dumps(res), "utf8"))
return
elif req.path.endswith('/api/reload_groups'):
LOG.info("/api/reload_groups")
self.send_header('Content-type', 'text/json')
self.end_headers()
res = {"reload_groups": False}
try:
headers = {
"Content-Type": "application/json"
}
if HASS_API_PASSWORD:
if is_jwt(HASS_API_PASSWORD):
headers["Authorization"] = "Bearer %s" % HASS_API_PASSWORD
else:
headers["x-ha-access"] = HASS_API_PASSWORD
req = urllib.request.Request(
"%sservices/group/reload" % HASS_API,
headers=headers, method='POST')
with urllib.request.urlopen(req) as response:
LOG.debug(json.loads(response.read().decode('utf-8')))
res['service'] = "called successfully"
except Exception as err:
LOG.warning(err)
                res['reload_groups'] = str(err)
self.wfile.write(bytes(json.dumps(res), "utf8"))
return
elif req.path.endswith('/api/reload_core'):
LOG.info("/api/reload_core")
self.send_header('Content-type', 'text/json')
self.end_headers()
res = {"reload_core": False}
try:
headers = {
"Content-Type": "application/json"
}
if HASS_API_PASSWORD:
if is_jwt(HASS_API_PASSWORD):
headers["Authorization"] = "Bearer %s" % HASS_API_PASSWORD
else:
headers["x-ha-access"] = HASS_API_PASSWORD
req = urllib.request.Request(
"%sservices/homeassistant/reload_core_config" % HASS_API,
headers=headers, method='POST')
with urllib.request.urlopen(req) as response:
LOG.debug(json.loads(response.read().decode('utf-8')))
res['service'] = "called successfully"
except Exception as err:
LOG.warning(err)
                res['reload_core'] = str(err)
self.wfile.write(bytes(json.dumps(res), "utf8"))
return
elif req.path.endswith('/'):
self.send_header('Content-type', 'text/html')
self.end_headers()
loadfile = query.get('loadfile', [None])[0]
if loadfile is None:
loadfile = 'null'
else:
loadfile = "'%s'" % loadfile
services = "[]"
events = "[]"
states = "[]"
try:
if HASS_API:
headers = {
"Content-Type": "application/json"
}
if HASS_API_PASSWORD:
if is_jwt(HASS_API_PASSWORD):
headers["Authorization"] = "Bearer %s" % HASS_API_PASSWORD
else:
headers["x-ha-access"] = HASS_API_PASSWORD
req = urllib.request.Request("%sservices" % HASS_API,
headers=headers, method='GET')
with urllib.request.urlopen(req) as response:
services = response.read().decode('utf-8')
req = urllib.request.Request("%sevents" % HASS_API,
headers=headers, method='GET')
with urllib.request.urlopen(req) as response:
events = response.read().decode('utf-8')
req = urllib.request.Request("%sstates" % HASS_API,
headers=headers, method='GET')
with urllib.request.urlopen(req) as response:
states = response.read().decode('utf-8')
except Exception as err:
LOG.warning("Exception getting bootstrap")
LOG.warning(err)
color = ""
try:
response = urllib.request.urlopen(RELEASEURL)
latest = json.loads(response.read().decode('utf-8'))['tag_name']
if VERSION != latest:
color = "red-text"
except Exception as err:
LOG.warning("Exception getting release")
LOG.warning(err)
ws_api = ""
if HASS_API:
protocol, uri = HASS_API.split("//")
ws_api = "%s://%swebsocket" % (
"wss" if protocol == 'https' else 'ws', uri
)
if HASS_WS_API:
ws_api = HASS_WS_API
standalone = ""
if not HASS_API:
standalone = "toggle_hass_panels();"
html = get_html().safe_substitute(
services=services,
events=events,
states=states,
loadfile=loadfile,
current=VERSION,
versionclass=color,
githidden="" if GIT else "hiddendiv",
# pylint: disable=anomalous-backslash-in-string
separator="\%s" % os.sep if os.sep == "\\" else os.sep,
your_address=self.client_address[0],
listening_address="%s://%s:%i" % (
'https' if SSL_CERTIFICATE else 'http', LISTENIP, PORT),
hass_api_address="%s" % (HASS_API, ),
hass_ws_address=ws_api,
api_password=HASS_API_PASSWORD if HASS_API_PASSWORD else "",
standalone=standalone)
self.wfile.write(bytes(html, "utf8"))
return
else:
self.send_response(404)
self.end_headers()
self.wfile.write(bytes("File not found", "utf8"))
# pylint: disable=invalid-name
def do_POST(self):
"""Customized do_POST method."""
global ALLOWED_NETWORKS, BANNED_IPS
if not verify_hostname(self.headers.get('Host', '')):
self.do_BLOCK(403, "Forbidden")
return
if not check_access(self.client_address[0]):
self.do_BLOCK()
return
req = urlparse(self.path)
response = {
"error": True,
"message": "Generic failure"
}
length = int(self.headers['content-length'])
if req.path.endswith('/api/save'):
try:
postvars = parse_qs(self.rfile.read(length).decode('utf-8'),
keep_blank_values=1)
except Exception as err:
LOG.warning(err)
response['message'] = "%s" % (str(err))
postvars = {}
if 'filename' in postvars.keys() and 'text' in postvars.keys():
if postvars['filename'] and postvars['text']:
try:
filename = unquote(postvars['filename'][0])
response['file'] = filename
with open(filename, 'wb') as fptr:
fptr.write(bytes(postvars['text'][0], "utf-8"))
self.send_response(200)
self.send_header('Content-type', 'text/json')
self.end_headers()
response['error'] = False
response['message'] = "File saved successfully"
self.wfile.write(bytes(json.dumps(response), "utf8"))
return
except Exception as err:
response['message'] = "%s" % (str(err))
LOG.warning(err)
else:
response['message'] = "Missing filename or text"
elif req.path.endswith('/api/upload'):
if length > 104857600: #100 MB for now
read = 0
while read < length:
read += len(self.rfile.read(min(66556, length - read)))
self.send_response(200)
self.send_header('Content-type', 'text/json')
self.end_headers()
response['error'] = True
response['message'] = "File too big: %i" % read
self.wfile.write(bytes(json.dumps(response), "utf8"))
return
form = cgi.FieldStorage(
fp=self.rfile,
headers=self.headers,
environ={
'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': self.headers['Content-Type'],
})
filename = form['file'].filename
filepath = form['path'].file.read()
data = form['file'].file.read()
open("%s%s%s" % (filepath, os.sep, filename), "wb").write(data)
self.send_response(200)
self.send_header('Content-type', 'text/json')
self.end_headers()
response['error'] = False
response['message'] = "Upload successful"
self.wfile.write(bytes(json.dumps(response), "utf8"))
return
elif req.path.endswith('/api/delete'):
try:
postvars = parse_qs(self.rfile.read(length).decode('utf-8'),
keep_blank_values=1)
except Exception as err:
LOG.warning(err)
response['message'] = "%s" % (str(err))
postvars = {}
if 'path' in postvars.keys():
if postvars['path']:
try:
delpath = unquote(postvars['path'][0])
response['path'] = delpath
try:
if os.path.isdir(delpath):
os.rmdir(delpath)
else:
os.unlink(delpath)
self.send_response(200)
self.send_header('Content-type', 'text/json')
self.end_headers()
response['error'] = False
response['message'] = "Deletion successful"
self.wfile.write(bytes(json.dumps(response), "utf8"))
return
except Exception as err:
LOG.warning(err)
response['error'] = True
response['message'] = str(err)
except Exception as err:
response['message'] = "%s" % (str(err))
LOG.warning(err)
else:
response['message'] = "Missing filename or text"
elif req.path.endswith('/api/exec_command'):
try:
postvars = parse_qs(self.rfile.read(length).decode('utf-8'),
keep_blank_values=1)
except Exception as err:
LOG.warning(err)
response['message'] = "%s" % (str(err))
postvars = {}
if 'command' in postvars.keys():
if postvars['command']:
try:
command = shlex.split(postvars['command'][0])
timeout = 15
if 'timeout' in postvars.keys():
if postvars['timeout']:
timeout = int(postvars['timeout'][0])
try:
proc = subprocess.Popen(
command,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = proc.communicate(timeout=timeout)
self.send_response(200)
self.send_header('Content-type', 'text/json')
self.end_headers()
response['error'] = False
response['message'] = "Command executed: %s" % postvars['command'][0]
response['returncode'] = proc.returncode
try:
response['stdout'] = stdout.decode(sys.getdefaultencoding())
except Exception as err:
LOG.warning(err)
response['stdout'] = stdout.decode("utf-8", errors="replace")
try:
response['stderr'] = stderr.decode(sys.getdefaultencoding())
except Exception as err:
LOG.warning(err)
response['stderr'] = stderr.decode("utf-8", errors="replace")
self.wfile.write(bytes(json.dumps(response), "utf8"))
return
except Exception as err:
LOG.warning(err)
response['error'] = True
response['message'] = str(err)
except Exception as err:
response['message'] = "%s" % (str(err))
LOG.warning(err)
else:
response['message'] = "Missing command"
elif req.path.endswith('/api/gitadd'):
try:
postvars = parse_qs(self.rfile.read(length).decode('utf-8'),
keep_blank_values=1)
except Exception as err:
LOG.warning(err)
response['message'] = "%s" % (str(err))
postvars = {}
if 'path' in postvars.keys():
if postvars['path']:
try:
addpath = unquote(postvars['path'][0])
# pylint: disable=not-callable
repo = REPO(addpath,
search_parent_directories=True)
filepath = "/".join(
addpath.split(os.sep)[len(repo.working_dir.split(os.sep)):])
response['path'] = filepath
try:
repo.index.add([filepath])
response['error'] = False
response['message'] = "Added file to index"
self.send_response(200)
self.send_header('Content-type', 'text/json')
self.end_headers()
self.wfile.write(bytes(json.dumps(response), "utf8"))
return
except Exception as err:
LOG.warning(err)
response['error'] = True
response['message'] = str(err)
except Exception as err:
response['message'] = "%s" % (str(err))
LOG.warning(err)
else:
response['message'] = "Missing filename"
elif req.path.endswith('/api/gitdiff'):
try:
postvars = parse_qs(self.rfile.read(length).decode('utf-8'),
keep_blank_values=1)
except Exception as err:
LOG.warning(err)
response['message'] = "%s" % (str(err))
postvars = {}
if 'path' in postvars.keys():
if postvars['path']:
try:
diffpath = unquote(postvars['path'][0])
# pylint: disable=not-callable
repo = REPO(diffpath,
search_parent_directories=True)
filepath = "/".join(
diffpath.split(os.sep)[len(repo.working_dir.split(os.sep)):])
response['path'] = filepath
try:
diff = repo.index.diff(None,
create_patch=True,
paths=filepath)[0].diff.decode("utf-8")
response['error'] = False
response['message'] = diff
self.send_response(200)
self.send_header('Content-type', 'text/json')
self.end_headers()
self.wfile.write(bytes(json.dumps(response), "utf8"))
return
except Exception as err:
LOG.warning(err)
response['error'] = True
response['message'] = "Unable to load diff: %s" % str(err)
except Exception as err:
response['message'] = "%s" % (str(err))
LOG.warning(err)
else:
response['message'] = "Missing filename"
elif req.path.endswith('/api/commit'):
try:
postvars = parse_qs(self.rfile.read(length).decode('utf-8'),
keep_blank_values=1)
except Exception as err:
LOG.warning(err)
response['message'] = "%s" % (str(err))
postvars = {}
if 'path' in postvars.keys() and 'message' in postvars.keys():
if postvars['path'] and postvars['message']:
try:
commitpath = unquote(postvars['path'][0])
response['path'] = commitpath
message = unquote(postvars['message'][0])
# pylint: disable=not-callable
repo = REPO(commitpath,
search_parent_directories=True)
try:
repo.index.commit(message)
response['error'] = False
response['message'] = "Changes commited"
self.send_response(200)
self.send_header('Content-type', 'text/json')
self.end_headers()
self.wfile.write(bytes(json.dumps(response), "utf8"))
return
except Exception as err:
response['error'] = True
response['message'] = str(err)
LOG.debug(response)
except Exception as err:
response['message'] = "Not a git repository: %s" % (str(err))
LOG.warning("Exception (no repo): %s", str(err))
else:
response['message'] = "Missing path"
elif req.path.endswith('/api/checkout'):
try:
postvars = parse_qs(self.rfile.read(length).decode('utf-8'),
keep_blank_values=1)
except Exception as err:
LOG.warning(err)
response['message'] = "%s" % (str(err))
postvars = {}
if 'path' in postvars.keys() and 'branch' in postvars.keys():
if postvars['path'] and postvars['branch']:
try:
branchpath = unquote(postvars['path'][0])
response['path'] = branchpath
branch = unquote(postvars['branch'][0])
# pylint: disable=not-callable
repo = REPO(branchpath,
search_parent_directories=True)
try:
head = [h for h in repo.heads if h.name == branch][0]
head.checkout()
response['error'] = False
response['message'] = "Checked out %s" % branch
self.send_response(200)
self.send_header('Content-type', 'text/json')
self.end_headers()
self.wfile.write(bytes(json.dumps(response), "utf8"))
return
except Exception as err:
response['error'] = True
response['message'] = str(err)
LOG.warning(response)
except Exception as err:
response['message'] = "Not a git repository: %s" % (str(err))
LOG.warning("Exception (no repo): %s", str(err))
else:
response['message'] = "Missing path or branch"
elif req.path.endswith('/api/newbranch'):
try:
postvars = parse_qs(self.rfile.read(length).decode('utf-8'),
keep_blank_values=1)
except Exception as err:
LOG.warning(err)
response['message'] = "%s" % (str(err))
postvars = {}
if 'path' in postvars.keys() and 'branch' in postvars.keys():
if postvars['path'] and postvars['branch']:
try:
branchpath = unquote(postvars['path'][0])
response['path'] = branchpath
branch = unquote(postvars['branch'][0])
# pylint: disable=not-callable
repo = REPO(branchpath,
search_parent_directories=True)
try:
repo.git.checkout("HEAD", b=branch)
response['error'] = False
response['message'] = "Created and checked out %s" % branch
self.send_response(200)
self.send_header('Content-type', 'text/json')
self.end_headers()
self.wfile.write(bytes(json.dumps(response), "utf8"))
return
except Exception as err:
response['error'] = True
response['message'] = str(err)
LOG.warning(response)
except Exception as err:
response['message'] = "Not a git repository: %s" % (str(err))
LOG.warning("Exception (no repo): %s", str(err))
else:
response['message'] = "Missing path or branch"
elif req.path.endswith('/api/init'):
try:
postvars = parse_qs(self.rfile.read(length).decode('utf-8'),
keep_blank_values=1)
except Exception as err:
LOG.warning(err)
response['message'] = "%s" % (str(err))
postvars = {}
if 'path' in postvars.keys():
if postvars['path']:
try:
repopath = unquote(postvars['path'][0])
response['path'] = repopath
try:
repo = REPO.init(repopath)
response['error'] = False
response['message'] = "Initialized repository in %s" % repopath
self.send_response(200)
self.send_header('Content-type', 'text/json')
self.end_headers()
self.wfile.write(bytes(json.dumps(response), "utf8"))
return
except Exception as err:
response['error'] = True
response['message'] = str(err)
LOG.warning(response)
except Exception as err:
response['message'] = "Not a git repository: %s" % (str(err))
LOG.warning("Exception (no repo): %s", str(err))
else:
response['message'] = "Missing path or branch"
elif req.path.endswith('/api/push'):
try:
postvars = parse_qs(self.rfile.read(length).decode('utf-8'),
keep_blank_values=1)
except Exception as err:
LOG.warning(err)
response['message'] = "%s" % (str(err))
postvars = {}
if 'path' in postvars.keys():
if postvars['path']:
try:
repopath = unquote(postvars['path'][0])
response['path'] = repopath
try:
# pylint: disable=not-callable
repo = REPO(repopath)
urls = []
if repo.remotes:
for url in repo.remotes.origin.urls:
urls.append(url)
if not urls:
response['error'] = True
response['message'] = "No remotes configured for %s" % repopath
else:
repo.remotes.origin.push()
response['error'] = False
response['message'] = "Pushed to %s" % urls[0]
self.send_response(200)
self.send_header('Content-type', 'text/json')
self.end_headers()
self.wfile.write(bytes(json.dumps(response), "utf8"))
return
except Exception as err:
response['error'] = True
response['message'] = str(err)
LOG.warning(response)
except Exception as err:
response['message'] = "Not a git repository: %s" % (str(err))
LOG.warning("Exception (no repo): %s", str(err))
else:
response['message'] = "Missing path or branch"
elif req.path.endswith('/api/stash'):
try:
postvars = parse_qs(self.rfile.read(length).decode('utf-8'),
keep_blank_values=1)
except Exception as err:
LOG.warning(err)
response['message'] = "%s" % (str(err))
postvars = {}
if 'path' in postvars.keys():
if postvars['path']:
try:
repopath = unquote(postvars['path'][0])
response['path'] = repopath
try:
# pylint: disable=not-callable
repo = REPO(repopath)
returnvalue = repo.git.stash()
response['error'] = False
response['message'] = "%s\n%s" % (returnvalue, repopath)
self.send_response(200)
self.send_header('Content-type', 'text/json')
self.end_headers()
self.wfile.write(bytes(json.dumps(response), "utf8"))
return
except Exception as err:
response['error'] = True
response['message'] = str(err)
LOG.warning(response)
except Exception as err:
response['message'] = "Not a git repository: %s" % (str(err))
LOG.warning("Exception (no repo): %s", str(err))
else:
response['message'] = "Missing path or branch"
elif req.path.endswith('/api/newfolder'):
try:
postvars = parse_qs(self.rfile.read(length).decode('utf-8'),
keep_blank_values=1)
except Exception as err:
LOG.warning(err)
response['message'] = "%s" % (str(err))
postvars = {}
if 'path' in postvars.keys() and 'name' in postvars.keys():
if postvars['path'] and postvars['name']:
try:
basepath = unquote(postvars['path'][0])
name = unquote(postvars['name'][0])
response['path'] = os.path.join(basepath, name)
try:
os.makedirs(response['path'])
self.send_response(200)
self.send_header('Content-type', 'text/json')
self.end_headers()
response['error'] = False
response['message'] = "Folder created"
self.wfile.write(bytes(json.dumps(response), "utf8"))
return
except Exception as err:
LOG.warning(err)
response['error'] = True
response['message'] = str(err)
except Exception as err:
response['message'] = "%s" % (str(err))
LOG.warning(err)
elif req.path.endswith('/api/newfile'):
try:
postvars = parse_qs(self.rfile.read(length).decode('utf-8'),
keep_blank_values=1)
except Exception as err:
LOG.warning(err)
response['message'] = "%s" % (str(err))
postvars = {}
if 'path' in postvars.keys() and 'name' in postvars.keys():
if postvars['path'] and postvars['name']:
try:
basepath = unquote(postvars['path'][0])
name = unquote(postvars['name'][0])
response['path'] = os.path.join(basepath, name)
try:
with open(response['path'], 'w') as fptr:
fptr.write("")
self.send_response(200)
self.send_header('Content-type', 'text/json')
self.end_headers()
response['error'] = False
response['message'] = "File created"
self.wfile.write(bytes(json.dumps(response), "utf8"))
return
except Exception as err:
LOG.warning(err)
response['error'] = True
response['message'] = str(err)
except Exception as err:
response['message'] = "%s" % (str(err))
LOG.warning(err)
else:
response['message'] = "Missing filename or text"
elif req.path.endswith('/api/allowed_networks'):
try:
postvars = parse_qs(self.rfile.read(length).decode('utf-8'),
keep_blank_values=1)
except Exception as err:
LOG.warning(err)
response['message'] = "%s" % (str(err))
postvars = {}
if 'network' in postvars.keys() and 'method' in postvars.keys():
if postvars['network'] and postvars['method']:
try:
network = unquote(postvars['network'][0])
method = unquote(postvars['method'][0])
if method == 'remove':
if network in ALLOWED_NETWORKS:
ALLOWED_NETWORKS.remove(network)
if not ALLOWED_NETWORKS:
ALLOWED_NETWORKS.append("0.0.0.0/0")
response['error'] = False
elif method == 'add':
ipaddress.ip_network(network)
ALLOWED_NETWORKS.append(network)
response['error'] = False
else:
response['error'] = True
self.send_response(200)
self.send_header('Content-type', 'text/json')
self.end_headers()
response['message'] = "ALLOWED_NETWORKS (%s): %s" % (method, network)
self.wfile.write(bytes(json.dumps(response), "utf8"))
return
except Exception as err:
response['error'] = True
response['message'] = "%s" % (str(err))
LOG.warning(err)
else:
response['message'] = "Missing network"
elif req.path.endswith('/api/banned_ips'):
try:
postvars = parse_qs(self.rfile.read(length).decode('utf-8'),
keep_blank_values=1)
except Exception as err:
LOG.warning(err)
response['message'] = "%s" % (str(err))
postvars = {}
if 'ip' in postvars.keys() and 'method' in postvars.keys():
if postvars['ip'] and postvars['method']:
try:
ip_address = unquote(postvars['ip'][0])
method = unquote(postvars['method'][0])
if method == 'unban':
if ip_address in BANNED_IPS:
BANNED_IPS.remove(ip_address)
response['error'] = False
                        elif method == 'ban':
                            ipaddress.ip_address(ip_address)
                            BANNED_IPS.append(ip_address)
                            response['error'] = False
else:
response['error'] = True
self.send_response(200)
self.send_header('Content-type', 'text/json')
self.end_headers()
response['message'] = "BANNED_IPS (%s): %s" % (method, ip_address)
self.wfile.write(bytes(json.dumps(response), "utf8"))
return
except Exception as err:
response['error'] = True
response['message'] = "%s" % (str(err))
LOG.warning(err)
else:
response['message'] = "Missing IP"
else:
response['message'] = "Invalid method"
self.send_response(200)
self.send_header('Content-type', 'text/json')
self.end_headers()
self.wfile.write(bytes(json.dumps(response), "utf8"))
return
class AuthHandler(RequestHandler):
"""Handler to verify auth header."""
def do_BLOCK(self, status=420, reason="Policy not fulfilled"):
self.send_response(status)
self.end_headers()
self.wfile.write(bytes(reason, "utf8"))
# pylint: disable=invalid-name
def do_AUTHHEAD(self):
"""Request authorization."""
LOG.info("Requesting authorization")
self.send_response(401)
self.send_header('WWW-Authenticate', 'Basic realm=\"HASS-Configurator\"')
self.send_header('Content-type', 'text/html')
self.end_headers()
def do_GET(self):
if not verify_hostname(self.headers.get('Host', '')):
self.do_BLOCK(403, "Forbidden")
return
header = self.headers.get('Authorization', None)
if header is None:
self.do_AUTHHEAD()
self.wfile.write(bytes('no auth header received', 'utf-8'))
else:
authorization = header.split()
if len(authorization) == 2 and authorization[0] == "Basic":
plain = base64.b64decode(authorization[1]).decode("utf-8")
parts = plain.split(':')
username = parts[0]
password = ":".join(parts[1:])
if PASSWORD.startswith("{sha256}"):
password = "{sha256}%s" % hashlib.sha256(password.encode("utf-8")).hexdigest()
if username == USERNAME and password == PASSWORD:
if BANLIMIT:
FAIL2BAN_IPS.pop(self.client_address[0], None)
super().do_GET()
return
if BANLIMIT:
bancounter = FAIL2BAN_IPS.get(self.client_address[0], 1)
if bancounter >= BANLIMIT:
LOG.warning("Blocking access from %s", self.client_address[0])
self.do_BLOCK()
return
FAIL2BAN_IPS[self.client_address[0]] = bancounter + 1
self.do_AUTHHEAD()
self.wfile.write(bytes('Authentication required', 'utf-8'))
def do_POST(self):
if not verify_hostname(self.headers.get('Host', '')):
self.do_BLOCK(403, "Forbidden")
return
header = self.headers.get('Authorization', None)
if header is None:
self.do_AUTHHEAD()
self.wfile.write(bytes('no auth header received', 'utf-8'))
else:
authorization = header.split()
if len(authorization) == 2 and authorization[0] == "Basic":
plain = base64.b64decode(authorization[1]).decode("utf-8")
parts = plain.split(':')
username = parts[0]
password = ":".join(parts[1:])
if PASSWORD.startswith("{sha256}"):
password = "{sha256}%s" % hashlib.sha256(password.encode("utf-8")).hexdigest()
if username == USERNAME and password == PASSWORD:
if BANLIMIT:
FAIL2BAN_IPS.pop(self.client_address[0], None)
super().do_POST()
return
if BANLIMIT:
bancounter = FAIL2BAN_IPS.get(self.client_address[0], 1)
if bancounter >= BANLIMIT:
LOG.warning("Blocking access from %s", self.client_address[0])
self.do_BLOCK()
return
FAIL2BAN_IPS[self.client_address[0]] = bancounter + 1
self.do_AUTHHEAD()
self.wfile.write(bytes('Authentication required', 'utf-8'))
class SimpleServer(socketserver.ThreadingMixIn, socketserver.TCPServer):
"""Server class."""
daemon_threads = True
allow_reuse_address = True
def __init__(self, server_address, RequestHandlerClass):
socketserver.TCPServer.__init__(self, server_address, RequestHandlerClass)
def notify(title="HASS Configurator",
message="Notification by HASS Configurator",
notification_id=None):
"""Helper function to send notifications via HASS."""
if not HASS_API or not NOTIFY_SERVICE:
return
headers = {
"Content-Type": "application/json"
}
data = {
"title": title,
"message": message
}
if notification_id and NOTIFY_SERVICE == NOTIFY_SERVICE_DEFAULT:
data["notification_id"] = notification_id
if HASS_API_PASSWORD:
if is_jwt(HASS_API_PASSWORD):
headers["Authorization"] = "Bearer %s" % HASS_API_PASSWORD
else:
headers["x-ha-access"] = HASS_API_PASSWORD
req = urllib.request.Request(
"%sservices/%s" % (HASS_API, NOTIFY_SERVICE.replace('.', '/')),
data=bytes(json.dumps(data).encode('utf-8')),
headers=headers, method='POST')
LOG.info("%s", data)
try:
with urllib.request.urlopen(req) as response:
message = response.read().decode('utf-8')
LOG.debug(message)
except Exception as err:
LOG.warning("Exception while creating notification: %s", err)
def main():
"""Main function, duh!"""
global HTTPD
signal.signal(signal.SIGINT, signal_handler)
parser = argparse.ArgumentParser(description="Visit " \
"https://github.com/danielperna84/hass-configurator for more details " \
"about the availble options.")
parser.add_argument(
'settings', nargs='?',
help="Path to file with persistent settings.")
parser.add_argument(
'--listen', '-l', nargs='?',
help="The IP address the service is listening on. Default: 0.0.0.0")
parser.add_argument(
'--port', '-p', nargs='?', type=int,
help="The port the service is listening on. " \
"0 allocates a dynamic port. Default: 3218")
parser.add_argument(
'--allowed_networks', '-a', nargs='?',
help="Comma-separated list of allowed networks / IP addresses " \
"from which access is allowed. Eg. 127.0.0.1,192.168.0.0/16. " \
"By default access is allowed from anywhere.")
parser.add_argument(
'--username', '-U', nargs='?',
help="Username required for access.")
parser.add_argument(
'--password', '-P', nargs='?',
help="Password required for access.")
parser.add_argument(
'--sesame', '-S', nargs='?',
help="SESAME token for whitelisting client IPs by accessing " \
"a scret URL: http://1.2.3.4:3218/secret_sesame_token")
parser.add_argument(
'--basepath', '-b', nargs='?',
help="Path to initially serve files from")
parser.add_argument(
'--enforce', '-e', action='store_true',
help="Lock the configurator into the basepath.")
parser.add_argument(
'--standalone', '-s', action='store_true',
help="Don't fetch data from HASS_API.")
parser.add_argument(
'--dirsfirst', '-d', action='store_true',
help="Display directories first.")
parser.add_argument(
'--git', '-g', action='store_true',
help="Enable GIT support.")
parser.add_argument(
'--dev', '-D', action='store_true',
help="Enable Dev-Mode (serve dev.html instead of embedded HTML).")
args = parser.parse_args()
load_settings(args)
LOG.info("Starting server")
try:
problems = None
if HASS_API_PASSWORD:
problems = password_problems(HASS_API_PASSWORD, "HASS_API_PASSWORD")
if problems:
data = {
"title": "HASS Configurator - Password warning",
"message": "Your HASS API password seems insecure (%i). " \
"Refer to the HASS configurator logs for further information." % problems,
"notification_id": "HC_HASS_API_PASSWORD"
}
notify(**data)
problems = None
if SESAME:
problems = password_problems(SESAME, "SESAME")
if problems:
data = {
"title": "HASS Configurator - Password warning",
"message": "Your SESAME seems insecure (%i). " \
"Refer to the HASS configurator logs for further information." % problems,
"notification_id": "HC_SESAME"
}
notify(**data)
problems = None
if PASSWORD:
problems = password_problems(PASSWORD, "PASSWORD")
if problems:
data = {
"title": "HASS Configurator - Password warning",
"message": "Your PASSWORD seems insecure (%i). " \
"Refer to the HASS configurator logs for further information." % problems,
"notification_id": "HC_PASSWORD"
}
notify(**data)
except Exception as err:
LOG.warning("Exception while checking passwords: %s", err)
custom_server = SimpleServer
if ':' in LISTENIP:
custom_server.address_family = socket.AF_INET6
server_address = (LISTENIP, PORT)
if USERNAME and PASSWORD:
handler = AuthHandler
else:
handler = RequestHandler
HTTPD = custom_server(server_address, handler)
if SSL_CERTIFICATE:
HTTPD.socket = ssl.wrap_socket(HTTPD.socket,
certfile=SSL_CERTIFICATE,
keyfile=SSL_KEY,
server_side=True)
LOG.info('Listening on: %s://%s:%i',
'https' if SSL_CERTIFICATE else 'http',
HTTPD.server_address[0], HTTPD.server_address[1])
if BASEPATH:
os.chdir(BASEPATH)
HTTPD.serve_forever()
if __name__ == "__main__":
main()
| mit | -8,328,957,504,702,736,000 | 43.700275 | 363 | 0.501027 | false |
boompieman/iim_project | project_python2/lib/python2.7/site-packages/pygments/lexers/textfmts.py | 21 | 10852 | # -*- coding: utf-8 -*-
"""
pygments.lexers.textfmts
~~~~~~~~~~~~~~~~~~~~~~~~
Lexers for various text formats.
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, bygroups
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Generic, Literal
from pygments.util import ClassNotFound
__all__ = ['IrcLogsLexer', 'TodotxtLexer', 'HttpLexer', 'GettextLexer']
class IrcLogsLexer(RegexLexer):
"""
Lexer for IRC logs in *irssi*, *xchat* or *weechat* style.
"""
name = 'IRC logs'
aliases = ['irc']
filenames = ['*.weechatlog']
mimetypes = ['text/x-irclog']
flags = re.VERBOSE | re.MULTILINE
timestamp = r"""
(
# irssi / xchat and others
(?: \[|\()? # Opening bracket or paren for the timestamp
(?: # Timestamp
(?: (?:\d{1,4} [-/])* # Date as - or /-separated groups of digits
(?:\d{1,4})
[T ])? # Date/time separator: T or space
(?: \d?\d [:.])* # Time as :/.-separated groups of 1 or 2 digits
(?: \d?\d)
)
(?: \]|\))?\s+ # Closing bracket or paren for the timestamp
|
# weechat
\d{4}\s\w{3}\s\d{2}\s # Date
\d{2}:\d{2}:\d{2}\s+ # Time + Whitespace
|
# xchat
\w{3}\s\d{2}\s # Date
\d{2}:\d{2}:\d{2}\s+ # Time + Whitespace
)?
"""
tokens = {
'root': [
# log start/end
(r'^\*\*\*\*(.*)\*\*\*\*$', Comment),
# hack
("^" + timestamp + r'(\s*<[^>]*>\s*)$', bygroups(Comment.Preproc, Name.Tag)),
# normal msgs
("^" + timestamp + r"""
(\s*<.*?>\s*) # Nick """,
bygroups(Comment.Preproc, Name.Tag), 'msg'),
# /me msgs
("^" + timestamp + r"""
(\s*[*]\s+) # Star
(\S+\s+.*?\n) # Nick + rest of message """,
bygroups(Comment.Preproc, Keyword, Generic.Inserted)),
# join/part msgs
("^" + timestamp + r"""
(\s*(?:\*{3}|<?-[!@=P]?->?)\s*) # Star(s) or symbols
(\S+\s+) # Nick + Space
(.*?\n) # Rest of message """,
bygroups(Comment.Preproc, Keyword, String, Comment)),
(r"^.*?\n", Text),
],
'msg': [
(r"\S+:(?!//)", Name.Attribute), # Prefix
(r".*\n", Text, '#pop'),
],
}
class GettextLexer(RegexLexer):
"""
Lexer for Gettext catalog files.
.. versionadded:: 0.9
"""
name = 'Gettext Catalog'
aliases = ['pot', 'po']
filenames = ['*.pot', '*.po']
mimetypes = ['application/x-gettext', 'text/x-gettext', 'text/gettext']
tokens = {
'root': [
(r'^#,\s.*?$', Keyword.Type),
(r'^#:\s.*?$', Keyword.Declaration),
# (r'^#$', Comment),
(r'^(#|#\.\s|#\|\s|#~\s|#\s).*$', Comment.Single),
(r'^(")([A-Za-z-]+:)(.*")$',
bygroups(String, Name.Property, String)),
(r'^".*"$', String),
(r'^(msgid|msgid_plural|msgstr|msgctxt)(\s+)(".*")$',
bygroups(Name.Variable, Text, String)),
(r'^(msgstr\[)(\d)(\])(\s+)(".*")$',
bygroups(Name.Variable, Number.Integer, Name.Variable, Text, String)),
]
}
class HttpLexer(RegexLexer):
"""
Lexer for HTTP sessions.
.. versionadded:: 1.5
"""
name = 'HTTP'
aliases = ['http']
flags = re.DOTALL
def get_tokens_unprocessed(self, text, stack=('root',)):
"""Reset the content-type state."""
self.content_type = None
return RegexLexer.get_tokens_unprocessed(self, text, stack)
def header_callback(self, match):
if match.group(1).lower() == 'content-type':
content_type = match.group(5).strip()
if ';' in content_type:
content_type = content_type[:content_type.find(';')].strip()
self.content_type = content_type
yield match.start(1), Name.Attribute, match.group(1)
yield match.start(2), Text, match.group(2)
yield match.start(3), Operator, match.group(3)
yield match.start(4), Text, match.group(4)
yield match.start(5), Literal, match.group(5)
yield match.start(6), Text, match.group(6)
def continuous_header_callback(self, match):
yield match.start(1), Text, match.group(1)
yield match.start(2), Literal, match.group(2)
yield match.start(3), Text, match.group(3)
def content_callback(self, match):
content_type = getattr(self, 'content_type', None)
content = match.group()
offset = match.start()
if content_type:
from pygments.lexers import get_lexer_for_mimetype
possible_lexer_mimetypes = [content_type]
if '+' in content_type:
# application/calendar+xml can be treated as application/xml
# if there's not a better match.
general_type = re.sub(r'^(.*)/.*\+(.*)$', r'\1/\2',
content_type)
possible_lexer_mimetypes.append(general_type)
for i in possible_lexer_mimetypes:
try:
lexer = get_lexer_for_mimetype(i)
except ClassNotFound:
pass
else:
for idx, token, value in lexer.get_tokens_unprocessed(content):
yield offset + idx, token, value
return
yield offset, Text, content
tokens = {
'root': [
(r'(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE|PATCH)( +)([^ ]+)( +)'
r'(HTTP)(/)(1\.[01])(\r?\n|\Z)',
bygroups(Name.Function, Text, Name.Namespace, Text,
Keyword.Reserved, Operator, Number, Text),
'headers'),
(r'(HTTP)(/)(1\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|\Z)',
bygroups(Keyword.Reserved, Operator, Number, Text, Number,
Text, Name.Exception, Text),
'headers'),
],
'headers': [
(r'([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|\Z)', header_callback),
(r'([\t ]+)([^\r\n]+)(\r?\n|\Z)', continuous_header_callback),
(r'\r?\n', Text, 'content')
],
'content': [
(r'.+', content_callback)
]
}
def analyse_text(text):
return text.startswith(('GET /', 'POST /', 'PUT /', 'DELETE /', 'HEAD /',
'OPTIONS /', 'TRACE /', 'PATCH /'))
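# Minimal usage sketch for HttpLexer (assumes nothing beyond pygments itself):
#   from pygments import highlight
#   from pygments.formatters import TerminalFormatter
#   raw = "GET /index.html HTTP/1.1\r\nHost: example.org\r\n\r\n"
#   print(highlight(raw, HttpLexer(), TerminalFormatter()))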
class TodotxtLexer(RegexLexer):
"""
Lexer for `Todo.txt <http://todotxt.com/>`_ todo list format.
.. versionadded:: 2.0
"""
name = 'Todotxt'
aliases = ['todotxt']
# *.todotxt is not a standard extension for Todo.txt files; including it
# makes testing easier, and also makes autodetecting file type easier.
filenames = ['todo.txt', '*.todotxt']
mimetypes = ['text/x-todo']
# Aliases mapping standard token types of Todo.txt format concepts
CompleteTaskText = Operator # Chosen to de-emphasize complete tasks
IncompleteTaskText = Text # Incomplete tasks should look like plain text
# Priority should have most emphasis to indicate importance of tasks
Priority = Generic.Heading
# Dates should have next most emphasis because time is important
Date = Generic.Subheading
# Project and context should have equal weight, and be in different colors
Project = Generic.Error
Context = String
# If tag functionality is added, it should have the same weight as Project
# and Context, and a different color. Generic.Traceback would work well.
# Regex patterns for building up rules; dates, priorities, projects, and
# contexts are all atomic
# TODO: Make date regex more ISO 8601 compliant
date_regex = r'\d{4,}-\d{2}-\d{2}'
priority_regex = r'\([A-Z]\)'
project_regex = r'\+\S+'
context_regex = r'@\S+'
# Compound regex expressions
complete_one_date_regex = r'(x )(' + date_regex + r')'
complete_two_date_regex = (complete_one_date_regex + r'( )(' +
date_regex + r')')
priority_date_regex = r'(' + priority_regex + r')( )(' + date_regex + r')'
tokens = {
# Should parse starting at beginning of line; each line is a task
'root': [
# Complete task entry points: two total:
# 1. Complete task with two dates
(complete_two_date_regex, bygroups(CompleteTaskText, Date,
CompleteTaskText, Date),
'complete'),
# 2. Complete task with one date
(complete_one_date_regex, bygroups(CompleteTaskText, Date),
'complete'),
# Incomplete task entry points: six total:
# 1. Priority plus date
(priority_date_regex, bygroups(Priority, IncompleteTaskText, Date),
'incomplete'),
# 2. Priority only
(priority_regex, Priority, 'incomplete'),
# 3. Leading date
(date_regex, Date, 'incomplete'),
# 4. Leading context
(context_regex, Context, 'incomplete'),
# 5. Leading project
(project_regex, Project, 'incomplete'),
# 6. Non-whitespace catch-all
            (r'\S+', IncompleteTaskText, 'incomplete'),
],
# Parse a complete task
'complete': [
# Newline indicates end of task, should return to root
(r'\s*\n', CompleteTaskText, '#pop'),
# Tokenize contexts and projects
(context_regex, Context),
(project_regex, Project),
# Tokenize non-whitespace text
            (r'\S+', CompleteTaskText),
            # Tokenize whitespace not containing a newline
            (r'\s+', CompleteTaskText),
],
# Parse an incomplete task
'incomplete': [
# Newline indicates end of task, should return to root
(r'\s*\n', IncompleteTaskText, '#pop'),
# Tokenize contexts and projects
(context_regex, Context),
(project_regex, Project),
# Tokenize non-whitespace text
            (r'\S+', IncompleteTaskText),
            # Tokenize whitespace not containing a newline
            (r'\s+', IncompleteTaskText),
],
}
| gpl-3.0 | -7,211,764,571,210,842,000 | 35.538721 | 89 | 0.502488 | false |
vortex-ape/scikit-learn | sklearn/utils/metaestimators.py | 9 | 7751 | """Utilities for meta-estimators"""
# Author: Joel Nothman
# Andreas Mueller
# License: BSD
from abc import ABCMeta, abstractmethod
from operator import attrgetter
from functools import update_wrapper
import numpy as np
from ..utils import safe_indexing
from ..externals import six
from ..base import BaseEstimator
__all__ = ['if_delegate_has_method']
class _BaseComposition(six.with_metaclass(ABCMeta, BaseEstimator)):
"""Handles parameter management for classifiers composed of named estimators.
"""
@abstractmethod
def __init__(self):
pass
def _get_params(self, attr, deep=True):
out = super(_BaseComposition, self).get_params(deep=deep)
if not deep:
return out
estimators = getattr(self, attr)
out.update(estimators)
for name, estimator in estimators:
if hasattr(estimator, 'get_params'):
for key, value in six.iteritems(
estimator.get_params(deep=True)):
out['%s__%s' % (name, key)] = value
return out
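    # Illustrative key layout: an estimator registered as ('svc', SVC())
    # (hypothetical name/estimator) contributes the key 'svc' itself plus
    # nested keys such as 'svc__C' for each of its own parameters.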
def _set_params(self, attr, **params):
# Ensure strict ordering of parameter setting:
# 1. All steps
if attr in params:
setattr(self, attr, params.pop(attr))
# 2. Step replacement
items = getattr(self, attr)
names = []
if items:
names, _ = zip(*items)
for name in list(six.iterkeys(params)):
if '__' not in name and name in names:
self._replace_estimator(attr, name, params.pop(name))
# 3. Step parameters and other initialisation arguments
super(_BaseComposition, self).set_params(**params)
return self
def _replace_estimator(self, attr, name, new_val):
# assumes `name` is a valid estimator name
new_estimators = list(getattr(self, attr))
for i, (estimator_name, _) in enumerate(new_estimators):
if estimator_name == name:
new_estimators[i] = (name, new_val)
break
setattr(self, attr, new_estimators)
def _validate_names(self, names):
if len(set(names)) != len(names):
raise ValueError('Names provided are not unique: '
'{0!r}'.format(list(names)))
invalid_names = set(names).intersection(self.get_params(deep=False))
if invalid_names:
raise ValueError('Estimator names conflict with constructor '
'arguments: {0!r}'.format(sorted(invalid_names)))
invalid_names = [name for name in names if '__' in name]
if invalid_names:
raise ValueError('Estimator names must not contain __: got '
'{0!r}'.format(invalid_names))
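# Illustrative sketch (hypothetical subclass, not part of this module): a
# composed estimator stores (name, estimator) pairs in an attribute and routes
# parameter access through _get_params/_set_params, so set_params(clf__C=0.1)
# reaches the sub-estimator named 'clf', and set_params(clf=other) replaces it
# wholesale.
#
#     class MyComposite(_BaseComposition):
#         def __init__(self, estimators):
#             self.estimators = estimators   # list of (name, estimator)
#
#         def get_params(self, deep=True):
#             return self._get_params('estimators', deep=deep)
#
#         def set_params(self, **params):
#             return self._set_params('estimators', **params)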
class _IffHasAttrDescriptor(object):
"""Implements a conditional property using the descriptor protocol.
Using this class to create a decorator will raise an ``AttributeError``
if none of the delegates (specified in ``delegate_names``) is an attribute
of the base object or the first found delegate does not have an attribute
``attribute_name``.
This allows ducktyping of the decorated method based on
``delegate.attribute_name``. Here ``delegate`` is the first item in
``delegate_names`` for which ``hasattr(object, delegate) is True``.
See https://docs.python.org/3/howto/descriptor.html for an explanation of
descriptors.
"""
def __init__(self, fn, delegate_names, attribute_name):
self.fn = fn
self.delegate_names = delegate_names
self.attribute_name = attribute_name
# update the docstring of the descriptor
update_wrapper(self, fn)
def __get__(self, obj, type=None):
# raise an AttributeError if the attribute is not present on the object
if obj is not None:
# delegate only on instances, not the classes.
# this is to allow access to the docstrings.
for delegate_name in self.delegate_names:
try:
delegate = attrgetter(delegate_name)(obj)
except AttributeError:
continue
else:
getattr(delegate, self.attribute_name)
break
else:
attrgetter(self.delegate_names[-1])(obj)
# lambda, but not partial, allows help() to work with update_wrapper
out = lambda *args, **kwargs: self.fn(obj, *args, **kwargs)
# update the docstring of the returned function
update_wrapper(out, self.fn)
return out
def if_delegate_has_method(delegate):
"""Create a decorator for methods that are delegated to a sub-estimator
This enables ducktyping by hasattr returning True according to the
sub-estimator.
Parameters
----------
delegate : string, list of strings or tuple of strings
Name of the sub-estimator that can be accessed as an attribute of the
base object. If a list or a tuple of names are provided, the first
sub-estimator that is an attribute of the base object will be used.
"""
if isinstance(delegate, list):
delegate = tuple(delegate)
if not isinstance(delegate, tuple):
delegate = (delegate,)
return lambda fn: _IffHasAttrDescriptor(fn, delegate,
attribute_name=fn.__name__)
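# Illustrative usage (sketch; ``MetaEst`` is hypothetical). The decorated
# method only "exists" -- hasattr returns True -- when the delegate has it:
#
#     class MetaEst(object):
#         def __init__(self, sub_est):
#             self.sub_est = sub_est
#
#         @if_delegate_has_method(delegate='sub_est')
#         def predict(self, X):
#             return self.sub_est.predict(X)
#
#     # hasattr(MetaEst(est), 'predict') mirrors hasattr(est, 'predict')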
def _safe_split(estimator, X, y, indices, train_indices=None):
"""Create subset of dataset and properly handle kernels.
Slice X, y according to indices for cross-validation, but take care of
precomputed kernel-matrices or pairwise affinities / distances.
If ``estimator._pairwise is True``, X needs to be square and
we slice rows and columns. If ``train_indices`` is not None,
we slice rows using ``indices`` (assumed the test set) and columns
using ``train_indices``, indicating the training set.
Labels y will always be indexed only along the first axis.
Parameters
----------
estimator : object
Estimator to determine whether we should slice only rows or rows and
columns.
X : array-like, sparse matrix or iterable
Data to be indexed. If ``estimator._pairwise is True``,
this needs to be a square array-like or sparse matrix.
y : array-like, sparse matrix or iterable
Targets to be indexed.
indices : array of int
Rows to select from X and y.
If ``estimator._pairwise is True`` and ``train_indices is None``
then ``indices`` will also be used to slice columns.
train_indices : array of int or None, default=None
If ``estimator._pairwise is True`` and ``train_indices is not None``,
        then ``train_indices`` will be used to slice the columns of X.
Returns
-------
X_subset : array-like, sparse matrix or list
Indexed data.
y_subset : array-like, sparse matrix or list
Indexed targets.
"""
if getattr(estimator, "_pairwise", False):
if not hasattr(X, "shape"):
raise ValueError("Precomputed kernels or affinity matrices have "
"to be passed as arrays or sparse matrices.")
# X is a precomputed square kernel matrix
if X.shape[0] != X.shape[1]:
raise ValueError("X should be a square kernel matrix")
if train_indices is None:
X_subset = X[np.ix_(indices, indices)]
else:
X_subset = X[np.ix_(indices, train_indices)]
else:
X_subset = safe_indexing(X, indices)
if y is not None:
y_subset = safe_indexing(y, indices)
else:
y_subset = None
return X_subset, y_subset
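# Illustrative example (assumes this module is importable as
# sklearn.utils.metaestimators; the kernel and index arrays are made up):
#
#     import numpy as np
#     from sklearn.svm import SVC   # SVC(kernel='precomputed') is pairwise
#
#     K = np.random.rand(10, 10)    # square precomputed kernel
#     y = np.arange(10)
#     train, test = np.arange(7), np.arange(7, 10)
#     est = SVC(kernel='precomputed')
#     K_test, y_test = _safe_split(est, K, y, test, train)
#     # K_test.shape == (3, 7): test rows sliced against training columns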
| bsd-3-clause | -7,314,363,191,375,173,000 | 35.909524 | 81 | 0.615404 | false |
2manysecrets/parallel-wget | testenv/WgetTest.py | 1 | 11760 | import os
import shutil
import shlex
import sys
import traceback
import HTTPServer
import re
import time
from subprocess import call
from ColourTerm import printer
from difflib import unified_diff
HTTP = "HTTP"
HTTPS = "HTTPS"
""" A Custom Exception raised by the Test Environment. """
class TestFailed (Exception):
def __init__ (self, error):
self.error = error
""" Class that defines methods common to both HTTP and FTP Tests. """
class CommonMethods:
TestFailed = TestFailed
def init_test_env (self, name):
testDir = name + "-test"
try:
os.mkdir (testDir)
except FileExistsError:
shutil.rmtree (testDir)
os.mkdir (testDir)
os.chdir (testDir)
self.tests_passed = True
def get_domain_addr (self, addr):
self.port = str (addr[1])
return addr[0] + ":" + str(addr[1]) + "/"
def exec_wget (self, options, urls, domain_list):
cmd_line = self.get_cmd_line (options, urls, domain_list)
params = shlex.split (cmd_line)
print (params)
if os.getenv ("SERVER_WAIT"):
time.sleep (float (os.getenv ("SERVER_WAIT")))
try:
retcode = call (params)
except FileNotFoundError as filenotfound:
raise TestFailed (
"The Wget Executable does not exist at the expected path")
return retcode
def get_cmd_line (self, options, urls, domain_list):
TEST_PATH = os.path.abspath (".")
WGET_PATH = os.path.join (TEST_PATH, "..", "..", "src", "wget")
WGET_PATH = os.path.abspath (WGET_PATH)
cmd_line = WGET_PATH + " " + options + " "
for i in range (0, self.servers):
for url in urls[i]:
protocol = "http://" if self.server_types[i] is "HTTP" else "https://"
cmd_line += protocol + domain_list[i] + url + " "
# for url in urls:
# cmd_line += domain_list[0] + url + " "
print (cmd_line)
return cmd_line
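    # Illustrative result (path and port are made up): for options "-r", a
    # single HTTP server and urls [["/File1"]], this returns roughly
    #   "/abs/path/to/src/wget -r http://127.0.0.1:43210/File1 "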
def __test_cleanup (self):
testDir = self.name + "-test"
os.chdir ('..')
try:
if os.getenv ("NO_CLEANUP") is None:
shutil.rmtree (testDir)
except Exception as ae:
print ("Unknown Exception while trying to remove Test Environment.")
def _exit_test (self):
self.__test_cleanup ()
def begin (self):
return 0 if self.tests_passed else 100
""" Methods to check if the Test Case passes or not. """
def __gen_local_filesys (self):
file_sys = dict ()
for parent, dirs, files in os.walk ('.'):
for name in files:
onefile = dict ()
# Create the full path to file, removing the leading ./
# Might not work on non-unix systems. Someone please test.
filepath = os.path.join (parent, name)
file_handle = open (filepath, 'r')
file_content = file_handle.read ()
onefile['content'] = file_content
filepath = filepath[2:]
file_sys[filepath] = onefile
file_handle.close ()
return file_sys
def __check_downloaded_files (self, exp_filesys):
local_filesys = self.__gen_local_filesys ()
for files in exp_filesys:
if files.name in local_filesys:
local_file = local_filesys.pop (files.name)
if files.content != local_file ['content']:
for line in unified_diff (local_file['content'], files.content, fromfile="Actual", tofile="Expected"):
sys.stderr.write (line)
raise TestFailed ("Contents of " + files.name + " do not match")
else:
raise TestFailed ("Expected file " + files.name + " not found")
if local_filesys:
print (local_filesys)
raise TestFailed ("Extra files downloaded.")
def _replace_substring (self, string):
        pattern = re.compile (r'\{\{\w+\}\}')
match_obj = pattern.search (string)
if match_obj is not None:
rep = match_obj.group()
temp = getattr (self, rep.strip ('{}'))
string = string.replace (rep, temp)
return string
""" Test Rule Definitions """
""" This should really be taken out soon. All this extra stuff to ensure
re-use of old code is crap. Someone needs to re-write it. The new rework
branch is much better written, but integrating it requires effort.
All these classes should never exist. The whole server needs to modified.
"""
class Authentication:
def __init__ (self, auth_obj):
self.auth_type = auth_obj['Type']
self.auth_user = auth_obj['User']
self.auth_pass = auth_obj['Pass']
class ExpectHeader:
def __init__ (self, header_obj):
self.headers = header_obj
class RejectHeader:
def __init__ (self, header_obj):
self.headers = header_obj
class Response:
def __init__ (self, retcode):
self.response_code = retcode
class SendHeader:
def __init__ (self, header_obj):
self.headers = header_obj
def get_server_rules (self, file_obj):
""" The handling of expect header could be made much better when the
options are parsed in a true and better fashion. For an example,
see the commented portion in Test-basic-auth.py.
"""
server_rules = dict ()
for rule in file_obj.rules:
r_obj = getattr (self, rule) (file_obj.rules[rule])
server_rules[rule] = r_obj
return server_rules
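    # Illustrative rules attached to a WgetFile (keys must match the rule
    # class names defined above; the values here are made up):
    #
    #     rules = {"ExpectHeader": {"Accept-Language": "en-us"},
    #              "Response": 404}
    #     File1 = WgetFile ("File1", "Some Content", rules=rules)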
""" Pre-Test Hook Function Calls """
def ServerFiles (self, server_files):
for i in range (0, self.servers):
file_list = dict ()
server_rules = dict ()
for file_obj in server_files[i]:
content = self._replace_substring (file_obj.content)
file_list[file_obj.name] = content
rule_obj = self.get_server_rules (file_obj)
server_rules[file_obj.name] = rule_obj
self.server_list[i].server_conf (file_list, server_rules)
def LocalFiles (self, local_files):
for file_obj in local_files:
file_handler = open (file_obj.name, "w")
file_handler.write (file_obj.content)
file_handler.close ()
def ServerConf (self, server_settings):
for i in range (0, self.servers):
self.server_list[i].server_sett (server_settings)
""" Test Option Function Calls """
def WgetCommands (self, command_list):
self.options = self._replace_substring (command_list)
def Urls (self, url_list):
self.urls = url_list
""" Post-Test Hook Function Calls """
def ExpectedRetcode (self, retcode):
if self.act_retcode != retcode:
pr = "Return codes do not match.\nExpected: " + str(retcode) + "\nActual: " + str(self.act_retcode)
raise TestFailed (pr)
def ExpectedFiles (self, exp_filesys):
self.__check_downloaded_files (exp_filesys)
def FilesCrawled (self, Request_Headers):
for i in range (0, self.servers):
headers = set(Request_Headers[i])
o_headers = self.Request_remaining[i]
header_diff = headers.symmetric_difference (o_headers)
            if len(header_diff) != 0:
printer ("RED", str (header_diff))
raise TestFailed ("Not all files were crawled correctly")
""" Class for HTTP Tests. """
class HTTPTest (CommonMethods):
# Temp Notes: It is expected that when pre-hook functions are executed, only an empty test-dir exists.
# pre-hook functions are executed just prior to the call to Wget is made.
# post-hook functions will be executed immediately after the call to Wget returns.
def __init__ (
self,
name="Unnamed Test",
pre_hook=dict(),
test_params=dict(),
post_hook=dict(),
servers=[HTTP]
):
try:
self.Server_setup (name, pre_hook, test_params, post_hook, servers)
except TestFailed as tf:
printer ("RED", "Error: " + tf.error)
self.tests_passed = False
except Exception as ae:
printer ("RED", "Unhandled Exception Caught.")
print ( ae.__str__ ())
traceback.print_exc ()
self.tests_passed = False
else:
printer ("GREEN", "Test Passed")
finally:
self._exit_test ()
def Server_setup (self, name, pre_hook, test_params, post_hook, servers):
self.name = name
self.server_types = servers
self.servers = len (servers)
printer ("BLUE", "Running Test " + self.name)
self.init_test_env (name)
self.server_list = list()
self.domain_list = list()
for server_type in servers:
server_inst = getattr (self, "init_" + server_type + "_Server") ()
self.server_list.append (server_inst)
domain = self.get_domain_addr (server_inst.server_address)
self.domain_list.append (domain)
#self.server = self.init_HTTP_Server ()
#self.domain = self.get_domain_addr (self.server.server_address)
self.pre_hook_call (pre_hook)
self.call_test (test_params)
self.post_hook_call (post_hook)
def pre_hook_call (self, pre_hook):
for pre_hook_func in pre_hook:
try:
assert hasattr (self, pre_hook_func)
except AssertionError as ae:
self.stop_HTTP_Server ()
raise TestFailed ("Pre Test Function " + pre_hook_func + " not defined.")
getattr (self, pre_hook_func) (pre_hook[pre_hook_func])
def call_test (self, test_params):
for test_func in test_params:
try:
assert hasattr (self, test_func)
except AssertionError as ae:
self.stop_HTTP_Server ()
raise TestFailed ("Test Option " + test_func + " unknown.")
getattr (self, test_func) (test_params[test_func])
try:
self.act_retcode = self.exec_wget (self.options, self.urls, self.domain_list)
except TestFailed as tf:
self.stop_HTTP_Server ()
raise TestFailed (tf.__str__ ())
self.stop_HTTP_Server ()
def post_hook_call (self, post_hook):
for post_hook_func in post_hook:
try:
assert hasattr (self, post_hook_func)
except AssertionError as ae:
raise TestFailed ("Post Test Function " + post_hook_func + " not defined.")
getattr (self, post_hook_func) (post_hook[post_hook_func])
def init_HTTP_Server (self):
server = HTTPServer.HTTPd ()
server.start ()
return server
def init_HTTPS_Server (self):
server = HTTPServer.HTTPSd ()
server.start ()
return server
def stop_HTTP_Server (self):
self.Request_remaining = list ()
for server in self.server_list:
server_req = server.server_inst.get_req_headers ()
self.Request_remaining.append (server_req)
server.server_inst.shutdown ()
""" WgetFile is a File Data Container object """
class WgetFile:
def __init__ (
self,
name,
content="Test Contents",
timestamp=None,
rules=dict()
):
self.name = name
self.content = content
self.timestamp = timestamp
self.rules = rules
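# Illustrative end-to-end sketch (file names, options and expectations are
# made up; the real Test-*.py scripts follow this shape):
#
#     File1 = WgetFile ("File1", "Some Content")
#     pre_test = {"ServerFiles": [[File1]]}
#     test_options = {"WgetCommands": "-r", "Urls": [["/File1"]]}
#     post_test = {"ExpectedRetcode": 0, "ExpectedFiles": [File1]}
#     err = HTTPTest (name="BasicTest", pre_hook=pre_test,
#                     test_params=test_options, post_hook=post_test).begin ()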
# vim: set ts=4 sts=4 sw=4 tw=80 et :
| gpl-3.0 | 3,523,046,395,472,589,000 | 33.896142 | 122 | 0.567517 | false |
openstack/octavia | octavia/tests/unit/common/sample_configs/sample_configs_split.py | 1 | 42888 | # Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import collections
from oslo_config import cfg
from octavia.common import constants
from octavia.tests.common import sample_certs
CONF = cfg.CONF
def sample_amphora_tuple(id='sample_amphora_id_1', lb_network_ip='10.0.1.1',
vrrp_ip='10.1.1.1', ha_ip='192.168.10.1',
vrrp_port_id='1234', ha_port_id='1234', role=None,
status='ACTIVE', vrrp_interface=None,
vrrp_priority=None, api_version='0.5'):
in_amphora = collections.namedtuple(
'amphora', 'id, lb_network_ip, vrrp_ip, ha_ip, vrrp_port_id, '
'ha_port_id, role, status, vrrp_interface,'
'vrrp_priority, api_version')
return in_amphora(
id=id,
lb_network_ip=lb_network_ip,
vrrp_ip=vrrp_ip,
ha_ip=ha_ip,
vrrp_port_id=vrrp_port_id,
ha_port_id=ha_port_id,
role=role,
status=status,
vrrp_interface=vrrp_interface,
vrrp_priority=vrrp_priority,
api_version=api_version)
RET_PERSISTENCE = {
'type': 'HTTP_COOKIE',
'cookie_name': None}
RET_MONITOR_1 = {
'id': 'sample_monitor_id_1',
'type': 'HTTP',
'delay': 30,
'timeout': 31,
'fall_threshold': 3,
'rise_threshold': 2,
'http_method': 'GET',
'url_path': '/index.html',
'expected_codes': '418',
'enabled': True,
'http_version': 1.0,
'domain_name': None}
RET_MONITOR_2 = {
'id': 'sample_monitor_id_2',
'type': 'HTTP',
'delay': 30,
'timeout': 31,
'fall_threshold': 3,
'rise_threshold': 2,
'http_method': 'GET',
'url_path': '/healthmon.html',
'expected_codes': '418',
'enabled': True,
'http_version': 1.0,
'domain_name': None}
RET_MEMBER_1 = {
'id': 'sample_member_id_1',
'address': '10.0.0.99',
'protocol_port': 82,
'weight': 13,
'subnet_id': '10.0.0.1/24',
'enabled': True,
'operating_status': 'ACTIVE',
'monitor_address': None,
'monitor_port': None,
'backup': False}
RET_MEMBER_2 = {
'id': 'sample_member_id_2',
'address': '10.0.0.98',
'protocol_port': 82,
'weight': 13,
'subnet_id': '10.0.0.1/24',
'enabled': True,
'operating_status': 'ACTIVE',
'monitor_address': None,
'monitor_port': None,
'backup': False}
RET_MEMBER_3 = {
'id': 'sample_member_id_3',
'address': '10.0.0.97',
'protocol_port': 82,
'weight': 13,
'subnet_id': '10.0.0.1/24',
'enabled': True,
'operating_status': 'ACTIVE',
'monitor_address': None,
'monitor_port': None,
'backup': False}
RET_POOL_1 = {
'id': 'sample_pool_id_1',
'protocol': 'http',
'lb_algorithm': 'roundrobin',
'members': [RET_MEMBER_1, RET_MEMBER_2],
'health_monitor': RET_MONITOR_1,
'session_persistence': RET_PERSISTENCE,
'enabled': True,
'operating_status': 'ACTIVE',
'stick_size': '10k',
constants.HTTP_REUSE: False,
'ca_tls_path': '',
'crl_path': '',
'tls_enabled': False}
RET_POOL_2 = {
'id': 'sample_pool_id_2',
'protocol': 'http',
'lb_algorithm': 'roundrobin',
'members': [RET_MEMBER_3],
'health_monitor': RET_MONITOR_2,
'session_persistence': RET_PERSISTENCE,
'enabled': True,
'operating_status': 'ACTIVE',
'stick_size': '10k',
constants.HTTP_REUSE: False,
'ca_tls_path': '',
'crl_path': '',
'tls_enabled': False}
RET_DEF_TLS_CONT = {'id': 'cont_id_1', 'allencompassingpem': 'imapem',
'primary_cn': 'FakeCn'}
RET_SNI_CONT_1 = {'id': 'cont_id_2', 'allencompassingpem': 'imapem2',
'primary_cn': 'FakeCn'}
RET_SNI_CONT_2 = {'id': 'cont_id_3', 'allencompassingpem': 'imapem3',
'primary_cn': 'FakeCn2'}
RET_L7RULE_1 = {
'id': 'sample_l7rule_id_1',
'type': constants.L7RULE_TYPE_PATH,
'compare_type': constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
'key': None,
'value': '/api',
'invert': False,
'enabled': True}
RET_L7RULE_2 = {
'id': 'sample_l7rule_id_2',
'type': constants.L7RULE_TYPE_HEADER,
'compare_type': constants.L7RULE_COMPARE_TYPE_CONTAINS,
'key': 'Some-header',
'value': 'This\\ string\\\\\\ with\\ stuff',
'invert': True,
'enabled': True}
RET_L7RULE_3 = {
'id': 'sample_l7rule_id_3',
'type': constants.L7RULE_TYPE_COOKIE,
'compare_type': constants.L7RULE_COMPARE_TYPE_REGEX,
'key': 'some-cookie',
'value': 'this.*|that',
'invert': False,
'enabled': True}
RET_L7RULE_4 = {
'id': 'sample_l7rule_id_4',
'type': constants.L7RULE_TYPE_FILE_TYPE,
'compare_type': constants.L7RULE_COMPARE_TYPE_EQUAL_TO,
'key': None,
'value': 'jpg',
'invert': False,
'enabled': True}
RET_L7RULE_5 = {
'id': 'sample_l7rule_id_5',
'type': constants.L7RULE_TYPE_HOST_NAME,
'compare_type': constants.L7RULE_COMPARE_TYPE_ENDS_WITH,
'key': None,
'value': '.example.com',
'invert': False,
'enabled': True}
RET_L7RULE_6 = {
'id': 'sample_l7rule_id_6',
'type': constants.L7RULE_TYPE_HOST_NAME,
'compare_type': constants.L7RULE_COMPARE_TYPE_ENDS_WITH,
'key': None,
'value': '.example.com',
'invert': False,
'enabled': False}
RET_L7POLICY_1 = {
'id': 'sample_l7policy_id_1',
'action': constants.L7POLICY_ACTION_REDIRECT_TO_POOL,
'redirect_pool': RET_POOL_2,
'redirect_url': None,
'redirect_prefix': None,
'enabled': True,
'l7rules': [RET_L7RULE_1],
'redirect_http_code': None}
RET_L7POLICY_2 = {
'id': 'sample_l7policy_id_2',
'action': constants.L7POLICY_ACTION_REDIRECT_TO_URL,
'redirect_pool': None,
'redirect_url': 'http://www.example.com',
'redirect_prefix': None,
'enabled': True,
'l7rules': [RET_L7RULE_2, RET_L7RULE_3],
'redirect_http_code': 302}
RET_L7POLICY_3 = {
'id': 'sample_l7policy_id_3',
'action': constants.L7POLICY_ACTION_REJECT,
'redirect_pool': None,
'redirect_url': None,
'redirect_prefix': None,
'enabled': True,
'l7rules': [RET_L7RULE_4, RET_L7RULE_5],
'redirect_http_code': None}
RET_L7POLICY_4 = {
'id': 'sample_l7policy_id_4',
'action': constants.L7POLICY_ACTION_REJECT,
'redirect_pool': None,
'redirect_url': None,
'redirect_prefix': None,
'enabled': True,
'l7rules': [],
'redirect_http_code': None}
RET_L7POLICY_5 = {
'id': 'sample_l7policy_id_5',
'action': constants.L7POLICY_ACTION_REJECT,
'redirect_pool': None,
'redirect_url': None,
'redirect_prefix': None,
'enabled': False,
'l7rules': [RET_L7RULE_5],
'redirect_http_code': None}
RET_L7POLICY_6 = {
'id': 'sample_l7policy_id_6',
'action': constants.L7POLICY_ACTION_REJECT,
'redirect_pool': None,
'redirect_url': None,
'redirect_prefix': None,
'enabled': True,
'l7rules': [],
'redirect_http_code': None}
RET_L7POLICY_7 = {
'id': 'sample_l7policy_id_7',
'action': constants.L7POLICY_ACTION_REDIRECT_PREFIX,
'redirect_pool': None,
'redirect_url': None,
'redirect_prefix': 'https://example.com',
'enabled': True,
'l7rules': [RET_L7RULE_2, RET_L7RULE_3],
'redirect_http_code': 302}
RET_L7POLICY_8 = {
'id': 'sample_l7policy_id_8',
'action': constants.L7POLICY_ACTION_REDIRECT_TO_URL,
'redirect_pool': None,
'redirect_url': 'http://www.example.com',
'redirect_prefix': None,
'enabled': True,
'l7rules': [RET_L7RULE_2, RET_L7RULE_3],
'redirect_http_code': None}
RET_LISTENER = {
'id': 'sample_listener_id_1',
'protocol_port': '80',
'protocol': 'HTTP',
'protocol_mode': 'http',
'default_pool': RET_POOL_1,
'connection_limit': constants.HAPROXY_MAX_MAXCONN,
'amphorae': [sample_amphora_tuple()],
'peer_port': 1024,
'topology': 'SINGLE',
'user_log_format': '12345\\ sample_loadbalancer_id_1\\ %f\\ %ci\\ %cp\\ '
'%t\\ %{+Q}r\\ %ST\\ %B\\ %U\\ %[ssl_c_verify]\\ '
'%{+Q}[ssl_c_s_dn]\\ %b\\ %s\\ %Tt\\ %tsc',
'pools': [RET_POOL_1],
'l7policies': [],
'enabled': True,
'insert_headers': {},
'timeout_client_data': 50000,
'timeout_member_connect': 5000,
'timeout_member_data': 50000,
'timeout_tcp_inspect': 0,
}
RET_LISTENER_L7 = {
'id': 'sample_listener_id_1',
'protocol_port': '80',
'protocol': 'HTTP',
'protocol_mode': 'http',
'default_pool': RET_POOL_1,
'connection_limit': constants.HAPROXY_MAX_MAXCONN,
'amphorae': [sample_amphora_tuple()],
'peer_port': 1024,
'topology': 'SINGLE',
'user_log_format': '12345\\ sample_loadbalancer_id_1\\ %f\\ %ci\\ %cp\\ '
'%t\\ %{+Q}r\\ %ST\\ %B\\ %U\\ %[ssl_c_verify]\\ '
'%{+Q}[ssl_c_s_dn]\\ %b\\ %s\\ %Tt\\ %tsc',
'pools': [RET_POOL_1, RET_POOL_2],
'l7policies': [RET_L7POLICY_1, RET_L7POLICY_2, RET_L7POLICY_3,
RET_L7POLICY_4, RET_L7POLICY_5, RET_L7POLICY_6,
RET_L7POLICY_7],
'enabled': True,
'insert_headers': {},
'timeout_client_data': 50000,
'timeout_member_connect': 5000,
'timeout_member_data': 50000,
'timeout_tcp_inspect': 0,
}
RET_LISTENER_TLS = {
'id': 'sample_listener_id_1',
'protocol_port': '443',
'protocol': 'TERMINATED_HTTPS',
'protocol_mode': 'http',
'default_pool': RET_POOL_1,
'connection_limit': constants.HAPROXY_MAX_MAXCONN,
'tls_certificate_id': 'cont_id_1',
'default_tls_path': '/etc/ssl/sample_loadbalancer_id_1/fakeCN.pem',
'default_tls_container': RET_DEF_TLS_CONT,
'pools': [RET_POOL_1],
'l7policies': [],
'enabled': True,
'insert_headers': {}}
RET_LISTENER_TLS_SNI = {
'id': 'sample_listener_id_1',
'protocol_port': '443',
'protocol': 'TERMINATED_HTTPS',
'default_pool': RET_POOL_1,
'connection_limit': constants.HAPROXY_MAX_MAXCONN,
'tls_certificate_id': 'cont_id_1',
'default_tls_path': '/etc/ssl/sample_loadbalancer_id_1/fakeCN.pem',
'default_tls_container': RET_DEF_TLS_CONT,
'crt_dir': '/v2/sample_loadbalancer_id_1',
'sni_container_ids': ['cont_id_2', 'cont_id_3'],
'sni_containers': [RET_SNI_CONT_1, RET_SNI_CONT_2],
'pools': [RET_POOL_1],
'l7policies': [],
'enabled': True,
'insert_headers': {}}
RET_AMPHORA = {
'id': 'sample_amphora_id_1',
'lb_network_ip': '10.0.1.1',
'vrrp_ip': '10.1.1.1',
'ha_ip': '192.168.10.1',
'vrrp_port_id': '1234',
'ha_port_id': '1234',
'role': None,
'status': 'ACTIVE',
'vrrp_interface': None,
'vrrp_priority': None}
RET_LB = {
'host_amphora': RET_AMPHORA,
'id': 'sample_loadbalancer_id_1',
'vip_address': '10.0.0.2',
'listener': RET_LISTENER,
'topology': 'SINGLE',
'enabled': True,
'global_connection_limit': constants.HAPROXY_MAX_MAXCONN}
RET_LB_L7 = {
'host_amphora': RET_AMPHORA,
'id': 'sample_loadbalancer_id_1',
'vip_address': '10.0.0.2',
'listener': RET_LISTENER_L7,
'topology': 'SINGLE',
'enabled': True,
'global_connection_limit': constants.HAPROXY_MAX_MAXCONN}
UDP_SOURCE_IP_BODY = {
'type': constants.SESSION_PERSISTENCE_SOURCE_IP,
'persistence_timeout': 33,
'persistence_granularity': '255.0.0.0'
}
RET_UDP_HEALTH_MONITOR = {
'id': 'sample_monitor_id_1',
'type': constants.HEALTH_MONITOR_UDP_CONNECT,
'delay': 30,
'timeout': 31,
'enabled': True,
'fall_threshold': 3,
'check_script_path': (CONF.haproxy_amphora.base_path +
'/lvs/check/udp_check.sh')
}
UDP_HEALTH_MONITOR_NO_SCRIPT = {
'id': 'sample_monitor_id_1',
'check_script_path': None,
'delay': 30,
'enabled': True,
'fall_threshold': 3,
'timeout': 31,
'type': 'UDP'
}
RET_UDP_MEMBER = {
'id': 'member_id_1',
'address': '192.0.2.10',
'protocol_port': 82,
'weight': 13,
'enabled': True,
'monitor_address': None,
'monitor_port': None
}
RET_UDP_MEMBER_MONITOR_IP_PORT = {
'id': 'member_id_1',
'address': '192.0.2.10',
'protocol_port': 82,
'weight': 13,
'enabled': True,
'monitor_address': '192.168.1.1',
'monitor_port': 9000
}
UDP_MEMBER_1 = {
'id': 'sample_member_id_1',
'address': '10.0.0.99',
'enabled': True,
'protocol_port': 82,
'weight': 13,
'monitor_address': None,
'monitor_port': None
}
UDP_MEMBER_2 = {
'id': 'sample_member_id_2',
'address': '10.0.0.98',
'enabled': True,
'protocol_port': 82,
'weight': 13,
'monitor_address': None,
'monitor_port': None
}
RET_UDP_POOL = {
'id': 'sample_pool_id_1',
'enabled': True,
'health_monitor': UDP_HEALTH_MONITOR_NO_SCRIPT,
'lb_algorithm': 'rr',
'members': [UDP_MEMBER_1, UDP_MEMBER_2],
'protocol': 'udp',
'session_persistence': UDP_SOURCE_IP_BODY
}
RET_UDP_LISTENER = {
'connection_limit': 98,
'default_pool': {
'id': 'sample_pool_id_1',
'enabled': True,
'health_monitor': RET_UDP_HEALTH_MONITOR,
'lb_algorithm': 'rr',
'members': [UDP_MEMBER_1, UDP_MEMBER_2],
'protocol': 'udp',
'session_persistence': UDP_SOURCE_IP_BODY
},
'enabled': True,
'id': 'sample_listener_id_1',
'protocol_mode': 'udp',
'protocol_port': '80'
}
def sample_loadbalancer_tuple(proto=None, monitor=True, persistence=True,
persistence_type=None, tls=False, sni=False,
topology=None, l7=False, enabled=True):
proto = 'HTTP' if proto is None else proto
topology = 'SINGLE' if topology is None else topology
    in_lb = collections.namedtuple(
        'load_balancer', 'id, name, protocol, vip, topology, listeners,'
        ' enabled')
return in_lb(
id='sample_loadbalancer_id_1',
name='test-lb',
protocol=proto,
vip=sample_vip_tuple(),
topology=topology,
listeners=[sample_listener_tuple(proto=proto, monitor=monitor,
persistence=persistence,
persistence_type=persistence_type,
tls=tls,
sni=sni,
l7=l7,
enabled=enabled)],
enabled=enabled
)
def sample_listener_loadbalancer_tuple(proto=None, topology=None,
enabled=True):
proto = 'HTTP' if proto is None else proto
if topology and topology in ['ACTIVE_STANDBY', 'ACTIVE_ACTIVE']:
more_amp = True
else:
more_amp = False
topology = constants.TOPOLOGY_SINGLE
in_lb = collections.namedtuple(
'load_balancer', 'id, name, protocol, vip, amphorae, topology, '
'listeners, enabled, project_id')
return in_lb(
id='sample_loadbalancer_id_1',
name='test-lb',
protocol=proto,
vip=sample_vip_tuple(),
amphorae=[sample_amphora_tuple(role=constants.ROLE_MASTER),
sample_amphora_tuple(
id='sample_amphora_id_2',
lb_network_ip='10.0.1.2',
vrrp_ip='10.1.1.2',
role=constants.ROLE_BACKUP)]
if more_amp else [sample_amphora_tuple()],
topology=topology,
listeners=[],
enabled=enabled,
project_id='12345'
)
def sample_lb_with_udp_listener_tuple(
proto=None, topology=None, enabled=True, pools=None):
proto = 'HTTP' if proto is None else proto
if topology and topology in ['ACTIVE_STANDBY', 'ACTIVE_ACTIVE']:
more_amp = True
else:
more_amp = False
topology = constants.TOPOLOGY_SINGLE
listeners = [sample_listener_tuple(
proto=constants.PROTOCOL_UDP,
persistence_type=constants.SESSION_PERSISTENCE_SOURCE_IP,
persistence_timeout=33,
persistence_granularity='255.255.0.0',
monitor_proto=constants.HEALTH_MONITOR_UDP_CONNECT)]
in_lb = collections.namedtuple(
'load_balancer', 'id, name, protocol, vip, amphorae, topology, '
'pools, enabled, project_id, listeners')
return in_lb(
id='sample_loadbalancer_id_1',
name='test-lb',
protocol=proto,
vip=sample_vip_tuple(),
amphorae=[sample_amphora_tuple(role=constants.ROLE_MASTER),
sample_amphora_tuple(
id='sample_amphora_id_2',
lb_network_ip='10.0.1.2',
vrrp_ip='10.1.1.2',
role=constants.ROLE_BACKUP)]
if more_amp else [sample_amphora_tuple()],
topology=topology,
listeners=listeners,
pools=pools or [],
enabled=enabled,
project_id='12345'
)
def sample_vrrp_group_tuple():
in_vrrp_group = collections.namedtuple(
'vrrp_group', 'load_balancer_id, vrrp_auth_type, vrrp_auth_pass, '
'advert_int, smtp_server, smtp_connect_timeout, '
'vrrp_group_name')
return in_vrrp_group(
vrrp_group_name='sample_loadbalancer_id_1',
load_balancer_id='sample_loadbalancer_id_1',
vrrp_auth_type='PASS',
vrrp_auth_pass='123',
advert_int='1',
smtp_server='',
smtp_connect_timeout='')
def sample_vip_tuple():
vip = collections.namedtuple('vip', 'ip_address')
return vip(ip_address='10.0.0.2')
def sample_listener_tuple(proto=None, monitor=True, alloc_default_pool=True,
persistence=True, persistence_type=None,
persistence_cookie=None, persistence_timeout=None,
persistence_granularity=None,
tls=False, sni=False, peer_port=None, topology=None,
l7=False, enabled=True, insert_headers=None,
be_proto=None, monitor_ip_port=False,
monitor_proto=None, monitor_expected_codes=None,
backup_member=False, disabled_member=False,
connection_limit=-1,
timeout_client_data=50000,
timeout_member_connect=5000,
timeout_member_data=50000,
timeout_tcp_inspect=0,
client_ca_cert=False, client_crl_cert=False,
ssl_type_l7=False, pool_cert=False,
pool_ca_cert=False, pool_crl=False,
tls_enabled=False, hm_host_http_check=False,
id='sample_listener_id_1', recursive_nest=False,
provisioning_status=constants.ACTIVE):
proto = 'HTTP' if proto is None else proto
if be_proto is None:
be_proto = 'HTTP' if proto == 'TERMINATED_HTTPS' else proto
topology = 'SINGLE' if topology is None else topology
port = '443' if proto in ['HTTPS', 'TERMINATED_HTTPS'] else '80'
peer_port = 1024 if peer_port is None else peer_port
insert_headers = insert_headers or {}
in_listener = collections.namedtuple(
'listener', 'id, project_id, protocol_port, protocol, default_pool, '
'connection_limit, tls_certificate_id, '
'sni_container_ids, default_tls_container, '
'sni_containers, load_balancer, peer_port, pools, '
'l7policies, enabled, insert_headers, timeout_client_data,'
'timeout_member_connect, timeout_member_data, '
'timeout_tcp_inspect, client_ca_tls_certificate_id, '
'client_ca_tls_certificate, client_authentication, '
'client_crl_container_id, provisioning_status')
if l7:
pools = [
sample_pool_tuple(
proto=be_proto, monitor=monitor, persistence=persistence,
persistence_type=persistence_type,
persistence_cookie=persistence_cookie,
monitor_ip_port=monitor_ip_port, monitor_proto=monitor_proto,
pool_cert=pool_cert, pool_ca_cert=pool_ca_cert,
pool_crl=pool_crl, tls_enabled=tls_enabled,
hm_host_http_check=hm_host_http_check),
sample_pool_tuple(
proto=be_proto, monitor=monitor, persistence=persistence,
persistence_type=persistence_type,
persistence_cookie=persistence_cookie, sample_pool=2,
monitor_ip_port=monitor_ip_port, monitor_proto=monitor_proto,
pool_cert=pool_cert, pool_ca_cert=pool_ca_cert,
pool_crl=pool_crl, tls_enabled=tls_enabled,
hm_host_http_check=hm_host_http_check)]
l7policies = [
sample_l7policy_tuple('sample_l7policy_id_1', sample_policy=1),
sample_l7policy_tuple('sample_l7policy_id_2', sample_policy=2),
sample_l7policy_tuple('sample_l7policy_id_3', sample_policy=3),
sample_l7policy_tuple('sample_l7policy_id_4', sample_policy=4),
sample_l7policy_tuple('sample_l7policy_id_5', sample_policy=5),
sample_l7policy_tuple('sample_l7policy_id_6', sample_policy=6),
sample_l7policy_tuple('sample_l7policy_id_7', sample_policy=7)]
if ssl_type_l7:
l7policies.append(sample_l7policy_tuple(
'sample_l7policy_id_8', sample_policy=8))
else:
pools = [
sample_pool_tuple(
proto=be_proto, monitor=monitor, persistence=persistence,
persistence_type=persistence_type,
persistence_cookie=persistence_cookie,
monitor_ip_port=monitor_ip_port, monitor_proto=monitor_proto,
backup_member=backup_member, disabled_member=disabled_member,
pool_cert=pool_cert, pool_ca_cert=pool_ca_cert,
pool_crl=pool_crl, tls_enabled=tls_enabled,
hm_host_http_check=hm_host_http_check)]
l7policies = []
listener = in_listener(
id=id,
project_id='12345',
protocol_port=port,
protocol=proto,
load_balancer=sample_listener_loadbalancer_tuple(proto=proto,
topology=topology),
peer_port=peer_port,
default_pool=sample_pool_tuple(
proto=be_proto, monitor=monitor, persistence=persistence,
persistence_type=persistence_type,
persistence_cookie=persistence_cookie,
persistence_timeout=persistence_timeout,
persistence_granularity=persistence_granularity,
monitor_ip_port=monitor_ip_port,
monitor_proto=monitor_proto,
monitor_expected_codes=monitor_expected_codes,
pool_cert=pool_cert,
pool_ca_cert=pool_ca_cert,
pool_crl=pool_crl,
tls_enabled=tls_enabled,
hm_host_http_check=hm_host_http_check
) if alloc_default_pool else '',
connection_limit=connection_limit,
tls_certificate_id='cont_id_1' if tls else '',
sni_container_ids=['cont_id_2', 'cont_id_3'] if sni else [],
default_tls_container=sample_tls_container_tuple(
id='cont_id_1', certificate=sample_certs.X509_CERT,
private_key=sample_certs.X509_CERT_KEY,
intermediates=sample_certs.X509_IMDS_LIST,
primary_cn=sample_certs.X509_CERT_CN
) if tls else '',
sni_containers=[
sample_tls_sni_container_tuple(
tls_container_id='cont_id_2',
tls_container=sample_tls_container_tuple(
id='cont_id_2', certificate=sample_certs.X509_CERT_2,
private_key=sample_certs.X509_CERT_KEY_2,
intermediates=sample_certs.X509_IMDS_LIST,
primary_cn=sample_certs.X509_CERT_CN_2)),
sample_tls_sni_container_tuple(
tls_container_id='cont_id_3',
tls_container=sample_tls_container_tuple(
id='cont_id_3', certificate=sample_certs.X509_CERT_3,
private_key=sample_certs.X509_CERT_KEY_3,
intermediates=sample_certs.X509_IMDS_LIST,
primary_cn=sample_certs.X509_CERT_CN_3))]
if sni else [],
pools=pools,
l7policies=l7policies,
enabled=enabled,
insert_headers=insert_headers,
timeout_client_data=timeout_client_data,
timeout_member_connect=timeout_member_connect,
timeout_member_data=timeout_member_data,
timeout_tcp_inspect=timeout_tcp_inspect,
client_ca_tls_certificate_id='cont_id_ca' if client_ca_cert else '',
client_ca_tls_certificate=sample_tls_container_tuple(
id='cont_id_ca', certificate=sample_certs.X509_CA_CERT,
primary_cn=sample_certs.X509_CA_CERT_CN
) if client_ca_cert else '',
client_authentication=(
constants.CLIENT_AUTH_MANDATORY if client_ca_cert else
constants.CLIENT_AUTH_NONE),
client_crl_container_id='cont_id_crl' if client_crl_cert else '',
provisioning_status=provisioning_status,
)
if recursive_nest:
listener.load_balancer.listeners.append(listener)
return listener
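# Illustrative call (e.g. in template-rendering tests): a TERMINATED_HTTPS
# listener with a default certificate plus two SNI containers:
#
#     listener = sample_listener_tuple(proto='TERMINATED_HTTPS',
#                                      tls=True, sni=True)
#     # listener.protocol_port == '443'
#     # listener.default_tls_container.primary_cn == sample_certs.X509_CERT_CN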
def sample_tls_sni_container_tuple(tls_container_id=None, tls_container=None):
sc = collections.namedtuple('sni_container', 'tls_container_id, '
'tls_container')
return sc(tls_container_id=tls_container_id, tls_container=tls_container)
def sample_tls_sni_containers_tuple(tls_container_id=None, tls_container=None):
sc = collections.namedtuple('sni_containers', 'tls_container_id, '
'tls_container')
return [sc(tls_container_id=tls_container_id, tls_container=tls_container)]
def sample_tls_container_tuple(id='cont_id_1', certificate=None,
private_key=None, intermediates=None,
primary_cn=None):
sc = collections.namedtuple(
'tls_container',
'id, certificate, private_key, intermediates, primary_cn')
return sc(id=id, certificate=certificate, private_key=private_key,
intermediates=intermediates or [], primary_cn=primary_cn)
def sample_pool_tuple(proto=None, monitor=True, persistence=True,
persistence_type=None, persistence_cookie=None,
persistence_timeout=None, persistence_granularity=None,
sample_pool=1, monitor_ip_port=False,
monitor_proto=None, monitor_expected_codes=None,
backup_member=False,
disabled_member=False, has_http_reuse=True,
pool_cert=False, pool_ca_cert=False, pool_crl=False,
tls_enabled=False, hm_host_http_check=False,
provisioning_status=constants.ACTIVE):
proto = 'HTTP' if proto is None else proto
monitor_proto = proto if monitor_proto is None else monitor_proto
in_pool = collections.namedtuple(
'pool', 'id, protocol, lb_algorithm, members, health_monitor, '
'session_persistence, enabled, operating_status, '
'tls_certificate_id, ca_tls_certificate_id, '
'crl_container_id, tls_enabled, provisioning_status, ' +
constants.HTTP_REUSE)
if (proto == constants.PROTOCOL_UDP and
persistence_type == constants.SESSION_PERSISTENCE_SOURCE_IP):
kwargs = {'persistence_type': persistence_type,
'persistence_timeout': persistence_timeout,
'persistence_granularity': persistence_granularity}
else:
kwargs = {'persistence_type': persistence_type,
'persistence_cookie': persistence_cookie}
persis = sample_session_persistence_tuple(**kwargs)
mon = None
if sample_pool == 1:
id = 'sample_pool_id_1'
members = [sample_member_tuple('sample_member_id_1', '10.0.0.99',
monitor_ip_port=monitor_ip_port),
sample_member_tuple('sample_member_id_2', '10.0.0.98',
monitor_ip_port=monitor_ip_port,
backup=backup_member,
enabled=not disabled_member)]
if monitor is True:
mon = sample_health_monitor_tuple(
proto=monitor_proto,
host_http_check=hm_host_http_check,
expected_codes=monitor_expected_codes)
elif sample_pool == 2:
id = 'sample_pool_id_2'
members = [sample_member_tuple('sample_member_id_3', '10.0.0.97',
monitor_ip_port=monitor_ip_port)]
if monitor is True:
mon = sample_health_monitor_tuple(
proto=monitor_proto, sample_hm=2,
host_http_check=hm_host_http_check,
expected_codes=monitor_expected_codes)
return in_pool(
id=id,
protocol=proto,
lb_algorithm='ROUND_ROBIN',
members=members,
health_monitor=mon,
session_persistence=persis if persistence is True else None,
enabled=True,
operating_status='ACTIVE', has_http_reuse=has_http_reuse,
tls_certificate_id='pool_cont_1' if pool_cert else None,
ca_tls_certificate_id='pool_ca_1' if pool_ca_cert else None,
crl_container_id='pool_crl' if pool_crl else None,
tls_enabled=tls_enabled, provisioning_status=provisioning_status)
def sample_member_tuple(id, ip, enabled=True,
operating_status=constants.ACTIVE,
provisioning_status=constants.ACTIVE,
monitor_ip_port=False, backup=False):
in_member = collections.namedtuple('member',
'id, ip_address, protocol_port, '
'weight, subnet_id, '
'enabled, operating_status, '
'monitor_address, monitor_port, '
'backup, provisioning_status')
monitor_address = '192.168.1.1' if monitor_ip_port else None
monitor_port = 9000 if monitor_ip_port else None
return in_member(
id=id,
ip_address=ip,
protocol_port=82,
weight=13,
subnet_id='10.0.0.1/24',
enabled=enabled,
operating_status=operating_status,
monitor_address=monitor_address,
monitor_port=monitor_port,
backup=backup, provisioning_status=provisioning_status)
def sample_session_persistence_tuple(persistence_type=None,
persistence_cookie=None,
persistence_timeout=None,
persistence_granularity=None):
spersistence = collections.namedtuple('SessionPersistence',
'type, cookie_name, '
'persistence_timeout, '
'persistence_granularity')
pt = 'HTTP_COOKIE' if persistence_type is None else persistence_type
return spersistence(type=pt,
cookie_name=persistence_cookie,
persistence_timeout=persistence_timeout,
persistence_granularity=persistence_granularity)
def sample_health_monitor_tuple(proto='HTTP', sample_hm=1,
host_http_check=False, expected_codes=None,
provisioning_status=constants.ACTIVE):
proto = 'HTTP' if proto == 'TERMINATED_HTTPS' else proto
monitor = collections.namedtuple(
'monitor', 'id, type, delay, timeout, fall_threshold, rise_threshold,'
'http_method, url_path, expected_codes, enabled, '
'check_script_path, http_version, domain_name, '
'provisioning_status')
if sample_hm == 1:
id = 'sample_monitor_id_1'
url_path = '/index.html'
elif sample_hm == 2:
id = 'sample_monitor_id_2'
url_path = '/healthmon.html'
kwargs = {
'id': id,
'type': proto,
'delay': 30,
'timeout': 31,
'fall_threshold': 3,
'rise_threshold': 2,
'http_method': 'GET',
'url_path': url_path,
'expected_codes': '418',
'enabled': True,
'provisioning_status': provisioning_status,
}
if host_http_check:
kwargs.update({'http_version': 1.1, 'domain_name': 'testlab.com'})
else:
kwargs.update({'http_version': 1.0, 'domain_name': None})
if expected_codes:
kwargs.update({'expected_codes': expected_codes})
if proto == constants.HEALTH_MONITOR_UDP_CONNECT:
        kwargs['check_script_path'] = (CONF.haproxy_amphora.base_path +
                                       '/lvs/check/' + 'udp_check.sh')
else:
kwargs['check_script_path'] = None
return monitor(**kwargs)
def sample_l7policy_tuple(id,
action=constants.L7POLICY_ACTION_REJECT,
redirect_pool=None, redirect_url=None,
redirect_prefix=None,
enabled=True, redirect_http_code=302,
sample_policy=1,
provisioning_status=constants.ACTIVE):
in_l7policy = collections.namedtuple('l7policy',
'id, action, redirect_pool, '
'redirect_url, redirect_prefix, '
'l7rules, enabled, '
'redirect_http_code, '
'provisioning_status')
l7rules = []
if sample_policy == 1:
action = constants.L7POLICY_ACTION_REDIRECT_TO_POOL
redirect_pool = sample_pool_tuple(sample_pool=2)
l7rules = [sample_l7rule_tuple('sample_l7rule_id_1')]
elif sample_policy == 2:
action = constants.L7POLICY_ACTION_REDIRECT_TO_URL
redirect_url = 'http://www.example.com'
l7rules = [sample_l7rule_tuple('sample_l7rule_id_2', sample_rule=2),
sample_l7rule_tuple('sample_l7rule_id_3', sample_rule=3)]
elif sample_policy == 3:
action = constants.L7POLICY_ACTION_REJECT
l7rules = [sample_l7rule_tuple('sample_l7rule_id_4', sample_rule=4),
sample_l7rule_tuple('sample_l7rule_id_5', sample_rule=5)]
elif sample_policy == 4:
action = constants.L7POLICY_ACTION_REJECT
elif sample_policy == 5:
action = constants.L7POLICY_ACTION_REJECT
enabled = False
l7rules = [sample_l7rule_tuple('sample_l7rule_id_5', sample_rule=5)]
elif sample_policy == 6:
action = constants.L7POLICY_ACTION_REJECT
l7rules = [sample_l7rule_tuple('sample_l7rule_id_6', sample_rule=6)]
elif sample_policy == 7:
action = constants.L7POLICY_ACTION_REDIRECT_PREFIX
redirect_prefix = 'https://example.com'
l7rules = [sample_l7rule_tuple('sample_l7rule_id_2', sample_rule=2),
sample_l7rule_tuple('sample_l7rule_id_3', sample_rule=3)]
elif sample_policy == 8:
action = constants.L7POLICY_ACTION_REDIRECT_TO_URL
redirect_url = 'http://www.ssl-type-l7rule-test.com'
l7rules = [sample_l7rule_tuple('sample_l7rule_id_7', sample_rule=7),
sample_l7rule_tuple('sample_l7rule_id_8', sample_rule=8),
sample_l7rule_tuple('sample_l7rule_id_9', sample_rule=9),
sample_l7rule_tuple('sample_l7rule_id_10', sample_rule=10),
sample_l7rule_tuple('sample_l7rule_id_11', sample_rule=11)]
return in_l7policy(
id=id,
action=action,
redirect_pool=redirect_pool,
redirect_url=redirect_url,
redirect_prefix=redirect_prefix,
l7rules=l7rules,
enabled=enabled,
redirect_http_code=redirect_http_code
if (action in [constants.L7POLICY_ACTION_REDIRECT_TO_URL,
constants.L7POLICY_ACTION_REDIRECT_PREFIX] and
redirect_http_code) else None,
provisioning_status=provisioning_status)
def sample_l7rule_tuple(id,
type=constants.L7RULE_TYPE_PATH,
compare_type=constants.L7RULE_COMPARE_TYPE_STARTS_WITH,
key=None, value='/api',
invert=False, enabled=True,
sample_rule=1, provisioning_status=constants.ACTIVE):
in_l7rule = collections.namedtuple('l7rule',
'id, type, compare_type, '
'key, value, invert, enabled, '
'provisioning_status')
if sample_rule == 2:
type = constants.L7RULE_TYPE_HEADER
compare_type = constants.L7RULE_COMPARE_TYPE_CONTAINS
key = 'Some-header'
value = 'This string\\ with stuff'
invert = True
enabled = True
if sample_rule == 3:
type = constants.L7RULE_TYPE_COOKIE
compare_type = constants.L7RULE_COMPARE_TYPE_REGEX
key = 'some-cookie'
value = 'this.*|that'
invert = False
enabled = True
if sample_rule == 4:
type = constants.L7RULE_TYPE_FILE_TYPE
compare_type = constants.L7RULE_COMPARE_TYPE_EQUAL_TO
key = None
value = 'jpg'
invert = False
enabled = True
if sample_rule == 5:
type = constants.L7RULE_TYPE_HOST_NAME
compare_type = constants.L7RULE_COMPARE_TYPE_ENDS_WITH
key = None
value = '.example.com'
invert = False
enabled = True
if sample_rule == 6:
type = constants.L7RULE_TYPE_HOST_NAME
compare_type = constants.L7RULE_COMPARE_TYPE_ENDS_WITH
key = None
value = '.example.com'
invert = False
enabled = False
if sample_rule == 7:
type = constants.L7RULE_TYPE_SSL_CONN_HAS_CERT
compare_type = constants.L7RULE_COMPARE_TYPE_EQUAL_TO
key = None
value = 'tRuE'
invert = False
enabled = True
if sample_rule == 8:
type = constants.L7RULE_TYPE_SSL_VERIFY_RESULT
compare_type = constants.L7RULE_COMPARE_TYPE_EQUAL_TO
key = None
value = '1'
invert = True
enabled = True
if sample_rule == 9:
type = constants.L7RULE_TYPE_SSL_DN_FIELD
compare_type = constants.L7RULE_COMPARE_TYPE_REGEX
key = 'STREET'
value = r'^STREET.*NO\.$'
invert = True
enabled = True
if sample_rule == 10:
type = constants.L7RULE_TYPE_SSL_DN_FIELD
compare_type = constants.L7RULE_COMPARE_TYPE_STARTS_WITH
key = 'OU-3'
value = 'Orgnization Bala'
invert = True
enabled = True
return in_l7rule(
id=id,
type=type,
compare_type=compare_type,
key=key,
value=value,
invert=invert,
enabled=enabled, provisioning_status=provisioning_status)
def sample_base_expected_config(frontend=None, logging=None, backend=None,
peers=None, global_opts=None, defaults=None):
if frontend is None:
frontend = ("frontend sample_listener_id_1\n"
" maxconn {maxconn}\n"
" bind 10.0.0.2:80\n"
" mode http\n"
" default_backend sample_pool_id_1\n"
" timeout client 50000\n").format(
maxconn=constants.HAPROXY_MAX_MAXCONN)
if logging is None:
logging = (" log-format 12345\\ sample_loadbalancer_id_1\\ %f\\ "
"%ci\\ %cp\\ %t\\ %{+Q}r\\ %ST\\ %B\\ %U\\ "
"%[ssl_c_verify]\\ %{+Q}[ssl_c_s_dn]\\ %b\\ %s\\ %Tt\\ "
"%tsc\n\n")
if backend is None:
backend = ("backend sample_pool_id_1\n"
" mode http\n"
" balance roundrobin\n"
" cookie SRV insert indirect nocache\n"
" timeout check 31s\n"
" option httpchk GET /index.html HTTP/1.0\\r\\n\n"
" http-check expect rstatus 418\n"
" fullconn {maxconn}\n"
" option allbackups\n"
" timeout connect 5000\n"
" timeout server 50000\n"
" server sample_member_id_1 10.0.0.99:82 weight 13 "
"check inter 30s fall 3 rise 2 cookie sample_member_id_1\n"
" server sample_member_id_2 10.0.0.98:82 weight 13 "
"check inter 30s fall 3 rise 2 cookie sample_member_id_2\n"
"\n").format(maxconn=constants.HAPROXY_MAX_MAXCONN)
if peers is None:
peers = "\n\n"
if global_opts is None:
global_opts = " maxconn {maxconn}\n\n".format(
maxconn=constants.HAPROXY_MAX_MAXCONN)
if defaults is None:
defaults = ("defaults\n"
" log global\n"
" retries 3\n"
" option redispatch\n"
" option splice-request\n"
" option splice-response\n"
" option http-keep-alive\n\n")
return ("# Configuration for loadbalancer sample_loadbalancer_id_1\n"
"global\n"
" daemon\n"
" user nobody\n"
" log /run/rsyslog/octavia/log local0\n"
" log /run/rsyslog/octavia/log local1 notice\n"
" stats socket /var/lib/octavia/sample_listener_id_1.sock"
" mode 0666 level user\n" +
global_opts + defaults + peers + frontend + logging + backend)
| apache-2.0 | 8,750,399,087,215,040,000 | 36.9876 | 79 | 0.562162 | false |