| repo_name (stringlengths 5-100) | path (stringlengths 4-375) | copies (stringclasses, 991 values) | size (stringlengths 4-7) | content (stringlengths 666-1M) | license (stringclasses, 15 values) |
|---|---|---|---|---|---|
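Each row below carries one source file in the `content` column. A minimal sketch of filtering rows with this schema via the Hugging Face `datasets` library; the dataset identifier is hypothetical and stands in only to make the example runnable:

```python
# Sketch: iterate rows of the schema above and filter by license.
# "org/source-code-dump" is a hypothetical dataset ID; substitute the real one.
from datasets import load_dataset

ds = load_dataset("org/source-code-dump", split="train")
for row in ds:
    if row["license"] == "agpl-3.0":
        print(row["repo_name"], row["path"], row["size"])
```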
sbesson/registry | omero_qa/qa/urls.py | 3 | 2613 | #!/usr/bin/env python
#
#
#
# Copyright (c) 2009 University of Dundee.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Author: Aleksandra Tarkowska <A(dot)Tarkowska(at)dundee(dot)ac(dot)uk>, 2008.
#
# Version: 1.0
#
import os.path
from django.conf.urls.defaults import *
from django.conf import settings
from omero_qa.qa import views
# url patterns
urlpatterns = patterns('',
url( r'^login/$', views.login_processsing, name='qa_login'),
url( r'^reset_token/$', views.reset_token_view, name='qa_reset_token'),
url( r'^logout/$', views.logout_view, name='qa_logout'),
url( r'^register/$', views.register, name='qa_register'),
url( r'^save_email/$', views.save_email_view, name='qa_save_email'),
url( r'^upload/$', views.upload, name='qa_upload'),
url( r'^upload_processing/$', views.upload_processing, name='qa_upload_processing'),
url( r'^web_upload_processing/$', views.upload_from_web_processing, name='qa_web_upload_processing'),
url( r'^initial/$', views.initial_processing, name='qa_initial_processing'),
url( r'^feedback/$', views.feedback, name='feedback'),
url( r'^feedback/(?P<fid>[0-9]+)/(?:(?P<action>((?i)add_comment|(?i)add_user_comment|(?i)status_update))/)?$', views.feedback, name='qa_feedback_id'),
url( r'^feedback/(?P<action>((?i)add|(?i)test_result))/$', views.feedback_action, name='qa_feedback_action'),
#url( r'^test_file/(?P<fid>[0-9]+)/(?P<tid>[0-9]+)/(?P<action>((?i)delete))/$', views.test_file, name='test_file_action'),
url( r'^ticket/(?P<action>((?i)new|(?i)add|(?i)save))/(?P<fid>[0-9]+)/(?:(?P<tid>[0-9]+)/)?$', views.ticket, name='qa_ticket'),
url( r'^error_content/(?P<fid>[0-9]+)/$', views.error_content, name='qa_error_content'),
url( r'^test_error_content/(?P<tid>[0-9]+)/$', views.test_error_content, name='qa_test_error_content'),
# extra
url( r'^metadata_validator/(?P<build_number>[0-9]+)/$', views.metadata_validator, name='qa_metadata_validator'),
)
| agpl-3.0 |
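The urls.py above targets Django's long-removed URL API: `django.conf.urls.defaults` and the `patterns()` helper were dropped in later Django releases, and mid-pattern `(?i)` groups are rejected by modern Python's `re`. For orientation, a hedged sketch of how a few of these routes would look on Django 2.0+; view names are carried over verbatim from the file above (including the `login_processsing` spelling), and the case-insensitive action matching is approximated by simply dropping the `(?i)` groups:

```python
# Sketch only: Django >= 2.0 equivalent of a few routes above.
from django.urls import re_path
from omero_qa.qa import views

urlpatterns = [
    re_path(r'^login/$', views.login_processsing, name='qa_login'),
    re_path(r'^logout/$', views.logout_view, name='qa_logout'),
    # Case-insensitivity of the original (?i) groups is not reproduced here.
    re_path(r'^feedback/(?P<fid>[0-9]+)/(?:(?P<action>add_comment|add_user_comment|status_update)/)?$',
            views.feedback, name='qa_feedback_id'),
]
```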
kamawanu/pydbgr | trepan/lib/print.py | 2 | 4139 | # -*- coding: utf-8 -*-
# Copyright (C) 2007, 2008, 2009, 2010 Rocky Bernstein
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import inspect, types
def print_dict(s, obj, title):
if hasattr(obj, "__dict__"):
obj = obj.__dict__
pass
if isinstance(obj, types.DictType) or isinstance(obj, types.DictProxyType):
s += "\n%s:\n" % title
keys = obj.keys()
keys.sort()
for key in keys:
s+=" %s:\t%s\n" % (repr(key), obj[key])
pass
pass
return s
def print_argspec(obj, obj_name):
'''A slightly decorated version of inspect.formatargspec'''
try:
return obj_name + inspect.formatargspec(*inspect.getargspec(obj))
except:
return None
return # Not reached
def print_obj(arg, frame, format=None, short=False):
"""Return a string representation of an object """
try:
if not frame:
# ?? Should we have set up a dummy globals
# to have persistence?
obj = eval(arg, None, None)
else:
obj = eval(arg, frame.f_globals, frame.f_locals)
pass
except:
return 'No symbol "' + arg + '" in current context.'
#format and print
what = arg
if format:
what = format + ' ' + arg
obj = printf(obj, format)
s = '%s = %s' % (what, obj)
if not short:
s += '\ntype = %s' % type(obj)
if callable(obj):
argspec = print_argspec(obj, arg)
if argspec:
s += ':\n\t'
if inspect.isclass(obj):
s += 'Class constructor information:\n\t'
obj = obj.__init__
elif type(obj) is types.InstanceType:
obj = obj.__call__
pass
s+= argspec
pass
# Try to list the members of a class.
# Not sure if this is correct or the
# best way to do it.
s = print_dict(s, obj, "object variables")
if hasattr(obj, "__class__"):
s = print_dict(s, obj.__class__, "class variables")
pass
return s
pconvert = {'c':chr, 'x': hex, 'o': oct, 'f': float, 's': str}
twos = ('0000', '0001', '0010', '0011', '0100', '0101', '0110', '0111',
'1000', '1001', '1010', '1011', '1100', '1101', '1110', '1111')
def printf(val, fmt):
global pconvert, twos
if not fmt:
fmt = ' ' # not 't' nor in pconvert
# Strip leading '/'
if fmt[0] == '/':
fmt = fmt[1:]
f = fmt[0]
if f in pconvert.keys():
try:
return apply(pconvert[f], (val,))
except:
return str(val)
# binary (t is from 'twos')
if f == 't':
try:
res = ''
while val:
res = twos[val & 0xf] + res
val = val >> 4
return res
except:
return str(val)
return str(val)
if __name__ == '__main__':
print print_dict('', globals(), 'my globals')
print '-' * 40
print print_obj('print_obj', None)
print '-' * 30
print print_obj('Exception', None)
print '-' * 30
print print_argspec('Exception', None)
class Foo:
def __init__(self, bar=None): pass
pass
print print_obj('Foo.__init__', None)
print '-' * 30
print print_argspec(Foo.__init__, '__init__')
assert printf(31, "/o") == '037'
assert printf(31, "/t") == '00011111'
assert printf(33, "/c") == '!'
assert printf(33, "/x") == '0x21'
| gpl-3.0 |
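print.py above is straight Python 2 (`print` statements, `apply()`, `types.DictType`, `types.InstanceType`). As a comparison point, a hedged Python 3 sketch of the same `/`-prefixed format dispatch; the built-in binary format spec replaces the hand-rolled `twos` nibble table, so output spellings differ slightly from the original (noted inline):

```python
# Python 3 sketch of printf() above; not a drop-in port.
PCONVERT = {'c': chr, 'x': hex, 'o': oct, 'f': float, 's': str}

def printf3(val, fmt):
    fmt = (fmt or ' ').lstrip('/')      # strip the leading '/'
    f = fmt[:1]
    try:
        if f == 't':                    # binary, replacing the nibble table
            return format(val, 'b')
        return str(PCONVERT[f](val)) if f in PCONVERT else str(val)
    except (TypeError, ValueError):
        return str(val)

assert printf3(31, "/o") == '0o37'      # Python 3 oct() spelling, not '037'
assert printf3(31, "/t") == '11111'     # no nibble zero-padding, unlike '00011111'
assert printf3(33, "/c") == '!'
assert printf3(33, "/x") == '0x21'
```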
willprice/arduino-sphere-project | scripts/example_direction_finder/temboo/Library/Zendesk/Organizations/ListOrganizationsByUser.py | 5 | 4947 | # -*- coding: utf-8 -*-
###############################################################################
#
# ListOrganizationsByUser
# Lists all organizations by user.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class ListOrganizationsByUser(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the ListOrganizationsByUser Choreo. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied.
"""
super(ListOrganizationsByUser, self).__init__(temboo_session, '/Library/Zendesk/Organizations/ListOrganizationsByUser')
def new_input_set(self):
return ListOrganizationsByUserInputSet()
def _make_result_set(self, result, path):
return ListOrganizationsByUserResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return ListOrganizationsByUserChoreographyExecution(session, exec_id, path)
class ListOrganizationsByUserInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the ListOrganizationsByUser
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_Email(self, value):
"""
Set the value of the Email input for this Choreo. ((required, string) The email address you use to login to your Zendesk account.)
"""
super(ListOrganizationsByUserInputSet, self)._set_input('Email', value)
def set_ID(self, value):
"""
Set the value of the ID input for this Choreo. ((required, string) ID of the user.)
"""
super(ListOrganizationsByUserInputSet, self)._set_input('ID', value)
def set_Page(self, value):
"""
Set the value of the Page input for this Choreo. ((optional, integer) The page number of the results to be returned. Used together with the PerPage parameter to paginate a large set of results.)
"""
super(ListOrganizationsByUserInputSet, self)._set_input('Page', value)
def set_Password(self, value):
"""
Set the value of the Password input for this Choreo. ((required, password) Your Zendesk password.)
"""
super(ListOrganizationsByUserInputSet, self)._set_input('Password', value)
def set_PerPage(self, value):
"""
Set the value of the PerPage input for this Choreo. ((optional, integer) The number of results to return per page. Maximum is 100 and default is 100.)
"""
super(ListOrganizationsByUserInputSet, self)._set_input('PerPage', value)
def set_Server(self, value):
"""
Set the value of the Server input for this Choreo. ((required, string) Your Zendesk domain and subdomain (e.g., temboocare.zendesk.com).)
"""
super(ListOrganizationsByUserInputSet, self)._set_input('Server', value)
class ListOrganizationsByUserResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the ListOrganizationsByUser Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_Response(self):
"""
Retrieve the value for the "Response" output from this Choreo execution. ((json) The response from Zendesk.)
"""
return self._output.get('Response', None)
def get_NextPage(self):
"""
Retrieve the value for the "NextPage" output from this Choreo execution. ((integer) The index for the next page of results.)
"""
return self._output.get('NextPage', None)
def get_PreviousPage(self):
"""
Retrieve the value for the "PreviousPage" output from this Choreo execution. ((integer) The index for the previous page of results.)
"""
return self._output.get('PreviousPage', None)
class ListOrganizationsByUserChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return ListOrganizationsByUserResultSet(response, path)
| gpl-2.0 |
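A hedged usage sketch for the choreography above, following the session/input-set pattern the generated Temboo classes are built around; the `TembooSession` constructor arguments and `execute_with_results` call are taken from Temboo's SDK conventions, the import path assumes the SDK is installed as the `temboo` package, and every credential below is a placeholder:

```python
# Illustrative only; all credentials and inputs are placeholders.
from temboo.core.session import TembooSession
from temboo.Library.Zendesk.Organizations.ListOrganizationsByUser import ListOrganizationsByUser

session = TembooSession('ACCOUNT_NAME', 'APP_KEY_NAME', 'APP_KEY_VALUE')
choreo = ListOrganizationsByUser(session)
inputs = choreo.new_input_set()
inputs.set_Email('[email protected]')
inputs.set_Password('zendesk-password')
inputs.set_Server('example.zendesk.com')
inputs.set_ID('12345')

results = choreo.execute_with_results(inputs)
print(results.get_Response())
```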
hefen1/chromium | third_party/markdown/extensions/attr_list.py | 109 | 6363 | # markdown is released under the BSD license
# Copyright 2007, 2008 The Python Markdown Project (v. 1.7 and later)
# Copyright 2004, 2005, 2006 Yuri Takhteyev (v. 0.2-1.6b)
# Copyright 2004 Manfred Stienstra (the original version)
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the <organization> nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE PYTHON MARKDOWN PROJECT ''AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL ANY CONTRIBUTORS TO THE PYTHON MARKDOWN PROJECT
# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""
Attribute List Extension for Python-Markdown
============================================
Adds attribute list syntax. Inspired by
[maruku](http://maruku.rubyforge.org/proposal.html#attribute_lists)'s
feature of the same name.
Copyright 2011 [Waylan Limberg](http://achinghead.com/).
Contact: [email protected]
License: BSD (see ../LICENSE.md for details)
Dependencies:
* [Python 2.4+](http://python.org)
* [Markdown 2.1+](http://packages.python.org/Markdown/)
"""
from __future__ import absolute_import
from __future__ import unicode_literals
from . import Extension
from ..treeprocessors import Treeprocessor
from ..util import isBlockLevel
import re
try:
Scanner = re.Scanner
except AttributeError:
# must be on Python 2.4
from sre import Scanner
def _handle_double_quote(s, t):
k, v = t.split('=')
return k, v.strip('"')
def _handle_single_quote(s, t):
k, v = t.split('=')
return k, v.strip("'")
def _handle_key_value(s, t):
return t.split('=')
def _handle_word(s, t):
if t.startswith('.'):
return '.', t[1:]
if t.startswith('#'):
return 'id', t[1:]
return t, t
_scanner = Scanner([
(r'[^ ]+=".*?"', _handle_double_quote),
(r"[^ ]+='.*?'", _handle_single_quote),
(r'[^ ]+=[^ ]*', _handle_key_value),
(r'[^ ]+', _handle_word),
(r' ', None)
])
def get_attrs(str):
""" Parse attribute list and return a list of attribute tuples. """
return _scanner.scan(str)[0]
def isheader(elem):
return elem.tag in ['h1', 'h2', 'h3', 'h4', 'h5', 'h6']
class AttrListTreeprocessor(Treeprocessor):
BASE_RE = r'\{\:?([^\}]*)\}'
HEADER_RE = re.compile(r'[ ]*%s[ ]*$' % BASE_RE)
BLOCK_RE = re.compile(r'\n[ ]*%s[ ]*$' % BASE_RE)
INLINE_RE = re.compile(r'^%s' % BASE_RE)
NAME_RE = re.compile(r'[^A-Z_a-z\u00c0-\u00d6\u00d8-\u00f6\u00f8-\u02ff\u0370-\u037d'
r'\u037f-\u1fff\u200c-\u200d\u2070-\u218f\u2c00-\u2fef'
r'\u3001-\ud7ff\uf900-\ufdcf\ufdf0-\ufffd'
r'\:\-\.0-9\u00b7\u0300-\u036f\u203f-\u2040]+')
def run(self, doc):
for elem in doc.getiterator():
if isBlockLevel(elem.tag):
# Block level: check for attrs on last line of text
RE = self.BLOCK_RE
if isheader(elem):
# header: check for attrs at end of line
RE = self.HEADER_RE
if len(elem) and elem[-1].tail:
# has children. Get from tail of last child
m = RE.search(elem[-1].tail)
if m:
self.assign_attrs(elem, m.group(1))
elem[-1].tail = elem[-1].tail[:m.start()]
if isheader(elem):
# clean up trailing #s
elem[-1].tail = elem[-1].tail.rstrip('#').rstrip()
elif elem.text:
# no children. Get from text.
m = RE.search(elem.text)
if m:
self.assign_attrs(elem, m.group(1))
elem.text = elem.text[:m.start()]
if isheader(elem):
# clean up trailing #s
elem.text = elem.text.rstrip('#').rstrip()
else:
# inline: check for attrs at start of tail
if elem.tail:
m = self.INLINE_RE.match(elem.tail)
if m:
self.assign_attrs(elem, m.group(1))
elem.tail = elem.tail[m.end():]
def assign_attrs(self, elem, attrs):
""" Assign attrs to element. """
for k, v in get_attrs(attrs):
if k == '.':
# add to class
cls = elem.get('class')
if cls:
elem.set('class', '%s %s' % (cls, v))
else:
elem.set('class', v)
else:
# assign attr k with v
elem.set(self.sanitize_name(k), v)
def sanitize_name(self, name):
"""
Sanitize name as 'an XML Name, minus the ":"'.
See http://www.w3.org/TR/REC-xml-names/#NT-NCName
"""
return self.NAME_RE.sub('_', name)
class AttrListExtension(Extension):
def extendMarkdown(self, md, md_globals):
md.treeprocessors.add('attr_list', AttrListTreeprocessor(md), '>prettify')
def makeExtension(configs={}):
return AttrListExtension(configs=configs)
| bsd-3-clause |
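A hedged sketch of the syntax this treeprocessor implements, exercised through the standard `markdown` package (the file above is Chromium's vendored snapshot of the same extension, registered under the `attr_list` name):

```python
# Sketch: attribute lists appended to a heading become HTML attributes.
import markdown

text = "# A heading {: #intro .lead }"
print(markdown.markdown(text, extensions=['attr_list']))
# e.g. <h1 class="lead" id="intro">A heading</h1> (attribute order may vary)
```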
drawks/ansible | lib/ansible/modules/cloud/azure/azure_rm_resourcegroup_facts.py | 11 | 6676 | #!/usr/bin/python
#
# Copyright (c) 2016 Matt Davis, <[email protected]>
# Chris Houseknecht, <[email protected]>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: azure_rm_resourcegroup_facts
version_added: "2.1"
short_description: Get resource group facts.
description:
- Get facts for a specific resource group or all resource groups.
options:
name:
description:
- Limit results to a specific resource group.
tags:
description:
- Limit results by providing a list of tags. Format tags as 'key' or 'key:value'.
list_resources:
description:
- List all resources under the resource group.
- Note this will incur network overhead for each resource group. It is suggested to use this only when C(name) is set.
version_added: 2.8
extends_documentation_fragment:
- azure
author:
- "Chris Houseknecht (@chouseknecht)"
- "Matt Davis (@nitzmahone)"
'''
EXAMPLES = '''
- name: Get facts for one resource group
azure_rm_resourcegroup_facts:
name: myResourceGroup
- name: Get facts for all resource groups
azure_rm_resourcegroup_facts:
- name: Get facts by tags
azure_rm_resourcegroup_facts:
tags:
- testing
- foo:bar
- name: Get facts for one resource group including resources it contains
azure_rm_resourcegroup_facts:
name: myResourceGroup
list_resources: yes
'''
RETURN = '''
azure_resourcegroups:
description: List of resource group dicts.
returned: always
type: list
contains:
id:
description:
- Resource id.
type: str
sample: "/subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/myResourceGroup"
name:
description:
- Resource group name.
type: str
sample: foo
tags:
description:
- Tags assigned to resource group.
type: dict
sample: { "tag": "value" }
resources:
description:
- List of resources under the resource group.
- Only shown when C(list_resources) is set to C(True).
type: list
contains:
id:
description:
- Resource id.
type: str
sample: "/subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/myResourceGroup/providers/Microsoft.Compute/virtualMachines/myVirtualMachine"
name:
description:
- Resource name.
type: str
sample: myVirtualMachine
location:
description:
- Resource region.
type: str
sample: eastus
type:
description:
- Resource type.
type: str
sample: "Microsoft.Compute/virtualMachines"
tags:
description:
- Tags to assign to the managed disk.
type: dict
sample: { "tag": "value" }
'''
try:
from msrestazure.azure_exceptions import CloudError
except Exception:
# This is handled in azure_rm_common
pass
from ansible.module_utils.azure_rm_common import AzureRMModuleBase
AZURE_OBJECT_CLASS = 'ResourceGroup'
class AzureRMResourceGroupFacts(AzureRMModuleBase):
def __init__(self):
self.module_arg_spec = dict(
name=dict(type='str'),
tags=dict(type='list'),
list_resources=dict(type='bool')
)
self.results = dict(
changed=False,
ansible_facts=dict(azure_resourcegroups=[]),
resourcegroups=[]
)
self.name = None
self.tags = None
self.list_resources = None
super(AzureRMResourceGroupFacts, self).__init__(self.module_arg_spec,
supports_tags=False,
facts_module=True)
def exec_module(self, **kwargs):
for key in self.module_arg_spec:
setattr(self, key, kwargs[key])
if self.name:
self.results['ansible_facts']['azure_resourcegroups'] = self.get_item()
else:
self.results['ansible_facts']['azure_resourcegroups'] = self.list_items()
if self.list_resources:
for item in self.results['ansible_facts']['azure_resourcegroups']:
item['resources'] = self.list_by_rg(item['name'])
self.results['resourcegroups'] = self.results['ansible_facts']['azure_resourcegroups']
return self.results
def get_item(self):
self.log('Get properties for {0}'.format(self.name))
item = None
result = []
try:
item = self.rm_client.resource_groups.get(self.name)
except CloudError:
pass
if item and self.has_tags(item.tags, self.tags):
result = [self.serialize_obj(item, AZURE_OBJECT_CLASS)]
return result
def list_items(self):
self.log('List all items')
try:
response = self.rm_client.resource_groups.list()
except CloudError as exc:
self.fail("Failed to list all items - {0}".format(str(exc)))
results = []
for item in response:
if self.has_tags(item.tags, self.tags):
results.append(self.serialize_obj(item, AZURE_OBJECT_CLASS))
return results
def list_by_rg(self, name):
self.log('List resources under resource group')
results = []
try:
response = self.rm_client.resources.list_by_resource_group(name)
while True:
results.append(response.next().as_dict())
except StopIteration:
pass
except CloudError as exc:
self.fail('Error when listing resources under resource group {0}: {1}'.format(name, exc.message or str(exc)))
return results
def main():
AzureRMResourceGroupFacts()
if __name__ == '__main__':
main()
| gpl-3.0 |
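The `list_by_rg` helper above drains Azure's paged response with an explicit `response.next()`/`StopIteration` loop. On current Azure SDKs the paged object is directly iterable; a hedged sketch of the same listing outside Ansible, where the credential class shown (azure-identity's `DefaultAzureCredential`) postdates this module and the subscription ID is a placeholder:

```python
# Sketch: list resources in a resource group with the Azure SDK directly.
from azure.identity import DefaultAzureCredential
from azure.mgmt.resource import ResourceManagementClient

client = ResourceManagementClient(DefaultAzureCredential(), "<subscription-id>")
for resource in client.resources.list_by_resource_group("myResourceGroup"):
    print(resource.name, resource.type, resource.location)
```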
captainpete/rethinkdb | external/v8_3.30.33.16/build/gyp/pylib/gyp/generator/gypsh.py | 2779 | 1665 | # Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""gypsh output module
gypsh is a GYP shell. It's not really a generator per se. All it does is
fire up an interactive Python session with a few local variables set to the
variables passed to the generator. Like gypd, it's intended as a debugging
aid, to facilitate the exploration of .gyp structures after being processed
by the input module.
The expected usage is "gyp -f gypsh -D OS=desired_os".
"""
import code
import sys
# All of this stuff about generator variables was lovingly ripped from gypd.py.
# That module has a much better description of what's going on and why.
_generator_identity_variables = [
'EXECUTABLE_PREFIX',
'EXECUTABLE_SUFFIX',
'INTERMEDIATE_DIR',
'PRODUCT_DIR',
'RULE_INPUT_ROOT',
'RULE_INPUT_DIRNAME',
'RULE_INPUT_EXT',
'RULE_INPUT_NAME',
'RULE_INPUT_PATH',
'SHARED_INTERMEDIATE_DIR',
]
generator_default_variables = {
}
for v in _generator_identity_variables:
generator_default_variables[v] = '<(%s)' % v
def GenerateOutput(target_list, target_dicts, data, params):
locals = {
'target_list': target_list,
'target_dicts': target_dicts,
'data': data,
}
# Use a banner that looks like the stock Python one and like what
# code.interact uses by default, but tack on something to indicate what
# locals are available, and identify gypsh.
banner='Python %s on %s\nlocals.keys() = %s\ngypsh' % \
(sys.version, sys.platform, repr(sorted(locals.keys())))
code.interact(banner, local=locals)
| agpl-3.0 |
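gypsh's entire output step is the final `code.interact` call: open a REPL whose namespace is a hand-picked set of locals. A hedged, self-contained sketch of just that technique:

```python
# Minimal sketch of the technique gypsh uses: drop into an interactive
# shell whose namespace is exactly the dict passed as `local`.
import code
import sys

debug_locals = {'data': {'answer': 42}}
banner = 'Python %s\nlocals.keys() = %r\nsketch' % (
    sys.version, sorted(debug_locals.keys()))
code.interact(banner, local=debug_locals)
```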
iluxonchik/python-general-repo | scripts/fenix/fenix.py | 1 | 4366 | # Fenix Edu Pseudo-API
# ################################################################################################################
# This is a simple pseudo-API for Fenix Edu, created just to simplify scraping from Fenix; it needs to be
# re-worked.
#
# ################################################################################################################
# Example Usage:
#
# fenix = Fenix("ist1+++++", "<PASSWORD>")
# fenix = fenix.login() ----> NOTE: "fenix" will be None if login fails
#
# if fenix is not None:
# response = fenix.open(<URL>) ---> returns the response for the requested URL
#
# ################################################################################################################
import urllib.parse, urllib.request, http.cookiejar
from urllib.error import HTTPError
from bs4 import BeautifulSoup
class Fenix(object):
USER_AGENT_HEADER = ('User-Agent', 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.47 Safari/537.36')
KEEP_ALIVE_HEADER = ('Connection', "keep-alive")
OPENER_HEADERS = [USER_AGENT_HEADER, KEEP_ALIVE_HEADER]
LOGIN_URL = "https://id.tecnico.ulisboa.pt/cas/login"
STUDENT_URL = "https://fenix.tecnico.ulisboa.pt/student/"
def __init__(self, username, password):
self.username = username # this shouldn't be stored as plain text, only like this for demo purposes
self.password = password
self.opener = None
def login(self):
"""
Logins the user with the credentials specified on object instantiation.
Returns:
Fenix object (self) on login success
None on login failure
"""
cj = http.cookiejar.CookieJar()
self.opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(cj))
self.opener.addheaders = Fenix.OPENER_HEADERS
# Let's parse the needed "lt" and "execution" values (needed for POST request, when logging in)
try:
res = self.opener.open(Fenix.LOGIN_URL)
except HTTPError:
return None
bsObj = BeautifulSoup(res, "html.parser")
lt = bsObj.find('input', {'type':'hidden', 'name':'lt'}).attrs['value']
execution = bsObj.find('input', {'type':'hidden', 'name':'execution'}).attrs['value']
# Build the POST request
values = {"username" : self.username, "password" : self.password, "submit-istid" : "Entrar", "lt" : lt, "execution" : execution, "_eventId":"submit"}
data = urllib.parse.urlencode(values)
binary_data = data.encode("ascii")
try:
res = self.opener.open(Fenix.LOGIN_URL, binary_data) # send the post request, after that we should be logged in
# Get the sessionid cookie
res = self.opener.open("https://id.tecnico.ulisboa.pt/cas/login?service=https%3A%2F%2Fbarra.tecnico.ulisboa.pt%2Flogin%2F%3Fnext%3Dhttps%253A%252F%252Fid.tecnico.ulisboa.pt%252Fcas%252Flogin%253Fservice%253Dhttps%253A%252F%252Ffenix.tecnico.ulisboa.pt%252FloginCAS.do")
# Get the csrftoken cookie | NOTE: this one isn't required
res = self.opener.open("https://barra.tecnico.ulisboa.pt/include/?fluid=true&login=https://fenix.tecnico.ulisboa.pt/login&lang=pt&logout=https://fenix.tecnico.ulisboa.pt/logout&next-param=service")
except HTTPError:
return None
return self if self.is_logged_in() else None
def open(self, url, data=None):
return self.opener.open(url, data)
def is_logged_in(self):
"""
Checks whether the current instance is associated with a logged in user.
"""
# Try to access a protected resource (only logged in users can access it),
# in case the instance is not associated with a logged in user, HTTPError with
# error code 404 is raised. In this case, if some other HTTPError occurs, this
# will return false as well
# TODO:
# * more efficient way of checking if user is logged in
# * create and raise exceptions for not logged in case
#
if self.opener is not None:
try:
self.opener.open(Fenix.STUDENT_URL)
return True
except HTTPError:
return False
return False
| mit |
40223235/2015cd_midterm2 | static/Brython3.1.1-20150328-091302/Lib/unittest/result.py | 727 | 6397 | """Test result object"""
import io
import sys
import traceback
from . import util
from functools import wraps
__unittest = True
def failfast(method):
@wraps(method)
def inner(self, *args, **kw):
if getattr(self, 'failfast', False):
self.stop()
return method(self, *args, **kw)
return inner
STDOUT_LINE = '\nStdout:\n%s'
STDERR_LINE = '\nStderr:\n%s'
class TestResult(object):
"""Holder for test result information.
Test results are automatically managed by the TestCase and TestSuite
classes, and do not need to be explicitly manipulated by writers of tests.
Each instance holds the total number of tests run, and collections of
failures and errors that occurred among those test runs. The collections
contain tuples of (testcase, exceptioninfo), where exceptioninfo is the
formatted traceback of the error that occurred.
"""
_previousTestClass = None
_testRunEntered = False
_moduleSetUpFailed = False
def __init__(self, stream=None, descriptions=None, verbosity=None):
self.failfast = False
self.failures = []
self.errors = []
self.testsRun = 0
self.skipped = []
self.expectedFailures = []
self.unexpectedSuccesses = []
self.shouldStop = False
self.buffer = False
self._stdout_buffer = None
self._stderr_buffer = None
self._original_stdout = sys.stdout
self._original_stderr = sys.stderr
self._mirrorOutput = False
def printErrors(self):
"Called by TestRunner after test run"
#fixme brython
pass
def startTest(self, test):
"Called when the given test is about to be run"
self.testsRun += 1
self._mirrorOutput = False
self._setupStdout()
def _setupStdout(self):
if self.buffer:
if self._stderr_buffer is None:
self._stderr_buffer = io.StringIO()
self._stdout_buffer = io.StringIO()
sys.stdout = self._stdout_buffer
sys.stderr = self._stderr_buffer
def startTestRun(self):
"""Called once before any tests are executed.
See startTest for a method called before each test.
"""
def stopTest(self, test):
"""Called when the given test has been run"""
self._restoreStdout()
self._mirrorOutput = False
def _restoreStdout(self):
if self.buffer:
if self._mirrorOutput:
output = sys.stdout.getvalue()
error = sys.stderr.getvalue()
if output:
if not output.endswith('\n'):
output += '\n'
self._original_stdout.write(STDOUT_LINE % output)
if error:
if not error.endswith('\n'):
error += '\n'
self._original_stderr.write(STDERR_LINE % error)
sys.stdout = self._original_stdout
sys.stderr = self._original_stderr
self._stdout_buffer.seek(0)
self._stdout_buffer.truncate()
self._stderr_buffer.seek(0)
self._stderr_buffer.truncate()
def stopTestRun(self):
"""Called once after all tests are executed.
See stopTest for a method called after each test.
"""
@failfast
def addError(self, test, err):
"""Called when an error has occurred. 'err' is a tuple of values as
returned by sys.exc_info().
"""
self.errors.append((test, self._exc_info_to_string(err, test)))
self._mirrorOutput = True
@failfast
def addFailure(self, test, err):
"""Called when an error has occurred. 'err' is a tuple of values as
returned by sys.exc_info()."""
self.failures.append((test, self._exc_info_to_string(err, test)))
self._mirrorOutput = True
def addSuccess(self, test):
"Called when a test has completed successfully"
pass
def addSkip(self, test, reason):
"""Called when a test is skipped."""
self.skipped.append((test, reason))
def addExpectedFailure(self, test, err):
"""Called when an expected failure/error occured."""
self.expectedFailures.append(
(test, self._exc_info_to_string(err, test)))
@failfast
def addUnexpectedSuccess(self, test):
"""Called when a test was expected to fail, but succeed."""
self.unexpectedSuccesses.append(test)
def wasSuccessful(self):
"Tells whether or not this result was a success"
return len(self.failures) == len(self.errors) == 0
def stop(self):
"Indicates that the tests should be aborted"
self.shouldStop = True
def _exc_info_to_string(self, err, test):
"""Converts a sys.exc_info()-style tuple of values into a string."""
exctype, value, tb = err
# Skip test runner traceback levels
while tb and self._is_relevant_tb_level(tb):
tb = tb.tb_next
if exctype is test.failureException:
# Skip assert*() traceback levels
length = self._count_relevant_tb_levels(tb)
msgLines = traceback.format_exception(exctype, value, tb, length)
else:
msgLines = traceback.format_exception(exctype, value, tb)
if self.buffer:
output = sys.stdout.getvalue()
error = sys.stderr.getvalue()
if output:
if not output.endswith('\n'):
output += '\n'
msgLines.append(STDOUT_LINE % output)
if error:
if not error.endswith('\n'):
error += '\n'
msgLines.append(STDERR_LINE % error)
return ''.join(msgLines)
def _is_relevant_tb_level(self, tb):
#fix me brython
#return '__unittest' in tb.tb_frame.f_globals
return True # for now, always return True (brython workaround)
def _count_relevant_tb_levels(self, tb):
length = 0
while tb and not self._is_relevant_tb_level(tb):
length += 1
tb = tb.tb_next
return length
def __repr__(self):
return ("<%s run=%i errors=%i failures=%i>" %
(util.strclass(self.__class__), self.testsRun, len(self.errors),
len(self.failures)))
| gpl-3.0 |
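The `TestResult` above is normally fed by `TestCase` and `TestSuite` rather than driven by hand; a hedged sketch with the standard-library `unittest`, whose interface this Brython copy mirrors, showing how runs accumulate into a result object:

```python
# Sketch: collect a suite's outcomes into a TestResult manually.
import unittest

class Demo(unittest.TestCase):
    def test_ok(self):
        self.assertEqual(1 + 1, 2)

    def test_fail(self):
        self.assertTrue(False)

result = unittest.TestResult()
suite = unittest.TestLoader().loadTestsFromTestCase(Demo)
suite.run(result)
print(result.testsRun, len(result.failures), result.wasSuccessful())
# expected: 2 1 False
```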
antoviaque/huey | docs/conf.py | 9 | 7906 | # -*- coding: utf-8 -*-
#
# huey documentation build configuration file, created by
# sphinx-quickstart on Wed Nov 16 12:48:28 2011.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'huey'
copyright = u'2013, charles leifer'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
src_dir = os.path.realpath(os.path.dirname(os.path.dirname(__file__)))
sys.path.insert(0, src_dir)
from huey import __version__
version = __version__
# The full version, including alpha/beta/rc tags.
release = __version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#html_theme = 'flask'
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {
# 'index_logo': 'logo.jpg',
#}
#
## Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = ['_themes']
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'hueydoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'huey.tex', u'huey Documentation',
u'charles leifer', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'huey', u'huey Documentation',
[u'charles leifer'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'huey', u'huey Documentation',
u'charles leifer', 'huey', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
| mit |
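A hedged sketch of consuming a conf.py like the one above by driving Sphinx programmatically instead of through the `sphinx-build` CLI; all paths are placeholders relative to the project root:

```python
# Sketch: programmatic equivalent of `sphinx-build -b html docs docs/_build/html`.
from sphinx.application import Sphinx

app = Sphinx(
    srcdir='docs',                    # where conf.py and index.rst live
    confdir='docs',
    outdir='docs/_build/html',
    doctreedir='docs/_build/doctrees',
    buildername='html',
)
app.build()
```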
RoyXiang/ehForwarderBot | plugins/eh_telegram_master/__init__.py | 1 | 63061 | # coding=utf-8
import telegram
import telegram.ext
import config
import datetime
import utils
import urllib
import html
import logging
import time
import magic
import os
import re
import mimetypes
import pydub
import threading
import traceback
import base64
from . import db, speech
from .whitelisthandler import WhitelistHandler
from channel import EFBChannel, EFBMsg, MsgType, MsgSource, TargetType, ChannelType
from channelExceptions import EFBChatNotFound, EFBMessageTypeNotSupported, EFBMessageError
from .msgType import get_msg_type, TGMsgType
from moviepy.editor import VideoFileClip
class Flags:
# General Flags
CANCEL_PROCESS = "cancel"
# Chat linking
CONFIRM_LINK = 0x11
EXEC_LINK = 0x12
# Start a chat
START_CHOOSE_CHAT = 0x21
# Command
COMMAND_PENDING = 0x31
class TelegramChannel(EFBChannel):
"""
EFB Channel - Telegram (Master)
Requires python-telegram-bot
Author: Eana Hufwe <https://github.com/blueset>
External Services:
You may need API keys from the following service providers to enable additional functions.
Baidu Speech Recognition API: http://yuyin.baidu.com/
Bing Speech API: https://www.microsoft.com/cognitive-services/en-us/speech-api
Additional configs:
eh_telegram_master = {
"token": "Telegram bot token",
"admins": [12345678, 87654321],
"bing_speech_api": ["token1", "token2"],
"baidu_speech_api": {
"app_id": 123456,
"api_key": "APIkey",
"secret_key": "secret_key"
}
}
"""
# Meta Info
channel_name = "Telegram Master"
channel_emoji = "✈"
channel_id = "eh_telegram_master"
channel_type = ChannelType.Master
supported_message_types = {MsgType.Text, MsgType.File, MsgType.Audio,
MsgType.Command, MsgType.Image, MsgType.Link, MsgType.Location,
MsgType.Sticker, MsgType.Video}
# Data
slaves = None
bot = None
msg_status = {}
msg_storage = {}
me = None
_stop_polling = False
# Constants
TYPE_DICT = {
TGMsgType.Text: MsgType.Text,
TGMsgType.Audio: MsgType.Audio,
TGMsgType.Document: MsgType.File,
TGMsgType.Photo: MsgType.Image,
TGMsgType.Sticker: MsgType.Sticker,
TGMsgType.Video: MsgType.Video,
TGMsgType.Voice: MsgType.Audio,
TGMsgType.Location: MsgType.Location,
TGMsgType.Venue: MsgType.Location,
}
def __init__(self, queue, mutex, slaves):
"""
Initialization.
Args:
queue (queue.Queue): global message queue
slaves (dict): Dictionary of slaves
"""
super().__init__(queue, mutex)
self.slaves = slaves
try:
self.bot = telegram.ext.Updater(getattr(config, self.channel_id)['token'])
except (AttributeError, KeyError):
raise ValueError("Token is not properly defined. Please define it in `config.py`.")
mimetypes.init(files=["mimetypes"])
self.admins = getattr(config, self.channel_id)['admins']
self.logger = logging.getLogger("plugins.%s.TelegramChannel" % self.channel_id)
self.me = self.bot.bot.get_me()
self.bot.dispatcher.add_handler(WhitelistHandler(self.admins))
self.bot.dispatcher.add_handler(telegram.ext.CommandHandler("link", self.link_chat_show_list, pass_args=True))
self.bot.dispatcher.add_handler(telegram.ext.CommandHandler("chat", self.start_chat_list, pass_args=True))
self.bot.dispatcher.add_handler(telegram.ext.CommandHandler("recog", self.recognize_speech, pass_args=True))
self.bot.dispatcher.add_handler(telegram.ext.CallbackQueryHandler(self.callback_query_dispatcher))
self.bot.dispatcher.add_handler(telegram.ext.CommandHandler("start", self.start, pass_args=True))
self.bot.dispatcher.add_handler(telegram.ext.CommandHandler("extra", self.extra_help))
self.bot.dispatcher.add_handler(telegram.ext.CommandHandler("help", self.help))
self.bot.dispatcher.add_handler(telegram.ext.CommandHandler("unlink_all", self.unlink_all))
self.bot.dispatcher.add_handler(
telegram.ext.RegexHandler(r"^/(?P<id>[0-9]+)_(?P<command>[a-z0-9_-]+)", self.extra_call,
pass_groupdict=True))
self.bot.dispatcher.add_handler(telegram.ext.MessageHandler(
telegram.ext.Filters.text |
telegram.ext.Filters.photo |
telegram.ext.Filters.sticker |
telegram.ext.Filters.document |
telegram.ext.Filters.venue |
telegram.ext.Filters.location |
telegram.ext.Filters.audio |
telegram.ext.Filters.voice |
telegram.ext.Filters.video,
self.msg
))
self.bot.dispatcher.add_error_handler(self.error)
# Truncate string by bytes
# Written by Mark Tolonen
# http://stackoverflow.com/a/13738452/1989455
@staticmethod
def _utf8_lead_byte(b):
"""A UTF-8 intermediate byte starts with the bits 10xxxxxx."""
return (b & 0xC0) != 0x80
def _utf8_byte_truncate(self, text, max_bytes):
"""If text[max_bytes] is not a lead byte, back up until a lead byte is
found and truncate before that character."""
utf8 = text.encode('utf8')
if len(utf8) <= max_bytes:
return utf8.decode()
i = max_bytes
while i > 0 and not self._utf8_lead_byte(utf8[i]):
i -= 1
return utf8[:i].decode()
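# Example (illustrative): "héllo wörld" encodes to 13 UTF-8 bytes; with
# max_bytes=9, index 9 lands on the continuation byte of "ö", so the loop
# backs up to the lead byte at index 8 and truncates there, yielding
# "héllo w" instead of a split two-byte character.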
def callback_query_dispatcher(self, bot, update):
"""
Dispatch a callback query based on the message session status.
Args:
bot (telegram.bot.Bot): bot
update (telegram.Update): update
"""
# Get essential information about the query
query = update.callback_query
chat_id = query.message.chat.id
text = query.data
msg_id = update.callback_query.message.message_id
msg_status = self.msg_status.get("%s.%s" % (chat_id, msg_id), None)
# dispatch the query
if msg_status in [Flags.CONFIRM_LINK]:
self.link_chat_confirm(bot, chat_id, msg_id, text)
elif msg_status in [Flags.EXEC_LINK]:
self.link_chat_exec(bot, chat_id, msg_id, text)
elif msg_status == Flags.START_CHOOSE_CHAT:
self.make_chat_head(bot, chat_id, msg_id, text)
elif msg_status == Flags.COMMAND_PENDING:
self.command_exec(bot, chat_id, msg_id, text)
else:
bot.editMessageText(text="Session expired. Please try again. (SE01)",
chat_id=chat_id,
message_id=msg_id)
@staticmethod
def _reply_error(bot, update, errmsg):
"""
A wrap that directly reply a message with error details.
Returns:
telegram.Message: Message sent
"""
return bot.send_message(update.message.chat.id, errmsg, reply_to_message_id=update.message.message_id)
def process_msg(self, msg):
"""
Process a message from slave channel and deliver it to the user.
Args:
msg (EFBMsg): The message.
"""
try:
xid = datetime.datetime.now().timestamp()
self.logger.debug("%s, Msg text: %s", xid, msg.text)
self.logger.debug("%s, process_msg_step_0", xid)
chat_uid = "%s.%s" % (msg.channel_id, msg.origin['uid'])
tg_chats = db.get_chat_assoc(slave_uid=chat_uid)
tg_chat = None
multi_slaves = False
if tg_chats:
tg_chat = tg_chats[0]
slaves = db.get_chat_assoc(master_uid=tg_chat)
if slaves and len(slaves) > 1:
multi_slaves = True
msg_prefix = "" # For group member name
tg_chat_assoced = False
if msg.source != MsgSource.Group:
msg.member = {"uid": -1, "name": "", "alias": ""}
# Generate chat text template & Decide type target
tg_dest = getattr(config, self.channel_id)['admins'][0]
self.logger.debug("%s, process_msg_step_1, tg_dest=%s, msg.origin=%s", xid, tg_dest, str(msg.origin))
if msg.source == MsgSource.Group:
self.logger.debug("msg.member: %s", str(msg.member))
msg_prefix = msg.member['name'] if msg.member['name'] == msg.member['alias'] or not msg.member['alias'] \
else "%s (%s)" % (msg.member['alias'], msg.member['name'])
if tg_chat: # if this chat is linked
tg_dest = int(tg_chat.split('.')[1])
tg_chat_assoced = True
if tg_chat and not multi_slaves: # if singly linked
if msg_prefix: # if group message
msg_template = "%s:\n%s" % (msg_prefix, "%s")
else:
msg_template = "%s"
elif msg.source == MsgSource.User:
emoji_prefix = msg.channel_emoji + utils.Emojis.get_source_emoji(msg.source)
name_prefix = msg.origin["name"] if msg.origin["alias"] == msg.origin["name"] or not msg.origin['alias'] \
else "%s (%s)" % (msg.origin["alias"], msg.origin["name"])
msg_template = "%s %s:\n%s" % (emoji_prefix, name_prefix, "%s")
elif msg.source == MsgSource.Group:
emoji_prefix = msg.channel_emoji + utils.Emojis.get_source_emoji(msg.source)
name_prefix = msg.origin["name"] if msg.origin["alias"] == msg.origin["name"] or not msg.origin['alias'] \
else "%s (%s)" % (msg.origin["alias"], msg.origin["name"])
msg_template = "%s %s [%s]:\n%s" % (emoji_prefix, msg_prefix, name_prefix, "%s")
elif msg.source == MsgSource.System:
emoji_prefix = msg.channel_emoji + utils.Emojis.get_source_emoji(msg.source)
name_prefix = msg.origin["name"] if msg.origin["alias"] == msg.origin["name"] or not msg.origin['alias'] \
else "%s (%s)" % (msg.origin["alias"], msg.origin["name"])
msg_template = "%s %s:\n%s" % (emoji_prefix, name_prefix, "%s")
else:
msg_template = "Unknown message source (%s)\n%s" % (msg.source, "%s")
# Type dispatching
self.logger.debug("%s, process_msg_step_2", xid)
append_last_msg = False
if msg.type == MsgType.Text:
parse_mode = "HTML" if self._flag("text_as_html", False) else None
if tg_chat_assoced:
last_msg = db.get_last_msg_from_chat(tg_dest)
if last_msg:
if last_msg.msg_type == "Text":
append_last_msg = str(last_msg.slave_origin_uid) == "%s.%s" % (msg.channel_id, msg.origin['uid'])
if msg.source == MsgSource.Group:
append_last_msg &= str(last_msg.slave_member_uid) == str(msg.member['uid'])
append_last_msg &= datetime.datetime.now() - last_msg.time <= datetime.timedelta(
seconds=self._flag('join_msg_threshold_secs', 15))
else:
append_last_msg = False
else:
append_last_msg = False
self.logger.debug("Text: Append last msg: %s", append_last_msg)
self.logger.debug("%s, process_msg_step_3_0, tg_dest = %s, tg_chat_assoced = %s, append_last_msg = %s",
xid, tg_dest, tg_chat_assoced, append_last_msg)
if tg_chat_assoced and append_last_msg:
self.logger.debug("%s, process_msg_step_3_0_1", xid)
msg.text = "%s\n%s" % (last_msg.text, msg.text)
try:
tg_msg = self.bot.bot.editMessageText(chat_id=tg_dest,
message_id=last_msg.master_msg_id.split(".", 1)[1],
text=msg_template % msg.text,
parse_mode=parse_mode)
except telegram.error.BadRequest:
tg_msg = self.bot.bot.editMessageText(chat_id=tg_dest,
message_id=last_msg.master_msg_id.split(".", 1)[1],
text=msg_template % msg.text)
else:
self.logger.debug("%s, process_msg_step_3_0_3", xid)
try:
tg_msg = self.bot.bot.send_message(tg_dest, text=msg_template % msg.text, parse_mode=parse_mode)
except telegram.error.BadRequest:
tg_msg = self.bot.bot.send_message(tg_dest, text=msg_template % msg.text)
self.logger.debug("%s, process_msg_step_3_0_4, tg_msg = %s", xid, tg_msg)
self.logger.debug("%s, process_msg_step_3_1", xid)
elif msg.type == MsgType.Link:
thumbnail = urllib.parse.quote(msg.attributes["image"] or "", safe="?=&#:/")
thumbnail = "<a href=\"%s\">🔗</a>" % thumbnail if thumbnail else "🔗"
text = "%s <a href=\"%s\">%s</a>\n%s" % \
(thumbnail,
urllib.parse.quote(msg.attributes["url"], safe="?=&#:/"),
html.escape(msg.attributes["title"] or msg.attributes["url"]),
html.escape(msg.attributes["description"] or ""))
if msg.text:
text += "\n\n" + msg.text
try:
tg_msg = self.bot.bot.send_message(tg_dest, text=msg_template % text, parse_mode="HTML")
except telegram.error.BadRequest:
text = "🔗 %s\n%s\n\n%s" % (html.escape(msg.attributes["title"] or ""),
html.escape(msg.attributes["description"] or ""),
urllib.parse.quote(msg.attributes["url"] or "", safe="?=&#:/"))
if msg.text:
text += "\n\n" + msg.text
tg_msg = self.bot.bot.send_message(tg_dest, text=msg_template % msg.text)
elif msg.type in [MsgType.Image, MsgType.Sticker]:
self.logger.debug("%s, process_msg_step_3_2", xid)
self.logger.debug("Received %s\nPath: %s\nMIME: %s", msg.type, msg.path, msg.mime)
self.logger.debug("Path: %s\nSize: %s", msg.path, os.stat(msg.path).st_size)
if os.stat(msg.path).st_size == 0:
os.remove(msg.path)
tg_msg = self.bot.bot.send_message(tg_dest,
msg_template % ("Error: Empty %s received. (MS01)" % msg.type))
else:
if not msg.text:
if msg.type == MsgType.Image:
msg.text = "sent a picture."
elif msg.type == MsgType.Sticker:
msg.text = "sent a sticker."
if msg.mime == "image/gif":
tg_msg = self.bot.bot.sendDocument(tg_dest, msg.file, caption=msg_template % msg.text)
else:
try:
tg_msg = self.bot.bot.sendPhoto(tg_dest, msg.file, caption=msg_template % msg.text)
except telegram.error.BadRequest:
tg_msg = self.bot.bot.sendDocument(tg_dest, msg.file, caption=msg_template % msg.text)
os.remove(msg.path)
self.logger.debug("%s, process_msg_step_3_3", xid)
elif msg.type == MsgType.File:
if os.stat(msg.path).st_size == 0:
os.remove(msg.path)
tg_msg = self.bot.bot.send_message(tg_dest,
msg_template % ("Error: Empty %s received. (MS02)" % msg.type))
else:
if not msg.filename:
file_name = os.path.basename(msg.path)
msg.text = "sent a file."
else:
file_name = msg.filename
tg_msg = self.bot.bot.send_document(tg_dest, msg.file, caption=msg_template % msg.text,
filename=file_name)
os.remove(msg.path)
elif msg.type == MsgType.Audio:
if os.stat(msg.path).st_size == 0:
os.remove(msg.path)
return self.bot.bot.send_message(tg_dest,
msg_template % ("Error: Empty %s received. (MS03)" % msg.type))
msg.text = msg.text or ''
self.logger.debug("%s, process_msg_step_4_1, no_conversion = %s", xid,
self._flag("no_conversion", False))
if self._flag("no_conversion", False):
self.logger.debug("%s, process_msg_step_4_2, mime = %s", xid, msg.mime)
if msg.mime == "audio/mpeg":
tg_msg = self.bot.bot.sendAudio(tg_dest, msg.file, caption=msg_template % msg.text)
else:
tg_msg = self.bot.bot.sendDocument(tg_dest, msg.file, caption=msg_template % msg.text)
else:
pydub.AudioSegment.from_file(msg.file).export("%s.ogg" % msg.path,
format="ogg",
codec="libopus",
bitrate="65536",
parameters=["-vbr", "on", "-compression_level", "10"])
ogg_file = open("%s.ogg" % msg.path, 'rb')
tg_msg = self.bot.bot.sendVoice(tg_dest, ogg_file, caption=msg_template % msg.text)
os.remove("%s.ogg" % msg.path)
os.remove(msg.path)
elif msg.type == MsgType.Location:
self.logger.info("---\nsending venue\nlat: %s, long: %s\ntitle: %s\naddr: %s",
msg.attributes['latitude'], msg.attributes['longitude'], msg.text, msg_template % "")
tg_msg = self.bot.bot.sendVenue(tg_dest, latitude=msg.attributes['latitude'],
longitude=msg.attributes['longitude'], title=msg.text,
address=msg_template % "")
elif msg.type == MsgType.Video:
if os.stat(msg.path).st_size == 0:
os.remove(msg.path)
return self.bot.bot.send_message(tg_dest, msg_template % ("Error: Empty %s received." % msg.type))
if not msg.text:
msg.text = "sent a video."
tg_msg = self.bot.bot.sendVideo(tg_dest, msg.file, caption=msg_template % msg.text)
os.remove(msg.path)
elif msg.type == MsgType.Command:
buttons = []
for i, ival in enumerate(msg.attributes['commands']):
buttons.append([telegram.InlineKeyboardButton(ival['name'], callback_data=str(i))])
tg_msg = self.bot.bot.send_message(tg_dest, msg_template % msg.text,
reply_markup=telegram.InlineKeyboardMarkup(buttons))
self.msg_status["%s.%s" % (tg_dest, tg_msg.message_id)] = Flags.COMMAND_PENDING
self.msg_storage["%s.%s" % (tg_dest, tg_msg.message_id)] = {"channel": msg.channel_id,
"text": msg_template % msg.text,
"commands": msg.attributes['commands']}
else:
tg_msg = self.bot.bot.send_message(tg_dest, msg_template % "Unsupported incoming message type. (UT01)")
self.logger.debug("%s, process_msg_step_4", xid)
if msg.source in (MsgSource.User, MsgSource.Group):
msg_log = {"master_msg_id": "%s.%s" % (tg_msg.chat.id, tg_msg.message_id),
"text": msg.text or "Sent a %s." % msg.type,
"msg_type": msg.type,
"sent_to": "Master",
"slave_origin_uid": "%s.%s" % (msg.channel_id, msg.origin['uid']),
"slave_origin_display_name": msg.origin['alias'],
"slave_member_uid": msg.member['uid'],
"slave_member_display_name": msg.member['alias'],
"slave_message_uid": msg.uid}
if tg_chat_assoced and append_last_msg:
msg_log['update'] = True
db.add_msg_log(**msg_log)
self.logger.debug("%s, process_msg_step_5", xid)
except Exception as e:
self.logger.error(repr(e) + traceback.format_exc())
def slave_chats_pagination(self, storage_id, offset=0, filter=""):
"""
Generate a list of (list of) `InlineKeyboardButton`s of chats in slave channels,
based on the status of message located by `storage_id` and the paging from
`offset` value.
Args:
storage_id (str): Message_storage ID for generating the buttons list.
offset (int): Offset for pagination
Returns:
tuple (str, list of list of InlineKeyboardButton):
A tuple: legend, chat_btn_list
`legend` is the legend of all Emoji headings in the entire list.
`chat_btn_list` is a list which can be fit into `telegram.InlineKeyboardMarkup`.
"""
legend = [
"%s: Linked" % utils.Emojis.LINK_EMOJI,
"%s: User" % utils.Emojis.USER_EMOJI,
"%s: Group" % utils.Emojis.GROUP_EMOJI
]
if self.msg_storage.get(storage_id, None):
chats = self.msg_storage[storage_id]['chats']
channels = self.msg_storage[storage_id]['channels']
count = self.msg_storage[storage_id]['count']
else:
rfilter = re.compile(filter, re.DOTALL | re.IGNORECASE)
if filter:
self.logger.debug("Filter string: %s", filter)
chats = []
channels = {}
for slave_id in self.slaves:
slave = self.slaves[slave_id]
slave_chats = slave.get_chats()
channels[slave.channel_id] = {
"channel_name": slave.channel_name,
"channel_emoji": slave.channel_emoji
}
for chat in slave_chats:
c = {
"channel_id": slave.channel_id,
"channel_name": slave.channel_name,
"channel_emoji": slave.channel_emoji,
"chat_name": chat['name'],
"chat_alias": chat['alias'],
"chat_uid": chat['uid'],
"type": chat['type']
}
entry_string = "Channel: %s\nName: %s\nAlias: %s\nID: %s\nType: %s" \
% (c['channel_name'], c['chat_name'], c['chat_alias'], c['chat_uid'], c['type'])
if not filter or rfilter.search(entry_string):
chats.append(c)
count = len(chats)
self.msg_storage[storage_id] = {
"offset": offset,
"count": len(chats),
"chats": chats.copy(),
"channels": channels.copy()
}
for ch in channels:
legend.append("%s: %s" % (channels[ch]['channel_emoji'], channels[ch]['channel_name']))
# Build inline button list
chat_btn_list = []
chats_per_page = self._flag("chats_per_page", 10)
for i in range(offset, min(offset + chats_per_page, count)):
chat = chats[i]
linked = utils.Emojis.LINK_EMOJI if bool(
db.get_chat_assoc(slave_uid="%s.%s" % (chat['channel_id'], chat['chat_uid']))) else ""
chat_type = utils.Emojis.get_source_emoji(chat['type'])
chat_name = chat['chat_alias'] if chat['chat_name'] == chat['chat_alias'] else "%s (%s)" % (
chat['chat_alias'], chat['chat_name'])
button_text = "%s%s: %s %s" % (chat['channel_emoji'], chat_type, chat_name, linked)
button_callback = "chat %s" % i
chat_btn_list.append([telegram.InlineKeyboardButton(button_text, callback_data=button_callback)])
# Pagination
page_number_row = []
if offset - chats_per_page >= 0:
page_number_row.append(telegram.InlineKeyboardButton("< Prev", callback_data="offset %s" % (
offset - chats_per_page)))
page_number_row.append(telegram.InlineKeyboardButton("Cancel", callback_data=Flags.CANCEL_PROCESS))
if offset + chats_per_page < count:
page_number_row.append(telegram.InlineKeyboardButton("Next >", callback_data="offset %s" % (
offset + chats_per_page)))
chat_btn_list.append(page_number_row)
return legend, chat_btn_list
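    # Note on the callback_data strings built above: "chat <i>" selects the
    # chat at index <i> in the cached list, "offset <n>" pages to offset <n>,
    # and Flags.CANCEL_PROCESS aborts. link_chat_confirm() and
    # make_chat_head() below parse these back with str.split().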
def link_chat_show_list(self, bot, update, args=None):
"""
Show the list of available chats for linking.
Triggered by `/link`.
Args:
bot: Telegram Bot instance
            update: Message update
            args: Arguments from the command message, joined into a filter regex.
        """
args = args or []
self.link_chat_gen_list(bot, self.admins[0], filter=" ".join(args))
def link_chat_gen_list(self, bot, chat_id, message_id=None, offset=0, filter=""):
"""
Generate the list for chat linking, and update it to a message.
Args:
bot: Telegram Bot instance
chat_id: Chat ID
message_id: ID of message to be updated, None to send a new message.
            offset: Offset for pagination.
            filter: Regex string used as a filter.
        """
if not message_id:
message_id = bot.send_message(chat_id, "Processing...").message_id
msg_text = "Please choose the chat you want to link with ...\n\nLegend:\n"
legend, chat_btn_list = self.slave_chats_pagination("%s.%s" % (chat_id, message_id), offset, filter=filter)
for i in legend:
msg_text += "%s\n" % i
msg = bot.editMessageText(chat_id=chat_id, message_id=message_id, text=msg_text,
reply_markup=telegram.InlineKeyboardMarkup(chat_btn_list))
self.msg_status["%s.%s" % (chat_id, msg.message_id)] = Flags.CONFIRM_LINK
def link_chat_confirm(self, bot, tg_chat_id, tg_msg_id, callback_uid):
"""
Confirmation of chat linking. Triggered by callback message on status `Flags.CONFIRM_LINK`.
Args:
bot: Telegram Bot instance
tg_chat_id: Chat ID
tg_msg_id: Message ID triggered the callback
callback_uid: Callback message
"""
if callback_uid.split()[0] == "offset":
return self.link_chat_gen_list(bot, tg_chat_id, message_id=tg_msg_id, offset=int(callback_uid.split()[1]))
if callback_uid == Flags.CANCEL_PROCESS:
txt = "Cancelled."
self.msg_status.pop("%s.%s" % (tg_chat_id, tg_msg_id), None)
self.msg_storage.pop("%s.%s" % (tg_chat_id, tg_msg_id), None)
return bot.editMessageText(text=txt,
chat_id=tg_chat_id,
message_id=tg_msg_id)
if callback_uid[:4] != "chat":
txt = "Invalid parameter (%s). (IP01)" % callback_uid
self.msg_status.pop("%s.%s" % (tg_chat_id, tg_msg_id), None)
self.msg_storage.pop("%s.%s" % (tg_chat_id, tg_msg_id), None)
return bot.editMessageText(text=txt,
chat_id=tg_chat_id,
message_id=tg_msg_id)
callback_uid = int(callback_uid.split()[1])
chat = self.msg_storage["%s.%s" % (tg_chat_id, tg_msg_id)]['chats'][callback_uid]
chat_uid = "%s.%s" % (chat['channel_id'], chat['chat_uid'])
chat_display_name = chat['chat_name'] if chat['chat_name'] == chat['chat_alias'] else "%s (%s)" % (
chat['chat_alias'], chat['chat_name'])
chat_display_name = "'%s' from '%s %s'" % (chat_display_name, chat['channel_emoji'], chat['channel_name'])
linked = bool(db.get_chat_assoc(slave_uid=chat_uid))
self.msg_status["%s.%s" % (tg_chat_id, tg_msg_id)] = Flags.EXEC_LINK
self.msg_storage["%s.%s" % (tg_chat_id, tg_msg_id)] = {
"chat_uid": chat_uid,
"chat_display_name": chat_display_name,
"chats": [chat.copy()],
"tg_chat_id": tg_chat_id,
"tg_msg_id": tg_msg_id
}
txt = "You've selected chat %s." % chat_display_name
if linked:
txt += "\nThis chat has already linked to Telegram."
txt += "\nWhat would you like to do?"
link_url = "https://telegram.me/%s?startgroup=%s" % (
self.me.username, urllib.parse.quote(self.b64en("%s.%s" % (tg_chat_id, tg_msg_id))))
self.logger.debug("Telegram start trigger for linking chat: %s", link_url)
if linked:
btn_list = [telegram.InlineKeyboardButton("Relink", url=link_url),
telegram.InlineKeyboardButton("Unlink", callback_data="unlink 0")]
else:
btn_list = [telegram.InlineKeyboardButton("Link", url=link_url)]
btn_list.append(telegram.InlineKeyboardButton("Cancel", callback_data=Flags.CANCEL_PROCESS))
bot.editMessageText(text=txt,
chat_id=tg_chat_id,
message_id=tg_msg_id,
reply_markup=telegram.InlineKeyboardMarkup([btn_list]))
def link_chat_exec(self, bot, tg_chat_id, tg_msg_id, callback_uid):
"""
Action to link a chat. Triggered by callback message with status `Flags.EXEC_LINK`.
Args:
bot: Telegram Bot instance
tg_chat_id: Chat ID
tg_msg_id: Message ID triggered the callback
callback_uid: Callback message
"""
if callback_uid == Flags.CANCEL_PROCESS:
txt = "Cancelled."
self.msg_status.pop("%s.%s" % (tg_chat_id, tg_msg_id), None)
self.msg_storage.pop("%s.%s" % (tg_chat_id, tg_msg_id), None)
return bot.editMessageText(text=txt,
chat_id=tg_chat_id,
message_id=tg_msg_id)
cmd, chat_lid = callback_uid.split()
chat = self.msg_storage["%s.%s" % (tg_chat_id, tg_msg_id)]['chats'][int(chat_lid)]
chat_uid = "%s.%s" % (chat['channel_id'], chat['chat_uid'])
chat_display_name = chat['chat_name'] if chat['chat_name'] == chat['chat_alias'] else "%s (%s)" % (
chat['chat_alias'], chat['chat_name'])
chat_display_name = "'%s' from '%s %s'" % (chat_display_name, chat['channel_emoji'], chat['channel_name']) \
if chat['channel_name'] else "'%s'" % chat_display_name
self.msg_status.pop("%s.%s" % (tg_chat_id, tg_msg_id), None)
self.msg_storage.pop("%s.%s" % (tg_chat_id, tg_msg_id), None)
if cmd == "unlink":
db.remove_chat_assoc(slave_uid=chat_uid)
txt = "Chat %s is unlinked." % (chat_display_name)
return bot.editMessageText(text=txt, chat_id=tg_chat_id, message_id=tg_msg_id)
txt = "Command '%s' (%s) is not recognised, please try again" % (cmd, callback_uid)
bot.editMessageText(text=txt, chat_id=tg_chat_id, message_id=tg_msg_id)
def unlink_all(self, bot, update):
if update.message.chat.id == update.message.from_user.id:
return bot.send_message(update.message.chat.id, "Send `/unlink_all` to a group to unlink all remote chats "
"from it.",
parse_mode=telegram.ParseMode.MARKDOWN,
reply_to_message_id=update.message.message_id)
assocs = db.get_chat_assoc(master_uid="%s.%s" % (self.channel_id, update.message.chat.id))
if len(assocs) < 1:
return bot.send_message(update.message.chat.id, "No chat is linked to the group.",
reply_to_message_id=update.message.message_id)
else:
db.remove_chat_assoc(master_uid="%s.%s" % (self.channel_id, update.message.chat.id))
return bot.send_message(update.message.chat.id, "All chats has been unlinked from this group. (%s)" % len(assocs),
reply_to_message_id=update.message.message_id)
def start_chat_list(self, bot, update, args=None):
"""
        Send a list of chats for generating chat heads.
Triggered by `/list`.
Args:
bot: Telegram Bot instance
update: Message update
args: Arguments from the command message
"""
args = args or []
msg_id = self.chat_head_req_generate(bot, self.admins[0], filter=" ".join(args))
self.msg_status["%s.%s" % (self.admins[0], msg_id)] = Flags.START_CHOOSE_CHAT
def chat_head_req_generate(self, bot, chat_id, message_id=None, offset=0, filter=""):
"""
Generate the list for chat head, and update it to a message.
Args:
bot: Telegram Bot instance
chat_id: Chat ID
message_id: ID of message to be updated, None to send a new message.
offset: Offset for pagination.
filter: Regex String used as a filter.
"""
if not message_id:
message_id = bot.send_message(chat_id, text="Processing...").message_id
legend, chat_btn_list = self.slave_chats_pagination("%s.%s" % (chat_id, message_id), offset, filter=filter)
msg_text = "Choose a chat you want to start with...\n\nLegend:\n"
for i in legend:
msg_text += "%s\n" % i
bot.editMessageText(text=msg_text,
chat_id=chat_id,
message_id=message_id,
reply_markup=telegram.InlineKeyboardMarkup(chat_btn_list))
return message_id
def make_chat_head(self, bot, tg_chat_id, tg_msg_id, callback_uid):
"""
Create a chat head. Triggered by callback message with status `Flags.START_CHOOSE_CHAT`.
Args:
bot: Telegram Bot instance
tg_chat_id: Chat ID
tg_msg_id: Message ID triggered the callback
callback_uid: Callback message
"""
if callback_uid.split()[0] == "offset":
return self.chat_head_req_generate(bot, tg_chat_id, message_id=tg_msg_id,
offset=int(callback_uid.split()[1]))
if callback_uid == Flags.CANCEL_PROCESS:
txt = "Cancelled."
self.msg_status.pop("%s.%s" % (tg_chat_id, tg_msg_id), None)
self.msg_storage.pop("%s.%s" % (tg_chat_id, tg_msg_id), None)
return bot.editMessageText(text=txt,
chat_id=tg_chat_id,
message_id=tg_msg_id)
if callback_uid[:4] != "chat":
txt = "Invalid parameter. (%s)" % callback_uid
self.msg_status.pop("%s.%s" % (tg_chat_id, tg_msg_id), None)
self.msg_storage.pop("%s.%s" % (tg_chat_id, tg_msg_id), None)
return bot.editMessageText(text=txt,
chat_id=tg_chat_id,
message_id=tg_msg_id)
callback_uid = int(callback_uid.split()[1])
chat = self.msg_storage["%s.%s" % (tg_chat_id, tg_msg_id)]['chats'][callback_uid]
chat_uid = "%s.%s" % (chat['channel_id'], chat['chat_uid'])
chat_display_name = chat['chat_name'] if chat['chat_name'] == chat['chat_alias'] else "%s (%s)" % (
chat['chat_alias'], chat['chat_name'])
chat_display_name = "'%s' from '%s %s'" % (chat_display_name, chat['channel_emoji'], chat['channel_name'])
self.msg_status.pop("%s.%s" % (tg_chat_id, tg_msg_id), None)
self.msg_storage.pop("%s.%s" % (tg_chat_id, tg_msg_id), None)
txt = "Reply to this message to chat with %s." % chat_display_name
msg_log = {"master_msg_id": "%s.%s" % (tg_chat_id, tg_msg_id),
"text": txt,
"msg_type": "Text",
"sent_to": "Master",
"slave_origin_uid": chat_uid,
"slave_origin_display_name": chat_display_name,
"slave_member_uid": None,
"slave_member_display_name": None}
db.add_msg_log(**msg_log)
bot.editMessageText(text=txt, chat_id=tg_chat_id, message_id=tg_msg_id)
def command_exec(self, bot, chat_id, message_id, callback):
"""
Run a command from a command message.
Triggered by callback message with status `Flags.COMMAND_PENDING`.
Args:
bot: Telegram Bot instance
chat_id: Chat ID
message_id: Message ID triggered the callback
callback: Callback message
"""
if not callback.isdecimal():
msg = "Invalid parameter: %s. (CE01)" % callback
self.msg_status.pop("%s.%s" % (chat_id, message_id), None)
self.msg_storage.pop("%s.%s" % (chat_id, message_id), None)
return bot.editMessageText(text=msg, chat_id=chat_id, message_id=message_id)
        elif not (0 <= int(callback) < len(self.msg_storage["%s.%s" % (chat_id, message_id)]['commands'])):
msg = "Index out of bound: %s. (CE02)" % callback
self.msg_status.pop("%s.%s" % (chat_id, message_id), None)
self.msg_storage.pop("%s.%s" % (chat_id, message_id), None)
return bot.editMessageText(text=msg, chat_id=chat_id, message_id=message_id)
callback = int(callback)
channel_id = self.msg_storage["%s.%s" % (chat_id, message_id)]['channel']
command = self.msg_storage["%s.%s" % (chat_id, message_id)]['commands'][callback]
msg = self.msg_storage["%s.%s" % (chat_id, message_id)]['text'] + "\n------\n" + getattr(
self.slaves[channel_id], command['callable'])(*command['args'], **command['kwargs'])
self.msg_status.pop("%s.%s" % (chat_id, message_id), None)
self.msg_storage.pop("%s.%s" % (chat_id, message_id), None)
return bot.editMessageText(text=msg, chat_id=chat_id, message_id=message_id)
def extra_help(self, bot, update):
"""
Show list of extra functions and their usage.
Triggered by `/extra`.
Args:
bot: Telegram Bot instance
update: Message update
"""
msg = "List of slave channel features:"
for n, i in enumerate(sorted(self.slaves)):
i = self.slaves[i]
msg += "\n\n<b>%s %s</b>" % (i.channel_emoji, i.channel_name)
xfns = i.get_extra_functions()
if xfns:
for j in xfns:
fn_name = "/%s_%s" % (n, j)
msg += "\n\n%s <b>(%s)</b>\n%s" % (
fn_name, xfns[j].name, xfns[j].desc.format(function_name=fn_name))
else:
msg += "No command found."
bot.send_message(update.message.chat.id, msg, parse_mode="HTML")
def extra_call(self, bot, update, groupdict=None):
"""
Call an extra function from slave channel.
Args:
bot: Telegram Bot instance
update: Message update
groupdict: Parameters offered by the message
"""
if int(groupdict['id']) >= len(self.slaves):
return self._reply_error(bot, update, "Invalid slave channel ID. (XC01)")
ch = self.slaves[sorted(self.slaves)[int(groupdict['id'])]]
fns = ch.get_extra_functions()
if groupdict['command'] not in fns:
return self._reply_error(bot, update, "Command not found in selected channel. (XC02)")
header = "%s %s: %s\n-------\n" % (ch.channel_emoji, ch.channel_name, fns[groupdict['command']].name)
msg = bot.send_message(update.message.chat.id, header + "Please wait...")
result = fns[groupdict['command']](" ".join(update.message.text.split(' ', 1)[1:]))
bot.editMessageText(text=header + result, chat_id=update.message.chat.id, message_id=msg.message_id)
def msg(self, bot, update):
"""
Process, wrap and deliver messages from user.
Args:
bot: Telegram Bot instance
update: Message update
"""
self.logger.debug("----\nMsg from tg user:\n%s", update.message.to_dict())
target = None
multi_slaves = False
assoc = None
if update.message.chat.id != update.message.from_user.id: # from group
assocs = db.get_chat_assoc(master_uid="%s.%s" % (self.channel_id, update.message.chat.id))
if len(assocs) == 1:
assoc = assocs[0]
elif len(assocs) > 1:
multi_slaves = True
reply_to = bool(getattr(update.message, "reply_to_message", None))
private_chat = update.message.chat.id == update.message.from_user.id
if private_chat:
if reply_to:
try:
assoc = db.get_msg_log("%s.%s" % (
update.message.reply_to_message.chat.id,
update.message.reply_to_message.message_id)).slave_origin_uid
except:
return self._reply_error(bot, update,
"Message is not found in database. Please try with another one. (UC03)")
else:
return self._reply_error(bot, update,
"Please reply to an incoming message. (UC04)")
else: # group chat
if multi_slaves:
if reply_to:
try:
assoc = db.get_msg_log("%s.%s" % (
update.message.reply_to_message.chat.id,
update.message.reply_to_message.message_id)).slave_origin_uid
except:
return self._reply_error(bot, update,
"Message is not found in database. "
"Please try with another one. (UC05)")
else:
return self._reply_error(bot, update,
"This group is linked to multiple remote chats. "
"Please reply to an incoming message. "
"To unlink all remote chats, please send /unlink_all . (UC06)")
elif assoc:
if reply_to:
try:
targetlog = db.get_msg_log(
"%s.%s" % (
update.message.reply_to_message.chat.id, update.message.reply_to_message.message_id))
target = targetlog.slave_origin_uid
targetChannel, targetUid = target.split('.', 1)
except:
return self._reply_error(bot, update,
"Message is not found in database. "
"Please try with another message. (UC07)")
else:
return self._reply_error(bot, update,
"This group is not linked to any chat. (UC06)")
self.logger.debug("Destination chat = %s", assoc)
        channel, uid = assoc.split('.', 1)
if channel not in self.slaves:
return self._reply_error(bot, update, "Internal error: Channel not found.")
try:
m = EFBMsg(self)
m.uid = "%s.%s" % (update.message.chat.id, update.message.message_id)
mtype = get_msg_type(update.message)
# Chat and author related stuff
m.origin['uid'] = update.message.from_user.id
if getattr(update.message.from_user, "last_name", None):
m.origin['alias'] = "%s %s" % (update.message.from_user.first_name, update.message.from_user.last_name)
else:
m.origin['alias'] = update.message.from_user.first_name
if getattr(update.message.from_user, "username", None):
m.origin['name'] = "@%s" % update.message.from_user.id
else:
m.origin['name'] = m.origin['alias']
m.destination = {
'channel': channel,
'uid': uid,
'name': '',
'alias': ''
}
if target:
if targetChannel == channel:
trgtMsg = EFBMsg(self.slaves[targetChannel])
trgtMsg.type = MsgType.Text
trgtMsg.text = targetlog.text
trgtMsg.member = {
"name": targetlog.slave_member_display_name,
"alias": targetlog.slave_member_display_name,
"uid": targetlog.slave_member_uid
}
trgtMsg.origin = {
"name": targetlog.slave_origin_display_name,
"alias": targetlog.slave_origin_display_name,
"uid": targetlog.slave_origin_uid.split('.', 2)[1]
}
m.target = {
"type": TargetType.Message,
"target": trgtMsg
}
# Type specific stuff
self.logger.debug("Msg type: %s", mtype)
if self.TYPE_DICT.get(mtype, None):
m.type = self.TYPE_DICT[mtype]
else:
raise EFBMessageTypeNotSupported()
if m.type not in self.slaves[channel].supported_message_types:
raise EFBMessageTypeNotSupported()
if mtype == TGMsgType.Text:
m.type = MsgType.Text
m.text = update.message.text
elif mtype == TGMsgType.Photo:
m.type = MsgType.Image
m.text = update.message.caption
m.path, m.mime = self._download_file(update.message, update.message.photo[-1], m.type)
m.file = open(m.path, "rb")
elif mtype == TGMsgType.Sticker:
m.type = MsgType.Sticker
m.text = ""
m.path, m.mime = self._download_file(update.message, update.message.sticker, m.type)
m.file = open(m.path, "rb")
elif mtype == TGMsgType.Document:
m.text = update.message.caption
self.logger.debug("tg: Document file received")
m.filename = getattr(update.message.document, "file_name", None) or None
if update.message.document.mime_type == "video/mp4":
self.logger.debug("tg: Telegram GIF received")
m.type = MsgType.Image
m.path, m.mime = self._download_gif(update.message, update.message.document, m.type)
else:
m.type = MsgType.File
m.path, m.mime = self._download_file(update.message, update.message.document, m.type)
m.mime = update.message.document.mime_type or m.mime
m.file = open(m.path, "rb")
elif mtype == TGMsgType.Video:
m.type = MsgType.Video
m.text = update.message.caption
m.path, m.mime = self._download_file(update.message, update.message.video, m.type)
m.file = open(m.path, "rb")
elif mtype == TGMsgType.Audio:
m.type = MsgType.Audio
m.text = "%s - %s\n%s" % (
update.message.audio.title, update.message.audio.performer, update.message.caption)
m.path, m.mime = self._download_file(update.message, update.message.audio, m.type)
elif mtype == TGMsgType.Voice:
m.type = MsgType.Audio
m.text = update.message.caption
m.path, m.mime = self._download_file(update.message, update.message.voice, m.type)
elif mtype == TGMsgType.Location:
m.type = MsgType.Location
m.text = "Location"
m.attributes = {
"latitude": update.message.location.latitude,
"longitude": update.message.location.longitude
}
elif mtype == TGMsgType.Venue:
m.type = MsgType.Location
                m.text = update.message.venue.title + "\n" + update.message.venue.address
m.attributes = {
"latitude": update.message.venue.location.latitude,
"longitude": update.message.venue.location.longitude
}
else:
return self._reply_error(bot, update, "Message type not supported. (MN02)")
self.slaves[channel].send_message(m)
except EFBChatNotFound:
return self._reply_error(bot, update, "Chat is not reachable from the slave channel. (CN01)")
except EFBMessageTypeNotSupported:
return self._reply_error(bot, update, "Message type not supported. (MN01)")
except EFBMessageError as e:
return self._reply_error(bot, update, "Message is not sent. (MN01)\n\n%s" % str(e))
def _download_file(self, tg_msg, file_obj, msg_type):
"""
Download media file from telegram platform.
Args:
tg_msg: Telegram message instance
file_obj: File object
msg_type: Type of message
Returns:
tuple of str[2]: Full path of the file, MIME type
"""
path = os.path.join("storage", self.channel_id)
if not os.path.exists(path):
os.makedirs(path)
size = getattr(file_obj, "file_size", None)
file_id = file_obj.file_id
if size and size > 20 * 1024 ** 2:
raise EFBMessageError("Attachment is too large. Maximum 20 MB. (AT01)")
f = self.bot.bot.getFile(file_id)
fname = "%s_%s_%s_%s" % (msg_type, tg_msg.chat.id, tg_msg.message_id, int(time.time()))
fullpath = os.path.join(path, fname)
f.download(fullpath)
mime = getattr(file_obj, "mime_type", magic.from_file(fullpath, mime=True))
if type(mime) is bytes:
mime = mime.decode()
guess_ext = mimetypes.guess_extension(mime) or ".unknown"
if guess_ext == ".unknown":
self.logger.warning("File %s with mime %s has no matching extensions.", fullpath, mime)
ext = ".jpeg" if mime == "image/jpeg" else guess_ext
os.rename(fullpath, "%s%s" % (fullpath, ext))
fullpath = "%s%s" % (fullpath, ext)
return fullpath, mime
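        # Illustrative result (names are examples, not from the original
        # docs): a photo in chat 123, message 45, fetched at Unix time
        # 1500000000 is stored as "storage/<channel_id>/<msg_type>_123_45_1500000000.jpeg",
        # with the extension guessed from the MIME type.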
def _download_gif(self, tg_msg, file_id, msg_type):
"""
Download and convert GIF image.
Args:
tg_msg: Telegram message instance
file_id: File ID
msg_type: Type of message
Returns:
tuple of str[2]: Full path of the file, MIME type
"""
fullpath, mime = self._download_file(tg_msg, file_id, msg_type)
VideoFileClip(fullpath).write_gif(fullpath + ".gif", program="ffmpeg")
return fullpath + ".gif", "image/gif"
def start(self, bot, update, args=[]):
"""
Process bot command `/start`.
Args:
bot: Telegram Bot instance
update: Message update
args: Arguments from message
"""
if update.message.from_user.id != update.message.chat.id: # from group
try:
data = self.msg_storage[self.b64de(args[0])]
except KeyError:
update.message.reply_text("Session expired or unknown parameter. (SE02)")
chat_uid = data["chat_uid"]
chat_display_name = data["chat_display_name"]
slave_channel, slave_chat_uid = chat_uid.split('.', 1)
if slave_channel in self.slaves:
db.add_chat_assoc(master_uid="%s.%s" % (self.channel_id, update.message.chat.id),
slave_uid=chat_uid,
multiple_slave=self._flag("multiple_slave_chats", False))
txt = "Chat '%s' is now linked." % chat_display_name
unlink_btn = telegram.InlineKeyboardMarkup(
[[telegram.InlineKeyboardButton("Unlink", callback_data="unlink 0")]])
new_msg = bot.send_message(update.message.chat.id, text=txt, reply_markup=unlink_btn)
self.msg_status[args[0]] = \
self.msg_status["%s.%s" % (update.message.chat.id, new_msg.message_id)] = \
Flags.EXEC_LINK
self.msg_storage[args[0]] = \
self.msg_storage["%s.%s" % (update.message.chat.id, new_msg.message_id)] = \
{"chats": data['chats']}
bot.editMessageText(chat_id=data["tg_chat_id"],
message_id=data["tg_msg_id"],
text=txt,
reply_markup=unlink_btn)
self.msg_status.pop(args[0], False)
else:
txt = "Welcome to EH Forwarder Bot: EFB Telegram Master Channel.\n\n" \
"To learn more, please visit https://github.com/blueset/ehForwarderBot ."
bot.send_message(update.message.from_user.id, txt)
def help(self, bot, update):
txt = "EFB Telegram Master Channel\n" \
"/link\n" \
" Link a remote chat to an empty Telegram group.\n" \
" Followed by a regular expression to filter results.\n" \
"/chat\n" \
" Generate a chat head to start a conversation.\n" \
" Followed by a regular expression to filter results.\n" \
"/extra\n" \
" List all extra function from slave channels.\n" \
"/unlink_all\n" \
" Unlink all remote chats in this chat.\n" \
"/recog\n" \
" Reply to a voice message to convert it to text.\n" \
" Followed by a language code to choose a specific lanugage.\n" \
" You have to enable speech to text in the config file first.\n" \
"/help\n" \
" Print this command list."
bot.send_message(update.message.from_user.id, txt)
def recognize_speech(self, bot, update, args=[]):
"""
Recognise voice message. Triggered by `/recog`.
Args:
bot: Telegram Bot instance
update: Message update
args: Arguments from message
"""
class speechNotImplemented:
lang_list = []
def __init__(self, *args, **kwargs):
pass
def recognize(self, *args, **kwargs):
return ["Not enabled or error in configuration."]
if not getattr(update.message, "reply_to_message", None):
txt = "/recog [lang_code]\nReply to a voice with this command to recognize it.\n" \
"mples:\n/recog\n/recog zh\n/recog en\n(RS01)"
return self._reply_error(bot, update, txt)
if not getattr(update.message.reply_to_message, "voice"):
return self._reply_error(bot, update,
"Reply only to a voice with this command to recognize it. (RS02)")
try:
baidu_speech = speech.BaiduSpeech(getattr(config, self.channel_id)['baidu_speech_api'])
except:
baidu_speech = speechNotImplemented()
try:
bing_speech = speech.BingSpeech(getattr(config, self.channel_id)['bing_speech_api'])
except:
bing_speech = speechNotImplemented()
if len(args) > 0 and (args[0][:2] not in ['zh', 'en', 'ja'] and args[0] not in bing_speech.lang_list):
return self._reply_error(bot, update, "Language is not supported. Try with zh, ja or en. (RS03)")
if update.message.reply_to_message.voice.duration > 60:
return self._reply_error(bot, update, "Only voice shorter than 60s is supported. (RS04)")
path, mime = self._download_file(update.message, update.message.reply_to_message.voice, MsgType.Audio)
results = {}
if len(args) == 0:
results['Baidu (English)'] = baidu_speech.recognize(path, "en")
results['Baidu (Mandarin)'] = baidu_speech.recognize(path, "zh")
results['Bing (English)'] = bing_speech.recognize(path, "en-US")
results['Bing (Mandarin)'] = bing_speech.recognize(path, "zh-CN")
results['Bing (Japanese)'] = bing_speech.recognize(path, "ja-JP")
elif args[0][:2] == 'zh':
results['Baidu (Mandarin)'] = baidu_speech.recognize(path, "zh")
if args[0] in bing_speech.lang_list:
results['Bing (%s)' % args[0]] = bing_speech.recognize(path, args[0])
else:
results['Bing (Mandarin)'] = bing_speech.recognize(path, "zh-CN")
elif args[0][:2] == 'en':
results['Baidu (English)'] = baidu_speech.recognize(path, "en")
if args[0] in bing_speech.lang_list:
results['Bing (%s)' % args[0]] = bing_speech.recognize(path, args[0])
else:
results['Bing (English)'] = bing_speech.recognize(path, "en-US")
elif args[0][:2] == 'ja':
results['Bing (Japanese)'] = bing_speech.recognize(path, "ja-JP")
elif args[0][:2] == 'ct':
results['Baidu (Cantonese)'] = baidu_speech.recognize(path, "ct")
elif args[0] in bing_speech.lang_list:
results['Bing (%s)' % args[0]] = bing_speech.recognize(path, args[0])
msg = ""
for i in results:
msg += "\n*%s*:\n" % i
for j in results[i]:
msg += "%s\n" % j
msg = "Results:\n%s" % msg
bot.send_message(update.message.reply_to_message.chat.id, msg,
reply_to_message_id=update.message.reply_to_message.message_id,
parse_mode=telegram.ParseMode.MARKDOWN)
os.remove(path)
def poll(self):
"""
Message polling process.
"""
self.polling_from_tg()
while True:
try:
m = self.queue.get()
if m is None:
break
self.logger.info("Got message from queue\nType: %s\nText: %s\n----" % (m.type, m.text))
threading.Thread(target=self.process_msg, args=(m,)).start()
self.queue.task_done()
self.logger.info("Msg sent to TG, task_done marked.")
except Exception as e:
self.logger.error("Error occurred during message polling")
self.logger.error(repr(e))
self.bot.stop()
self.poll()
self.logger.debug("Gracefully stopping %s (%s).", self.channel_name, self.channel_id)
self.bot.stop()
self.logger.debug("%s (%s) gracefully stopped.", self.channel_name, self.channel_id)
def polling_from_tg(self):
"""
        Poll messages from the Telegram Bot API. Can be overridden to extend the polling behaviour.
"""
self.bot.start_polling(timeout=10)
def error(self, bot, update, error):
"""
Print error to console, and send error message to first admin.
Triggered by python-telegram-bot error callback.
"""
if "Conflict: terminated by other long poll or webhook (409)" in str(error):
msg = 'Please immediately turn off all EFB instances.\nAnother bot instance or web-hook detected.'
self.logger.error(msg)
bot.send_message(getattr(config, self.channel_id)['admins'][0], msg)
else:
try:
bot.send_message(getattr(config, self.channel_id)['admins'][0],
"EFB Telegram Master channel encountered error <code>%s</code> "
"caused by update <code>%s</code>.\n\n"
"Report issue: <a href=\"https://github.com/blueset/ehForwarderBot/issues/new\">GitHub Issue Page</a>" %
(html.escape(str(error)), html.escape(str(update))), parse_mode="HTML")
except:
bot.send_message(getattr(config, self.channel_id)['admins'][0],
"EFB Telegram Master channel encountered error\n%s\n"
"caused by update\n%s\n\n"
"Report issue: https://github.com/blueset/ehForwarderBot/issues/new" %
(html.escape(str(error)), html.escape(str(update))))
self.logger.error('ERROR! Update %s caused error %s' % (update, error))
def _flag(self, key, value):
"""
Retrieve value for experimental flags.
Args:
key: Key of the flag.
value: Default/fallback value.
Returns:
Value for the flag.
"""
return getattr(config, self.channel_id).get('flags', dict()).get(key, value)
@property
def stop_polling(self):
return self._stop_polling
@stop_polling.setter
def stop_polling(self, val):
if val:
self.queue.put(None)
self._stop_polling = val
@staticmethod
def b64en(s):
return base64.b64encode(s.encode(), b"-_").decode().rstrip("=")
@staticmethod
def b64de(s):
return base64.b64decode((s + '=' * (- len(s) % 4)).encode(), b"-_").decode()
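    # Example round trip of the URL-safe base64 helpers above ("-_" alternate
    # chars, padding stripped on encode and restored on decode):
    #   b64en("1234.56")     -> "MTIzNC41Ng"
    #   b64de("MTIzNC41Ng")  -> "1234.56"
    # This is how "tg_chat_id.tg_msg_id" is packed into the /start deep link.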
| gpl-3.0 |
piyush0609/scipy | scipy/io/arff/arffread.py | 25 | 21335 | #! /usr/bin/env python
# Last Change: Mon Aug 20 08:00 PM 2007 J
from __future__ import division, print_function, absolute_import
import re
import itertools
import datetime
from functools import partial
import numpy as np
from scipy._lib.six import next
"""A module to read arff files."""
__all__ = ['MetaData', 'loadarff', 'ArffError', 'ParseArffError']
# An Arff file is basically two parts:
# - header
# - data
#
# A header has each of its components starting by @META where META is one of
# the keyword (attribute of relation, for now).
# TODO:
# - both integer and reals are treated as numeric -> the integer info is lost !
# - Replace ValueError by ParseError or something
# We now can handle the following:
# - numeric and nominal attributes
# - missing values for numeric attributes
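# An illustrative minimal file in the subset handled here (example, not from
# the scipy sources):
#
#   @relation weather
#   @attribute temperature numeric
#   @attribute outlook {sunny, rainy}
#   @data
#   % '?' marks a missing value
#   25.0, sunny
#   ?, rainy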
r_meta = re.compile('^\s*@')
# Match a comment
r_comment = re.compile(r'^%')
# Match an empty line
r_empty = re.compile(r'^\s+$')
# Match a header line, that is a line which starts by @ + a word
r_headerline = re.compile(r'^@\S*')
r_datameta = re.compile(r'^@[Dd][Aa][Tt][Aa]')
r_relation = re.compile(r'^@[Rr][Ee][Ll][Aa][Tt][Ii][Oo][Nn]\s*(\S*)')
r_attribute = re.compile(r'^@[Aa][Tt][Tt][Rr][Ii][Bb][Uu][Tt][Ee]\s*(..*$)')
# To get attributes name enclosed with ''
r_comattrval = re.compile(r"'(..+)'\s+(..+$)")
# To get attributes name enclosed with '', possibly spread across multilines
r_mcomattrval = re.compile(r"'([..\n]+)'\s+(..+$)")
# To get normal attributes
r_wcomattrval = re.compile(r"(\S+)\s+(..+$)")
#-------------------------
# Module defined exception
#-------------------------
class ArffError(IOError):
pass
class ParseArffError(ArffError):
pass
#------------------
# Various utilities
#------------------
# An attribute is defined as @attribute name value
def parse_type(attrtype):
"""Given an arff attribute value (meta data), returns its type.
Expect the value to be a name."""
uattribute = attrtype.lower().strip()
if uattribute[0] == '{':
return 'nominal'
elif uattribute[:len('real')] == 'real':
return 'numeric'
elif uattribute[:len('integer')] == 'integer':
return 'numeric'
elif uattribute[:len('numeric')] == 'numeric':
return 'numeric'
elif uattribute[:len('string')] == 'string':
return 'string'
elif uattribute[:len('relational')] == 'relational':
return 'relational'
elif uattribute[:len('date')] == 'date':
return 'date'
else:
raise ParseArffError("unknown attribute %s" % uattribute)
def get_nominal(attribute):
"""If attribute is nominal, returns a list of the values"""
return attribute.split(',')
def read_data_list(ofile):
"""Read each line of the iterable and put it in a list."""
data = [next(ofile)]
if data[0].strip()[0] == '{':
raise ValueError("This looks like a sparse ARFF: not supported yet")
data.extend([i for i in ofile])
return data
def get_ndata(ofile):
"""Read the whole file to get number of data attributes."""
data = [next(ofile)]
loc = 1
if data[0].strip()[0] == '{':
raise ValueError("This looks like a sparse ARFF: not supported yet")
for i in ofile:
loc += 1
return loc
def maxnomlen(atrv):
"""Given a string containing a nominal type definition, returns the
    string length of the longest component.
    A nominal type is defined as something framed between braces ({}).
Parameters
----------
atrv : str
Nominal type definition
Returns
-------
slen : int
length of longest component
Examples
--------
maxnomlen("{floup, bouga, fl, ratata}") returns 6 (the size of
ratata, the longest nominal value).
>>> maxnomlen("{floup, bouga, fl, ratata}")
6
"""
nomtp = get_nom_val(atrv)
return max(len(i) for i in nomtp)
def get_nom_val(atrv):
"""Given a string containing a nominal type, returns a tuple of the
possible values.
A nominal type is defined as something framed between braces ({}).
Parameters
----------
atrv : str
Nominal type definition
Returns
-------
poss_vals : tuple
possible values
Examples
--------
>>> get_nom_val("{floup, bouga, fl, ratata}")
('floup', 'bouga', 'fl', 'ratata')
"""
r_nominal = re.compile('{(.+)}')
m = r_nominal.match(atrv)
if m:
return tuple(i.strip() for i in m.group(1).split(','))
else:
raise ValueError("This does not look like a nominal string")
def get_date_format(atrv):
r_date = re.compile(r"[Dd][Aa][Tt][Ee]\s+[\"']?(.+?)[\"']?$")
m = r_date.match(atrv)
if m:
pattern = m.group(1).strip()
# convert time pattern from Java's SimpleDateFormat to C's format
datetime_unit = None
if "yyyy" in pattern:
pattern = pattern.replace("yyyy", "%Y")
datetime_unit = "Y"
elif "yy":
pattern = pattern.replace("yy", "%y")
datetime_unit = "Y"
if "MM" in pattern:
pattern = pattern.replace("MM", "%m")
datetime_unit = "M"
if "dd" in pattern:
pattern = pattern.replace("dd", "%d")
datetime_unit = "D"
if "HH" in pattern:
pattern = pattern.replace("HH", "%H")
datetime_unit = "h"
if "mm" in pattern:
pattern = pattern.replace("mm", "%M")
datetime_unit = "m"
if "ss" in pattern:
pattern = pattern.replace("ss", "%S")
datetime_unit = "s"
if "z" in pattern or "Z" in pattern:
raise ValueError("Date type attributes with time zone not supported, yet")
if datetime_unit is None:
raise ValueError("Invalid or unsupported date format")
return pattern, datetime_unit
else:
raise ValueError("Invalid or no date format")
def go_data(ofile):
"""Skip header.
the first next() call of the returned iterator will be the @data line"""
return itertools.dropwhile(lambda x: not r_datameta.match(x), ofile)
#----------------
# Parsing header
#----------------
def tokenize_attribute(iterable, attribute):
"""Parse a raw string in header (eg starts by @attribute).
Given a raw string attribute, try to get the name and type of the
attribute. Constraints:
* The first line must start with @attribute (case insensitive, and
space like characters before @attribute are allowed)
* Works also if the attribute is spread on multilines.
* Works if empty lines or comments are in between
Parameters
----------
attribute : str
the attribute string.
Returns
-------
name : str
name of the attribute
value : str
value of the attribute
next : str
next line to be parsed
Examples
--------
If attribute is a string defined in python as r"floupi real", will
return floupi as name, and real as value.
>>> iterable = iter([0] * 10) # dummy iterator
>>> tokenize_attribute(iterable, r"@attribute floupi real")
('floupi', 'real', 0)
If attribute is r"'floupi 2' real", will return 'floupi 2' as name,
and real as value.
>>> tokenize_attribute(iterable, r" @attribute 'floupi 2' real ")
('floupi 2', 'real', 0)
"""
sattr = attribute.strip()
mattr = r_attribute.match(sattr)
if mattr:
# atrv is everything after @attribute
atrv = mattr.group(1)
if r_comattrval.match(atrv):
name, type = tokenize_single_comma(atrv)
next_item = next(iterable)
elif r_wcomattrval.match(atrv):
name, type = tokenize_single_wcomma(atrv)
next_item = next(iterable)
else:
# Not sure we should support this, as it does not seem supported by
# weka.
raise ValueError("multi line not supported yet")
#name, type, next_item = tokenize_multilines(iterable, atrv)
else:
raise ValueError("First line unparsable: %s" % sattr)
if type == 'relational':
raise ValueError("relational attributes not supported yet")
return name, type, next_item
def tokenize_single_comma(val):
# XXX we match twice the same string (here and at the caller level). It is
# stupid, but it is easier for now...
m = r_comattrval.match(val)
if m:
try:
name = m.group(1).strip()
type = m.group(2).strip()
except IndexError:
raise ValueError("Error while tokenizing attribute")
else:
raise ValueError("Error while tokenizing single %s" % val)
return name, type
def tokenize_single_wcomma(val):
# XXX we match twice the same string (here and at the caller level). It is
# stupid, but it is easier for now...
m = r_wcomattrval.match(val)
if m:
try:
name = m.group(1).strip()
type = m.group(2).strip()
except IndexError:
raise ValueError("Error while tokenizing attribute")
else:
raise ValueError("Error while tokenizing single %s" % val)
return name, type
def read_header(ofile):
"""Read the header of the iterable ofile."""
i = next(ofile)
# Pass first comments
while r_comment.match(i):
i = next(ofile)
# Header is everything up to DATA attribute ?
relation = None
attributes = []
while not r_datameta.match(i):
m = r_headerline.match(i)
if m:
isattr = r_attribute.match(i)
if isattr:
name, type, i = tokenize_attribute(ofile, i)
attributes.append((name, type))
else:
isrel = r_relation.match(i)
if isrel:
relation = isrel.group(1)
else:
raise ValueError("Error parsing line %s" % i)
i = next(ofile)
else:
i = next(ofile)
return relation, attributes
#--------------------
# Parsing actual data
#--------------------
def safe_float(x):
"""given a string x, convert it to a float. If the stripped string is a ?,
    return a NaN (missing value).
Parameters
----------
x : str
string to convert
Returns
-------
f : float
where float can be nan
Examples
--------
>>> safe_float('1')
1.0
>>> safe_float('1\\n')
1.0
>>> safe_float('?\\n')
nan
"""
if '?' in x:
return np.nan
else:
return float(x)
def safe_nominal(value, pvalue):
svalue = value.strip()
if svalue in pvalue:
return svalue
elif svalue == '?':
return svalue
else:
raise ValueError("%s value not in %s" % (str(svalue), str(pvalue)))
def safe_date(value, date_format, datetime_unit):
date_str = value.strip().strip("'").strip('"')
if date_str == '?':
return np.datetime64('NaT', datetime_unit)
else:
dt = datetime.datetime.strptime(date_str, date_format)
return np.datetime64(dt).astype("datetime64[%s]" % datetime_unit)
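# For example, with date_format "%Y-%m-%d" and datetime_unit "D",
# safe_date("'2005-10-01'", ...) gives numpy.datetime64('2005-10-01'),
# while safe_date("?", ...) gives a NaT of the same unit.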
def get_delim(line):
"""Given a string representing a line of data, check whether the
delimiter is ',' or space.
Parameters
----------
line : str
line of data
Returns
-------
delim : {',', ' '}
Examples
--------
>>> get_delim(',')
','
>>> get_delim(' ')
' '
>>> get_delim(', ')
','
>>> get_delim('x')
Traceback (most recent call last):
...
ValueError: delimiter not understood: x
"""
if ',' in line:
return ','
if ' ' in line:
return ' '
raise ValueError("delimiter not understood: " + line)
class MetaData(object):
"""Small container to keep useful informations on a ARFF dataset.
Knows about attributes names and types.
Examples
--------
::
data, meta = loadarff('iris.arff')
# This will print the attributes names of the iris.arff dataset
for i in meta:
print i
# This works too
meta.names()
# Getting attribute type
types = meta.types()
Notes
-----
Also maintains the list of attributes in order, i.e. doing for i in
meta, where meta is an instance of MetaData, will return the
different attribute names in the order they were defined.
"""
def __init__(self, rel, attr):
self.name = rel
# We need the dictionary to be ordered
# XXX: may be better to implement an ordered dictionary
self._attributes = {}
self._attrnames = []
for name, value in attr:
tp = parse_type(value)
self._attrnames.append(name)
if tp == 'nominal':
self._attributes[name] = (tp, get_nom_val(value))
elif tp == 'date':
self._attributes[name] = (tp, get_date_format(value)[0])
else:
self._attributes[name] = (tp, None)
def __repr__(self):
msg = ""
msg += "Dataset: %s\n" % self.name
for i in self._attrnames:
msg += "\t%s's type is %s" % (i, self._attributes[i][0])
if self._attributes[i][1]:
msg += ", range is %s" % str(self._attributes[i][1])
msg += '\n'
return msg
def __iter__(self):
return iter(self._attrnames)
def __getitem__(self, key):
return self._attributes[key]
def names(self):
"""Return the list of attribute names."""
return self._attrnames
def types(self):
"""Return the list of attribute types."""
attr_types = [self._attributes[name][0] for name in self._attrnames]
return attr_types
def loadarff(f):
"""
Read an arff file.
The data is returned as a record array, which can be accessed much like
a dictionary of numpy arrays. For example, if one of the attributes is
called 'pressure', then its first 10 data points can be accessed from the
``data`` record array like so: ``data['pressure'][0:10]``
Parameters
----------
f : file-like or str
File-like object to read from, or filename to open.
Returns
-------
data : record array
The data of the arff file, accessible by attribute names.
meta : `MetaData`
Contains information about the arff file such as name and
type of attributes, the relation (name of the dataset), etc...
Raises
------
ParseArffError
This is raised if the given file is not ARFF-formatted.
NotImplementedError
The ARFF file has an attribute which is not supported yet.
Notes
-----
This function should be able to read most arff files. Not
implemented functionality include:
* date type attributes
* string type attributes
It can read files with numeric and nominal attributes. It cannot read
files with sparse data ({} in the file). However, this function can
read files with missing data (? in the file), representing the data
points as NaNs.
Examples
--------
>>> from scipy.io import arff
>>> from cStringIO import StringIO
>>> content = \"\"\"
... @relation foo
... @attribute width numeric
... @attribute height numeric
... @attribute color {red,green,blue,yellow,black}
... @data
... 5.0,3.25,blue
... 4.5,3.75,green
... 3.0,4.00,red
... \"\"\"
>>> f = StringIO(content)
>>> data, meta = arff.loadarff(f)
>>> data
array([(5.0, 3.25, 'blue'), (4.5, 3.75, 'green'), (3.0, 4.0, 'red')],
dtype=[('width', '<f8'), ('height', '<f8'), ('color', '|S6')])
>>> meta
Dataset: foo
\twidth's type is numeric
\theight's type is numeric
\tcolor's type is nominal, range is ('red', 'green', 'blue', 'yellow', 'black')
"""
if hasattr(f, 'read'):
ofile = f
else:
ofile = open(f, 'rt')
try:
return _loadarff(ofile)
finally:
if ofile is not f: # only close what we opened
ofile.close()
def _loadarff(ofile):
# Parse the header file
try:
rel, attr = read_header(ofile)
except ValueError as e:
msg = "Error while parsing header, error was: " + str(e)
raise ParseArffError(msg)
# Check whether we have a string attribute (not supported yet)
hasstr = False
for name, value in attr:
type = parse_type(value)
if type == 'string':
hasstr = True
meta = MetaData(rel, attr)
# XXX The following code is not great
# Build the type descriptor descr and the list of convertors to convert
# each attribute to the suitable type (which should match the one in
# descr).
# This can be used once we want to support integer as integer values and
# not as numeric anymore (using masked arrays ?).
acls2dtype = {'real': float, 'integer': float, 'numeric': float}
acls2conv = {'real': safe_float, 'integer': safe_float, 'numeric': safe_float}
descr = []
convertors = []
if not hasstr:
for name, value in attr:
type = parse_type(value)
if type == 'date':
date_format, datetime_unit = get_date_format(value)
descr.append((name, "datetime64[%s]" % datetime_unit))
convertors.append(partial(safe_date, date_format=date_format, datetime_unit=datetime_unit))
elif type == 'nominal':
n = maxnomlen(value)
descr.append((name, 'S%d' % n))
pvalue = get_nom_val(value)
convertors.append(partial(safe_nominal, pvalue=pvalue))
else:
descr.append((name, acls2dtype[type]))
convertors.append(safe_float)
#dc.append(acls2conv[type])
#sdescr.append((name, acls2sdtype[type]))
else:
# How to support string efficiently ? Ideally, we should know the max
# size of the string before allocating the numpy array.
raise NotImplementedError("String attributes not supported yet, sorry")
ni = len(convertors)
# Get the delimiter from the first line of data:
def next_data_line(row_iter):
"""Assumes we are already in the data part (eg after @data)."""
raw = next(row_iter)
while r_empty.match(raw) or r_comment.match(raw):
raw = next(row_iter)
return raw
try:
try:
dtline = next_data_line(ofile)
delim = get_delim(dtline)
except ValueError as e:
raise ParseArffError("Error while parsing delimiter: " + str(e))
finally:
ofile.seek(0, 0)
ofile = go_data(ofile)
# skip the @data line
next(ofile)
def generator(row_iter, delim=','):
# TODO: this is where we are spending times (~80%). I think things
# could be made more efficiently:
# - We could for example "compile" the function, because some values
# do not change here.
# - The function to convert a line to dtyped values could also be
# generated on the fly from a string and be executed instead of
# looping.
# - The regex are overkill: for comments, checking that a line starts
# by % should be enough and faster, and for empty lines, same thing
# --> this does not seem to change anything.
# We do not abstract skipping comments and empty lines for performances
# reason.
raw = next(row_iter)
while r_empty.match(raw) or r_comment.match(raw):
raw = next(row_iter)
# 'compiling' the range since it does not change
# Note, I have already tried zipping the converters and
# row elements and got slightly worse performance.
elems = list(range(ni))
row = raw.split(delim)
yield tuple([convertors[i](row[i]) for i in elems])
for raw in row_iter:
while r_comment.match(raw) or r_empty.match(raw):
raw = next(row_iter)
row = raw.split(delim)
yield tuple([convertors[i](row[i]) for i in elems])
a = generator(ofile, delim=delim)
# No error should happen here: it is a bug otherwise
data = np.fromiter(a, descr)
return data, meta
#-----
# Misc
#-----
def basic_stats(data):
nbfac = data.size * 1. / (data.size - 1)
return np.nanmin(data), np.nanmax(data), np.mean(data), np.std(data) * nbfac
def print_attribute(name, tp, data):
type = tp[0]
if type == 'numeric' or type == 'real' or type == 'integer':
min, max, mean, std = basic_stats(data)
print("%s,%s,%f,%f,%f,%f" % (name, type, min, max, mean, std))
else:
msg = name + ",{"
for i in range(len(tp[1])-1):
msg += tp[1][i] + ","
msg += tp[1][-1]
msg += "}"
print(msg)
def test_weka(filename):
data, meta = loadarff(filename)
print(len(data.dtype))
print(data.size)
for i in meta:
print_attribute(i,meta[i],data[i])
# make sure nose does not find this as a test
test_weka.__test__ = False
if __name__ == '__main__':
import sys
filename = sys.argv[1]
test_weka(filename)
| bsd-3-clause |
xyuanmu/XX-Net | python3.8.2/Lib/site-packages/pip/_vendor/packaging/_structures.py | 62 | 1416 | # This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
class Infinity(object):
def __repr__(self):
return "Infinity"
def __hash__(self):
return hash(repr(self))
def __lt__(self, other):
return False
def __le__(self, other):
return False
def __eq__(self, other):
return isinstance(other, self.__class__)
def __ne__(self, other):
return not isinstance(other, self.__class__)
def __gt__(self, other):
return True
def __ge__(self, other):
return True
def __neg__(self):
return NegativeInfinity
Infinity = Infinity()
class NegativeInfinity(object):
def __repr__(self):
return "-Infinity"
def __hash__(self):
return hash(repr(self))
def __lt__(self, other):
return True
def __le__(self, other):
return True
def __eq__(self, other):
return isinstance(other, self.__class__)
def __ne__(self, other):
return not isinstance(other, self.__class__)
def __gt__(self, other):
return False
def __ge__(self, other):
return False
def __neg__(self):
return Infinity
NegativeInfinity = NegativeInfinity()
| bsd-2-clause |
pombredanne/PeachPy | peachpy/encoder.py | 6 | 7810 | # This file is part of Peach-Py package and is licensed under the Simplified BSD license.
# See license.rst for the full text of the license.
from peachpy.abi import Endianness
class Encoder:
def __init__(self, endianness, bitness=None):
assert endianness in {Endianness.Little, Endianness.Big}
if endianness == Endianness.Little:
self.int16 = Encoder.int16le
self.uint16 = Encoder.uint16le
self.int32 = Encoder.int32le
self.uint32 = Encoder.uint32le
self.int64 = Encoder.int64le
self.uint64 = Encoder.uint64le
else:
self.int16 = Encoder.int16be
self.uint16 = Encoder.uint16be
self.int32 = Encoder.int32be
self.uint32 = Encoder.uint32be
self.int64 = Encoder.int64be
self.uint64 = Encoder.uint64be
self.bitness = bitness
if bitness is not None:
assert bitness in {32, 64}, "Only 32-bit and 64-bit encoders are supported"
if bitness == 32:
self.signed_offset = self.int32
self.unsigned_offset = self.uint32
else:
self.signed_offset = self.int64
self.unsigned_offset = self.uint64
@staticmethod
def int8(n):
"""Converts signed 8-bit integer to bytearray representation"""
assert -128 <= n <= 127, "%u can not be represented as an 8-bit signed integer" % n
return bytearray([n & 0xFF])
@staticmethod
def uint8(n):
"""Converts unsigned 8-bit integer to bytearray representation"""
assert 0 <= n <= 255, "%u can not be represented as an 8-bit unsigned integer" % n
return bytearray([n])
@staticmethod
def int16le(n):
"""Converts signed 16-bit integer to little-endian bytearray representation"""
assert -32768 <= n <= 32767, "%u can not be represented as a 16-bit signed integer" % n
return bytearray([n & 0xFF, (n >> 8) & 0xFF])
@staticmethod
def int16be(n):
"""Converts signed 16-bit integer to big-endian bytearray representation"""
assert -32768 <= n <= 32767, "%u can not be represented as a 16-bit signed integer" % n
        return bytearray([(n >> 8) & 0xFF, n & 0xFF])
@staticmethod
def uint16le(n):
"""Converts unsigned 16-bit integer to little-endian bytearray representation"""
assert 0 <= n <= 65535, "%u can not be represented as a 16-bit unsigned integer" % n
return bytearray([n & 0xFF, n >> 8])
@staticmethod
def uint16be(n):
"""Converts unsigned 16-bit integer to big-endian bytearray representation"""
assert 0 <= n <= 65535, "%u can not be represented as a 16-bit unsigned integer" % n
return bytearray([n >> 8, n & 0xFF])
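    # For example, uint16le(0x1234) yields bytearray(b'\x34\x12') and
    # uint16be(0x1234) yields bytearray(b'\x12\x34'); the 32- and 64-bit
    # variants below follow the same byte ordering.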
@staticmethod
def int32le(n):
"""Converts signed 32-bit integer to little-endian bytearray representation"""
assert -2147483648 <= n <= 2147483647, "%u can not be represented as a 32-bit signed integer" % n
return bytearray([n & 0xFF, (n >> 8) & 0xFF, (n >> 16) & 0xFF, (n >> 24) & 0xFF])
@staticmethod
def int32be(n):
"""Converts signed 32-bit integer to big-endian bytearray representation"""
assert -2147483648 <= n <= 2147483647, "%u can not be represented as a 32-bit signed integer" % n
return bytearray([(n >> 24) & 0xFF, (n >> 16) & 0xFF, (n >> 8) & 0xFF, n & 0xFF])
@staticmethod
def uint32le(n):
"""Converts unsigned 32-bit integer to little-endian bytearray representation"""
assert 0 <= n <= 4294967295, "%u can not be represented as a 32-bit unsigned integer" % n
return bytearray([n & 0xFF, (n >> 8) & 0xFF, (n >> 16) & 0xFF, n >> 24])
@staticmethod
def uint32be(n):
"""Converts unsigned 32-bit integer to big-endian bytearray representation"""
assert 0 <= n <= 4294967295, "%u can not be represented as a 32-bit unsigned integer" % n
return bytearray([n >> 24, (n >> 16) & 0xFF, (n >> 8) & 0xFF, n & 0xFF])
@staticmethod
def int64le(n):
"""Converts signed 64-bit integer to little-endian bytearray representation"""
assert -9223372036854775808 <= n <= 9223372036854775807, \
"%u can not be represented as a 64-bit signed integer" % n
return bytearray([n & 0xFF, (n >> 8) & 0xFF, (n >> 16) & 0xFF, (n >> 24) & 0xFF,
(n >> 32) & 0xFF, (n >> 40) & 0xFF, (n >> 48) & 0xFF, (n >> 56) & 0xFF])
@staticmethod
def int64be(n):
"""Converts signed 64-bit integer to big-endian bytearray representation"""
assert -9223372036854775808 <= n <= 9223372036854775807, \
"%u can not be represented as a 64-bit signed integer" % n
return bytearray([(n >> 56) & 0xFF, (n >> 48) & 0xFF, (n >> 40) & 0xFF, (n >> 32) & 0xFF,
(n >> 24) & 0xFF, (n >> 16) & 0xFF, (n >> 8) & 0xFF, n & 0xFF])
@staticmethod
def uint64le(n):
"""Converts unsigned 64-bit integer to little-endian bytearray representation"""
assert 0 <= n <= 18446744073709551615, "%u can not be represented as a 64-bit unsigned integer" % n
return bytearray([n & 0xFF, (n >> 8) & 0xFF, (n >> 16) & 0xFF, (n >> 24) & 0xFF,
(n >> 32) & 0xFF, (n >> 40) & 0xFF, (n >> 48) & 0xFF, (n >> 56) & 0xFF])
@staticmethod
def uint64be(n):
"""Converts unsigned 64-bit integer to big-endian bytearray representation"""
assert 0 <= n <= 18446744073709551615, "%u can not be represented as a 64-bit unsigned integer" % n
return bytearray([(n >> 56) & 0xFF, (n >> 48) & 0xFF, (n >> 40) & 0xFF, (n >> 32) & 0xFF,
(n >> 24) & 0xFF, (n >> 16) & 0xFF, (n >> 8) & 0xFF, n & 0xFF])
def int16(self, n):
"""Converts signed 16-bit integer to bytearray representation according to encoder endianness"""
pass
def uint16(self, n):
"""Converts unsigned 16-bit integer to bytearray representation according to encoder endianness"""
pass
def int32(self, n):
"""Converts signed 32-bit integer to bytearray representation according to encoder endianness"""
pass
def uint32(self, n):
"""Converts unsigned 32-bit integer to bytearray representation according to encoder endianness"""
pass
def int64(self, n):
"""Converts signed 64-bit integer to bytearray representation according to encoder endianness"""
pass
def uint64(self, n):
"""Converts unsigned 64-bit integer to bytearray representation according to encoder endianness"""
pass
@staticmethod
def fixed_string(string, size):
"""Converts string to fixed-length bytearray representation"""
assert isinstance(size, (int, long)) and size > 0, "size %u is not a positive integer" % size
if string is None:
return bytearray(size)
import codecs
byte_string = codecs.encode(string, "utf8")
if len(byte_string) > size:
raise ValueError("The length of %s exceeds the target %d" % (string, size))
elif len(byte_string) == size:
return byte_string
else:
return byte_string + bytearray(size - len(byte_string))
def signed_offset(self, n):
"""Converts signed integer offset to bytearray representation according to encoder bitness and endianness"""
raise ValueError("Can not encode signed offset: encoder bitness not specified")
def unsigned_offset(self, n):
"""Converts unsigned integer offset to bytearray representation according to encoder bitness and endianness"""
raise ValueError("Can not encode unsigned offset: encoder bitness not specified")
| bsd-2-clause |
ganzenmg/lammps_current | tools/eff/lmp2data.py | 54 | 4104 | #!/usr/local/bin/python-2.5/bin/python
Info="""
Module name: lmp2data.py
Author: (c) Andres Jaramillo-Botero
California Institute of Technology
[email protected]
Project: pEFF
Version: August 2009
Extracts the electron radii from a lammps trajectory dump of style custom:
dump 1 all custom period dump_file id type x y z spin radius ...
NOTE: The radius must be the i'th column per trajectory entry in the dump file
"""
# import essentials:
import sys, os
from math import log10
from shutil import rmtree
from getopt import gnu_getopt as getopt
import numpy
def printHelp():
print Info
print "Usage: python lmp2data.py test.lammpstrj\n"
return
def makeradii(infile,outfile,column,flag_all):
print "Reading %s ... [WAIT]"%infile,
fin = open(infile,'r')
lines = fin.xreadlines()
print 7*"\b"+"[DONE]"
frame=0
radii=[]
# grep the number of frames and atoms/frame
os.system("grep TIMESTEP %s | wc -l > frames; grep -m 1 -A 1 ATOMS %s > atoms; grep -m 1 \"ITEM: ATOMS\" %s > params"%(infile,infile,infile))
tmp=open("frames",'r')
frames=int(tmp.readline().split()[0])
tmp.close()
tmp=open("atoms",'r')
atoms=int(tmp.readlines()[1].split()[0])
tmp.close()
tmp=open("params",'r')
ids=tmp.readline().split()[2:]
os.system("rm -rf frames atoms params")
arry=numpy.zeros((atoms,frames),dtype=str)
framecnt=0
header=9
ecount=0
if flag_all==True: atom_type="nuclei and electron"
else: atom_type="electron"
print "Extracting %s %s per frame from %s ... "%(atom_type,ids[column],infile),
for i,line in enumerate(lines):
lo=(atoms+header)*framecnt+header
hi=lo+atoms
if (i<lo):
continue
elif (i >= lo) and (i < hi):
lparse=line.split()
id=int(lparse[0])
# r=float(lparse[column-1])
r=lparse[column]
# if (float(r)!=0):
arry[id-1][framecnt]=r
            # debug leftover, disabled: it printed every value and blocked on
            # raw_input() for each atom
            # print arry[id-1][framecnt], r, raw_input()
if (float(r)!=0) and (framecnt==0): ecount+=1
# else: arry[id-1][framecnt]=r
if (i==lo+1):
sys.stdout.write("%d/%d%s"%(framecnt+1,frames,(int(log10(framecnt+1))+3+int(log10(frames)))*"\b"))
sys.stdout.flush()
if (i == hi+1):
framecnt+=1
print
if outfile=="":
outfile=infile+'.%s'%(ids[column])
fout=open(outfile,'w')
else: fout=open(outfile,'w')
print "Writing %s/frame table to %s ... "%(ids[column],outfile),
sys.stdout.flush()
for i in range(frames):
fout.writelines('\tF'+str(i))
fout.writelines("\n")
e=1
for a in range(atoms):
if flag_all==True:
sys.stdout.write("%d/%d%s"%(a+1,atoms,(int(log10(a+1))+int(log10(atoms))+3)*"\b"))
sys.stdout.flush()
fout.writelines("%d\t"%(a+1))
for f in range(frames):
fout.writelines("%s\t"%(arry[a][f]))
fout.writelines("\n")
else:
if arry[a][0] == 0.0:
continue
else:
sys.stdout.write("%d/%d%s"%(e,ecount,(int(log10(e))+int(log10(ecount))+3)*"\b"))
sys.stdout.flush()
e+=1
fout.writelines("%d\t"%(a+1))
for f in range(frames):
fout.writelines("%s\t"%(arry[a][f]))
fout.writelines("\n")
print
print "DONE .... GOODBYE !!"
fout.close()
fin.close()
if __name__ == '__main__':
# set defaults
outfile = ""
flag_all = False
column=6 # default = radius
# check for input:
opts, argv = getopt(sys.argv[1:], 'c:o:ha')
# if no input, print help and exit
if len(argv) != 1:
printHelp()
sys.exit(1)
else:
infile=argv[0]
# read options
for opt, arg in opts:
if opt == '-h': # -h: print help
printHelp()
if opt == '-o': # output file name
outfile=arg
if opt == '-a': # all nuclei+electrons
flag_all=True
if opt == '-c': # select column from lammpstrj file to tabulate
column=int(arg)
makeradii(infile,outfile,column,flag_all)
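# Example invocations (the dump file name is hypothetical; by default the
# output name appends the selected column's header label, e.g. dump.lammpstrj.radius):
#   python lmp2data.py dump.lammpstrj                      # electron radii, column 6
#   python lmp2data.py -a -c 7 -o out.txt dump.lammpstrj   # nuclei too, column 7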
| gpl-2.0 |
dataDogma/Computer-Science | Django-MVP-page/venv/Lib/site-packages/pip/_vendor/colorama/winterm.py | 578 | 6290 | # Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
from . import win32
# from wincon.h
class WinColor(object):
BLACK = 0
BLUE = 1
GREEN = 2
CYAN = 3
RED = 4
MAGENTA = 5
YELLOW = 6
GREY = 7
# from wincon.h
class WinStyle(object):
NORMAL = 0x00 # dim text, dim background
BRIGHT = 0x08 # bright text, dim background
BRIGHT_BACKGROUND = 0x80 # dim text, bright background
class WinTerm(object):
def __init__(self):
self._default = win32.GetConsoleScreenBufferInfo(win32.STDOUT).wAttributes
self.set_attrs(self._default)
self._default_fore = self._fore
self._default_back = self._back
self._default_style = self._style
# In order to emulate LIGHT_EX in windows, we borrow the BRIGHT style.
# So that LIGHT_EX colors and BRIGHT style do not clobber each other,
# we track them separately, since LIGHT_EX is overwritten by Fore/Back
# and BRIGHT is overwritten by Style codes.
self._light = 0
def get_attrs(self):
return self._fore + self._back * 16 + (self._style | self._light)
def set_attrs(self, value):
self._fore = value & 7
self._back = (value >> 4) & 7
self._style = value & (WinStyle.BRIGHT | WinStyle.BRIGHT_BACKGROUND)
def reset_all(self, on_stderr=None):
self.set_attrs(self._default)
self.set_console(attrs=self._default)
def fore(self, fore=None, light=False, on_stderr=False):
if fore is None:
fore = self._default_fore
self._fore = fore
# Emulate LIGHT_EX with BRIGHT Style
if light:
self._light |= WinStyle.BRIGHT
else:
self._light &= ~WinStyle.BRIGHT
self.set_console(on_stderr=on_stderr)
def back(self, back=None, light=False, on_stderr=False):
if back is None:
back = self._default_back
self._back = back
# Emulate LIGHT_EX with BRIGHT_BACKGROUND Style
if light:
self._light |= WinStyle.BRIGHT_BACKGROUND
else:
self._light &= ~WinStyle.BRIGHT_BACKGROUND
self.set_console(on_stderr=on_stderr)
def style(self, style=None, on_stderr=False):
if style is None:
style = self._default_style
self._style = style
self.set_console(on_stderr=on_stderr)
def set_console(self, attrs=None, on_stderr=False):
if attrs is None:
attrs = self.get_attrs()
handle = win32.STDOUT
if on_stderr:
handle = win32.STDERR
win32.SetConsoleTextAttribute(handle, attrs)
def get_position(self, handle):
position = win32.GetConsoleScreenBufferInfo(handle).dwCursorPosition
# Because Windows coordinates are 0-based,
# and win32.SetConsoleCursorPosition expects 1-based.
position.X += 1
position.Y += 1
return position
def set_cursor_position(self, position=None, on_stderr=False):
if position is None:
# I'm not currently tracking the position, so there is no default.
# position = self.get_position()
return
handle = win32.STDOUT
if on_stderr:
handle = win32.STDERR
win32.SetConsoleCursorPosition(handle, position)
def cursor_adjust(self, x, y, on_stderr=False):
handle = win32.STDOUT
if on_stderr:
handle = win32.STDERR
position = self.get_position(handle)
adjusted_position = (position.Y + y, position.X + x)
win32.SetConsoleCursorPosition(handle, adjusted_position, adjust=False)
def erase_screen(self, mode=0, on_stderr=False):
# 0 should clear from the cursor to the end of the screen.
# 1 should clear from the cursor to the beginning of the screen.
# 2 should clear the entire screen, and move cursor to (1,1)
handle = win32.STDOUT
if on_stderr:
handle = win32.STDERR
csbi = win32.GetConsoleScreenBufferInfo(handle)
# get the number of character cells in the current buffer
cells_in_screen = csbi.dwSize.X * csbi.dwSize.Y
# get number of character cells before current cursor position
cells_before_cursor = csbi.dwSize.X * csbi.dwCursorPosition.Y + csbi.dwCursorPosition.X
if mode == 0:
from_coord = csbi.dwCursorPosition
cells_to_erase = cells_in_screen - cells_before_cursor
if mode == 1:
from_coord = win32.COORD(0, 0)
cells_to_erase = cells_before_cursor
elif mode == 2:
from_coord = win32.COORD(0, 0)
cells_to_erase = cells_in_screen
# fill the entire screen with blanks
win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord)
# now set the buffer's attributes accordingly
win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord)
if mode == 2:
# put the cursor where needed
win32.SetConsoleCursorPosition(handle, (1, 1))
def erase_line(self, mode=0, on_stderr=False):
# 0 should clear from the cursor to the end of the line.
# 1 should clear from the cursor to the beginning of the line.
# 2 should clear the entire line.
handle = win32.STDOUT
if on_stderr:
handle = win32.STDERR
csbi = win32.GetConsoleScreenBufferInfo(handle)
if mode == 0:
from_coord = csbi.dwCursorPosition
cells_to_erase = csbi.dwSize.X - csbi.dwCursorPosition.X
if mode == 1:
from_coord = win32.COORD(0, csbi.dwCursorPosition.Y)
cells_to_erase = csbi.dwCursorPosition.X
elif mode == 2:
from_coord = win32.COORD(0, csbi.dwCursorPosition.Y)
cells_to_erase = csbi.dwSize.X
# fill the entire screen with blanks
win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord)
# now set the buffer's attributes accordingly
win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord)
def set_title(self, title):
win32.SetConsoleTitle(title)
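# A quick self-check of the attribute packing used by get_attrs()/set_attrs()
# above: foreground occupies the low 3 bits, background is shifted left by 4,
# and the BRIGHT/BRIGHT_BACKGROUND style bits sit on top. No win32 calls are
# needed to verify the arithmetic.
if __name__ == '__main__':
    fore, back, style = WinColor.RED, WinColor.BLUE, WinStyle.BRIGHT
    attrs = fore + back * 16 + style                      # mirrors get_attrs()
    assert attrs & 7 == fore                              # mirrors set_attrs()
    assert (attrs >> 4) & 7 == back
    assert attrs & (WinStyle.BRIGHT | WinStyle.BRIGHT_BACKGROUND) == style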
| gpl-3.0 |
hamzehd/edx-platform | cms/lib/xblock/test/test_authoring_mixin.py | 105 | 6282 | """
Tests for the Studio authoring XBlock mixin.
"""
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from xmodule.partitions.partitions import Group, UserPartition
class AuthoringMixinTestCase(ModuleStoreTestCase):
"""
Tests the studio authoring XBlock mixin.
"""
def setUp(self):
"""
Create a simple course with a video component.
"""
super(AuthoringMixinTestCase, self).setUp()
self.course = CourseFactory.create()
chapter = ItemFactory.create(
category='chapter',
parent_location=self.course.location,
display_name='Test Chapter'
)
sequential = ItemFactory.create(
category='sequential',
parent_location=chapter.location,
display_name='Test Sequential'
)
vertical = ItemFactory.create(
category='vertical',
parent_location=sequential.location,
display_name='Test Vertical'
)
video = ItemFactory.create(
category='video',
parent_location=vertical.location,
display_name='Test Video'
)
self.vertical_location = vertical.location
self.video_location = video.location
self.pet_groups = [Group(1, 'Cat Lovers'), Group(2, 'Dog Lovers')]
def create_content_groups(self, content_groups):
"""
Create a cohorted user partition with the specified content groups.
"""
# pylint: disable=attribute-defined-outside-init
self.content_partition = UserPartition(
1,
'Content Groups',
'Contains Groups for Cohorted Courseware',
content_groups,
scheme_id='cohort'
)
self.course.user_partitions = [self.content_partition]
self.store.update_item(self.course, self.user.id)
def create_verification_user_partitions(self, checkpoint_names):
"""
Create user partitions for verification checkpoints.
"""
scheme = UserPartition.get_scheme("verification")
self.course.user_partitions = [
UserPartition(
id=0,
name=checkpoint_name,
description="Verification checkpoint",
scheme=scheme,
groups=[
Group(scheme.ALLOW, "Completed verification at {}".format(checkpoint_name)),
Group(scheme.DENY, "Did not complete verification at {}".format(checkpoint_name)),
],
)
for checkpoint_name in checkpoint_names
]
self.store.update_item(self.course, self.user.id)
def set_staff_only(self, item_location):
"""Make an item visible to staff only."""
item = self.store.get_item(item_location)
item.visible_to_staff_only = True
self.store.update_item(item, self.user.id)
def set_group_access(self, item_location, group_ids):
"""
Set group_access for the specified item to the specified group
ids within the content partition.
"""
item = self.store.get_item(item_location)
item.group_access[self.content_partition.id] = group_ids
self.store.update_item(item, self.user.id)
def verify_visibility_view_contains(self, item_location, substrings):
"""
Verify that an item's visibility view returns an html string
containing all the expected substrings.
"""
item = self.store.get_item(item_location)
html = item.visibility_view().body_html()
for string in substrings:
self.assertIn(string, html)
def test_html_no_partition(self):
self.verify_visibility_view_contains(self.video_location, 'No content groups exist')
def test_html_empty_partition(self):
self.create_content_groups([])
self.verify_visibility_view_contains(self.video_location, 'No content groups exist')
def test_html_populated_partition(self):
self.create_content_groups(self.pet_groups)
self.verify_visibility_view_contains(self.video_location, ['Cat Lovers', 'Dog Lovers'])
def test_html_no_partition_staff_locked(self):
self.set_staff_only(self.vertical_location)
self.verify_visibility_view_contains(self.video_location, ['No content groups exist'])
def test_html_empty_partition_staff_locked(self):
self.create_content_groups([])
self.set_staff_only(self.vertical_location)
self.verify_visibility_view_contains(self.video_location, 'No content groups exist')
def test_html_populated_partition_staff_locked(self):
self.create_content_groups(self.pet_groups)
self.set_staff_only(self.vertical_location)
self.verify_visibility_view_contains(
self.video_location,
['The Unit this component is contained in is hidden from students.', 'Cat Lovers', 'Dog Lovers']
)
def test_html_false_content_group(self):
self.create_content_groups(self.pet_groups)
self.set_group_access(self.video_location, ['false_group_id'])
self.verify_visibility_view_contains(
self.video_location, ['Cat Lovers', 'Dog Lovers', 'Content group no longer exists.']
)
def test_html_false_content_group_staff_locked(self):
self.create_content_groups(self.pet_groups)
self.set_staff_only(self.vertical_location)
self.set_group_access(self.video_location, ['false_group_id'])
self.verify_visibility_view_contains(
self.video_location,
[
'Cat Lovers',
'Dog Lovers',
'The Unit this component is contained in is hidden from students.',
'Content group no longer exists.'
]
)
def test_html_verification_checkpoints(self):
self.create_verification_user_partitions(["Midterm A", "Midterm B"])
self.verify_visibility_view_contains(
self.video_location,
[
"Verification Checkpoint",
"Midterm A",
"Midterm B",
]
)
| agpl-3.0 |
scalable-networks/ext | gnuradio-3.7.0.1/gr-vocoder/python/vocoder/qa_codec2_vocoder.py | 8 | 2420 | #!/usr/bin/env python
#
# Copyright 2011,2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr, gr_unittest, vocoder, blocks
class test_codec2_vocoder (gr_unittest.TestCase):
def setUp (self):
self.tb = gr.top_block()
def tearDown (self):
self.tb = None
def test001_module_load (self):
data = 20*(100,200,300,400,500,600,700,800)
expected_data = (0,0,0,3,2,0,1,5,6,7,1,-1,0,-5,-11,-10,-20,-22,
-20,-20,-27,-26,-36,-48,-59,-24,5,-7,-12,-27,-22,
-22,-16,13,20,39,23,25,8,-6,15,44,97,135,145,125,
94,102,126,129,165,192,180,132,99,79,73,83,72,47,
40,0,-32,-46,-67,-99,-123,-114,-87,-108,-131,-152,
-181,-245,-348,-294,-101,-71,-85,-26,99,123,15,2,77,
13,-117,-145,-105,-39,-50,-89,-59,-77,-134,-95,-51,
-22,17,-19,-59,-74,-103,-78,4,77,113,60,18,13,-67,
-49,24,88,179,190,89,18,-90,-102,-50,-5,123,135,57,
31,-82,-98,-51,6,93,104,44,-5,-84,-107,-44,45,102,104,
15,-47,-107,-126,-87,-11,89,93,13,-95,-136,-187,-70,
-167,216,-70,-103,175,-284,-486)
src = blocks.vector_source_s(data)
enc = vocoder.codec2_encode_sp()
dec = vocoder.codec2_decode_ps()
snk = blocks.vector_sink_s()
self.tb.connect(src, enc, dec, snk)
self.tb.run()
actual_result = snk.data()
self.assertEqual(expected_data, actual_result)
if __name__ == '__main__':
gr_unittest.run(test_codec2_vocoder, "test_codec2_vocoder.xml")
| gpl-2.0 |
Osndok/zim-desktop-wiki | zim/plugins/distractionfree.py | 1 | 8457 | # -*- coding: utf-8 -*-
# Copyright 2012 Jaap Karssenberg <[email protected]>
import gtk
import logging
from zim.plugins import PluginClass, WindowExtension, extends
from zim.gui import PATHBAR_NONE, PATHBAR_RECENT
logger = logging.getLogger('zim.plugins.distractionfree')
_minsize = 300 # prevent pageview from disappearing altogether
_minmargin = 5 # minimum margin to keep from other widgets
class DistractionFreePlugin(PluginClass):
plugin_info = {
'name': _('Distraction Free Editing'), # T: plugin name
'description': _(
'This plugin adds settings that help using zim\n'
'as a distraction free editor.\n'
), # T: plugin description
'author': 'Jaap Karssenberg',
'help': 'Plugins:Distraction Free Editing',
}
plugin_preferences = (
# key, type, label, default
('hide_menubar', 'bool', _('Hide menubar in fullscreen mode'), True), # T: plugin preference
('hide_toolbar', 'bool', _('Hide toolbar in fullscreen mode'), True), # T: plugin preference
('hide_pathbar', 'bool', _('Hide pathbar in fullscreen mode'), True), # T: plugin preference
('hide_statusbar', 'bool', _('Hide statusbar in fullscreen mode'), True), # T: plugin preference
('max_page_width', 'int', _('Maximum page width'), 850, (_minsize, 10000)), # T: plugin preference
('vmargin', 'int', _('Vertical margin'), 50, (0, 10000)), # T: plugin preference
('basecolor', 'color', _('Text background color'), '#babdb6'), # T: plugin preference
('textcolor', 'color', _('Text foreground color'), '#2e3436'), # T: plugin preference
('bgcolor', 'color', _('Screen background color'), '#2e3436'), # T: plugin preference
#('fgcolor', 'color', _('Screen foreground color'), '#eeeeec'),
)
@extends('MainWindow')
class MainWindowExtension(WindowExtension):
def __init__(self, plugin, window):
WindowExtension.__init__(self, plugin, window)
self._normal_colors = None
self._show_panes = True
self.preferences = plugin.preferences
self.connectto(plugin.preferences, 'changed', self.on_preferences_changed)
self.connectto(window, 'init-uistate', self.on_init_uistate)
self.connectto(window, 'fullscreen-changed')
self.connectto(window.pageview.view, 'size-allocate')
def on_init_uistate(self, window):
self.on_preferences_changed(self.plugin.preferences)
def on_preferences_changed(self, preferences):
# Set show menubar & Update margins
show_menubar = not preferences['hide_menubar']
show_toolbar = not preferences['hide_toolbar']
show_pathbar = not preferences['hide_pathbar']
show_statusbar = not preferences['hide_statusbar']
if self.window.isfullscreen:
self.window.toggle_menubar(show_menubar)
self.window.toggle_toolbar(show_toolbar)
self.window.toggle_statusbar(show_statusbar)
if show_pathbar \
and self.window.uistate['pathbar_type_fullscreen'] == PATHBAR_NONE:
self.window.set_pathbar(PATHBAR_RECENT)
elif not show_pathbar:
self.window.set_pathbar(PATHBAR_NONE)
textview = self.window.pageview.view
self.on_size_allocate(textview, textview.get_allocation())
else:
self.window.uistate['show_menubar_fullscreen'] = show_menubar
self.window.uistate['show_toolbar_fullscreen'] = show_toolbar
self.window.uistate['show_statusbar_fullscreen'] = show_statusbar
if show_pathbar \
and self.window.uistate['pathbar_type_fullscreen'] == PATHBAR_NONE:
self.window.uistate['pathbar_type_fullscreen'] = PATHBAR_RECENT
elif not show_pathbar:
self.window.uistate['pathbar_type_fullscreen'] = PATHBAR_NONE
# TODO - would be nice to be able to toggle hide/show for pathbar without need to set type
# allow hiding container or seperate widget from "model"
def on_fullscreen_changed(self, window):
if window.isfullscreen:
self._show_panes = bool(window.get_visible_panes())
self._save_colors(window)
self._set_colors(self._custom_colors)
window.toggle_panes(False)
window.pageview.swindow.set_shadow_type(gtk.SHADOW_NONE) # XXX
elif self._normal_colors:
self._set_colors(self._normal_colors)
window.toggle_panes(self._show_panes)
window.pageview.grab_focus()
window.pageview.swindow.set_shadow_type(gtk.SHADOW_IN) # XXX
else:
pass
# NOTE: would be nice to change color of _all_ widgets when switching
# to fullscreen, but this is practically not possible because
# we can not set just the few colors in RcStyle, would need to
# switch the whole theme
def _save_colors(self, window):
style = window.pageview.view.rc_get_style()
self._normal_colors = []
for state in (
gtk.STATE_NORMAL,
#gtk.STATE_ACTIVE,
#gtk.STATE_PRELIGHT,
#gtk.STATE_SELECTED,
#gtk.STATE_INSENSITIVE
):
self._normal_colors.append({
'base': style.base[gtk.STATE_NORMAL],
'text': style.text[gtk.STATE_NORMAL],
'bg': style.bg[gtk.STATE_NORMAL],
#'fg': style.fg[gtk.STATE_NORMAL],
})
@property
def _custom_colors(self):
# array of NORMAL, ACTIVE, PRELIGHT, SELECTED, INSENSITIVE
normal = {
'base': self.preferences['basecolor'],
'text': self.preferences['textcolor'],
'bg': self.preferences['bgcolor'],
#'fg': self.preferences['fgcolor'],
}
#selected = { # like normal, but reverse text and base
# 'base': self.preferences['textcolor'],
# 'text': self.preferences['basecolor'],
# 'bg': self.preferences['bgcolor'],
# 'fg': self.preferences['fgcolor'],
#}
#return [normal, normal, normal, selected, normal]
return (normal,)
def _set_colors(self, colors):
# See gtk.RcStyle docs for all values in RC file
rc = 'style "zim-colors"\n{\n'
for i, state in enumerate((
'NORMAL',
#'ACTIVE',
#'PRELIGHT',
#'SELECTED',
#'INSENSITIVE',
)):
values = colors[i]
values['state'] = state
rc += '\tbase[%(state)s] = "%(base)s"\n' \
'\ttext[%(state)s] = "%(text)s"\n' \
'\tbg[%(state)s] = "%(bg)s"\n' % values
#'\tfg[%(state)s] = "%(fg)s"\n' % values
#rc += '}\nclass "GtkWidget" style "zim-colors"'
rc += '}\nwidget "*.zim-pageview" style "zim-colors"\n'
logger.debug('Parse RC: >>>\n%s<<<', rc)
gtk.rc_parse_string(rc)
gtk.rc_reset_styles(gtk.settings_get_default())
def on_size_allocate(self, textview, allocation):
# Here we play with textview margin windows to position text
# in center of screen with a maximum size
if not self.window.isfullscreen:
self._set_margins(0, 0, 0, 0)
return
# Screen geometry
screen = gtk.gdk.screen_get_default()
root_window = screen.get_root_window()
mouse_x, mouse_y, mouse_mods = root_window.get_pointer()
current_monitor_number = screen.get_monitor_at_point(mouse_x, mouse_y)
monitor_geometry = screen.get_monitor_geometry(current_monitor_number)
screen_width = monitor_geometry.width
screen_height = monitor_geometry.height
# X max width based on user preference
max_x = self.preferences['max_page_width']
xmargin = int((screen_width - max_x) / 2)
if allocation.width > max_x:
if allocation.x > xmargin:
# we are bumped to the right
left = _minmargin
right = allocation.width - max_x
elif (allocation.x + allocation.width) < (screen_width - xmargin):
# we are bumped to the left
left = allocation.width - max_x
right = _minmargin
else:
# room on both sides
left = xmargin - allocation.x
right = allocation.width - max_x - left
else:
left = _minmargin
right = _minmargin
# Y setting simply keeps a small margin
vmargin = self.preferences['vmargin']
if vmargin > ((screen_height - _minsize) / 2):
vmargin = ((screen_height - _minsize) / 2)
if allocation.y < vmargin:
top = vmargin - allocation.y
else:
top = _minmargin
if (allocation.y + allocation.height) > (screen_height - vmargin):
bottom = (allocation.y + allocation.height) - (screen_height - vmargin)
else:
bottom = _minmargin
self._set_margins(left, right, top, bottom)
def _set_margins(self, left, right, top, bottom):
textview = self.window.pageview.view
textview.set_border_window_size(gtk.TEXT_WINDOW_LEFT, left)
textview.set_border_window_size(gtk.TEXT_WINDOW_RIGHT, right)
textview.set_border_window_size(gtk.TEXT_WINDOW_TOP, top)
textview.set_border_window_size(gtk.TEXT_WINDOW_BOTTOM, bottom)
def teardown(self):
# show at least menubar again, set margins to zero & restore colors
self.window.uistate['show_menubar_fullscreen'] = True
self._set_margins(0, 0, 0, 0)
if self._normal_colors:
self._set_colors(self._normal_colors)
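# For the default preferences above, _set_colors(self._custom_colors) hands
# gtk.rc_parse_string() an RC snippet of this shape (colors taken from the
# plugin_preferences defaults):
#
#   style "zim-colors"
#   {
#       base[NORMAL] = "#babdb6"
#       text[NORMAL] = "#2e3436"
#       bg[NORMAL] = "#2e3436"
#   }
#   widget "*.zim-pageview" style "zim-colors"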
| gpl-2.0 |
keedio/hue | desktop/core/ext-py/Paste-2.0.1/tests/test_exceptions/test_error_middleware.py | 47 | 3389 | from paste.fixture import *
from paste.exceptions.errormiddleware import ErrorMiddleware
from paste import lint
from paste.util.quoting import strip_html
#
# For some strange reason, these 4 lines cannot be removed or the regression
# test breaks; is it counting the number of lines in the file somehow?
#
def do_request(app, expect_status=500):
app = lint.middleware(app)
app = ErrorMiddleware(app, {}, debug=True)
app = clear_middleware(app)
testapp = TestApp(app)
res = testapp.get('', status=expect_status,
expect_errors=True)
return res
def clear_middleware(app):
"""
The fixture sets paste.throw_errors, which suppresses exactly what
we want to test in this case. This wrapper also strips exc_info
on the *first* call to start_response (but not the second, or
subsequent calls).
"""
def clear_throw_errors(environ, start_response):
headers_sent = []
def replacement(status, headers, exc_info=None):
if headers_sent:
return start_response(status, headers, exc_info)
headers_sent.append(True)
return start_response(status, headers)
if 'paste.throw_errors' in environ:
del environ['paste.throw_errors']
return app(environ, replacement)
return clear_throw_errors
############################################################
## Applications that raise exceptions
############################################################
def bad_app():
"No argument list!"
return None
def unicode_bad_app(environ, start_response):
raise ValueError(u"\u1000")
def start_response_app(environ, start_response):
"raise error before start_response"
raise ValueError("hi")
def after_start_response_app(environ, start_response):
start_response("200 OK", [('Content-type', 'text/plain')])
raise ValueError('error2')
def iter_app(environ, start_response):
start_response("200 OK", [('Content-type', 'text/plain')])
return yielder([b'this', b' is ', b' a', None])
def yielder(args):
for arg in args:
if arg is None:
raise ValueError("None raises error")
yield arg
############################################################
## Tests
############################################################
def test_makes_exception():
res = do_request(bad_app)
assert '<html' in res
res = strip_html(str(res))
if six.PY3:
assert 'bad_app() takes 0 positional arguments but 2 were given' in res
else:
assert 'bad_app() takes no arguments (2 given' in res, repr(res)
assert 'iterator = application(environ, start_response_wrapper)' in res
assert 'paste.lint' in res
assert 'paste.exceptions.errormiddleware' in res
def test_unicode_exception():
res = do_request(unicode_bad_app)
def test_start_res():
res = do_request(start_response_app)
res = strip_html(str(res))
assert 'ValueError: hi' in res
assert 'test_error_middleware' in res
assert ':52 in start_response_app' in res
def test_after_start():
res = do_request(after_start_response_app, 200)
res = strip_html(str(res))
#print res
assert 'ValueError: error2' in res
def test_iter_app():
res = do_request(lint.middleware(iter_app), 200)
#print res
assert 'None raises error' in res
assert 'yielder' in res
| apache-2.0 |
40223208/2015cdb_g4_0420 | static/Brython3.1.1-20150328-091302/Lib/operator.py | 674 | 7736 | #!/usr/bin/env python3
"""
Operator Interface
This module exports a set of functions corresponding to the intrinsic
operators of Python. For example, operator.add(x, y) is equivalent
to the expression x+y. The function names are those used for special
methods; variants without leading and trailing '__' are also provided
for convenience.
This is the pure Python implementation of the module.
"""
# downloaded from http://bugs.python.org/file28327/operator.py
#import builtins as _bi #there is no builtins module
def lt(a, b):
"Same as a < b."
return a < b
__lt__ = lt
def le(a, b):
"Same as a <= b."
return a <= b
__le__ = le
def eq(a, b):
"Same as a == b."
return a == b
__eq__ = eq
def ne(a, b):
"Same as a != b."
return a != b
__ne__ = ne
def ge(a, b):
"Same as a >= b."
return a >= b
__ge__ = ge
def gt(a, b):
"Same as a > b."
return a > b
__gt__ = gt
def not_(a):
"Same as not a."
return not a
__not__ = not_
def truth(a):
"Return True if a is true, False otherwise."
#return _bi.bool(a)
return bool(a)
def is_(a, b):
"Same as a is b."
return a is b
# brython does not like (causes syntax error)
#def is_not(a, b):
# "Same as a is not b."
# return a is not b
#recursion error or just comment out and add code below function
#def abs(a):
# "Same as abs(a)."
# #return _bi.abs(a)
# return abs(a)
__abs__ = abs
abs=abs
def add(a, b):
"Same as a + b."
return a + b
__add__ = add
def and_(a, b):
"Same as a & b."
return a & b
__and__ = and_
def floordiv(a, b):
"Same as a // b."
return a // b
__floordiv__ = floordiv
def index(a):
"Same as a.__index__()."
return a.__index__()
__index__ = index
def inv(a):
"Same as ~a."
return ~a #brython does not like
#return a^(2**31)
invert = __inv__ = __invert__ = inv
def lshift(a, b):
"Same as a << b."
return a << b
__lshift__ = lshift
def mod(a, b):
"Same as a % b."
return a % b
__mod__ = mod
def mul(a, b):
"Same as a * b."
return a * b
__mul__ = mul
def neg(a):
"Same as -a."
return -a
__neg__ = neg
def or_(a, b):
"Same as a | b."
return a | b
__or__ = or_
def pos(a):
"Same as +a."
return +a #brython does not like
__pos__ = pos
def pow(a, b):
"Same as a ** b."
return a ** b
__pow__ = pow
def rshift(a, b):
"Same as a >> b."
return a >> b
__rshift__ = rshift
def sub(a, b):
"Same as a - b."
return a - b
__sub__ = sub
def truediv(a, b):
"Same as a / b."
return a / b
__truediv__ = truediv
def xor(a, b):
"Same as a ^ b."
return a ^ b
__xor__ = xor
def concat(a, b):
"Same as a + b, for a and b sequences."
if not (hasattr(a, '__getitem__') and hasattr(b, '__getitem__')):
raise TypeError('a and b must be sequences')
return a + b
__concat__ = concat
def contains(a, b):
"Same as b in a (note reversed operands)."
return b in a
__contains__ = contains
def countOf(a, b):
"Return the number of times b occurs in a."
count = 0
for i in a:
if i == b:
count += 1
return count
def delitem(a, b):
"Same as del a[b]."
del a[b]
__delitem__ = delitem
def getitem(a, b):
"Same as a[b]."
return a[b]
__getitem__ = getitem
#fixme brython doesn't like this function
def indexOf(a, b):
"Return the first index of b in a."
#for i, j in _bi.enumerate(a):
for i, j in enumerate(a):
if j == b:
return i
else:
raise ValueError('b not found in a')
def setitem(a, b, c):
"Same as a[b] = c."
a[b] = c
__setitem__ = setitem
class attrgetter:
"""
Return a callable object that fetches the given attribute(s) from its operand.
After f=attrgetter('name'), the call f(r) returns r.name.
After g=attrgetter('name', 'date'), the call g(r) returns (r.name, r.date).
After h=attrgetter('name.first', 'name.last'), the call h(r) returns
(r.name.first, r.name.last).
"""
def __init__(self, attr, *attrs):
self._attrs = (attr,)
self._attrs += attrs
if any(not isinstance(attr, str) for attr in self._attrs):
raise TypeError('attribute name must be a string')
@staticmethod
def _resolve_attr(obj, attr):
for name in attr.split('.'):
#obj = _bi.getattr(obj, name)
obj = getattr(obj, name)
return obj
def __call__(self, obj):
if len(self._attrs) == 1:
return self._resolve_attr(obj, self._attrs[0])
return tuple(self._resolve_attr(obj, attr) for attr in self._attrs)
class itemgetter:
"""
Return a callable object that fetches the given item(s) from its operand.
After f=itemgetter(2), the call f(r) returns r[2].
After g=itemgetter(2,5,3), the call g(r) returns (r[2], r[5], r[3])
"""
def __init__(self, item, *items):
self._items = (item,)
self._items += items
def __call__(self, obj):
if len(self._items) == 1:
return obj[self._items[0]]
return tuple(obj[item] for item in self._items)
class methodcaller:
"""
Return a callable object that calls the given method on its operand.
After f = methodcaller('name'), the call f(r) returns r.name().
After g = methodcaller('name', 'date', foo=1), the call g(r) returns
r.name('date', foo=1).
"""
def __init__(self, name, *args, **kwargs):
self._name = name
self._args = args
self._kwargs = kwargs
def __call__(self, obj):
return getattr(obj, self._name)(*self._args, **self._kwargs)
def iadd(a, b):
"Same as a += b."
a += b
return a
__iadd__ = iadd
def iand(a, b):
"Same as a &= b."
a &= b
return a
__iand__ = iand
def iconcat(a, b):
"Same as a += b, for a and b sequences."
if not (hasattr(a, '__getitem__') and hasattr(b, '__getitem__')):
raise TypeError('a and b must be sequences')
a += b
return a
__iconcat__ = iconcat
def ifloordiv(a, b):
"Same as a //= b."
a //= b
return a
__ifloordiv__ = ifloordiv
def ilshift(a, b):
"Same as a <<= b."
a <<= b
return a
__ilshift__ = ilshift
def imod(a, b):
"Same as a %= b."
a %= b
return a
__imod__ = imod
def imul(a, b):
"Same as a *= b."
a *= b
return a
__imul__ = imul
def ior(a, b):
"Same as a |= b."
a |= b
return a
__ior__ = ior
def ipow(a, b):
"Same as a **= b."
a **= b
return a
__ipow__ = ipow
def irshift(a, b):
"Same as a >>= b."
a >>= b
return a
__irshift__ = irshift
def isub(a, b):
"Same as a -= b."
a -= b
return a
__isub__ = isub
def itruediv(a, b):
"Same as a /= b."
a /= b
return a
__itruediv__ = itruediv
def ixor(a, b):
"Same as a ^= b."
a ^= b
return a
__ixor__ = ixor
def length_hint(obj, default=0):
"""
Return an estimate of the number of items in obj.
This is useful for presizing containers when building from an iterable.
If the object supports len(), the result will be exact. Otherwise, it may
over- or under-estimate by an arbitrary amount. The result will be an
integer >= 0.
"""
try:
return len(obj)
except TypeError:
try:
val = obj.__length_hint__()
if val is NotImplemented:
raise TypeError
except (AttributeError, TypeError):
return default
else:
if val < 0:
raise ValueError('__length_hint__() should return >= 0')
return val
#try:
# from _operator import *
# from _operator import __doc__
#except ImportError:
# pass
| gpl-3.0 |
boomsbloom/dtm-fmri | DTM/for_gensim/lib/python2.7/site-packages/matplotlib/axes/_base.py | 1 | 129192 | from __future__ import (absolute_import, division, print_function,
unicode_literals)
from matplotlib.externals import six
from matplotlib.externals.six.moves import xrange
import itertools
import warnings
import math
from operator import itemgetter
import numpy as np
from numpy import ma
import matplotlib
from matplotlib import cbook
from matplotlib.cbook import (_check_1d, _string_to_bool, iterable,
index_of, get_label)
from matplotlib import docstring
import matplotlib.colors as mcolors
import matplotlib.lines as mlines
import matplotlib.patches as mpatches
import matplotlib.artist as martist
import matplotlib.transforms as mtransforms
import matplotlib.ticker as mticker
import matplotlib.axis as maxis
import matplotlib.scale as mscale
import matplotlib.spines as mspines
import matplotlib.font_manager as font_manager
import matplotlib.text as mtext
import matplotlib.image as mimage
from matplotlib.offsetbox import OffsetBox
from matplotlib.artist import allow_rasterization
from matplotlib.cbook import iterable, index_of
from matplotlib.rcsetup import cycler
rcParams = matplotlib.rcParams
is_string_like = cbook.is_string_like
is_sequence_of_strings = cbook.is_sequence_of_strings
def _process_plot_format(fmt):
"""
Process a MATLAB style color/line style format string. Return a
(*linestyle*, *color*) tuple as a result of the processing. Default
values are ('-', 'b'). Example format strings include:
* 'ko': black circles
* '.b': blue dots
* 'r--': red dashed lines
.. seealso::
:func:`~matplotlib.Line2D.lineStyles` and
:func:`~matplotlib.pyplot.colors`
for all possible styles and color format string.
"""
linestyle = None
marker = None
color = None
# Is fmt just a colorspec?
try:
color = mcolors.colorConverter.to_rgb(fmt)
# We need to differentiate grayscale '1.0' from tri_down marker '1'
try:
fmtint = str(int(fmt))
except ValueError:
return linestyle, marker, color # Yes
else:
if fmt != fmtint:
# user definitely doesn't want tri_down marker
return linestyle, marker, color # Yes
else:
# ignore converted color
color = None
except ValueError:
pass # No, not just a color.
# handle the multi char special cases and strip them from the
# string
if fmt.find('--') >= 0:
linestyle = '--'
fmt = fmt.replace('--', '')
if fmt.find('-.') >= 0:
linestyle = '-.'
fmt = fmt.replace('-.', '')
if fmt.find(' ') >= 0:
linestyle = 'None'
fmt = fmt.replace(' ', '')
chars = [c for c in fmt]
for c in chars:
if c in mlines.lineStyles:
if linestyle is not None:
raise ValueError(
'Illegal format string "%s"; two linestyle symbols' % fmt)
linestyle = c
elif c in mlines.lineMarkers:
if marker is not None:
raise ValueError(
'Illegal format string "%s"; two marker symbols' % fmt)
marker = c
elif c in mcolors.colorConverter.colors:
if color is not None:
raise ValueError(
'Illegal format string "%s"; two color symbols' % fmt)
color = c
else:
raise ValueError(
'Unrecognized character %c in format string' % c)
if linestyle is None and marker is None:
linestyle = rcParams['lines.linestyle']
if linestyle is None:
linestyle = 'None'
if marker is None:
marker = 'None'
return linestyle, marker, color
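# Worked examples of the (linestyle, marker, color) tuples returned by
# _process_plot_format above:
#   _process_plot_format('r--')  ->  ('--', 'None', 'r')
#   _process_plot_format('ko')   ->  ('None', 'o', 'k')
#   _process_plot_format('1.0')  ->  (None, None, (1.0, 1.0, 1.0))  # grayscale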
class _process_plot_var_args(object):
"""
Process variable length arguments to the plot command, so that
plot commands like the following are supported::
plot(t, s)
plot(t1, s1, t2, s2)
plot(t1, s1, 'ko', t2, s2)
plot(t1, s1, 'ko', t2, s2, 'r--', t3, e3)
an arbitrary number of *x*, *y*, *fmt* are allowed
"""
def __init__(self, axes, command='plot'):
self.axes = axes
self.command = command
self.set_prop_cycle()
def __getstate__(self):
# note: it is not possible to pickle a itertools.cycle instance
return {'axes': self.axes, 'command': self.command}
def __setstate__(self, state):
self.__dict__ = state.copy()
self.set_prop_cycle()
def set_prop_cycle(self, *args, **kwargs):
if not (args or kwargs) or (len(args) == 1 and args[0] is None):
prop_cycler = rcParams['axes.prop_cycle']
if prop_cycler is None and 'axes.color_cycle' in rcParams:
clist = rcParams['axes.color_cycle']
prop_cycler = cycler('color', clist)
else:
prop_cycler = cycler(*args, **kwargs)
self.prop_cycler = itertools.cycle(prop_cycler)
# This should make a copy
self._prop_keys = prop_cycler.keys
def __call__(self, *args, **kwargs):
if self.axes.xaxis is not None and self.axes.yaxis is not None:
xunits = kwargs.pop('xunits', self.axes.xaxis.units)
if self.axes.name == 'polar':
xunits = kwargs.pop('thetaunits', xunits)
yunits = kwargs.pop('yunits', self.axes.yaxis.units)
if self.axes.name == 'polar':
yunits = kwargs.pop('runits', yunits)
if xunits != self.axes.xaxis.units:
self.axes.xaxis.set_units(xunits)
if yunits != self.axes.yaxis.units:
self.axes.yaxis.set_units(yunits)
ret = self._grab_next_args(*args, **kwargs)
return ret
def set_lineprops(self, line, **kwargs):
assert self.command == 'plot', 'set_lineprops only works with "plot"'
line.set(**kwargs)
def set_patchprops(self, fill_poly, **kwargs):
assert self.command == 'fill', 'set_patchprops only works with "fill"'
fill_poly.set(**kwargs)
def _xy_from_xy(self, x, y):
if self.axes.xaxis is not None and self.axes.yaxis is not None:
bx = self.axes.xaxis.update_units(x)
by = self.axes.yaxis.update_units(y)
if self.command != 'plot':
# the Line2D class can handle unitized data, with
# support for post hoc unit changes etc. Other mpl
# artists, e.g., Polygon which _process_plot_var_args
# also serves on calls to fill, cannot. So this is a
# hack to say: if you are not "plot", which is
# creating Line2D, then convert the data now to
# floats. If you are plot, pass the raw data through
# to Line2D which will handle the conversion. So
# polygons will not support post hoc conversions of
# the unit type since they are not storing the orig
# data. Hopefully we can rationalize this at a later
# date - JDH
if bx:
x = self.axes.convert_xunits(x)
if by:
y = self.axes.convert_yunits(y)
# like asanyarray, but converts scalar to array, and doesn't change
# existing compatible sequences
x = _check_1d(x)
y = _check_1d(y)
if x.shape[0] != y.shape[0]:
raise ValueError("x and y must have same first dimension")
if x.ndim > 2 or y.ndim > 2:
raise ValueError("x and y can be no greater than 2-D")
if x.ndim == 1:
x = x[:, np.newaxis]
if y.ndim == 1:
y = y[:, np.newaxis]
return x, y
def _getdefaults(self, ignore, *kwargs):
"""
Only advance the cycler if the cycler has information that
is not specified in any of the supplied tuple of dicts.
Ignore any keys specified in the `ignore` set.
Returns a copy of defaults dictionary if there are any
keys that are not found in any of the supplied dictionaries.
If the supplied dictionaries have non-None values for
everything the property cycler has, then just return
an empty dictionary. Ignored keys are excluded from the
returned dictionary.
"""
prop_keys = self._prop_keys
if ignore is None:
ignore = set([])
prop_keys = prop_keys - ignore
if any(all(kw.get(k, None) is None for kw in kwargs)
for k in prop_keys):
# Need to copy this dictionary or else the next time around
# in the cycle, the dictionary could be missing entries.
default_dict = six.next(self.prop_cycler).copy()
for p in ignore:
default_dict.pop(p, None)
else:
default_dict = {}
return default_dict
def _setdefaults(self, defaults, *kwargs):
"""
Given a defaults dictionary, and any other dictionaries,
update those other dictionaries with information in defaults if
none of the other dictionaries contains that information.
"""
for k in defaults:
if all(kw.get(k, None) is None for kw in kwargs):
for kw in kwargs:
kw[k] = defaults[k]
def _makeline(self, x, y, kw, kwargs):
kw = kw.copy() # Don't modify the original kw.
kwargs = kwargs.copy()
default_dict = self._getdefaults(None, kw, kwargs)
self._setdefaults(default_dict, kw, kwargs)
seg = mlines.Line2D(x, y, **kw)
self.set_lineprops(seg, **kwargs)
return seg
def _makefill(self, x, y, kw, kwargs):
kw = kw.copy() # Don't modify the original kw.
kwargs = kwargs.copy()
# Ignore 'marker'-related properties as they aren't Polygon
# properties, but they are Line2D properties, and so they are
# likely to appear in the default cycler construction.
# This is done here to the defaults dictionary as opposed to the
# other two dictionaries because we do want to capture when a
# *user* explicitly specifies a marker which should be an error.
# We also want to prevent advancing the cycler if there are no
# defaults needed after ignoring the given properties.
ignores = set(['marker', 'markersize', 'markeredgecolor',
'markerfacecolor', 'markeredgewidth'])
# Also ignore anything provided by *kwargs*.
for k, v in six.iteritems(kwargs):
if v is not None:
ignores.add(k)
# Only using the first dictionary to use as basis
# for getting defaults for back-compat reasons.
# Doing it with both seems to mess things up in
# various places (probably due to logic bugs elsewhere).
default_dict = self._getdefaults(ignores, kw)
self._setdefaults(default_dict, kw)
# Looks like we don't want "color" to be interpreted to
# mean both facecolor and edgecolor for some reason.
# So the "kw" dictionary is thrown out, and only its
# 'color' value is kept and translated as a 'facecolor'.
# This design should probably be revisited as it increases
# complexity.
facecolor = kw.get('color', None)
# Throw out 'color' as it is now handled as a facecolor
default_dict.pop('color', None)
# To get other properties set from the cycler
# modify the kwargs dictionary.
self._setdefaults(default_dict, kwargs)
seg = mpatches.Polygon(np.hstack((x[:, np.newaxis],
y[:, np.newaxis])),
facecolor=facecolor,
fill=True,
closed=kw['closed'])
self.set_patchprops(seg, **kwargs)
return seg
def _plot_args(self, tup, kwargs):
ret = []
if len(tup) > 1 and is_string_like(tup[-1]):
linestyle, marker, color = _process_plot_format(tup[-1])
tup = tup[:-1]
elif len(tup) == 3:
raise ValueError('third arg must be a format string')
else:
linestyle, marker, color = None, None, None
# Don't allow any None value; These will be up-converted
# to one element array of None which causes problems
# downstream.
if any(v is None for v in tup):
raise ValueError("x and y must not be None")
kw = {}
for k, v in zip(('linestyle', 'marker', 'color'),
(linestyle, marker, color)):
if v is not None:
kw[k] = v
if 'label' not in kwargs or kwargs['label'] is None:
kwargs['label'] = get_label(tup[-1], None)
if len(tup) == 2:
x = _check_1d(tup[0])
y = _check_1d(tup[-1])
else:
x, y = index_of(tup[-1])
x, y = self._xy_from_xy(x, y)
if self.command == 'plot':
func = self._makeline
else:
kw['closed'] = kwargs.get('closed', True)
func = self._makefill
ncx, ncy = x.shape[1], y.shape[1]
for j in xrange(max(ncx, ncy)):
seg = func(x[:, j % ncx], y[:, j % ncy], kw, kwargs)
ret.append(seg)
return ret
def _grab_next_args(self, *args, **kwargs):
remaining = args
while 1:
if len(remaining) == 0:
return
if len(remaining) <= 3:
for seg in self._plot_args(remaining, kwargs):
yield seg
return
if is_string_like(remaining[2]):
isplit = 3
else:
isplit = 2
for seg in self._plot_args(remaining[:isplit], kwargs):
yield seg
remaining = remaining[isplit:]
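# How _grab_next_args above slices a plot() argument list:
#   plot(t1, s1, 'ko', t2, s2)  ->  groups (t1, s1, 'ko') and (t2, s2)
#   plot(x)                     ->  group (x,); values plotted against their index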
class _AxesBase(martist.Artist):
"""
"""
name = "rectilinear"
_shared_x_axes = cbook.Grouper()
_shared_y_axes = cbook.Grouper()
def __str__(self):
return "Axes(%g,%g;%gx%g)" % tuple(self._position.bounds)
def __init__(self, fig, rect,
axisbg=None, # defaults to rc axes.facecolor
frameon=True,
sharex=None, # use Axes instance's xaxis info
sharey=None, # use Axes instance's yaxis info
label='',
xscale=None,
yscale=None,
**kwargs
):
"""
Build an :class:`Axes` instance in
:class:`~matplotlib.figure.Figure` *fig* with
*rect=[left, bottom, width, height]* in
:class:`~matplotlib.figure.Figure` coordinates
Optional keyword arguments:
================ =========================================
Keyword Description
================ =========================================
*adjustable* [ 'box' | 'datalim' | 'box-forced']
*alpha* float: the alpha transparency (can be None)
*anchor* [ 'C', 'SW', 'S', 'SE', 'E', 'NE', 'N',
'NW', 'W' ]
*aspect* [ 'auto' | 'equal' | aspect_ratio ]
*autoscale_on* [ *True* | *False* ] whether or not to
autoscale the *viewlim*
*axis_bgcolor* any matplotlib color, see
:func:`~matplotlib.pyplot.colors`
*axisbelow* draw the grids and ticks below the other
artists
*cursor_props* a (*float*, *color*) tuple
*figure* a :class:`~matplotlib.figure.Figure`
instance
*frame_on* a boolean - draw the axes frame
*label* the axes label
*navigate* [ *True* | *False* ]
*navigate_mode* [ 'PAN' | 'ZOOM' | None ] the navigation
toolbar button status
*position* [left, bottom, width, height] in
class:`~matplotlib.figure.Figure` coords
*sharex* an class:`~matplotlib.axes.Axes` instance
to share the x-axis with
*sharey* an class:`~matplotlib.axes.Axes` instance
to share the y-axis with
*title* the title string
*visible* [ *True* | *False* ] whether the axes is
visible
*xlabel* the xlabel
*xlim* (*xmin*, *xmax*) view limits
*xscale* [%(scale)s]
*xticklabels* sequence of strings
*xticks* sequence of floats
*ylabel* the ylabel strings
*ylim* (*ymin*, *ymax*) view limits
*yscale* [%(scale)s]
*yticklabels* sequence of strings
*yticks* sequence of floats
================ =========================================
""" % {'scale': ' | '.join(
[repr(x) for x in mscale.get_scale_names()])}
martist.Artist.__init__(self)
if isinstance(rect, mtransforms.Bbox):
self._position = rect
else:
self._position = mtransforms.Bbox.from_bounds(*rect)
self._originalPosition = self._position.frozen()
# self.set_axes(self)
self.axes = self
self.set_aspect('auto')
self._adjustable = 'box'
self.set_anchor('C')
self._sharex = sharex
self._sharey = sharey
if sharex is not None:
self._shared_x_axes.join(self, sharex)
if sharex._adjustable == 'box':
sharex._adjustable = 'datalim'
#warnings.warn(
# 'shared axes: "adjustable" is being changed to "datalim"')
self._adjustable = 'datalim'
if sharey is not None:
self._shared_y_axes.join(self, sharey)
if sharey._adjustable == 'box':
sharey._adjustable = 'datalim'
#warnings.warn(
# 'shared axes: "adjustable" is being changed to "datalim"')
self._adjustable = 'datalim'
self.set_label(label)
self.set_figure(fig)
self.set_axes_locator(kwargs.get("axes_locator", None))
self.spines = self._gen_axes_spines()
# this call may differ for non-sep axes, e.g., polar
self._init_axis()
if axisbg is None:
axisbg = rcParams['axes.facecolor']
self._axisbg = axisbg
self._frameon = frameon
self._axisbelow = rcParams['axes.axisbelow']
self._rasterization_zorder = None
self._hold = rcParams['axes.hold']
self._connected = {} # a dict from events to (id, func)
self.cla()
# funcs used to format x and y - fall back on major formatters
self.fmt_xdata = None
self.fmt_ydata = None
self.set_cursor_props((1, 'k')) # set the cursor properties for axes
self._cachedRenderer = None
self.set_navigate(True)
self.set_navigate_mode(None)
if xscale:
self.set_xscale(xscale)
if yscale:
self.set_yscale(yscale)
if len(kwargs):
self.update(kwargs)
if self.xaxis is not None:
self._xcid = self.xaxis.callbacks.connect('units finalize',
self.relim)
if self.yaxis is not None:
self._ycid = self.yaxis.callbacks.connect('units finalize',
self.relim)
def __setstate__(self, state):
self.__dict__ = state
# put the _remove_method back on all artists contained within the axes
for container_name in ['lines', 'collections', 'tables', 'patches',
'texts', 'images']:
container = getattr(self, container_name)
for artist in container:
artist._remove_method = container.remove
self._stale = True
def get_window_extent(self, *args, **kwargs):
"""
get the axes bounding box in display space; *args* and
*kwargs* are empty
"""
return self.bbox
def _init_axis(self):
"move this out of __init__ because non-separable axes don't use it"
self.xaxis = maxis.XAxis(self)
self.spines['bottom'].register_axis(self.xaxis)
self.spines['top'].register_axis(self.xaxis)
self.yaxis = maxis.YAxis(self)
self.spines['left'].register_axis(self.yaxis)
self.spines['right'].register_axis(self.yaxis)
self._update_transScale()
def set_figure(self, fig):
"""
Set the class:`~matplotlib.axes.Axes` figure
accepts a class:`~matplotlib.figure.Figure` instance
"""
martist.Artist.set_figure(self, fig)
self.bbox = mtransforms.TransformedBbox(self._position,
fig.transFigure)
# these will be updated later as data is added
self.dataLim = mtransforms.Bbox.null()
self.viewLim = mtransforms.Bbox.unit()
self.transScale = mtransforms.TransformWrapper(
mtransforms.IdentityTransform())
self._set_lim_and_transforms()
def _set_lim_and_transforms(self):
"""
set the *dataLim* and *viewLim*
:class:`~matplotlib.transforms.Bbox` attributes and the
*transScale*, *transData*, *transLimits* and *transAxes*
transformations.
.. note::
This method is primarily used by rectilinear projections
of the :class:`~matplotlib.axes.Axes` class, and is meant
to be overridden by new kinds of projection axes that need
different transformations and limits. (See
:class:`~matplotlib.projections.polar.PolarAxes` for an
example.)
"""
self.transAxes = mtransforms.BboxTransformTo(self.bbox)
# Transforms the x and y axis separately by a scale factor.
# It is assumed that this part will have non-linear components
# (e.g., for a log scale).
self.transScale = mtransforms.TransformWrapper(
mtransforms.IdentityTransform())
# An affine transformation on the data, generally to limit the
# range of the axes
self.transLimits = mtransforms.BboxTransformFrom(
mtransforms.TransformedBbox(self.viewLim, self.transScale))
# The parentheses are important for efficiency here -- they
# group the last two (which are usually affines) separately
# from the first (which, with log-scaling can be non-affine).
self.transData = self.transScale + (self.transLimits + self.transAxes)
self._xaxis_transform = mtransforms.blended_transform_factory(
self.transData, self.transAxes)
self._yaxis_transform = mtransforms.blended_transform_factory(
self.transAxes, self.transData)
def get_xaxis_transform(self, which='grid'):
"""
Get the transformation used for drawing x-axis labels, ticks
and gridlines. The x-direction is in data coordinates and the
y-direction is in axis coordinates.
.. note::
This transformation is primarily used by the
:class:`~matplotlib.axis.Axis` class, and is meant to be
overridden by new kinds of projections that may need to
place axis elements in different locations.
"""
if which == 'grid':
return self._xaxis_transform
elif which == 'tick1':
# for cartesian projection, this is bottom spine
return self.spines['bottom'].get_spine_transform()
elif which == 'tick2':
# for cartesian projection, this is top spine
return self.spines['top'].get_spine_transform()
else:
raise ValueError('unknown value for which')
def get_xaxis_text1_transform(self, pad_points):
"""
Get the transformation used for drawing x-axis labels, which
will add the given amount of padding (in points) between the
axes and the label. The x-direction is in data coordinates
and the y-direction is in axis coordinates. Returns a
3-tuple of the form::
(transform, valign, halign)
where *valign* and *halign* are requested alignments for the
text.
.. note::
This transformation is primarily used by the
:class:`~matplotlib.axis.Axis` class, and is meant to be
overridden by new kinds of projections that may need to
place axis elements in different locations.
"""
return (self.get_xaxis_transform(which='tick1') +
mtransforms.ScaledTranslation(0, -1 * pad_points / 72.0,
self.figure.dpi_scale_trans),
"top", "center")
def get_xaxis_text2_transform(self, pad_points):
"""
Get the transformation used for drawing the secondary x-axis
labels, which will add the given amount of padding (in points)
between the axes and the label. The x-direction is in data
coordinates and the y-direction is in axis coordinates.
Returns a 3-tuple of the form::
(transform, valign, halign)
where *valign* and *halign* are requested alignments for the
text.
.. note::
This transformation is primarily used by the
:class:`~matplotlib.axis.Axis` class, and is meant to be
overridden by new kinds of projections that may need to
place axis elements in different locations.
"""
return (self.get_xaxis_transform(which='tick2') +
mtransforms.ScaledTranslation(0, pad_points / 72.0,
self.figure.dpi_scale_trans),
"bottom", "center")
def get_yaxis_transform(self, which='grid'):
"""
Get the transformation used for drawing y-axis labels, ticks
and gridlines. The x-direction is in axis coordinates and the
y-direction is in data coordinates.
.. note::
This transformation is primarily used by the
:class:`~matplotlib.axis.Axis` class, and is meant to be
overridden by new kinds of projections that may need to
place axis elements in different locations.
"""
if which == 'grid':
return self._yaxis_transform
elif which == 'tick1':
# for cartesian projection, this is bottom spine
return self.spines['left'].get_spine_transform()
elif which == 'tick2':
# for cartesian projection, this is top spine
return self.spines['right'].get_spine_transform()
else:
raise ValueError('unknown value for which')
def get_yaxis_text1_transform(self, pad_points):
"""
Get the transformation used for drawing y-axis labels, which
will add the given amount of padding (in points) between the
axes and the label. The x-direction is in axis coordinates
and the y-direction is in data coordinates. Returns a 3-tuple
of the form::
(transform, valign, halign)
where *valign* and *halign* are requested alignments for the
text.
.. note::
This transformation is primarily used by the
:class:`~matplotlib.axis.Axis` class, and is meant to be
overridden by new kinds of projections that may need to
place axis elements in different locations.
"""
return (self.get_yaxis_transform(which='tick1') +
mtransforms.ScaledTranslation(-1 * pad_points / 72.0, 0,
self.figure.dpi_scale_trans),
"center", "right")
def get_yaxis_text2_transform(self, pad_points):
"""
Get the transformation used for drawing the secondary y-axis
labels, which will add the given amount of padding (in points)
between the axes and the label. The x-direction is in axis
coordinates and the y-direction is in data coordinates.
Returns a 3-tuple of the form::
(transform, valign, halign)
where *valign* and *halign* are requested alignments for the
text.
.. note::
This transformation is primarily used by the
:class:`~matplotlib.axis.Axis` class, and is meant to be
overridden by new kinds of projections that may need to
place axis elements in different locations.
"""
return (self.get_yaxis_transform(which='tick2') +
mtransforms.ScaledTranslation(pad_points / 72.0, 0,
self.figure.dpi_scale_trans),
"center", "left")
def _update_transScale(self):
self.transScale.set(
mtransforms.blended_transform_factory(
self.xaxis.get_transform(), self.yaxis.get_transform()))
if hasattr(self, "lines"):
for line in self.lines:
try:
line._transformed_path.invalidate()
except AttributeError:
pass
def get_position(self, original=False):
'Return the a copy of the axes rectangle as a Bbox'
if original:
return self._originalPosition.frozen()
else:
return self._position.frozen()
def set_position(self, pos, which='both'):
"""
Set the axes position with::
pos = [left, bottom, width, height]
in relative 0,1 coords, or *pos* can be a
:class:`~matplotlib.transforms.Bbox`
There are two position variables: one which is ultimately
used, but which may be modified by :meth:`apply_aspect`, and a
second which is the starting point for :meth:`apply_aspect`.
Optional keyword arguments:
*which*
========== ====================
value description
========== ====================
'active' to change the first
'original' to change the second
'both' to change both
========== ====================
"""
if not isinstance(pos, mtransforms.BboxBase):
pos = mtransforms.Bbox.from_bounds(*pos)
if which in ('both', 'active'):
self._position.set(pos)
if which in ('both', 'original'):
self._originalPosition.set(pos)
self.stale = True
def reset_position(self):
"""Make the original position the active position"""
pos = self.get_position(original=True)
self.set_position(pos, which='active')
def set_axes_locator(self, locator):
"""
set axes_locator
ACCEPT: a callable object which takes an axes instance and renderer and
returns a bbox.
"""
self._axes_locator = locator
self.stale = True
def get_axes_locator(self):
"""
return axes_locator
"""
return self._axes_locator
def _set_artist_props(self, a):
"""set the boilerplate props for artists added to axes"""
a.set_figure(self.figure)
if not a.is_transform_set():
a.set_transform(self.transData)
a.axes = self
if a.mouseover:
self.mouseover_set.add(a)
def _gen_axes_patch(self):
"""
Returns the patch used to draw the background of the axes. It
is also used as the clipping path for any data elements on the
axes.
In the standard axes, this is a rectangle, but in other
projections it may not be.
.. note::
Intended to be overridden by new projection types.
"""
return mpatches.Rectangle((0.0, 0.0), 1.0, 1.0)
def _gen_axes_spines(self, locations=None, offset=0.0, units='inches'):
"""
Returns a dict whose keys are spine names and values are
Line2D or Patch instances. Each element is used to draw a
spine of the axes.
In the standard axes, this is a single line segment, but in
other projections it may not be.
.. note::
Intended to be overridden by new projection types.
"""
return {
'left': mspines.Spine.linear_spine(self, 'left'),
'right': mspines.Spine.linear_spine(self, 'right'),
'bottom': mspines.Spine.linear_spine(self, 'bottom'),
'top': mspines.Spine.linear_spine(self, 'top'), }
def cla(self):
"""Clear the current axes."""
# Note: this is called by Axes.__init__()
# stash the current visibility state
if hasattr(self, 'patch'):
patch_visible = self.patch.get_visible()
else:
patch_visible = True
xaxis_visible = self.xaxis.get_visible()
yaxis_visible = self.yaxis.get_visible()
self.xaxis.cla()
self.yaxis.cla()
for name, spine in six.iteritems(self.spines):
spine.cla()
self.ignore_existing_data_limits = True
self.callbacks = cbook.CallbackRegistry()
if self._sharex is not None:
# major and minor are class instances with
# locator and formatter attributes
self.xaxis.major = self._sharex.xaxis.major
self.xaxis.minor = self._sharex.xaxis.minor
x0, x1 = self._sharex.get_xlim()
self.set_xlim(x0, x1, emit=False, auto=None)
# Save the current formatter/locator so we don't lose it
majf = self._sharex.xaxis.get_major_formatter()
minf = self._sharex.xaxis.get_minor_formatter()
majl = self._sharex.xaxis.get_major_locator()
minl = self._sharex.xaxis.get_minor_locator()
# This overwrites the current formatter/locator
self.xaxis._set_scale(self._sharex.xaxis.get_scale())
# Reset the formatter/locator
self.xaxis.set_major_formatter(majf)
self.xaxis.set_minor_formatter(minf)
self.xaxis.set_major_locator(majl)
self.xaxis.set_minor_locator(minl)
else:
self.xaxis._set_scale('linear')
if self._sharey is not None:
self.yaxis.major = self._sharey.yaxis.major
self.yaxis.minor = self._sharey.yaxis.minor
y0, y1 = self._sharey.get_ylim()
self.set_ylim(y0, y1, emit=False, auto=None)
# Save the current formatter/locator so we don't lose it
majf = self._sharey.yaxis.get_major_formatter()
minf = self._sharey.yaxis.get_minor_formatter()
majl = self._sharey.yaxis.get_major_locator()
minl = self._sharey.yaxis.get_minor_locator()
# This overwrites the current formatter/locator
self.yaxis._set_scale(self._sharey.yaxis.get_scale())
# Reset the formatter/locator
self.yaxis.set_major_formatter(majf)
self.yaxis.set_minor_formatter(minf)
self.yaxis.set_major_locator(majl)
self.yaxis.set_minor_locator(minl)
else:
self.yaxis._set_scale('linear')
# update the minor locator for x and y axis based on rcParams
if (rcParams['xtick.minor.visible']):
self.xaxis.set_minor_locator(mticker.AutoMinorLocator())
if (rcParams['ytick.minor.visible']):
self.yaxis.set_minor_locator(mticker.AutoMinorLocator())
self._autoscaleXon = True
self._autoscaleYon = True
self._xmargin = rcParams['axes.xmargin']
self._ymargin = rcParams['axes.ymargin']
self._tight = False
self._update_transScale() # needed?
self._get_lines = _process_plot_var_args(self)
self._get_patches_for_fill = _process_plot_var_args(self, 'fill')
self._gridOn = rcParams['axes.grid']
self.lines = []
self.patches = []
self.texts = []
self.tables = []
self.artists = []
self.images = []
self.mouseover_set = set()
self._current_image = None # strictly for pyplot via _sci, _gci
self.legend_ = None
self.collections = [] # collection.Collection instances
self.containers = []
self.grid(False) # Disable grid on init to use rcParameter
self.grid(self._gridOn, which=rcParams['axes.grid.which'],
axis=rcParams['axes.grid.axis'])
props = font_manager.FontProperties(
size=rcParams['axes.titlesize'],
weight=rcParams['axes.titleweight']
)
self.titleOffsetTrans = mtransforms.ScaledTranslation(
0.0, 5.0 / 72.0, self.figure.dpi_scale_trans)
self.title = mtext.Text(
x=0.5, y=1.0, text='',
fontproperties=props,
verticalalignment='baseline',
horizontalalignment='center',
)
self._left_title = mtext.Text(
x=0.0, y=1.0, text='',
fontproperties=props.copy(),
verticalalignment='baseline',
horizontalalignment='left', )
self._right_title = mtext.Text(
x=1.0, y=1.0, text='',
fontproperties=props.copy(),
verticalalignment='baseline',
horizontalalignment='right',
)
for _title in (self.title, self._left_title, self._right_title):
_title.set_transform(self.transAxes + self.titleOffsetTrans)
_title.set_clip_box(None)
self._set_artist_props(_title)
# the patch draws the background of the axes. we want this to
# be below the other artists; the axesPatch name is
# deprecated. We use the frame to draw the edges so we are
# setting the edgecolor to None
self.patch = self.axesPatch = self._gen_axes_patch()
self.patch.set_figure(self.figure)
self.patch.set_facecolor(self._axisbg)
self.patch.set_edgecolor('None')
self.patch.set_linewidth(0)
self.patch.set_transform(self.transAxes)
self.set_axis_on()
self.xaxis.set_clip_path(self.patch)
self.yaxis.set_clip_path(self.patch)
self._shared_x_axes.clean()
self._shared_y_axes.clean()
if self._sharex:
self.xaxis.set_visible(xaxis_visible)
self.patch.set_visible(patch_visible)
if self._sharey:
self.yaxis.set_visible(yaxis_visible)
self.patch.set_visible(patch_visible)
self.stale = True
def clear(self):
"""clear the axes"""
self.cla()
def set_prop_cycle(self, *args, **kwargs):
"""
Set the property cycle for any future plot commands on this Axes.
set_prop_cycle(arg)
set_prop_cycle(label, itr)
set_prop_cycle(label1=itr1[, label2=itr2[, ...]])
        Form 1 simply sets the given `Cycler` object.
Form 2 creates and sets a `Cycler` from a label and an iterable.
Form 3 composes and sets a `Cycler` as an inner product of the
pairs of keyword arguments. In other words, all of the
iterables are cycled simultaneously, as if through zip().
Parameters
----------
arg : Cycler
Set the given Cycler.
Can also be `None` to reset to the cycle defined by the
current style.
label : name
The property key. Must be a valid `Artist` property.
For example, 'color' or 'linestyle'. Aliases are allowed,
such as 'c' for 'color' and 'lw' for 'linewidth'.
itr : iterable
Finite-length iterable of the property values. These values
are validated and will raise a ValueError if invalid.
See Also
--------
:func:`cycler` Convenience function for creating your
own cyclers.
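        Illustrative usage (a sketch, not from the original docs)::
            ax.set_prop_cycle('color', ['red', 'green', 'blue'])
            ax.set_prop_cycle(color=['r', 'g'], linestyle=['-', '--'])
            ax.set_prop_cycle(None)   # reset to the current style's cycle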
"""
if args and kwargs:
raise TypeError("Cannot supply both positional and keyword "
"arguments to this method.")
if len(args) == 1 and args[0] is None:
prop_cycle = None
else:
prop_cycle = cycler(*args, **kwargs)
self._get_lines.set_prop_cycle(prop_cycle)
self._get_patches_for_fill.set_prop_cycle(prop_cycle)
def set_color_cycle(self, clist):
"""
Set the color cycle for any future plot commands on this Axes.
*clist* is a list of mpl color specifiers.
.. deprecated:: 1.5
"""
cbook.warn_deprecated(
'1.5', name='set_color_cycle', alternative='set_prop_cycle')
self.set_prop_cycle('color', clist)
def ishold(self):
"""return the HOLD status of the axes"""
return self._hold
def hold(self, b=None):
"""
Call signature::
hold(b=None)
Set the hold state. If *hold* is *None* (default), toggle the
*hold* state. Else set the *hold* state to boolean value *b*.
Examples::
# toggle hold
hold()
# turn hold on
hold(True)
# turn hold off
hold(False)
When hold is *True*, subsequent plot commands will be added to
the current axes. When hold is *False*, the current axes and
        figure will be cleared on the next plot command.
"""
if b is None:
self._hold = not self._hold
else:
self._hold = b
def get_aspect(self):
return self._aspect
def set_aspect(self, aspect, adjustable=None, anchor=None):
"""
*aspect*
======== ================================================
value description
======== ================================================
'auto' automatic; fill position rectangle with data
'normal' same as 'auto'; deprecated
'equal' same scaling from data to plot units for x and y
num a circle will be stretched such that the height
is num times the width. aspect=1 is the same as
aspect='equal'.
======== ================================================
*adjustable*
============ =====================================
value description
============ =====================================
'box' change physical size of axes
'datalim' change xlim or ylim
'box-forced' same as 'box', but axes can be shared
============ =====================================
            'box' does not allow axes sharing, as this can cause
            unintended side effects. For cases where sharing axes is
            fine, use 'box-forced'.
*anchor*
===== =====================
value description
===== =====================
'C' centered
'SW' lower left corner
'S' middle of bottom edge
'SE' lower right corner
etc.
===== =====================
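        Illustrative usage (a sketch, not from the original docs)::
            ax.set_aspect('equal', adjustable='datalim')
            ax.set_aspect(2.0, adjustable='box', anchor='C')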
.. deprecated:: 1.2
the option 'normal' for aspect is deprecated. Use 'auto' instead.
"""
if aspect == 'normal':
cbook.warn_deprecated(
'1.2', name='normal', alternative='auto', obj_type='aspect')
self._aspect = 'auto'
elif aspect in ('equal', 'auto'):
self._aspect = aspect
else:
self._aspect = float(aspect) # raise ValueError if necessary
if adjustable is not None:
self.set_adjustable(adjustable)
if anchor is not None:
self.set_anchor(anchor)
self.stale = True
def get_adjustable(self):
return self._adjustable
def set_adjustable(self, adjustable):
"""
ACCEPTS: [ 'box' | 'datalim' | 'box-forced']
"""
if adjustable in ('box', 'datalim', 'box-forced'):
if self in self._shared_x_axes or self in self._shared_y_axes:
if adjustable == 'box':
raise ValueError(
'adjustable must be "datalim" for shared axes')
self._adjustable = adjustable
else:
raise ValueError('argument must be "box", or "datalim"')
self.stale = True
def get_anchor(self):
return self._anchor
def set_anchor(self, anchor):
"""
*anchor*
===== ============
value description
===== ============
'C' Center
'SW' bottom left
'S' bottom
'SE' bottom right
'E' right
'NE' top right
'N' top
'NW' top left
'W' left
===== ============
"""
if (anchor in list(six.iterkeys(mtransforms.Bbox.coefs)) or
len(anchor) == 2):
self._anchor = anchor
else:
raise ValueError('argument must be among %s' %
', '.join(six.iterkeys(mtransforms.Bbox.coefs)))
self.stale = True
def get_data_ratio(self):
"""
Returns the aspect ratio of the raw data.
This method is intended to be overridden by new projection
types.
"""
xmin, xmax = self.get_xbound()
ymin, ymax = self.get_ybound()
xsize = max(math.fabs(xmax - xmin), 1e-30)
ysize = max(math.fabs(ymax - ymin), 1e-30)
return ysize / xsize
def get_data_ratio_log(self):
"""
Returns the aspect ratio of the raw data in log scale.
Will be used when both axis scales are in log.
"""
xmin, xmax = self.get_xbound()
ymin, ymax = self.get_ybound()
xsize = max(math.fabs(math.log10(xmax) - math.log10(xmin)), 1e-30)
ysize = max(math.fabs(math.log10(ymax) - math.log10(ymin)), 1e-30)
return ysize / xsize
def apply_aspect(self, position=None):
"""
        Use the ``_aspect`` and ``_adjustable`` attributes to modify the
        axes box or the view limits.
"""
if position is None:
position = self.get_position(original=True)
aspect = self.get_aspect()
if self.name != 'polar':
xscale, yscale = self.get_xscale(), self.get_yscale()
if xscale == "linear" and yscale == "linear":
aspect_scale_mode = "linear"
elif xscale == "log" and yscale == "log":
aspect_scale_mode = "log"
elif ((xscale == "linear" and yscale == "log") or
(xscale == "log" and yscale == "linear")):
if aspect != "auto":
warnings.warn(
'aspect is not supported for Axes with xscale=%s, '
'yscale=%s' % (xscale, yscale))
aspect = "auto"
else: # some custom projections have their own scales.
pass
else:
aspect_scale_mode = "linear"
if aspect == 'auto':
self.set_position(position, which='active')
return
if aspect == 'equal':
A = 1
else:
A = aspect
# Ensure at drawing time that any Axes involved in axis-sharing
# does not have its position changed.
if self in self._shared_x_axes or self in self._shared_y_axes:
if self._adjustable == 'box':
self._adjustable = 'datalim'
warnings.warn(
'shared axes: "adjustable" is being changed to "datalim"')
figW, figH = self.get_figure().get_size_inches()
fig_aspect = figH / figW
if self._adjustable in ['box', 'box-forced']:
if aspect_scale_mode == "log":
box_aspect = A * self.get_data_ratio_log()
else:
box_aspect = A * self.get_data_ratio()
pb = position.frozen()
pb1 = pb.shrunk_to_aspect(box_aspect, pb, fig_aspect)
self.set_position(pb1.anchored(self.get_anchor(), pb), 'active')
return
# reset active to original in case it had been changed
# by prior use of 'box'
self.set_position(position, which='active')
xmin, xmax = self.get_xbound()
ymin, ymax = self.get_ybound()
if aspect_scale_mode == "log":
xmin, xmax = math.log10(xmin), math.log10(xmax)
ymin, ymax = math.log10(ymin), math.log10(ymax)
xsize = max(math.fabs(xmax - xmin), 1e-30)
ysize = max(math.fabs(ymax - ymin), 1e-30)
l, b, w, h = position.bounds
box_aspect = fig_aspect * (h / w)
data_ratio = box_aspect / A
y_expander = (data_ratio * xsize / ysize - 1.0)
# If y_expander > 0, the dy/dx viewLim ratio needs to increase
if abs(y_expander) < 0.005:
return
if aspect_scale_mode == "log":
dL = self.dataLim
dL_width = math.log10(dL.x1) - math.log10(dL.x0)
dL_height = math.log10(dL.y1) - math.log10(dL.y0)
xr = 1.05 * dL_width
yr = 1.05 * dL_height
else:
dL = self.dataLim
xr = 1.05 * dL.width
yr = 1.05 * dL.height
xmarg = xsize - xr
ymarg = ysize - yr
Ysize = data_ratio * xsize
Xsize = ysize / data_ratio
Xmarg = Xsize - xr
Ymarg = Ysize - yr
# Setting these targets to, e.g., 0.05*xr does not seem to
# help.
xm = 0
ym = 0
changex = (self in self._shared_y_axes and
self not in self._shared_x_axes)
changey = (self in self._shared_x_axes and
self not in self._shared_y_axes)
if changex and changey:
warnings.warn("adjustable='datalim' cannot work with shared "
"x and y axes")
return
if changex:
adjust_y = False
else:
if xmarg > xm and ymarg > ym:
adjy = ((Ymarg > 0 and y_expander < 0) or
(Xmarg < 0 and y_expander > 0))
else:
adjy = y_expander > 0
adjust_y = changey or adjy # (Ymarg > xmarg)
if adjust_y:
yc = 0.5 * (ymin + ymax)
y0 = yc - Ysize / 2.0
y1 = yc + Ysize / 2.0
if aspect_scale_mode == "log":
self.set_ybound((10. ** y0, 10. ** y1))
else:
self.set_ybound((y0, y1))
else:
xc = 0.5 * (xmin + xmax)
x0 = xc - Xsize / 2.0
x1 = xc + Xsize / 2.0
if aspect_scale_mode == "log":
self.set_xbound((10. ** x0, 10. ** x1))
else:
self.set_xbound((x0, x1))
def axis(self, *v, **kwargs):
"""Set axis properties.
Valid signatures::
xmin, xmax, ymin, ymax = axis()
xmin, xmax, ymin, ymax = axis(list_arg)
xmin, xmax, ymin, ymax = axis(string_arg)
xmin, xmax, ymin, ymax = axis(**kwargs)
Parameters
----------
v : list of float or {'on', 'off', 'equal', 'tight', 'scaled',\
'normal', 'auto', 'image', 'square'}
Optional positional argument
Axis data limits set from a list; or a command relating to axes:
========== ================================================
Value Description
========== ================================================
            'on'       Turn axis lines and labels on
            'off'      Turn axis lines and labels off
'equal' Equal scaling by changing limits
'scaled' Equal scaling by changing box dimensions
'tight' Limits set such that all data is shown
'auto' Automatic scaling, fill rectangle with data
'normal' Same as 'auto'; deprecated
'image' 'scaled' with axis limits equal to data limits
'square' Square plot; similar to 'scaled', but initially\
forcing xmax-xmin = ymax-ymin
========== ================================================
        emit : bool, optional
            Passed to the set_{x,y}lim functions; determines whether
            observers are notified of axis limit changes
xmin, ymin, xmax, ymax : float, optional
The axis limits to be set
Returns
-------
xmin, xmax, ymin, ymax : float
The axis limits
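        Illustrative usage (a sketch; assumes an existing axes ``ax``)::
            ax.axis([0, 10, -1.5, 1.5])           # explicit limits
            ax.axis('equal')                      # equal data scaling
            xmin, xmax, ymin, ymax = ax.axis()    # query current limits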
"""
if len(v) == 0 and len(kwargs) == 0:
xmin, xmax = self.get_xlim()
ymin, ymax = self.get_ylim()
return xmin, xmax, ymin, ymax
emit = kwargs.get('emit', True)
if len(v) == 1 and is_string_like(v[0]):
s = v[0].lower()
if s == 'on':
self.set_axis_on()
elif s == 'off':
self.set_axis_off()
elif s in ('equal', 'tight', 'scaled', 'normal',
'auto', 'image', 'square'):
self.set_autoscale_on(True)
self.set_aspect('auto')
self.autoscale_view(tight=False)
# self.apply_aspect()
if s == 'equal':
self.set_aspect('equal', adjustable='datalim')
elif s == 'scaled':
self.set_aspect('equal', adjustable='box', anchor='C')
self.set_autoscale_on(False) # Req. by Mark Bakker
elif s == 'tight':
self.autoscale_view(tight=True)
self.set_autoscale_on(False)
elif s == 'image':
self.autoscale_view(tight=True)
self.set_autoscale_on(False)
self.set_aspect('equal', adjustable='box', anchor='C')
elif s == 'square':
self.set_aspect('equal', adjustable='box', anchor='C')
self.set_autoscale_on(False)
xlim = self.get_xlim()
ylim = self.get_ylim()
edge_size = max(np.diff(xlim), np.diff(ylim))
self.set_xlim([xlim[0], xlim[0] + edge_size],
emit=emit, auto=False)
self.set_ylim([ylim[0], ylim[0] + edge_size],
emit=emit, auto=False)
else:
raise ValueError('Unrecognized string %s to axis; '
'try on or off' % s)
xmin, xmax = self.get_xlim()
ymin, ymax = self.get_ylim()
return xmin, xmax, ymin, ymax
try:
v[0]
except IndexError:
xmin = kwargs.get('xmin', None)
xmax = kwargs.get('xmax', None)
auto = False # turn off autoscaling, unless...
if xmin is None and xmax is None:
auto = None # leave autoscaling state alone
xmin, xmax = self.set_xlim(xmin, xmax, emit=emit, auto=auto)
ymin = kwargs.get('ymin', None)
ymax = kwargs.get('ymax', None)
auto = False # turn off autoscaling, unless...
if ymin is None and ymax is None:
auto = None # leave autoscaling state alone
ymin, ymax = self.set_ylim(ymin, ymax, emit=emit, auto=auto)
return xmin, xmax, ymin, ymax
v = v[0]
if len(v) != 4:
raise ValueError('v must contain [xmin xmax ymin ymax]')
self.set_xlim([v[0], v[1]], emit=emit, auto=False)
self.set_ylim([v[2], v[3]], emit=emit, auto=False)
return v
def get_legend(self):
"""
Return the legend.Legend instance, or None if no legend is defined
"""
return self.legend_
def get_images(self):
"""return a list of Axes images contained by the Axes"""
return cbook.silent_list('AxesImage', self.images)
def get_lines(self):
"""Return a list of lines contained by the Axes"""
return cbook.silent_list('Line2D', self.lines)
def get_xaxis(self):
"""Return the XAxis instance"""
return self.xaxis
def get_xgridlines(self):
"""Get the x grid lines as a list of Line2D instances"""
return cbook.silent_list('Line2D xgridline',
self.xaxis.get_gridlines())
def get_xticklines(self):
"""Get the xtick lines as a list of Line2D instances"""
return cbook.silent_list('Text xtickline',
self.xaxis.get_ticklines())
def get_yaxis(self):
"""Return the YAxis instance"""
return self.yaxis
def get_ygridlines(self):
"""Get the y grid lines as a list of Line2D instances"""
return cbook.silent_list('Line2D ygridline',
self.yaxis.get_gridlines())
def get_yticklines(self):
"""Get the ytick lines as a list of Line2D instances"""
return cbook.silent_list('Line2D ytickline',
self.yaxis.get_ticklines())
# Adding and tracking artists
def _sci(self, im):
"""
        Helper for :func:`~matplotlib.pyplot.sci`;
do not use elsewhere.
"""
if isinstance(im, matplotlib.contour.ContourSet):
if im.collections[0] not in self.collections:
raise ValueError(
"ContourSet must be in current Axes")
elif im not in self.images and im not in self.collections:
raise ValueError(
"Argument must be an image, collection, or ContourSet in "
"this Axes")
self._current_image = im
def _gci(self):
"""
Helper for :func:`~matplotlib.pyplot.gci`;
do not use elsewhere.
"""
return self._current_image
def has_data(self):
"""
Return *True* if any artists have been added to axes.
        This should not be used to determine whether the *dataLim*
        needs to be updated, and may not actually be useful for
anything.
"""
return (
len(self.collections) +
len(self.images) +
len(self.lines) +
len(self.patches)) > 0
def add_artist(self, a):
"""Add any :class:`~matplotlib.artist.Artist` to the axes.
Use `add_artist` only for artists for which there is no dedicated
"add" method; and if necessary, use a method such as
`update_datalim` or `update_datalim_numerix` to manually update the
dataLim if the artist is to be included in autoscaling.
Returns the artist.
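        Illustrative usage (a sketch, not from the original docs; assumes
        ``from matplotlib.offsetbox import AnchoredText``)::
            at = AnchoredText("stats", loc=1)  # no dedicated "add" method
            ax.add_artist(at)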
"""
a.axes = self
self.artists.append(a)
self._set_artist_props(a)
a.set_clip_path(self.patch)
a._remove_method = lambda h: self.artists.remove(h)
return a
def add_collection(self, collection, autolim=True):
"""
Add a :class:`~matplotlib.collections.Collection` instance
to the axes.
Returns the collection.
"""
label = collection.get_label()
if not label:
collection.set_label('_collection%d' % len(self.collections))
self.collections.append(collection)
self._set_artist_props(collection)
if collection.get_clip_path() is None:
collection.set_clip_path(self.patch)
if autolim:
self.update_datalim(collection.get_datalim(self.transData))
collection._remove_method = lambda h: self.collections.remove(h)
return collection
def add_image(self, image):
"""
Add a :class:`~matplotlib.image.AxesImage` to the axes.
Returns the image.
"""
self._set_artist_props(image)
self.images.append(image)
image._remove_method = lambda h: self.images.remove(h)
return image
def add_line(self, line):
"""
Add a :class:`~matplotlib.lines.Line2D` to the list of plot
lines
Returns the line.
"""
self._set_artist_props(line)
if line.get_clip_path() is None:
line.set_clip_path(self.patch)
self._update_line_limits(line)
if not line.get_label():
line.set_label('_line%d' % len(self.lines))
self.lines.append(line)
line._remove_method = lambda h: self.lines.remove(h)
return line
def _update_line_limits(self, line):
"""
Figures out the data limit of the given line, updating self.dataLim.
"""
path = line.get_path()
if path.vertices.size == 0:
return
line_trans = line.get_transform()
if line_trans == self.transData:
data_path = path
elif any(line_trans.contains_branch_seperately(self.transData)):
# identify the transform to go from line's coordinates
# to data coordinates
trans_to_data = line_trans - self.transData
# if transData is affine we can use the cached non-affine component
# of line's path. (since the non-affine part of line_trans is
# entirely encapsulated in trans_to_data).
if self.transData.is_affine:
line_trans_path = line._get_transformed_path()
na_path, _ = line_trans_path.get_transformed_path_and_affine()
data_path = trans_to_data.transform_path_affine(na_path)
else:
data_path = trans_to_data.transform_path(path)
else:
# for backwards compatibility we update the dataLim with the
# coordinate range of the given path, even though the coordinate
# systems are completely different. This may occur in situations
# such as when ax.transAxes is passed through for absolute
# positioning.
data_path = path
if data_path.vertices.size > 0:
updatex, updatey = line_trans.contains_branch_seperately(
self.transData)
self.dataLim.update_from_path(data_path,
self.ignore_existing_data_limits,
updatex=updatex,
updatey=updatey)
self.ignore_existing_data_limits = False
def add_patch(self, p):
"""
Add a :class:`~matplotlib.patches.Patch` *p* to the list of
axes patches; the clipbox will be set to the Axes clipping
box. If the transform is not set, it will be set to
:attr:`transData`.
Returns the patch.
"""
self._set_artist_props(p)
if p.get_clip_path() is None:
p.set_clip_path(self.patch)
self._update_patch_limits(p)
self.patches.append(p)
p._remove_method = lambda h: self.patches.remove(h)
return p
def _update_patch_limits(self, patch):
"""update the data limits for patch *p*"""
# hist can add zero height Rectangles, which is useful to keep
# the bins, counts and patches lined up, but it throws off log
# scaling. We'll ignore rects with zero height or width in
# the auto-scaling
# cannot check for '==0' since unitized data may not compare to zero
        # issue #2150 - we update the limits if patch has non-zero width
# or height.
if (isinstance(patch, mpatches.Rectangle) and
((not patch.get_width()) and (not patch.get_height()))):
return
vertices = patch.get_path().vertices
if vertices.size > 0:
xys = patch.get_patch_transform().transform(vertices)
if patch.get_data_transform() != self.transData:
patch_to_data = (patch.get_data_transform() -
self.transData)
xys = patch_to_data.transform(xys)
updatex, updatey = patch.get_transform().\
contains_branch_seperately(self.transData)
self.update_datalim(xys, updatex=updatex,
updatey=updatey)
def add_table(self, tab):
"""
Add a :class:`~matplotlib.tables.Table` instance to the
list of axes tables
Returns the table.
"""
self._set_artist_props(tab)
self.tables.append(tab)
tab.set_clip_path(self.patch)
tab._remove_method = lambda h: self.tables.remove(h)
return tab
def add_container(self, container):
"""
Add a :class:`~matplotlib.container.Container` instance
to the axes.
Returns the collection.
"""
label = container.get_label()
if not label:
container.set_label('_container%d' % len(self.containers))
self.containers.append(container)
container.set_remove_method(lambda h: self.containers.remove(h))
return container
def relim(self, visible_only=False):
"""
Recompute the data limits based on current artists. If you want to
exclude invisible artists from the calculation, set
``visible_only=True``
At present, :class:`~matplotlib.collections.Collection`
instances are not supported.
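        Illustrative usage after mutating artist data (a sketch; assumes a
        line ``ln`` previously added to ``ax`` and new data arrays
        ``new_x``/``new_y``)::
            ln.set_data(new_x, new_y)
            ax.relim()            # recompute dataLim from current artists
            ax.autoscale_view()   # rescale the view limits to match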
"""
# Collections are deliberately not supported (yet); see
# the TODO note in artists.py.
self.dataLim.ignore(True)
self.dataLim.set_points(mtransforms.Bbox.null().get_points())
self.ignore_existing_data_limits = True
for line in self.lines:
if not visible_only or line.get_visible():
self._update_line_limits(line)
for p in self.patches:
if not visible_only or p.get_visible():
self._update_patch_limits(p)
def update_datalim(self, xys, updatex=True, updatey=True):
"""
        Update the data limits bbox with a sequence of xy tuples or an
        equivalent 2-D array.
        """
        # if no data is set currently, the bbox will ignore its
        # limits and set the bounds to be the bounds of the xydata.
        # Otherwise, it will compute the bounds of its current data
        # and the data in xydata
if iterable(xys) and not len(xys):
return
if not ma.isMaskedArray(xys):
xys = np.asarray(xys)
self.dataLim.update_from_data_xy(xys, self.ignore_existing_data_limits,
updatex=updatex, updatey=updatey)
self.ignore_existing_data_limits = False
def update_datalim_numerix(self, x, y):
"""
        Update the data limits bbox with separate sequences of x and y values.
        """
        # if no data is set currently, the bbox will ignore its
        # limits and set the bounds to be the bounds of the xydata.
        # Otherwise, it will compute the bounds of its current data
        # and the data in xydata
if iterable(x) and not len(x):
return
self.dataLim.update_from_data(x, y, self.ignore_existing_data_limits)
self.ignore_existing_data_limits = False
def update_datalim_bounds(self, bounds):
"""
Update the datalim to include the given
:class:`~matplotlib.transforms.Bbox` *bounds*
"""
self.dataLim.set(mtransforms.Bbox.union([self.dataLim, bounds]))
def _process_unit_info(self, xdata=None, ydata=None, kwargs=None):
"""Look for unit *kwargs* and update the axis instances as necessary"""
if self.xaxis is None or self.yaxis is None:
return
if xdata is not None:
# we only need to update if there is nothing set yet.
if not self.xaxis.have_units():
self.xaxis.update_units(xdata)
if ydata is not None:
# we only need to update if there is nothing set yet.
if not self.yaxis.have_units():
self.yaxis.update_units(ydata)
# process kwargs 2nd since these will override default units
if kwargs is not None:
xunits = kwargs.pop('xunits', self.xaxis.units)
if self.name == 'polar':
xunits = kwargs.pop('thetaunits', xunits)
if xunits != self.xaxis.units:
self.xaxis.set_units(xunits)
# If the units being set imply a different converter,
# we need to update.
if xdata is not None:
self.xaxis.update_units(xdata)
yunits = kwargs.pop('yunits', self.yaxis.units)
if self.name == 'polar':
yunits = kwargs.pop('runits', yunits)
if yunits != self.yaxis.units:
self.yaxis.set_units(yunits)
# If the units being set imply a different converter,
# we need to update.
if ydata is not None:
self.yaxis.update_units(ydata)
def in_axes(self, mouseevent):
"""
Return *True* if the given *mouseevent* (in display coords)
is in the Axes
"""
return self.patch.contains(mouseevent)[0]
def get_autoscale_on(self):
"""
Get whether autoscaling is applied for both axes on plot commands
"""
return self._autoscaleXon and self._autoscaleYon
def get_autoscalex_on(self):
"""
Get whether autoscaling for the x-axis is applied on plot commands
"""
return self._autoscaleXon
def get_autoscaley_on(self):
"""
Get whether autoscaling for the y-axis is applied on plot commands
"""
return self._autoscaleYon
def set_autoscale_on(self, b):
"""
Set whether autoscaling is applied on plot commands
accepts: [ *True* | *False* ]
"""
self._autoscaleXon = b
self._autoscaleYon = b
def set_autoscalex_on(self, b):
"""
Set whether autoscaling for the x-axis is applied on plot commands
accepts: [ *True* | *False* ]
"""
self._autoscaleXon = b
def set_autoscaley_on(self, b):
"""
Set whether autoscaling for the y-axis is applied on plot commands
accepts: [ *True* | *False* ]
"""
self._autoscaleYon = b
def set_xmargin(self, m):
"""
Set padding of X data limits prior to autoscaling.
*m* times the data interval will be added to each
end of that interval before it is used in autoscaling.
accepts: float in range 0 to 1
"""
if m < 0 or m > 1:
raise ValueError("margin must be in range 0 to 1")
self._xmargin = m
self.stale = True
def set_ymargin(self, m):
"""
Set padding of Y data limits prior to autoscaling.
*m* times the data interval will be added to each
end of that interval before it is used in autoscaling.
accepts: float in range 0 to 1
"""
if m < 0 or m > 1:
raise ValueError("margin must be in range 0 to 1")
self._ymargin = m
self.stale = True
def margins(self, *args, **kw):
"""
Set or retrieve autoscaling margins.
signatures::
margins()
returns xmargin, ymargin
::
margins(margin)
margins(xmargin, ymargin)
margins(x=xmargin, y=ymargin)
margins(..., tight=False)
All three forms above set the xmargin and ymargin parameters.
All keyword parameters are optional. A single argument
specifies both xmargin and ymargin. The *tight* parameter
is passed to :meth:`autoscale_view`, which is executed after
a margin is changed; the default here is *True*, on the
assumption that when margins are specified, no additional
padding to match tick marks is usually desired. Setting
*tight* to *None* will preserve the previous setting.
Specifying any margin changes only the autoscaling; for example,
if *xmargin* is not None, then *xmargin* times the X data
interval will be added to each end of that interval before
it is used in autoscaling.
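        Illustrative usage (a sketch, not from the original docs)::
            ax.margins(0.05)          # 5% padding on both axes
            ax.margins(x=0, y=0.1)    # no x padding, 10% y padding
            xm, ym = ax.margins()     # query the current margins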
"""
if not args and not kw:
return self._xmargin, self._ymargin
tight = kw.pop('tight', True)
mx = kw.pop('x', None)
my = kw.pop('y', None)
if len(args) == 1:
mx = my = args[0]
elif len(args) == 2:
mx, my = args
elif len(args) > 2:
raise ValueError("more than two arguments were supplied")
if mx is not None:
self.set_xmargin(mx)
if my is not None:
self.set_ymargin(my)
scalex = (mx is not None)
scaley = (my is not None)
self.autoscale_view(tight=tight, scalex=scalex, scaley=scaley)
def set_rasterization_zorder(self, z):
"""
Set zorder value below which artists will be rasterized. Set
to `None` to disable rasterizing of artists below a particular
zorder.
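        Illustrative usage (a sketch, not from the original docs)::
            ax.set_rasterization_zorder(0)   # rasterize artists below z=0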
"""
self._rasterization_zorder = z
self.stale = True
def get_rasterization_zorder(self):
"""
Get zorder value below which artists will be rasterized
"""
return self._rasterization_zorder
def autoscale(self, enable=True, axis='both', tight=None):
"""
Autoscale the axis view to the data (toggle).
Convenience method for simple axis view autoscaling.
It turns autoscaling on or off, and then,
if autoscaling for either axis is on, it performs
the autoscaling on the specified axis or axes.
*enable*: [True | False | None]
True (default) turns autoscaling on, False turns it off.
None leaves the autoscaling state unchanged.
*axis*: ['x' | 'y' | 'both']
which axis to operate on; default is 'both'
*tight*: [True | False | None]
If True, set view limits to data limits;
if False, let the locator and margins expand the view limits;
if None, use tight scaling if the only artist is an image,
otherwise treat *tight* as False.
The *tight* setting is retained for future autoscaling
until it is explicitly changed.
Returns None.
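        Illustrative usage (a sketch, not from the original docs)::
            ax.autoscale(axis='x', tight=True)   # tight x-limits only
            ax.autoscale(False)                  # turn autoscaling off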
"""
if enable is None:
scalex = True
scaley = True
else:
scalex = False
scaley = False
if axis in ['x', 'both']:
self._autoscaleXon = bool(enable)
scalex = self._autoscaleXon
if axis in ['y', 'both']:
self._autoscaleYon = bool(enable)
scaley = self._autoscaleYon
self.autoscale_view(tight=tight, scalex=scalex, scaley=scaley)
def autoscale_view(self, tight=None, scalex=True, scaley=True):
"""
Autoscale the view limits using the data limits. You can
selectively autoscale only a single axis, e.g., the xaxis by
setting *scaley* to *False*. The autoscaling preserves any
axis direction reversal that has already been done.
The data limits are not updated automatically when artist data are
changed after the artist has been added to an Axes instance. In that
case, use :meth:`matplotlib.axes.Axes.relim` prior to calling
autoscale_view.
"""
if tight is None:
# if image data only just use the datalim
_tight = self._tight or (len(self.images) > 0 and
len(self.lines) == 0 and
len(self.patches) == 0)
else:
_tight = self._tight = bool(tight)
if scalex and self._autoscaleXon:
xshared = self._shared_x_axes.get_siblings(self)
dl = [ax.dataLim for ax in xshared]
# ignore non-finite data limits if good limits exist
finite_dl = [d for d in dl if np.isfinite(d).all()]
if len(finite_dl):
dl = finite_dl
bb = mtransforms.BboxBase.union(dl)
x0, x1 = bb.intervalx
xlocator = self.xaxis.get_major_locator()
try:
# e.g., DateLocator has its own nonsingular()
x0, x1 = xlocator.nonsingular(x0, x1)
except AttributeError:
# Default nonsingular for, e.g., MaxNLocator
x0, x1 = mtransforms.nonsingular(x0, x1, increasing=False,
expander=0.05)
if self._xmargin > 0:
delta = (x1 - x0) * self._xmargin
x0 -= delta
x1 += delta
if not _tight:
x0, x1 = xlocator.view_limits(x0, x1)
self.set_xbound(x0, x1)
if scaley and self._autoscaleYon:
yshared = self._shared_y_axes.get_siblings(self)
dl = [ax.dataLim for ax in yshared]
# ignore non-finite data limits if good limits exist
finite_dl = [d for d in dl if np.isfinite(d).all()]
if len(finite_dl):
dl = finite_dl
bb = mtransforms.BboxBase.union(dl)
y0, y1 = bb.intervaly
ylocator = self.yaxis.get_major_locator()
try:
y0, y1 = ylocator.nonsingular(y0, y1)
except AttributeError:
y0, y1 = mtransforms.nonsingular(y0, y1, increasing=False,
expander=0.05)
if self._ymargin > 0:
delta = (y1 - y0) * self._ymargin
y0 -= delta
y1 += delta
if not _tight:
y0, y1 = ylocator.view_limits(y0, y1)
self.set_ybound(y0, y1)
def _get_axis_list(self):
return (self.xaxis, self.yaxis)
# Drawing
@allow_rasterization
def draw(self, renderer=None, inframe=False):
"""Draw everything (plot lines, axes, labels)"""
if renderer is None:
renderer = self._cachedRenderer
if renderer is None:
raise RuntimeError('No renderer defined')
if not self.get_visible():
return
renderer.open_group('axes')
# prevent triggering call backs during the draw process
self._stale = True
locator = self.get_axes_locator()
if locator:
pos = locator(self, renderer)
self.apply_aspect(pos)
else:
self.apply_aspect()
artists = self.get_children()
artists.remove(self.patch)
# the frame draws the edges around the axes patch -- we
# decouple these so the patch can be in the background and the
# frame in the foreground. Do this before drawing the axis
# objects so that the spine has the opportunity to update them.
if not (self.axison and self._frameon):
for spine in six.itervalues(self.spines):
artists.remove(spine)
if self.axison and not inframe:
if self._axisbelow:
self.xaxis.set_zorder(0.5)
self.yaxis.set_zorder(0.5)
else:
self.xaxis.set_zorder(2.5)
self.yaxis.set_zorder(2.5)
else:
for _axis in self._get_axis_list():
artists.remove(_axis)
if inframe:
artists.remove(self.title)
artists.remove(self._left_title)
artists.remove(self._right_title)
# add images to dsu if the backend supports compositing.
# otherwise, does the manual compositing without adding images to dsu.
if len(self.images) <= 1 or renderer.option_image_nocomposite():
_do_composite = False
else:
_do_composite = True
for im in self.images:
artists.remove(im)
if self.figure.canvas.is_saving():
dsu = [(a.zorder, a) for a in artists]
else:
dsu = [(a.zorder, a) for a in artists
if (not a.get_animated() or a in self.images)]
dsu.sort(key=itemgetter(0))
# rasterize artists with negative zorder
# if the minimum zorder is negative, start rasterization
rasterization_zorder = self._rasterization_zorder
if (rasterization_zorder is not None and
len(dsu) > 0 and dsu[0][0] < rasterization_zorder):
renderer.start_rasterizing()
dsu_rasterized = [l for l in dsu if l[0] < rasterization_zorder]
dsu = [l for l in dsu if l[0] >= rasterization_zorder]
else:
dsu_rasterized = []
# the patch draws the background rectangle -- the frame below
# will draw the edges
if self.axison and self._frameon:
self.patch.draw(renderer)
if _do_composite:
# make a composite image, blending alpha
# list of (mimage.Image, ox, oy)
zorder_images = [(im.zorder, im) for im in self.images
if im.get_visible()]
zorder_images.sort(key=lambda x: x[0])
mag = renderer.get_image_magnification()
ims = [(im.make_image(mag), 0, 0, im.get_alpha())
for z, im in zorder_images]
l, b, r, t = self.bbox.extents
width = int(mag * ((round(r) + 0.5) - (round(l) - 0.5)))
height = int(mag * ((round(t) + 0.5) - (round(b) - 0.5)))
im = mimage.from_images(height,
width,
ims)
im.is_grayscale = False
l, b, w, h = self.bbox.bounds
# composite images need special args so they will not
# respect z-order for now
gc = renderer.new_gc()
gc.set_clip_rectangle(self.bbox)
gc.set_clip_path(mtransforms.TransformedPath(
self.patch.get_path(),
self.patch.get_transform()))
renderer.draw_image(gc, round(l), round(b), im)
gc.restore()
if dsu_rasterized:
for zorder, a in dsu_rasterized:
a.draw(renderer)
renderer.stop_rasterizing()
for zorder, a in dsu:
a.draw(renderer)
renderer.close_group('axes')
self._cachedRenderer = renderer
self.stale = False
def draw_artist(self, a):
"""
This method can only be used after an initial draw which
caches the renderer. It is used to efficiently update Axes
        data (axis ticks, labels, etc. are not updated)
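        Illustrative blitting sketch (not from the original docs; assumes a
        figure ``fig`` that has been drawn once and a line artist ``ln``)::
            fig.canvas.draw()          # initial draw caches the renderer
            ln.set_ydata(new_y)        # mutate the artist's data
            ax.draw_artist(ln)         # redraw just this artist
            fig.canvas.blit(ax.bbox)   # push the updated region to screen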
"""
if self._cachedRenderer is None:
msg = ('draw_artist can only be used after an initial draw which'
                   ' caches the renderer')
raise AttributeError(msg)
a.draw(self._cachedRenderer)
def redraw_in_frame(self):
"""
This method can only be used after an initial draw which
caches the renderer. It is used to efficiently update Axes
        data (axis ticks, labels, etc. are not updated)
"""
if self._cachedRenderer is None:
msg = ('redraw_in_frame can only be used after an initial draw'
                   ' which caches the renderer')
raise AttributeError(msg)
self.draw(self._cachedRenderer, inframe=True)
def get_renderer_cache(self):
return self._cachedRenderer
# Axes rectangle characteristics
def get_frame_on(self):
"""
Get whether the axes rectangle patch is drawn
"""
return self._frameon
def set_frame_on(self, b):
"""
Set whether the axes rectangle patch is drawn
ACCEPTS: [ *True* | *False* ]
"""
self._frameon = b
self.stale = True
def get_axisbelow(self):
"""
Get whether axis below is true or not
"""
return self._axisbelow
def set_axisbelow(self, b):
"""
Set whether the axis ticks and gridlines are above or below most
artists
ACCEPTS: [ *True* | *False* ]
"""
self._axisbelow = b
self.stale = True
@docstring.dedent_interpd
def grid(self, b=None, which='major', axis='both', **kwargs):
"""
Turn the axes grids on or off.
Call signature::
grid(self, b=None, which='major', axis='both', **kwargs)
Set the axes grids on or off; *b* is a boolean. (For MATLAB
compatibility, *b* may also be a string, 'on' or 'off'.)
If *b* is *None* and ``len(kwargs)==0``, toggle the grid state. If
*kwargs* are supplied, it is assumed that you want a grid and *b*
is thus set to *True*.
*which* can be 'major' (default), 'minor', or 'both' to control
whether major tick grids, minor tick grids, or both are affected.
*axis* can be 'both' (default), 'x', or 'y' to control which
set of gridlines are drawn.
*kwargs* are used to set the grid line properties, e.g.,::
ax.grid(color='r', linestyle='-', linewidth=2)
Valid :class:`~matplotlib.lines.Line2D` kwargs are
%(Line2D)s
"""
if len(kwargs):
b = True
b = _string_to_bool(b)
if axis == 'x' or axis == 'both':
self.xaxis.grid(b, which=which, **kwargs)
if axis == 'y' or axis == 'both':
self.yaxis.grid(b, which=which, **kwargs)
def ticklabel_format(self, **kwargs):
"""
Change the `~matplotlib.ticker.ScalarFormatter` used by
default for linear axes.
Optional keyword arguments:
============ =========================================
Keyword Description
============ =========================================
*style* [ 'sci' (or 'scientific') | 'plain' ]
plain turns off scientific notation
*scilimits* (m, n), pair of integers; if *style*
is 'sci', scientific notation will
be used for numbers outside the range
10`m`:sup: to 10`n`:sup:.
Use (0,0) to include all numbers.
*useOffset* [True | False | offset]; if True,
the offset will be calculated as needed;
if False, no offset will be used; if a
numeric offset is specified, it will be
used.
*axis* [ 'x' | 'y' | 'both' ]
*useLocale* If True, format the number according to
the current locale. This affects things
such as the character used for the
decimal separator. If False, use
C-style (English) formatting. The
default setting is controlled by the
axes.formatter.use_locale rcparam.
============ =========================================
Only the major ticks are affected.
If the method is called when the
:class:`~matplotlib.ticker.ScalarFormatter` is not the
:class:`~matplotlib.ticker.Formatter` being used, an
:exc:`AttributeError` will be raised.
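        Illustrative usage (a sketch, not from the original docs)::
            ax.ticklabel_format(style='sci', scilimits=(0, 0), axis='y')
            ax.ticklabel_format(useOffset=False)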
"""
style = kwargs.pop('style', '').lower()
scilimits = kwargs.pop('scilimits', None)
useOffset = kwargs.pop('useOffset', None)
useLocale = kwargs.pop('useLocale', None)
axis = kwargs.pop('axis', 'both').lower()
if scilimits is not None:
try:
m, n = scilimits
m + n + 1 # check that both are numbers
except (ValueError, TypeError):
raise ValueError("scilimits must be a sequence of 2 integers")
if style[:3] == 'sci':
sb = True
elif style in ['plain', 'comma']:
sb = False
if style == 'plain':
cb = False
else:
cb = True
raise NotImplementedError("comma style remains to be added")
elif style == '':
sb = None
else:
raise ValueError("%s is not a valid style value")
try:
if sb is not None:
if axis == 'both' or axis == 'x':
self.xaxis.major.formatter.set_scientific(sb)
if axis == 'both' or axis == 'y':
self.yaxis.major.formatter.set_scientific(sb)
if scilimits is not None:
if axis == 'both' or axis == 'x':
self.xaxis.major.formatter.set_powerlimits(scilimits)
if axis == 'both' or axis == 'y':
self.yaxis.major.formatter.set_powerlimits(scilimits)
if useOffset is not None:
if axis == 'both' or axis == 'x':
self.xaxis.major.formatter.set_useOffset(useOffset)
if axis == 'both' or axis == 'y':
self.yaxis.major.formatter.set_useOffset(useOffset)
if useLocale is not None:
if axis == 'both' or axis == 'x':
self.xaxis.major.formatter.set_useLocale(useLocale)
if axis == 'both' or axis == 'y':
self.yaxis.major.formatter.set_useLocale(useLocale)
except AttributeError:
raise AttributeError(
"This method only works with the ScalarFormatter.")
def locator_params(self, axis='both', tight=None, **kwargs):
"""
Control behavior of tick locators.
Keyword arguments:
*axis*
['x' | 'y' | 'both'] Axis on which to operate;
default is 'both'.
*tight*
[True | False | None] Parameter passed to :meth:`autoscale_view`.
Default is None, for no change.
Remaining keyword arguments are passed to directly to the
:meth:`~matplotlib.ticker.MaxNLocator.set_params` method.
Typically one might want to reduce the maximum number
of ticks and use tight bounds when plotting small
subplots, for example::
ax.locator_params(tight=True, nbins=4)
Because the locator is involved in autoscaling,
:meth:`autoscale_view` is called automatically after
the parameters are changed.
This presently works only for the
:class:`~matplotlib.ticker.MaxNLocator` used
by default on linear axes, but it may be generalized.
"""
_x = axis in ['x', 'both']
_y = axis in ['y', 'both']
if _x:
self.xaxis.get_major_locator().set_params(**kwargs)
if _y:
self.yaxis.get_major_locator().set_params(**kwargs)
self.autoscale_view(tight=tight, scalex=_x, scaley=_y)
def tick_params(self, axis='both', **kwargs):
"""
Change the appearance of ticks and tick labels.
Keyword arguments:
*axis* : ['x' | 'y' | 'both']
Axis on which to operate; default is 'both'.
*reset* : [True | False]
If *True*, set all parameters to defaults
before processing other keyword arguments. Default is
*False*.
*which* : ['major' | 'minor' | 'both']
Default is 'major'; apply arguments to *which* ticks.
*direction* : ['in' | 'out' | 'inout']
Puts ticks inside the axes, outside the axes, or both.
*length*
Tick length in points.
*width*
Tick width in points.
*color*
Tick color; accepts any mpl color spec.
*pad*
Distance in points between tick and label.
*labelsize*
Tick label font size in points or as a string (e.g., 'large').
*labelcolor*
Tick label color; mpl color spec.
*colors*
Changes the tick color and the label color to the same value:
mpl color spec.
*zorder*
Tick and label zorder.
*bottom*, *top*, *left*, *right* : [bool | 'on' | 'off']
controls whether to draw the respective ticks.
*labelbottom*, *labeltop*, *labelleft*, *labelright*
Boolean or ['on' | 'off'], controls whether to draw the
respective tick labels.
Example::
ax.tick_params(direction='out', length=6, width=2, colors='r')
This will make all major ticks be red, pointing out of the box,
and with dimensions 6 points by 2 points. Tick labels will
also be red.
"""
if axis in ['x', 'both']:
xkw = dict(kwargs)
xkw.pop('left', None)
xkw.pop('right', None)
xkw.pop('labelleft', None)
xkw.pop('labelright', None)
self.xaxis.set_tick_params(**xkw)
if axis in ['y', 'both']:
ykw = dict(kwargs)
ykw.pop('top', None)
ykw.pop('bottom', None)
ykw.pop('labeltop', None)
ykw.pop('labelbottom', None)
self.yaxis.set_tick_params(**ykw)
def set_axis_off(self):
"""turn off the axis"""
self.axison = False
self.stale = True
def set_axis_on(self):
"""turn on the axis"""
self.axison = True
self.stale = True
def get_axis_bgcolor(self):
"""Return the axis background color"""
return self._axisbg
def set_axis_bgcolor(self, color):
"""
set the axes background color
ACCEPTS: any matplotlib color - see
:func:`~matplotlib.pyplot.colors`
"""
self._axisbg = color
self.patch.set_facecolor(color)
self.stale = True
# data limits, ticks, tick labels, and formatting
def invert_xaxis(self):
"Invert the x-axis."
left, right = self.get_xlim()
self.set_xlim(right, left, auto=None)
def xaxis_inverted(self):
"""Returns *True* if the x-axis is inverted."""
left, right = self.get_xlim()
return right < left
def get_xbound(self):
"""
Returns the x-axis numerical bounds where::
lowerBound < upperBound
"""
left, right = self.get_xlim()
if left < right:
return left, right
else:
return right, left
def set_xbound(self, lower=None, upper=None):
"""
Set the lower and upper numerical bounds of the x-axis.
This method will honor axes inversion regardless of parameter order.
It will not change the _autoscaleXon attribute.
"""
if upper is None and iterable(lower):
lower, upper = lower
old_lower, old_upper = self.get_xbound()
if lower is None:
lower = old_lower
if upper is None:
upper = old_upper
if self.xaxis_inverted():
if lower < upper:
self.set_xlim(upper, lower, auto=None)
else:
self.set_xlim(lower, upper, auto=None)
else:
if lower < upper:
self.set_xlim(lower, upper, auto=None)
else:
self.set_xlim(upper, lower, auto=None)
def get_xlim(self):
"""
Get the x-axis range [*left*, *right*]
"""
return tuple(self.viewLim.intervalx)
def set_xlim(self, left=None, right=None, emit=True, auto=False, **kw):
"""
Call signature::
set_xlim(self, *args, **kwargs):
Set the data limits for the xaxis
Examples::
set_xlim((left, right))
set_xlim(left, right)
set_xlim(left=1) # right unchanged
set_xlim(right=1) # left unchanged
Keyword arguments:
*left*: scalar
The left xlim; *xmin*, the previous name, may still be used
*right*: scalar
The right xlim; *xmax*, the previous name, may still be used
*emit*: [ *True* | *False* ]
Notify observers of limit change
*auto*: [ *True* | *False* | *None* ]
Turn *x* autoscaling on (*True*), off (*False*; default),
or leave unchanged (*None*)
Note, the *left* (formerly *xmin*) value may be greater than
the *right* (formerly *xmax*).
For example, suppose *x* is years before present.
Then one might use::
        set_xlim(5000, 0)
so 5000 years ago is on the left of the plot and the
present is on the right.
Returns the current xlimits as a length 2 tuple
ACCEPTS: length 2 sequence of floats
"""
if 'xmin' in kw:
left = kw.pop('xmin')
if 'xmax' in kw:
right = kw.pop('xmax')
if kw:
raise ValueError("unrecognized kwargs: %s" %
list(six.iterkeys(kw)))
if right is None and iterable(left):
left, right = left
self._process_unit_info(xdata=(left, right))
if left is not None:
left = self.convert_xunits(left)
if right is not None:
right = self.convert_xunits(right)
old_left, old_right = self.get_xlim()
if left is None:
left = old_left
if right is None:
right = old_right
if left == right:
warnings.warn(
('Attempting to set identical left==right results\n'
'in singular transformations; automatically expanding.\n'
'left=%s, right=%s') % (left, right))
left, right = mtransforms.nonsingular(left, right, increasing=False)
left, right = self.xaxis.limit_range_for_scale(left, right)
self.viewLim.intervalx = (left, right)
if auto is not None:
self._autoscaleXon = bool(auto)
if emit:
self.callbacks.process('xlim_changed', self)
# Call all of the other x-axes that are shared with this one
for other in self._shared_x_axes.get_siblings(self):
if other is not self:
other.set_xlim(self.viewLim.intervalx,
emit=False, auto=auto)
if (other.figure != self.figure and
other.figure.canvas is not None):
other.figure.canvas.draw_idle()
self.stale = True
return left, right
def get_xscale(self):
return self.xaxis.get_scale()
    get_xscale.__doc__ = "Return the xaxis scale string: %s" % (
", ".join(mscale.get_scale_names()))
@docstring.dedent_interpd
def set_xscale(self, value, **kwargs):
"""
Call signature::
set_xscale(value)
Set the scaling of the x-axis: %(scale)s
ACCEPTS: [%(scale)s]
Different kwargs are accepted, depending on the scale:
%(scale_docs)s
"""
# If the scale is being set to log, clip nonposx to prevent headaches
# around zero
if value.lower() == 'log' and 'nonposx' not in kwargs.keys():
kwargs['nonposx'] = 'clip'
g = self.get_shared_x_axes()
for ax in g.get_siblings(self):
ax.xaxis._set_scale(value, **kwargs)
ax._update_transScale()
ax.stale = True
self.autoscale_view(scaley=False)
def get_xticks(self, minor=False):
"""Return the x ticks as a list of locations"""
return self.xaxis.get_ticklocs(minor=minor)
def set_xticks(self, ticks, minor=False):
"""
Set the x ticks with list of *ticks*
ACCEPTS: sequence of floats
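        Illustrative usage (a sketch, not from the original docs)::
            ax.set_xticks([0.0, 0.5, 1.0])
            ax.set_xticks([0.25, 0.75], minor=True)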
"""
ret = self.xaxis.set_ticks(ticks, minor=minor)
self.stale = True
return ret
def get_xmajorticklabels(self):
"""
Get the xtick labels as a list of :class:`~matplotlib.text.Text`
instances.
"""
return cbook.silent_list('Text xticklabel',
self.xaxis.get_majorticklabels())
def get_xminorticklabels(self):
"""
Get the x minor tick labels as a list of
:class:`matplotlib.text.Text` instances.
"""
return cbook.silent_list('Text xticklabel',
self.xaxis.get_minorticklabels())
def get_xticklabels(self, minor=False, which=None):
"""
Get the x tick labels as a list of :class:`~matplotlib.text.Text`
instances.
Parameters
----------
minor : bool
If True return the minor ticklabels,
else return the major ticklabels
which : None, ('minor', 'major', 'both')
Overrides `minor`.
Selects which ticklabels to return
Returns
-------
ret : list
List of :class:`~matplotlib.text.Text` instances.
"""
return cbook.silent_list('Text xticklabel',
self.xaxis.get_ticklabels(minor=minor,
which=which))
@docstring.dedent_interpd
def set_xticklabels(self, labels, fontdict=None, minor=False, **kwargs):
"""
Call signature::
set_xticklabels(labels, fontdict=None, minor=False, **kwargs)
Set the xtick labels with list of strings *labels*. Return a
list of axis text instances.
*kwargs* set the :class:`~matplotlib.text.Text` properties.
Valid properties are
%(Text)s
ACCEPTS: sequence of strings
"""
if fontdict is not None:
kwargs.update(fontdict)
ret = self.xaxis.set_ticklabels(labels,
minor=minor, **kwargs)
self.stale = True
return ret
def invert_yaxis(self):
"""
Invert the y-axis.
"""
bottom, top = self.get_ylim()
self.set_ylim(top, bottom, auto=None)
def yaxis_inverted(self):
"""Returns *True* if the y-axis is inverted."""
bottom, top = self.get_ylim()
return top < bottom
def get_ybound(self):
"""
        Return the y-axis numerical bounds, where
        ``lowerBound < upperBound``
"""
bottom, top = self.get_ylim()
if bottom < top:
return bottom, top
else:
return top, bottom
def set_ybound(self, lower=None, upper=None):
"""
Set the lower and upper numerical bounds of the y-axis.
This method will honor axes inversion regardless of parameter order.
It will not change the _autoscaleYon attribute.
"""
if upper is None and iterable(lower):
lower, upper = lower
old_lower, old_upper = self.get_ybound()
if lower is None:
lower = old_lower
if upper is None:
upper = old_upper
if self.yaxis_inverted():
if lower < upper:
self.set_ylim(upper, lower, auto=None)
else:
self.set_ylim(lower, upper, auto=None)
else:
if lower < upper:
self.set_ylim(lower, upper, auto=None)
else:
self.set_ylim(upper, lower, auto=None)
def get_ylim(self):
"""
Get the y-axis range [*bottom*, *top*]
"""
return tuple(self.viewLim.intervaly)
def set_ylim(self, bottom=None, top=None, emit=True, auto=False, **kw):
"""
Call signature::
set_ylim(self, *args, **kwargs):
Set the data limits for the yaxis
Examples::
set_ylim((bottom, top))
set_ylim(bottom, top)
set_ylim(bottom=1) # top unchanged
set_ylim(top=1) # bottom unchanged
Keyword arguments:
*bottom*: scalar
The bottom ylim; the previous name, *ymin*, may still be used
*top*: scalar
The top ylim; the previous name, *ymax*, may still be used
*emit*: [ *True* | *False* ]
Notify observers of limit change
*auto*: [ *True* | *False* | *None* ]
Turn *y* autoscaling on (*True*), off (*False*; default),
or leave unchanged (*None*)
Note, the *bottom* (formerly *ymin*) value may be greater than
the *top* (formerly *ymax*).
For example, suppose *y* is depth in the ocean.
Then one might use::
set_ylim(5000, 0)
so 5000 m depth is at the bottom of the plot and the
surface, 0 m, is at the top.
Returns the current ylimits as a length 2 tuple
ACCEPTS: length 2 sequence of floats
"""
if 'ymin' in kw:
bottom = kw.pop('ymin')
if 'ymax' in kw:
top = kw.pop('ymax')
if kw:
raise ValueError("unrecognized kwargs: %s" %
list(six.iterkeys(kw)))
if top is None and iterable(bottom):
bottom, top = bottom
if bottom is not None:
bottom = self.convert_yunits(bottom)
if top is not None:
top = self.convert_yunits(top)
old_bottom, old_top = self.get_ylim()
if bottom is None:
bottom = old_bottom
if top is None:
top = old_top
if bottom == top:
warnings.warn(
('Attempting to set identical bottom==top results\n'
'in singular transformations; automatically expanding.\n'
'bottom=%s, top=%s') % (bottom, top))
bottom, top = mtransforms.nonsingular(bottom, top, increasing=False)
bottom, top = self.yaxis.limit_range_for_scale(bottom, top)
self.viewLim.intervaly = (bottom, top)
if auto is not None:
self._autoscaleYon = bool(auto)
if emit:
self.callbacks.process('ylim_changed', self)
# Call all of the other y-axes that are shared with this one
for other in self._shared_y_axes.get_siblings(self):
if other is not self:
other.set_ylim(self.viewLim.intervaly,
emit=False, auto=auto)
if (other.figure != self.figure and
other.figure.canvas is not None):
other.figure.canvas.draw_idle()
self.stale = True
return bottom, top
def get_yscale(self):
return self.yaxis.get_scale()
    get_yscale.__doc__ = "Return the yaxis scale string: %s" % (
", ".join(mscale.get_scale_names()))
@docstring.dedent_interpd
def set_yscale(self, value, **kwargs):
"""
Call signature::
set_yscale(value)
Set the scaling of the y-axis: %(scale)s
ACCEPTS: [%(scale)s]
Different kwargs are accepted, depending on the scale:
%(scale_docs)s
"""
# If the scale is being set to log, clip nonposy to prevent headaches
# around zero
if value.lower() == 'log' and 'nonposy' not in kwargs.keys():
kwargs['nonposy'] = 'clip'
g = self.get_shared_y_axes()
for ax in g.get_siblings(self):
ax.yaxis._set_scale(value, **kwargs)
ax._update_transScale()
ax.stale = True
self.autoscale_view(scalex=False)
def get_yticks(self, minor=False):
"""Return the y ticks as a list of locations"""
return self.yaxis.get_ticklocs(minor=minor)
def set_yticks(self, ticks, minor=False):
"""
Set the y ticks with list of *ticks*
ACCEPTS: sequence of floats
Keyword arguments:
*minor*: [ *False* | *True* ]
Sets the minor ticks if *True*
"""
ret = self.yaxis.set_ticks(ticks, minor=minor)
return ret
def get_ymajorticklabels(self):
"""
Get the major y tick labels as a list of
:class:`~matplotlib.text.Text` instances.
"""
return cbook.silent_list('Text yticklabel',
self.yaxis.get_majorticklabels())
def get_yminorticklabels(self):
"""
Get the minor y tick labels as a list of
:class:`~matplotlib.text.Text` instances.
"""
return cbook.silent_list('Text yticklabel',
self.yaxis.get_minorticklabels())
def get_yticklabels(self, minor=False, which=None):
"""
Get the x tick labels as a list of :class:`~matplotlib.text.Text`
instances.
Parameters
----------
minor : bool
If True return the minor ticklabels,
else return the major ticklabels
which : None, ('minor', 'major', 'both')
Overrides `minor`.
Selects which ticklabels to return
Returns
-------
ret : list
List of :class:`~matplotlib.text.Text` instances.
"""
return cbook.silent_list('Text yticklabel',
self.yaxis.get_ticklabels(minor=minor,
which=which))
@docstring.dedent_interpd
def set_yticklabels(self, labels, fontdict=None, minor=False, **kwargs):
"""
Call signature::
set_yticklabels(labels, fontdict=None, minor=False, **kwargs)
Set the y tick labels with list of strings *labels*. Return a list of
:class:`~matplotlib.text.Text` instances.
*kwargs* set :class:`~matplotlib.text.Text` properties for the labels.
Valid properties are
%(Text)s
ACCEPTS: sequence of strings
"""
if fontdict is not None:
kwargs.update(fontdict)
return self.yaxis.set_ticklabels(labels,
minor=minor, **kwargs)
def xaxis_date(self, tz=None):
"""
Sets up x-axis ticks and labels that treat the x data as dates.
*tz* is a timezone string or :class:`tzinfo` instance.
Defaults to rc value.
"""
# should be enough to inform the unit conversion interface
# dates are coming in
self.xaxis.axis_date(tz)
def yaxis_date(self, tz=None):
"""
Sets up y-axis ticks and labels that treat the y data as dates.
*tz* is a timezone string or :class:`tzinfo` instance.
Defaults to rc value.
"""
self.yaxis.axis_date(tz)
def format_xdata(self, x):
"""
Return *x* string formatted. This function will use the attribute
self.fmt_xdata if it is callable, else will fall back on the xaxis
major formatter
"""
try:
return self.fmt_xdata(x)
except TypeError:
func = self.xaxis.get_major_formatter().format_data_short
val = func(x)
return val
def format_ydata(self, y):
"""
Return y string formatted. This function will use the
:attr:`fmt_ydata` attribute if it is callable, else will fall
back on the yaxis major formatter
"""
try:
return self.fmt_ydata(y)
except TypeError:
func = self.yaxis.get_major_formatter().format_data_short
val = func(y)
return val
def format_coord(self, x, y):
"""Return a format string formatting the *x*, *y* coord"""
if x is None:
xs = '???'
else:
xs = self.format_xdata(x)
if y is None:
ys = '???'
else:
ys = self.format_ydata(y)
return 'x=%s y=%s' % (xs, ys)
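# Illustrative sketch, not part of matplotlib: a callable assigned to
# fmt_xdata/fmt_ydata takes precedence over the axis major formatter in
# the status-bar readout built by format_coord above.
def _demo_coord_formatting():
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots()
    ax.fmt_xdata = lambda x: '%.3f s' % x
    ax.fmt_ydata = lambda y: '%.1f V' % y
    return ax.format_coord(0.12345, 3.21)  # -> 'x=0.123 s y=3.2 V'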
def minorticks_on(self):
'Add autoscaling minor ticks to the axes.'
for ax in (self.xaxis, self.yaxis):
scale = ax.get_scale()
if scale == 'log':
s = ax._scale
ax.set_minor_locator(mticker.LogLocator(s.base, s.subs))
elif scale == 'symlog':
s = ax._scale
ax.set_minor_locator(
mticker.SymmetricalLogLocator(s.base, s.subs))
else:
ax.set_minor_locator(mticker.AutoMinorLocator())
def minorticks_off(self):
"""Remove minor ticks from the axes."""
self.xaxis.set_minor_locator(mticker.NullLocator())
self.yaxis.set_minor_locator(mticker.NullLocator())
# Interactive manipulation
def can_zoom(self):
"""
Return *True* if this axes supports the zoom box button functionality.
"""
return True
def can_pan(self):
"""
Return *True* if this axes supports any pan/zoom button functionality.
"""
return True
def get_navigate(self):
"""
Get whether the axes responds to navigation commands
"""
return self._navigate
def set_navigate(self, b):
"""
Set whether the axes responds to navigation toolbar commands
ACCEPTS: [ *True* | *False* ]
"""
self._navigate = b
def get_navigate_mode(self):
"""
Get the navigation toolbar button status: 'PAN', 'ZOOM', or None
"""
return self._navigate_mode
def set_navigate_mode(self, b):
"""
Set the navigation toolbar button status;
.. warning::
this is not a user-API function.
"""
self._navigate_mode = b
def _get_view(self):
"""
Save information required to reproduce the current view.
Called before a view is changed, such as during a pan or zoom
initiated by the user. You may return any information you deem
necessary to describe the view.
.. note::
Intended to be overridden by new projection types, but if not, the
default implementation saves the view limits. You *must* implement
:meth:`_set_view` if you implement this method.
"""
xmin, xmax = self.get_xlim()
ymin, ymax = self.get_ylim()
return (xmin, xmax, ymin, ymax)
def _set_view(self, view):
"""
Apply a previously saved view.
Called when restoring a view, such as with the navigation buttons.
.. note::
Intended to be overridden by new projection types, but if not, the
default implementation restores the view limits. You *must*
implement :meth:`_get_view` if you implement this method.
"""
xmin, xmax, ymin, ymax = view
self.set_xlim((xmin, xmax))
self.set_ylim((ymin, ymax))
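# Minimal sketch (assumption, not shipped code) of a custom projection
# overriding the _get_view/_set_view pair to round-trip extra state
# alongside the default limits:
#
#     class TiltedAxes(Axes):
#         def _get_view(self):
#             return (self.get_xlim(), self.get_ylim(), self._tilt)
#         def _set_view(self, view):
#             xlim, ylim, self._tilt = view
#             self.set_xlim(xlim)
#             self.set_ylim(ylim)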
def _set_view_from_bbox(self, bbox, direction='in',
mode=None, twinx=False, twiny=False):
"""
Update view from a selection bbox.
.. note::
Intended to be overridden by new projection types, but if not, the
default implementation sets the view limits to the bbox directly.
Parameters
----------
bbox : tuple
The selected bounding box limits, in *display* coordinates.
direction : str
The direction to apply the bounding box.
* `'in'` - The bounding box describes the view directly, i.e.,
it zooms in.
* `'out'` - The bounding box describes the size to make the
existing view, i.e., it zooms out.
mode : str or None
The selection mode, whether to apply the bounding box in only the
`'x'` direction, `'y'` direction or both (`None`).
twinx : bool
Whether this axis is twinned in the *x*-direction.
twiny : bool
Whether this axis is twinned in the *y*-direction.
"""
lastx, lasty, x, y = bbox
# zoom to rect
inverse = self.transData.inverted()
lastx, lasty = inverse.transform_point((lastx, lasty))
x, y = inverse.transform_point((x, y))
Xmin, Xmax = self.get_xlim()
Ymin, Ymax = self.get_ylim()
if twinx:
x0, x1 = Xmin, Xmax
else:
if Xmin < Xmax:
if x < lastx:
x0, x1 = x, lastx
else:
x0, x1 = lastx, x
if x0 < Xmin:
x0 = Xmin
if x1 > Xmax:
x1 = Xmax
else:
if x > lastx:
x0, x1 = x, lastx
else:
x0, x1 = lastx, x
if x0 > Xmin:
x0 = Xmin
if x1 < Xmax:
x1 = Xmax
if twiny:
y0, y1 = Ymin, Ymax
else:
if Ymin < Ymax:
if y < lasty:
y0, y1 = y, lasty
else:
y0, y1 = lasty, y
if y0 < Ymin:
y0 = Ymin
if y1 > Ymax:
y1 = Ymax
else:
if y > lasty:
y0, y1 = y, lasty
else:
y0, y1 = lasty, y
if y0 > Ymin:
y0 = Ymin
if y1 < Ymax:
y1 = Ymax
if direction == 'in':
if mode == 'x':
self.set_xlim((x0, x1))
elif mode == 'y':
self.set_ylim((y0, y1))
else:
self.set_xlim((x0, x1))
self.set_ylim((y0, y1))
elif direction == 'out':
if self.get_xscale() == 'log':
alpha = np.log(Xmax / Xmin) / np.log(x1 / x0)
rx1 = pow(Xmin / x0, alpha) * Xmin
rx2 = pow(Xmax / x0, alpha) * Xmin
else:
alpha = (Xmax - Xmin) / (x1 - x0)
rx1 = alpha * (Xmin - x0) + Xmin
rx2 = alpha * (Xmax - x0) + Xmin
if self.get_yscale() == 'log':
alpha = np.log(Ymax / Ymin) / np.log(y1 / y0)
ry1 = pow(Ymin / y0, alpha) * Ymin
ry2 = pow(Ymax / y0, alpha) * Ymin
else:
alpha = (Ymax - Ymin) / (y1 - y0)
ry1 = alpha * (Ymin - y0) + Ymin
ry2 = alpha * (Ymax - y0) + Ymin
if mode == 'x':
self.set_xlim((rx1, rx2))
elif mode == 'y':
self.set_ylim((ry1, ry2))
else:
self.set_xlim((rx1, rx2))
self.set_ylim((ry1, ry2))
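# Worked example for the linear zoom-out branch above (mode=None):
# with current limits Xmin=0, Xmax=10 and a selected box x0=2, x1=4,
# alpha = (10 - 0) / (4 - 2) = 5, so rx1 = 5*(0-2) + 0 = -10 and
# rx2 = 5*(10-2) + 0 = 40; after set_xlim((-10, 40)) the old view
# occupies exactly the screen region the box covered, i.e. a zoom out.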
def start_pan(self, x, y, button):
"""
Called when a pan operation has started.
*x*, *y* are the mouse coordinates in display coords.
button is the mouse button number:
* 1: LEFT
* 2: MIDDLE
* 3: RIGHT
.. note::
Intended to be overridden by new projection types.
"""
self._pan_start = cbook.Bunch(
lim=self.viewLim.frozen(),
trans=self.transData.frozen(),
trans_inverse=self.transData.inverted().frozen(),
bbox=self.bbox.frozen(),
x=x,
y=y)
def end_pan(self):
"""
Called when a pan operation completes (when the mouse button
is up.)
.. note::
Intended to be overridden by new projection types.
"""
del self._pan_start
def drag_pan(self, button, key, x, y):
"""
Called when the mouse moves during a pan operation.
*button* is the mouse button number:
* 1: LEFT
* 2: MIDDLE
* 3: RIGHT
*key* is a "shift" key
*x*, *y* are the mouse coordinates in display coords.
.. note::
Intended to be overridden by new projection types.
"""
def format_deltas(key, dx, dy):
if key == 'control':
if abs(dx) > abs(dy):
dy = dx
else:
dx = dy
elif key == 'x':
dy = 0
elif key == 'y':
dx = 0
elif key == 'shift':
if 2 * abs(dx) < abs(dy):
dx = 0
elif 2 * abs(dy) < abs(dx):
dy = 0
elif abs(dx) > abs(dy):
dy = dy / abs(dy) * abs(dx)
else:
dx = dx / abs(dx) * abs(dy)
return (dx, dy)
p = self._pan_start
dx = x - p.x
dy = y - p.y
if dx == 0 and dy == 0:
return
if button == 1:
dx, dy = format_deltas(key, dx, dy)
result = p.bbox.translated(-dx, -dy) \
.transformed(p.trans_inverse)
elif button == 3:
try:
dx = -dx / float(self.bbox.width)
dy = -dy / float(self.bbox.height)
dx, dy = format_deltas(key, dx, dy)
if self.get_aspect() != 'auto':
dx = 0.5 * (dx + dy)
dy = dx
alpha = np.power(10.0, (dx, dy))
start = np.array([p.x, p.y])
oldpoints = p.lim.transformed(p.trans)
newpoints = start + alpha * (oldpoints - start)
result = mtransforms.Bbox(newpoints) \
.transformed(p.trans_inverse)
except OverflowError:
warnings.warn('Overflow while panning')
return
self.set_xlim(*result.intervalx)
self.set_ylim(*result.intervaly)
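# Hedged sketch, not part of matplotlib: the call sequence the
# navigation toolbar drives for a left-button pan (button 1), with
# coordinates given in display space.
def _demo_pan_lifecycle():
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots()
    ax.plot(range(10))
    ax.start_pan(100, 100, 1)
    ax.drag_pan(1, None, 120, 90)   # key=None: unconstrained pan
    ax.end_pan()
    return ax.get_xlim(), ax.get_ylim()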
def get_cursor_props(self):
"""
Return the cursor properties as a (*linewidth*, *color*)
tuple, where *linewidth* is a float and *color* is an RGBA
tuple
"""
return self._cursorProps
def set_cursor_props(self, *args):
"""
Set the cursor property as::
ax.set_cursor_props(linewidth, color)
or::
ax.set_cursor_props((linewidth, color))
ACCEPTS: a (*float*, *color*) tuple
"""
if len(args) == 1:
lw, c = args[0]
elif len(args) == 2:
lw, c = args
else:
raise ValueError('args must be a (linewidth, color) tuple')
c = mcolors.colorConverter.to_rgba(c)
self._cursorProps = lw, c
def get_children(self):
"""return a list of child artists"""
children = []
children.extend(self.collections)
children.extend(self.patches)
children.extend(self.lines)
children.extend(self.texts)
children.extend(self.artists)
children.extend(six.itervalues(self.spines))
children.append(self.xaxis)
children.append(self.yaxis)
children.append(self.title)
children.append(self._left_title)
children.append(self._right_title)
children.extend(self.tables)
children.extend(self.images)
if self.legend_ is not None:
children.append(self.legend_)
children.append(self.patch)
return children
def contains(self, mouseevent):
"""
Test whether the mouse event occurred in the axes.
Returns *True* / *False*, {}
"""
if six.callable(self._contains):
return self._contains(self, mouseevent)
return self.patch.contains(mouseevent)
def contains_point(self, point):
"""
Returns *True* if the point (tuple of x,y) is inside the axes
(the area defined by its patch). A pixel coordinate is
required.
"""
return self.patch.contains_point(point, radius=1.0)
def pick(self, *args):
"""
Call signature::
pick(mouseevent)
each child artist will fire a pick event if mouseevent is over
the artist and the artist has picker set
"""
martist.Artist.pick(self, args[0])
def get_default_bbox_extra_artists(self):
return [artist for artist in self.get_children()
if artist.get_visible()]
def get_tightbbox(self, renderer, call_axes_locator=True):
"""
Return the tight bounding box of the axes.
The bounding box is given in canvas (display) coordinates.
If *call_axes_locator* is *False*, it does not call the
_axes_locator attribute, which is necessary to get the correct
bounding box. ``call_axes_locator==False`` can be used if the
caller is only interested in the relative size of the tightbbox
compared to the axes bbox.
"""
bb = []
if not self.get_visible():
return None
locator = self.get_axes_locator()
if locator and call_axes_locator:
pos = locator(self, renderer)
self.apply_aspect(pos)
else:
self.apply_aspect()
bb.append(self.get_window_extent(renderer))
if self.title.get_visible():
bb.append(self.title.get_window_extent(renderer))
if self._left_title.get_visible():
bb.append(self._left_title.get_window_extent(renderer))
if self._right_title.get_visible():
bb.append(self._right_title.get_window_extent(renderer))
bb_xaxis = self.xaxis.get_tightbbox(renderer)
if bb_xaxis:
bb.append(bb_xaxis)
bb_yaxis = self.yaxis.get_tightbbox(renderer)
if bb_yaxis:
bb.append(bb_yaxis)
for child in self.get_children():
if isinstance(child, OffsetBox) and child.get_visible():
bb.append(child.get_window_extent(renderer))
_bbox = mtransforms.Bbox.union(
[b for b in bb if b.width != 0 or b.height != 0])
return _bbox
def _make_twin_axes(self, *kl, **kwargs):
"""
Make a twin axes of self. This is used by twinx and twiny.
"""
ax2 = self.figure.add_axes(self.get_position(True), *kl, **kwargs)
return ax2
def twinx(self):
"""
Call signature::
ax = twinx()
create a twin of Axes for generating a plot with a shared
x-axis but independent y-axis. The y-axis of self will have
ticks on left and the returned axes will have ticks on the
right.
.. note::
For those who are 'picking' artists while using twinx, pick
events are only called for the artists in the top-most axes.
"""
ax2 = self._make_twin_axes(sharex=self)
ax2.yaxis.tick_right()
ax2.yaxis.set_label_position('right')
ax2.yaxis.set_offset_position('right')
self.yaxis.tick_left()
ax2.xaxis.set_visible(False)
ax2.patch.set_visible(False)
return ax2
def twiny(self):
"""
Call signature::
ax = twiny()
create a twin of Axes for generating a plot with a shared
y-axis but independent x-axis. The x-axis of self will have
ticks on bottom and the returned axes will have ticks on the
top.
.. note::
For those who are 'picking' artists while using twiny, pick
events are only called for the artists in the top-most axes.
"""
ax2 = self._make_twin_axes(sharey=self)
ax2.xaxis.tick_top()
ax2.xaxis.set_label_position('top')
self.xaxis.tick_bottom()
ax2.yaxis.set_visible(False)
ax2.patch.set_visible(False)
return ax2
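# Illustrative sketch, not part of matplotlib: typical use of the twin
# helpers defined above for plotting two y-scales against one x-axis.
def _demo_twin_axes():
    import matplotlib.pyplot as plt
    fig, ax = plt.subplots()
    ax.plot([0, 1], [0, 10], 'b-')
    ax2 = ax.twinx()                # shared x-axis, right-hand y-axis
    ax2.plot([0, 1], [0, 100], 'r-')
    ax3 = ax.twiny()                # shared y-axis, top x-axis
    return ax2, ax3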
def get_shared_x_axes(self):
'Return a copy of the shared axes Grouper object for x axes'
return self._shared_x_axes
def get_shared_y_axes(self):
'Return a copy of the shared axes Grouper object for y axes'
return self._shared_y_axes
| mit |
huiyiqun/check_mk | agents/windows/it/test_section_mem.py | 1 | 1385 | #!/usr/bin/env python
# -*- coding: utf-8; py-indent-offset: 4 -*-
import os
import pytest
import re
from remote import actual_output, config, remotetest, wait_agent, write_config
class Globals(object):
section = 'mem'
alone = True
@pytest.fixture
def testfile():
return os.path.basename(__file__)
@pytest.fixture(params=['alone', 'with_systemtime'])
def testconfig(request, config):
Globals.alone = request.param == 'alone'
if Globals.alone:
config.set('global', 'sections', Globals.section)
else:
config.set('global', 'sections', '%s systemtime' % Globals.section)
config.set('global', 'crash_debug', 'yes')
return config
@pytest.fixture
def expected_output():
expected = [
r'<<<mem>>>', #
r'MemTotal:\s+\d+\skB', #
r'MemFree:\s+\d+\skB', #
r'SwapTotal:\s+\d+\skB', #
r'SwapFree:\s+\d+\skB', #
r'PageTotal:\s+\d+\skB', #
r'PageFree:\s+\d+\skB', #
r'VirtualTotal:\s+\d+\skB', #
r'VirtualFree:\s+\d+\skB' #
]
if not Globals.alone:
expected += [re.escape(r'<<<systemtime>>>'), r'\d+']
return expected
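# Illustrative sample (values made up) of agent output that the regexes
# above are intended to match:
#
# <<<mem>>>
# MemTotal: 16777216 kB
# MemFree: 8388608 kB
# SwapTotal: 4194304 kB
# SwapFree: 4194304 kB
# PageTotal: 20971520 kB
# PageFree: 12582912 kB
# VirtualTotal: 137438953 kB
# VirtualFree: 137400000 kB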
def test_section_mem(request, testconfig, expected_output, actual_output,
testfile):
# request.node.name gives test name
remotetest(expected_output, actual_output, testfile, request.node.name)
| gpl-2.0 |
talha81/TACTIC-DEV | src/pyasm/prod/web/texture_wdg.py | 6 | 14159 | ###########################################################
#
# Copyright (c) 2005, Southpaw Technology
# All Rights Reserved
#
# PROPRIETARY INFORMATION. This software is proprietary to
# Southpaw Technology, and is not to be reproduced, transmitted,
# or disclosed in any way without written permission.
#
#
#
__all__ = ['TextureSourceElementWdg', 'TextureAddSourceWdg',
'TextureAddTexturesWdg', 'TextureAddSourceElementWdg', 'TextureAddSourceCmd',
'TextureAddSourceEditElement', 'TextureAddSourceAction' ]
from pyasm.common import Container, Xml, Environment, Common
from pyasm.biz import *
from pyasm.prod.biz import Texture, TextureSource
from pyasm.command import Command, DatabaseAction, FileUpload, CommandExitException
from pyasm.search import Search
from pyasm.web import *
from pyasm.widget import *
from pyasm.checkin import SnapshotBuilder, FileCheckin
from pyasm.prod.web import TextureFilterWdg
class TextureSourceElementWdg(BaseTableElementWdg):
'''display the source information about textures.
This class is shared by both Texture and Texture Source Tab'''
def is_source(my, sobject):
if isinstance(sobject, TextureSource):
return True
else:
return False
def get_display(my):
sobject = my.get_current_sobject()
if my.is_source(sobject):
widget = my.get_add_textures_wdg()
return widget
# get the latest snapshot
snapshot = Snapshot.get_latest_by_sobject(sobject)
if not snapshot:
return "No snapshots"
xml = snapshot.get_xml_value("snapshot")
nodes = xml.get_nodes("snapshot/ref")
if not nodes:
widget = Widget()
icon = my.get_add_source_wdg()
widget.add(icon)
widget.add("No Source")
return widget
# previously assumed a single reference source; now list all references
#node = nodes[0]
widget = Widget()
for node in nodes:
widget.add(my.get_source_link(node))
widget.add(HtmlElement.br())
return widget
def get_source_link(my, node):
search_type = Xml.get_attribute(node, "search_type")
search_id = Xml.get_attribute(node, "search_id")
context = Xml.get_attribute(node, "context")
version = Xml.get_attribute(node, "version")
source_snapshot = Snapshot.get_by_version(search_type, search_id, \
context, version )
if not source_snapshot:
Environment.add_warning("Snapshot not found", "Reference snapshot for [%s|%s] does not exist" \
% (search_type, search_id) )
return ''
#raise WidgetException( "Reference snapshot in '%s' does not exist" \
# % snapshot.get_id() )
source = source_snapshot.get_sobject()
# get the file link
file_name = source_snapshot.get_name_by_type("main")
path = "%s/%s" % (source_snapshot.get_web_dir(), file_name)
return HtmlElement.href("Source: %s" %source.get_value("code"), \
ref=path )
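# Assumed shape (for illustration only) of a <ref> node inside the
# snapshot XML that get_source_link() reads its attributes from:
#
#     <snapshot>
#         <ref search_type="prod/texture_source?project=sample"
#              search_id="12" context="publish" version="3"/>
#     </snapshot>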
def get_add_textures_wdg(my):
sobject = my.get_current_sobject()
widget = Widget()
search_type = sobject.get_search_type()
search_id = sobject.get_id()
url = WebContainer.get_web().get_widget_url()
url.set_option("widget", "pyasm.prod.web.TextureAddTexturesWdg")
url.set_option("search_type", search_type)
url.set_option("search_id", search_id)
url.set_option("refresh_mode", "page")
ref = url.get_url()
iframe = Container.get("iframe")
iframe.set_width(80)
action = iframe.get_on_script(ref)
div = DivWdg()
div.add_style("float: left")
button = IconButtonWdg("Add Texture", IconWdg.IMAGE_ADD)
button.add_event("onclick", action )
button.add_style("margin: 3px 5px")
div.add(button)
div.add("Add Textures")
widget.add(div)
# find all of the forward references
snapshot = Snapshot.get_latest_by_sobject(sobject)
if snapshot:
div = DivWdg()
xml = snapshot.get_xml_value("snapshot")
frefs = xml.get_nodes("snapshot/fref")
if frefs:
widget.add("<br clear='all'/><hr size='1px'/>")
for fref in frefs:
search_id = Xml.get_attribute(fref, "search_id")
search_type = Xml.get_attribute(fref, "search_type")
sobject = Search.get_by_id(search_type, search_id)
if not sobject:
sobject_code = "n/a"
else:
sobject_code = sobject.get_code()
thumb = ThumbWdg()
thumb.set_icon_size(30)
thumb.set_show_latest_icon(True)
thumb.set_sobject(sobject)
div.add(thumb)
div.add(sobject_code)
div.add("<br/>")
widget.add(div)
return widget
def get_add_source_wdg(my):
sobject = my.get_current_sobject()
search_type = sobject.get_search_type()
search_id = sobject.get_id()
url = WebContainer.get_web().get_widget_url()
url.set_option("widget", "pyasm.prod.web.TextureAddSourceWdg")
url.set_option("search_type", search_type)
url.set_option("search_id", search_id)
url.set_option("refresh_mode", 'page')
ref = url.get_url()
iframe = WebContainer.get_iframe()
iframe.set_width(60)
action = iframe.get_on_script(ref)
button = IconButtonWdg("Warning", IconWdg.ERROR)
button.add_event("onclick", action )
button.add_style("margin: 3px 5px")
return button
class TextureAddSourceWdg(Widget):
'''The entire widget for adding a source to the texture'''
def init(my):
search = Search("prod/texture")
search.add_filter("category", "source")
table = TableWdg("prod/texture", "source")
table.set_search(search)
my.add(table)
class TextureAddTexturesWdg(Widget):
'''The entire widget for adding a source to the texture'''
def is_error_free(my, web):
''' if it is instructed to close and is error-free, return True'''
if web.get_form_value('add'):
return True
return False
def get_sobject(my):
web = WebContainer.get_web()
# first try the search key
search_key = web.get_form_value("search_key")
if search_key != "":
sobject = Search.get_by_search_key(search_key)
return sobject
# next try the search_type/search_id pair
search_type = web.get_form_value("search_type")
if search_type != "":
search_id = web.get_form_value("search_id")
sobject = Search.get_by_id(search_type, search_id)
return sobject
return None
def init(my):
web = WebContainer.get_web()
if my.is_error_free(web):
event_container = WebContainer.get_event_container()
refresh_script = "window.parent.%s" % event_container.get_refresh_caller()
iframe = WebContainer.get_iframe()
off_script = "window.parent.%s" % iframe.get_off_script()
script = HtmlElement.script('''
%s
%s
''' % (off_script, refresh_script) )
my.add(script)
return
widget = Widget()
sobject = my.get_sobject()
search_type = sobject.get_search_type()
table = TableWdg(search_type, "source")
table.remove_widget("select")
table.set_sobject(sobject)
table.set_show_property(False)
widget.add(table)
widget.add(HtmlElement.h3("Latest Textures"))
search = Search("prod/texture")
search.add_order_by("timestamp desc")
texture_filter = TextureFilterWdg()
texture_filter.alter_search(search)
div = DivWdg(texture_filter, css='filter_box')
table = TableWdg("prod/texture", "texture")
table.set_search(search)
widget.add(div)
widget.add(table)
my.add(widget)
class TextureAddSourceElementWdg(BaseTableElementWdg):
'''The table element that adds a specific source'''
def get_title(my):
WebContainer.register_cmd("pyasm.prod.web.TextureAddSourceCmd")
add_button = SubmitWdg(my.name)
return add_button
def get_display(my):
sobject = my.get_current_sobject()
search_key = sobject.get_search_key()
checkbox = CheckboxWdg("selected")
checkbox.set_option("value", search_key)
return checkbox
class TextureAddSourceCmd(Command):
def check(my):
return True
def execute(my):
web = WebContainer.get_web()
if web.get_form_value("add") == "":
raise CommandExitException()
# get the source
search_type = web.get_form_value("search_type")
search_id = web.get_form_value("search_id")
parent = Search.get_by_id(search_type,search_id)
parent_snapshot = Snapshot.get_latest_by_sobject(parent)
parent_xml = parent_snapshot.get_xml_value("snapshot")
parent_builder = SnapshotBuilder(parent_xml)
# get the selected textures
selected = web.get_form_values("selected")
my.add_description("Adding source to texture for [%s]" %','.join(selected))
for select in selected:
sobject = Search.get_by_search_key(select)
# add a backward reference
sobject_snapshot = Snapshot.get_latest_by_sobject(sobject)
xml = sobject_snapshot.get_xml_value("snapshot")
builder = SnapshotBuilder(xml)
builder.add_ref_by_snapshot(parent_snapshot)
sobject_snapshot.set_value("snapshot", builder.to_string() )
sobject_snapshot.commit()
# add a forward reference
parent_builder.add_fref_by_snapshot(sobject_snapshot)
parent_snapshot.set_value("snapshot", parent_builder.to_string() )
parent_snapshot.commit()
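# Rough sketch (assumption, not taken from the codebase) of the paired
# references the command above writes: each selected texture snapshot
# gains a backward <ref> pointing at the source snapshot, while the
# source snapshot gains one forward <fref> per texture snapshot:
#
#     texture snapshot:  <ref search_type="..." search_id="..." .../>
#     source snapshot:   <fref search_type="..." search_id="..." .../>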
# Edit element which appears on insert of a new texture
class TextureAddSourceEditElement(BaseInputWdg):
'''This widget is used to add a source to a texture at upload time'''
def get_display(my):
widget = Widget()
# get all of the sources, add reverse timestamp order
search = Search("prod/texture_source")
search.add_order_by("timestamp")
search.set_limit(10)
sources = search.get_sobjects()
if sources:
widget.add(HtmlElement.b("Predefined Sources:"))
widget.add(HtmlElement.br())
for source in sources:
search_key = source.get_search_key()
checkbox = CheckboxWdg("predefined_source")
checkbox.set_option("value",search_key)
widget.add(checkbox)
widget.add(source.get_value("code"))
widget.add(SpanWdg(">>", css='med'))
widget.add(source.get_value("description"))
widget.add("<br/>")
# or add a new one
widget.add(HtmlElement.b("New Source:"))
upload_wdg = SimpleUploadWdg("add_source")
widget.add(upload_wdg)
return widget
class TextureAddSourceAction(DatabaseAction):
def execute(my):
pass
def postprocess(my):
sobject = my.sobject
texture_snapshot = Snapshot.get_latest_by_sobject(sobject)
web = WebContainer.get_web()
source_search_key = web.get_form_value("predefined_source")
if source_search_key and texture_snapshot:
source = Search.get_by_search_key(source_search_key)
source_snapshot = Snapshot.get_latest_by_sobject(source)
xml = texture_snapshot.get_xml_value("snapshot")
builder = SnapshotBuilder(xml)
builder.add_ref_by_snapshot(source_snapshot)
texture_snapshot.set_value("snapshot", builder.to_string() )
texture_snapshot.commit()
return
# if no files have been uploaded, don't do anything
field_storage = web.get_form_value("add_source")
if field_storage == "":
return
# process and get the uploaded files
upload = FileUpload()
upload.set_field_storage(field_storage)
upload.execute()
files = upload.get_files()
if not files:
return
file_types = upload.get_file_types()
asset_code = sobject.get_value("asset_code")
# checkin this as a new source
import os
source_code = os.path.basename(files[0])
source_description = "Referred to %s" % my.sobject.get_code()
source_category = "default"
source = TextureSource.create(asset_code, source_code, \
source_category, source_description)
# add the file as a snapshot to this source
checkin = FileCheckin(source, files, file_types )
checkin.execute()
source_snapshot = Snapshot.get_latest_by_sobject(source)
xml = source_snapshot.get_xml_value("snapshot")
builder = SnapshotBuilder(xml)
builder.add_fref_by_snapshot(texture_snapshot)
source_snapshot.set_value("snapshot", builder.to_string() )
source_snapshot.commit()
# Modify the snapshot in the original texture to reference this
# source. This assumes that the other uploaded snapshot has
# already been dealt with.
source_snapshot = checkin.get_snapshot()
# FIXME: what if no texture was uploaded???
xml = texture_snapshot.get_xml_value("snapshot")
builder = SnapshotBuilder(xml)
builder.add_ref_by_snapshot(source_snapshot)
texture_snapshot.set_value("snapshot", builder.to_string() )
texture_snapshot.commit()
| epl-1.0 |
pshchelo/heat | contrib/heat_keystone/heat_keystone/tests/test_group.py | 2 | 11249 | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from heat.engine import constraints
from heat.engine import properties
from heat.engine import resource
from heat.engine import stack
from heat.engine import template
from heat.tests import common
from heat.tests import utils
from ..resources import group # noqa
keystone_group_template = {
'heat_template_version': '2013-05-23',
'resources': {
'test_group': {
'type': 'OS::Keystone::Group',
'properties': {
'name': 'test_group_1',
'description': 'Test group',
'domain': 'default'
}
}
}
}
RESOURCE_TYPE = 'OS::Keystone::Group'
class KeystoneGroupTest(common.HeatTestCase):
def setUp(self):
super(KeystoneGroupTest, self).setUp()
self.ctx = utils.dummy_context()
# For unit testing purpose. Register resource provider explicitly.
resource._register_class(RESOURCE_TYPE, group.KeystoneGroup)
self.stack = stack.Stack(
self.ctx, 'test_stack_keystone',
template.Template(keystone_group_template)
)
self.test_group = self.stack['test_group']
# Mock client
self.keystoneclient = mock.MagicMock()
self.test_group.keystone = mock.MagicMock()
self.test_group.keystone.return_value = self.keystoneclient
self.groups = self.keystoneclient.client.groups
# Mock client plugin
def _side_effect(value):
return value
self.keystone_client_plugin = mock.MagicMock()
(self.keystone_client_plugin.get_domain_id.
side_effect) = _side_effect
self.test_group.client_plugin = mock.MagicMock()
(self.test_group.client_plugin.
return_value) = self.keystone_client_plugin
def _get_mock_group(self):
value = mock.MagicMock()
group_id = '477e8273-60a7-4c41-b683-fdb0bc7cd151'
value.id = group_id
return value
def test_resource_mapping(self):
mapping = group.resource_mapping()
self.assertEqual(1, len(mapping))
self.assertEqual(group.KeystoneGroup, mapping[RESOURCE_TYPE])
self.assertIsInstance(self.test_group, group.KeystoneGroup)
def test_properties_title(self):
property_title_map = {
group.KeystoneGroup.NAME: 'name',
group.KeystoneGroup.DESCRIPTION: 'description',
group.KeystoneGroup.DOMAIN: 'domain'
}
for actual_title, expected_title in property_title_map.items():
self.assertEqual(
expected_title,
actual_title,
'KeystoneGroup PROPERTIES(%s) title modified.' %
actual_title)
def test_property_name_validate_schema(self):
schema = group.KeystoneGroup.properties_schema[
group.KeystoneGroup.NAME]
self.assertEqual(
True,
schema.update_allowed,
'update_allowed for property %s is modified' %
group.KeystoneGroup.NAME)
self.assertEqual(properties.Schema.STRING,
schema.type,
'type for property %s is modified' %
group.KeystoneGroup.NAME)
self.assertEqual('Name of keystone group.',
schema.description,
'description for property %s is modified' %
group.KeystoneGroup.NAME)
def test_property_description_validate_schema(self):
schema = group.KeystoneGroup.properties_schema[
group.KeystoneGroup.DESCRIPTION]
self.assertEqual(
True,
schema.update_allowed,
'update_allowed for property %s is modified' %
group.KeystoneGroup.DESCRIPTION)
self.assertEqual(properties.Schema.STRING,
schema.type,
'type for property %s is modified' %
group.KeystoneGroup.DESCRIPTION)
self.assertEqual('Description of keystone group.',
schema.description,
'description for property %s is modified' %
group.KeystoneGroup.DESCRIPTION)
self.assertEqual(
'',
schema.default,
'default for property %s is modified' %
group.KeystoneGroup.DESCRIPTION)
def test_property_domain_validate_schema(self):
schema = group.KeystoneGroup.properties_schema[
group.KeystoneGroup.DOMAIN]
self.assertEqual(
True,
schema.update_allowed,
'update_allowed for property %s is modified' %
group.KeystoneGroup.DOMAIN)
self.assertEqual(properties.Schema.STRING,
schema.type,
'type for property %s is modified' %
group.KeystoneGroup.DOMAIN)
self.assertEqual('Name or id of keystone domain.',
schema.description,
'description for property %s is modified' %
group.KeystoneGroup.DOMAIN)
self.assertEqual([constraints.CustomConstraint('keystone.domain')],
schema.constraints,
'constrains for property %s is modified' %
group.KeystoneGroup.DOMAIN)
self.assertEqual(
'default',
schema.default,
'default for property %s is modified' %
group.KeystoneGroup.DOMAIN)
def _get_property_schema_value_default(self, name):
schema = group.KeystoneGroup.properties_schema[name]
return schema.default
def test_group_handle_create(self):
mock_group = self._get_mock_group()
self.groups.create.return_value = mock_group
# validate the properties
self.assertEqual(
'test_group_1',
self.test_group.properties.get(group.KeystoneGroup.NAME))
self.assertEqual(
'Test group',
self.test_group.properties.get(group.KeystoneGroup.DESCRIPTION))
self.assertEqual(
'default',
self.test_group.properties.get(group.KeystoneGroup.DOMAIN))
self.test_group.handle_create()
# validate group creation
self.groups.create.assert_called_once_with(
name='test_group_1',
description='Test group',
domain='default')
# validate physical resource id
self.assertEqual(mock_group.id, self.test_group.resource_id)
def test_group_handle_create_default(self):
values = {
group.KeystoneGroup.NAME: None,
group.KeystoneGroup.DESCRIPTION:
(self._get_property_schema_value_default(
group.KeystoneGroup.DESCRIPTION)),
group.KeystoneGroup.DOMAIN:
(self._get_property_schema_value_default(
group.KeystoneGroup.DOMAIN)),
group.KeystoneGroup.ROLES: None
}
def _side_effect(key):
return values[key]
mock_group = self._get_mock_group()
self.groups.create.return_value = mock_group
self.test_group.properties = mock.MagicMock()
self.test_group.properties.get.side_effect = _side_effect
self.test_group.physical_resource_name = mock.MagicMock()
self.test_group.physical_resource_name.return_value = 'foo'
# validate the properties
self.assertEqual(
None,
self.test_group.properties.get(group.KeystoneGroup.NAME))
self.assertEqual(
'',
self.test_group.properties.get(group.KeystoneGroup.DESCRIPTION))
self.assertEqual(
'default',
self.test_group.properties.get(group.KeystoneGroup.DOMAIN))
self.test_group.handle_create()
# validate group creation
self.groups.create.assert_called_once_with(
name='foo',
description='',
domain='default')
def test_group_handle_update(self):
self.test_group.resource_id = '477e8273-60a7-4c41-b683-fdb0bc7cd151'
self.test_group._stored_properties_data = dict(roles=None)
prop_diff = {group.KeystoneGroup.NAME: 'test_group_1_updated',
group.KeystoneGroup.DESCRIPTION: 'Test Group updated',
group.KeystoneGroup.DOMAIN: 'test_domain'}
self.test_group.handle_update(json_snippet=None,
tmpl_diff=None,
prop_diff=prop_diff)
self.groups.update.assert_called_once_with(
group=self.test_group.resource_id,
name=prop_diff[group.KeystoneGroup.NAME],
description=prop_diff[group.KeystoneGroup.DESCRIPTION],
domain_id='test_domain'
)
def test_group_handle_update_default(self):
self.test_group.resource_id = '477e8273-60a7-4c41-b683-fdb0bc7cd151'
self.test_group._stored_properties_data = dict(domain='default')
self.test_group.physical_resource_name = mock.MagicMock()
self.test_group.physical_resource_name.return_value = 'foo'
prop_diff = {group.KeystoneGroup.DESCRIPTION: 'Test Project updated'}
self.test_group.handle_update(json_snippet=None,
tmpl_diff=None,
prop_diff=prop_diff)
# validate default name to physical resource name and
# domain is set from stored properties used during creation.
self.groups.update.assert_called_once_with(
group=self.test_group.resource_id,
name='foo',
description=prop_diff[group.KeystoneGroup.DESCRIPTION],
domain_id='default'
)
def test_group_handle_delete(self):
self.test_group.resource_id = '477e8273-60a7-4c41-b683-fdb0bc7cd151'
self.test_group._stored_properties_data = dict(roles=None)
self.groups.delete.return_value = None
self.assertIsNone(self.test_group.handle_delete())
self.groups.delete.assert_called_once_with(
self.test_group.resource_id
)
def test_group_handle_delete_resource_id_is_none(self):
self.resource_id = None
self.assertIsNone(self.test_group.handle_delete())
def test_group_handle_delete_not_found(self):
self.test_group._stored_properties_data = dict(roles=None)
exc = self.keystoneclient.NotFound
self.groups.delete.side_effect = exc
self.assertIsNone(self.test_group.handle_delete())
| apache-2.0 |
bright-sparks/titanium_mobile | support/common/mako/codegen.py | 40 | 32857 | # codegen.py
# Copyright (C) 2006, 2007, 2008 Michael Bayer [email protected]
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""provides functionality for rendering a parsetree constructing into module source code."""
import time
import re
from mako.pygen import PythonPrinter
from mako import util, ast, parsetree, filters
MAGIC_NUMBER = 5
def compile(node, uri, filename=None, default_filters=None, buffer_filters=None, imports=None, source_encoding=None, generate_unicode=True):
"""generate module source code given a parsetree node, uri, and optional source filename"""
buf = util.FastEncodingBuffer(unicode=generate_unicode)
printer = PythonPrinter(buf)
_GenerateRenderMethod(printer, _CompileContext(uri, filename, default_filters, buffer_filters, imports, source_encoding, generate_unicode), node)
return buf.getvalue()
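# Hedged usage sketch, not part of this module: parse template source
# with mako's Lexer and hand the resulting parse tree to compile().
# The Lexer import and its (text, filename) signature are assumptions
# based on mako of the same vintage.
def _demo_compile_template(text, uri='memory:demo'):
    from mako.lexer import Lexer
    node = Lexer(text, uri).parse()     # build the parsetree
    return compile(node, uri)           # render it to module source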
class _CompileContext(object):
def __init__(self, uri, filename, default_filters, buffer_filters, imports, source_encoding, generate_unicode):
self.uri = uri
self.filename = filename
self.default_filters = default_filters
self.buffer_filters = buffer_filters
self.imports = imports
self.source_encoding = source_encoding
self.generate_unicode = generate_unicode
class _GenerateRenderMethod(object):
"""a template visitor object which generates the full module source for a template."""
def __init__(self, printer, compiler, node):
self.printer = printer
self.last_source_line = -1
self.compiler = compiler
self.node = node
self.identifier_stack = [None]
self.in_def = isinstance(node, parsetree.DefTag)
if self.in_def:
name = "render_" + node.name
args = node.function_decl.get_argument_expressions()
filtered = len(node.filter_args.args) > 0
buffered = eval(node.attributes.get('buffered', 'False'))
cached = eval(node.attributes.get('cached', 'False'))
defs = None
pagetag = None
else:
defs = self.write_toplevel()
pagetag = self.compiler.pagetag
name = "render_body"
if pagetag is not None:
args = pagetag.body_decl.get_argument_expressions()
if not pagetag.body_decl.kwargs:
args += ['**pageargs']
cached = eval(pagetag.attributes.get('cached', 'False'))
else:
args = ['**pageargs']
cached = False
buffered = filtered = False
if args is None:
args = ['context']
else:
args = [a for a in ['context'] + args]
self.write_render_callable(pagetag or node, name, args, buffered, filtered, cached)
if defs is not None:
for node in defs:
_GenerateRenderMethod(printer, compiler, node)
identifiers = property(lambda self:self.identifier_stack[-1])
def write_toplevel(self):
"""traverse a template structure for module-level directives and generate the
start of module-level code."""
inherit = []
namespaces = {}
module_code = []
encoding =[None]
self.compiler.pagetag = None
class FindTopLevel(object):
def visitInheritTag(s, node):
inherit.append(node)
def visitNamespaceTag(s, node):
namespaces[node.name] = node
def visitPageTag(s, node):
self.compiler.pagetag = node
def visitCode(s, node):
if node.ismodule:
module_code.append(node)
f = FindTopLevel()
for n in self.node.nodes:
n.accept_visitor(f)
self.compiler.namespaces = namespaces
module_ident = util.Set()
for n in module_code:
module_ident = module_ident.union(n.declared_identifiers())
module_identifiers = _Identifiers()
module_identifiers.declared = module_ident
# module-level names, python code
if not self.compiler.generate_unicode and self.compiler.source_encoding:
self.printer.writeline("# -*- encoding:%s -*-" % self.compiler.source_encoding)
self.printer.writeline("from mako import runtime, filters, cache")
self.printer.writeline("UNDEFINED = runtime.UNDEFINED")
self.printer.writeline("__M_dict_builtin = dict")
self.printer.writeline("__M_locals_builtin = locals")
self.printer.writeline("_magic_number = %s" % repr(MAGIC_NUMBER))
self.printer.writeline("_modified_time = %s" % repr(time.time()))
self.printer.writeline("_template_filename=%s" % repr(self.compiler.filename))
self.printer.writeline("_template_uri=%s" % repr(self.compiler.uri))
self.printer.writeline("_template_cache=cache.Cache(__name__, _modified_time)")
self.printer.writeline("_source_encoding=%s" % repr(self.compiler.source_encoding))
if self.compiler.imports:
buf = ''
for imp in self.compiler.imports:
buf += imp + "\n"
self.printer.writeline(imp)
impcode = ast.PythonCode(buf, source='', lineno=0, pos=0, filename='template defined imports')
else:
impcode = None
main_identifiers = module_identifiers.branch(self.node)
module_identifiers.topleveldefs = module_identifiers.topleveldefs.union(main_identifiers.topleveldefs)
[module_identifiers.declared.add(x) for x in ["UNDEFINED"]]
if impcode:
[module_identifiers.declared.add(x) for x in impcode.declared_identifiers]
self.compiler.identifiers = module_identifiers
self.printer.writeline("_exports = %s" % repr([n.name for n in main_identifiers.topleveldefs.values()]))
self.printer.write("\n\n")
if len(module_code):
self.write_module_code(module_code)
if len(inherit):
self.write_namespaces(namespaces)
self.write_inherit(inherit[-1])
elif len(namespaces):
self.write_namespaces(namespaces)
return main_identifiers.topleveldefs.values()
def write_render_callable(self, node, name, args, buffered, filtered, cached):
"""write a top-level render callable.
this could be the main render() method or that of a top-level def."""
self.printer.writelines(
"def %s(%s):" % (name, ','.join(args)),
"context.caller_stack._push_frame()",
"try:"
)
if buffered or filtered or cached:
self.printer.writeline("context._push_buffer()")
self.identifier_stack.append(self.compiler.identifiers.branch(self.node))
if not self.in_def and '**pageargs' in args:
self.identifier_stack[-1].argument_declared.add('pageargs')
if not self.in_def and (len(self.identifiers.locally_assigned) > 0 or len(self.identifiers.argument_declared)>0):
self.printer.writeline("__M_locals = __M_dict_builtin(%s)" % ','.join(["%s=%s" % (x, x) for x in self.identifiers.argument_declared]))
self.write_variable_declares(self.identifiers, toplevel=True)
for n in self.node.nodes:
n.accept_visitor(self)
self.write_def_finish(self.node, buffered, filtered, cached)
self.printer.writeline(None)
self.printer.write("\n\n")
if cached:
self.write_cache_decorator(node, name, args, buffered, self.identifiers, toplevel=True)
def write_module_code(self, module_code):
"""write module-level template code, i.e. that which is enclosed in <%! %> tags
in the template."""
for n in module_code:
self.write_source_comment(n)
self.printer.write_indented_block(n.text)
def write_inherit(self, node):
"""write the module-level inheritance-determination callable."""
self.printer.writelines(
"def _mako_inherit(template, context):",
"_mako_generate_namespaces(context)",
"return runtime._inherit_from(context, %s, _template_uri)" % (node.parsed_attributes['file']),
None
)
def write_namespaces(self, namespaces):
"""write the module-level namespace-generating callable."""
self.printer.writelines(
"def _mako_get_namespace(context, name):",
"try:",
"return context.namespaces[(__name__, name)]",
"except KeyError:",
"_mako_generate_namespaces(context)",
"return context.namespaces[(__name__, name)]",
None,None
)
self.printer.writeline("def _mako_generate_namespaces(context):")
for node in namespaces.values():
if node.attributes.has_key('import'):
self.compiler.has_ns_imports = True
self.write_source_comment(node)
if len(node.nodes):
self.printer.writeline("def make_namespace():")
export = []
identifiers = self.compiler.identifiers.branch(node)
class NSDefVisitor(object):
def visitDefTag(s, node):
self.write_inline_def(node, identifiers, nested=False)
export.append(node.name)
vis = NSDefVisitor()
for n in node.nodes:
n.accept_visitor(vis)
self.printer.writeline("return [%s]" % (','.join(export)))
self.printer.writeline(None)
callable_name = "make_namespace()"
else:
callable_name = "None"
self.printer.writeline("ns = runtime.Namespace(%s, context._clean_inheritance_tokens(), templateuri=%s, callables=%s, calling_uri=_template_uri, module=%s)" % (repr(node.name), node.parsed_attributes.get('file', 'None'), callable_name, node.parsed_attributes.get('module', 'None')))
if eval(node.attributes.get('inheritable', "False")):
self.printer.writeline("context['self'].%s = ns" % (node.name))
self.printer.writeline("context.namespaces[(__name__, %s)] = ns" % repr(node.name))
self.printer.write("\n")
if not len(namespaces):
self.printer.writeline("pass")
self.printer.writeline(None)
def write_variable_declares(self, identifiers, toplevel=False, limit=None):
"""write variable declarations at the top of a function.
the variable declarations are in the form of callable definitions for defs and/or
name lookup within the function's context argument. the names declared are based on the
names that are referenced in the function body, which don't otherwise have any explicit
assignment operation. names that are assigned within the body are assumed to be
locally-scoped variables and are not separately declared.
for def callable definitions, if the def is a top-level callable then a
'stub' callable is generated which wraps the current Context into a closure. if the def
is not top-level, it is fully rendered as a local closure."""
# collection of all defs available to us in this scope
comp_idents = dict([(c.name, c) for c in identifiers.defs])
to_write = util.Set()
# write "context.get()" for all variables we are going to need that arent in the namespace yet
to_write = to_write.union(identifiers.undeclared)
# write closure functions for closures that we define right here
to_write = to_write.union(util.Set([c.name for c in identifiers.closuredefs.values()]))
# remove identifiers that are declared in the argument signature of the callable
to_write = to_write.difference(identifiers.argument_declared)
# remove identifiers that we are going to assign to. in this way we mimic Python's behavior,
# i.e. assignment to a variable within a block means that variable is now a "locally declared" var,
# which cannot be referenced beforehand.
to_write = to_write.difference(identifiers.locally_declared)
# if a limiting set was sent, constraint to those items in that list
# (this is used for the caching decorator)
if limit is not None:
to_write = to_write.intersection(limit)
if toplevel and getattr(self.compiler, 'has_ns_imports', False):
self.printer.writeline("_import_ns = {}")
self.compiler.has_imports = True
for ident, ns in self.compiler.namespaces.iteritems():
if ns.attributes.has_key('import'):
self.printer.writeline("_mako_get_namespace(context, %s)._populate(_import_ns, %s)" % (repr(ident), repr(re.split(r'\s*,\s*', ns.attributes['import']))))
for ident in to_write:
if ident in comp_idents:
comp = comp_idents[ident]
if comp.is_root():
self.write_def_decl(comp, identifiers)
else:
self.write_inline_def(comp, identifiers, nested=True)
elif ident in self.compiler.namespaces:
self.printer.writeline("%s = _mako_get_namespace(context, %s)" % (ident, repr(ident)))
else:
if getattr(self.compiler, 'has_ns_imports', False):
self.printer.writeline("%s = _import_ns.get(%s, context.get(%s, UNDEFINED))" % (ident, repr(ident), repr(ident)))
else:
self.printer.writeline("%s = context.get(%s, UNDEFINED)" % (ident, repr(ident)))
self.printer.writeline("__M_writer = context.writer()")
def write_source_comment(self, node):
"""write a source comment containing the line number of the corresponding template line."""
if self.last_source_line != node.lineno:
self.printer.writeline("# SOURCE LINE %d" % node.lineno)
self.last_source_line = node.lineno
def write_def_decl(self, node, identifiers):
"""write a locally-available callable referencing a top-level def"""
funcname = node.function_decl.funcname
namedecls = node.function_decl.get_argument_expressions()
nameargs = node.function_decl.get_argument_expressions(include_defaults=False)
if not self.in_def and (len(self.identifiers.locally_assigned) > 0 or len(self.identifiers.argument_declared) > 0):
nameargs.insert(0, 'context.locals_(__M_locals)')
else:
nameargs.insert(0, 'context')
self.printer.writeline("def %s(%s):" % (funcname, ",".join(namedecls)))
self.printer.writeline("return render_%s(%s)" % (funcname, ",".join(nameargs)))
self.printer.writeline(None)
def write_inline_def(self, node, identifiers, nested):
"""write a locally-available def callable inside an enclosing def."""
namedecls = node.function_decl.get_argument_expressions()
self.printer.writeline("def %s(%s):" % (node.name, ",".join(namedecls)))
filtered = len(node.filter_args.args) > 0
buffered = eval(node.attributes.get('buffered', 'False'))
cached = eval(node.attributes.get('cached', 'False'))
self.printer.writelines(
"context.caller_stack._push_frame()",
"try:"
)
if buffered or filtered or cached:
self.printer.writelines(
"context._push_buffer()",
)
identifiers = identifiers.branch(node, nested=nested)
self.write_variable_declares(identifiers)
self.identifier_stack.append(identifiers)
for n in node.nodes:
n.accept_visitor(self)
self.identifier_stack.pop()
self.write_def_finish(node, buffered, filtered, cached)
self.printer.writeline(None)
if cached:
self.write_cache_decorator(node, node.name, namedecls, False, identifiers, inline=True, toplevel=False)
def write_def_finish(self, node, buffered, filtered, cached, callstack=True):
"""write the end section of a rendering function, either outermost or inline.
this takes into account if the rendering function was filtered, buffered, etc.
and closes the corresponding try: block if any, and writes code to retrieve captured content,
apply filters, send proper return value."""
if not buffered and not cached and not filtered:
self.printer.writeline("return ''")
if callstack:
self.printer.writelines(
"finally:",
"context.caller_stack._pop_frame()",
None
)
if buffered or filtered or cached:
if buffered or cached:
# in a caching scenario, don't try to get a writer
# from the context after popping; assume the caching
# implementation might be using a context with no
# extra buffers
self.printer.writelines(
"finally:",
"__M_buf = context._pop_buffer()"
)
else:
self.printer.writelines(
"finally:",
"__M_buf, __M_writer = context._pop_buffer_and_writer()"
)
if callstack:
self.printer.writeline("context.caller_stack._pop_frame()")
s = "__M_buf.getvalue()"
if filtered:
s = self.create_filter_callable(node.filter_args.args, s, False)
self.printer.writeline(None)
if buffered and not cached:
s = self.create_filter_callable(self.compiler.buffer_filters, s, False)
if buffered or cached:
self.printer.writeline("return %s" % s)
else:
self.printer.writelines(
"__M_writer(%s)" % s,
"return ''"
)
def write_cache_decorator(self, node_or_pagetag, name, args, buffered, identifiers, inline=False, toplevel=False):
"""write a post-function decorator to replace a rendering callable with a cached version of itself."""
self.printer.writeline("__M_%s = %s" % (name, name))
cachekey = node_or_pagetag.parsed_attributes.get('cache_key', repr(name))
cacheargs = {}
for arg in (('cache_type', 'type'), ('cache_dir', 'data_dir'), ('cache_timeout', 'expiretime'), ('cache_url', 'url')):
val = node_or_pagetag.parsed_attributes.get(arg[0], None)
if val is not None:
if arg[1] == 'expiretime':
cacheargs[arg[1]] = int(eval(val))
else:
cacheargs[arg[1]] = val
else:
if self.compiler.pagetag is not None:
val = self.compiler.pagetag.parsed_attributes.get(arg[0], None)
if val is not None:
if arg[1] == 'expiretime':
cacheargs[arg[1]] = int(eval(val))
else:
cacheargs[arg[1]] = val
self.printer.writeline("def %s(%s):" % (name, ','.join(args)))
# form "arg1, arg2, arg3=arg3, arg4=arg4", etc.
pass_args = [ '=' in a and "%s=%s" % ((a.split('=')[0],)*2) or a for a in args]
self.write_variable_declares(identifiers, toplevel=toplevel, limit=node_or_pagetag.undeclared_identifiers())
if buffered:
s = "context.get('local').get_cached(%s, defname=%r, %screatefunc=lambda:__M_%s(%s))" % (cachekey, name, ''.join(["%s=%s, " % (k,v) for k, v in cacheargs.iteritems()]), name, ','.join(pass_args))
# apply buffer_filters
s = self.create_filter_callable(self.compiler.buffer_filters, s, False)
self.printer.writelines("return " + s,None)
else:
self.printer.writelines(
"__M_writer(context.get('local').get_cached(%s, defname=%r, %screatefunc=lambda:__M_%s(%s)))" % (cachekey, name, ''.join(["%s=%s, " % (k,v) for k, v in cacheargs.iteritems()]), name, ','.join(pass_args)),
"return ''",
None
)
def create_filter_callable(self, args, target, is_expression):
"""write a filter-applying expression based on the filters present in the given
filter names, adjusting for the global 'default' filter aliases as needed."""
def locate_encode(name):
if re.match(r'decode\..+', name):
return "filters." + name
else:
return filters.DEFAULT_ESCAPES.get(name, name)
if 'n' not in args:
if is_expression:
if self.compiler.pagetag:
args = self.compiler.pagetag.filter_args.args + args
if self.compiler.default_filters:
args = self.compiler.default_filters + args
for e in args:
# if filter given as a function, get just the identifier portion
if e == 'n':
continue
m = re.match(r'(.+?)(\(.*\))', e)
if m:
(ident, fargs) = m.group(1,2)
f = locate_encode(ident)
e = f + fargs
else:
x = e
e = locate_encode(e)
assert e is not None
target = "%s(%s)" % (e, target)
return target
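# Worked example (illustrative; assumes no page tag and empty
# default_filters, and the standard aliases in mako.filters):
#
#     create_filter_callable(['u', 'h'], 'x', True)
#
# wraps the target inside-out and returns roughly
#
#     "filters.html_escape(filters.url_escape(x))"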
def visitExpression(self, node):
self.write_source_comment(node)
if len(node.escapes) or (self.compiler.pagetag is not None and len(self.compiler.pagetag.filter_args.args)) or len(self.compiler.default_filters):
s = self.create_filter_callable(node.escapes_code.args, "%s" % node.text, True)
self.printer.writeline("__M_writer(%s)" % s)
else:
self.printer.writeline("__M_writer(%s)" % node.text)
def visitControlLine(self, node):
if node.isend:
self.printer.writeline(None)
else:
self.write_source_comment(node)
self.printer.writeline(node.text)
def visitText(self, node):
self.write_source_comment(node)
self.printer.writeline("__M_writer(%s)" % repr(node.content))
def visitTextTag(self, node):
filtered = len(node.filter_args.args) > 0
if filtered:
self.printer.writelines(
"__M_writer = context._push_writer()",
"try:",
)
for n in node.nodes:
n.accept_visitor(self)
if filtered:
self.printer.writelines(
"finally:",
"__M_buf, __M_writer = context._pop_buffer_and_writer()",
"__M_writer(%s)" % self.create_filter_callable(node.filter_args.args, "__M_buf.getvalue()", False),
None
)
def visitCode(self, node):
if not node.ismodule:
self.write_source_comment(node)
self.printer.write_indented_block(node.text)
if not self.in_def and len(self.identifiers.locally_assigned) > 0:
# if we are the "template" def, fudge locally declared/modified variables into the "__M_locals" dictionary,
# which is used for def calls within the same template, to simulate "enclosing scope"
self.printer.writeline('__M_locals.update(__M_dict_builtin([(__M_key, __M_locals_builtin()[__M_key]) for __M_key in [%s] if __M_key in __M_locals_builtin()]))' % ','.join([repr(x) for x in node.declared_identifiers()]))
def visitIncludeTag(self, node):
self.write_source_comment(node)
args = node.attributes.get('args')
if args:
self.printer.writeline("runtime._include_file(context, %s, _template_uri, %s)" % (node.parsed_attributes['file'], args))
else:
self.printer.writeline("runtime._include_file(context, %s, _template_uri)" % (node.parsed_attributes['file']))
def visitNamespaceTag(self, node):
pass
def visitDefTag(self, node):
pass
def visitCallNamespaceTag(self, node):
# TODO: we can put namespace-specific checks here, such
# as ensure the given namespace will be imported,
# pre-import the namespace, etc.
self.visitCallTag(node)
def visitCallTag(self, node):
self.printer.writeline("def ccall(caller):")
export = ['body']
callable_identifiers = self.identifiers.branch(node, nested=True)
body_identifiers = callable_identifiers.branch(node, nested=False)
# we want the 'caller' passed to ccall to be used for the body() function,
# but for other non-body() <%def>s within <%call> we want the current caller off the call stack (if any)
body_identifiers.add_declared('caller')
self.identifier_stack.append(body_identifiers)
class DefVisitor(object):
def visitDefTag(s, node):
self.write_inline_def(node, callable_identifiers, nested=False)
export.append(node.name)
# remove defs that are within the <%call> from the "closuredefs" defined
# in the body, so they dont render twice
if node.name in body_identifiers.closuredefs:
del body_identifiers.closuredefs[node.name]
vis = DefVisitor()
for n in node.nodes:
n.accept_visitor(vis)
self.identifier_stack.pop()
bodyargs = node.body_decl.get_argument_expressions()
self.printer.writeline("def body(%s):" % ','.join(bodyargs))
# TODO: figure out best way to specify buffering/nonbuffering (at call time would be better)
buffered = False
if buffered:
self.printer.writelines(
"context._push_buffer()",
"try:"
)
self.write_variable_declares(body_identifiers)
self.identifier_stack.append(body_identifiers)
for n in node.nodes:
n.accept_visitor(self)
self.identifier_stack.pop()
self.write_def_finish(node, buffered, False, False, callstack=False)
self.printer.writelines(
None,
"return [%s]" % (','.join(export)),
None
)
self.printer.writelines(
# get local reference to current caller, if any
"caller = context.caller_stack._get_caller()",
# push on caller for nested call
"context.caller_stack.nextcaller = runtime.Namespace('caller', context, callables=ccall(caller))",
"try:")
self.write_source_comment(node)
self.printer.writelines(
"__M_writer(%s)" % self.create_filter_callable([], node.expression, True),
"finally:",
"context.caller_stack.nextcaller = None",
None
)
class _Identifiers(object):
"""tracks the status of identifier names as template code is rendered."""
def __init__(self, node=None, parent=None, nested=False):
if parent is not None:
# things that have already been declared in an enclosing namespace (i.e. names we can just use)
self.declared = util.Set(parent.declared).union([c.name for c in parent.closuredefs.values()]).union(parent.locally_declared).union(parent.argument_declared)
# if these identifiers correspond to a "nested" scope, it means whatever the
# parent identifiers had as undeclared will have been declared by that parent,
# and therefore we have them in our scope.
if nested:
self.declared = self.declared.union(parent.undeclared)
# top level defs that are available
self.topleveldefs = util.SetLikeDict(**parent.topleveldefs)
else:
self.declared = util.Set()
self.topleveldefs = util.SetLikeDict()
# things within this level that are referenced before they are declared (e.g. assigned to)
self.undeclared = util.Set()
# things that are declared locally. some of these things could be in the "undeclared"
# list as well if they are referenced before declared
self.locally_declared = util.Set()
        # assignments made in explicit python blocks.  these will be propagated to
# the context of local def calls.
self.locally_assigned = util.Set()
# things that are declared in the argument signature of the def callable
self.argument_declared = util.Set()
# closure defs that are defined in this level
self.closuredefs = util.SetLikeDict()
self.node = node
if node is not None:
node.accept_visitor(self)
def branch(self, node, **kwargs):
"""create a new Identifiers for a new Node, with this Identifiers as the parent."""
return _Identifiers(node, self, **kwargs)
defs = property(lambda self:util.Set(self.topleveldefs.union(self.closuredefs).values()))
def __repr__(self):
return "Identifiers(declared=%s, locally_declared=%s, undeclared=%s, topleveldefs=%s, closuredefs=%s, argumenetdeclared=%s)" % (repr(list(self.declared)), repr(list(self.locally_declared)), repr(list(self.undeclared)), repr([c.name for c in self.topleveldefs.values()]), repr([c.name for c in self.closuredefs.values()]), repr(self.argument_declared))
def check_declared(self, node):
"""update the state of this Identifiers with the undeclared and declared identifiers of the given node."""
for ident in node.undeclared_identifiers():
if ident != 'context' and ident not in self.declared.union(self.locally_declared):
self.undeclared.add(ident)
for ident in node.declared_identifiers():
self.locally_declared.add(ident)
def add_declared(self, ident):
self.declared.add(ident)
if ident in self.undeclared:
self.undeclared.remove(ident)
def visitExpression(self, node):
self.check_declared(node)
def visitControlLine(self, node):
self.check_declared(node)
def visitCode(self, node):
if not node.ismodule:
self.check_declared(node)
self.locally_assigned = self.locally_assigned.union(node.declared_identifiers())
def visitDefTag(self, node):
if node.is_root():
self.topleveldefs[node.name] = node
elif node is not self.node:
self.closuredefs[node.name] = node
for ident in node.undeclared_identifiers():
if ident != 'context' and ident not in self.declared.union(self.locally_declared):
self.undeclared.add(ident)
# visit defs only one level deep
if node is self.node:
for ident in node.declared_identifiers():
self.argument_declared.add(ident)
for n in node.nodes:
n.accept_visitor(self)
def visitIncludeTag(self, node):
self.check_declared(node)
def visitPageTag(self, node):
for ident in node.declared_identifiers():
self.argument_declared.add(ident)
self.check_declared(node)
def visitCallNamespaceTag(self, node):
self.visitCallTag(node)
def visitCallTag(self, node):
if node is self.node:
for ident in node.undeclared_identifiers():
if ident != 'context' and ident not in self.declared.union(self.locally_declared):
self.undeclared.add(ident)
for ident in node.declared_identifiers():
self.argument_declared.add(ident)
for n in node.nodes:
n.accept_visitor(self)
else:
for ident in node.undeclared_identifiers():
if ident != 'context' and ident not in self.declared.union(self.locally_declared):
self.undeclared.add(ident)
| apache-2.0 |
joshainglis/ansible | contrib/inventory/cloudstack.py | 111 | 8221 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# (c) 2015, René Moser <[email protected]>
#
# This file is part of Ansible,
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
######################################################################
"""
Ansible CloudStack external inventory script.
=============================================
Generates Ansible inventory from CloudStack. Configuration is read from
'cloudstack.ini'. If you need to pass the project, write a simple wrapper
script, e.g. project_cloudstack.sh:
#!/bin/bash
cloudstack.py --project <your_project> $@
When run against a specific host, this script returns the following attributes
based on the data obtained from CloudStack API:
"web01": {
"cpu_number": 2,
"nic": [
{
"ip": "10.102.76.98",
"mac": "02:00:50:99:00:01",
"type": "Isolated",
"netmask": "255.255.255.0",
"gateway": "10.102.76.1"
},
{
"ip": "10.102.138.63",
"mac": "06:b7:5a:00:14:84",
"type": "Shared",
"netmask": "255.255.255.0",
"gateway": "10.102.138.1"
}
],
"default_ip": "10.102.76.98",
"zone": "ZUERICH",
"created": "2014-07-02T07:53:50+0200",
"hypervisor": "VMware",
"memory": 2048,
"state": "Running",
"tags": [],
"cpu_speed": 1800,
"affinity_group": [],
"service_offering": "Small",
"cpu_used": "62%"
}
usage: cloudstack.py [--list] [--host HOST] [--project PROJECT]
"""
from __future__ import print_function
import os
import sys
import argparse
try:
import json
except ImportError:
import simplejson as json
try:
from cs import CloudStack, CloudStackException, read_config
except ImportError:
print("Error: CloudStack library must be installed: pip install cs.",
file=sys.stderr)
sys.exit(1)
class CloudStackInventory(object):
def __init__(self):
parser = argparse.ArgumentParser()
parser.add_argument('--host')
parser.add_argument('--list', action='store_true')
parser.add_argument('--project')
options = parser.parse_args()
try:
self.cs = CloudStack(**read_config())
        except CloudStackException as e:
            print("Error: Could not connect to CloudStack API", file=sys.stderr)
            sys.exit(1)
project_id = ''
if options.project:
project_id = self.get_project_id(options.project)
        if options.host:
            data = self.get_host(options.host, project_id)
            print(json.dumps(data, indent=2))
        elif options.list:
            data = self.get_list(project_id)
print(json.dumps(data, indent=2))
else:
print("usage: --list | --host <hostname> [--project <project>]",
file=sys.stderr)
sys.exit(1)
def get_project_id(self, project):
projects = self.cs.listProjects()
if projects:
for p in projects['project']:
if p['name'] == project or p['id'] == project:
return p['id']
print("Error: Project %s not found." % project, file=sys.stderr)
sys.exit(1)
def get_host(self, name, project_id=''):
hosts = self.cs.listVirtualMachines(projectid=project_id)
data = {}
if not hosts:
return data
for host in hosts['virtualmachine']:
host_name = host['displayname']
if name == host_name:
data['zone'] = host['zonename']
if 'group' in host:
data['group'] = host['group']
data['state'] = host['state']
data['service_offering'] = host['serviceofferingname']
data['affinity_group'] = host['affinitygroup']
data['security_group'] = host['securitygroup']
data['cpu_number'] = host['cpunumber']
data['cpu_speed'] = host['cpuspeed']
if 'cpuused' in host:
data['cpu_used'] = host['cpuused']
data['memory'] = host['memory']
data['tags'] = host['tags']
data['hypervisor'] = host['hypervisor']
data['created'] = host['created']
data['nic'] = []
for nic in host['nic']:
data['nic'].append({
'ip': nic['ipaddress'],
'mac': nic['macaddress'],
'netmask': nic['netmask'],
'gateway': nic['gateway'],
'type': nic['type'],
})
if nic['isdefault']:
data['default_ip'] = nic['ipaddress']
                        break
return data
def get_list(self, project_id=''):
data = {
'all': {
'hosts': [],
},
'_meta': {
'hostvars': {},
},
}
groups = self.cs.listInstanceGroups(projectid=project_id)
if groups:
for group in groups['instancegroup']:
group_name = group['name']
                if group_name and group_name not in data:
data[group_name] = {
'hosts': []
}
hosts = self.cs.listVirtualMachines(projectid=project_id)
if not hosts:
return data
for host in hosts['virtualmachine']:
host_name = host['displayname']
data['all']['hosts'].append(host_name)
data['_meta']['hostvars'][host_name] = {}
data['_meta']['hostvars'][host_name]['zone'] = host['zonename']
if 'group' in host:
data['_meta']['hostvars'][host_name]['group'] = host['group']
data['_meta']['hostvars'][host_name]['state'] = host['state']
data['_meta']['hostvars'][host_name]['service_offering'] = host['serviceofferingname']
data['_meta']['hostvars'][host_name]['affinity_group'] = host['affinitygroup']
data['_meta']['hostvars'][host_name]['security_group'] = host['securitygroup']
data['_meta']['hostvars'][host_name]['cpu_number'] = host['cpunumber']
data['_meta']['hostvars'][host_name]['cpu_speed'] = host['cpuspeed']
if 'cpuused' in host:
data['_meta']['hostvars'][host_name]['cpu_used'] = host['cpuused']
data['_meta']['hostvars'][host_name]['created'] = host['created']
data['_meta']['hostvars'][host_name]['memory'] = host['memory']
data['_meta']['hostvars'][host_name]['tags'] = host['tags']
data['_meta']['hostvars'][host_name]['hypervisor'] = host['hypervisor']
data['_meta']['hostvars'][host_name]['created'] = host['created']
data['_meta']['hostvars'][host_name]['nic'] = []
for nic in host['nic']:
data['_meta']['hostvars'][host_name]['nic'].append({
'ip': nic['ipaddress'],
'mac': nic['macaddress'],
'netmask': nic['netmask'],
'gateway': nic['gateway'],
'type': nic['type'],
})
if nic['isdefault']:
data['_meta']['hostvars'][host_name]['default_ip'] = nic['ipaddress']
group_name = ''
if 'group' in host:
group_name = host['group']
if group_name and group_name in data:
data[group_name]['hosts'].append(host_name)
return data
if __name__ == '__main__':
CloudStackInventory()
| gpl-3.0 |
mhnatiuk/phd_sociology_of_religion | scrapper/build/scrapy/scrapy/utils/request.py | 15 | 3096 | """
This module provides some useful functions for working with
scrapy.http.Request objects
"""
from __future__ import print_function
import hashlib
import weakref
from urlparse import urlunparse
from twisted.internet.defer import Deferred
from w3lib.http import basic_auth_header
from scrapy.utils.url import canonicalize_url
from scrapy.utils.httpobj import urlparse_cached
_fingerprint_cache = weakref.WeakKeyDictionary()
def request_fingerprint(request, include_headers=None):
"""
Return the request fingerprint.
The request fingerprint is a hash that uniquely identifies the resource the
request points to. For example, take the following two urls:
http://www.example.com/query?id=111&cat=222
http://www.example.com/query?cat=222&id=111
    Even though those are two different URLs, both point to the same resource
    and are equivalent (i.e. they should return the same response).
    Another example is cookies used to store session ids. Suppose the
    following page is only accessible to authenticated users:
    http://www.example.com/members/offers.html
    Lots of sites use a cookie to store the session id, which adds a random
    component to the HTTP Request and thus should be ignored when calculating
    the fingerprint.
    For this reason, request headers are ignored by default when calculating
    the fingerprint. If you want to include specific headers use the
    include_headers argument, which is a list of Request headers to include.
"""
if include_headers:
include_headers = tuple([h.lower() for h in sorted(include_headers)])
cache = _fingerprint_cache.setdefault(request, {})
if include_headers not in cache:
fp = hashlib.sha1()
fp.update(request.method)
fp.update(canonicalize_url(request.url))
fp.update(request.body or '')
if include_headers:
for hdr in include_headers:
if hdr in request.headers:
fp.update(hdr)
for v in request.headers.getlist(hdr):
fp.update(v)
cache[include_headers] = fp.hexdigest()
return cache[include_headers]
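# A usage sketch (hypothetical; assumes scrapy.http.Request objects):
#   from scrapy.http import Request
#   fp1 = request_fingerprint(Request('http://www.example.com/query?id=111&cat=222'))
#   fp2 = request_fingerprint(Request('http://www.example.com/query?cat=222&id=111'))
#   assert fp1 == fp2  # both URLs canonicalize to the same resource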
def request_authenticate(request, username, password):
"""Autenticate the given request (in place) using the HTTP basic access
authentication mechanism (RFC 2617) and the given username and password
"""
request.headers['Authorization'] = basic_auth_header(username, password)
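# Example (illustrative values): request_authenticate(req, 'user', 'pass')
# sets req.headers['Authorization'] to 'Basic dXNlcjpwYXNz' (base64 of 'user:pass').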
def request_httprepr(request):
"""Return the raw HTTP representation (as string) of the given request.
This is provided only for reference since it's not the actual stream of
    bytes that will be sent when performing the request (that's controlled
by Twisted).
"""
parsed = urlparse_cached(request)
path = urlunparse(('', '', parsed.path or '/', parsed.params, parsed.query, ''))
s = "%s %s HTTP/1.1\r\n" % (request.method, path)
s += "Host: %s\r\n" % parsed.hostname
if request.headers:
s += request.headers.to_string() + "\r\n"
s += "\r\n"
s += request.body
return s
| gpl-2.0 |
nishigori/boto | tests/unit/cloudsearch2/test_search.py | 114 | 12329 | #!/usr/bin/env python
from boto.cloudsearch2.domain import Domain
from boto.cloudsearch2.layer1 import CloudSearchConnection
from tests.compat import mock, unittest
from httpretty import HTTPretty
import json
from boto.cloudsearch2.search import SearchConnection, SearchServiceException
from boto.compat import six, map
from tests.unit import AWSMockServiceTestCase
from tests.unit.cloudsearch2 import DEMO_DOMAIN_DATA
from tests.unit.cloudsearch2.test_connection import TestCloudSearchCreateDomain
HOSTNAME = "search-demo-userdomain.us-east-1.cloudsearch.amazonaws.com"
FULL_URL = 'http://%s/2013-01-01/search' % HOSTNAME
class CloudSearchSearchBaseTest(unittest.TestCase):
hits = [
{
'id': '12341',
'fields': {
'title': 'Document 1',
'rank': 1
}
},
{
'id': '12342',
'fields': {
'title': 'Document 2',
'rank': 2
}
},
{
'id': '12343',
'fields': {
'title': 'Document 3',
'rank': 3
}
},
{
'id': '12344',
'fields': {
'title': 'Document 4',
'rank': 4
}
},
{
'id': '12345',
'fields': {
'title': 'Document 5',
'rank': 5
}
},
{
'id': '12346',
'fields': {
'title': 'Document 6',
'rank': 6
}
},
{
'id': '12347',
'fields': {
'title': 'Document 7',
'rank': 7
}
},
]
content_type = "text/xml"
response_status = 200
def get_args(self, requestline):
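        # Parse the query string out of a raw HTTP request line, e.g.
        # b'GET /2013-01-01/search?q=Test HTTP/1.1' -> {b'q': [b'Test'], ...}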
(_, request, _) = requestline.split(b" ")
(_, request) = request.split(b"?", 1)
args = six.moves.urllib.parse.parse_qs(request)
return args
def setUp(self):
HTTPretty.enable()
body = self.response
if not isinstance(body, bytes):
body = json.dumps(body).encode('utf-8')
HTTPretty.register_uri(HTTPretty.GET, FULL_URL,
body=body,
content_type=self.content_type,
status=self.response_status)
def tearDown(self):
HTTPretty.disable()
class CloudSearchSearchTest(CloudSearchSearchBaseTest):
response = {
'rank': '-text_relevance',
'match-expr': "Test",
'hits': {
'found': 30,
'start': 0,
'hit': CloudSearchSearchBaseTest.hits
},
'status': {
'rid': 'b7c167f6c2da6d93531b9a7b314ad030b3a74803b4b7797edb905ba5a6a08',
'time-ms': 2,
'cpu-time-ms': 0
}
}
def test_cloudsearch_qsearch(self):
search = SearchConnection(endpoint=HOSTNAME)
search.search(q='Test', options='TestOptions')
args = self.get_args(HTTPretty.last_request.raw_requestline)
self.assertEqual(args[b'q'], [b"Test"])
self.assertEqual(args[b'q.options'], [b"TestOptions"])
self.assertEqual(args[b'start'], [b"0"])
self.assertEqual(args[b'size'], [b"10"])
def test_cloudsearch_search_details(self):
search = SearchConnection(endpoint=HOSTNAME)
search.search(q='Test', size=50, start=20)
args = self.get_args(HTTPretty.last_request.raw_requestline)
self.assertEqual(args[b'q'], [b"Test"])
self.assertEqual(args[b'size'], [b"50"])
self.assertEqual(args[b'start'], [b"20"])
def test_cloudsearch_facet_constraint_single(self):
search = SearchConnection(endpoint=HOSTNAME)
search.search(
q='Test',
facet={'author': "'John Smith','Mark Smith'"})
args = self.get_args(HTTPretty.last_request.raw_requestline)
self.assertEqual(args[b'facet.author'],
[b"'John Smith','Mark Smith'"])
def test_cloudsearch_facet_constraint_multiple(self):
search = SearchConnection(endpoint=HOSTNAME)
search.search(
q='Test',
facet={'author': "'John Smith','Mark Smith'",
'category': "'News','Reviews'"})
args = self.get_args(HTTPretty.last_request.raw_requestline)
self.assertEqual(args[b'facet.author'],
[b"'John Smith','Mark Smith'"])
self.assertEqual(args[b'facet.category'],
[b"'News','Reviews'"])
def test_cloudsearch_facet_sort_single(self):
search = SearchConnection(endpoint=HOSTNAME)
search.search(q='Test', facet={'author': {'sort': 'alpha'}})
args = self.get_args(HTTPretty.last_request.raw_requestline)
self.assertEqual(args[b'facet.author'], [b'{"sort": "alpha"}'])
def test_cloudsearch_facet_sort_multiple(self):
search = SearchConnection(endpoint=HOSTNAME)
search.search(q='Test', facet={'author': {'sort': 'alpha'},
'cat': {'sort': 'count'}})
args = self.get_args(HTTPretty.last_request.raw_requestline)
self.assertEqual(args[b'facet.author'], [b'{"sort": "alpha"}'])
self.assertEqual(args[b'facet.cat'], [b'{"sort": "count"}'])
def test_cloudsearch_result_fields_single(self):
search = SearchConnection(endpoint=HOSTNAME)
search.search(q='Test', return_fields=['author'])
args = self.get_args(HTTPretty.last_request.raw_requestline)
self.assertEqual(args[b'return'], [b'author'])
def test_cloudsearch_result_fields_multiple(self):
search = SearchConnection(endpoint=HOSTNAME)
search.search(q='Test', return_fields=['author', 'title'])
args = self.get_args(HTTPretty.last_request.raw_requestline)
self.assertEqual(args[b'return'], [b'author,title'])
def test_cloudsearch_results_meta(self):
"""Check returned metadata is parsed correctly"""
search = SearchConnection(endpoint=HOSTNAME)
results = search.search(q='Test')
# These rely on the default response which is fed into HTTPretty
self.assertEqual(results.hits, 30)
self.assertEqual(results.docs[0]['fields']['rank'], 1)
def test_cloudsearch_results_info(self):
"""Check num_pages_needed is calculated correctly"""
search = SearchConnection(endpoint=HOSTNAME)
results = search.search(q='Test')
# This relies on the default response which is fed into HTTPretty
self.assertEqual(results.num_pages_needed, 3.0)
def test_cloudsearch_results_matched(self):
"""
Check that information objects are passed back through the API
correctly.
"""
search = SearchConnection(endpoint=HOSTNAME)
query = search.build_query(q='Test')
results = search(query)
self.assertEqual(results.search_service, search)
self.assertEqual(results.query, query)
def test_cloudsearch_results_hits(self):
"""Check that documents are parsed properly from AWS"""
search = SearchConnection(endpoint=HOSTNAME)
results = search.search(q='Test')
hits = list(map(lambda x: x['id'], results.docs))
# This relies on the default response which is fed into HTTPretty
self.assertEqual(
hits, ["12341", "12342", "12343", "12344",
"12345", "12346", "12347"])
def test_cloudsearch_results_iterator(self):
"""Check the results iterator"""
search = SearchConnection(endpoint=HOSTNAME)
results = search.search(q='Test')
results_correct = iter(["12341", "12342", "12343", "12344",
"12345", "12346", "12347"])
for x in results:
self.assertEqual(x['id'], next(results_correct))
    def test_cloudsearch_results_internal_consistency(self):
"""Check the documents length matches the iterator details"""
search = SearchConnection(endpoint=HOSTNAME)
results = search.search(q='Test')
self.assertEqual(len(results), len(results.docs))
def test_cloudsearch_search_nextpage(self):
"""Check next page query is correct"""
search = SearchConnection(endpoint=HOSTNAME)
query1 = search.build_query(q='Test')
query2 = search.build_query(q='Test')
results = search(query2)
self.assertEqual(results.next_page().query.start,
query1.start + query1.size)
self.assertEqual(query1.q, query2.q)
class CloudSearchSearchFacetTest(CloudSearchSearchBaseTest):
response = {
'rank': '-text_relevance',
'match-expr': "Test",
'hits': {
'found': 30,
'start': 0,
'hit': CloudSearchSearchBaseTest.hits
},
'status': {
'rid': 'b7c167f6c2da6d93531b9a7b314ad030b3a74803b4b7797edb905ba5a6a08',
'time-ms': 2,
'cpu-time-ms': 0
},
'facets': {
'tags': {},
'animals': {'buckets': [{'count': '2', 'value': 'fish'}, {'count': '1', 'value': 'lions'}]},
}
}
def test_cloudsearch_search_facets(self):
search = SearchConnection(endpoint=HOSTNAME)
results = search.search(q='Test', facet={'tags': {}})
self.assertTrue('tags' not in results.facets)
self.assertEqual(results.facets['animals'], {u'lions': u'1', u'fish': u'2'})
class CloudSearchNonJsonTest(CloudSearchSearchBaseTest):
response = b'<html><body><h1>500 Internal Server Error</h1></body></html>'
response_status = 500
content_type = 'text/xml'
def test_response(self):
search = SearchConnection(endpoint=HOSTNAME)
with self.assertRaises(SearchServiceException):
search.search(q='Test')
class CloudSearchUnauthorizedTest(CloudSearchSearchBaseTest):
response = b'<html><body><h1>403 Forbidden</h1>foo bar baz</body></html>'
response_status = 403
content_type = 'text/html'
def test_response(self):
search = SearchConnection(endpoint=HOSTNAME)
with self.assertRaisesRegexp(SearchServiceException, 'foo bar baz'):
search.search(q='Test')
class FakeResponse(object):
status_code = 405
content = b''
class CloudSearchConnectionTest(AWSMockServiceTestCase):
cloudsearch = True
connection_class = CloudSearchConnection
def setUp(self):
super(CloudSearchConnectionTest, self).setUp()
self.conn = SearchConnection(
endpoint='test-domain.cloudsearch.amazonaws.com'
)
def test_expose_additional_error_info(self):
mpo = mock.patch.object
fake = FakeResponse()
fake.content = b'Nopenopenope'
# First, in the case of a non-JSON, non-403 error.
with mpo(self.conn.session, 'get', return_value=fake) as mock_request:
with self.assertRaises(SearchServiceException) as cm:
self.conn.search(q='not_gonna_happen')
self.assertTrue('non-json response' in str(cm.exception))
self.assertTrue('Nopenopenope' in str(cm.exception))
# Then with JSON & an 'error' key within.
fake.content = json.dumps({
'error': "Something went wrong. Oops."
}).encode('utf-8')
with mpo(self.conn.session, 'get', return_value=fake) as mock_request:
with self.assertRaises(SearchServiceException) as cm:
self.conn.search(q='no_luck_here')
self.assertTrue('Unknown error' in str(cm.exception))
self.assertTrue('went wrong. Oops' in str(cm.exception))
def test_proxy(self):
conn = self.service_connection
conn.proxy = "127.0.0.1"
conn.proxy_user = "john.doe"
        conn.proxy_pass = "p4ssw0rd"
        conn.proxy_port = "8180"
conn.use_proxy = True
domain = Domain(conn, DEMO_DOMAIN_DATA)
search = SearchConnection(domain=domain)
self.assertEqual(search.session.proxies, {'http': 'http://john.doe:[email protected]:8180'})
| mit |
tenXer/PyDevSrc | pydevsrc/pydevd.py | 1 | 55520 | #IMPORTANT: pydevd_constants must be the 1st thing defined because it'll keep a reference to the original sys._getframe
from pydevd_constants import * #@UnusedWildImport
import pydev_imports
from pydevd_comm import CMD_CHANGE_VARIABLE, \
CMD_EVALUATE_EXPRESSION, \
CMD_EXEC_EXPRESSION, \
CMD_GET_COMPLETIONS, \
CMD_GET_FRAME, \
CMD_SET_PY_EXCEPTION, \
CMD_GET_VARIABLE, \
CMD_LIST_THREADS, \
CMD_REMOVE_BREAK, \
CMD_RUN, \
CMD_SET_BREAK, \
CMD_SET_NEXT_STATEMENT,\
CMD_STEP_INTO, \
CMD_STEP_OVER, \
CMD_STEP_RETURN, \
CMD_THREAD_CREATE, \
CMD_THREAD_KILL, \
CMD_THREAD_RUN, \
CMD_THREAD_SUSPEND, \
CMD_RUN_TO_LINE, \
CMD_RELOAD_CODE, \
CMD_VERSION, \
CMD_GET_FILE_CONTENTS, \
GetGlobalDebugger, \
InternalChangeVariable, \
InternalGetCompletions, \
InternalEvaluateExpression, \
InternalGetFrame, \
InternalGetVariable, \
InternalTerminateThread, \
InternalRunThread, \
InternalStepThread, \
NetCommand, \
NetCommandFactory, \
PyDBDaemonThread, \
PydevQueue, \
ReaderThread, \
SetGlobalDebugger, \
WriterThread, \
PydevdFindThreadById, \
PydevdLog, \
StartClient, \
StartServer, \
InternalSetNextStatementThread
from pydevd_file_utils import NormFileToServer, GetFilenameAndBase
import pydevd_import_class
import pydevd_vars
import traceback
import pydevd_vm_type
import pydevd_tracing
import pydevd_io
from pydevd_additional_thread_info import PyDBAdditionalThreadInfo
import time
threadingEnumerate = threading.enumerate
threadingCurrentThread = threading.currentThread
DONT_TRACE = {
#commonly used things from the stdlib that we don't want to trace
'threading.py':1,
'Queue.py':1,
'socket.py':1,
#things from pydev that we don't want to trace
'pydevd_additional_thread_info.py':1,
'pydevd_comm.py':1,
'pydevd_constants.py':1,
'pydevd_file_utils.py':1,
'pydevd_frame.py':1,
    'pydevd_io.py':1,
    'pydevd_resolver.py':1,
    'pydevd_tracing.py':1,
    'pydevd_vars.py':1,
    'pydevd_vm_type.py':1,
    'pydevd.py':1,
'pydevd_psyco_stub.py':1
}
if IS_PY3K:
#if we try to trace io.py it seems it can get halted (see http://bugs.python.org/issue4716)
DONT_TRACE['io.py'] = 1
connected = False
bufferStdOutToServer = False
bufferStdErrToServer = False
#=======================================================================================================================
# PyDBCommandThread
#=======================================================================================================================
class PyDBCommandThread(PyDBDaemonThread):
def __init__(self, pyDb):
PyDBDaemonThread.__init__(self)
self.pyDb = pyDb
self.setName('pydevd.CommandThread')
def OnRun(self):
        time.sleep(5) #this one will only start later on (because otherwise we may not have any non-daemon threads alive)
run_traced = True
if pydevd_vm_type.GetVmType() == pydevd_vm_type.PydevdVmType.JYTHON and sys.hexversion <= 0x020201f0:
#don't run untraced threads if we're in jython 2.2.1 or lower
#jython bug: if we start a thread and another thread changes the tracing facility
#it affects other threads (it's not set only for the thread but globally)
#Bug: http://sourceforge.net/tracker/index.php?func=detail&aid=1870039&group_id=12867&atid=112867
run_traced = False
if run_traced:
pydevd_tracing.SetTrace(None) # no debugging on this thread
try:
while not self.killReceived:
try:
self.pyDb.processInternalCommands()
except:
PydevdLog(0, 'Finishing debug communication...(2)')
time.sleep(0.5)
except:
pass
#only got this error in interpreter shutdown
#PydevdLog(0, 'Finishing debug communication...(3)')
_original_excepthook = None
#=======================================================================================================================
# excepthook
#=======================================================================================================================
def excepthook(exctype, value, tb):
#Always call the original excepthook before going on to call the debugger post mortem to show it.
_original_excepthook(exctype, value, tb)
debugger = GetGlobalDebugger()
if debugger is None or not debugger.break_on_uncaught:
return
if debugger.handle_exceptions is not None:
if not issubclass(exctype, debugger.handle_exceptions):
return
frames = []
while tb:
frames.append(tb.tb_frame)
tb = tb.tb_next
thread = threadingCurrentThread()
frames_byid = dict([(id(frame),frame) for frame in frames])
frame = frames[-1]
thread.additionalInfo.pydev_force_stop_at_exception = (frame, frames_byid)
debugger = GetGlobalDebugger()
debugger.force_post_mortem_stop += 1
#=======================================================================================================================
# set_pm_excepthook
#=======================================================================================================================
def set_pm_excepthook(handle_exceptions=None):
'''
This function is now deprecated (PyDev provides an UI to handle that now).
'''
raise DeprecationWarning(
'This function is now replaced by GetGlobalDebugger().setExceptHook and is now controlled by the PyDev UI.')
try:
import thread
except ImportError:
import _thread as thread #Py3K changed it.
_original_start_new_thread = thread.start_new_thread
#=======================================================================================================================
# NewThreadStartup
#=======================================================================================================================
class NewThreadStartup:
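    '''Wraps the original target of a newly started thread so that the
    debugger's trace function is installed on that thread before it runs.'''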
def __init__(self, original_func, args, kwargs):
self.original_func = original_func
self.args = args
self.kwargs = kwargs
def __call__(self):
global_debugger = GetGlobalDebugger()
pydevd_tracing.SetTrace(global_debugger.trace_dispatch)
self.original_func(*self.args, **self.kwargs)
#=======================================================================================================================
# pydev_start_new_thread
#=======================================================================================================================
def pydev_start_new_thread(function, args, kwargs={}):
'''
We need to replace the original thread.start_new_thread with this function so that threads started through
it and not through the threading module are properly traced.
'''
return _original_start_new_thread(NewThreadStartup(function, args, kwargs), ())
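# With this patch applied (done in PyDB.run and settrace below), code such as
#   thread.start_new_thread(worker, (arg,))
# transparently runs worker under the debugger's trace function.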
#=======================================================================================================================
# PyDB
#=======================================================================================================================
class PyDB:
""" Main debugging class
Lots of stuff going on here:
PyDB starts two threads on startup that connect to remote debugger (RDB)
The threads continuously read & write commands to RDB.
PyDB communicates with these threads through command queues.
Every RDB command is processed by calling processNetCommand.
Every PyDB net command is sent to the net by posting NetCommand to WriterThread queue
Some commands need to be executed on the right thread (suspend/resume & friends)
These are placed on the internal command queue.
"""
def __init__(self):
SetGlobalDebugger(self)
pydevd_tracing.ReplaceSysSetTraceFunc()
self.reader = None
self.writer = None
self.quitting = None
self.cmdFactory = NetCommandFactory()
self._cmd_queue = {} # the hash of Queues. Key is thread id, value is thread
self.breakpoints = {}
self.readyToRun = False
self._main_lock = threading.Lock()
self._lock_running_thread_ids = threading.Lock()
self._finishDebuggingSession = False
self.force_post_mortem_stop = 0
self.break_on_uncaught = False
self.break_on_caught = False
self.handle_exceptions = None
#this is a dict of thread ids pointing to thread ids. Whenever a command is passed to the java end that
#acknowledges that a thread was created, the thread id should be passed here -- and if at some time we do not
#find that thread alive anymore, we must remove it from this list and make the java side know that the thread
#was killed.
self._running_thread_ids = {}
def FinishDebuggingSession(self):
self._finishDebuggingSession = True
def initializeNetwork(self, sock):
try:
sock.settimeout(None) # infinite, no timeouts from now on - jython does not have it
except:
pass
self.writer = WriterThread(sock)
self.reader = ReaderThread(sock)
self.writer.start()
self.reader.start()
time.sleep(0.1) # give threads time to start
def connect(self, host, port):
if host:
s = StartClient(host, port)
else:
s = StartServer(port)
self.initializeNetwork(s)
def getInternalQueue(self, thread_id):
""" returns internal command queue for a given thread.
if new queue is created, notify the RDB about it """
try:
return self._cmd_queue[thread_id]
except KeyError:
return self._cmd_queue.setdefault(thread_id, PydevQueue.Queue()) #@UndefinedVariable
def postInternalCommand(self, int_cmd, thread_id):
""" if thread_id is *, post to all """
if thread_id == "*":
for k in self._cmd_queue.keys():
self._cmd_queue[k].put(int_cmd)
else:
queue = self.getInternalQueue(thread_id)
queue.put(int_cmd)
def checkOutput(self, out, outCtx):
'''Checks the output to see if we have to send some buffered output to the debug server
@param out: sys.stdout or sys.stderr
@param outCtx: the context indicating: 1=stdout and 2=stderr (to know the colors to write it)
'''
try:
v = out.getvalue()
if v:
self.cmdFactory.makeIoMessage(v, outCtx, self)
except:
traceback.print_exc()
def processInternalCommands(self):
'''This function processes internal commands
'''
curr_thread_id = GetThreadId(threadingCurrentThread())
program_threads_alive = {}
all_threads = threadingEnumerate()
program_threads_dead = []
self._main_lock.acquire()
try:
if bufferStdOutToServer:
self.checkOutput(sys.stdoutBuf, 1) #@UndefinedVariable
if bufferStdErrToServer:
self.checkOutput(sys.stderrBuf, 2) #@UndefinedVariable
self._lock_running_thread_ids.acquire()
try:
for t in all_threads:
thread_id = GetThreadId(t)
if not isinstance(t, PyDBDaemonThread) and t.isAlive():
program_threads_alive[thread_id] = t
if not DictContains(self._running_thread_ids, thread_id):
if not hasattr(t, 'additionalInfo'):
#see http://sourceforge.net/tracker/index.php?func=detail&aid=1955428&group_id=85796&atid=577329
#Let's create the additional info right away!
t.additionalInfo = PyDBAdditionalThreadInfo()
self._running_thread_ids[thread_id] = t
self.writer.addCommand(self.cmdFactory.makeThreadCreatedMessage(t))
queue = self.getInternalQueue(thread_id)
cmdsToReadd = [] #some commands must be processed by the thread itself... if that's the case,
#we will re-add the commands to the queue after executing.
try:
while True:
int_cmd = queue.get(False)
if int_cmd.canBeExecutedBy(curr_thread_id):
PydevdLog(2, "processing internal command ", str(int_cmd))
int_cmd.doIt(self)
else:
PydevdLog(2, "NOT processing internal command ", str(int_cmd))
cmdsToReadd.append(int_cmd)
except PydevQueue.Empty: #@UndefinedVariable
for int_cmd in cmdsToReadd:
queue.put(int_cmd)
# this is how we exit
thread_ids = list(self._running_thread_ids.keys())
for tId in thread_ids:
if not DictContains(program_threads_alive, tId):
program_threads_dead.append(tId)
finally:
self._lock_running_thread_ids.release()
for tId in program_threads_dead:
try:
self.processThreadNotAlive(tId)
except:
sys.stderr.write('Error iterating through %s (%s) - %s\n' % (
program_threads_alive, program_threads_alive.__class__, dir(program_threads_alive)))
raise
if len(program_threads_alive) == 0:
self.FinishDebuggingSession()
for t in all_threads:
if hasattr(t, 'doKillPydevThread'):
t.doKillPydevThread()
finally:
self._main_lock.release()
def setTracingForUntracedContexts(self):
#Enable the tracing for existing threads (because there may be frames being executed that
#are currently untraced).
threads = threadingEnumerate()
for t in threads:
if not t.getName().startswith('pydevd.'):
#TODO: optimize so that we only actually add that tracing if it's in
#the new breakpoint context.
additionalInfo = None
try:
additionalInfo = t.additionalInfo
except AttributeError:
pass #that's ok, no info currently set
if additionalInfo is not None:
for frame in additionalInfo.IterFrames():
self.SetTraceForFrameAndParents(frame)
del frame
def processNetCommand(self, cmd_id, seq, text):
'''Processes a command received from the Java side
@param cmd_id: the id of the command
@param seq: the sequence of the command
@param text: the text received in the command
@note: this method is run as a big switch... after doing some tests, it's not clear whether changing it for
a dict id --> function call will have better performance result. A simple test with xrange(10000000) showed
that the gains from having a fast access to what should be executed are lost because of the function call in
a way that if we had 10 elements in the switch the if..elif are better -- but growing the number of choices
makes the solution with the dispatch look better -- so, if this gets more than 20-25 choices at some time,
it may be worth refactoring it (actually, reordering the ifs so that the ones used mostly come before
probably will give better performance).
'''
self._main_lock.acquire()
try:
try:
cmd = None
if cmd_id == CMD_RUN:
self.readyToRun = True
elif cmd_id == CMD_VERSION:
# response is version number
cmd = self.cmdFactory.makeVersionMessage(seq)
elif cmd_id == CMD_LIST_THREADS:
# response is a list of threads
cmd = self.cmdFactory.makeListThreadsMessage(seq)
elif cmd_id == CMD_THREAD_KILL:
int_cmd = InternalTerminateThread(text)
self.postInternalCommand(int_cmd, text)
elif cmd_id == CMD_THREAD_SUSPEND:
#Yes, thread suspend is still done at this point, not through an internal command!
t = PydevdFindThreadById(text)
if t:
additionalInfo = None
try:
additionalInfo = t.additionalInfo
except AttributeError:
pass #that's ok, no info currently set
if additionalInfo is not None:
for frame in additionalInfo.IterFrames():
self.SetTraceForFrameAndParents(frame)
del frame
self.setSuspend(t, CMD_THREAD_SUSPEND)
elif cmd_id == CMD_THREAD_RUN:
t = PydevdFindThreadById(text)
if t:
thread_id = GetThreadId(t)
int_cmd = InternalRunThread(thread_id)
self.postInternalCommand(int_cmd, thread_id)
elif cmd_id == CMD_STEP_INTO or cmd_id == CMD_STEP_OVER or cmd_id == CMD_STEP_RETURN:
#we received some command to make a single step
t = PydevdFindThreadById(text)
if t:
thread_id = GetThreadId(t)
int_cmd = InternalStepThread(thread_id, cmd_id)
self.postInternalCommand(int_cmd, thread_id)
elif cmd_id == CMD_RUN_TO_LINE or cmd_id == CMD_SET_NEXT_STATEMENT:
#we received some command to make a single step
thread_id, line, func_name = text.split('\t', 2)
t = PydevdFindThreadById(thread_id)
if t:
int_cmd = InternalSetNextStatementThread(thread_id, cmd_id, line, func_name)
self.postInternalCommand(int_cmd, thread_id)
elif cmd_id == CMD_RELOAD_CODE:
#we received some command to make a reload of a module
module_name = text.strip()
from pydevd_reload import xreload
if not DictContains(sys.modules, module_name):
if '.' in module_name:
new_module_name = module_name.split('.')[-1]
if DictContains(sys.modules, new_module_name):
module_name = new_module_name
if not DictContains(sys.modules, module_name):
sys.stderr.write('pydev debugger: Unable to find module to reload: "'+module_name+'".\n')
sys.stderr.write('pydev debugger: This usually means you are trying to reload the __main__ module (which cannot be reloaded).\n')
else:
sys.stderr.write('pydev debugger: Reloading: '+module_name+'\n')
xreload(sys.modules[module_name])
elif cmd_id == CMD_CHANGE_VARIABLE:
#the text is: thread\tstackframe\tFRAME|GLOBAL\tattribute_to_change\tvalue_to_change
try:
thread_id, frame_id, scope, attr_and_value = text.split('\t', 3)
tab_index = attr_and_value.rindex('\t')
attr = attr_and_value[0:tab_index].replace('\t', '.')
value = attr_and_value[tab_index + 1:]
int_cmd = InternalChangeVariable(seq, thread_id, frame_id, scope, attr, value)
self.postInternalCommand(int_cmd, thread_id)
except:
traceback.print_exc()
elif cmd_id == CMD_GET_VARIABLE:
#we received some command to get a variable
#the text is: thread_id\tframe_id\tFRAME|GLOBAL\tattributes*
try:
thread_id, frame_id, scopeattrs = text.split('\t', 2)
if scopeattrs.find('\t') != -1: # there are attributes beyond scope
scope, attrs = scopeattrs.split('\t', 1)
else:
scope, attrs = (scopeattrs, None)
int_cmd = InternalGetVariable(seq, thread_id, frame_id, scope, attrs)
self.postInternalCommand(int_cmd, thread_id)
except:
traceback.print_exc()
elif cmd_id == CMD_GET_COMPLETIONS:
#we received some command to get a variable
#the text is: thread_id\tframe_id\tactivation token
try:
thread_id, frame_id, scope, act_tok = text.split('\t', 3)
int_cmd = InternalGetCompletions(seq, thread_id, frame_id, act_tok)
self.postInternalCommand(int_cmd, thread_id)
except:
traceback.print_exc()
elif cmd_id == CMD_GET_FRAME:
thread_id, frame_id, scope = text.split('\t', 2)
int_cmd = InternalGetFrame(seq, thread_id, frame_id)
self.postInternalCommand(int_cmd, thread_id)
elif cmd_id == CMD_SET_BREAK:
#func name: 'None': match anything. Empty: match global, specified: only method context.
#command to add some breakpoint.
# text is file\tline. Add to breakpoints dictionary
file, line, condition = text.split('\t', 2)
if condition.startswith('**FUNC**'):
func_name, condition = condition.split('\t', 1)
#We must restore new lines and tabs as done in
#AbstractDebugTarget.breakpointAdded
condition = condition.replace("@_@NEW_LINE_CHAR@_@", '\n').\
replace("@_@TAB_CHAR@_@", '\t').strip()
func_name = func_name[8:]
else:
func_name = 'None' #Match anything if not specified.
file = NormFileToServer(file)
if not os.path.exists(file):
sys.stderr.write('pydev debugger: warning: trying to add breakpoint'\
' to file that does not exist: %s (will have no effect)\n' % (file,))
line = int(line)
if DEBUG_TRACE_BREAKPOINTS > 0:
sys.stderr.write('Added breakpoint:%s - line:%s - func_name:%s\n' % (file, line, func_name))
if DictContains(self.breakpoints, file):
breakDict = self.breakpoints[file]
else:
breakDict = {}
                    if not condition or condition == "None":
breakDict[line] = (True, None, func_name)
else:
breakDict[line] = (True, condition, func_name)
self.breakpoints[file] = breakDict
self.setTracingForUntracedContexts()
elif cmd_id == CMD_REMOVE_BREAK:
#command to remove some breakpoint
#text is file\tline. Remove from breakpoints dictionary
file, line = text.split('\t', 1)
file = NormFileToServer(file)
try:
line = int(line)
except ValueError:
pass
else:
try:
del self.breakpoints[file][line] #remove the breakpoint in that line
if DEBUG_TRACE_BREAKPOINTS > 0:
sys.stderr.write('Removed breakpoint:%s\n' % (file,))
except KeyError:
#ok, it's not there...
if DEBUG_TRACE_BREAKPOINTS > 0:
#Sometimes, when adding a breakpoint, it adds a remove command before (don't really know why)
sys.stderr.write("breakpoint not found: %s - %s\n" % (file, line))
elif cmd_id == CMD_EVALUATE_EXPRESSION or cmd_id == CMD_EXEC_EXPRESSION:
#command to evaluate the given expression
#text is: thread\tstackframe\tLOCAL\texpression
thread_id, frame_id, scope, expression = text.split('\t', 3)
int_cmd = InternalEvaluateExpression(seq, thread_id, frame_id, expression,
cmd_id == CMD_EXEC_EXPRESSION)
self.postInternalCommand(int_cmd, thread_id)
elif cmd_id == CMD_SET_PY_EXCEPTION:
# Command which receives set of exceptions on which user wants to break the debugger
# text is: break_on_uncaught;break_on_caught;TypeError;ImportError;zipimport.ZipImportError;
splitted = text.split(';')
if len(splitted) >= 2:
if splitted[0] == 'true':
break_on_uncaught = True
else:
break_on_uncaught = False
if splitted[1] == 'true':
break_on_caught = True
else:
break_on_caught = False
handle_exceptions = []
for exception_type in splitted[2:]:
exception_type = exception_type.strip()
if not exception_type:
continue
try:
handle_exceptions.append(eval(exception_type))
except:
try:
handle_exceptions.append(pydevd_import_class.ImportName(exception_type))
except:
sys.stderr.write("Unable to Import: %s when determining exceptions to break.\n" % (exception_type,))
if DEBUG_TRACE_BREAKPOINTS > 0:
sys.stderr.write("Exceptions to hook : %s\n" % (handle_exceptions,))
self.setExceptHook(tuple(handle_exceptions), break_on_uncaught, break_on_caught)
self.setTracingForUntracedContexts()
else:
sys.stderr.write("Error when setting exception list. Received: %s\n" % (text,))
elif cmd_id == CMD_GET_FILE_CONTENTS:
if os.path.exists(text):
f = open(text, 'r')
try:
source = f.read()
finally:
f.close()
cmd = self.cmdFactory.makeGetFileContents(seq, source)
else:
                    #unrecognized command id: report an error back to the client
cmd = self.cmdFactory.makeErrorMessage(seq, "unexpected command " + str(cmd_id))
if cmd is not None:
self.writer.addCommand(cmd)
del cmd
except Exception:
traceback.print_exc()
cmd = self.cmdFactory.makeErrorMessage(seq,
"Unexpected exception in processNetCommand.\nInitial params: %s" % ((cmd_id, seq, text),))
self.writer.addCommand(cmd)
finally:
self._main_lock.release()
def setExceptHook(self, handle_exceptions, break_on_uncaught, break_on_caught):
'''
Should be called to set the exceptions to be handled and whether it should break on uncaught and
caught exceptions.
Can receive a parameter to stop only on some exceptions.
E.g.:
set_pm_excepthook((IndexError, ValueError), True, True)
or
set_pm_excepthook(IndexError, True, False)
if passed without a parameter, will break on any exception
@param handle_exceptions: exception or tuple(exceptions)
The exceptions that should be handled.
@param break_on_uncaught bool
Whether it should break on uncaught exceptions.
@param break_on_caught: bool
Whether it should break on caught exceptions.
'''
global _original_excepthook
if sys.excepthook != excepthook:
#Only keep the original if it's not our own excepthook (if called many times).
_original_excepthook = sys.excepthook
self.handle_exceptions = handle_exceptions
#Note that we won't set to break if we don't have any exception to break on
self.break_on_uncaught = handle_exceptions and break_on_uncaught
self.break_on_caught = handle_exceptions and break_on_caught
sys.excepthook = excepthook
def processThreadNotAlive(self, threadId):
""" if thread is not alive, cancel trace_dispatch processing """
self._lock_running_thread_ids.acquire()
try:
thread = self._running_thread_ids.pop(threadId, None)
if thread is None:
return
wasNotified = thread.additionalInfo.pydev_notify_kill
if not wasNotified:
thread.additionalInfo.pydev_notify_kill = True
finally:
self._lock_running_thread_ids.release()
cmd = self.cmdFactory.makeThreadKilledMessage(threadId)
self.writer.addCommand(cmd)
def setSuspend(self, thread, stop_reason):
thread.additionalInfo.pydev_state = STATE_SUSPEND
thread.stop_reason = stop_reason
def doWaitSuspend(self, thread, frame, event, arg): #@UnusedVariable
""" busy waits until the thread state changes to RUN
it expects thread's state as attributes of the thread.
Upon running, processes any outstanding Stepping commands.
"""
self.processInternalCommands()
cmd = self.cmdFactory.makeThreadSuspendMessage(GetThreadId(thread), frame, thread.stop_reason)
self.writer.addCommand(cmd)
info = thread.additionalInfo
while info.pydev_state == STATE_SUSPEND and not self._finishDebuggingSession:
self.processInternalCommands()
time.sleep(0.01)
#process any stepping instructions
if info.pydev_step_cmd == CMD_STEP_INTO:
info.pydev_step_stop = None
elif info.pydev_step_cmd == CMD_STEP_OVER:
info.pydev_step_stop = frame
self.SetTraceForFrameAndParents(frame)
        elif info.pydev_step_cmd == CMD_RUN_TO_LINE or info.pydev_step_cmd == CMD_SET_NEXT_STATEMENT:
self.SetTraceForFrameAndParents(frame)
if event == 'line' or event == 'exception':
#If we're already in the correct context, we have to stop it now, because we can act only on
#line events -- if a return was the next statement it wouldn't work (so, we have this code
#repeated at pydevd_frame).
stop = False
curr_func_name = frame.f_code.co_name
#global context is set with an empty name
if curr_func_name in ('?', '<module>'):
curr_func_name = ''
if curr_func_name == info.pydev_func_name:
line = info.pydev_next_line
if frame.f_lineno == line:
stop = True
else:
if frame.f_trace is None:
frame.f_trace = self.trace_dispatch
frame.f_lineno = line
frame.f_trace = None
stop = True
if stop:
info.pydev_state = STATE_SUSPEND
self.doWaitSuspend(thread, frame, event, arg)
return
elif info.pydev_step_cmd == CMD_STEP_RETURN:
back_frame = frame.f_back
if back_frame is not None:
#steps back to the same frame (in a return call it will stop in the 'back frame' for the user)
info.pydev_step_stop = frame
self.SetTraceForFrameAndParents(frame)
else:
#No back frame?!? -- this happens in jython when we have some frame created from an awt event
                #(the previous frame would be the awt event, which is part of 'java', not 'jython')
#so, if we're doing a step return in this situation, it's the same as just making it run
info.pydev_step_stop = None
info.pydev_step_cmd = None
info.pydev_state = STATE_RUN
del frame
cmd = self.cmdFactory.makeThreadRunMessage(GetThreadId(thread), info.pydev_step_cmd)
self.writer.addCommand(cmd)
def trace_dispatch(self, frame, event, arg):
''' This is the callback used when we enter some context in the debugger.
We also decorate the thread we are in with info about the debugging.
The attributes added are:
pydev_state
pydev_step_stop
pydev_step_cmd
pydev_notify_kill
'''
try:
if self._finishDebuggingSession:
#that was not working very well because jython gave some socket errors
threads = threadingEnumerate()
for t in threads:
if hasattr(t, 'doKillPydevThread'):
t.doKillPydevThread()
return None
filename, base = GetFilenameAndBase(frame)
is_file_to_ignore = DictContains(DONT_TRACE, base) #we don't want to debug threading or anything related to pydevd
if not self.force_post_mortem_stop: #If we're in post mortem mode, we might not have another chance to show that info!
if is_file_to_ignore:
return None
#print('trace_dispatch', base, frame.f_lineno, event, frame.f_code.co_name)
try:
#this shouldn't give an exception, but it could happen... (python bug)
#see http://mail.python.org/pipermail/python-bugs-list/2007-June/038796.html
#and related bug: http://bugs.python.org/issue1733757
t = threadingCurrentThread()
except:
frame.f_trace = self.trace_dispatch
return self.trace_dispatch
try:
additionalInfo = t.additionalInfo
except:
additionalInfo = t.additionalInfo = PyDBAdditionalThreadInfo()
if self.force_post_mortem_stop: #If we're in post mortem mode, we might not have another chance to show that info!
if additionalInfo.pydev_force_stop_at_exception:
self.force_post_mortem_stop -= 1
frame, frames_byid = additionalInfo.pydev_force_stop_at_exception
thread_id = GetThreadId(t)
used_id = pydevd_vars.addAdditionalFrameById(thread_id, frames_byid)
try:
self.setSuspend(t, CMD_STEP_INTO)
self.doWaitSuspend(t, frame, 'exception', None)
finally:
additionalInfo.pydev_force_stop_at_exception = None
pydevd_vars.removeAdditionalFrameById(thread_id)
# if thread is not alive, cancel trace_dispatch processing
if not t.isAlive():
self.processThreadNotAlive(GetThreadId(t))
return None # suspend tracing
if is_file_to_ignore:
return None
#each new frame...
return additionalInfo.CreateDbFrame((self, filename, additionalInfo, t, frame)).trace_dispatch(frame, event, arg)
except SystemExit:
return None
except Exception:
#Log it
if traceback is not None:
#This can actually happen during the interpreter shutdown in Python 2.7
traceback.print_exc()
return None
if USE_PSYCO_OPTIMIZATION:
try:
import psyco
trace_dispatch = psyco.proxy(trace_dispatch)
processNetCommand = psyco.proxy(processNetCommand)
processInternalCommands = psyco.proxy(processInternalCommands)
doWaitSuspend = psyco.proxy(doWaitSuspend)
getInternalQueue = psyco.proxy(getInternalQueue)
except ImportError:
if hasattr(sys, 'exc_clear'): #jython does not have it
sys.exc_clear() #don't keep the traceback (let's keep it clear for when we go to the point of executing client code)
if not IS_PY3K and not IS_PY27 and not IS_64_BITS and not sys.platform.startswith("java") and not sys.platform.startswith("cli"):
sys.stderr.write("pydev debugger: warning: psyco not available for speedups (the debugger will still work correctly, but a bit slower)\n")
def SetTraceForFrameAndParents(self, frame, also_add_to_passed_frame=True):
dispatch_func = self.trace_dispatch
if also_add_to_passed_frame:
if frame.f_trace is None:
frame.f_trace = dispatch_func
else:
try:
#If it's the trace_exception, go back to the frame trace dispatch!
if frame.f_trace.im_func.__name__ == 'trace_exception':
frame.f_trace = frame.f_trace.im_self.trace_dispatch
except AttributeError:
pass
frame = frame.f_back
while frame:
if frame.f_trace is None:
frame.f_trace = dispatch_func
else:
try:
#If it's the trace_exception, go back to the frame trace dispatch!
if frame.f_trace.im_func.__name__ == 'trace_exception':
frame.f_trace = frame.f_trace.im_self.trace_dispatch
except AttributeError:
pass
frame = frame.f_back
del frame
def run(self, file, globals=None, locals=None):
if globals is None:
#patch provided by: Scott Schlesier - when script is run, it does not
#use globals from pydevd:
#This will prevent the pydevd script from contaminating the namespace for the script to be debugged
#pretend pydevd is not the main module, and
#convince the file to be debugged that it was loaded as main
sys.modules['pydevd'] = sys.modules['__main__']
sys.modules['pydevd'].__name__ = 'pydevd'
from imp import new_module
m = new_module('__main__')
sys.modules['__main__'] = m
m.__file__ = file
globals = m.__dict__
try:
globals['__builtins__'] = __builtins__
except NameError:
pass #Not there on Jython...
if locals is None:
locals = globals
#Predefined (writable) attributes: __name__ is the module's name;
#__doc__ is the module's documentation string, or None if unavailable;
#__file__ is the pathname of the file from which the module was loaded,
#if it was loaded from a file. The __file__ attribute is not present for
#C modules that are statically linked into the interpreter; for extension modules
#loaded dynamically from a shared library, it is the pathname of the shared library file.
        #I think this is an ugly hack, but it works (seems to) for the bug that says that sys.path should be the same in
#debug and run.
if m.__file__.startswith(sys.path[0]):
#print >> sys.stderr, 'Deleting: ', sys.path[0]
del sys.path[0]
#now, the local directory has to be added to the pythonpath
#sys.path.insert(0, os.getcwd())
#Changed: it's not the local directory, but the directory of the file launched
        #The file being run must be in the pythonpath (even if it was not before)
sys.path.insert(0, os.path.split(file)[0])
        # for completeness, we'll register the pydevd.reader & pydevd.writer threads
net = NetCommand(str(CMD_THREAD_CREATE), 0, '<xml><thread name="pydevd.reader" id="-1"/></xml>')
self.writer.addCommand(net)
net = NetCommand(str(CMD_THREAD_CREATE), 0, '<xml><thread name="pydevd.writer" id="-1"/></xml>')
self.writer.addCommand(net)
pydevd_tracing.SetTrace(self.trace_dispatch)
try:
#not available in jython!
threading.settrace(self.trace_dispatch) # for all future threads
except:
pass
try:
thread.start_new_thread = pydev_start_new_thread
thread.start_new = pydev_start_new_thread
except:
pass
while not self.readyToRun:
time.sleep(0.1) # busy wait until we receive run command
PyDBCommandThread(debugger).start()
pydev_imports.execfile(file, globals, locals) #execute the script
def processCommandLine(argv):
""" parses the arguments.
removes our arguments from the command line """
retVal = {}
retVal['client'] = ''
retVal['server'] = False
retVal['port'] = 0
retVal['file'] = ''
i = 0
del argv[0]
while (i < len(argv)):
if (argv[i] == '--port'):
del argv[i]
retVal['port'] = int(argv[i])
del argv[i]
elif (argv[i] == '--vm_type'):
del argv[i]
retVal['vm_type'] = argv[i]
del argv[i]
elif (argv[i] == '--client'):
del argv[i]
retVal['client'] = argv[i]
del argv[i]
elif (argv[i] == '--server'):
del argv[i]
retVal['server'] = True
elif (argv[i] == '--file'):
del argv[i]
            retVal['file'] = argv[i]
i = len(argv) # pop out, file is our last argument
elif (argv[i] == '--DEBUG_RECORD_SOCKET_READS'):
del argv[i]
retVal['DEBUG_RECORD_SOCKET_READS'] = True
else:
raise ValueError("unexpected option " + argv[i])
return retVal
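# Illustrative invocation (hypothetical arguments):
#   pydevd.py --port 5678 --client 127.0.0.1 --file my_script.py arg1
# parses to {'port': 5678, 'client': '127.0.0.1', 'server': False, 'file': 'my_script.py'}
# and leaves ['my_script.py', 'arg1'] in argv for the debugged script.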
def usage(doExit=0):
sys.stdout.write('Usage:\n')
    sys.stdout.write('pydevd.py --port N [(--client hostname) | --server] --file executable [file_options]\n')
if doExit:
sys.exit(0)
#=======================================================================================================================
# patch_django_autoreload
#=======================================================================================================================
def patch_django_autoreload():
'''
Patch Django to work with remote debugger without adding an explicit
pydevd.settrace to set a breakpoint (i.e.: setup the remote debugger machinery
and don't suspend now -- this will load the breakpoints and will listen to
changes in them so that we do stop on the breakpoints set in the editor).
    Checked with Django 1.2.5.
    Checked with Django 1.3.
'''
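    # Intended use (a sketch): call this early in manage.py, before Django's
    # command execution kicks off the autoreloader, e.g.:
    #   import pydevd
    #   pydevd.patch_django_autoreload()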
if ('runserver' in sys.argv or 'testserver' in sys.argv):
from django.utils import autoreload
original_main = autoreload.main
def main(main_func, args=None, kwargs=None):
if os.environ.get("RUN_MAIN") == "true":
original_main_func = main_func
def pydev_debugger_main_func(*args, **kwargs):
settrace(suspend=False)
return original_main_func(*args, **kwargs)
main_func = pydev_debugger_main_func
return original_main(main_func, args, kwargs)
autoreload.main = main
#=======================================================================================================================
# settrace
#=======================================================================================================================
def settrace(host=None, stdoutToServer=False, stderrToServer=False, port=5678, suspend=True, trace_only_current_thread=True):
'''Sets the tracing function with the pydev debug function and initializes needed facilities.
@param host: the user may specify another host, if the debug server is not in the same machine (default is the local host)
@param stdoutToServer: when this is true, the stdout is passed to the debug server
@param stderrToServer: when this is true, the stderr is passed to the debug server
so that they are printed in its console and not in this process console.
@param port: specifies which port to use for communicating with the server (note that the server must be started
in the same port). @note: currently it's hard-coded at 5678 in the client
@param suspend: whether a breakpoint should be emulated as soon as this function is called.
@param trace_only_current_thread: determines if only the current thread will be traced or all future threads will also have the tracing enabled.
'''
_set_trace_lock.acquire()
try:
_locked_settrace(host, stdoutToServer, stderrToServer, port, suspend, trace_only_current_thread)
finally:
_set_trace_lock.release()
_set_trace_lock = threading.Lock()
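# Illustrative sketch (not part of the original module): a program being
# debugged remotely would typically call something like
#
#   import pydevd
#   pydevd.settrace('192.168.0.1', port=5678, suspend=True)
#
# where '192.168.0.1' is a hypothetical host running the debug server; the
# call blocks until the debug server sends the run command.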
def _locked_settrace(host, stdoutToServer, stderrToServer, port, suspend, trace_only_current_thread):
if host is None:
import pydev_localhost
host = pydev_localhost.get_localhost()
global connected
global bufferStdOutToServer
global bufferStdErrToServer
if not connected :
connected = True
bufferStdOutToServer = stdoutToServer
bufferStdErrToServer = stderrToServer
pydevd_vm_type.SetupType()
debugger = PyDB()
debugger.connect(host, port)
net = NetCommand(str(CMD_THREAD_CREATE), 0, '<xml><thread name="pydevd.reader" id="-1"/></xml>')
debugger.writer.addCommand(net)
net = NetCommand(str(CMD_THREAD_CREATE), 0, '<xml><thread name="pydevd.writer" id="-1"/></xml>')
debugger.writer.addCommand(net)
if bufferStdOutToServer:
sys.stdoutBuf = pydevd_io.IOBuf()
sys.stdout = pydevd_io.IORedirector(sys.stdout, sys.stdoutBuf) #@UndefinedVariable
if bufferStdErrToServer:
sys.stderrBuf = pydevd_io.IOBuf()
sys.stderr = pydevd_io.IORedirector(sys.stderr, sys.stderrBuf) #@UndefinedVariable
debugger.SetTraceForFrameAndParents(GetFrame(), False)
t = threadingCurrentThread()
try:
additionalInfo = t.additionalInfo
except AttributeError:
additionalInfo = PyDBAdditionalThreadInfo()
t.additionalInfo = additionalInfo
while not debugger.readyToRun:
time.sleep(0.1) # busy wait until we receive run command
if suspend:
debugger.setSuspend(t, CMD_SET_BREAK)
        #note that we do this through pydevd_tracing.SetTrace so that the tracing
        #change is not reported to the user!
pydevd_tracing.SetTrace(debugger.trace_dispatch)
if not trace_only_current_thread:
#Trace future threads?
try:
#not available in jython!
threading.settrace(debugger.trace_dispatch) # for all future threads
except:
pass
try:
thread.start_new_thread = pydev_start_new_thread
thread.start_new = pydev_start_new_thread
except:
pass
PyDBCommandThread(debugger).start()
else:
#ok, we're already in debug mode, with all set, so, let's just set the break
debugger = GetGlobalDebugger()
debugger.SetTraceForFrameAndParents(GetFrame(), False)
t = threadingCurrentThread()
try:
additionalInfo = t.additionalInfo
except AttributeError:
additionalInfo = PyDBAdditionalThreadInfo()
t.additionalInfo = additionalInfo
pydevd_tracing.SetTrace(debugger.trace_dispatch)
if not trace_only_current_thread:
#Trace future threads?
try:
#not available in jython!
threading.settrace(debugger.trace_dispatch) # for all future threads
except:
pass
try:
thread.start_new_thread = pydev_start_new_thread
thread.start_new = pydev_start_new_thread
except:
pass
if suspend:
debugger.setSuspend(t, CMD_SET_BREAK)
#=======================================================================================================================
# main
#=======================================================================================================================
if __name__ == '__main__':
sys.stderr.write("pydev debugger: starting\n")
# parse the command line. --file is our last argument that is required
try:
setup = processCommandLine(sys.argv)
except ValueError:
traceback.print_exc()
usage(1)
    #since by the time we get here all our imports are already resolved, the psyco module can be
    #replaced and we'll still get the speedups in the debugger, as those functions
    #are already compiled at this time.
try:
import psyco
except ImportError:
if hasattr(sys, 'exc_clear'): #jython does not have it
sys.exc_clear() #don't keep the traceback -- clients don't want to see it
pass #that's ok, no need to mock psyco if it's not available anyways
else:
#if it's available, let's change it for a stub (pydev already made use of it)
import pydevd_psyco_stub
sys.modules['psyco'] = pydevd_psyco_stub
PydevdLog(2, "Executing file ", setup['file'])
PydevdLog(2, "arguments:", str(sys.argv))
pydevd_vm_type.SetupType(setup.get('vm_type', None))
DebugInfoHolder.DEBUG_RECORD_SOCKET_READS = setup.get('DEBUG_RECORD_SOCKET_READS', False)
debugger = PyDB()
debugger.connect(setup['client'], setup['port'])
connected = True #Mark that we're connected when started from inside eclipse.
debugger.run(setup['file'], None, None)
| epl-1.0 |
texcaltech/windmilltownhomes-old | django/utils/http.py | 12 | 3871 | import re
import urllib
from email.Utils import formatdate
from django.utils.encoding import smart_str, force_unicode
from django.utils.functional import allow_lazy
ETAG_MATCH = re.compile(r'(?:W/)?"((?:\\.|[^"])*)"')
def urlquote(url, safe='/'):
"""
A version of Python's urllib.quote() function that can operate on unicode
strings. The url is first UTF-8 encoded before quoting. The returned string
can safely be used as part of an argument to a subsequent iri_to_uri() call
without double-quoting occurring.
"""
return force_unicode(urllib.quote(smart_str(url), smart_str(safe)))
urlquote = allow_lazy(urlquote, unicode)
def urlquote_plus(url, safe=''):
"""
A version of Python's urllib.quote_plus() function that can operate on
unicode strings. The url is first UTF-8 encoded before quoting. The
returned string can safely be used as part of an argument to a subsequent
iri_to_uri() call without double-quoting occurring.
"""
return force_unicode(urllib.quote_plus(smart_str(url), smart_str(safe)))
urlquote_plus = allow_lazy(urlquote_plus, unicode)
def urlencode(query, doseq=0):
"""
A version of Python's urllib.urlencode() function that can operate on
    unicode strings. The parameters are first cast to UTF-8 encoded strings and
then encoded as per normal.
"""
if hasattr(query, 'items'):
query = query.items()
return urllib.urlencode(
[(smart_str(k),
isinstance(v, (list,tuple)) and [smart_str(i) for i in v] or smart_str(v))
for k, v in query],
doseq)
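# Illustrative examples (doctest style, not part of the original module):
#
#   >>> urlquote(u'Paris & Orl\xe9ans')
#   u'Paris%20%26%20Orl%C3%A9ans'
#   >>> urlencode({'next': u'/a&b/'})
#   'next=%2Fa%26b%2F'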
def cookie_date(epoch_seconds=None):
"""
Formats the time to ensure compatibility with Netscape's cookie standard.
Accepts a floating point number expressed in seconds since the epoch, in
UTC - such as that outputted by time.time(). If set to None, defaults to
the current time.
Outputs a string in the format 'Wdy, DD-Mon-YYYY HH:MM:SS GMT'.
"""
rfcdate = formatdate(epoch_seconds)
return '%s-%s-%s GMT' % (rfcdate[:7], rfcdate[8:11], rfcdate[12:25])
def http_date(epoch_seconds=None):
"""
Formats the time to match the RFC1123 date format as specified by HTTP
RFC2616 section 3.3.1.
Accepts a floating point number expressed in seconds since the epoch, in
UTC - such as that outputted by time.time(). If set to None, defaults to
the current time.
Outputs a string in the format 'Wdy, DD Mon YYYY HH:MM:SS GMT'.
"""
rfcdate = formatdate(epoch_seconds)
return '%s GMT' % rfcdate[:25]
# Base 36 functions: useful for generating compact URLs
def base36_to_int(s):
"""
    Converts a base 36 string to an integer
"""
return int(s, 36)
def int_to_base36(i):
"""
Converts an integer to a base36 string
"""
digits = "0123456789abcdefghijklmnopqrstuvwxyz"
factor = 0
# Find starting factor
while True:
factor += 1
if i < 36 ** factor:
factor -= 1
break
base36 = []
# Construct base36 representation
while factor >= 0:
j = 36 ** factor
base36.append(digits[i / j])
i = i % j
factor -= 1
return ''.join(base36)
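# Illustrative round-trips (doctest style, not part of the original module):
#
#   >>> int_to_base36(35)
#   'z'
#   >>> int_to_base36(36)
#   '10'
#   >>> base36_to_int('10')
#   36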
def parse_etags(etag_str):
"""
Parses a string with one or several etags passed in If-None-Match and
If-Match headers by the rules in RFC 2616. Returns a list of etags
without surrounding double quotes (") and unescaped from \<CHAR>.
"""
etags = ETAG_MATCH.findall(etag_str)
if not etags:
# etag_str has wrong format, treat it as an opaque string then
return [etag_str]
etags = [e.decode('string_escape') for e in etags]
return etags
def quote_etag(etag):
"""
    Wraps a string in double quotes escaping contents as necessary.
"""
return '"%s"' % etag.replace('\\', '\\\\').replace('"', '\\"')
| bsd-3-clause |
ddamiani/pyqtgraph | pyqtgraph/graphicsItems/MultiPlotItem.py | 49 | 2062 | # -*- coding: utf-8 -*-
"""
MultiPlotItem.py - Graphics item used for displaying an array of PlotItems
Copyright 2010 Luke Campagnola
Distributed under MIT/X11 license. See license.txt for more information.
"""
from numpy import ndarray
from . import GraphicsLayout
from ..metaarray import *
__all__ = ['MultiPlotItem']
class MultiPlotItem(GraphicsLayout.GraphicsLayout):
"""
Automatically generates a grid of plots from a multi-dimensional array
"""
def __init__(self, *args, **kwds):
GraphicsLayout.GraphicsLayout.__init__(self, *args, **kwds)
self.plots = []
def plot(self, data):
#self.layout.clear()
if hasattr(data, 'implements') and data.implements('MetaArray'):
if data.ndim != 2:
raise Exception("MultiPlot currently only accepts 2D MetaArray.")
ic = data.infoCopy()
ax = 0
for i in [0, 1]:
if 'cols' in ic[i]:
ax = i
break
#print "Plotting using axis %d as columns (%d plots)" % (ax, data.shape[ax])
for i in range(data.shape[ax]):
pi = self.addPlot()
self.nextRow()
sl = [slice(None)] * 2
sl[ax] = i
pi.plot(data[tuple(sl)])
#self.layout.addItem(pi, i, 0)
self.plots.append((pi, i, 0))
info = ic[ax]['cols'][i]
title = info.get('title', info.get('name', None))
units = info.get('units', None)
pi.setLabel('left', text=title, units=units)
info = ic[1-ax]
title = info.get('title', info.get('name', None))
units = info.get('units', None)
pi.setLabel('bottom', text=title, units=units)
else:
raise Exception("Data type %s not (yet?) supported for MultiPlot." % type(data))
def close(self):
for p in self.plots:
p[0].close()
self.plots = None
self.clear()
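# Illustrative sketch (not part of the original module): MultiPlotItem is
# normally reached through MultiPlotWidget, e.g. with a hypothetical 2D
# MetaArray `data` whose column axis carries per-channel metadata:
#
#   import pyqtgraph as pg
#   w = pg.MultiPlotWidget()
#   w.plot(data)  # one stacked PlotItem per column, labelled from the metadata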
| mit |
benkul/BBA | coach_init.py | 1 | 5208 | import random
import sqlite3
from game_variables import motivation, coach_off_iq, coach_def_iq, training, offense_playbook, defense_playbook, leadership, major_bonus
from coach_first_names import coach_first_names
from player_last_names import player_last_names
def create_stat(stat): # assumes a (min, max) tuple as input
    low = stat[0]  # helper function that aids in class object creation
    high = stat[1]
    selection = random.randrange(low, high)
    return selection
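# Illustrative sketch (not part of the original module): create_stat draws a
# random value from a (min, max) range tuple, e.g. the hypothetical
#
#   speed = create_stat((40, 100))  # some integer in [40, 99]; 100 is excluded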
#################################################
# Coach database structure for future reference
#
# database.execute('''CREATE TABLE coach_db (Id integer primary key,
# league_id REFERENCES league_table(Id),
# team_id REFERENCES team_db(Id),
# name, motivation, coach_off_iq, coach_def_iq, training,
# leadership, offense_playbook, defense_playbook, coach_rating)''')
#################################################
class Coach:
def __init__(self):
self.league_id = 0 # invalid pk value as default, needs to be overridden before being stuffed into db
self.team = 0 # invalid pk value as default, needs to be overridden before being stuffed into db
def update_coach(self): # updates every coach field except for name
connection = sqlite3.connect('league.db')
database = connection.cursor()
coach_attributes = (self.league_id, self.team, self.motivation, self.coach_off_iq, self.coach_def_iq, self.training, self.leadership, self.offense_playbook, self.defense_playbook, self.coach_rating, self.db_id)
database.execute('''UPDATE coach_db
SET league_id = ?,
team_id = ?,
motivation = ?,
coach_off_iq = ?,
coach_def_iq = ?,
training = ?,
leadership = ?,
offense_playbook = ?,
defense_playbook = ?,
coach_rating = ?
                            WHERE Id = ?''', coach_attributes)
print "coach", self.name, "updated"
connection.commit()
connection.close()
def create_coach(self, league_id):
self.league_id = league_id
self.name = random.choice(coach_first_names) + " " + random.choice(player_last_names)
self.motivation = create_stat(motivation)
self.coach_off_iq = create_stat(coach_off_iq)
self.coach_def_iq = create_stat(coach_def_iq)
self.training = create_stat(training)
self.leadership = create_stat(leadership)
self.offense_playbook = offense_playbook[str(random.randint(1,3))]
self.defense_playbook = defense_playbook[str(random.randint(1,3))]
        def rating_boost(): # because every coach should be good at at least one thing
            # assumes major_bonus returns the boosted value; ints are immutable,
            # so the result must be assigned back for the boost to take effect
            to_boost = random.randint(1,5)
            if to_boost == 1:
                self.motivation = major_bonus(self.motivation)
                return self.motivation
            elif to_boost == 2:
                self.coach_off_iq = major_bonus(self.coach_off_iq)
                return self.coach_off_iq
            elif to_boost == 3:
                self.coach_def_iq = major_bonus(self.coach_def_iq)
                return self.coach_def_iq
            elif to_boost == 4:
                self.training = major_bonus(self.training)
                return self.training
            elif to_boost == 5:
                self.leadership = major_bonus(self.leadership)
                return self.leadership
def coach_rating():
total = self.motivation + self.coach_off_iq + self.coach_def_iq + self.training + self.leadership
rating = int(total / 4.5)
return rating
def insert_coach(self): # puts the coach class object into the coach database table
connection = sqlite3.connect('league.db')
database = connection.cursor()
coach_attributes = (self.league_id, self.team, self.name, self.motivation, self.coach_off_iq, self.coach_def_iq, self.training, self.leadership, self.offense_playbook, self.defense_playbook, self.coach_rating)
database.execute('''INSERT INTO coach_db
(league_id, team_id, name, motivation, coach_off_iq, coach_def_iq, training,leadership, offense_playbook, defense_playbook, coach_rating)
VALUES(?,?,?,?,?,?,?,?,?,?,?)''', coach_attributes)
connection.commit()
self.db_id = database.lastrowid
connection.close()
rating_boost()
self.coach_rating = coach_rating()
insert_coach(self)
def load_coaches(league_pk):
connection = sqlite3.connect('league.db')
database = connection.cursor()
league_id = league_pk
coach_pool = []
database.execute('''SELECT league_id, team_id, name, motivation, coach_off_iq, coach_def_iq, training,leadership, offense_playbook, defense_playbook, coach_rating, Id FROM coach_db WHERE league_id = ?''', league_id)
#for coach in range(number_of_coaches):
coach = 0
coach_attributes = database.fetchone()
while coach_attributes != None:
coach_pool.append(Coach())
print "attempting coach resurrection"
coach_pool[coach].league_id = coach_attributes[0]
coach_pool[coach].team = coach_attributes[1]
coach_pool[coach].name = coach_attributes[2]
coach_pool[coach].motivation = coach_attributes[3]
coach_pool[coach].coach_off_iq = coach_attributes[4]
coach_pool[coach].coach_def_iq = coach_attributes[5]
coach_pool[coach].training = coach_attributes[6]
coach_pool[coach].leadership = coach_attributes[7]
coach_pool[coach].offense_playbook = coach_attributes[8]
coach_pool[coach].defense_playbook = coach_attributes[9]
coach_pool[coach].coach_rating = coach_attributes[10]
coach_pool[coach].db_id = coach_attributes[11]
print coach_pool[coach].name, " resurrected"
coach += 1
coach_attributes = database.fetchone()
connection.commit()
connection.close()
return coach_pool
| mit |
ka7eh/django-oscar | tests/integration/offer/priority_offers_tests.py | 55 | 1700 | import mock
import datetime
from django.test import TestCase
from django.utils import timezone
from oscar.core.loading import get_model
from oscar.apps.offer import utils
from oscar.test import factories
Voucher = get_model('voucher', 'Voucher')
class TestPriorityOffers(TestCase):
def test_site_offers_are_ordered(self):
factories.create_offer(name="A", priority=0)
factories.create_offer(name="B", priority=7)
factories.create_offer(name="C", priority=5)
factories.create_offer(name="D", priority=7)
factories.create_offer(name="E", priority=1)
offers = utils.Applicator().get_site_offers()
ordered_names = [offer.name for offer in offers]
self.assertEqual(["B", "D", "C", "E", "A"], ordered_names)
def test_basket_offers_are_ordered(self):
voucher = Voucher.objects.create(
name="Test voucher",
code="test",
start_datetime=timezone.now(),
end_datetime=timezone.now() + datetime.timedelta(days=12))
voucher.offers = [
factories.create_offer(name="A", priority=0),
factories.create_offer(name="B", priority=7),
factories.create_offer(name="C", priority=5),
factories.create_offer(name="D", priority=7),
factories.create_offer(name="E", priority=1),
]
basket = factories.create_basket()
user = mock.Mock()
# Apply voucher to basket
basket.vouchers.add(voucher)
offers = utils.Applicator().get_basket_offers(basket, user)
ordered_names = [offer.name for offer in offers]
self.assertEqual(["B", "D", "C", "E", "A"], ordered_names)
| bsd-3-clause |
sebastianludwig/SensationDriver | src/sensationdriver/actor.py | 1 | 9850 | import asyncio
import time
import traceback
from sortedcontainers import SortedDict
from . import platform
from . import helper
if platform.is_raspberry():
from adafruit import pca9685
from adafruit import wirebus
else:
from .dummy import pca9685
from .dummy import wirebus
def parse_config(config, loop=None, logger=None):
# {
# "drivers": [<Driver>],
# "regions": {
# "LEFT_HAND": [<Actor>]
# }
# }
def driver_for_address(drivers, address, i2c_bus_number):
if address not in drivers:
if not wirebus.I2C.isDeviceAnswering(address, i2c_bus_number):
return None
driver = pca9685.Driver(address, i2c_bus_number, logger=logger)
drivers[address] = driver
return drivers[address]
loop = loop if loop is not None else asyncio.get_event_loop()
vibration_config = config['vibration']
global_actor_mapping_curve_degree = vibration_config.get('actor_mapping_curve_degree', None)
global_actor_min_intensity = vibration_config.get('actor_min_intensity', None)
global_actor_min_intensity_warmup = vibration_config.get('actor_min_intensity_warmup', None)
global_actor_min_instant_intensity = vibration_config.get('actor_min_instant_intensity', None)
drivers = {} # driver_address -> driver
regions = {} # region_name -> actor_index -> actor
for region_config in vibration_config['regions']:
        driver_address = region_config['driver_address']
        if type(driver_address) is str:
            driver_address = int(driver_address, 16) if driver_address.startswith('0x') else int(driver_address)
        driver = driver_for_address(drivers, driver_address, region_config['i2c_bus_number'])
        if driver is None:
            if logger is not None:
                logger.error("No driver found at address 0x%02X on I2C bus %d for region %s - ignoring region", driver_address, region_config['i2c_bus_number'], region_config['name'])
continue
if region_config['name'] not in regions:
regions[region_config['name']] = {}
region_actor_mapping_curve_degree = region_config.get('actor_mapping_curve_degree', global_actor_mapping_curve_degree)
region_actor_min_intensity = region_config.get('actor_min_intensity', global_actor_min_intensity)
region_actor_min_intensity_warmup = region_config.get('actor_min_intensity_warmup', global_actor_min_intensity_warmup)
region_actor_min_instant_intensity = region_config.get('actor_min_instant_intensity', global_actor_min_instant_intensity)
region_actors = regions[region_config['name']]
for actor_config in region_config['actors']:
if actor_config['index'] in region_actors:
if logger is not None:
logger.error("Multiple actors configured with index %d in region %s - ignoring subsequent definitions", actor_config['index'], region_config['name'])
continue
else:
vibration_motor = VibrationMotor(driver=driver, outlet=actor_config['outlet'], index_in_region=actor_config['index'], position=actor_config['position'], loop=loop, logger=logger)
mapping_curve_degree = actor_config.get('mapping_curve_degree', region_actor_mapping_curve_degree)
min_intensity = actor_config.get('min_intensity', region_actor_min_intensity)
min_intensity_warmup = actor_config.get('min_intensity_warmup', region_actor_min_intensity_warmup)
min_instant_intensity = actor_config.get('min_instant_intensity', region_actor_min_instant_intensity)
if mapping_curve_degree is not None:
vibration_motor.mapping_curve_degree = mapping_curve_degree
if min_intensity is not None:
vibration_motor.min_intensity = min_intensity
if min_intensity_warmup is not None:
vibration_motor.min_intensity_warmup = min_intensity_warmup
if min_instant_intensity is not None:
vibration_motor.min_instant_intensity = min_instant_intensity
region_actors[actor_config['index']] = vibration_motor
for region_name in regions:
regions[region_name] = list(regions[region_name].values())
return { "drivers": list(drivers.values()), "regions": regions }
class PrioritizedIntensity(object):
_MIN_VALUE = 0.005
def __init__(self):
self._values = SortedDict()
def set(self, value, priority=100):
value = float(value)
if value < self._MIN_VALUE and priority in self._values:
del self._values[priority]
else:
self._values[priority] = value
def eval(self):
if not self._values:
return 0.0
return self._values[self._values.iloc[- 1]]
def top_priority(self):
if not self._values:
return 0
return self._values.keys()[len(self._values) - 1]
def reset(self):
self._values.clear()
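# Illustrative sketch (not part of the original module): the highest-priority
# value wins, and setting a near-zero value removes that priority's entry.
#
#   p = PrioritizedIntensity()
#   p.set(0.4, priority=100)
#   p.set(0.9, priority=200)
#   p.eval()                  # -> 0.9 (priority 200 wins)
#   p.set(0.0, priority=200)  # below _MIN_VALUE: removes the 200 entry
#   p.eval()                  # -> 0.4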
class VibrationMotor(object):
    _SENSITIVITY = 0.005 # ignore any changes below this value and treat values below it as "motor off"
def __init__(self, driver, outlet, index_in_region, position=None, loop=None, logger=None):
self._loop = loop if loop is not None else asyncio.get_event_loop()
self.driver = driver
self.outlet = outlet
self.index_in_region = index_in_region
self.position = position
self.logger = logger
self.profiler = None
self.mapping_curve_degree = 1.5 # degree of the function used to map intensity values from [0, 1] to the supported motor range. Use '2' for square, '3' for cubic and so on. No matter which degree, it is ensured an intensity of 0 is always off and an intensity of 1 always equals full motor intensity. Only supports positive values.
        self.min_intensity = 0.3 # minimum intensity at which the motor will keep running (maybe after being started at a higher intensity)
self.min_instant_intensity = 0.5 # minimum intensity that can be applied to the motor directly
        self.min_intensity_warmup = 0.2 # how long the motor needs to run at min_instant_intensity before it's okay to switch down to min_intensity
self._intensity = PrioritizedIntensity()
self._target_intensity = self._intensity.eval()
self.__current_intensity = 0
self._running_since = None
def _profile(self, action, *args):
if self.profiler is not None:
self.profiler.log(action, *args)
def _map_intensity(self, intensity):
return self.min_intensity + (1 - self.min_intensity) * intensity ** self.mapping_curve_degree
def _running_time(self):
if self._running_since is None:
return 0
else:
return time.time() - self._running_since
def _can_set_directly(self, intensity):
if intensity < self._SENSITIVITY: # turn off
return True
if intensity >= self.min_instant_intensity: # intense enough to start instantly
return True
if self._running_time() > self.min_intensity_warmup: # running long enough
return True
return False
@property
def _current_intensity(self):
return self.__current_intensity
@_current_intensity.setter
def _current_intensity(self, value):
if abs(value - self.__current_intensity) < self._SENSITIVITY:
return
if self.logger is not None:
self.logger.debug("setting %s to %.3f", self.position, value)
self.__current_intensity = value
self._profile("set_pwm", self.index_in_region, self.__current_intensity)
self.driver.setPWM(self.outlet, 0, self.__current_intensity)
if value < self._SENSITIVITY:
self._running_since = None
elif self._running_since is None:
self._running_since = time.time()
def intensity(self):
return self._intensity.eval()
@asyncio.coroutine
def set_intensity(self, intensity, priority=100):
intensity = float(intensity)
if (intensity < 0 or intensity > 1) and self.logger:
self.logger.warning('clamping intensity - not in interval [0, 1]: %s' % intensity)
intensity = max(min(intensity, 1), 0)
if __debug__:
self.logger.warning("".join(traceback.format_stack()))
self._intensity.set(intensity, priority)
if self._intensity.eval() < self._SENSITIVITY:
self._target_intensity = 0
else:
self._target_intensity = self._map_intensity(self._intensity.eval())
if self._can_set_directly(self._target_intensity):
self._profile("set_intensity", self.index_in_region, intensity, priority, self._target_intensity, 'direct')
self._current_intensity = self._target_intensity
future = asyncio.Future()
future.set_result(self._target_intensity)
return future
else:
self._profile("set_intensity", self.index_in_region, intensity, priority, self._target_intensity, 'delayed')
return helper.create_exception_reporting_task(self.set_intensity_delayed(), loop=self._loop, logger=self.logger)
@asyncio.coroutine
def set_intensity_delayed(self):
if self._current_intensity < self.min_intensity:
self._current_intensity = self.min_instant_intensity
delay = self.min_intensity_warmup - self._running_time()
yield from asyncio.sleep(delay)
self._current_intensity = self._target_intensity
| mit |
CoolCloud/ansible | lib/ansible/plugins/callback/osx_say.py | 144 | 2882 |
# (C) 2012, Michael DeHaan, <[email protected]>
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import subprocess
import os
from ansible.plugins.callback import CallbackBase
FAILED_VOICE="Zarvox"
REGULAR_VOICE="Trinoids"
HAPPY_VOICE="Cellos"
LASER_VOICE="Princess"
SAY_CMD="/usr/bin/say"
class CallbackModule(CallbackBase):
"""
makes Ansible much more exciting on OS X.
"""
CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'notification'
CALLBACK_NAME = 'osx_say'
def __init__(self, display):
super(CallbackModule, self).__init__(display)
        # the plugin disables itself if say is not present
# ansible will not call any callback if disabled is set to True
if not os.path.exists(SAY_CMD):
self.disabled = True
self._display.warning("%s does not exist, plugin %s disabled" % (SAY_CMD, os.path.basename(__file__)) )
def say(self, msg, voice):
subprocess.call([SAY_CMD, msg, "--voice=%s" % (voice)])
def runner_on_failed(self, host, res, ignore_errors=False):
self.say("Failure on host %s" % host, FAILED_VOICE)
def runner_on_ok(self, host, res):
self.say("pew", LASER_VOICE)
def runner_on_skipped(self, host, item=None):
self.say("pew", LASER_VOICE)
def runner_on_unreachable(self, host, res):
self.say("Failure on host %s" % host, FAILED_VOICE)
def runner_on_async_ok(self, host, res, jid):
self.say("pew", LASER_VOICE)
def runner_on_async_failed(self, host, res, jid):
self.say("Failure on host %s" % host, FAILED_VOICE)
def playbook_on_start(self):
self.say("Running Playbook", REGULAR_VOICE)
def playbook_on_notify(self, host, handler):
self.say("pew", LASER_VOICE)
def playbook_on_task_start(self, name, is_conditional):
if not is_conditional:
self.say("Starting task: %s" % name, REGULAR_VOICE)
else:
self.say("Notifying task: %s" % name, REGULAR_VOICE)
def playbook_on_setup(self):
self.say("Gathering facts", REGULAR_VOICE)
def playbook_on_play_start(self, name):
self.say("Starting play: %s" % name, HAPPY_VOICE)
def playbook_on_stats(self, stats):
self.say("Play complete", HAPPY_VOICE)
| gpl-3.0 |
madebr/ADuC832_demoproject | loader/diffhex.py | 1 | 2206 | #!/usr/bin/env python3
#=============================================================================
#Copyright (C) 2016, Anonymous Maarten
#
#This file is part of ADuC832 demoproject.
#
#This program is free software; you can redistribute it and/or
#modify it under the terms of the GNU General Public License
#as published by the Free Software Foundation; either version 2
#of the License, or (at your option) any later version.
#
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#
#You should have received a copy of the GNU General Public License
#along with this program; if not, write to the Free Software
#Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#=============================================================================
import intelhex
import sys
def diff_dumps(ih1, ih2, tofile=None, name1="a", name2="b", n_context=3):
"""Diff 2 IntelHex objects and produce unified diff output for their
hex dumps.
@param ih1 first IntelHex object to compare
@param ih2 second IntelHex object to compare
@param tofile file-like object to write output
@param name1 name of the first hex file to show in the diff header
    @param name2 name of the second hex file to show in the diff header
@param n_context number of context lines in the unidiff output
"""
from io import StringIO
def prepare_lines(ih):
sio = StringIO()
ih.dump(sio)
dump = sio.getvalue()
lines = dump.splitlines()
return lines
a = prepare_lines(ih1)
b = prepare_lines(ih2)
import difflib
result = list(difflib.unified_diff(a, b, fromfile=name1, tofile=name2, n=n_context, lineterm=''))
if tofile is None:
tofile = sys.stdout
output = '\n'.join(result)+'\n'
tofile.write(output)
ihx1 = intelhex.IntelHex(sys.argv[1])
ihx2 = intelhex.IntelHex(sys.argv[2])
ihx1.padding = b'\xff'
ihx2.padding = b'\xff'
diff_dumps(ihx1, ihx2, name1=sys.argv[1], name2=sys.argv[2])
| gpl-2.0 |
andela-ooshodi/codango-debug | codango/comments/views.py | 1 | 1265 | import json
from django.http import HttpResponse
from django.views.generic import View
from resources.models import Resource
from comments.models import Comment
from comments.forms import CommentForm
from datetime import datetime
# Create your views here.
class CommentAction(View):
def delete(self, request, **kwargs):
comment_id = kwargs['comment_id']
comment = Comment.objects.filter(id=comment_id).first()
comment.delete()
return HttpResponse("success", content_type='text/plain')
def post(self, request, *args, **kwargs):
form = CommentForm(request.POST)
comment = form.save(commit=False)
comment.resource = Resource.objects.filter(
id=request.POST.get('resource_id')).first()
comment.author = self.request.user
comment.save()
return HttpResponse("success", content_type='text/plain')
def put(self, request, *args, **kwargs):
body = json.loads(request.body)
comment_id = kwargs['comment_id']
comment = Comment.objects.filter(id=comment_id).first()
comment.content = body['content']
comment.date_modified = datetime.now()
comment.save()
return HttpResponse("success", content_type='text/plain')
| mit |
gerald-yang/ubuntu-iotivity-demo | snappy/grovepi/pygrovepi/grove_touch_sensor.py | 7 | 1895 | #!/usr/bin/env python
#
# GrovePi Example for using the Grove Touch Sensor (http://www.seeedstudio.com/wiki/Grove_-_Touch_Sensor)
#
# The GrovePi connects the Raspberry Pi and Grove sensors. You can learn more about GrovePi here: http://www.dexterindustries.com/GrovePi
#
# Have a question about this example? Ask on the forums here: http://www.dexterindustries.com/forum/?forum=grovepi
#
'''
## License
The MIT License (MIT)
GrovePi for the Raspberry Pi: an open source platform for connecting Grove Sensors to the Raspberry Pi.
Copyright (C) 2015 Dexter Industries
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
'''
import time
import grovepi
# Connect the Grove Touch Sensor to digital port D4
# SIG,NC,VCC,GND
touch_sensor = 4
grovepi.pinMode(touch_sensor,"INPUT")
while True:
try:
print (grovepi.digitalRead(touch_sensor))
time.sleep(.5)
except IOError:
print ("Error")
| apache-2.0 |
crossbario/autobahn-python | autobahn/wamp/component.py | 2 | 37916 | ###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
import itertools
import random
from functools import partial
import txaio
from autobahn.util import ObservableMixin
from autobahn.websocket.util import parse_url as parse_ws_url
from autobahn.rawsocket.util import parse_url as parse_rs_url
from autobahn.wamp.types import ComponentConfig, SubscribeOptions, RegisterOptions
from autobahn.wamp.exception import SessionNotReady, ApplicationError
from autobahn.wamp.auth import create_authenticator, IAuthenticator
from autobahn.wamp.serializer import SERID_TO_SER
__all__ = (
'Component'
)
def _validate_endpoint(endpoint, check_native_endpoint=None):
"""
Check a WAMP connecting endpoint configuration.
"""
if check_native_endpoint:
check_native_endpoint(endpoint)
elif not isinstance(endpoint, dict):
raise ValueError(
"'endpoint' must be a dict"
)
# note, we're falling through here -- check_native_endpoint can
# disallow or allow dict-based config as it likes, but if it
# *does* allow a dict through, we want to check "base options"
# here so that both Twisted and asyncio don't have to check these
# things as well.
if isinstance(endpoint, dict):
# XXX what about filling in anything missing from the URL? Or
# is that only for when *nothing* is provided for endpoint?
if 'type' not in endpoint:
# could maybe just make tcp the default?
raise ValueError("'type' required in endpoint configuration")
if endpoint['type'] not in ['tcp', 'unix']:
raise ValueError('invalid type "{}" in endpoint'.format(endpoint['type']))
for k in endpoint.keys():
if k not in ['type', 'host', 'port', 'path', 'tls']:
raise ValueError(
"Invalid key '{}' in endpoint configuration".format(k)
)
if endpoint['type'] == 'tcp':
for k in ['host', 'port']:
if k not in endpoint:
raise ValueError(
"'{}' required in 'tcp' endpoint config".format(k)
)
for k in ['path']:
if k in endpoint:
raise ValueError(
"'{}' not valid in 'tcp' endpoint config".format(k)
)
elif endpoint['type'] == 'unix':
for k in ['path']:
if k not in endpoint:
raise ValueError(
"'{}' required for 'unix' endpoint config".format(k)
)
for k in ['host', 'port', 'tls']:
if k in endpoint:
raise ValueError(
"'{}' not valid in 'unix' endpoint config".format(k)
)
else:
assert False, 'should not arrive here'
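# Illustrative sketch (not part of the original module): endpoint configs that
# pass the checks above, with hypothetical host/path values:
#
#   _validate_endpoint({'type': 'tcp', 'host': '127.0.0.1', 'port': 8080})
#   _validate_endpoint({'type': 'unix', 'path': '/tmp/cb.sock'})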
def _create_transport(index, transport, check_native_endpoint=None):
"""
Internal helper to insert defaults and create _Transport instances.
:param transport: a (possibly valid) transport configuration
:type transport: dict
:returns: a _Transport instance
:raises: ValueError on invalid configuration
"""
if type(transport) != dict:
raise ValueError('invalid type {} for transport configuration - must be a dict'.format(type(transport)))
valid_transport_keys = [
'type', 'url', 'endpoint', 'serializer', 'serializers', 'options',
'max_retries', 'max_retry_delay', 'initial_retry_delay',
'retry_delay_growth', 'retry_delay_jitter', 'proxy',
]
for k in transport.keys():
if k not in valid_transport_keys:
raise ValueError(
"'{}' is not a valid configuration item".format(k)
)
kind = 'websocket'
if 'type' in transport:
if transport['type'] not in ['websocket', 'rawsocket']:
raise ValueError('Invalid transport type {}'.format(transport['type']))
kind = transport['type']
else:
transport['type'] = 'websocket'
if 'proxy' in transport and kind != 'websocket':
raise ValueError(
"proxy= only supported for type=websocket transports"
)
proxy = transport.get("proxy", None)
if proxy is not None:
for k in proxy.keys():
if k not in ['host', 'port']:
raise ValueError(
"Unknown key '{}' in proxy config".format(k)
)
for k in ['host', 'port']:
if k not in proxy:
raise ValueError(
"Proxy config requires '{}'".formaT(k)
)
options = dict()
if 'options' in transport:
options = transport['options']
if not isinstance(options, dict):
raise ValueError(
'options must be a dict, not {}'.format(type(options))
)
if kind == 'websocket':
for key in ['url']:
if key not in transport:
raise ValueError("Transport requires '{}' key".format(key))
# endpoint not required; we will deduce from URL if it's not provided
# XXX not in the branch I rebased; can this go away? (is it redundant??)
if 'endpoint' not in transport:
is_secure, host, port, resource, path, params = parse_ws_url(transport['url'])
endpoint_config = {
'type': 'tcp',
'host': host,
'port': port,
'tls': is_secure,
}
else:
# note: we're avoiding mutating the incoming "configuration"
# dict, so this should avoid that too...
endpoint_config = transport['endpoint']
_validate_endpoint(endpoint_config, check_native_endpoint)
if 'serializer' in transport:
raise ValueError("'serializer' is only for rawsocket; use 'serializers'")
if 'serializers' in transport:
if not isinstance(transport['serializers'], (list, tuple)):
raise ValueError("'serializers' must be a list of strings")
if not all([
isinstance(s, (str, str))
for s in transport['serializers']]):
raise ValueError("'serializers' must be a list of strings")
valid_serializers = SERID_TO_SER.keys()
for serial in transport['serializers']:
if serial not in valid_serializers:
raise ValueError(
"Invalid serializer '{}' (expected one of: {})".format(
serial,
', '.join([repr(s) for s in valid_serializers]),
)
)
serializer_config = transport.get('serializers', ['cbor', 'json'])
elif kind == 'rawsocket':
if 'endpoint' not in transport:
if transport['url'].startswith('rs'):
                # try to parse RawSocket URL ..
isSecure, host, port = parse_rs_url(transport['url'])
elif transport['url'].startswith('ws'):
# try to parse WebSocket URL ..
isSecure, host, port, resource, path, params = parse_ws_url(transport['url'])
else:
                raise RuntimeError('invalid transport URL "{}"'.format(transport['url']))
if host == 'unix':
# here, "port" is actually holding the path on the host, eg "/tmp/file.sock"
endpoint_config = {
'type': 'unix',
'path': port,
}
else:
endpoint_config = {
'type': 'tcp',
'host': host,
'port': port,
}
else:
endpoint_config = transport['endpoint']
if 'serializers' in transport:
raise ValueError("'serializers' is only for websocket; use 'serializer'")
# always a list; len == 1 for rawsocket
if 'serializer' in transport:
if not isinstance(transport['serializer'], (str, str)):
raise ValueError("'serializer' must be a string")
serializer_config = [transport['serializer']]
else:
serializer_config = ['cbor']
else:
assert False, 'should not arrive here'
kw = {}
for key in ['max_retries', 'max_retry_delay', 'initial_retry_delay',
'retry_delay_growth', 'retry_delay_jitter']:
if key in transport:
kw[key] = transport[key]
return _Transport(
index,
kind=kind,
url=transport.get('url', None),
endpoint=endpoint_config,
serializers=serializer_config,
proxy=proxy,
options=options,
**kw
)
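# Illustrative sketch (not part of the original module): a minimal websocket
# transport configuration accepted by _create_transport, assuming a router at
# the hypothetical URL below (the endpoint config is deduced from the URL):
#
#   transport = _create_transport(0, {
#       'type': 'websocket',
#       'url': 'ws://localhost:8080/ws',
#       'serializers': ['json'],
#   })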
class _Transport(object):
"""
Thin-wrapper for WAMP transports used by a Connection.
"""
def __init__(self, idx, kind, url, endpoint, serializers,
max_retries=-1,
max_retry_delay=300,
initial_retry_delay=1.5,
retry_delay_growth=1.5,
retry_delay_jitter=0.1,
proxy=None,
options=None):
"""
"""
if options is None:
options = dict()
self.idx = idx
self.type = kind
self.url = url
self.endpoint = endpoint
self.options = options
self.serializers = serializers
if self.type == 'rawsocket' and len(serializers) != 1:
raise ValueError(
"'rawsocket' transport requires exactly one serializer"
)
self.max_retries = max_retries
self.max_retry_delay = max_retry_delay
self.initial_retry_delay = initial_retry_delay
self.retry_delay_growth = retry_delay_growth
self.retry_delay_jitter = retry_delay_jitter
self.proxy = proxy # this is a dict of proxy config
# used via can_reconnect() and failed() to record this
# transport is never going to work
self._permanent_failure = False
self.reset()
def reset(self):
"""
set connection failure rates and retry-delay to initial values
"""
self.connect_attempts = 0
        self.connect_successes = 0
self.connect_failures = 0
self.retry_delay = self.initial_retry_delay
def failed(self):
"""
Mark this transport as failed, meaning we won't try to connect to
it any longer (that is: can_reconnect() will always return
        False after calling this).
"""
self._permanent_failure = True
def can_reconnect(self):
if self._permanent_failure:
return False
if self.max_retries == -1:
return True
return self.connect_attempts < self.max_retries + 1
def next_delay(self):
if self.connect_attempts == 0:
# if we never tried before, try immediately
return 0
elif self.max_retries != -1 and self.connect_attempts >= self.max_retries + 1:
raise RuntimeError('max reconnects reached')
else:
self.retry_delay = self.retry_delay * self.retry_delay_growth
self.retry_delay = random.normalvariate(self.retry_delay, self.retry_delay * self.retry_delay_jitter)
if self.retry_delay > self.max_retry_delay:
self.retry_delay = self.max_retry_delay
return self.retry_delay
def describe_endpoint(self):
"""
returns a human-readable description of the endpoint
"""
if isinstance(self.endpoint, dict):
return self.endpoint['type']
return repr(self.endpoint)
# this could probably implement twisted.application.service.IService
# if we wanted; or via an adapter...which just adds a startService()
# and stopService() [latter can be async]
class Component(ObservableMixin):
"""
A WAMP application component. A component holds configuration for
(and knows how to create) transports and sessions.
"""
session_factory = None
"""
The factory of the session we will instantiate.
"""
def subscribe(self, topic, options=None, check_types=False):
"""
A decorator as a shortcut for subscribing during on-join
For example::
@component.subscribe(
"some.topic",
options=SubscribeOptions(match='prefix'),
)
def topic(*args, **kw):
print("some.topic({}, {}): event received".format(args, kw))
"""
assert options is None or isinstance(options, SubscribeOptions)
def decorator(fn):
def do_subscription(session, details):
return session.subscribe(fn, topic=topic, options=options, check_types=check_types)
self.on('join', do_subscription)
return fn
return decorator
def register(self, uri, options=None, check_types=False):
"""
A decorator as a shortcut for registering during on-join
For example::
@component.register(
"com.example.add",
options=RegisterOptions(invoke='round_robin'),
)
def add(*args, **kw):
print("add({}, {}): event received".format(args, kw))
"""
assert options is None or isinstance(options, RegisterOptions)
def decorator(fn):
def do_registration(session, details):
return session.register(fn, procedure=uri, options=options, check_types=check_types)
self.on('join', do_registration)
return fn
return decorator
def __init__(self, main=None, transports=None, config=None, realm='realm1', extra=None,
authentication=None, session_factory=None, is_fatal=None):
"""
:param main: After a transport has been connected and a session
has been established and joined to a realm, this (async)
procedure will be run until it finishes -- which signals that
the component has run to completion. In this case, it usually
doesn't make sense to use the ``on_*`` kwargs. If you do not
pass a main() procedure, the session will not be closed
(unless you arrange for .leave() to be called).
:type main: callable taking two args ``reactor`` and ``ISession``
:param transports: Transport configurations for creating
transports. Each transport can be a WAMP URL, or a dict
containing the following configuration keys:
- ``type`` (optional): ``websocket`` (default) or ``rawsocket``
- ``url``: the router URL
- ``endpoint`` (optional, derived from URL if not provided):
- ``type``: "tcp" or "unix"
- ``host``, ``port``: only for TCP
- ``path``: only for unix
- ``timeout``: in seconds
- ``tls``: ``True`` or (under Twisted) an
              ``twisted.internet.interfaces.IOpenSSLClientConnectionCreator``
instance (such as returned from
``twisted.internet.ssl.optionsForClientTLS``) or
``CertificateOptions`` instance.
- ``max_retries``: Maximum number of reconnection attempts. Unlimited if set to -1.
            - ``initial_retry_delay``: Initial delay for reconnection attempt in seconds (Default: 1.5s).
            - ``max_retry_delay``: Maximum delay for reconnection attempts in seconds (Default: 300s).
            - ``retry_delay_growth``: The growth factor applied to the retry delay between reconnection attempts (Default 1.5).
            - ``retry_delay_jitter``: A jitter factor applied to the growing retry delay between reconnection attempts (Default 0.1).
- ``serializer`` (only for raw socket): Specify an accepted serializer (e.g. 'json', 'msgpack', 'cbor', 'ubjson', 'flatbuffers')
- ``serializers``: Specify list of accepted serializers
- ``options``: tbd
- ``proxy``: tbd
:type transports: None or str or list
:param realm: the realm to join
:type realm: str
:param authentication: configuration of authenticators
:type authentication: dict
:param session_factory: if None, ``ApplicationSession`` is
used, otherwise a callable taking a single ``config`` argument
that is used to create a new `ApplicationSession` instance.
:param is_fatal: a callable taking a single argument, an
``Exception`` instance. The callable should return ``True`` if
this error is "fatal", meaning we should not try connecting to
the current transport again. The default behavior (on None) is
to always return ``False``
"""
self.set_valid_events(
[
'start', # fired by base class
'connect', # fired by ApplicationSession
'join', # fired by ApplicationSession
'ready', # fired by ApplicationSession
'leave', # fired by ApplicationSession
'disconnect', # fired by ApplicationSession
'connectfailure', # fired by base class
]
)
if is_fatal is not None and not callable(is_fatal):
raise ValueError('"is_fatal" must be a callable or None')
self._is_fatal = is_fatal
if main is not None and not callable(main):
raise ValueError('"main" must be a callable if given')
self._entry = main
# use WAMP-over-WebSocket to localhost when no transport is specified at all
if transports is None:
transports = 'ws://127.0.0.1:8080/ws'
# allows to provide an URL instead of a list of transports
if isinstance(transports, (str, str)):
url = transports
# 'endpoint' will get filled in by parsing the 'url'
transport = {
'type': 'websocket',
'url': url,
}
transports = [transport]
# allows a single transport instead of a list (convenience)
elif isinstance(transports, dict):
transports = [transports]
# XXX do we want to be able to provide an infinite iterable of
# transports here? e.g. a generator that makes new transport
# to try?
# now check and save list of transports
self._transports = []
for idx, transport in enumerate(transports):
self._transports.append(
_create_transport(idx, transport, self._check_native_endpoint)
)
# XXX should have some checkconfig support
self._authentication = authentication or {}
if session_factory:
self.session_factory = session_factory
self._realm = realm
self._extra = extra
self._delay_f = None
self._done_f = None
self._session = None
self._stopping = False
def _can_reconnect(self):
# check if any of our transport has any reconnect attempt left
for transport in self._transports:
if transport.can_reconnect():
return True
return False
def _start(self, loop=None):
"""
This starts the Component, which means it will start connecting
(and re-connecting) to its configured transports. A Component
runs until it is "done", which means one of:
- There was a "main" function defined, and it completed successfully;
- Something called ``.leave()`` on our session, and we left successfully;
- ``.stop()`` was called, and completed successfully;
- none of our transports were able to connect successfully (failure);
:returns: a Future/Deferred which will resolve (to ``None``) when we are
"done" or with an error if something went wrong.
"""
# we can only be "start()ed" once before we stop .. but that
# doesn't have to be an error we can give back another future
# that fires when our "real" _done_f is completed.
if self._done_f is not None:
d = txaio.create_future()
def _cb(arg):
txaio.resolve(d, arg)
txaio.add_callbacks(self._done_f, _cb, _cb)
return d
# this future will be returned, and thus has the semantics
# specified in the docstring.
self._done_f = txaio.create_future()
def _reset(arg):
"""
if the _done_f future is resolved (good or bad), we want to set it
to None in our class
"""
self._done_f = None
return arg
txaio.add_callbacks(self._done_f, _reset, _reset)
# Create a generator of transports that .can_reconnect()
transport_gen = itertools.cycle(self._transports)
# this is a 1-element list so we can set it from closures in
# this function
transport_candidate = [0]
def error(fail):
self._delay_f = None
if self._stopping:
# might be better to add framework-specific checks in
# subclasses to see if this is CancelledError (for
# Twisted) and whatever asyncio does .. but tracking
# if we're in the shutdown path is fine too
txaio.resolve(self._done_f, None)
else:
self.log.info("Internal error {msg}", msg=txaio.failure_message(fail))
self.log.debug("{tb}", tb=txaio.failure_format_traceback(fail))
txaio.reject(self._done_f, fail)
def attempt_connect(_):
self._delay_f = None
def handle_connect_error(fail):
# FIXME - make txaio friendly
# Can connect_f ever be in a cancelled state?
# if txaio.using_asyncio and isinstance(fail.value, asyncio.CancelledError):
# unrecoverable_error = True
self.log.debug('component failed: {error}', error=txaio.failure_message(fail))
self.log.debug('{tb}', tb=txaio.failure_format_traceback(fail))
# If this is a "fatal error" that will never work,
# we bail out now
if isinstance(fail.value, ApplicationError):
self.log.error("{msg}", msg=fail.value.error_message())
elif isinstance(fail.value, OSError):
# failed to connect entirely, like nobody
# listening etc.
self.log.info("Connection failed: {msg}", msg=txaio.failure_message(fail))
elif self._is_ssl_error(fail.value):
# Quoting pyOpenSSL docs: "Whenever
# [SSL.Error] is raised directly, it has a
# list of error messages from the OpenSSL
# error queue, where each item is a tuple
# (lib, function, reason). Here lib, function
# and reason are all strings, describing where
# and what the problem is. See err(3) for more
# information."
# (and 'args' is a 1-tuple containing the above
# 3-tuple...)
ssl_lib, ssl_func, ssl_reason = fail.value.args[0][0]
self.log.error("TLS failure: {reason}", reason=ssl_reason)
else:
self.log.error(
'Connection failed: {error}',
error=txaio.failure_message(fail),
)
if self._is_fatal is None:
is_fatal = False
else:
is_fatal = self._is_fatal(fail.value)
if is_fatal:
self.log.info("Error was fatal; failing transport")
transport_candidate[0].failed()
txaio.call_later(0, transport_check, None)
return
def notify_connect_error(fail):
chain_f = txaio.create_future()
# hmm, if connectfailure took a _Transport instead of
# (or in addition to?) self it could .failed() the
# transport and we could do away with the is_fatal
# listener?
handler_f = self.fire('connectfailure', self, fail.value)
txaio.add_callbacks(
handler_f,
lambda _: txaio.reject(chain_f, fail),
lambda _: txaio.reject(chain_f, fail)
)
return chain_f
def connect_error(fail):
notify_f = notify_connect_error(fail)
txaio.add_callbacks(notify_f, None, handle_connect_error)
def session_done(x):
txaio.resolve(self._done_f, None)
connect_f = txaio.as_future(
self._connect_once,
loop, transport_candidate[0],
)
txaio.add_callbacks(connect_f, session_done, connect_error)
def transport_check(_):
self.log.debug('Entering re-connect loop')
if not self._can_reconnect():
err_msg = "Component failed: Exhausted all transport connect attempts"
self.log.info(err_msg)
try:
raise RuntimeError(err_msg)
except RuntimeError as e:
txaio.reject(self._done_f, e)
return
while True:
transport = next(transport_gen)
if transport.can_reconnect():
transport_candidate[0] = transport
break
delay = transport.next_delay()
self.log.debug(
'trying transport {transport_idx} using connect delay {transport_delay}',
transport_idx=transport.idx,
transport_delay=delay,
)
self._delay_f = txaio.sleep(delay)
txaio.add_callbacks(self._delay_f, attempt_connect, error)
# issue our first event, then start the reconnect loop
start_f = self.fire('start', loop, self)
txaio.add_callbacks(start_f, transport_check, error)
return self._done_f
def stop(self):
self._stopping = True
if self._session and self._session.is_attached():
return self._session.leave()
elif self._delay_f:
# This cancel request will actually call the "error" callback of
# the _delay_f future. Nothing to worry about.
return txaio.as_future(txaio.cancel, self._delay_f)
# if (for some reason -- should we log warning here to figure
        # out if this can ever happen?) we've not fired _done_f, we
# do that now (causing our "main" to exit, and thus react() to
# quit)
if not txaio.is_called(self._done_f):
txaio.resolve(self._done_f, None)
return txaio.create_future_success(None)
def _connect_once(self, reactor, transport):
self.log.debug(
'connecting once using transport type "{transport_type}" '
'over endpoint "{endpoint_desc}"',
transport_type=transport.type,
endpoint_desc=transport.describe_endpoint(),
)
done = txaio.create_future()
# factory for ISession objects
def create_session():
cfg = ComponentConfig(self._realm, self._extra)
try:
self._session = session = self.session_factory(cfg)
for auth_name, auth_config in self._authentication.items():
if isinstance(auth_config, IAuthenticator):
session.add_authenticator(auth_config)
else:
authenticator = create_authenticator(auth_name, **auth_config)
session.add_authenticator(authenticator)
except Exception as e:
# couldn't instantiate session calls, which is fatal.
# let the reconnection logic deal with that
f = txaio.create_failure(e)
txaio.reject(done, f)
raise
else:
# hook up the listener to the parent so we can bubble
                # up events happening on the session onto the
# connection. This lets you do component.on('join',
# cb) which will work just as if you called
# session.on('join', cb) for every session created.
session._parent = self
# listen on leave events; if we get errors
# (e.g. no_such_realm), an on_leave can happen without
# an on_join before
def on_leave(session, details):
self.log.info(
"session leaving '{details.reason}'",
details=details,
)
if not txaio.is_called(done):
if details.reason in ["wamp.close.normal", "wamp.close.goodbye_and_out"]:
txaio.resolve(done, None)
else:
f = txaio.create_failure(
ApplicationError(details.reason, details.message)
)
txaio.reject(done, f)
session.on('leave', on_leave)
# if we were given a "main" procedure, we run through
# it completely (i.e. until its Deferred fires) and
# then disconnect this session
def on_join(session, details):
transport.connect_sucesses += 1
self.log.debug("session on_join: {details}", details=details)
d = txaio.as_future(self._entry, reactor, session)
def main_success(_):
self.log.debug("main_success")
def leave():
try:
session.leave()
except SessionNotReady:
# someone may have already called
# leave()
pass
txaio.call_later(0, leave)
def main_error(err):
self.log.debug("main_error: {err}", err=err)
txaio.reject(done, err)
session.disconnect()
txaio.add_callbacks(d, main_success, main_error)
if self._entry is not None:
session.on('join', on_join)
# listen on disconnect events. Note that in case we
# had a "main" procedure, we could have already
# resolve()'d our "done" future
def on_disconnect(session, was_clean):
self.log.debug(
"session on_disconnect: was_clean={was_clean}",
was_clean=was_clean,
)
if not txaio.is_called(done):
if not was_clean:
self.log.warn(
"Session disconnected uncleanly"
)
else:
                    # e.g. the session has left the realm and the transport was
                    # properly shut down; successfully finish the connection
txaio.resolve(done, None)
session.on('disconnect', on_disconnect)
# return the fresh session object
return session
transport.connect_attempts += 1
d = txaio.as_future(
self._connect_transport,
reactor, transport, create_session, done,
)
def on_error(err):
"""
            this may seem redundant after looking at _connect_transport, but
            it will handle a case where something goes wrong in
            _connect_transport itself -- as the only connection our
            caller has to this attempt is the 'done' future
"""
transport.connect_failures += 1
# something bad has happened, and maybe didn't get caught
# upstream yet
if not txaio.is_called(done):
txaio.reject(done, err)
txaio.add_callbacks(d, None, on_error)
return done
def on_join(self, fn):
"""
A decorator as a shortcut for listening for 'join' events.
For example::
@component.on_join
def joined(session, details):
print("Session {} joined: {}".format(session, details))
"""
self.on('join', fn)
def on_leave(self, fn):
"""
A decorator as a shortcut for listening for 'leave' events.
"""
self.on('leave', fn)
def on_connect(self, fn):
"""
A decorator as a shortcut for listening for 'connect' events.
"""
self.on('connect', fn)
def on_disconnect(self, fn):
"""
A decorator as a shortcut for listening for 'disconnect' events.
"""
self.on('disconnect', fn)
def on_ready(self, fn):
"""
A decorator as a shortcut for listening for 'ready' events.
"""
self.on('ready', fn)
def on_connectfailure(self, fn):
"""
A decorator as a shortcut for listening for 'connectfailure' events.
"""
self.on('connectfailure', fn)
def _run(reactor, components, done_callback):
"""
Internal helper. Use "run" method from autobahn.twisted.wamp or
autobahn.asyncio.wamp
This is the generic parts of the run() method so that there's very
little code in the twisted/asyncio specific run() methods.
    This is called by react() (or run_until_complete()) so any errors
coming out of this should be handled properly. Logging will
already be started.
"""
# let user pass a single component to run, too
# XXX probably want IComponent? only demand it, here and below?
if isinstance(components, Component):
components = [components]
if type(components) != list:
raise ValueError(
'"components" must be a list of Component objects - encountered'
' {0}'.format(type(components))
)
for c in components:
if not isinstance(c, Component):
raise ValueError(
'"components" must be a list of Component objects - encountered'
                ' item of type {0}'.format(type(c))
)
# validation complete; proceed with startup
log = txaio.make_logger()
def component_success(comp, arg):
log.debug("Component '{c}' successfully completed: {arg}", c=comp, arg=arg)
return arg
def component_failure(comp, f):
log.error("Component '{c}' error: {msg}", c=comp, msg=txaio.failure_message(f))
log.debug("Component error: {tb}", tb=txaio.failure_format_traceback(f))
# double-check: is a component-failure still fatal to the
        # startup process (because we passed consume_exceptions=False
# to gather() below?)
return None
def component_start(comp):
# the future from start() errbacks if we fail, or callbacks
# when the component is considered "done" (so maybe never)
d = txaio.as_future(comp.start, reactor)
txaio.add_callbacks(
d,
partial(component_success, comp),
partial(component_failure, comp),
)
return d
# note that these are started in parallel -- maybe we want to add
# a "connected" signal to components so we could start them in the
# order they're given to run() as "a" solution to dependencies.
dl = []
for comp in components:
d = component_start(comp)
dl.append(d)
done_d = txaio.gather(dl, consume_exceptions=False)
def all_done(arg):
log.debug("All components ended; stopping reactor")
done_callback(reactor, arg)
txaio.add_callbacks(done_d, all_done, all_done)
return done_d
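# Illustrative sketch (not part of the original module): how a framework-
# specific run() helper might drive _run(). The twisted `react` import and
# the Component constructor arguments are assumptions for the example.
#
#   from twisted.internet.task import react
#
#   def run(components):
#       def done(reactor, arg):
#           pass  # react() stops the reactor once _run()'s future resolves
#       react(lambda reactor: _run(reactor, components, done))
#
#   run([Component(transports=u"ws://localhost:8080/ws", realm=u"realm1")])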
| mit |
comparemetrics/GoogleAppsAccountManager | src/GoogleAppsAccountManager/http.py | 1 | 1152 | #
# GoogleAppsAccountManager: http
# Copyright (C) 2012-2013 KAMEI Yutaka
#
# License: GNU General Public License version 2 or later
# Date: 2013-01-08, since 2012-12-28
#
import socket
import httplib
from GoogleAppsAccountManager import errors
def httpsRequest(method, server, path, header, body):
for count in range(10):
try:
conn = httplib.HTTPSConnection(server)
conn.request(method, path, body, header)
res = conn.getresponse()
except socket.gaierror, e:
raise errors.NotFoundServer()
except Exception, e:
continue
else:
return (res.read(), res.status)
raise errors.UnknownError()
def httpsGETRequest(server, path, header, body=""):
return httpsRequest("GET", server, path, header, body)
def httpsPOSTRequest(server, path, header, body):
return httpsRequest("POST", server, path, header, body)
def httpsPUTRequest(server, path, header, body):
return httpsRequest("PUT", server, path, header, body)
def httpsDELETERequest(server, path, header, body=""):
return httpsRequest("DELETE", server, path, header, body)
| gpl-2.0 |
yfried/ansible | test/units/module_utils/facts/base.py | 47 | 2309 | # base unit test classes for ansible/module_utils/facts/ tests
# -*- coding: utf-8 -*-
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# Make coding more python3-ish
from __future__ import (absolute_import, division)
__metaclass__ = type
from units.compat import unittest
from units.compat.mock import Mock
class BaseFactsTest(unittest.TestCase):
# just a base class, not an actual test
__test__ = False
gather_subset = ['all']
valid_subsets = None
fact_namespace = None
collector_class = None
    # a dict of ansible_facts. Some fact collectors depend on facts gathered by
# other collectors (like 'ansible_architecture' or 'ansible_system') which
# can be passed via the collected_facts arg to collect()
collected_facts = None
def _mock_module(self):
mock_module = Mock()
mock_module.params = {'gather_subset': self.gather_subset,
'gather_timeout': 5,
'filter': '*'}
mock_module.get_bin_path = Mock(return_value=None)
return mock_module
def test_collect(self):
module = self._mock_module()
fact_collector = self.collector_class()
facts_dict = fact_collector.collect(module=module, collected_facts=self.collected_facts)
self.assertIsInstance(facts_dict, dict)
return facts_dict
def test_collect_with_namespace(self):
module = self._mock_module()
fact_collector = self.collector_class()
facts_dict = fact_collector.collect_with_namespace(module=module,
collected_facts=self.collected_facts)
self.assertIsInstance(facts_dict, dict)
return facts_dict
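# Illustrative sketch of a concrete test case built on BaseFactsTest
# (TempFactCollector is a hypothetical collector class, not part of Ansible):
#
#   class TestTempFacts(BaseFactsTest):
#       __test__ = True
#       gather_subset = ['!all', 'temp']
#       valid_subsets = ['temp']
#       fact_namespace = 'ansible_temp'
#       collector_class = TempFactCollector
#
# The inherited test_collect/test_collect_with_namespace methods then run
# against the configured collector.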
| gpl-3.0 |
mikehulluk/morphforge | src/morphforge/traces/operators/op_variabledt_scalar.py | 1 | 9445 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------
# Copyright (c) 2012 Michael Hull.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# - Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# - Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------
from morphforge import units
import operator
from morphforge.traces.tracetypes import TraceVariableDT
from morphforge.traces.traceobjpluginctrl import TraceOperatorCtrl
class TraceOperator_TraceVariableDT_Quantity(object):
@classmethod
def do_add(cls, lhs, rhs):
assert (type(lhs) == TraceVariableDT and type(rhs) == units.Quantity)
return TraceVariableDT(lhs.time_pts, lhs.data_pts + rhs)
@classmethod
def do_sub(cls, lhs, rhs):
assert (type(lhs) == TraceVariableDT and type(rhs) == units.Quantity)
return TraceVariableDT(lhs.time_pts, lhs.data_pts - rhs)
@classmethod
def do_mul(cls, lhs, rhs):
assert (type(lhs) == TraceVariableDT and type(rhs) == units.Quantity)
return TraceVariableDT(lhs.time_pts, lhs.data_pts * rhs)
@classmethod
def do_div(cls, lhs, rhs):
assert (type(lhs) == TraceVariableDT and type(rhs) == units.Quantity)
return TraceVariableDT(lhs.time_pts, lhs.data_pts / rhs)
class TraceOperator_Quantity_TraceVariableDT(object):
@classmethod
def do_add(cls, lhs, rhs):
assert type(rhs) == TraceVariableDT and type(lhs) == units.Quantity
return TraceVariableDT(rhs.time_pts, lhs + rhs.data_pts)
@classmethod
def do_sub(cls, lhs, rhs):
assert type(rhs) == TraceVariableDT and type(lhs) == units.Quantity
return TraceVariableDT(rhs.time_pts, lhs - rhs.data_pts)
@classmethod
def do_mul(cls, lhs, rhs):
assert type(rhs) == TraceVariableDT and type(lhs) == units.Quantity
return TraceVariableDT(rhs.time_pts, lhs * rhs.data_pts)
@classmethod
def do_div(cls, lhs, rhs):
assert type(rhs) == TraceVariableDT and type(lhs) == units.Quantity
return TraceVariableDT(rhs.time_pts, lhs / rhs.data_pts)
TraceOperatorCtrl.add_trace_operator(
operator_type=operator.__add__,
lhs_type=TraceVariableDT, rhs_type=units.Quantity,
operator_func=TraceOperator_TraceVariableDT_Quantity.do_add,
flag='default')
TraceOperatorCtrl.add_trace_operator(
operator_type=operator.__sub__,
lhs_type=TraceVariableDT, rhs_type=units.Quantity,
operator_func=TraceOperator_TraceVariableDT_Quantity.do_sub,
flag='default')
TraceOperatorCtrl.add_trace_operator(
operator_type=operator.__mul__,
lhs_type=TraceVariableDT, rhs_type=units.Quantity,
operator_func=TraceOperator_TraceVariableDT_Quantity.do_mul,
flag='default')
TraceOperatorCtrl.add_trace_operator(
operator_type=operator.__div__,
lhs_type=TraceVariableDT, rhs_type=units.Quantity,
operator_func=TraceOperator_TraceVariableDT_Quantity.do_div,
flag='default')
TraceOperatorCtrl.add_trace_operator(
operator_type=operator.__add__,
lhs_type=units.Quantity, rhs_type=TraceVariableDT,
operator_func=TraceOperator_Quantity_TraceVariableDT.do_add,
flag='default')
TraceOperatorCtrl.add_trace_operator(
operator_type=operator.__sub__,
lhs_type=units.Quantity, rhs_type=TraceVariableDT,
operator_func=TraceOperator_Quantity_TraceVariableDT.do_sub,
flag='default')
TraceOperatorCtrl.add_trace_operator(
operator_type=operator.__mul__,
lhs_type=units.Quantity, rhs_type=TraceVariableDT,
operator_func=TraceOperator_Quantity_TraceVariableDT.do_mul,
flag='default')
TraceOperatorCtrl.add_trace_operator(
operator_type=operator.__div__,
lhs_type=units.Quantity, rhs_type=TraceVariableDT,
operator_func=TraceOperator_Quantity_TraceVariableDT.do_div,
flag='default')
class TraceOperator_TraceVariableDT_Scalar(object):
@classmethod
def do_add(cls, lhs, rhs):
assert (type(lhs) == TraceVariableDT and type(rhs) == float)
assert isinstance(lhs.data_pts, units.unitquantity.Dimensionless)
return TraceVariableDT(lhs.time_pts, lhs.data_pts + rhs)
@classmethod
def do_sub(cls, lhs, rhs):
assert (type(lhs) == TraceVariableDT and type(rhs) == float)
assert isinstance(lhs.data_pts, units.unitquantity.Dimensionless)
return TraceVariableDT(lhs.time_pts, lhs.data_pts - rhs)
@classmethod
def do_mul(cls, lhs, rhs):
assert (type(lhs) == TraceVariableDT and type(rhs) == float)
return TraceVariableDT(lhs.time_pts, lhs.data_pts * rhs)
@classmethod
def do_div(cls, lhs, rhs):
assert (type(lhs) == TraceVariableDT and type(rhs) == float)
return TraceVariableDT(lhs.time_pts, lhs.data_pts / rhs)
@classmethod
def do_pow(cls, lhs, rhs):
assert (type(lhs) == TraceVariableDT and (type(rhs) == float or type(rhs) == int))
return TraceVariableDT(lhs.time_pts, lhs.data_pts ** rhs)
class TraceOperator_Scalar_TraceVariableDT(object):
@classmethod
def do_add(cls, lhs, rhs):
assert type(rhs) == TraceVariableDT and type(lhs) == float
assert isinstance(rhs.data_pts, units.Dimensionless)
return TraceVariableDT(rhs.time_pts, lhs + rhs.data_pts)
@classmethod
def do_sub(cls, lhs, rhs):
assert type(rhs) == TraceVariableDT and type(lhs) == float
assert isinstance(rhs.data_pts, units.Dimensionless)
return TraceVariableDT(rhs.time_pts, lhs - rhs.data_pts)
@classmethod
def do_mul(cls, lhs, rhs):
assert type(rhs) == TraceVariableDT and type(lhs) == float
return TraceVariableDT(rhs.time_pts, lhs * rhs.data_pts)
@classmethod
def do_div(cls, lhs, rhs):
assert type(rhs) == TraceVariableDT and type(lhs) == float
return TraceVariableDT(rhs.time_pts, lhs / rhs.data_pts)
TraceOperatorCtrl.add_trace_operator(
operator_type=operator.__add__,
lhs_type=TraceVariableDT, rhs_type=float,
operator_func=TraceOperator_TraceVariableDT_Scalar.do_add,
flag='default')
TraceOperatorCtrl.add_trace_operator(
operator_type=operator.__sub__,
lhs_type=TraceVariableDT, rhs_type=float,
operator_func=TraceOperator_TraceVariableDT_Scalar.do_sub,
flag='default')
TraceOperatorCtrl.add_trace_operator(
operator_type=operator.__mul__,
lhs_type=TraceVariableDT, rhs_type=float,
operator_func=TraceOperator_TraceVariableDT_Scalar.do_mul,
flag='default')
TraceOperatorCtrl.add_trace_operator(
operator_type=operator.__div__,
lhs_type=TraceVariableDT, rhs_type=float,
operator_func=TraceOperator_TraceVariableDT_Scalar.do_div,
flag='default')
TraceOperatorCtrl.add_trace_operator(
operator_type=operator.__pow__,
lhs_type=TraceVariableDT, rhs_type=float,
operator_func=TraceOperator_TraceVariableDT_Scalar.do_pow,
flag='default')
TraceOperatorCtrl.add_trace_operator(
operator_type=operator.__pow__,
lhs_type=TraceVariableDT, rhs_type=int,
operator_func=TraceOperator_TraceVariableDT_Scalar.do_pow,
flag='default')
TraceOperatorCtrl.add_trace_operator(
operator_type=operator.__add__,
lhs_type=float, rhs_type=TraceVariableDT,
operator_func=TraceOperator_Scalar_TraceVariableDT.do_add,
flag='default')
TraceOperatorCtrl.add_trace_operator(
operator_type=operator.__sub__,
lhs_type=float, rhs_type=TraceVariableDT,
operator_func=TraceOperator_Scalar_TraceVariableDT.do_sub,
flag='default')
TraceOperatorCtrl.add_trace_operator(
operator_type=operator.__mul__,
lhs_type=float, rhs_type=TraceVariableDT,
operator_func=TraceOperator_Scalar_TraceVariableDT.do_mul,
flag='default')
TraceOperatorCtrl.add_trace_operator(
operator_type=operator.__div__,
lhs_type=float, rhs_type=TraceVariableDT,
operator_func=TraceOperator_Scalar_TraceVariableDT.do_div,
flag='default')
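# Illustrative usage sketch (assumes `tr` is a TraceVariableDT built
# elsewhere; the unit name mV is an assumption for the example): with the
# registrations above in place, arithmetic on traces dispatches through
# TraceOperatorCtrl to the matching operator class:
#
#   shifted = tr + 10.0 * units.mV   # TraceVariableDT <op> Quantity
#   scaled = 2.0 * tr                # float <op> TraceVariableDT
#   squared = tr ** 2                # TraceVariableDT ** int (do_pow)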
| bsd-2-clause |
hiteshgarg14/openstates | scrapers/ak/__init__.py | 2 | 1974 | from utils import State, url_xpath
from .bills import AKBillScraper
from .events import AKEventScraper
settings = dict(SCRAPELIB_TIMEOUT=600)
class Alaska(State):
scrapers = {"bills": AKBillScraper, "events": AKEventScraper}
legislative_sessions = [
{
"_scraped_name": "28th Legislature (2013-2014)",
"identifier": "28",
"name": "28th Legislature (2013-2014)",
"start_date": "2013-01-15",
"end_date": "2014-04-20",
},
{
"_scraped_name": "29th Legislature (2015-2016)",
"identifier": "29",
"name": "29th Legislature (2015-2016)",
"start_date": "2015-01-19",
"end_date": "2016-05-18",
},
{
"_scraped_name": "30th Legislature (2017-2018)",
"identifier": "30",
"name": "30th Legislature (2017-2018)",
"start_date": "2017-01-17",
"end_date": "2018-05-13",
},
{
"_scraped_name": "31st Legislature (2019-2020)",
"identifier": "31",
"name": "31st Legislature (2019-2020)",
"start_date": "2019-01-15",
"end_date": "2020-05-20",
},
]
ignored_scraped_sessions = [
"27th Legislature (2011-2012)",
"26th Legislature (2009-2010)",
"25th Legislature (2007-2008)",
"24th Legislature (2005-2006)",
"23rd Legislature (2003-2004)",
"22nd Legislature (2001-2002)",
"21st Legislature (1999-2000)",
"20th Legislature (1997-1998)",
"19th Legislature (1995-1996)",
"18th Legislature (1993-1994)",
]
    def get_session_list(self):
        return [session["_scraped_name"] for session in self.legislative_sessions]
        # Unreachable legacy fallback (dead code): scraped the session names
        # from the archive page instead of reading legislative_sessions:
        # return url_xpath(
        #     "https://www.akleg.gov/basis/Home/Archive",
        #     '//div[@id="fullpage"]//a[contains(@href, "/BillsandLaws/")]//text()',
        # )
| gpl-3.0 |
ehartsuyker/securedrop | securedrop/alembic/versions/fccf57ceef02_create_submission_uuid_column.py | 2 | 2002 | """create submission uuid column
Revision ID: fccf57ceef02
Revises: 3d91d6948753
Create Date: 2018-07-12 00:06:20.891213
"""
from alembic import op
import sqlalchemy as sa
import uuid
# revision identifiers, used by Alembic.
revision = 'fccf57ceef02'
down_revision = '3d91d6948753'
branch_labels = None
depends_on = None
def upgrade():
# Schema migration
op.rename_table('submissions', 'submissions_tmp')
# Add UUID column.
op.add_column('submissions_tmp', sa.Column('uuid', sa.String(length=36)))
# Add UUIDs to submissions_tmp table.
conn = op.get_bind()
submissions = conn.execute(
sa.text("SELECT * FROM submissions_tmp")).fetchall()
for submission in submissions:
conn.execute(
sa.text("""UPDATE submissions_tmp SET uuid=:submission_uuid WHERE
id=:id""").bindparams(submission_uuid=str(uuid.uuid4()),
id=submission.id)
)
# Now create new table with unique constraint applied.
op.create_table('submissions',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('uuid', sa.String(length=36), nullable=False),
sa.Column('source_id', sa.Integer(), nullable=True),
sa.Column('filename', sa.String(length=255), nullable=False),
sa.Column('size', sa.Integer(), nullable=False),
sa.Column('downloaded', sa.Boolean(), nullable=True),
sa.ForeignKeyConstraint(['source_id'], ['sources.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('uuid')
)
# Data Migration: move all submissions into the new table.
conn.execute('''
INSERT INTO submissions
SELECT id, uuid, source_id, filename, size, downloaded
FROM submissions_tmp
''')
# Now delete the old table.
op.drop_table('submissions_tmp')
def downgrade():
with op.batch_alter_table('submissions', schema=None) as batch_op:
batch_op.drop_column('uuid')
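# Note (illustrative): the rename/backfill/copy/drop sequence in upgrade()
# works around SQLite's ALTER TABLE, which cannot add a column with a UNIQUE
# constraint in place. On a backend with full ALTER TABLE support the same
# migration could be approximated as:
#
#   op.add_column('submissions',
#                 sa.Column('uuid', sa.String(length=36), nullable=False))
#   op.create_unique_constraint('uq_submissions_uuid', 'submissions', ['uuid'])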
| agpl-3.0 |
methoxid/micropystat | tests/bytecode/pylib-tests/dummy_threading.py | 210 | 2815 | """Faux ``threading`` version using ``dummy_thread`` instead of ``thread``.
The module ``_dummy_threading`` is added to ``sys.modules`` in order
to not have ``threading`` considered imported. Had ``threading`` been
directly imported it would have made all subsequent imports succeed
regardless of whether ``_thread`` was available which is not desired.
"""
from sys import modules as sys_modules
import _dummy_thread
# Declaring now so as to not have to nest ``try``s to get proper clean-up.
holding_thread = False
holding_threading = False
holding__threading_local = False
try:
# Could have checked if ``_thread`` was not in sys.modules and gone
# a different route, but decided to mirror technique used with
# ``threading`` below.
if '_thread' in sys_modules:
held_thread = sys_modules['_thread']
holding_thread = True
# Must have some module named ``_thread`` that implements its API
# in order to initially import ``threading``.
sys_modules['_thread'] = sys_modules['_dummy_thread']
if 'threading' in sys_modules:
# If ``threading`` is already imported, might as well prevent
# trying to import it more than needed by saving it if it is
# already imported before deleting it.
held_threading = sys_modules['threading']
holding_threading = True
del sys_modules['threading']
if '_threading_local' in sys_modules:
# If ``_threading_local`` is already imported, might as well prevent
# trying to import it more than needed by saving it if it is
# already imported before deleting it.
held__threading_local = sys_modules['_threading_local']
holding__threading_local = True
del sys_modules['_threading_local']
import threading
# Need a copy of the code kept somewhere...
sys_modules['_dummy_threading'] = sys_modules['threading']
del sys_modules['threading']
sys_modules['_dummy__threading_local'] = sys_modules['_threading_local']
del sys_modules['_threading_local']
from _dummy_threading import *
from _dummy_threading import __all__
finally:
# Put back ``threading`` if we overwrote earlier
if holding_threading:
sys_modules['threading'] = held_threading
del held_threading
del holding_threading
# Put back ``_threading_local`` if we overwrote earlier
if holding__threading_local:
sys_modules['_threading_local'] = held__threading_local
del held__threading_local
del holding__threading_local
# Put back ``thread`` if we overwrote, else del the entry we made
if holding_thread:
sys_modules['_thread'] = held_thread
del held_thread
else:
del sys_modules['_thread']
del holding_thread
del _dummy_thread
del sys_modules
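# Illustrative usage sketch (mirrors how callers fall back to this module
# on interpreters built without thread support):
#
#   try:
#       import threading
#   except ImportError:
#       import dummy_threading as threading
#
#   lock = threading.Lock()   # single-threaded stand-in backed by _dummy_thread
#   with lock:
#       pass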
| mit |
alexis-jacq/Story_CoWriting | src/naoStoryTelling/story_gestures.py | 4 | 3281 | #!/usr/bin/env python
#coding: utf-8
import sys
import time
import numpy as np
import random
import rospy
from geometry_msgs.msg import PointStamped
from std_msgs.msg import String, Empty, Header
#from naoqi_bridge_msgs.msg import JointAnglesWithSpeed
import tf
from naoqi import ALProxy
from naoqi import ALBroker
from naoqi import ALModule
############################################# moving functions
def StiffnessOn(motionProxy):
pNames = "Body"
pStiffnessLists = 1.0
pTimeLists = 1.0
motionProxy.stiffnessInterpolation(pNames, pStiffnessLists, pTimeLists)
def StiffnessOff(motionProxy):
speed = 0.1
motionProxy.setAngles("LShoulderPitch", 1.5, speed)
motionProxy.setAngles("RShoulderPitch", 1.5, speed)
time.sleep(2)
pNames = "Body"
pStiffnessLists = 0.0
pTimeLists = 1.0
motionProxy.stiffnessInterpolation(pNames, pStiffnessLists, pTimeLists)
def trackFace(motionProxy,tracker):
targetName = "Face"
faceWidth = 0.1
tracker.registerTarget(targetName, faceWidth)
# Then, start tracker.
motionProxy.setStiffnesses("Head", 1.0)
tracker.track(targetName)
def telling_arms_gesturs(motionProxy,tts,speed,wordToSay):
LShoulderPitch = np.random.uniform(-0.5,0.1)#-2.0857 to 2.0857
LShoulderRoll = np.random.uniform(0.2,0.8)#-0.3142 to 1.3265
LElbowYaw = np.random.uniform(-1,1)#-2.0857 to 2.0857
LElbowRoll = np.random.uniform(-1,-0.2)#-1.5446 to -0.0349
LWristYaw = -1.6#np.random.uniform(1,1.5)#-1.8238 to 1.8238
LHand = np.random.choice([0,1])
    # asymmetric
RShoulderPitch = np.random.uniform(-0.5,0.1)#-2.0857 to 2.0857
RShoulderRoll = np.random.uniform(-0.8,0.2)#-0.3142 to 1.3265
RElbowYaw = np.random.uniform(-1,1)#-2.0857 to 2.0857
RElbowRoll = np.random.uniform(0.2,1)#-1.5446 to -0.0349
RWristYaw = 1.6#np.random.uniform(1,1.5)#-1.8238 to 1.8238
RHand = LHand
'''
# symetric
RShoulderPitch = LShoulderPitch
RShoulderRoll = -LShoulderRoll
RElbowYaw = -LElbowYaw
RElbowRoll = -LElbowRoll
RWristYaw = -LWristYaw
RHand = LHand
'''
motionProxy.setAngles("LShoulderPitch", LShoulderPitch, speed)
motionProxy.setAngles("RShoulderPitch", RShoulderPitch, speed)
motionProxy.setAngles("LShoulderRoll", LShoulderRoll, speed)
motionProxy.setAngles("RShoulderRoll", RShoulderRoll, speed)
motionProxy.setAngles("LElbowYaw", LElbowYaw, speed)
motionProxy.setAngles("RElbowYaw", RElbowYaw, speed)
motionProxy.setAngles("LElbowRoll", LElbowRoll, speed)
motionProxy.setAngles("RElbowRoll", RElbowRoll, speed)
motionProxy.setAngles("LWristYaw", LWristYaw, speed)
motionProxy.setAngles("RWristYaw", RWristYaw, speed)
motionProxy.setAngles("LHand", LHand, speed)
motionProxy.setAngles("RHand", RHand, speed)
tts.say(wordToSay)
def pointing_object(motionProxy,tts,speed,wordToSay):
RShoulderPitch = 0
RShoulderRoll = 0
RElbowYaw = 0
RElbowRoll = np.random.uniform(0.2,0.5)#-1.5446 to -0.0349
RWristYaw = 0
RHand = 0
motionProxy.setAngles("RShoulderPitch", RShoulderPitch, speed)
motionProxy.setAngles("RShoulderRoll", RShoulderRoll, speed)
motionProxy.setAngles("RElbowYaw", RElbowYaw, speed)
motionProxy.setAngles("RElbowRoll", RElbowRoll, speed)
motionProxy.setAngles("RWristYaw", RWristYaw, speed)
motionProxy.setAngles("RHand", RHand, speed)
tts.setParameter("speed", 100)
tts.say(wordToSay)
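# Illustrative wiring sketch (the robot address and port are made-up values
# for the example): creating the NAOqi proxies these helpers expect and
# running one storytelling gesture:
#
#   motionProxy = ALProxy("ALMotion", "nao.local", 9559)
#   tts = ALProxy("ALTextToSpeech", "nao.local", 9559)
#   StiffnessOn(motionProxy)
#   telling_arms_gesturs(motionProxy, tts, 0.2, "Once upon a time")
#   StiffnessOff(motionProxy)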
| isc |
apdjustino/DRCOG_Urbansim | src/opus_gui/data_manager/controllers/xml_configuration/xml_controller_data_tools.py | 1 | 15456 | # Opus/UrbanSim urban simulation software.
# Copyright (C) 2010-2011 University of California, Berkeley, 2005-2009 University of Washington
# See opus_core/LICENSE
import os
from lxml.etree import Element, SubElement, ElementTree
from PyQt4.QtCore import QString, QFileInfo
from PyQt4.QtGui import QMenu, QCursor, QFileDialog
from opus_gui.util.exception_formatter import formatExceptionInfo
from opus_gui.main.controllers.dialogs.message_box import MessageBox
from opus_gui.data_manager.data_manager_functions import *
from opus_core.configurations.xml_configuration import XMLConfiguration
from opus_gui.main.controllers.instance_handlers import get_mainwindow_instance
from opus_gui.data_manager.controllers.dialogs.configuretool import ConfigureToolGui
from opus_gui.data_manager.controllers.dialogs.executetool import ExecuteToolGui
from opus_gui.abstract_manager.controllers.xml_configuration.xml_controller import XmlController
from opus_gui.data_manager.controllers.dialogs.executetoolset import ExecuteToolSetGui
from opus_gui.data_manager.controllers.dialogs.addparam import addParamGui
from opus_gui.abstract_manager.controllers.xml_configuration.renamedialog import RenameDialog
from opus_core import paths
class XmlController_DataTools(XmlController):
def __init__(self, manager):
XmlController.__init__(self, manager)
# Show dialog to execute/config tools
self.actExecToolFile = self.create_action('execute',"Execute Tool...", self.execToolFile)
self.actExecToolConfig = self.create_action('execute',"Execute Tool...", self.execToolConfig)
# Adding tools, groups, sets and configurations
self.actAddToolFile = self.create_action('add',"Add Tool", self.addToolFile)
self.actAddToolGroup = self.create_action('add',"Create Tool Group", self.addNewToolGroup)
self.actAddNewToolSet = self.create_action('add',"Create Tool Set",self.addNewToolSet)
self.actNewConfig = self.create_action('add',"Add New Tool Configuration",self.newConfig)
self.actAddParam = self.create_action('add',"Add New Param",self.addParam)
self.actEditParam = self.create_action('rename',"Edit Param",self.editParam)
self.actOpenDocumentation = self.create_action('calendar_view_day',"Open Documentation",self.openDocumentation)
self.actChangeClassModule = self.create_action('rename',"Change Module Class Name",self.changeClassModule)
self.actChangePathToTools = self.create_action('rename',"Change Path to Tools",self.changePathToTools)
# moving tools up and down
self.actMoveNodeUp = self.create_action('arrow_up',"Move Up",self.moveNodeUp)
self.actMoveNodeDown = self.create_action('arrow_down',"Move Down",self.moveNodeDown)
self.actExecBatch = self.create_action('execute',"Execute Tool Set",self.execBatch)
def addParam(self):
assert self.has_selected_item()
item = self.selected_item()
node = item.node
window = addParamGui(self.manager.base_widget, None)
window.setModal(True)
window.show()
if window.exec_() == window.Accepted:
name = str(window.nameEdit.text())
typ = str(window.typeComboBox.currentText())
default = str(window.defaultEdit.text())
required = str(window.requiredComboBox.currentText())
attributes = {'name': name, 'param_type': typ, 'required': required}
node = Element('param', attributes)
node.text = default
if self.model.insertRow(0, self.selected_index(), node) == False:
MessageBox.error(mainwindow = self.view,
text = 'Parameter Name Exists',
detailed_text = ('The parameter name to add is "%s" '
'but there is already a parameter with that name.' %
name))
return
self.view.expand(self.selected_index())
def editParam(self):
assert self.has_selected_item()
item = self.selected_item()
node = item.node
window = addParamGui(self.manager.base_widget, node)
window.setModal(True)
window.show()
if window.exec_() == window.Accepted:
name = str(window.nameEdit.text())
typ = str(window.typeComboBox.currentText())
default = str(window.defaultEdit.text())
required = str(window.requiredComboBox.currentText())
self.model.make_item_local(item)
node.set('name', name)
node.set('param_type', typ)
node.set('required', required)
node.text = default
def changeClassModule(self):
''' Opens a dialog box for changing the class module. '''
assert self.has_selected_item()
item = self.selected_item()
node = item.node
dialog = RenameDialog(node.text, [], self.view)
if dialog.exec_() == dialog.Accepted:
node.text = dialog.accepted_name
self.model.make_item_local(item)
def changePathToTools(self):
''' Opens a dialog box for changing the path to tools. '''
assert self.has_selected_item()
item = self.selected_item()
node = item.node
dialog = RenameDialog(node.text, [], self.view)
if dialog.exec_() == dialog.Accepted:
node.text = dialog.accepted_name
self.model.make_item_local(item)
def addToolFile(self):
        ''' Add a new, empty tool node under the selected tool group. '''
assert self.has_selected_item()
tool_node = Element('tool', {'name': 'unnamed_tool'})
SubElement(tool_node, "class_module").text = 'unnamed_module'
SubElement(tool_node, "params")
self.model.insertRow(0, self.selected_index(), tool_node)
self.view.expand(self.selected_index())
def addNewToolSet(self):
        ''' Create a new, empty tool set under the selected node. '''
assert self.has_selected_item()
# First add the dummy toolset shell
toolset_node = Element('tool_set', {'name': 'unnamed_tool_set'})
self.model.insertRow(0, self.selected_index(), toolset_node)
def addNewToolGroup(self):
        ''' Create a new, empty tool group under the selected node. '''
assert self.has_selected_item()
# First add the dummy toolset shell
node = Element('tool_group', {'name': 'unnamed_tool_group',
'setexpanded':'True'})
self.model.insertRow(0, self.selected_index(), node)
def newConfig(self):
        ''' Open the configure-tool dialog and insert the resulting tool configuration node. '''
assert self.has_selected_item()
index = self.selected_index()
tool_library_node = get_tool_library_node(self.project)
callback = \
lambda node: self.model.insertRow(0, index, node)
window = ConfigureToolGui(tool_library_node, callback, self.view)
window.setModal(True)
window.show()
def moveNodeUp(self):
        ''' Move the selected node one position up among its siblings. '''
# TODO connect directly to lambda
assert self.has_selected_item()
new_index = self.model.move_up(self.selected_index(),view=self.view)
self.view.setCurrentIndex(new_index)
def moveNodeDown(self):
        ''' Move the selected node one position down among its siblings. '''
# TODO connect directly to lambda
assert self.has_selected_item()
new_index = self.model.move_down(self.selected_index(),view=self.view)
self.view.setCurrentIndex(new_index)
def openDocumentation(self):
        ''' Open the documentation for the selected tool (currently disabled). '''
print 'NOTE openDocumentation for tools is disabled for now.'
return
# assert self.has_selected_item()
# filePath = self.selected_item().node.text
# fileInfo = QFileInfo(filePath)
# baseInfo = QFileInfo(self.toolboxbase.xml_file)
# baseDir = baseInfo.absolutePath()
# newFile = QFileInfo(QString(baseDir).append("/").append(QString(fileInfo.filePath())))
# fileName = newFile.absoluteFilePath().trimmed()
# x = util.documentationbase.DocumentationTab(self.mainwindow,
# QString(fileName))
def execToolFile(self):
'''
Show a dialog box that lets the user configure and execute a
tool.
'''
assert self.has_selected_item()
tool_lib_node = get_tool_library_node(self.project)
params = {'tool_path': get_path_to_tool_modules(self.project)}
tool_node = self.selected_item().node
try:
module_name = tool_node.find('class_module').text
import_path = params['tool_path'] + '.' + module_name
importString = "from %s import opusRun" % (import_path)
exec(importString)
except Exception, e:
print e
MessageBox.error(mainwindow = self.view,
text = 'Invalid module name',
detailed_text = ('This tool points to a module named "%s", ' % import_path + \
'but there is no module with that name, or module returned import error: %s. ' \
% formatExceptionInfo()
))
return
window = ExecuteToolGui(parent_widget = self.manager.base_widget,
tool_node = tool_node,
tool_config = None,
tool_library_node = tool_lib_node,
params=params,
model=self.model)
window.setModal(True)
window.show()
def execToolConfig(self):
'''
Show the dialog box for executing a "tool config"
A tool config is has a pointer to an existing tool (a "tool hook") but
can provide an alternative configuration.
'''
assert self.has_selected_item()
# First we need to get the hooked node that we want to run
node = self.selected_item().node
hooked_tool_name = node.find('tool_hook').text
hooked_tool_node = get_tool_node_by_name(self.project, hooked_tool_name)
if hooked_tool_node is None:
MessageBox.error(mainwindow = self.view,
text = 'Invalid tool hook',
detailed_text = ('This tool config points to a tool named "%s" '
'but there is no tool with that name in this project.' %
hooked_tool_name))
return
# Open up a GUI element and populate with variable's
tool_lib_node = get_tool_library_node(self.project)
params = {'tool_path': get_path_to_tool_modules(self.project)}
window = ExecuteToolGui(parent_widget = self.manager.base_widget,
tool_node = hooked_tool_node,
tool_config = node,
tool_library_node = tool_lib_node,
params=params,
model=self.model)
window.setModal(True)
window.show()
def execBatch(self):
''' Present a dialog to execute a set of tool configurations '''
assert self.has_selected_item()
# Node representing the set to execute
tool_set_node = self.selected_item().node
# map tool config nodes in set -> name of the hooked node
tool_config_to_tool_name = {}
tool_config_nodes = tool_set_node[:]
for tool_config_node in tool_config_nodes:
hook_node = tool_config_node.find('tool_hook')
hooked_tool_name = str(hook_node.text or '').strip()
hooked_tool_node = get_tool_node_by_name(self.project, hooked_tool_name)
module_name = hooked_tool_node.find('class_module').text
try:
module_name = hooked_tool_node.find('class_module').text
tool_path = get_path_to_tool_modules(self.project)
import_path = tool_path + '.' + module_name
importString = "from %s import opusRun" % (import_path)
exec(importString)
tool_config_to_tool_name[tool_config_node] = import_path
except Exception, e:
MessageBox.error(mainwindow = self.view,
text = 'Invalid module name',
detailed_text = ('This tool points to a module named "%s", ' % import_path + \
'but there is no module with that name, or module returned import error: %s. ' \
% formatExceptionInfo()
))
return
ExecuteToolSetGui(get_mainwindow_instance(),
tool_config_nodes,
tool_config_to_tool_name).show()
def add_custom_menu_items_for_node(self, node, menu):
index = self.model.index_for_node(node)
cnt = self.model.rowCount(index.parent())
istop = index.row() == 0
isbottom = index.row() == cnt-1
isonly = cnt == 1
# Tool files are the "actual" tools
if node.tag == "tool":
menu.addAction(self.actExecToolFile)
if not isonly: menu.addSeparator()
if not istop: menu.addAction(self.actMoveNodeUp)
if not isbottom: menu.addAction(self.actMoveNodeDown)
elif node.tag == "class_module":
menu.addAction(self.actChangeClassModule)
elif node.tag == "path_to_tool_modules":
menu.addAction(self.actChangePathToTools)
# "Tool library is a collection of tool groups
elif node.tag == "tool_library":
menu.addAction(self.actAddToolGroup)
# Tool groups are groups of "tool files"
elif node.tag == "tool_group":
menu.addAction(self.actAddToolFile)
if not isonly: menu.addSeparator()
if not istop: menu.addAction(self.actMoveNodeUp)
if not isbottom: menu.addAction(self.actMoveNodeDown)
# Param Template is the parameter node for the tools -- where
# users can build up a list of parameters that gets passed to the
# tool function
elif node.tag == "params":
menu.addAction(self.actAddParam)
elif node.tag == "param":
menu.addAction(self.actEditParam)
# A "tool config" is an alternative configuration for a tool that can be
# put in a Tool Set
elif node.tag == "tool_config":
menu.addAction(self.actExecToolConfig)
if not isonly: menu.addSeparator()
if not istop: menu.addAction(self.actMoveNodeUp)
if not isbottom: menu.addAction(self.actMoveNodeDown)
# "Tool sets" is a collection of multiple tool sets
elif node.tag == "tool_sets":
menu.addAction(self.actAddNewToolSet)
# A Tool set is a collection of (alternative) configurations for
# existing tools.
elif node.tag == "tool_set":
menu.addAction(self.actExecBatch)
menu.addSeparator()
menu.addAction(self.actNewConfig)
if not isonly: menu.addSeparator()
if not istop: menu.addAction(self.actMoveNodeUp)
if not isbottom: menu.addAction(self.actMoveNodeDown)
elif node.tag == "documentation_path":
menu.addAction(self.actOpenDocumentation)
menu.addSeparator()
menu.addAction(self.actCloneNode)
| agpl-3.0 |
windedge/odoo | openerp/addons/base/tests/test_ir_actions.py | 291 | 20121 | import unittest2
from openerp.osv.orm import except_orm
import openerp.tests.common as common
from openerp.tools import mute_logger
class TestServerActionsBase(common.TransactionCase):
def setUp(self):
super(TestServerActionsBase, self).setUp()
cr, uid = self.cr, self.uid
# Models
self.ir_actions_server = self.registry('ir.actions.server')
self.ir_actions_client = self.registry('ir.actions.client')
self.ir_values = self.registry('ir.values')
self.ir_model = self.registry('ir.model')
self.ir_model_fields = self.registry('ir.model.fields')
self.res_partner = self.registry('res.partner')
self.res_country = self.registry('res.country')
# Data on which we will run the server action
self.test_country_id = self.res_country.create(cr, uid, {
'name': 'TestingCountry',
'code': 'TY',
'address_format': 'SuperFormat',
})
self.test_country = self.res_country.browse(cr, uid, self.test_country_id)
self.test_partner_id = self.res_partner.create(cr, uid, {
'name': 'TestingPartner',
'city': 'OrigCity',
'country_id': self.test_country_id,
})
self.test_partner = self.res_partner.browse(cr, uid, self.test_partner_id)
self.context = {
'active_id': self.test_partner_id,
'active_model': 'res.partner',
}
# Model data
self.res_partner_model_id = self.ir_model.search(cr, uid, [('model', '=', 'res.partner')])[0]
self.res_partner_name_field_id = self.ir_model_fields.search(cr, uid, [('model', '=', 'res.partner'), ('name', '=', 'name')])[0]
self.res_partner_city_field_id = self.ir_model_fields.search(cr, uid, [('model', '=', 'res.partner'), ('name', '=', 'city')])[0]
self.res_partner_country_field_id = self.ir_model_fields.search(cr, uid, [('model', '=', 'res.partner'), ('name', '=', 'country_id')])[0]
self.res_partner_parent_field_id = self.ir_model_fields.search(cr, uid, [('model', '=', 'res.partner'), ('name', '=', 'parent_id')])[0]
self.res_country_model_id = self.ir_model.search(cr, uid, [('model', '=', 'res.country')])[0]
self.res_country_name_field_id = self.ir_model_fields.search(cr, uid, [('model', '=', 'res.country'), ('name', '=', 'name')])[0]
self.res_country_code_field_id = self.ir_model_fields.search(cr, uid, [('model', '=', 'res.country'), ('name', '=', 'code')])[0]
# create server action to
self.act_id = self.ir_actions_server.create(cr, uid, {
'name': 'TestAction',
'condition': 'True',
'model_id': self.res_partner_model_id,
'state': 'code',
'code': 'obj.write({"comment": "MyComment"})',
})
class TestServerActions(TestServerActionsBase):
def test_00_action(self):
cr, uid = self.cr, self.uid
# Do: eval 'True' condition
self.ir_actions_server.run(cr, uid, [self.act_id], self.context)
self.test_partner.refresh()
self.assertEqual(self.test_partner.comment, 'MyComment', 'ir_actions_server: invalid condition check')
self.test_partner.write({'comment': False})
# Do: eval False condition, that should be considered as True (void = True)
self.ir_actions_server.write(cr, uid, [self.act_id], {'condition': False})
self.ir_actions_server.run(cr, uid, [self.act_id], self.context)
self.test_partner.refresh()
self.assertEqual(self.test_partner.comment, 'MyComment', 'ir_actions_server: invalid condition check')
# Do: create contextual action
self.ir_actions_server.create_action(cr, uid, [self.act_id])
# Test: ir_values created
ir_values_ids = self.ir_values.search(cr, uid, [('name', '=', 'Run TestAction')])
self.assertEqual(len(ir_values_ids), 1, 'ir_actions_server: create_action should have created an entry in ir_values')
ir_value = self.ir_values.browse(cr, uid, ir_values_ids[0])
self.assertEqual(ir_value.value, 'ir.actions.server,%s' % self.act_id, 'ir_actions_server: created ir_values should reference the server action')
self.assertEqual(ir_value.model, 'res.partner', 'ir_actions_server: created ir_values should be linked to the action base model')
# Do: remove contextual action
self.ir_actions_server.unlink_action(cr, uid, [self.act_id])
# Test: ir_values removed
ir_values_ids = self.ir_values.search(cr, uid, [('name', '=', 'Run TestAction')])
self.assertEqual(len(ir_values_ids), 0, 'ir_actions_server: unlink_action should remove the ir_values record')
def test_10_code(self):
cr, uid = self.cr, self.uid
self.ir_actions_server.write(cr, uid, self.act_id, {
'state': 'code',
'code': """partner_name = obj.name + '_code'
self.pool["res.partner"].create(cr, uid, {"name": partner_name}, context=context)
workflow"""
})
run_res = self.ir_actions_server.run(cr, uid, [self.act_id], context=self.context)
self.assertFalse(run_res, 'ir_actions_server: code server action correctly finished should return False')
pids = self.res_partner.search(cr, uid, [('name', 'ilike', 'TestingPartner_code')])
self.assertEqual(len(pids), 1, 'ir_actions_server: 1 new partner should have been created')
def test_20_trigger(self):
cr, uid = self.cr, self.uid
# Data: code server action (at this point code-based actions should work)
act_id2 = self.ir_actions_server.create(cr, uid, {
'name': 'TestAction2',
'type': 'ir.actions.server',
'condition': 'True',
'model_id': self.res_partner_model_id,
'state': 'code',
'code': 'obj.write({"comment": "MyComment"})',
})
act_id3 = self.ir_actions_server.create(cr, uid, {
'name': 'TestAction3',
'type': 'ir.actions.server',
'condition': 'True',
'model_id': self.res_country_model_id,
'state': 'code',
'code': 'obj.write({"code": "ZZ"})',
})
# Data: create workflows
partner_wf_id = self.registry('workflow').create(cr, uid, {
'name': 'TestWorkflow',
'osv': 'res.partner',
'on_create': True,
})
partner_act1_id = self.registry('workflow.activity').create(cr, uid, {
'name': 'PartnerStart',
'wkf_id': partner_wf_id,
'flow_start': True
})
partner_act2_id = self.registry('workflow.activity').create(cr, uid, {
'name': 'PartnerTwo',
'wkf_id': partner_wf_id,
'kind': 'function',
'action': 'True',
'action_id': act_id2,
})
partner_trs1_id = self.registry('workflow.transition').create(cr, uid, {
'signal': 'partner_trans',
'act_from': partner_act1_id,
'act_to': partner_act2_id
})
country_wf_id = self.registry('workflow').create(cr, uid, {
'name': 'TestWorkflow',
'osv': 'res.country',
'on_create': True,
})
country_act1_id = self.registry('workflow.activity').create(cr, uid, {
'name': 'CountryStart',
'wkf_id': country_wf_id,
'flow_start': True
})
country_act2_id = self.registry('workflow.activity').create(cr, uid, {
'name': 'CountryTwo',
'wkf_id': country_wf_id,
'kind': 'function',
'action': 'True',
'action_id': act_id3,
})
country_trs1_id = self.registry('workflow.transition').create(cr, uid, {
'signal': 'country_trans',
'act_from': country_act1_id,
'act_to': country_act2_id
})
# Data: re-create country and partner to benefit from the workflows
self.test_country_id = self.res_country.create(cr, uid, {
'name': 'TestingCountry2',
'code': 'T2',
})
self.test_country = self.res_country.browse(cr, uid, self.test_country_id)
self.test_partner_id = self.res_partner.create(cr, uid, {
'name': 'TestingPartner2',
'country_id': self.test_country_id,
})
self.test_partner = self.res_partner.browse(cr, uid, self.test_partner_id)
self.context = {
'active_id': self.test_partner_id,
'active_model': 'res.partner',
}
# Run the action on partner object itself ('base')
self.ir_actions_server.write(cr, uid, [self.act_id], {
'state': 'trigger',
'use_relational_model': 'base',
'wkf_model_id': self.res_partner_model_id,
'wkf_model_name': 'res.partner',
'wkf_transition_id': partner_trs1_id,
})
self.ir_actions_server.run(cr, uid, [self.act_id], self.context)
self.test_partner.refresh()
self.assertEqual(self.test_partner.comment, 'MyComment', 'ir_actions_server: incorrect signal trigger')
# Run the action on related country object ('relational')
self.ir_actions_server.write(cr, uid, [self.act_id], {
'use_relational_model': 'relational',
'wkf_model_id': self.res_country_model_id,
'wkf_model_name': 'res.country',
'wkf_field_id': self.res_partner_country_field_id,
'wkf_transition_id': country_trs1_id,
})
self.ir_actions_server.run(cr, uid, [self.act_id], self.context)
self.test_country.refresh()
self.assertEqual(self.test_country.code, 'ZZ', 'ir_actions_server: incorrect signal trigger')
        # Clear the workflow cache, otherwise openerp will try to create workflows even if they have been deleted
from openerp.workflow import clear_cache
clear_cache(cr, uid)
def test_30_client(self):
cr, uid = self.cr, self.uid
client_action_id = self.registry('ir.actions.client').create(cr, uid, {
'name': 'TestAction2',
'tag': 'Test',
})
self.ir_actions_server.write(cr, uid, [self.act_id], {
'state': 'client_action',
'action_id': client_action_id,
})
res = self.ir_actions_server.run(cr, uid, [self.act_id], context=self.context)
self.assertEqual(res['name'], 'TestAction2', 'ir_actions_server: incorrect return result for a client action')
def test_40_crud_create(self):
cr, uid = self.cr, self.uid
_city = 'TestCity'
_name = 'TestNew'
# Do: create a new record in the same model and link it
self.ir_actions_server.write(cr, uid, [self.act_id], {
'state': 'object_create',
'use_create': 'new',
'link_new_record': True,
'link_field_id': self.res_partner_parent_field_id,
'fields_lines': [(0, 0, {'col1': self.res_partner_name_field_id, 'value': _name}),
(0, 0, {'col1': self.res_partner_city_field_id, 'value': _city})],
})
run_res = self.ir_actions_server.run(cr, uid, [self.act_id], context=self.context)
self.assertFalse(run_res, 'ir_actions_server: create record action correctly finished should return False')
# Test: new partner created
pids = self.res_partner.search(cr, uid, [('name', 'ilike', _name)])
self.assertEqual(len(pids), 1, 'ir_actions_server: TODO')
partner = self.res_partner.browse(cr, uid, pids[0])
self.assertEqual(partner.city, _city, 'ir_actions_server: TODO')
# Test: new partner linked
self.test_partner.refresh()
self.assertEqual(self.test_partner.parent_id.id, pids[0], 'ir_actions_server: TODO')
# Do: copy current record
self.ir_actions_server.write(cr, uid, [self.act_id], {'fields_lines': [[5]]})
self.ir_actions_server.write(cr, uid, [self.act_id], {
'state': 'object_create',
'use_create': 'copy_current',
'link_new_record': False,
'fields_lines': [(0, 0, {'col1': self.res_partner_name_field_id, 'value': 'TestCopyCurrent'}),
(0, 0, {'col1': self.res_partner_city_field_id, 'value': 'TestCity'})],
})
run_res = self.ir_actions_server.run(cr, uid, [self.act_id], context=self.context)
self.assertFalse(run_res, 'ir_actions_server: create record action correctly finished should return False')
# Test: new partner created
pids = self.res_partner.search(cr, uid, [('name', 'ilike', 'TestingPartner (copy)')]) # currently res_partner overrides default['name'] whatever its value
self.assertEqual(len(pids), 1, 'ir_actions_server: TODO')
partner = self.res_partner.browse(cr, uid, pids[0])
self.assertEqual(partner.city, 'TestCity', 'ir_actions_server: TODO')
self.assertEqual(partner.country_id.id, self.test_partner.country_id.id, 'ir_actions_server: TODO')
# Do: create a new record in another model
self.ir_actions_server.write(cr, uid, [self.act_id], {'fields_lines': [[5]]})
self.ir_actions_server.write(cr, uid, [self.act_id], {
'state': 'object_create',
'use_create': 'new_other',
'crud_model_id': self.res_country_model_id,
'link_new_record': False,
'fields_lines': [(0, 0, {'col1': self.res_country_name_field_id, 'value': 'obj.name', 'type': 'equation'}),
(0, 0, {'col1': self.res_country_code_field_id, 'value': 'obj.name[0:2]', 'type': 'equation'})],
})
run_res = self.ir_actions_server.run(cr, uid, [self.act_id], context=self.context)
self.assertFalse(run_res, 'ir_actions_server: create record action correctly finished should return False')
# Test: new country created
cids = self.res_country.search(cr, uid, [('name', 'ilike', 'TestingPartner')])
self.assertEqual(len(cids), 1, 'ir_actions_server: TODO')
country = self.res_country.browse(cr, uid, cids[0])
self.assertEqual(country.code, 'TE', 'ir_actions_server: TODO')
# Do: copy a record in another model
self.ir_actions_server.write(cr, uid, [self.act_id], {'fields_lines': [[5]]})
self.ir_actions_server.write(cr, uid, [self.act_id], {
'state': 'object_create',
'use_create': 'copy_other',
'crud_model_id': self.res_country_model_id,
'link_new_record': False,
'ref_object': 'res.country,%s' % self.test_country_id,
'fields_lines': [(0, 0, {'col1': self.res_country_name_field_id, 'value': 'NewCountry', 'type': 'value'}),
(0, 0, {'col1': self.res_country_code_field_id, 'value': 'NY', 'type': 'value'})],
})
run_res = self.ir_actions_server.run(cr, uid, [self.act_id], context=self.context)
self.assertFalse(run_res, 'ir_actions_server: create record action correctly finished should return False')
# Test: new country created
cids = self.res_country.search(cr, uid, [('name', 'ilike', 'NewCountry')])
self.assertEqual(len(cids), 1, 'ir_actions_server: TODO')
country = self.res_country.browse(cr, uid, cids[0])
self.assertEqual(country.code, 'NY', 'ir_actions_server: TODO')
self.assertEqual(country.address_format, 'SuperFormat', 'ir_actions_server: TODO')
def test_50_crud_write(self):
cr, uid = self.cr, self.uid
_name = 'TestNew'
# Do: create a new record in the same model and link it
self.ir_actions_server.write(cr, uid, [self.act_id], {
'state': 'object_write',
'use_write': 'current',
'fields_lines': [(0, 0, {'col1': self.res_partner_name_field_id, 'value': _name})],
})
run_res = self.ir_actions_server.run(cr, uid, [self.act_id], context=self.context)
self.assertFalse(run_res, 'ir_actions_server: create record action correctly finished should return False')
# Test: new partner created
pids = self.res_partner.search(cr, uid, [('name', 'ilike', _name)])
self.assertEqual(len(pids), 1, 'ir_actions_server: TODO')
partner = self.res_partner.browse(cr, uid, pids[0])
self.assertEqual(partner.city, 'OrigCity', 'ir_actions_server: TODO')
# Do: copy current record
self.ir_actions_server.write(cr, uid, [self.act_id], {'fields_lines': [[5]]})
self.ir_actions_server.write(cr, uid, [self.act_id], {
'use_write': 'other',
'crud_model_id': self.res_country_model_id,
'ref_object': 'res.country,%s' % self.test_country_id,
'fields_lines': [(0, 0, {'col1': self.res_country_name_field_id, 'value': 'obj.name', 'type': 'equation'})],
})
run_res = self.ir_actions_server.run(cr, uid, [self.act_id], context=self.context)
self.assertFalse(run_res, 'ir_actions_server: create record action correctly finished should return False')
# Test: new country created
cids = self.res_country.search(cr, uid, [('name', 'ilike', 'TestNew')])
self.assertEqual(len(cids), 1, 'ir_actions_server: TODO')
# Do: copy a record in another model
self.ir_actions_server.write(cr, uid, [self.act_id], {'fields_lines': [[5]]})
self.ir_actions_server.write(cr, uid, [self.act_id], {
'use_write': 'expression',
'crud_model_id': self.res_country_model_id,
'write_expression': 'object.country_id',
'fields_lines': [(0, 0, {'col1': self.res_country_name_field_id, 'value': 'NewCountry', 'type': 'value'})],
})
run_res = self.ir_actions_server.run(cr, uid, [self.act_id], context=self.context)
self.assertFalse(run_res, 'ir_actions_server: create record action correctly finished should return False')
# Test: new country created
cids = self.res_country.search(cr, uid, [('name', 'ilike', 'NewCountry')])
self.assertEqual(len(cids), 1, 'ir_actions_server: TODO')
@mute_logger('openerp.addons.base.ir.ir_model', 'openerp.models')
def test_60_multi(self):
cr, uid = self.cr, self.uid
# Data: 2 server actions that will be nested
act1_id = self.ir_actions_server.create(cr, uid, {
'name': 'Subaction1',
'sequence': 1,
'model_id': self.res_partner_model_id,
'state': 'code',
'code': 'action = {"type": "ir.actions.act_window"}',
})
act2_id = self.ir_actions_server.create(cr, uid, {
'name': 'Subaction2',
'sequence': 2,
'model_id': self.res_partner_model_id,
'state': 'object_create',
'use_create': 'copy_current',
})
act3_id = self.ir_actions_server.create(cr, uid, {
'name': 'Subaction3',
'sequence': 3,
'model_id': self.res_partner_model_id,
'state': 'code',
'code': 'action = {"type": "ir.actions.act_url"}',
})
self.ir_actions_server.write(cr, uid, [self.act_id], {
'state': 'multi',
'child_ids': [(6, 0, [act1_id, act2_id, act3_id])],
})
# Do: run the action
res = self.ir_actions_server.run(cr, uid, [self.act_id], context=self.context)
# Test: new partner created
pids = self.res_partner.search(cr, uid, [('name', 'ilike', 'TestingPartner (copy)')]) # currently res_partner overrides default['name'] whatever its value
self.assertEqual(len(pids), 1, 'ir_actions_server: TODO')
# Test: action returned
self.assertEqual(res.get('type'), 'ir.actions.act_url')
# Test loops
with self.assertRaises(except_orm):
self.ir_actions_server.write(cr, uid, [self.act_id], {
'child_ids': [(6, 0, [self.act_id])]
})
if __name__ == '__main__':
unittest2.main()
| agpl-3.0 |
okcompute/vim-runners | compiler/tests/test_nose_runner.py | 2 | 10174 | #!/usr/bin/env python
# encoding: utf-8
import unittest
from runners.nose import (
parse,
)
class TestNoseRunner(unittest.TestCase):
"""Test case for runners.nose.py module"""
def test_parse_lines(self):
input = [
"FF...E..F.........................................",
"======================================================================",
"ERROR: Test authentication handler cannot be accessed if user sign in and",
"----------------------------------------------------------------------",
"Traceback (most recent call last):",
" File \"/Users/okcompute/Developer/Git/OkBudgetBackend/venv/lib/python3.4/site-packages/tornado/testing.py\", line 118, in __call__",
" result = self.orig_method(*args, **kwargs)",
" File \"/Users/okcompute/Developer/Git/OkBudgetBackend/okbudget/tests/test_authentication.py\", line 240, in test_signout",
" caca",
"nose.proxy.NameError: name 'caca' is not defined",
"-------------------- >> begin captured logging << --------------------",
"tornado.general: WARNING: tornado.autoreload started more than once in the same process",
"tornado.access: INFO: 200 PUT /private/reset_db (127.0.0.1) 2.43ms",
"tornado.access: INFO: 200 POST /api/signup (127.0.0.1) 2.72ms",
"tornado.access: INFO: 200 PUT /private/reset_db (127.0.0.1) 2.18ms",
"--------------------- >> end captured logging << ---------------------",
"",
"======================================================================",
"FAIL: test_false (okbudget.tests.test_authentication.TestAuthentication)",
"----------------------------------------------------------------------",
"Traceback (most recent call last):",
" File \"/Users/okcompute/Developer/Git/OkBudgetBackend/venv/lib/python3.4/site-packages/tornado/testing.py\", line 118, in __call__",
" result = self.orig_method(*args, **kwargs)",
" File \"/Users/okcompute/Developer/Git/OkBudgetBackend/okbudget/tests/test_authentication.py\", line 276, in test_false",
" assert False",
"nose.proxy.AssertionError:",
"-------------------- >> begin captured logging << --------------------",
"tornado.access: INFO: 200 PUT /private/reset_db (127.0.0.1) 3.07ms",
"tornado.access: INFO: 200 POST /api/signup (127.0.0.1) 3.15ms",
"tornado.access: INFO: 200 PUT /private/reset_db (127.0.0.1) 2.16ms",
"--------------------- >> end captured logging << ---------------------",
"",
"======================================================================",
"FAIL: test_false2 (okbudget.tests.test_authentication.TestAuthentication)",
"----------------------------------------------------------------------",
"Traceback (most recent call last):",
" File \"/Users/okcompute/Developer/Git/OkBudgetBackend/venv/lib/python3.4/site-packages/tornado/testing.py\", line 118, in __call__",
" result = self.orig_method(*args, **kwargs)",
" File \"/Users/okcompute/Developer/Git/OkBudgetBackend/okbudget/tests/test_authentication.py\", line 279, in test_false2",
" assert False",
"nose.proxy.AssertionError:",
"-------------------- >> begin captured logging << --------------------",
"tornado.general: WARNING: tornado.autoreload started more than once in the same process",
"tornado.access: INFO: 200 PUT /private/reset_db (127.0.0.1) 2.33ms",
"tornado.access: INFO: 200 POST /api/signup (127.0.0.1) 2.27ms",
"tornado.access: INFO: 200 PUT /private/reset_db (127.0.0.1) 2.20ms",
"--------------------- >> end captured logging << ---------------------",
"",
"======================================================================",
"FAIL: okbudget.tests.test_authentication.test_myfunc",
"----------------------------------------------------------------------",
"Traceback (most recent call last):",
" File \"/Users/okcompute/Developer/Git/OkBudgetBackend/venv/lib/python3.4/site-packages/nose/case.py\", line 198, in runTest",
" self.test(*self.arg)",
" File \"/Users/okcompute/Developer/Git/OkBudgetBackend/okbudget/tests/test_authentication.py\", line 283, in test_myfunc",
" assert False",
"AssertionError",
"",
"----------------------------------------------------------------------",
"Ran 50 tests in 1.684s",
"",
"FAILED (errors=1, failures=3)",
]
expected = [
"FF...E..F.........................................",
"======================================================================",
"ERROR: Test authentication handler cannot be accessed if user sign in and",
"----------------------------------------------------------------------",
"Traceback (most recent call last):",
" File \"/Users/okcompute/Developer/Git/OkBudgetBackend/venv/lib/python3.4/site-packages/tornado/testing.py\", line 118, in __call__",
" result = self.orig_method(*args, **kwargs)",
" File \"/Users/okcompute/Developer/Git/OkBudgetBackend/okbudget/tests/test_authentication.py\", line 240, in test_signout",
" caca",
"nose.proxy.NameError: name 'caca' is not defined",
"/Users/okcompute/Developer/Git/OkBudgetBackend/okbudget/tests/test_authentication.py:240 <nose.proxy.NameError: name 'caca' is not defined>",
"-------------------- >> begin captured logging << --------------------",
"tornado.general: WARNING: tornado.autoreload started more than once in the same process",
"tornado.access: INFO: 200 PUT /private/reset_db (127.0.0.1) 2.43ms",
"tornado.access: INFO: 200 POST /api/signup (127.0.0.1) 2.72ms",
"tornado.access: INFO: 200 PUT /private/reset_db (127.0.0.1) 2.18ms",
"--------------------- >> end captured logging << ---------------------",
"",
"======================================================================",
"FAIL: test_false (okbudget.tests.test_authentication.TestAuthentication)",
"----------------------------------------------------------------------",
"Traceback (most recent call last):",
" File \"/Users/okcompute/Developer/Git/OkBudgetBackend/venv/lib/python3.4/site-packages/tornado/testing.py\", line 118, in __call__",
" result = self.orig_method(*args, **kwargs)",
" File \"/Users/okcompute/Developer/Git/OkBudgetBackend/okbudget/tests/test_authentication.py\", line 276, in test_false",
" assert False",
"nose.proxy.AssertionError:",
"/Users/okcompute/Developer/Git/OkBudgetBackend/okbudget/tests/test_authentication.py:276 <nose.proxy.AssertionError:>",
"-------------------- >> begin captured logging << --------------------",
"tornado.access: INFO: 200 PUT /private/reset_db (127.0.0.1) 3.07ms",
"tornado.access: INFO: 200 POST /api/signup (127.0.0.1) 3.15ms",
"tornado.access: INFO: 200 PUT /private/reset_db (127.0.0.1) 2.16ms",
"--------------------- >> end captured logging << ---------------------",
"",
"======================================================================",
"FAIL: test_false2 (okbudget.tests.test_authentication.TestAuthentication)",
"----------------------------------------------------------------------",
"Traceback (most recent call last):",
" File \"/Users/okcompute/Developer/Git/OkBudgetBackend/venv/lib/python3.4/site-packages/tornado/testing.py\", line 118, in __call__",
" result = self.orig_method(*args, **kwargs)",
" File \"/Users/okcompute/Developer/Git/OkBudgetBackend/okbudget/tests/test_authentication.py\", line 279, in test_false2",
" assert False",
"nose.proxy.AssertionError:",
"/Users/okcompute/Developer/Git/OkBudgetBackend/okbudget/tests/test_authentication.py:279 <nose.proxy.AssertionError:>",
"-------------------- >> begin captured logging << --------------------",
"tornado.general: WARNING: tornado.autoreload started more than once in the same process",
"tornado.access: INFO: 200 PUT /private/reset_db (127.0.0.1) 2.33ms",
"tornado.access: INFO: 200 POST /api/signup (127.0.0.1) 2.27ms",
"tornado.access: INFO: 200 PUT /private/reset_db (127.0.0.1) 2.20ms",
"--------------------- >> end captured logging << ---------------------",
"",
"======================================================================",
"FAIL: okbudget.tests.test_authentication.test_myfunc",
"----------------------------------------------------------------------",
"Traceback (most recent call last):",
" File \"/Users/okcompute/Developer/Git/OkBudgetBackend/venv/lib/python3.4/site-packages/nose/case.py\", line 198, in runTest",
" self.test(*self.arg)",
" File \"/Users/okcompute/Developer/Git/OkBudgetBackend/okbudget/tests/test_authentication.py\", line 283, in test_myfunc",
" assert False",
"AssertionError",
"/Users/okcompute/Developer/Git/OkBudgetBackend/okbudget/tests/test_authentication.py:283 <AssertionError>",
"",
"----------------------------------------------------------------------",
"Ran 50 tests in 1.684s",
"",
"FAILED (errors=1, failures=3)",
]
result = parse(input)
self.assertEqual(expected, result)
| mit |
GrognardsFromHell/TemplePlus | tpdatasrc/co8fixes/scr/py00395ghost.py | 1 | 49243 | from toee import *
from utilities import *
from Co8 import *
from combat_standard_routines import *
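# Haunting sequence state machine (game.global_vars[696]), reconstructed from
# the handlers below:
#   0 -> 1  Turuko's ghost appears in the castle upper hall at night
#   1 -> 2  after his opening speech (he wants his head back) he vanishes
#   2 -> 3  Zert's ghost appears on the castle grounds (wants his hands back)
#   3 -> 4  Zert vanishes after speaking
#   4 -> 5  Kobort's ghost appears in the castle basement (wants his feet back)
#   5 -> 6  Kobort vanishes after speaking
#   6 -> 7  Turuko returns once the party carries items 12612, 12614 and 12616
#   7 -> 8  Turuko gives his concluding speech and vanishes for good
#   8 -> 9  Zert's ghost spawns the undead legion and quest 83 is completed
# game.global_flags[869] blocks resting in the castle while a ghost is active.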
def san_dialog( attachee, triggerer ):
san_start_combat(attachee, triggerer)
return SKIP_DEFAULT
def san_first_heartbeat( attachee, triggerer ):
if (attachee.name == 14662 or attachee.name == 14663):
# undead legion ghosts
if (attachee.map == 5121):
# verbobonc exterior
if (game.quests[83].state == qs_completed):
attachee.object_flag_set(OF_OFF)
elif (is_daytime() == 0):
# is nighttime
if (game.global_vars[765] >= 1):
# player has encountered Moathouse Ambush at any of the 3 locations, meaning they have killed Turuko, Zert, and Kobort and their ghosts will haunt them
if (attachee.name == 8699):
# turuko ghost
if (attachee.map == 5146):
# castle level 4 - upper hall
if (game.global_vars[696] == 0):
# turuko ghost not activated
attachee.object_flag_unset(OF_OFF)
# turn on turuko ghost
attachee.float_line(1000,triggerer)
# turuko ghost screeches!
game.global_vars[696] = 1
# turuko ghost is now on
game.global_flags[869] = 1
# castle sleep impossible flag set
elif (game.global_vars[696] == 6):
# kobort ghost has made his following speech and gone away
if anyone( triggerer.group_list(), "has_item", 12612 ) and anyone( triggerer.group_list(), "has_item", 12614 ) and anyone( triggerer.group_list(), "has_item", 12616 ):
# player has all the ghosts' parts
attachee.object_flag_unset(OF_OFF)
# turn on turuko ghost
attachee.float_line(1000,triggerer)
# turuko ghost screeches!
game.global_vars[696] = 7
# turuko ghost is now on
elif (attachee.name == 8859):
# zert ghost
if (attachee.map == 5121):
# verbo exterior - around castle
if (game.global_vars[696] == 2):
# turuko ghost has made his opening speech and gone away
attachee.object_flag_unset(OF_OFF)
# turn on zert ghost
attachee.float_line(2000,triggerer)
# zert ghost screeches!
game.global_vars[696] = 3
# zert ghost is now on
elif (game.global_vars[696] == 8):
# turuko ghost has made his concluding speech and gone away
attachee.object_flag_unset(OF_OFF)
# turn on zert ghost
undead_legion(attachee, triggerer)
# zert ghost spawns the undead legion
attachee.object_flag_set(OF_OFF)
# turn off zert ghost
elif (attachee.name == 8860):
# kobort ghost
if (attachee.map == 5143):
# castle level 1 - basement
if (game.global_vars[696] == 4):
# zert ghost has made his following speech and gone away
attachee.object_flag_unset(OF_OFF)
# turn on kobort ghost
attachee.float_line(3000,triggerer)
# kobort ghost moans!
game.global_vars[696] = 5
# kobort ghost is now on
game.global_flags[869] = 1
# castle sleep impossible flag set
elif (is_daytime() == 1):
# is daytime
attachee.object_flag_set(OF_OFF)
# turn ghosts off because they only roll at night
return RUN_DEFAULT
def san_dying( attachee, triggerer ):
if should_modify_CR( attachee ):
modify_CR( attachee, get_av_level() )
return RUN_DEFAULT
def san_start_combat( attachee, triggerer ):
for obj in game.obj_list_vicinity(attachee.location,OLC_PC):
if (attachee.name == 8699):
# turuko ghost
if (attachee.map == 5146):
# castle level 4 - upper hall
if (game.global_vars[696] == 1):
# turuko ghost activated
StopCombat(attachee, 0)
game.party[0].begin_dialog( attachee, 100 )
# turuko opening remarks, wants head back
return RUN_DEFAULT
elif (game.global_vars[696] == 7):
# turuko ghost reactivated
StopCombat(attachee, 0)
game.party[0].begin_dialog( attachee, 1 )
# turuko concluding remarks, got their stuff
return RUN_DEFAULT
elif (attachee.name == 8859):
# zert ghost
if (attachee.map == 5121):
# verbo exterior - around castle
if (game.global_vars[696] == 3):
# zert ghost activated
StopCombat(attachee, 0)
game.party[0].begin_dialog( attachee, 200 )
# zert following remarks, wants hands back
return RUN_DEFAULT
elif (attachee.name == 8860):
# kobort ghost
if (attachee.map == 5143):
# castle level 1 - basement
if (game.global_vars[696] == 5):
# kobort ghost activated
StopCombat(attachee, 0)
game.party[0].begin_dialog( attachee, 300 )
# kobort following remarks, wants feet back
return RUN_DEFAULT
else:
# random rest attacking ghosts
StopCombat(attachee, 0)
attachee.float_line(4010,triggerer)
# generic screech
attachee.object_flag_set(OF_OFF)
return RUN_DEFAULT
def san_heartbeat( attachee, triggerer ):
if (game.global_vars[696] >= 1):
if (attachee.name == 8699):
# turuko ghost
if (attachee.map == 5146):
# castle level 4 - upper hall
if (not game.combat_is_active()):
for obj in game.obj_list_vicinity(attachee.location,OLC_PC):
if (is_better_to_talk(attachee,obj)):
attachee.float_line(1010,triggerer)
# turuko ghost screeches!
game.new_sid = 0
elif (attachee.name == 8859):
# zert ghost
if (attachee.map == 5121):
# verbo exterior - around castle
if (not game.combat_is_active()):
for obj in game.obj_list_vicinity(attachee.location,OLC_PC):
if (is_cool_to_talk(attachee,obj)):
attachee.float_line(2010,triggerer)
# zert ghost screeches!
game.new_sid = 0
elif (attachee.name == 8860):
# kobort ghost
if (attachee.map == 5143):
# castle level 1 - basement
if (not game.combat_is_active()):
for obj in game.obj_list_vicinity(attachee.location,OLC_PC):
if (is_better_to_talk(attachee,obj)):
attachee.float_line(3010,triggerer)
# kobort ghost moans!
game.new_sid = 0
elif (attachee.name == 14819):
attachee.destroy()
return RUN_DEFAULT
def is_better_to_talk(speaker,listener):
if (speaker.distance_to(listener) <= 10):
return 1
return 0
def is_cool_to_talk(speaker,listener):
if (speaker.distance_to(listener) <= 25):
return 1
return 0
def go_ghost( attachee, triggerer ):
if (attachee.name == 8699):
# turuko ghost
if (attachee.map == 5146):
# castle level 4 - upper hall
if (game.global_vars[696] == 7):
game.global_vars[696] = 8
# increment var to turuko off
attachee.object_flag_set(OF_OFF)
# turn turuko ghost off
game.particles( "hit-SUBDUAL-medium", attachee )
# play particles
game.sound( 4114, 1 )
# play sound
else:
game.global_vars[696] = 2
# increment var to turuko off
attachee.object_flag_set(OF_OFF)
# turn turuko ghost off
game.particles( "hit-SUBDUAL-medium", attachee )
# play particles
game.sound( 4114, 1 )
# play sound
game.global_flags[869] = 0
# castle sleep impossible flag unset
elif (attachee.name == 8859):
# zert ghost
if (attachee.map == 5121):
# verbo exterior - around castle
game.global_vars[696] = 4
# increment var to zert off
attachee.object_flag_set(OF_OFF)
# turn zert ghost off
game.particles( "hit-SUBDUAL-medium", attachee )
# play particles
game.sound( 4114, 1 )
# play sound
elif (attachee.name == 8860):
# kobort ghost
if (attachee.map == 5143):
# castle level 1 - basement
game.global_vars[696] = 6
# increment var to kobort off
attachee.object_flag_set(OF_OFF)
# turn kobort ghost off
game.particles( "hit-SUBDUAL-medium", attachee )
# play particles
game.sound( 4114, 1 )
# play sound
game.global_flags[869] = 0
# castle sleep impossible flag unset
return RUN_DEFAULT
def dump_parts( attachee, triggerer ):
party_transfer_to( attachee, 12612 )
party_transfer_to( attachee, 12614 )
party_transfer_to( attachee, 12616 )
return RUN_DEFAULT
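# The spawn sequence below is deliberately left fully unrolled; a minimal
# data-driven sketch of the same pattern is shown here for reference only
# (spawn_wave and ROTATIONS are illustrative names, not part of this script):
#
#   ROTATIONS = (1.5, 2.5, 3.5, 4.5, 5.5, 0.5)
#   def spawn_wave(rows, triggerer, delay=30300):
#       # rows is a list of (proto, x, y) tuples; in the unrolled code every
#       # fifth spawn uses proto 14662 and the rest use 14663
#       for i, (proto, x, y) in enumerate(rows):
#           npc = game.obj_create(proto, location_from_axis(x, y))
#           npc.rotation = ROTATIONS[i % 6]
#           game.timevent_add(bye_bye, (npc, triggerer), delay)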
def undead_legion(attachee, triggerer):
q01 = game.obj_create(14662, location_from_axis(732, 393))
q01.rotation = 1.5
game.timevent_add( bye_bye, ( q01, triggerer ), 30300 )
q02 = game.obj_create(14663, location_from_axis(736, 393))
q02.rotation = 2.5
game.timevent_add( bye_bye, ( q02, triggerer ), 30300 )
q03 = game.obj_create(14663, location_from_axis(740, 393))
q03.rotation = 3.5
game.timevent_add( bye_bye, ( q03, triggerer ), 30300 )
q04 = game.obj_create(14663, location_from_axis(744, 393))
q04.rotation = 4.5
game.timevent_add( bye_bye, ( q04, triggerer ), 30300 )
q05 = game.obj_create(14663, location_from_axis(748, 397))
q05.rotation = 5.5
game.timevent_add( bye_bye, ( q05, triggerer ), 30300 )
q06 = game.obj_create(14662, location_from_axis(740, 389))
q06.rotation = 0.5
game.timevent_add( bye_bye, ( q06, triggerer ), 30300 )
q07 = game.obj_create(14663, location_from_axis(732, 389))
q07.rotation = 1.5
game.timevent_add( bye_bye, ( q07, triggerer ), 30300 )
q08 = game.obj_create(14663, location_from_axis(728, 389))
q08.rotation = 2.5
game.timevent_add( bye_bye, ( q08, triggerer ), 30300 )
q09 = game.obj_create(14663, location_from_axis(724, 389))
q09.rotation = 3.5
game.timevent_add( bye_bye, ( q09, triggerer ), 30300 )
q10 = game.obj_create(14663, location_from_axis(720, 389))
q10.rotation = 4.5
game.timevent_add( bye_bye, ( q10, triggerer ), 30300 )
q11 = game.obj_create(14662, location_from_axis(716, 389))
q11.rotation = 5.5
game.timevent_add( bye_bye, ( q11, triggerer ), 30300 )
q12 = game.obj_create(14663, location_from_axis(712, 389))
q12.rotation = 0.5
	game.timevent_add( bye_bye, ( q12, triggerer ), 30300 )
q13 = game.obj_create(14663, location_from_axis(708, 389))
q13.rotation = 1.5
game.timevent_add( bye_bye, ( q13, triggerer ), 30300 )
q14 = game.obj_create(14663, location_from_axis(704, 389))
q14.rotation = 2.5
game.timevent_add( bye_bye, ( q14, triggerer ), 30300 )
q15 = game.obj_create(14663, location_from_axis(700, 389))
q15.rotation = 3.5
game.timevent_add( bye_bye, ( q15, triggerer ), 30300 )
q16 = game.obj_create(14662, location_from_axis(704, 393))
q16.rotation = 4.5
game.timevent_add( bye_bye, ( q16, triggerer ), 30300 )
q17 = game.obj_create(14663, location_from_axis(696, 389))
q17.rotation = 5.5
game.timevent_add( bye_bye, ( q17, triggerer ), 30300 )
q18 = game.obj_create(14663, location_from_axis(732, 385))
q18.rotation = 0.5
game.timevent_add( bye_bye, ( q18, triggerer ), 30300 )
q19 = game.obj_create(14663, location_from_axis(728, 385))
q19.rotation = 1.5
game.timevent_add( bye_bye, ( q19, triggerer ), 30300 )
q20 = game.obj_create(14663, location_from_axis(724, 385))
q20.rotation = 2.5
game.timevent_add( bye_bye, ( q20, triggerer ), 30300 )
q21 = game.obj_create(14662, location_from_axis(720, 385))
q21.rotation = 3.5
game.timevent_add( bye_bye, ( q21, triggerer ), 30300 )
q22 = game.obj_create(14663, location_from_axis(716, 385))
q22.rotation = 4.5
game.timevent_add( bye_bye, ( q22, triggerer ), 30300 )
q23 = game.obj_create(14663, location_from_axis(712, 385))
q23.rotation = 5.5
game.timevent_add( bye_bye, ( q23, triggerer ), 30300 )
q24 = game.obj_create(14663, location_from_axis(708, 385))
q24.rotation = 0.5
game.timevent_add( bye_bye, ( q24, triggerer ), 30300 )
q25 = game.obj_create(14663, location_from_axis(704, 385))
q25.rotation = 1.5
game.timevent_add( bye_bye, ( q25, triggerer ), 30300 )
q26 = game.obj_create(14662, location_from_axis(700, 385))
q26.rotation = 2.5
game.timevent_add( bye_bye, ( q26, triggerer ), 30300 )
q27 = game.obj_create(14663, location_from_axis(696, 385))
q27.rotation = 3.5
game.timevent_add( bye_bye, ( q27, triggerer ), 30300 )
q28 = game.obj_create(14663, location_from_axis(692, 385))
q28.rotation = 4.5
game.timevent_add( bye_bye, ( q28, triggerer ), 30300 )
q29 = game.obj_create(14663, location_from_axis(744, 389))
q29.rotation = 5.5
game.timevent_add( bye_bye, ( q29, triggerer ), 30300 )
q30 = game.obj_create(14663, location_from_axis(748, 393))
q30.rotation = 0.5
game.timevent_add( bye_bye, ( q30, triggerer ), 30300 )
r01 = game.obj_create(14662, location_from_axis(680, 449))
r01.rotation = 1.5
game.timevent_add( bye_bye, ( r01, triggerer ), 30300 )
r02 = game.obj_create(14663, location_from_axis(680, 453))
r02.rotation = 2.5
game.timevent_add( bye_bye, ( r02, triggerer ), 30300 )
r03 = game.obj_create(14663, location_from_axis(680, 457))
r03.rotation = 3.5
game.timevent_add( bye_bye, ( r03, triggerer ), 30300 )
r04 = game.obj_create(14663, location_from_axis(680, 461))
r04.rotation = 4.5
game.timevent_add( bye_bye, ( r04, triggerer ), 30300 )
r05 = game.obj_create(14663, location_from_axis(680, 465))
r05.rotation = 5.5
game.timevent_add( bye_bye, ( r05, triggerer ), 30300 )
r06 = game.obj_create(14662, location_from_axis(680, 469))
r06.rotation = 0.5
game.timevent_add( bye_bye, ( r06, triggerer ), 30300 )
r07 = game.obj_create(14663, location_from_axis(680, 473))
r07.rotation = 1.5
game.timevent_add( bye_bye, ( r07, triggerer ), 30300 )
r08 = game.obj_create(14663, location_from_axis(740, 409))
r08.rotation = 2.5
game.timevent_add( bye_bye, ( r08, triggerer ), 30300 )
r09 = game.obj_create(14663, location_from_axis(740, 413))
r09.rotation = 3.5
game.timevent_add( bye_bye, ( r09, triggerer ), 30300 )
r10 = game.obj_create(14663, location_from_axis(752, 409))
r10.rotation = 4.5
game.timevent_add( bye_bye, ( r10, triggerer ), 30300 )
r11 = game.obj_create(14662, location_from_axis(748, 405))
r11.rotation = 5.5
game.timevent_add( bye_bye, ( r11, triggerer ), 30300 )
r12 = game.obj_create(14663, location_from_axis(752, 405))
r12.rotation = 0.5
	game.timevent_add( bye_bye, ( r12, triggerer ), 30300 )
r13 = game.obj_create(14663, location_from_axis(752, 401))
r13.rotation = 1.5
game.timevent_add( bye_bye, ( r13, triggerer ), 30300 )
r14 = game.obj_create(14663, location_from_axis(736, 401))
r14.rotation = 2.5
game.timevent_add( bye_bye, ( r14, triggerer ), 30300 )
r15 = game.obj_create(14663, location_from_axis(732, 401))
r15.rotation = 3.5
game.timevent_add( bye_bye, ( r15, triggerer ), 30300 )
r16 = game.obj_create(14662, location_from_axis(728, 401))
r16.rotation = 4.5
game.timevent_add( bye_bye, ( r16, triggerer ), 30300 )
r17 = game.obj_create(14663, location_from_axis(724, 401))
r17.rotation = 5.5
game.timevent_add( bye_bye, ( r17, triggerer ), 30300 )
r18 = game.obj_create(14663, location_from_axis(732, 397))
r18.rotation = 0.5
game.timevent_add( bye_bye, ( r18, triggerer ), 30300 )
r19 = game.obj_create(14663, location_from_axis(728, 397))
r19.rotation = 1.5
game.timevent_add( bye_bye, ( r19, triggerer ), 30300 )
r20 = game.obj_create(14663, location_from_axis(724, 397))
r20.rotation = 2.5
game.timevent_add( bye_bye, ( r20, triggerer ), 30300 )
r21 = game.obj_create(14662, location_from_axis(720, 397))
r21.rotation = 3.5
game.timevent_add( bye_bye, ( r21, triggerer ), 30300 )
r22 = game.obj_create(14663, location_from_axis(716, 397))
r22.rotation = 4.5
game.timevent_add( bye_bye, ( r22, triggerer ), 30300 )
r23 = game.obj_create(14663, location_from_axis(720, 401))
r23.rotation = 5.5
game.timevent_add( bye_bye, ( r23, triggerer ), 30300 )
r24 = game.obj_create(14663, location_from_axis(712, 397))
r24.rotation = 0.5
game.timevent_add( bye_bye, ( r24, triggerer ), 30300 )
r25 = game.obj_create(14663, location_from_axis(728, 393))
r25.rotation = 1.5
game.timevent_add( bye_bye, ( r25, triggerer ), 30300 )
r26 = game.obj_create(14662, location_from_axis(724, 393))
r26.rotation = 2.5
game.timevent_add( bye_bye, ( r26, triggerer ), 30300 )
r27 = game.obj_create(14663, location_from_axis(720, 393))
r27.rotation = 3.5
game.timevent_add( bye_bye, ( r27, triggerer ), 30300 )
r28 = game.obj_create(14663, location_from_axis(716, 393))
r28.rotation = 4.5
game.timevent_add( bye_bye, ( r28, triggerer ), 30300 )
r29 = game.obj_create(14663, location_from_axis(712, 393))
r29.rotation = 5.5
game.timevent_add( bye_bye, ( r29, triggerer ), 30300 )
r30 = game.obj_create(14663, location_from_axis(708, 393))
r30.rotation = 0.5
game.timevent_add( bye_bye, ( r30, triggerer ), 30300 )
s01 = game.obj_create(14662, location_from_axis(664, 417))
s01.rotation = 1.5
game.timevent_add( bye_bye, ( s01, triggerer ), 30300 )
s02 = game.obj_create(14663, location_from_axis(664, 421))
s02.rotation = 2.5
game.timevent_add( bye_bye, ( s02, triggerer ), 30300 )
s03 = game.obj_create(14663, location_from_axis(664, 425))
s03.rotation = 3.5
game.timevent_add( bye_bye, ( s03, triggerer ), 30300 )
s04 = game.obj_create(14663, location_from_axis(664, 429))
s04.rotation = 4.5
game.timevent_add( bye_bye, ( s04, triggerer ), 30300 )
s05 = game.obj_create(14663, location_from_axis(664, 437))
s05.rotation = 5.5
game.timevent_add( bye_bye, ( s05, triggerer ), 30300 )
s06 = game.obj_create(14662, location_from_axis(664, 441))
s06.rotation = 0.5
game.timevent_add( bye_bye, ( s06, triggerer ), 30300 )
s07 = game.obj_create(14663, location_from_axis(664, 449))
s07.rotation = 1.5
game.timevent_add( bye_bye, ( s07, triggerer ), 30300 )
s08 = game.obj_create(14663, location_from_axis(664, 453))
s08.rotation = 2.5
game.timevent_add( bye_bye, ( s08, triggerer ), 30300 )
s09 = game.obj_create(14663, location_from_axis(664, 457))
s09.rotation = 3.5
game.timevent_add( bye_bye, ( s09, triggerer ), 30300 )
s10 = game.obj_create(14663, location_from_axis(664, 461))
s10.rotation = 4.5
game.timevent_add( bye_bye, ( s10, triggerer ), 30300 )
s11 = game.obj_create(14662, location_from_axis(664, 465))
s11.rotation = 5.5
game.timevent_add( bye_bye, ( s11, triggerer ), 30300 )
s12 = game.obj_create(14663, location_from_axis(664, 469))
s12.rotation = 0.5
	game.timevent_add( bye_bye, ( s12, triggerer ), 30300 )
s13 = game.obj_create(14663, location_from_axis(660, 421))
s13.rotation = 1.5
game.timevent_add( bye_bye, ( s13, triggerer ), 30300 )
s14 = game.obj_create(14663, location_from_axis(660, 425))
s14.rotation = 2.5
game.timevent_add( bye_bye, ( s14, triggerer ), 30300 )
s15 = game.obj_create(14663, location_from_axis(660, 429))
s15.rotation = 3.5
game.timevent_add( bye_bye, ( s15, triggerer ), 30300 )
s16 = game.obj_create(14662, location_from_axis(660, 433))
s16.rotation = 4.5
game.timevent_add( bye_bye, ( s16, triggerer ), 30300 )
s17 = game.obj_create(14663, location_from_axis(660, 437))
s17.rotation = 5.5
game.timevent_add( bye_bye, ( s17, triggerer ), 30300 )
s18 = game.obj_create(14663, location_from_axis(660, 449))
s18.rotation = 0.5
game.timevent_add( bye_bye, ( s18, triggerer ), 30300 )
s19 = game.obj_create(14663, location_from_axis(660, 453))
s19.rotation = 1.5
game.timevent_add( bye_bye, ( s19, triggerer ), 30300 )
s20 = game.obj_create(14663, location_from_axis(660, 457))
s20.rotation = 2.5
game.timevent_add( bye_bye, ( s20, triggerer ), 30300 )
s21 = game.obj_create(14662, location_from_axis(660, 461))
s21.rotation = 3.5
game.timevent_add( bye_bye, ( s21, triggerer ), 30300 )
s22 = game.obj_create(14663, location_from_axis(660, 465))
s22.rotation = 4.5
game.timevent_add( bye_bye, ( s22, triggerer ), 30300 )
s23 = game.obj_create(14663, location_from_axis(656, 425))
s23.rotation = 5.5
game.timevent_add( bye_bye, ( s23, triggerer ), 30300 )
s24 = game.obj_create(14663, location_from_axis(656, 429))
s24.rotation = 0.5
game.timevent_add( bye_bye, ( s24, triggerer ), 30300 )
s25 = game.obj_create(14663, location_from_axis(656, 433))
s25.rotation = 1.5
game.timevent_add( bye_bye, ( s25, triggerer ), 30300 )
s26 = game.obj_create(14662, location_from_axis(656, 437))
s26.rotation = 2.5
game.timevent_add( bye_bye, ( s26, triggerer ), 30300 )
s27 = game.obj_create(14663, location_from_axis(656, 441))
s27.rotation = 3.5
game.timevent_add( bye_bye, ( s27, triggerer ), 30300 )
s28 = game.obj_create(14663, location_from_axis(656, 445))
s28.rotation = 4.5
game.timevent_add( bye_bye, ( s28, triggerer ), 30300 )
s29 = game.obj_create(14663, location_from_axis(656, 449))
s29.rotation = 5.5
game.timevent_add( bye_bye, ( s29, triggerer ), 30300 )
s30 = game.obj_create(14663, location_from_axis(656, 457))
s30.rotation = 0.5
game.timevent_add( bye_bye, ( s30, triggerer ), 30300 )
t01 = game.obj_create(14662, location_from_axis(656, 461))
t01.rotation = 1.5
game.timevent_add( bye_bye, ( t01, triggerer ), 30300 )
t02 = game.obj_create(14663, location_from_axis(676, 445))
t02.rotation = 2.5
game.timevent_add( bye_bye, ( t02, triggerer ), 30300 )
t03 = game.obj_create(14663, location_from_axis(676, 449))
t03.rotation = 3.5
game.timevent_add( bye_bye, ( t03, triggerer ), 30300 )
t04 = game.obj_create(14663, location_from_axis(676, 453))
t04.rotation = 4.5
game.timevent_add( bye_bye, ( t04, triggerer ), 30300 )
t05 = game.obj_create(14663, location_from_axis(676, 457))
t05.rotation = 5.5
game.timevent_add( bye_bye, ( t05, triggerer ), 30300 )
t06 = game.obj_create(14662, location_from_axis(676, 461))
t06.rotation = 0.5
game.timevent_add( bye_bye, ( t06, triggerer ), 30300 )
t07 = game.obj_create(14663, location_from_axis(676, 465))
t07.rotation = 1.5
game.timevent_add( bye_bye, ( t07, triggerer ), 30300 )
t08 = game.obj_create(14663, location_from_axis(676, 469))
t08.rotation = 2.5
game.timevent_add( bye_bye, ( t08, triggerer ), 30300 )
t09 = game.obj_create(14663, location_from_axis(676, 473))
t09.rotation = 3.5
game.timevent_add( bye_bye, ( t09, triggerer ), 30300 )
t10 = game.obj_create(14663, location_from_axis(680, 433))
t10.rotation = 4.5
game.timevent_add( bye_bye, ( t10, triggerer ), 30300 )
t11 = game.obj_create(14662, location_from_axis(676, 433))
t11.rotation = 5.5
game.timevent_add( bye_bye, ( t11, triggerer ), 30300 )
t12 = game.obj_create(14663, location_from_axis(672, 433))
t12.rotation = 0.5
game.timevent_add( bye_bye, ( t12, triggerer ), 30300 )
t13 = game.obj_create(14663, location_from_axis(672, 437))
t13.rotation = 1.5
game.timevent_add( bye_bye, ( t13, triggerer ), 30300 )
t14 = game.obj_create(14663, location_from_axis(672, 441))
t14.rotation = 2.5
game.timevent_add( bye_bye, ( t14, triggerer ), 30300 )
t15 = game.obj_create(14663, location_from_axis(672, 445))
t15.rotation = 3.5
game.timevent_add( bye_bye, ( t15, triggerer ), 30300 )
t16 = game.obj_create(14662, location_from_axis(672, 449))
t16.rotation = 4.5
game.timevent_add( bye_bye, ( t16, triggerer ), 30300 )
t17 = game.obj_create(14663, location_from_axis(672, 453))
t17.rotation = 5.5
game.timevent_add( bye_bye, ( t17, triggerer ), 30300 )
t18 = game.obj_create(14663, location_from_axis(672, 457))
t18.rotation = 0.5
game.timevent_add( bye_bye, ( t18, triggerer ), 30300 )
t19 = game.obj_create(14663, location_from_axis(672, 461))
t19.rotation = 1.5
game.timevent_add( bye_bye, ( t19, triggerer ), 30300 )
t20 = game.obj_create(14663, location_from_axis(672, 465))
t20.rotation = 2.5
game.timevent_add( bye_bye, ( t20, triggerer ), 30300 )
t21 = game.obj_create(14662, location_from_axis(672, 469))
t21.rotation = 3.5
game.timevent_add( bye_bye, ( t21, triggerer ), 30300 )
t22 = game.obj_create(14663, location_from_axis(672, 473))
t22.rotation = 4.5
game.timevent_add( bye_bye, ( t22, triggerer ), 30300 )
t23 = game.obj_create(14663, location_from_axis(668, 421))
t23.rotation = 5.5
game.timevent_add( bye_bye, ( t23, triggerer ), 30300 )
t24 = game.obj_create(14663, location_from_axis(668, 425))
t24.rotation = 0.5
game.timevent_add( bye_bye, ( t24, triggerer ), 30300 )
t25 = game.obj_create(14663, location_from_axis(668, 429))
t25.rotation = 1.5
game.timevent_add( bye_bye, ( t25, triggerer ), 30300 )
t26 = game.obj_create(14662, location_from_axis(668, 441))
t26.rotation = 2.5
game.timevent_add( bye_bye, ( t26, triggerer ), 30300 )
t27 = game.obj_create(14663, location_from_axis(668, 445))
t27.rotation = 3.5
game.timevent_add( bye_bye, ( t27, triggerer ), 30300 )
t28 = game.obj_create(14663, location_from_axis(668, 449))
t28.rotation = 4.5
game.timevent_add( bye_bye, ( t28, triggerer ), 30300 )
t29 = game.obj_create(14663, location_from_axis(668, 461))
t29.rotation = 5.5
game.timevent_add( bye_bye, ( t29, triggerer ), 30300 )
t30 = game.obj_create(14663, location_from_axis(668, 473))
t30.rotation = 0.5
game.timevent_add( bye_bye, ( t30, triggerer ), 30300 )
u01 = game.obj_create(14662, location_from_axis(664, 413))
u01.rotation = 1.5
game.timevent_add( bye_bye, ( u01, triggerer ), 30300 )
u02 = game.obj_create(14663, location_from_axis(740, 417))
u02.rotation = 2.5
game.timevent_add( bye_bye, ( u02, triggerer ), 30300 )
u03 = game.obj_create(14663, location_from_axis(740, 421))
u03.rotation = 3.5
game.timevent_add( bye_bye, ( u03, triggerer ), 30300 )
u04 = game.obj_create(14663, location_from_axis(740, 425))
u04.rotation = 4.5
game.timevent_add( bye_bye, ( u04, triggerer ), 30300 )
u05 = game.obj_create(14663, location_from_axis(740, 429))
u05.rotation = 5.5
game.timevent_add( bye_bye, ( u05, triggerer ), 30300 )
u06 = game.obj_create(14662, location_from_axis(740, 433))
u06.rotation = 0.5
game.timevent_add( bye_bye, ( u06, triggerer ), 30300 )
u07 = game.obj_create(14663, location_from_axis(740, 437))
u07.rotation = 1.5
game.timevent_add( bye_bye, ( u07, triggerer ), 30300 )
u08 = game.obj_create(14663, location_from_axis(740, 441))
u08.rotation = 2.5
game.timevent_add( bye_bye, ( u08, triggerer ), 30300 )
u09 = game.obj_create(14663, location_from_axis(740, 445))
u09.rotation = 3.5
game.timevent_add( bye_bye, ( u09, triggerer ), 30300 )
u10 = game.obj_create(14663, location_from_axis(740, 449))
u10.rotation = 4.5
game.timevent_add( bye_bye, ( u10, triggerer ), 30300 )
u11 = game.obj_create(14662, location_from_axis(740, 453))
u11.rotation = 5.5
game.timevent_add( bye_bye, ( u11, triggerer ), 30300 )
u12 = game.obj_create(14663, location_from_axis(740, 457))
u12.rotation = 0.5
game.timevent_add( bye_bye, ( u12, triggerer ), 30300 )
u13 = game.obj_create(14663, location_from_axis(744, 413))
u13.rotation = 1.5
game.timevent_add( bye_bye, ( u13, triggerer ), 30300 )
u14 = game.obj_create(14663, location_from_axis(744, 417))
u14.rotation = 2.5
game.timevent_add( bye_bye, ( u14, triggerer ), 30300 )
u15 = game.obj_create(14663, location_from_axis(744, 421))
u15.rotation = 3.5
game.timevent_add( bye_bye, ( u15, triggerer ), 30300 )
u16 = game.obj_create(14662, location_from_axis(744, 425))
u16.rotation = 4.5
game.timevent_add( bye_bye, ( u16, triggerer ), 30300 )
u17 = game.obj_create(14663, location_from_axis(744, 429))
u17.rotation = 5.5
game.timevent_add( bye_bye, ( u17, triggerer ), 30300 )
u18 = game.obj_create(14663, location_from_axis(744, 433))
u18.rotation = 0.5
game.timevent_add( bye_bye, ( u18, triggerer ), 30300 )
u19 = game.obj_create(14663, location_from_axis(744, 437))
u19.rotation = 1.5
game.timevent_add( bye_bye, ( u19, triggerer ), 30300 )
u20 = game.obj_create(14663, location_from_axis(744, 441))
u20.rotation = 2.5
game.timevent_add( bye_bye, ( u20, triggerer ), 30300 )
u21 = game.obj_create(14662, location_from_axis(744, 445))
u21.rotation = 3.5
game.timevent_add( bye_bye, ( u21, triggerer ), 30300 )
u22 = game.obj_create(14663, location_from_axis(748, 413))
u22.rotation = 4.5
game.timevent_add( bye_bye, ( u22, triggerer ), 30300 )
u23 = game.obj_create(14663, location_from_axis(748, 417))
u23.rotation = 5.5
game.timevent_add( bye_bye, ( u23, triggerer ), 30300 )
u24 = game.obj_create(14663, location_from_axis(748, 421))
u24.rotation = 0.5
game.timevent_add( bye_bye, ( u24, triggerer ), 30300 )
u25 = game.obj_create(14663, location_from_axis(748, 425))
u25.rotation = 1.5
game.timevent_add( bye_bye, ( u25, triggerer ), 30300 )
u26 = game.obj_create(14662, location_from_axis(748, 429))
u26.rotation = 2.5
game.timevent_add( bye_bye, ( u26, triggerer ), 30300 )
u27 = game.obj_create(14663, location_from_axis(748, 433))
u27.rotation = 3.5
game.timevent_add( bye_bye, ( u27, triggerer ), 30300 )
u28 = game.obj_create(14663, location_from_axis(748, 437))
u28.rotation = 4.5
game.timevent_add( bye_bye, ( u28, triggerer ), 30300 )
u29 = game.obj_create(14663, location_from_axis(748, 441))
u29.rotation = 5.5
game.timevent_add( bye_bye, ( u29, triggerer ), 30300 )
u30 = game.obj_create(14663, location_from_axis(752, 413))
u30.rotation = 0.5
game.timevent_add( bye_bye, ( u30, triggerer ), 30300 )
v01 = game.obj_create(14662, location_from_axis(752, 417))
v01.rotation = 1.5
game.timevent_add( bye_bye, ( v01, triggerer ), 30300 )
v02 = game.obj_create(14663, location_from_axis(752, 421))
v02.rotation = 2.5
game.timevent_add( bye_bye, ( v02, triggerer ), 30300 )
v03 = game.obj_create(14663, location_from_axis(752, 425))
v03.rotation = 3.5
game.timevent_add( bye_bye, ( v03, triggerer ), 30300 )
v04 = game.obj_create(14663, location_from_axis(752, 429))
v04.rotation = 4.5
game.timevent_add( bye_bye, ( v04, triggerer ), 30300 )
v05 = game.obj_create(14663, location_from_axis(752, 433))
v05.rotation = 5.5
game.timevent_add( bye_bye, ( v05, triggerer ), 30300 )
v06 = game.obj_create(14662, location_from_axis(752, 437))
v06.rotation = 0.5
game.timevent_add( bye_bye, ( v06, triggerer ), 30300 )
v07 = game.obj_create(14663, location_from_axis(752, 441))
v07.rotation = 1.5
game.timevent_add( bye_bye, ( v07, triggerer ), 30300 )
v08 = game.obj_create(14663, location_from_axis(736, 417))
v08.rotation = 2.5
game.timevent_add( bye_bye, ( v08, triggerer ), 30300 )
v09 = game.obj_create(14663, location_from_axis(736, 429))
v09.rotation = 3.5
game.timevent_add( bye_bye, ( v09, triggerer ), 30300 )
v10 = game.obj_create(14663, location_from_axis(736, 433))
v10.rotation = 4.5
game.timevent_add( bye_bye, ( v10, triggerer ), 30300 )
v11 = game.obj_create(14662, location_from_axis(736, 437))
v11.rotation = 5.5
game.timevent_add( bye_bye, ( v11, triggerer ), 30300 )
v12 = game.obj_create(14663, location_from_axis(736, 441))
v12.rotation = 0.5
game.timevent_add( bye_bye, ( v12, triggerer ), 30300 )
v13 = game.obj_create(14663, location_from_axis(736, 445))
v13.rotation = 1.5
game.timevent_add( bye_bye, ( v13, triggerer ), 30300 )
v14 = game.obj_create(14663, location_from_axis(736, 449))
v14.rotation = 2.5
game.timevent_add( bye_bye, ( v14, triggerer ), 30300 )
v15 = game.obj_create(14663, location_from_axis(736, 453))
v15.rotation = 3.5
game.timevent_add( bye_bye, ( v15, triggerer ), 30300 )
v16 = game.obj_create(14662, location_from_axis(736, 457))
v16.rotation = 4.5
game.timevent_add( bye_bye, ( v16, triggerer ), 30300 )
v17 = game.obj_create(14663, location_from_axis(736, 461))
v17.rotation = 5.5
game.timevent_add( bye_bye, ( v17, triggerer ), 30300 )
v18 = game.obj_create(14663, location_from_axis(732, 441))
v18.rotation = 0.5
game.timevent_add( bye_bye, ( v18, triggerer ), 30300 )
v19 = game.obj_create(14663, location_from_axis(732, 445))
v19.rotation = 1.5
game.timevent_add( bye_bye, ( v19, triggerer ), 30300 )
v20 = game.obj_create(14663, location_from_axis(732, 449))
v20.rotation = 2.5
game.timevent_add( bye_bye, ( v20, triggerer ), 30300 )
v21 = game.obj_create(14662, location_from_axis(732, 453))
v21.rotation = 3.5
game.timevent_add( bye_bye, ( v21, triggerer ), 30300 )
v22 = game.obj_create(14663, location_from_axis(732, 457))
v22.rotation = 4.5
game.timevent_add( bye_bye, ( v22, triggerer ), 30300 )
v23 = game.obj_create(14663, location_from_axis(732, 461))
v23.rotation = 5.5
game.timevent_add( bye_bye, ( v23, triggerer ), 30300 )
v24 = game.obj_create(14663, location_from_axis(732, 465))
v24.rotation = 0.5
game.timevent_add( bye_bye, ( v24, triggerer ), 30300 )
v25 = game.obj_create(14663, location_from_axis(732, 469))
v25.rotation = 1.5
game.timevent_add( bye_bye, ( v25, triggerer ), 30300 )
v26 = game.obj_create(14662, location_from_axis(732, 473))
v26.rotation = 2.5
game.timevent_add( bye_bye, ( v26, triggerer ), 30300 )
v27 = game.obj_create(14663, location_from_axis(728, 445))
v27.rotation = 3.5
game.timevent_add( bye_bye, ( v27, triggerer ), 30300 )
v28 = game.obj_create(14663, location_from_axis(728, 449))
v28.rotation = 4.5
game.timevent_add( bye_bye, ( v28, triggerer ), 30300 )
v29 = game.obj_create(14663, location_from_axis(728, 453))
v29.rotation = 5.5
game.timevent_add( bye_bye, ( v29, triggerer ), 30300 )
v30 = game.obj_create(14663, location_from_axis(728, 457))
v30.rotation = 0.5
game.timevent_add( bye_bye, ( v30, triggerer ), 30300 )
w01 = game.obj_create(14662, location_from_axis(728, 461))
w01.rotation = 1.5
game.timevent_add( bye_bye, ( w01, triggerer ), 30300 )
w02 = game.obj_create(14663, location_from_axis(728, 465))
w02.rotation = 2.5
game.timevent_add( bye_bye, ( w02, triggerer ), 30300 )
w03 = game.obj_create(14663, location_from_axis(724, 445))
w03.rotation = 3.5
game.timevent_add( bye_bye, ( w03, triggerer ), 30300 )
w04 = game.obj_create(14663, location_from_axis(724, 449))
w04.rotation = 4.5
game.timevent_add( bye_bye, ( w04, triggerer ), 30300 )
w05 = game.obj_create(14663, location_from_axis(724, 453))
w05.rotation = 5.5
game.timevent_add( bye_bye, ( w05, triggerer ), 30300 )
w06 = game.obj_create(14662, location_from_axis(724, 457))
w06.rotation = 0.5
game.timevent_add( bye_bye, ( w06, triggerer ), 30300 )
w07 = game.obj_create(14663, location_from_axis(724, 461))
w07.rotation = 1.5
game.timevent_add( bye_bye, ( w07, triggerer ), 30300 )
w08 = game.obj_create(14663, location_from_axis(724, 465))
w08.rotation = 2.5
game.timevent_add( bye_bye, ( w08, triggerer ), 30300 )
w09 = game.obj_create(14663, location_from_axis(720, 449))
w09.rotation = 3.5
game.timevent_add( bye_bye, ( w09, triggerer ), 30300 )
w10 = game.obj_create(14663, location_from_axis(720, 453))
w10.rotation = 4.5
game.timevent_add( bye_bye, ( w10, triggerer ), 30300 )
w11 = game.obj_create(14662, location_from_axis(720, 457))
w11.rotation = 5.5
game.timevent_add( bye_bye, ( w11, triggerer ), 30300 )
w12 = game.obj_create(14663, location_from_axis(720, 461))
w12.rotation = 0.5
	game.timevent_add( bye_bye, ( w12, triggerer ), 30300 )
w13 = game.obj_create(14663, location_from_axis(720, 465))
w13.rotation = 1.5
game.timevent_add( bye_bye, ( w13, triggerer ), 30300 )
w14 = game.obj_create(14663, location_from_axis(720, 469))
w14.rotation = 2.5
game.timevent_add( bye_bye, ( w14, triggerer ), 30300 )
w15 = game.obj_create(14663, location_from_axis(716, 449))
w15.rotation = 3.5
game.timevent_add( bye_bye, ( w15, triggerer ), 30300 )
w16 = game.obj_create(14662, location_from_axis(716, 453))
w16.rotation = 4.5
game.timevent_add( bye_bye, ( w16, triggerer ), 30300 )
w17 = game.obj_create(14663, location_from_axis(716, 457))
w17.rotation = 5.5
game.timevent_add( bye_bye, ( w17, triggerer ), 30300 )
w18 = game.obj_create(14663, location_from_axis(716, 461))
w18.rotation = 0.5
game.timevent_add( bye_bye, ( w18, triggerer ), 30300 )
w19 = game.obj_create(14663, location_from_axis(716, 465))
w19.rotation = 1.5
game.timevent_add( bye_bye, ( w19, triggerer ), 30300 )
w20 = game.obj_create(14663, location_from_axis(716, 469))
w20.rotation = 2.5
game.timevent_add( bye_bye, ( w20, triggerer ), 30300 )
w21 = game.obj_create(14662, location_from_axis(716, 473))
w21.rotation = 3.5
game.timevent_add( bye_bye, ( w21, triggerer ), 30300 )
w22 = game.obj_create(14663, location_from_axis(716, 477))
w22.rotation = 4.5
game.timevent_add( bye_bye, ( w22, triggerer ), 30300 )
w23 = game.obj_create(14663, location_from_axis(716, 481))
w23.rotation = 5.5
game.timevent_add( bye_bye, ( w23, triggerer ), 30300 )
w24 = game.obj_create(14663, location_from_axis(716, 485))
w24.rotation = 0.5
game.timevent_add( bye_bye, ( w24, triggerer ), 30300 )
w25 = game.obj_create(14663, location_from_axis(716, 489))
w25.rotation = 1.5
game.timevent_add( bye_bye, ( w25, triggerer ), 30300 )
w26 = game.obj_create(14662, location_from_axis(716, 493))
w26.rotation = 2.5
game.timevent_add( bye_bye, ( w26, triggerer ), 30300 )
w27 = game.obj_create(14663, location_from_axis(716, 497))
w27.rotation = 3.5
game.timevent_add( bye_bye, ( w27, triggerer ), 30300 )
w28 = game.obj_create(14663, location_from_axis(712, 453))
w28.rotation = 4.5
game.timevent_add( bye_bye, ( w28, triggerer ), 30300 )
w29 = game.obj_create(14663, location_from_axis(712, 457))
w29.rotation = 5.5
game.timevent_add( bye_bye, ( w29, triggerer ), 30300 )
w30 = game.obj_create(14663, location_from_axis(712, 461))
w30.rotation = 0.5
game.timevent_add( bye_bye, ( w30, triggerer ), 30300 )
x01 = game.obj_create(14662, location_from_axis(712, 465))
x01.rotation = 1.5
game.timevent_add( bye_bye, ( x01, triggerer ), 30300 )
x02 = game.obj_create(14663, location_from_axis(708, 453))
x02.rotation = 2.5
game.timevent_add( bye_bye, ( x02, triggerer ), 30300 )
x03 = game.obj_create(14663, location_from_axis(712, 473))
x03.rotation = 3.5
game.timevent_add( bye_bye, ( x03, triggerer ), 30300 )
x04 = game.obj_create(14663, location_from_axis(712, 477))
x04.rotation = 4.5
game.timevent_add( bye_bye, ( x04, triggerer ), 30300 )
x05 = game.obj_create(14663, location_from_axis(712, 481))
x05.rotation = 5.5
game.timevent_add( bye_bye, ( x05, triggerer ), 30300 )
x06 = game.obj_create(14662, location_from_axis(712, 485))
x06.rotation = 0.5
game.timevent_add( bye_bye, ( x06, triggerer ), 30300 )
x07 = game.obj_create(14663, location_from_axis(712, 489))
x07.rotation = 1.5
game.timevent_add( bye_bye, ( x07, triggerer ), 30300 )
x08 = game.obj_create(14663, location_from_axis(712, 493))
x08.rotation = 2.5
game.timevent_add( bye_bye, ( x08, triggerer ), 30300 )
x09 = game.obj_create(14663, location_from_axis(712, 497))
x09.rotation = 3.5
game.timevent_add( bye_bye, ( x09, triggerer ), 30300 )
x10 = game.obj_create(14663, location_from_axis(712, 501))
x10.rotation = 4.5
game.timevent_add( bye_bye, ( x10, triggerer ), 30300 )
x11 = game.obj_create(14662, location_from_axis(708, 457))
x11.rotation = 5.5
game.timevent_add( bye_bye, ( x11, triggerer ), 30300 )
x12 = game.obj_create(14663, location_from_axis(708, 461))
x12.rotation = 0.5
game.timevent_add( bye_bye, ( x12, triggerer ), 30300 )
x13 = game.obj_create(14663, location_from_axis(708, 469))
x13.rotation = 1.5
game.timevent_add( bye_bye, ( x13, triggerer ), 30300 )
x14 = game.obj_create(14663, location_from_axis(708, 473))
x14.rotation = 2.5
game.timevent_add( bye_bye, ( x14, triggerer ), 30300 )
x15 = game.obj_create(14663, location_from_axis(708, 477))
x15.rotation = 3.5
game.timevent_add( bye_bye, ( x15, triggerer ), 30300 )
x16 = game.obj_create(14662, location_from_axis(708, 481))
x16.rotation = 4.5
game.timevent_add( bye_bye, ( x16, triggerer ), 30300 )
x17 = game.obj_create(14663, location_from_axis(708, 485))
x17.rotation = 5.5
game.timevent_add( bye_bye, ( x17, triggerer ), 30300 )
x18 = game.obj_create(14663, location_from_axis(708, 489))
x18.rotation = 0.5
game.timevent_add( bye_bye, ( x18, triggerer ), 30300 )
x19 = game.obj_create(14663, location_from_axis(708, 493))
x19.rotation = 1.5
game.timevent_add( bye_bye, ( x19, triggerer ), 30300 )
x20 = game.obj_create(14663, location_from_axis(708, 497))
x20.rotation = 2.5
game.timevent_add( bye_bye, ( x20, triggerer ), 30300 )
x21 = game.obj_create(14662, location_from_axis(704, 457))
x21.rotation = 3.5
game.timevent_add( bye_bye, ( x21, triggerer ), 30300 )
x22 = game.obj_create(14663, location_from_axis(704, 461))
x22.rotation = 4.5
game.timevent_add( bye_bye, ( x22, triggerer ), 30300 )
x23 = game.obj_create(14663, location_from_axis(704, 469))
x23.rotation = 5.5
game.timevent_add( bye_bye, ( x23, triggerer ), 30300 )
x24 = game.obj_create(14663, location_from_axis(704, 473))
x24.rotation = 0.5
game.timevent_add( bye_bye, ( x24, triggerer ), 30300 )
x25 = game.obj_create(14663, location_from_axis(704, 477))
x25.rotation = 1.5
game.timevent_add( bye_bye, ( x25, triggerer ), 30300 )
x26 = game.obj_create(14662, location_from_axis(704, 481))
x26.rotation = 2.5
game.timevent_add( bye_bye, ( x26, triggerer ), 30300 )
x27 = game.obj_create(14663, location_from_axis(704, 485))
x27.rotation = 3.5
game.timevent_add( bye_bye, ( x27, triggerer ), 30300 )
x28 = game.obj_create(14663, location_from_axis(704, 489))
x28.rotation = 4.5
game.timevent_add( bye_bye, ( x28, triggerer ), 30300 )
x29 = game.obj_create(14663, location_from_axis(705, 493))
x29.rotation = 5.5
game.timevent_add( bye_bye, ( x29, triggerer ), 30300 )
x30 = game.obj_create(14663, location_from_axis(748, 409))
x30.rotation = 0.5
game.timevent_add( bye_bye, ( x30, triggerer ), 30300 )
y01 = game.obj_create(14662, location_from_axis(744, 409))
y01.rotation = 1.5
game.timevent_add( bye_bye, ( y01, triggerer ), 30300 )
y02 = game.obj_create(14663, location_from_axis(720, 485))
y02.rotation = 2.5
game.timevent_add( bye_bye, ( y02, triggerer ), 30300 )
y03 = game.obj_create(14663, location_from_axis(720, 489))
y03.rotation = 3.5
game.timevent_add( bye_bye, ( y03, triggerer ), 30300 )
y04 = game.obj_create(14663, location_from_axis(720, 493))
y04.rotation = 4.5
game.timevent_add( bye_bye, ( y04, triggerer ), 30300 )
y05 = game.obj_create(14663, location_from_axis(724, 493))
y05.rotation = 5.5
game.timevent_add( bye_bye, ( y05, triggerer ), 30300 )
y06 = game.obj_create(14662, location_from_axis(728, 493))
y06.rotation = 0.5
game.timevent_add( bye_bye, ( y06, triggerer ), 30300 )
y07 = game.obj_create(14663, location_from_axis(732, 489))
y07.rotation = 1.5
game.timevent_add( bye_bye, ( y07, triggerer ), 30300 )
y08 = game.obj_create(14663, location_from_axis(736, 489))
y08.rotation = 2.5
game.timevent_add( bye_bye, ( y08, triggerer ), 30300 )
y09 = game.obj_create(14663, location_from_axis(740, 489))
y09.rotation = 3.5
game.timevent_add( bye_bye, ( y09, triggerer ), 30300 )
y10 = game.obj_create(14663, location_from_axis(740, 485))
y10.rotation = 4.5
game.timevent_add( bye_bye, ( y10, triggerer ), 30300 )
y11 = game.obj_create(14662, location_from_axis(736, 481))
y11.rotation = 5.5
game.timevent_add( bye_bye, ( y11, triggerer ), 30300 )
y12 = game.obj_create(14663, location_from_axis(736, 477))
y12.rotation = 0.5
	game.timevent_add( bye_bye, ( y12, triggerer ), 30300 )
y13 = game.obj_create(14663, location_from_axis(744, 485))
y13.rotation = 1.5
game.timevent_add( bye_bye, ( y13, triggerer ), 30300 )
y14 = game.obj_create(14663, location_from_axis(748, 481))
y14.rotation = 2.5
game.timevent_add( bye_bye, ( y14, triggerer ), 30300 )
y15 = game.obj_create(14663, location_from_axis(752, 481))
y15.rotation = 3.5
game.timevent_add( bye_bye, ( y15, triggerer ), 30300 )
y16 = game.obj_create(14662, location_from_axis(752, 477))
y16.rotation = 4.5
game.timevent_add( bye_bye, ( y16, triggerer ), 30300 )
y17 = game.obj_create(14663, location_from_axis(756, 477))
y17.rotation = 5.5
game.timevent_add( bye_bye, ( y17, triggerer ), 30300 )
y18 = game.obj_create(14663, location_from_axis(756, 473))
y18.rotation = 0.5
game.timevent_add( bye_bye, ( y18, triggerer ), 30300 )
y19 = game.obj_create(14663, location_from_axis(760, 469))
y19.rotation = 1.5
game.timevent_add( bye_bye, ( y19, triggerer ), 30300 )
y20 = game.obj_create(14663, location_from_axis(760, 465))
y20.rotation = 2.5
game.timevent_add( bye_bye, ( y20, triggerer ), 30300 )
y21 = game.obj_create(14662, location_from_axis(756, 465))
y21.rotation = 3.5
game.timevent_add( bye_bye, ( y21, triggerer ), 30300 )
y22 = game.obj_create(14663, location_from_axis(748, 461))
y22.rotation = 4.5
game.timevent_add( bye_bye, ( y22, triggerer ), 30300 )
y23 = game.obj_create(14663, location_from_axis(756, 425))
y23.rotation = 5.5
game.timevent_add( bye_bye, ( y23, triggerer ), 30300 )
y24 = game.obj_create(14663, location_from_axis(756, 429))
y24.rotation = 0.5
game.timevent_add( bye_bye, ( y24, triggerer ), 30300 )
y25 = game.obj_create(14663, location_from_axis(756, 433))
y25.rotation = 1.5
game.timevent_add( bye_bye, ( y25, triggerer ), 30300 )
y26 = game.obj_create(14662, location_from_axis(756, 437))
	y26.rotation = 2.5
game.timevent_add( bye_bye, ( y26, triggerer ), 30300 )
y27 = game.obj_create(14663, location_from_axis(756, 441))
y27.rotation = 3.5
game.timevent_add( bye_bye, ( y27, triggerer ), 30300 )
y28 = game.obj_create(14663, location_from_axis(756, 445))
y28.rotation = 4.5
game.timevent_add( bye_bye, ( y28, triggerer ), 30300 )
y29 = game.obj_create(14663, location_from_axis(756, 421))
y29.rotation = 5.5
game.timevent_add( bye_bye, ( y29, triggerer ), 30300 )
y30 = game.obj_create(14663, location_from_axis(760, 429))
y30.rotation = 0.5
game.timevent_add( bye_bye, ( y30, triggerer ), 30300 )
z01 = game.obj_create(14662, location_from_axis(760, 433))
z01.rotation = 1.5
game.timevent_add( bye_bye, ( z01, triggerer ), 30300 )
z02 = game.obj_create(14663, location_from_axis(760, 437))
z02.rotation = 2.5
game.timevent_add( bye_bye, ( z02, triggerer ), 30300 )
z03 = game.obj_create(14663, location_from_axis(760, 441))
z03.rotation = 3.5
game.timevent_add( bye_bye, ( z03, triggerer ), 30300 )
z04 = game.obj_create(14663, location_from_axis(760, 445))
z04.rotation = 4.5
game.timevent_add( bye_bye, ( z04, triggerer ), 30300 )
z05 = game.obj_create(14663, location_from_axis(764, 433))
z05.rotation = 5.5
game.timevent_add( bye_bye, ( z05, triggerer ), 30300 )
z06 = game.obj_create(14662, location_from_axis(764, 437))
z06.rotation = 0.5
game.timevent_add( bye_bye, ( z06, triggerer ), 30300 )
z07 = game.obj_create(14663, location_from_axis(764, 441))
z07.rotation = 1.5
game.timevent_add( bye_bye, ( z07, triggerer ), 30300 )
z08 = game.obj_create(14663, location_from_axis(764, 445))
z08.rotation = 2.5
game.timevent_add( bye_bye, ( z08, triggerer ), 30300 )
z09 = game.obj_create(14663, location_from_axis(764, 449))
z09.rotation = 3.5
game.timevent_add( bye_bye, ( z09, triggerer ), 30300 )
z10 = game.obj_create(14663, location_from_axis(764, 461))
z10.rotation = 4.5
game.timevent_add( bye_bye, ( z10, triggerer ), 30300 )
z11 = game.obj_create(14662, location_from_axis(764, 457))
z11.rotation = 5.5
game.timevent_add( bye_bye, ( z11, triggerer ), 30300 )
z12 = game.obj_create(14663, location_from_axis(768, 433))
z12.rotation = 0.5
game.timevent_add( bye_bye, ( z12, triggerer ), 30300 )
z13 = game.obj_create(14663, location_from_axis(768, 437))
z13.rotation = 1.5
game.timevent_add( bye_bye, ( z13, triggerer ), 30300 )
z14 = game.obj_create(14663, location_from_axis(768, 441))
z14.rotation = 2.5
game.timevent_add( bye_bye, ( z14, triggerer ), 30300 )
z15 = game.obj_create(14663, location_from_axis(768, 445))
z15.rotation = 3.5
game.timevent_add( bye_bye, ( z15, triggerer ), 30300 )
z16 = game.obj_create(14662, location_from_axis(768, 449))
z16.rotation = 4.5
game.timevent_add( bye_bye, ( z16, triggerer ), 30300 )
z17 = game.obj_create(14663, location_from_axis(768, 453))
z17.rotation = 5.5
game.timevent_add( bye_bye, ( z17, triggerer ), 30300 )
z18 = game.obj_create(14663, location_from_axis(772, 437))
z18.rotation = 0.5
game.timevent_add( bye_bye, ( z18, triggerer ), 30300 )
z19 = game.obj_create(14663, location_from_axis(772, 441))
z19.rotation = 1.5
game.timevent_add( bye_bye, ( z19, triggerer ), 30300 )
z20 = game.obj_create(14663, location_from_axis(688, 445))
z20.rotation = 2.5
game.timevent_add( bye_bye, ( z20, triggerer ), 30300 )
z21 = game.obj_create(14662, location_from_axis(684, 445))
z21.rotation = 3.5
game.timevent_add( bye_bye, ( z21, triggerer ), 30300 )
z22 = game.obj_create(14663, location_from_axis(684, 449))
z22.rotation = 4.5
game.timevent_add( bye_bye, ( z22, triggerer ), 30300 )
z23 = game.obj_create(14663, location_from_axis(684, 453))
z23.rotation = 5.5
game.timevent_add( bye_bye, ( z23, triggerer ), 30300 )
z24 = game.obj_create(14663, location_from_axis(684, 457))
z24.rotation = 0.5
game.timevent_add( bye_bye, ( z24, triggerer ), 30300 )
z25 = game.obj_create(14663, location_from_axis(692, 465))
z25.rotation = 1.5
game.timevent_add( bye_bye, ( z25, triggerer ), 30300 )
z26 = game.obj_create(14662, location_from_axis(692, 469))
z26.rotation = 2.5
game.timevent_add( bye_bye, ( z26, triggerer ), 30300 )
z27 = game.obj_create(14663, location_from_axis(692, 473))
z27.rotation = 3.5
game.timevent_add( bye_bye, ( z27, triggerer ), 30300 )
z28 = game.obj_create(14663, location_from_axis(688, 473))
z28.rotation = 4.5
game.timevent_add( bye_bye, ( z28, triggerer ), 30300 )
z29 = game.obj_create(14663, location_from_axis(688, 457))
z29.rotation = 5.5
game.timevent_add( bye_bye, ( z29, triggerer ), 30300 )
z30 = game.obj_create(14663, location_from_axis(680, 445))
z30.rotation = 0.5
game.timevent_add( bye_bye, ( z30, triggerer ), 30300 )
game.sound( 4115, 1 )
game.global_vars[696] = 9
game.global_flags[869] = 0
game.quests[83].state = qs_completed
return RUN_DEFAULT
def bye_bye( attachee, triggerer ):
attachee.object_flag_set(OF_OFF)
return RUN_DEFAULT | mit |
mahak/neutron | neutron/plugins/ml2/drivers/mech_sriov/agent/sriov_nic_agent.py | 2 | 25945 | # Copyright 2014 Mellanox Technologies, Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
import itertools
import socket
import sys
import time
from neutron_lib.agent import topics
from neutron_lib.api.definitions import portbindings
from neutron_lib import constants as n_constants
from neutron_lib import context
from neutron_lib.placement import utils as place_utils
from neutron_lib.utils import helpers
from oslo_config import cfg
from oslo_log import log as logging
import oslo_messaging
from oslo_service import loopingcall
from osprofiler import profiler
import pyroute2
from neutron._i18n import _
from neutron.agent.common import utils
from neutron.agent.l2 import l2_agent_extensions_manager as ext_manager
from neutron.agent import rpc as agent_rpc
from neutron.agent import securitygroups_rpc as agent_sg_rpc
from neutron.api.rpc.callbacks import resources
from neutron.api.rpc.handlers import securitygroups_rpc as sg_rpc
from neutron.common import config as common_config
from neutron.common import profiler as setup_profiler
from neutron.common import utils as n_utils
from neutron.conf.agent import common as agent_config
from neutron.conf import service as service_conf
from neutron.plugins.ml2.drivers.mech_sriov.agent.common import config
from neutron.plugins.ml2.drivers.mech_sriov.agent.common \
import exceptions as exc
from neutron.plugins.ml2.drivers.mech_sriov.agent import eswitch_manager as esm
from neutron.privileged.agent.linux import ip_lib as priv_ip_lib
LOG = logging.getLogger(__name__)
class SriovNicSwitchRpcCallbacks(sg_rpc.SecurityGroupAgentRpcCallbackMixin):
# Set RPC API version to 1.0 by default.
# history
# 1.1 Support Security Group RPC (works with NoopFirewallDriver)
# 1.2 Support DVR (Distributed Virtual Router) RPC (not supported)
# 1.3 Added param devices_to_update to security_groups_provider_updated
# (works with NoopFirewallDriver)
# 1.4 Added support for network_update
# 1.5 Added support for binding_activate and binding_deactivate
target = oslo_messaging.Target(version='1.5')
def __init__(self, context, agent, sg_agent):
super(SriovNicSwitchRpcCallbacks, self).__init__()
self.context = context
self.agent = agent
self.sg_agent = sg_agent
def port_update(self, context, **kwargs):
LOG.debug("port_update received")
port = kwargs.get('port')
vnic_type = port.get(portbindings.VNIC_TYPE)
if vnic_type and vnic_type == portbindings.VNIC_DIRECT_PHYSICAL:
LOG.debug("The SR-IOV agent doesn't handle %s ports.",
portbindings.VNIC_DIRECT_PHYSICAL)
return
# Put the port mac address in the updated_devices set.
        # Do not store port details: if they were used when processing
        # notifications, there would be no guarantee that the notifications
        # are processed in the same order as the relevant API requests.
mac = port['mac_address']
pci_slot = port.get(portbindings.PROFILE, {}).get('pci_slot')
if pci_slot:
self.agent.updated_devices.add(agent_rpc.DeviceInfo(mac, pci_slot))
LOG.debug("port_update RPC received for port: %(id)s with MAC "
"%(mac)s and PCI slot %(pci_slot)s slot",
{'id': port['id'], 'mac': mac, 'pci_slot': pci_slot})
else:
LOG.debug("No PCI Slot for port %(id)s with MAC %(mac)s; "
"skipping", {'id': port['id'], 'mac': mac,
'pci_slot': pci_slot})
def network_update(self, context, **kwargs):
network_id = kwargs['network']['id']
LOG.debug("network_update message received for network "
"%(network_id)s, with ports: %(ports)s",
{'network_id': network_id,
'ports': self.agent.network_ports[network_id]})
for port_data in self.agent.network_ports[network_id]:
self.agent.updated_devices.add(port_data['device'])
def binding_activate(self, context, **kwargs):
if kwargs.get('host') != self.agent.conf.host:
return
LOG.debug("binding activate for port %s", kwargs.get('port_id'))
device_details = self.agent.get_device_details_from_port_id(
kwargs.get('port_id'))
mac = device_details.get('mac_address')
binding_profile = device_details.get('profile')
if binding_profile:
pci_slot = binding_profile.get('pci_slot')
self.agent.activated_bindings.add((mac, pci_slot))
else:
LOG.warning("binding_profile not found for port %s.",
kwargs.get('port_id'))
def binding_deactivate(self, context, **kwargs):
if kwargs.get('host') != self.agent.conf.host:
return
LOG.debug("binding deactivate for port %s. NOOP.",
kwargs.get('port_id'))
@profiler.trace_cls("rpc")
class SriovNicSwitchAgent(object):
def __init__(self, physical_devices_mappings, exclude_devices,
polling_interval, rp_bandwidths, rp_inventory_defaults,
rp_hypervisors):
self.polling_interval = polling_interval
self.network_ports = collections.defaultdict(list)
self.conf = cfg.CONF
self.device_mappings = physical_devices_mappings
self.exclude_devices = exclude_devices
self.setup_eswitch_mgr(physical_devices_mappings,
exclude_devices)
# Stores port update notifications for processing in the main loop
self.updated_devices = set()
# Stores <mac, pci_slot> pairs for ports whose binding has been
# activated.
self.activated_bindings = set()
self.context = context.get_admin_context_without_session()
self.plugin_rpc = agent_rpc.PluginApi(topics.PLUGIN)
self.sg_plugin_rpc = sg_rpc.SecurityGroupServerRpcApi(topics.PLUGIN)
self.sg_agent = agent_sg_rpc.SecurityGroupAgentRpc(
self.context, self.sg_plugin_rpc)
self._setup_rpc()
self.ext_manager = self._create_agent_extension_manager(
self.connection)
configurations = {'device_mappings': physical_devices_mappings,
n_constants.RP_BANDWIDTHS: rp_bandwidths,
n_constants.RP_INVENTORY_DEFAULTS:
rp_inventory_defaults,
'resource_provider_hypervisors': rp_hypervisors,
'extensions': self.ext_manager.names()}
# TODO(mangelajo): optimize resource_versions (see ovs agent)
self.agent_state = {
'binary': n_constants.AGENT_PROCESS_NIC_SWITCH,
'host': self.conf.host,
'topic': n_constants.L2_AGENT_TOPIC,
'configurations': configurations,
'agent_type': n_constants.AGENT_TYPE_NIC_SWITCH,
'resource_versions': resources.LOCAL_RESOURCE_VERSIONS,
'start_flag': True}
# The initialization is complete; we can start receiving messages
self.connection.consume_in_threads()
# Initialize iteration counter
self.iter_num = 0
def _setup_rpc(self):
self.agent_id = 'nic-switch-agent.%s' % socket.gethostname()
LOG.info("RPC agent_id: %s", self.agent_id)
self.topic = topics.AGENT
self.failed_report_state = False
self.state_rpc = agent_rpc.PluginReportStateAPI(topics.REPORTS)
# RPC network init
# Handle updates from service
self.endpoints = [SriovNicSwitchRpcCallbacks(self.context, self,
self.sg_agent)]
# Define the listening consumers for the agent
consumers = [[topics.PORT, topics.UPDATE],
[topics.NETWORK, topics.UPDATE],
[topics.SECURITY_GROUP, topics.UPDATE],
[topics.PORT_BINDING, topics.DEACTIVATE],
[topics.PORT_BINDING, topics.ACTIVATE]]
self.connection = agent_rpc.create_consumers(self.endpoints,
self.topic,
consumers,
start_listening=False)
report_interval = cfg.CONF.AGENT.report_interval
if report_interval:
heartbeat = loopingcall.FixedIntervalLoopingCall(
self._report_state)
heartbeat.start(interval=report_interval)
def _report_state(self):
try:
self.state_rpc.report_state(self.context,
self.agent_state)
# we only want to update resource versions on startup
self.agent_state.pop('resource_versions', None)
self.agent_state.pop('start_flag', None)
except Exception:
self.failed_report_state = True
LOG.exception("Failed reporting state!")
return
if self.failed_report_state:
self.failed_report_state = False
LOG.info("Successfully reported state after a previous failure.")
def _create_agent_extension_manager(self, connection):
ext_manager.register_opts(self.conf)
mgr = ext_manager.L2AgentExtensionsManager(self.conf)
mgr.initialize(connection, 'sriov')
return mgr
def setup_eswitch_mgr(self, device_mappings, exclude_devices=None):
exclude_devices = exclude_devices or {}
self.eswitch_mgr = esm.ESwitchManager()
self.eswitch_mgr.discover_devices(device_mappings, exclude_devices)
def scan_devices(self, registered_devices, updated_devices):
curr_devices = self.eswitch_mgr.get_assigned_devices_info()
self.agent_state.get('configurations')['devices'] = len(curr_devices)
device_info = {}
device_info['current'] = curr_devices
device_info['added'] = curr_devices - registered_devices
# we need to clean up after devices are removed
device_info['removed'] = registered_devices - curr_devices
# we don't want to process updates for devices that don't exist
        device_info['updated'] = (updated_devices &
                                  (curr_devices - device_info['removed']))
return device_info
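    # Illustrative sketch of the set arithmetic above (DeviceInfo values
    # A, B and C are hypothetical): with registered_devices == {A, B} and
    # curr_devices == {B, C}, scan_devices() reports added == {C} and
    # removed == {A}, while 'updated' is computed as
    # updated_devices & (curr_devices - removed), so updates are only
    # processed for devices that still exist.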
def _device_info_has_changes(self, device_info):
return (device_info.get('added') or
device_info.get('updated') or
device_info.get('removed'))
def process_network_devices(self, device_info):
resync_a = False
resync_b = False
self.sg_agent.prepare_devices_filter(device_info.get('added'))
if device_info.get('updated'):
self.sg_agent.refresh_firewall()
# Updated devices are processed the same as new ones, as their
# admin_state_up may have changed. The set union prevents duplicating
# work when a device is new and updated in the same polling iteration.
devices_added_updated = (set(device_info.get('added')) |
set(device_info.get('updated')))
if devices_added_updated:
resync_a = self.treat_devices_added_updated(devices_added_updated)
if device_info.get('removed'):
resync_b = self.treat_devices_removed(device_info['removed'])
# If one of the above operations fails => resync with plugin
return (resync_a | resync_b)
def treat_device(self, device_info, admin_state_up, spoofcheck=True,
propagate_uplink_state=False):
if self.eswitch_mgr.device_exists(device_info.mac,
device_info.pci_slot):
try:
self.eswitch_mgr.set_device_spoofcheck(
device_info.mac, device_info.pci_slot, spoofcheck)
except Exception:
LOG.warning("Failed to set spoofcheck for device %s",
device_info)
LOG.info("Device %(device)s spoofcheck %(spoofcheck)s",
{"device": device_info, "spoofcheck": spoofcheck})
try:
self.eswitch_mgr.set_device_state(
device_info.mac, device_info.pci_slot, admin_state_up,
propagate_uplink_state)
except priv_ip_lib.InterfaceOperationNotSupported:
LOG.warning("Device %s does not support state change",
device_info)
except pyroute2.NetlinkError:
LOG.warning("Failed to set device %s state", device_info)
return False
else:
LOG.info("No device %s defined on agent.", device_info)
return False
return True
def _update_network_ports(self, network_id, port_id, device):
self._clean_network_ports(device)
self.network_ports[network_id].append({
"port_id": port_id,
"device": device})
def _clean_network_ports(self, device_to_clean):
for netid, ports_list in dict(self.network_ports).items():
for port_data in list(ports_list):
if device_to_clean == port_data['device']:
ports_list.remove(port_data)
if not ports_list:
self.network_ports.pop(netid)
return port_data['port_id']
def treat_devices_added_updated(self, devices_info):
try:
rpc_devices_details = self.plugin_rpc.get_devices_details_list(
self.context, devices_info, self.agent_id, self.conf.host)
except Exception as e:
LOG.debug("Unable to get port details for devices "
"with MAC addresses %(devices)s: %(e)s",
{'devices': devices_info, 'e': e})
# resync is needed
return True
devices_up = set()
devices_down = set()
resync = False
for device_details in rpc_devices_details:
mac_address = device_details['device']
LOG.debug("Port with MAC address %s is added", mac_address)
if 'port_id' in device_details:
LOG.info("Port %(device)s updated. Details: %(details)s",
{'device': mac_address, 'details': device_details})
port_id = device_details['port_id']
profile = device_details['profile']
device_info = agent_rpc.DeviceInfo(mac_address,
profile.get('pci_slot'))
spoofcheck = device_details.get('port_security_enabled', True)
if self.treat_device(
device_info,
device_details['admin_state_up'],
spoofcheck,
device_details['propagate_uplink_status']):
if device_details['admin_state_up']:
devices_up.add(device_info)
else:
devices_down.add(device_info)
else:
resync = True
self._update_network_ports(device_details['network_id'],
port_id, device_info)
self.ext_manager.handle_port(self.context, device_details)
elif n_constants.NO_ACTIVE_BINDING in device_details:
                # The port was added but its binding in this agent
                # hasn't been activated yet. It will be treated as
                # added when the binding is activated.
LOG.info("Device with MAC %s has no active binding in host",
mac_address)
else:
LOG.info("Device with MAC %s not defined on plugin",
mac_address)
self.plugin_rpc.update_device_list(self.context,
devices_up,
devices_down,
self.agent_id,
self.conf.host)
return resync
def treat_devices_removed(self, devices):
resync = False
for device in devices:
LOG.info("Removing device with MAC address %(mac)s and "
"PCI slot %(pci_slot)s",
{'mac': device.mac, 'pci_slot': device.pci_slot})
try:
port_id = self._clean_network_ports(device)
if port_id:
port = {'port_id': port_id,
'device': device.mac,
'profile': {'pci_slot': device.pci_slot}}
self.ext_manager.delete_port(self.context, port)
else:
LOG.warning("port_id to device %s not found", device)
dev_details = self.plugin_rpc.update_device_down(self.context,
device,
self.agent_id,
cfg.CONF.host)
except Exception as e:
LOG.debug("Removing port failed for device %(device)s due to "
"%(exc)s", {'device': device, 'exc': e})
resync = True
continue
if dev_details['exists']:
LOG.info("Port from device %s updated", device)
else:
LOG.debug("Device %s not defined on plugin", device)
return resync
def process_activated_bindings(self, device_info, activated_bindings_copy):
"""Process activated bindings.
Add activated bindings to the 'added' set in device info.
:param device_info: A dict that contains the set of 'current', 'added',
'removed' and 'updated' ports.
:param activated_bindings_copy: A set of activated port bindings.
:return: None
"""
LOG.debug("Processing activated bindings: %s", activated_bindings_copy)
# Compute which ports for activated bindings are already present
activated_bindings_copy &= device_info['current']
# Treat them as just added
device_info['added'] |= activated_bindings_copy
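    # Hypothetical example: if bindings for DeviceInfo values {A, B} were
    # activated but only A is currently present on the host, the
    # intersection keeps {A} and only A is treated as newly added.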
def get_device_details_from_port_id(self, port_id):
"""Get device details from server
:param port_id: Port identifier (UUID).
:return: A dict containing various port attributes if the port is
bound to the host. In case the port is not bound to the host
then the method will return A dict with a minimal set of
attributes e.g {'device': port_id}.
"""
return self.plugin_rpc.get_device_details(self.context,
port_id,
self.agent_id,
host=cfg.CONF.host)
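    # Hedged sketch of the two result shapes (field values hypothetical):
    # a port bound to this host yields a full details dict, e.g.
    #   {'device': port_id, 'port_id': port_id,
    #    'mac_address': 'fa:16:3e:aa:bb:cc', 'admin_state_up': True,
    #    'profile': {'pci_slot': '0000:03:00.2'}, ...}
    # while an unbound port yields a minimal dict such as
    #   {'device': port_id}
    # possibly carrying the NO_ACTIVE_BINDING marker that
    # treat_devices_added_updated() checks for.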
def daemon_loop(self):
sync = True
devices = set()
LOG.info("SRIOV NIC Agent RPC Daemon Started!")
while True:
start = time.time()
LOG.debug("Agent rpc_loop - iteration:%d started",
self.iter_num)
if sync:
LOG.info("Agent out of sync with plugin!")
devices.clear()
sync = False
device_info = {}
            # Save the updated devices set to allow a rollback in case
            # a resync is needed, and then clear self.updated_devices.
            # As the greenthread should not yield between these
            # two statements, this should be thread-safe.
updated_devices_copy = self.updated_devices
self.updated_devices = set()
activated_bindings_copy = self.activated_bindings
self.activated_bindings = set()
try:
self.eswitch_mgr.discover_devices(self.device_mappings,
self.exclude_devices)
device_info = self.scan_devices(devices, updated_devices_copy)
if activated_bindings_copy:
self.process_activated_bindings(device_info,
activated_bindings_copy)
if self._device_info_has_changes(device_info):
LOG.debug("Agent loop found changes! %s", device_info)
# If treat devices fails - indicates must resync with
# plugin
sync = self.process_network_devices(device_info)
devices = device_info['current']
except Exception:
LOG.exception("Error in agent loop. Devices info: %s",
device_info)
sync = True
# Restore devices that were removed from this set earlier
# without overwriting ones that may have arrived since.
self.updated_devices |= updated_devices_copy
self.activated_bindings |= activated_bindings_copy
# sleep till end of polling interval
elapsed = (time.time() - start)
if (elapsed < self.polling_interval):
time.sleep(self.polling_interval - elapsed)
else:
LOG.debug("Loop iteration exceeded interval "
"(%(polling_interval)s vs. %(elapsed)s)!",
{'polling_interval': self.polling_interval,
'elapsed': elapsed})
self.iter_num = self.iter_num + 1
class SriovNicAgentConfigParser(object):
def __init__(self):
self.device_mappings = {}
self.exclude_devices = {}
def parse(self):
"""Parses device_mappings and exclude_devices.
Parse and validate the consistency in both mappings
"""
self.device_mappings = helpers.parse_mappings(
cfg.CONF.SRIOV_NIC.physical_device_mappings, unique_keys=False)
self.exclude_devices = config.parse_exclude_devices(
cfg.CONF.SRIOV_NIC.exclude_devices)
self.rp_bandwidths = place_utils.parse_rp_bandwidths(
cfg.CONF.SRIOV_NIC.resource_provider_bandwidths)
self.rp_inventory_defaults = place_utils.parse_rp_inventory_defaults(
cfg.CONF.SRIOV_NIC.resource_provider_inventory_defaults)
self.rp_hypervisors = utils.default_rp_hypervisors(
cfg.CONF.SRIOV_NIC.resource_provider_hypervisors,
self.device_mappings,
cfg.CONF.SRIOV_NIC.resource_provider_default_hypervisor,
)
self._validate()
def _validate(self):
"""Validate configuration.
Validate that network_device in excluded_device
exists in device mappings.
Validate that network_device in resource_provider_bandwidths
exists in device mappings.
"""
dev_net_set = set(itertools.chain.from_iterable(
self.device_mappings.values()))
for dev_name in self.exclude_devices.keys():
if dev_name not in dev_net_set:
raise ValueError(_(
"Invalid exclude_devices: "
"Device name %(dev_name)s is missing from "
"physical_device_mappings") % {'dev_name': dev_name})
n_utils.validate_rp_bandwidth(self.rp_bandwidths, dev_net_set)
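    # Hedged configuration sketch (all values hypothetical): with
    #   physical_device_mappings = physnet1:ens1f0,physnet1:ens1f1
    #   exclude_devices = ens1f1:0000:07:00.2;0000:07:00.3
    # dev_net_set becomes {'ens1f0', 'ens1f1'}, so the exclude entry for
    # 'ens1f1' passes validation, while an entry naming an unmapped NIC
    # would raise the ValueError above.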
def main():
common_config.init(sys.argv[1:])
common_config.setup_logging()
agent_config.setup_privsep()
service_conf.register_service_opts(service_conf.RPC_EXTRA_OPTS, cfg.CONF)
try:
config_parser = SriovNicAgentConfigParser()
config_parser.parse()
device_mappings = config_parser.device_mappings
exclude_devices = config_parser.exclude_devices
rp_bandwidths = config_parser.rp_bandwidths
rp_inventory_defaults = config_parser.rp_inventory_defaults
rp_hypervisors = config_parser.rp_hypervisors
except ValueError:
LOG.exception("Failed on Agent configuration parse. "
"Agent terminated!")
raise SystemExit(1)
LOG.info("Physical Devices mappings: %s", device_mappings)
LOG.info("Exclude Devices: %s", exclude_devices)
LOG.info("Resource provider bandwidths: %s", rp_bandwidths)
LOG.info("Resource provider inventory defaults: %s", rp_inventory_defaults)
LOG.info("Resource provider hypervisors: %s", rp_hypervisors)
polling_interval = cfg.CONF.AGENT.polling_interval
try:
agent = SriovNicSwitchAgent(device_mappings,
exclude_devices,
polling_interval,
rp_bandwidths,
rp_inventory_defaults,
rp_hypervisors)
except exc.SriovNicError:
LOG.exception("Agent Initialization Failed")
raise SystemExit(1)
# Start everything.
setup_profiler.setup(n_constants.AGENT_PROCESS_NIC_SWITCH, cfg.CONF.host)
LOG.info("Agent initialized successfully, now running... ")
agent.daemon_loop()
| apache-2.0 |
ravibhure/ansible | test/units/modules/network/nxos/test_nxos_evpn_vni.py | 17 | 2837 | # (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.compat.tests.mock import patch
from ansible.modules.network.nxos import nxos_evpn_vni
from .nxos_module import TestNxosModule, load_fixture, set_module_args
class TestNxosEvpnVniModule(TestNxosModule):
module = nxos_evpn_vni
def setUp(self):
super(TestNxosEvpnVniModule, self).setUp()
self.mock_run_commands = patch('ansible.modules.network.nxos.nxos_evpn_vni.run_commands')
self.run_commands = self.mock_run_commands.start()
self.mock_load_config = patch('ansible.modules.network.nxos.nxos_evpn_vni.load_config')
self.load_config = self.mock_load_config.start()
self.mock_get_config = patch('ansible.modules.network.nxos.nxos_evpn_vni.get_config')
self.get_config = self.mock_get_config.start()
def tearDown(self):
super(TestNxosEvpnVniModule, self).tearDown()
self.mock_run_commands.stop()
self.mock_load_config.stop()
self.mock_get_config.stop()
def load_fixtures(self, commands=None, device=''):
self.get_config.return_value = load_fixture('', 'nxos_evpn_vni_config.cfg')
self.load_config.return_value = None
def test_nxos_evpn_vni_present(self):
set_module_args(dict(vni='6000',
route_target_import='5000:10',
state='present'))
result = self.execute_module(changed=True)
self.assertEqual(result['commands'], ['evpn',
'vni 6000 l2',
'route-target import 5000:10'])
def test_nxos_evpn_vni_absent_not_existing(self):
set_module_args(dict(vni='12000', state='absent'))
result = self.execute_module(changed=False)
self.assertEqual(result['commands'], [])
def test_nxos_evpn_vni_absent_existing(self):
set_module_args(dict(vni='6000', state='absent'))
result = self.execute_module(changed=True)
self.assertEqual(result['commands'], ['evpn', 'no vni 6000 l2'])
| gpl-3.0 |
danakj/chromium | third_party/WebKit/Tools/Scripts/webkitpy/tool/bot/commit_announcer_unittest.py | 6 | 5713 | # Copyright (C) 2013 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import unittest
from webkitpy.tool.bot.commit_announcer import CommitAnnouncer
from webkitpy.tool.mock_tool import MockWebKitPatch
class CommitAnnouncerTest(unittest.TestCase):
def test_format_commit(self):
tool = MockWebKitPatch()
bot = CommitAnnouncer(tool, "test/directory", "test_password")
self.assertEqual(
'https://crrev.com/456789 [email protected] committed "Commit test subject line"',
bot._format_commit_detail("""\
1234commit1234
[email protected]
Commit test subject line
Multiple
lines
of
description.
BUG=654321
Review URL: https://codereview.chromium.org/123456
Cr-Commit-Position: refs/heads/master@{#456789}
"""))
self.assertEqual(
'https://crrev.com/456789 '
'[email protected] committed "Commit test subject line"',
bot._format_commit_detail("""\
1234commit1234
[email protected]
Commit test subject line
Multiple
lines
of
description.
BUG=654321
Cr-Commit-Position: refs/heads/master@{#456789}
"""))
self.assertEqual(
'https://crrev.com/1234comm [email protected] committed "Commit test subject line"',
bot._format_commit_detail("""\
1234commit1234
[email protected]
Commit test subject line
Multiple
lines
of
description.
BUG=654321
Review URL: https://codereview.chromium.org/123456
"""))
self.assertEqual(
'https://crrev.com/1234comm [email protected] committed "Commit test subject line"',
bot._format_commit_detail("""\
1234commit1234
[email protected]
Commit test subject line
Multiple
lines
of
description.
"""))
self.assertEqual(
'https://crrev.com/456789 [email protected] committed "Commit test subject line"',
bot._format_commit_detail("""\
1234commit1234
[email protected]
Commit test subject line
Multiple
lines
of
description.
Review URL: http://fake.review.url
Cr-Commit-Position: refs/heads/master@{#000000}
BUG=654321
Review URL: https://codereview.chromium.org/123456
Cr-Commit-Position: refs/heads/master@{#456789}
"""))
self.assertEqual(
'https://crrev.com/456789 [email protected] committed "Commit test subject line" '
'\[email protected]\x03',
bot._format_commit_detail("""\
1234commit1234
[email protected]
Commit test subject line
Multiple
lines
of
description.
BUG=654321
[email protected]
Review URL: https://codereview.chromium.org/123456
Cr-Commit-Position: refs/heads/master@{#456789}
"""))
self.assertEqual(
'https://crrev.com/456789 [email protected] committed "Commit test subject line" '
'\x037NOTRY=true\x03',
bot._format_commit_detail("""\
1234commit1234
[email protected]
Commit test subject line
Multiple
lines
of
description.
BUG=654321
NOTRY=true
Review URL: https://codereview.chromium.org/123456
Cr-Commit-Position: refs/heads/master@{#456789}
"""))
self.assertEqual(
'https://crrev.com/456789 [email protected] committed "Commit test subject line" '
'\x037NOTRY=true [email protected]\x03',
bot._format_commit_detail("""\
1234commit1234
[email protected]
Commit test subject line
Multiple
lines
of
description.
NOTRY=true
BUG=654321
[email protected]
Review URL: https://codereview.chromium.org/123456
Cr-Commit-Position: refs/heads/master@{#456789}
"""))
self.assertEqual(
'https://crrev.com/456789 [email protected] committed "Commit test subject line" '
'\[email protected], [email protected], [email protected] notry=TRUE\x03',
bot._format_commit_detail("""\
1234commit1234
[email protected]
Commit test subject line
Multiple
lines
of
description.
BUG=654321
[email protected], [email protected], [email protected]
notry=TRUE
Review URL: https://codereview.chromium.org/123456
Cr-Commit-Position: refs/heads/master@{#456789}
"""))
def test_sanitize_string(self):
bot = CommitAnnouncer(MockWebKitPatch(), "test/directory", "test_password")
self.assertEqual('normal ascii', bot._sanitize_string('normal ascii'))
self.assertEqual('uni\\u0441ode!', bot._sanitize_string(u'uni\u0441ode!'))
| bsd-3-clause |
njvack/masterfile | tests/scripts/test_validate_masterfile.py | 1 | 1658 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Part of the masterfile package: https://github.com/uwmadison-chm/masterfile
# Copyright (c) 2020 Board of Regents of the University of Wisconsin System
# Written by Nate Vack <[email protected]> at the Center for Healthy Minds
# at the University of Wisconsin-Madison.
# Released under MIT licence; see LICENSE at the package root.
from __future__ import absolute_import
from os import path
import glob
from masterfile.scripts import masterfile as mf
class TestValidateMasterfile(object):
def test_retval_zero_for_good_dir(self, good_path, capsys):
retval = mf.main(['validate', good_path])
out, err = capsys.readouterr()
assert out.startswith('No problems found')
assert err == ''
assert retval == 0
def test_retval_nonzero_for_bad_dir(self, example_path, capsys):
retval = mf.main(['validate', example_path])
out, _err = capsys.readouterr()
assert not retval == 0
assert len(out) > 0
def test_retval_nonzero_for_problems_dir(self, problems_path, capsys):
retval = mf.main(['validate', problems_path])
out, _err = capsys.readouterr()
assert not retval == 0
assert len(out) > 0
assert 'problems' in out
def test_retval_nonzero_for_good_with_problem_files(
self, good_path, problems_path, capsys):
problem_file = glob.glob(path.join(problems_path, '*csv'))[0]
retval = mf.main(['validate', good_path, problem_file])
out, _err = capsys.readouterr()
assert not retval == 0
assert len(out) > 0
assert 'problems' in out
| mit |
ruibarreira/linuxtrail | usr/lib/python3/dist-packages/gi/overrides/Gdk.py | 5 | 14014 | # -*- Mode: Python; py-indent-offset: 4 -*-
# vim: tabstop=4 shiftwidth=4 expandtab
#
# Copyright (C) 2009 Johan Dahlin <[email protected]>
# 2010 Simon van der Linden <[email protected]>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301
# USA
import sys
import warnings
from ..overrides import override, strip_boolean_result
from ..module import get_introspection_module
from gi import PyGIDeprecationWarning
Gdk = get_introspection_module('Gdk')
__all__ = []
class Color(Gdk.Color):
MAX_VALUE = 65535
def __init__(self, red, green, blue):
Gdk.Color.__init__(self)
self.red = red
self.green = green
self.blue = blue
def __eq__(self, other):
return self.equal(other)
def __repr__(self):
return 'Gdk.Color(red=%d, green=%d, blue=%d)' % (self.red, self.green, self.blue)
red_float = property(fget=lambda self: self.red / float(self.MAX_VALUE),
fset=lambda self, v: setattr(self, 'red', int(v * self.MAX_VALUE)))
green_float = property(fget=lambda self: self.green / float(self.MAX_VALUE),
fset=lambda self, v: setattr(self, 'green', int(v * self.MAX_VALUE)))
blue_float = property(fget=lambda self: self.blue / float(self.MAX_VALUE),
fset=lambda self, v: setattr(self, 'blue', int(v * self.MAX_VALUE)))
def to_floats(self):
"""Return (red_float, green_float, blue_float) triple."""
return (self.red_float, self.green_float, self.blue_float)
@staticmethod
def from_floats(red, green, blue):
"""Return a new Color object from red/green/blue values from 0.0 to 1.0."""
return Color(int(red * Color.MAX_VALUE),
int(green * Color.MAX_VALUE),
int(blue * Color.MAX_VALUE))
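    # Illustrative round trip (hypothetical values): Color.from_floats(1.0,
    # 0.5, 0.0) yields Color(red=65535, green=32767, blue=0), and
    # to_floats() maps it back to approximately (1.0, 0.5, 0.0) by dividing
    # each channel by MAX_VALUE.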
Color = override(Color)
__all__.append('Color')
if Gdk._version == '3.0':
class RGBA(Gdk.RGBA):
def __init__(self, red=1.0, green=1.0, blue=1.0, alpha=1.0):
Gdk.RGBA.__init__(self)
self.red = red
self.green = green
self.blue = blue
self.alpha = alpha
def __eq__(self, other):
return self.equal(other)
def __repr__(self):
return 'Gdk.RGBA(red=%f, green=%f, blue=%f, alpha=%f)' % (self.red, self.green, self.blue, self.alpha)
def __iter__(self):
"""Iterator which allows easy conversion to tuple and list types."""
yield self.red
yield self.green
yield self.blue
yield self.alpha
def to_color(self):
"""Converts this RGBA into a Color instance which excludes alpha."""
return Color(int(self.red * Color.MAX_VALUE),
int(self.green * Color.MAX_VALUE),
int(self.blue * Color.MAX_VALUE))
@classmethod
def from_color(cls, color):
"""Returns a new RGBA instance given a Color instance."""
return cls(color.red_float, color.green_float, color.blue_float)
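        # Conversion sketch (hypothetical values): RGBA(0.0, 0.5, 1.0, 1.0)
        # .to_color() scales each channel by Color.MAX_VALUE, giving
        # Color(0, 32767, 65535); RGBA.from_color() inverts that via the
        # *_float properties, with alpha defaulting back to 1.0.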
RGBA = override(RGBA)
__all__.append('RGBA')
if Gdk._version == '2.0':
class Rectangle(Gdk.Rectangle):
def __init__(self, x, y, width, height):
Gdk.Rectangle.__init__(self)
self.x = x
self.y = y
self.width = width
self.height = height
def __repr__(self):
            return 'Gdk.Rectangle(x=%d, y=%d, width=%d, height=%d)' % (self.x, self.y, self.width, self.height)
Rectangle = override(Rectangle)
__all__.append('Rectangle')
else:
from gi.repository import cairo as _cairo
Rectangle = _cairo.RectangleInt
__all__.append('Rectangle')
if Gdk._version == '2.0':
class Drawable(Gdk.Drawable):
def cairo_create(self):
return Gdk.cairo_create(self)
Drawable = override(Drawable)
__all__.append('Drawable')
else:
class Window(Gdk.Window):
def __new__(cls, parent, attributes, attributes_mask):
# Gdk.Window had to be made abstract,
# this override allows using the standard constructor
return Gdk.Window.new(parent, attributes, attributes_mask)
def __init__(self, parent, attributes, attributes_mask):
pass
def cairo_create(self):
return Gdk.cairo_create(self)
Window = override(Window)
__all__.append('Window')
Gdk.EventType._2BUTTON_PRESS = getattr(Gdk.EventType, "2BUTTON_PRESS")
Gdk.EventType._3BUTTON_PRESS = getattr(Gdk.EventType, "3BUTTON_PRESS")
class Event(Gdk.Event):
_UNION_MEMBERS = {
Gdk.EventType.DELETE: 'any',
Gdk.EventType.DESTROY: 'any',
Gdk.EventType.EXPOSE: 'expose',
Gdk.EventType.MOTION_NOTIFY: 'motion',
Gdk.EventType.BUTTON_PRESS: 'button',
Gdk.EventType._2BUTTON_PRESS: 'button',
Gdk.EventType._3BUTTON_PRESS: 'button',
Gdk.EventType.BUTTON_RELEASE: 'button',
Gdk.EventType.KEY_PRESS: 'key',
Gdk.EventType.KEY_RELEASE: 'key',
Gdk.EventType.ENTER_NOTIFY: 'crossing',
Gdk.EventType.LEAVE_NOTIFY: 'crossing',
Gdk.EventType.FOCUS_CHANGE: 'focus_change',
Gdk.EventType.CONFIGURE: 'configure',
Gdk.EventType.MAP: 'any',
Gdk.EventType.UNMAP: 'any',
Gdk.EventType.PROPERTY_NOTIFY: 'property',
Gdk.EventType.SELECTION_CLEAR: 'selection',
Gdk.EventType.SELECTION_REQUEST: 'selection',
Gdk.EventType.SELECTION_NOTIFY: 'selection',
Gdk.EventType.PROXIMITY_IN: 'proximity',
Gdk.EventType.PROXIMITY_OUT: 'proximity',
Gdk.EventType.DRAG_ENTER: 'dnd',
Gdk.EventType.DRAG_LEAVE: 'dnd',
Gdk.EventType.DRAG_MOTION: 'dnd',
Gdk.EventType.DRAG_STATUS: 'dnd',
Gdk.EventType.DROP_START: 'dnd',
Gdk.EventType.DROP_FINISHED: 'dnd',
Gdk.EventType.CLIENT_EVENT: 'client',
Gdk.EventType.VISIBILITY_NOTIFY: 'visibility',
}
if Gdk._version == '2.0':
_UNION_MEMBERS[Gdk.EventType.NO_EXPOSE] = 'no_expose'
def __getattr__(self, name):
real_event = getattr(self, '_UNION_MEMBERS').get(self.type)
if real_event:
return getattr(getattr(self, real_event), name)
else:
raise AttributeError("'%s' object has no attribute '%s'" % (self.__class__.__name__, name))
def __setattr__(self, name, value):
real_event = getattr(self, '_UNION_MEMBERS').get(self.type)
if real_event:
setattr(getattr(self, real_event), name, value)
else:
Gdk.Event.__setattr__(self, name, value)
def __repr__(self):
base_repr = Gdk.Event.__repr__(self).strip("><")
return "<%s type=%r>" % (base_repr, self.type)
Event = override(Event)
__all__.append('Event')
# manually bind GdkEvent members to GdkEvent
modname = globals()['__name__']
module = sys.modules[modname]
# right now we can't get the type_info from the
# field info so manually list the class names
event_member_classes = ['EventAny',
'EventExpose',
'EventVisibility',
'EventMotion',
'EventButton',
'EventScroll',
'EventKey',
'EventCrossing',
'EventFocus',
'EventConfigure',
'EventProperty',
'EventSelection',
'EventOwnerChange',
'EventProximity',
'EventDND',
'EventWindowState',
'EventSetting',
'EventGrabBroken']
if Gdk._version == '2.0':
event_member_classes.append('EventNoExpose')
# Whitelist all methods whose boolean "success" return value we want to mask.
gsuccess_mask_funcs = ['get_state',
'get_axis',
'get_coords',
'get_root_coords']
for event_class in event_member_classes:
override_class = type(event_class, (getattr(Gdk, event_class),), {})
# add the event methods
for method_info in Gdk.Event.__info__.get_methods():
name = method_info.get_name()
event_method = getattr(Gdk.Event, name)
        # On Python 2 we need to use the __func__ attribute to avoid
        # internal instance checks.
event_method = getattr(event_method, '__func__', event_method)
# use the _gsuccess_mask decorator if this method is whitelisted
if name in gsuccess_mask_funcs:
event_method = strip_boolean_result(event_method)
setattr(override_class, name, event_method)
setattr(module, event_class, override_class)
__all__.append(event_class)
# end GdkEvent overrides
class DragContext(Gdk.DragContext):
def finish(self, success, del_, time):
Gtk = get_introspection_module('Gtk')
Gtk.drag_finish(self, success, del_, time)
DragContext = override(DragContext)
__all__.append('DragContext')
class Cursor(Gdk.Cursor):
def __new__(cls, *args, **kwds):
arg_len = len(args)
kwd_len = len(kwds)
total_len = arg_len + kwd_len
if total_len == 1:
# Since g_object_newv (super.__new__) does not seem valid for
# direct use with GdkCursor, we must assume usage of at least
# one of the C constructors to be valid.
return cls.new(*args, **kwds)
elif total_len == 2:
warnings.warn('Calling "Gdk.Cursor(display, cursor_type)" has been deprecated. '
'Please use Gdk.Cursor.new_for_display(display, cursor_type). '
'See: https://wiki.gnome.org/PyGObject/InitializerDeprecations',
PyGIDeprecationWarning)
return cls.new_for_display(*args, **kwds)
elif total_len == 4:
warnings.warn('Calling "Gdk.Cursor(display, pixbuf, x, y)" has been deprecated. '
'Please use Gdk.Cursor.new_from_pixbuf(display, pixbuf, x, y). '
'See: https://wiki.gnome.org/PyGObject/InitializerDeprecations',
PyGIDeprecationWarning)
return cls.new_from_pixbuf(*args, **kwds)
elif total_len == 6:
if Gdk._version != '2.0':
# pixmaps don't exist in Gdk 3.0
raise ValueError("Wrong number of parameters")
warnings.warn('Calling "Gdk.Cursor(source, mask, fg, bg, x, y)" has been deprecated. '
'Please use Gdk.Cursor.new_from_pixmap(source, mask, fg, bg, x, y). '
'See: https://wiki.gnome.org/PyGObject/InitializerDeprecations',
PyGIDeprecationWarning)
return cls.new_from_pixmap(*args, **kwds)
else:
raise ValueError("Wrong number of parameters")
Cursor = override(Cursor)
__all__.append('Cursor')
color_parse = strip_boolean_result(Gdk.color_parse)
__all__.append('color_parse')
# Note, we cannot override the entire class as Gdk.Atom has no gtype, so just
# hack some individual methods
def _gdk_atom_str(atom):
n = atom.name()
if n:
return n
# fall back to atom index
return 'Gdk.Atom<%i>' % hash(atom)
def _gdk_atom_repr(atom):
n = atom.name()
if n:
return 'Gdk.Atom.intern("%s", False)' % n
# fall back to atom index
return '<Gdk.Atom(%i)>' % hash(atom)
Gdk.Atom.__str__ = _gdk_atom_str
Gdk.Atom.__repr__ = _gdk_atom_repr
# constants
if Gdk._version >= '3.0':
SELECTION_PRIMARY = Gdk.atom_intern('PRIMARY', True)
__all__.append('SELECTION_PRIMARY')
SELECTION_SECONDARY = Gdk.atom_intern('SECONDARY', True)
__all__.append('SELECTION_SECONDARY')
SELECTION_CLIPBOARD = Gdk.atom_intern('CLIPBOARD', True)
__all__.append('SELECTION_CLIPBOARD')
TARGET_BITMAP = Gdk.atom_intern('BITMAP', True)
__all__.append('TARGET_BITMAP')
TARGET_COLORMAP = Gdk.atom_intern('COLORMAP', True)
__all__.append('TARGET_COLORMAP')
TARGET_DRAWABLE = Gdk.atom_intern('DRAWABLE', True)
__all__.append('TARGET_DRAWABLE')
TARGET_PIXMAP = Gdk.atom_intern('PIXMAP', True)
__all__.append('TARGET_PIXMAP')
TARGET_STRING = Gdk.atom_intern('STRING', True)
__all__.append('TARGET_STRING')
SELECTION_TYPE_ATOM = Gdk.atom_intern('ATOM', True)
__all__.append('SELECTION_TYPE_ATOM')
SELECTION_TYPE_BITMAP = Gdk.atom_intern('BITMAP', True)
__all__.append('SELECTION_TYPE_BITMAP')
SELECTION_TYPE_COLORMAP = Gdk.atom_intern('COLORMAP', True)
__all__.append('SELECTION_TYPE_COLORMAP')
SELECTION_TYPE_DRAWABLE = Gdk.atom_intern('DRAWABLE', True)
__all__.append('SELECTION_TYPE_DRAWABLE')
SELECTION_TYPE_INTEGER = Gdk.atom_intern('INTEGER', True)
__all__.append('SELECTION_TYPE_INTEGER')
SELECTION_TYPE_PIXMAP = Gdk.atom_intern('PIXMAP', True)
__all__.append('SELECTION_TYPE_PIXMAP')
SELECTION_TYPE_WINDOW = Gdk.atom_intern('WINDOW', True)
__all__.append('SELECTION_TYPE_WINDOW')
SELECTION_TYPE_STRING = Gdk.atom_intern('STRING', True)
__all__.append('SELECTION_TYPE_STRING')
initialized, argv = Gdk.init_check(sys.argv)
| gpl-3.0 |
openhatch/oh-mainline | vendor/packages/twisted/twisted/names/root.py | 25 | 16400 | # -*- test-case-name: twisted.names.test.test_rootresolve -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Resolver implementation for querying successive authoritative servers to
look up a record, starting from the root nameservers.
@author: Jp Calderone
todo::
robustify it
documentation
"""
import warnings
from twisted.python.failure import Failure
from twisted.internet import defer
from twisted.names import dns, common, error
def retry(t, p, *args):
"""
Issue a query one or more times.
This function is deprecated. Use one of the resolver classes for retry
logic, or implement it yourself.
"""
warnings.warn(
"twisted.names.root.retry is deprecated since Twisted 10.0. Use a "
"Resolver object for retry logic.", category=DeprecationWarning,
stacklevel=2)
assert t, "Timeout is required"
t = list(t)
def errback(failure):
failure.trap(defer.TimeoutError)
if not t:
return failure
        return p.query(timeout=t.pop(0), *args).addErrback(errback)
    return p.query(timeout=t.pop(0), *args).addErrback(errback)
class _DummyController:
"""
A do-nothing DNS controller. This is useful when all messages received
will be responses to previously issued queries. Anything else received
will be ignored.
"""
def messageReceived(self, *args):
pass
class Resolver(common.ResolverBase):
"""
L{Resolver} implements recursive lookup starting from a specified list of
root servers.
@ivar hints: A C{list} of C{str} giving the dotted quad representation
of IP addresses of root servers at which to begin resolving names.
@ivar _maximumQueries: A C{int} giving the maximum number of queries
which will be attempted to resolve a single name.
@ivar _reactor: A L{IReactorTime} and L{IReactorUDP} provider to use to
bind UDP ports and manage timeouts.
"""
def __init__(self, hints, maximumQueries=10, reactor=None):
common.ResolverBase.__init__(self)
self.hints = hints
self._maximumQueries = maximumQueries
self._reactor = reactor
def _roots(self):
"""
Return a list of two-tuples representing the addresses of the root
servers, as defined by C{self.hints}.
"""
return [(ip, dns.PORT) for ip in self.hints]
def _query(self, query, servers, timeout, filter):
"""
Issue one query and return a L{Deferred} which fires with its response.
@param query: The query to issue.
@type query: L{dns.Query}
@param servers: The servers which might have an answer for this
query.
@type servers: L{list} of L{tuple} of L{str} and L{int}
@param timeout: A timeout on how long to wait for the response.
@type timeout: L{tuple} of L{int}
@param filter: A flag indicating whether to filter the results. If
C{True}, the returned L{Deferred} will fire with a three-tuple of
            lists of L{RRHeaders} (like the return value of the I{lookup*}
            methods of L{IResolver}). If C{False}, the result will be a
L{Message} instance.
@type filter: L{bool}
@return: A L{Deferred} which fires with the response or a timeout
error.
@rtype: L{Deferred}
"""
from twisted.names import client
r = client.Resolver(servers=servers, reactor=self._reactor)
d = r.queryUDP([query], timeout)
if filter:
d.addCallback(r.filterAnswers)
return d
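    # Sketch of the filter flag (hypothetical query): with filter=True the
    # deferred fires with (answers, authority, additional) header lists,
    # matching the IResolver lookup* contract; with filter=False it fires
    # with the raw dns.Message, which _discoveredAuthority needs in order
    # to inspect rCode and the authority/additional sections itself.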
def _lookup(self, name, cls, type, timeout):
"""
Implement name lookup by recursively discovering the authoritative
server for the name and then asking it, starting at one of the servers
in C{self.hints}.
"""
if timeout is None:
# A series of timeouts for semi-exponential backoff, summing to an
# arbitrary total of 60 seconds.
timeout = (1, 3, 11, 45)
return self._discoverAuthority(
dns.Query(name, type, cls), self._roots(), timeout,
self._maximumQueries)
def _discoverAuthority(self, query, servers, timeout, queriesLeft):
"""
Issue a query to a server and follow a delegation if necessary.
@param query: The query to issue.
@type query: L{dns.Query}
@param servers: The servers which might have an answer for this
query.
@type servers: L{list} of L{tuple} of L{str} and L{int}
@param timeout: A C{tuple} of C{int} giving the timeout to use for this
query.
@param queriesLeft: A C{int} giving the number of queries which may
yet be attempted to answer this query before the attempt will be
abandoned.
@return: A L{Deferred} which fires with a three-tuple of lists of
L{RRHeaders} giving the response, or with a L{Failure} if there is
a timeout or response error.
"""
# Stop now if we've hit the query limit.
if queriesLeft <= 0:
return Failure(
error.ResolverError("Query limit reached without result"))
d = self._query(query, servers, timeout, False)
d.addCallback(
self._discoveredAuthority, query, timeout, queriesLeft - 1)
return d
def _discoveredAuthority(self, response, query, timeout, queriesLeft):
"""
Interpret the response to a query, checking for error codes and
following delegations if necessary.
@param response: The L{Message} received in response to issuing C{query}.
@type response: L{Message}
@param query: The L{dns.Query} which was issued.
@type query: L{dns.Query}.
@param timeout: The timeout to use if another query is indicated by
this response.
@type timeout: L{tuple} of L{int}
@param queriesLeft: A C{int} giving the number of queries which may
yet be attempted to answer this query before the attempt will be
abandoned.
@return: A L{Failure} indicating a response error, a three-tuple of
lists of L{RRHeaders} giving the response to C{query} or a
L{Deferred} which will fire with one of those.
"""
if response.rCode != dns.OK:
return Failure(self.exceptionForCode(response.rCode)(response))
# Turn the answers into a structure that's a little easier to work with.
records = {}
for answer in response.answers:
records.setdefault(answer.name, []).append(answer)
def findAnswerOrCName(name, type, cls):
cname = None
for record in records.get(name, []):
if record.cls == cls:
if record.type == type:
return record
elif record.type == dns.CNAME:
cname = record
# If there were any CNAME records, return the last one. There's
# only supposed to be zero or one, though.
return cname
seen = set()
name = query.name
record = None
while True:
seen.add(name)
previous = record
record = findAnswerOrCName(name, query.type, query.cls)
if record is None:
if name == query.name:
# If there's no answer for the original name, then this may
# be a delegation. Code below handles it.
break
else:
# Try to resolve the CNAME with another query.
d = self._discoverAuthority(
dns.Query(str(name), query.type, query.cls),
self._roots(), timeout, queriesLeft)
# We also want to include the CNAME in the ultimate result,
# otherwise this will be pretty confusing.
def cbResolved((answers, authority, additional)):
answers.insert(0, previous)
return (answers, authority, additional)
d.addCallback(cbResolved)
return d
elif record.type == query.type:
return (
response.answers,
response.authority,
response.additional)
else:
# It's a CNAME record. Try to resolve it from the records
# in this response with another iteration around the loop.
if record.payload.name in seen:
raise error.ResolverError("Cycle in CNAME processing")
name = record.payload.name
        # Build a map used to convert NS names into IP addresses.
addresses = {}
for rr in response.additional:
if rr.type == dns.A:
addresses[str(rr.name)] = rr.payload.dottedQuad()
hints = []
traps = []
for rr in response.authority:
if rr.type == dns.NS:
ns = str(rr.payload.name)
if ns in addresses:
hints.append((addresses[ns], dns.PORT))
else:
traps.append(ns)
if hints:
return self._discoverAuthority(
query, hints, timeout, queriesLeft)
elif traps:
d = self.lookupAddress(traps[0], timeout)
d.addCallback(
lambda (answers, authority, additional):
answers[0].payload.dottedQuad())
d.addCallback(
lambda hint: self._discoverAuthority(
query, [(hint, dns.PORT)], timeout, queriesLeft - 1))
return d
else:
return Failure(error.ResolverError(
"Stuck at response without answers or delegation"))
def discoveredAuthority(self, auth, name, cls, type, timeout):
warnings.warn(
'twisted.names.root.Resolver.discoveredAuthority is deprecated since '
'Twisted 10.0. Use twisted.names.client.Resolver directly, instead.',
category=DeprecationWarning, stacklevel=2)
from twisted.names import client
q = dns.Query(name, type, cls)
r = client.Resolver(servers=[(auth, dns.PORT)])
d = r.queryUDP([q], timeout)
d.addCallback(r.filterAnswers)
return d
def lookupNameservers(host, atServer, p=None):
warnings.warn(
'twisted.names.root.lookupNameservers is deprecated since Twisted '
'10.0. Use twisted.names.root.Resolver.lookupNameservers instead.',
category=DeprecationWarning, stacklevel=2)
# print 'Nameserver lookup for', host, 'at', atServer, 'with', p
if p is None:
p = dns.DNSDatagramProtocol(_DummyController())
p.noisy = False
return retry(
(1, 3, 11, 45), # Timeouts
p, # Protocol instance
(atServer, dns.PORT), # Server to query
[dns.Query(host, dns.NS, dns.IN)] # Question to ask
)
def lookupAddress(host, atServer, p=None):
warnings.warn(
'twisted.names.root.lookupAddress is deprecated since Twisted '
'10.0. Use twisted.names.root.Resolver.lookupAddress instead.',
category=DeprecationWarning, stacklevel=2)
# print 'Address lookup for', host, 'at', atServer, 'with', p
if p is None:
p = dns.DNSDatagramProtocol(_DummyController())
p.noisy = False
return retry(
(1, 3, 11, 45), # Timeouts
p, # Protocol instance
(atServer, dns.PORT), # Server to query
[dns.Query(host, dns.A, dns.IN)] # Question to ask
)
def extractAuthority(msg, cache):
warnings.warn(
'twisted.names.root.extractAuthority is deprecated since Twisted '
'10.0. Please inspect the Message object directly.',
category=DeprecationWarning, stacklevel=2)
records = msg.answers + msg.authority + msg.additional
nameservers = [r for r in records if r.type == dns.NS]
# print 'Records for', soFar, ':', records
# print 'NS for', soFar, ':', nameservers
if not nameservers:
return None, nameservers
if not records:
raise IOError("No records")
for r in records:
if r.type == dns.A:
cache[str(r.name)] = r.payload.dottedQuad()
for r in records:
if r.type == dns.NS:
if str(r.payload.name) in cache:
return cache[str(r.payload.name)], nameservers
for addr in records:
if addr.type == dns.A and addr.name == r.name:
return addr.payload.dottedQuad(), nameservers
return None, nameservers
def discoverAuthority(host, roots, cache=None, p=None):
warnings.warn(
'twisted.names.root.discoverAuthority is deprecated since Twisted '
'10.0. Use twisted.names.root.Resolver.lookupNameservers instead.',
category=DeprecationWarning, stacklevel=4)
if cache is None:
cache = {}
rootAuths = list(roots)
parts = host.rstrip('.').split('.')
parts.reverse()
authority = rootAuths.pop()
soFar = ''
for part in parts:
soFar = part + '.' + soFar
# print '///////', soFar, authority, p
msg = defer.waitForDeferred(lookupNameservers(soFar, authority, p))
yield msg
msg = msg.getResult()
newAuth, nameservers = extractAuthority(msg, cache)
if newAuth is not None:
# print "newAuth is not None"
authority = newAuth
else:
if nameservers:
r = str(nameservers[0].payload.name)
# print 'Recursively discovering authority for', r
authority = defer.waitForDeferred(discoverAuthority(r, roots, cache, p))
yield authority
authority = authority.getResult()
# print 'Discovered to be', authority, 'for', r
## else:
## # print 'Doing address lookup for', soFar, 'at', authority
## msg = defer.waitForDeferred(lookupAddress(soFar, authority, p))
## yield msg
## msg = msg.getResult()
## records = msg.answers + msg.authority + msg.additional
## addresses = [r for r in records if r.type == dns.A]
## if addresses:
## authority = addresses[0].payload.dottedQuad()
## else:
## raise IOError("Resolution error")
# print "Yielding authority", authority
yield authority
discoverAuthority = defer.deferredGenerator(discoverAuthority)
def makePlaceholder(deferred, name):
def placeholder(*args, **kw):
deferred.addCallback(lambda r: getattr(r, name)(*args, **kw))
return deferred
return placeholder
class DeferredResolver:
def __init__(self, resolverDeferred):
self.waiting = []
resolverDeferred.addCallback(self.gotRealResolver)
def gotRealResolver(self, resolver):
w = self.waiting
self.__dict__ = resolver.__dict__
self.__class__ = resolver.__class__
for d in w:
d.callback(resolver)
def __getattr__(self, name):
if name.startswith('lookup') or name in ('getHostByName', 'query'):
self.waiting.append(defer.Deferred())
return makePlaceholder(self.waiting[-1], name)
raise AttributeError(name)
def bootstrap(resolver):
"""Lookup the root nameserver addresses using the given resolver
Return a Resolver which will eventually become a C{root.Resolver}
instance that has references to all the root servers that we were able
to look up.
"""
domains = [chr(ord('a') + i) for i in range(13)]
# f = lambda r: (log.msg('Root server address: ' + str(r)), r)[1]
f = lambda r: r
L = [resolver.getHostByName('%s.root-servers.net' % d).addCallback(f) for d in domains]
d = defer.DeferredList(L)
d.addCallback(lambda r: Resolver([e[1] for e in r if e[0]]))
return DeferredResolver(d)
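# Illustrative usage sketch (assumes a configured client resolver and a
# running reactor; values are hypothetical):
#
#     from twisted.names import client, root
#     resolver = root.bootstrap(client.createResolver())
#     d = resolver.lookupAddress('example.com')
#     d.addCallback(lambda result: result[0])  # the answers section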
| agpl-3.0 |
hpproliant/ironic | ironic/common/glance_service/v2/image_service.py | 1 | 11995 | # Copyright 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_utils import uuidutils
from six.moves.urllib import parse as urlparse
from swiftclient import utils as swift_utils
from ironic.common import exception as exc
from ironic.common.glance_service import base_image_service
from ironic.common.glance_service import service
from ironic.common.glance_service import service_utils
from ironic.common.i18n import _
glance_opts = [
cfg.ListOpt('allowed_direct_url_schemes',
default=[],
help=_('A list of URL schemes that can be downloaded directly '
'via the direct_url. Currently supported schemes: '
'[file].')),
# To upload this key to Swift:
# swift post -m Temp-Url-Key:secretkey
# When using radosgw, temp url key could be uploaded via the above swift
# command, or with:
# radosgw-admin user modify --uid=user --temp-url-key=secretkey
cfg.StrOpt('swift_temp_url_key',
help=_('The secret token given to Swift to allow temporary URL '
'downloads. Required for temporary URLs.'),
secret=True),
cfg.IntOpt('swift_temp_url_duration',
default=1200,
help=_('The length of time in seconds that the temporary URL '
'will be valid for. Defaults to 20 minutes. If some '
'deploys get a 401 response code when trying to '
'download from the temporary URL, try raising this '
'duration.')),
cfg.StrOpt(
'swift_endpoint_url',
help=_('The "endpoint" (scheme, hostname, optional port) for '
'the Swift URL of the form '
'"endpoint_url/api_version/[account/]container/object_id". '
'Do not include trailing "/". '
'For example, use "https://swift.example.com". If using RADOS '
'Gateway, endpoint may also contain /swift path; if it does '
'not, it will be appended. Required for temporary URLs.')),
cfg.StrOpt(
'swift_api_version',
default='v1',
help=_('The Swift API version to create a temporary URL for. '
'Defaults to "v1". Swift temporary URL format: '
'"endpoint_url/api_version/[account/]container/object_id"')),
cfg.StrOpt(
'swift_account',
help=_('The account that Glance uses to communicate with '
'Swift. The format is "AUTH_uuid". "uuid" is the '
'UUID for the account configured in the glance-api.conf. '
'Required for temporary URLs when Glance backend is Swift. '
'For example: "AUTH_a422b2-91f3-2f46-74b7-d7c9e8958f5d30". '
'Swift temporary URL format: '
'"endpoint_url/api_version/[account/]container/object_id"')),
cfg.StrOpt(
'swift_container',
default='glance',
help=_('The Swift container Glance is configured to store its '
'images in. Defaults to "glance", which is the default '
'in glance-api.conf. '
'Swift temporary URL format: '
'"endpoint_url/api_version/[account/]container/object_id"')),
cfg.IntOpt('swift_store_multiple_containers_seed',
default=0,
help=_('This should match a config by the same name in the '
'Glance configuration file. When set to 0, a '
'single-tenant store will only use one '
'container to store all images. When set to an integer '
'value between 1 and 32, a single-tenant store will use '
'multiple containers to store images, and this value '
'will determine how many containers are created.')),
cfg.StrOpt('temp_url_endpoint_type',
default='swift',
choices=['swift', 'radosgw'],
help=_('Type of endpoint to use for temporary URLs. If the '
'Glance backend is Swift, use "swift"; if it is CEPH '
'with RADOS gateway, use "radosgw".'))
]
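# Hedged ini sketch of the options above (every value is hypothetical):
#
#   [glance]
#   swift_endpoint_url = https://swift.example.com
#   swift_api_version = v1
#   swift_account = AUTH_a422b2-91f3-2f46-74b7-d7c9e8958f5d30
#   swift_container = glance
#   swift_temp_url_key = secretkey
#   swift_temp_url_duration = 1200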
CONF = cfg.CONF
CONF.register_opts(glance_opts, group='glance')
class GlanceImageService(base_image_service.BaseImageService,
service.ImageService):
def detail(self, **kwargs):
return self._detail(method='list', **kwargs)
def show(self, image_id):
return self._show(image_id, method='get')
def download(self, image_id, data=None):
return self._download(image_id, method='data', data=data)
def create(self, image_meta, data=None):
image_id = self._create(image_meta, method='create', data=None)['id']
return self.update(image_id, None, data)
def update(self, image_id, image_meta, data=None, purge_props=False):
# NOTE(ghe): purge_props not working until bug 1206472 solved
return self._update(image_id, image_meta, data, method='update',
purge_props=False)
def delete(self, image_id):
return self._delete(image_id, method='delete')
def swift_temp_url(self, image_info):
"""Generate a no-auth Swift temporary URL.
This function will generate the temporary Swift URL using the image
id from Glance and the config options: 'swift_endpoint_url',
'swift_api_version', 'swift_account' and 'swift_container'.
The temporary URL will be valid for 'swift_temp_url_duration' seconds.
This allows Ironic to download a Glance image without passing around
an auth_token.
:param image_info: The return from a GET request to Glance for a
certain image_id. Should be a dictionary, with keys like 'name' and
'checksum'. See
http://docs.openstack.org/developer/glance/glanceapi.html for
examples.
:returns: A signed Swift URL from which an image can be downloaded,
without authentication.
:raises: InvalidParameterValue if Swift config options are not set
correctly.
:raises: MissingParameterValue if a required parameter is not set.
:raises: ImageUnacceptable if the image info from Glance does not
have a image ID.
"""
self._validate_temp_url_config()
if ('id' not in image_info or not
uuidutils.is_uuid_like(image_info['id'])):
raise exc.ImageUnacceptable(_(
'The given image info does not have a valid image id: %s')
% image_info)
url_fragments = {
'api_version': CONF.glance.swift_api_version,
'account': CONF.glance.swift_account,
'container': self._get_swift_container(image_info['id']),
'object_id': image_info['id']
}
endpoint_url = CONF.glance.swift_endpoint_url
if CONF.glance.temp_url_endpoint_type == 'radosgw':
chunks = urlparse.urlsplit(CONF.glance.swift_endpoint_url)
if not chunks.path:
endpoint_url = urlparse.urljoin(
endpoint_url, 'swift')
elif chunks.path != '/swift':
raise exc.InvalidParameterValue(
_('Swift endpoint URL should only contain scheme, '
'hostname, optional port and optional /swift path '
'without trailing slash; provided value is: %s')
% endpoint_url)
template = '/{api_version}/{container}/{object_id}'
else:
template = '/{api_version}/{account}/{container}/{object_id}'
url_path = template.format(**url_fragments)
path = swift_utils.generate_temp_url(
path=url_path,
seconds=CONF.glance.swift_temp_url_duration,
key=CONF.glance.swift_temp_url_key,
method='GET')
return '{endpoint_url}{url_path}'.format(
endpoint_url=endpoint_url, url_path=path)
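    # Illustrative sketch (hedged, not part of the original flow): with the
    # example config values from the option help above, the signed URL is
    # assembled roughly as
    #
    #   url_path = '/v1/AUTH_a422b2-91f3-2f46-74b7-d7c9e8958f5d30/glance/<image-uuid>'
    #   path     = url_path + '?temp_url_sig=<hmac>&temp_url_expires=<timestamp>'
    #   full URL = CONF.glance.swift_endpoint_url + path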
def _validate_temp_url_config(self):
"""Validate the required settings for a temporary URL."""
if not CONF.glance.swift_temp_url_key:
raise exc.MissingParameterValue(_(
'Swift temporary URLs require a shared secret to be created. '
'You must provide "swift_temp_url_key" as a config option.'))
if not CONF.glance.swift_endpoint_url:
raise exc.MissingParameterValue(_(
'Swift temporary URLs require a Swift endpoint URL. '
'You must provide "swift_endpoint_url" as a config option.'))
if (not CONF.glance.swift_account and
CONF.glance.temp_url_endpoint_type == 'swift'):
raise exc.MissingParameterValue(_(
'Swift temporary URLs require a Swift account string. '
'You must provide "swift_account" as a config option.'))
if CONF.glance.swift_temp_url_duration < 0:
raise exc.InvalidParameterValue(_(
'"swift_temp_url_duration" must be a positive integer.'))
seed_num_chars = CONF.glance.swift_store_multiple_containers_seed
if (seed_num_chars is None or seed_num_chars < 0
or seed_num_chars > 32):
raise exc.InvalidParameterValue(_(
"An integer value between 0 and 32 is required for"
" swift_store_multiple_containers_seed."))
def _get_swift_container(self, image_id):
"""Get the Swift container the image is stored in.
Code based on: https://github.com/openstack/glance_store/blob/3cd690b3
7dc9d935445aca0998e8aec34a3e3530/glance_store/
_drivers/swift/store.py#L725
        Returns the appropriate container name depending upon the value of
        ``swift_store_multiple_containers_seed``. In single-container mode,
        which is a seed value of 0, simply returns ``swift_container``.
        In multiple-container mode, returns ``swift_container`` as the
        prefix plus a suffix determined by the multiple container seed.
        Examples:
single-container mode: 'glance'
multiple-container mode: 'glance_3a1' for image uuid 3A1xxxxxxx...
:param image_id: UUID of image
:returns: The name of the swift container the image is stored in
"""
seed_num_chars = CONF.glance.swift_store_multiple_containers_seed
if seed_num_chars > 0:
image_id = str(image_id).lower()
num_dashes = image_id[:seed_num_chars].count('-')
num_chars = seed_num_chars + num_dashes
name_suffix = image_id[:num_chars]
new_container_name = (CONF.glance.swift_container +
'_' + name_suffix)
return new_container_name
else:
return CONF.glance.swift_container
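    # Worked example (hedged) mirroring the docstring above: with
    # swift_store_multiple_containers_seed = 3 and image_id
    # '3a1f01a6-...', the suffix is image_id[:3] == '3a1' (any dashes in
    # that slice widen the window), giving container 'glance_3a1'.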
def _get_location(self, image_id):
"""Get storage URL.
Returns the direct url representing the backend storage location,
or None if this attribute is not shown by Glance.
"""
image_meta = self.call('get', image_id)
if not service_utils.is_image_available(self.context, image_meta):
raise exc.ImageNotFound(image_id=image_id)
return getattr(image_meta, 'direct_url', None)
| apache-2.0 |
MwanzanFelipe/rockletonfortune | lib/django/db/migrations/operations/models.py | 290 | 21735 | from __future__ import unicode_literals
from django.db import models
from django.db.migrations.operations.base import Operation
from django.db.migrations.state import ModelState
from django.db.models.options import normalize_together
from django.utils import six
from django.utils.functional import cached_property
class CreateModel(Operation):
"""
Create a model's table.
"""
serialization_expand_args = ['fields', 'options', 'managers']
def __init__(self, name, fields, options=None, bases=None, managers=None):
self.name = name
self.fields = fields
self.options = options or {}
self.bases = bases or (models.Model,)
self.managers = managers or []
@cached_property
def name_lower(self):
return self.name.lower()
def deconstruct(self):
kwargs = {
'name': self.name,
'fields': self.fields,
}
if self.options:
kwargs['options'] = self.options
if self.bases and self.bases != (models.Model,):
kwargs['bases'] = self.bases
if self.managers and self.managers != [('objects', models.Manager())]:
kwargs['managers'] = self.managers
return (
self.__class__.__name__,
[],
kwargs
)
def state_forwards(self, app_label, state):
state.add_model(ModelState(
app_label,
self.name,
list(self.fields),
dict(self.options),
tuple(self.bases),
list(self.managers),
))
def database_forwards(self, app_label, schema_editor, from_state, to_state):
model = to_state.apps.get_model(app_label, self.name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
schema_editor.create_model(model)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
model = from_state.apps.get_model(app_label, self.name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
schema_editor.delete_model(model)
def describe(self):
return "Create %smodel %s" % ("proxy " if self.options.get("proxy", False) else "", self.name)
def references_model(self, name, app_label=None):
strings_to_check = [self.name]
# Check we didn't inherit from the model
for base in self.bases:
if isinstance(base, six.string_types):
strings_to_check.append(base.split(".")[-1])
# Check we have no FKs/M2Ms with it
for fname, field in self.fields:
if field.remote_field:
if isinstance(field.remote_field.model, six.string_types):
strings_to_check.append(field.remote_field.model.split(".")[-1])
# Now go over all the strings and compare them
for string in strings_to_check:
if string.lower() == name.lower():
return True
return False
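# Hedged usage sketch for CreateModel above (the model and field
# definitions are hypothetical):
#
#   CreateModel(
#       name='Pony',
#       fields=[('id', models.AutoField(primary_key=True)),
#               ('name', models.CharField(max_length=100))],
#   )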
class DeleteModel(Operation):
"""
Drops a model's table.
"""
def __init__(self, name):
self.name = name
@cached_property
def name_lower(self):
return self.name.lower()
def deconstruct(self):
kwargs = {
'name': self.name,
}
return (
self.__class__.__name__,
[],
kwargs
)
def state_forwards(self, app_label, state):
state.remove_model(app_label, self.name_lower)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
model = from_state.apps.get_model(app_label, self.name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
schema_editor.delete_model(model)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
model = to_state.apps.get_model(app_label, self.name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
schema_editor.create_model(model)
def references_model(self, name, app_label=None):
return name.lower() == self.name_lower
def describe(self):
return "Delete model %s" % (self.name, )
class RenameModel(Operation):
"""
Renames a model.
"""
def __init__(self, old_name, new_name):
self.old_name = old_name
self.new_name = new_name
@cached_property
def old_name_lower(self):
return self.old_name.lower()
@cached_property
def new_name_lower(self):
return self.new_name.lower()
def deconstruct(self):
kwargs = {
'old_name': self.old_name,
'new_name': self.new_name,
}
return (
self.__class__.__name__,
[],
kwargs
)
def state_forwards(self, app_label, state):
apps = state.apps
model = apps.get_model(app_label, self.old_name)
model._meta.apps = apps
# Get all of the related objects we need to repoint
all_related_objects = (
f for f in model._meta.get_fields(include_hidden=True)
if f.auto_created and not f.concrete and (not f.hidden or f.many_to_many)
)
# Rename the model
state.models[app_label, self.new_name_lower] = state.models[app_label, self.old_name_lower]
state.models[app_label, self.new_name_lower].name = self.new_name
state.remove_model(app_label, self.old_name_lower)
# Repoint the FKs and M2Ms pointing to us
for related_object in all_related_objects:
if related_object.model is not model:
# The model being renamed does not participate in this relation
# directly. Rather, a superclass does.
continue
# Use the new related key for self referential related objects.
if related_object.related_model == model:
related_key = (app_label, self.new_name_lower)
else:
related_key = (
related_object.related_model._meta.app_label,
related_object.related_model._meta.model_name,
)
new_fields = []
for name, field in state.models[related_key].fields:
if name == related_object.field.name:
field = field.clone()
field.remote_field.model = "%s.%s" % (app_label, self.new_name)
new_fields.append((name, field))
state.models[related_key].fields = new_fields
state.reload_model(*related_key)
state.reload_model(app_label, self.new_name_lower)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
new_model = to_state.apps.get_model(app_label, self.new_name)
if self.allow_migrate_model(schema_editor.connection.alias, new_model):
old_model = from_state.apps.get_model(app_label, self.old_name)
# Move the main table
schema_editor.alter_db_table(
new_model,
old_model._meta.db_table,
new_model._meta.db_table,
)
# Alter the fields pointing to us
for related_object in old_model._meta.related_objects:
if related_object.related_model == old_model:
model = new_model
related_key = (app_label, self.new_name_lower)
else:
model = related_object.related_model
related_key = (
related_object.related_model._meta.app_label,
related_object.related_model._meta.model_name,
)
to_field = to_state.apps.get_model(
*related_key
)._meta.get_field(related_object.field.name)
schema_editor.alter_field(
model,
related_object.field,
to_field,
)
# Rename M2M fields whose name is based on this model's name.
fields = zip(old_model._meta.local_many_to_many, new_model._meta.local_many_to_many)
for (old_field, new_field) in fields:
# Skip self-referential fields as these are renamed above.
if new_field.model == new_field.related_model or not new_field.remote_field.through._meta.auto_created:
continue
# Rename the M2M table that's based on this model's name.
old_m2m_model = old_field.remote_field.through
new_m2m_model = new_field.remote_field.through
schema_editor.alter_db_table(
new_m2m_model,
old_m2m_model._meta.db_table,
new_m2m_model._meta.db_table,
)
# Rename the column in the M2M table that's based on this
# model's name.
schema_editor.alter_field(
new_m2m_model,
old_m2m_model._meta.get_field(old_model._meta.model_name),
new_m2m_model._meta.get_field(new_model._meta.model_name),
)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
self.new_name_lower, self.old_name_lower = self.old_name_lower, self.new_name_lower
self.new_name, self.old_name = self.old_name, self.new_name
self.database_forwards(app_label, schema_editor, from_state, to_state)
self.new_name_lower, self.old_name_lower = self.old_name_lower, self.new_name_lower
self.new_name, self.old_name = self.old_name, self.new_name
def references_model(self, name, app_label=None):
return (
name.lower() == self.old_name_lower or
name.lower() == self.new_name_lower
)
def describe(self):
return "Rename model %s to %s" % (self.old_name, self.new_name)
class AlterModelTable(Operation):
"""
    Renames a model's table.
"""
def __init__(self, name, table):
self.name = name
self.table = table
@cached_property
def name_lower(self):
return self.name.lower()
def deconstruct(self):
kwargs = {
'name': self.name,
'table': self.table,
}
return (
self.__class__.__name__,
[],
kwargs
)
def state_forwards(self, app_label, state):
state.models[app_label, self.name_lower].options["db_table"] = self.table
state.reload_model(app_label, self.name_lower)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
new_model = to_state.apps.get_model(app_label, self.name)
if self.allow_migrate_model(schema_editor.connection.alias, new_model):
old_model = from_state.apps.get_model(app_label, self.name)
schema_editor.alter_db_table(
new_model,
old_model._meta.db_table,
new_model._meta.db_table,
)
# Rename M2M fields whose name is based on this model's db_table
for (old_field, new_field) in zip(old_model._meta.local_many_to_many, new_model._meta.local_many_to_many):
if new_field.remote_field.through._meta.auto_created:
schema_editor.alter_db_table(
new_field.remote_field.through,
old_field.remote_field.through._meta.db_table,
new_field.remote_field.through._meta.db_table,
)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
return self.database_forwards(app_label, schema_editor, from_state, to_state)
def references_model(self, name, app_label=None):
return name.lower() == self.name_lower
def describe(self):
return "Rename table for %s to %s" % (self.name, self.table)
class AlterUniqueTogether(Operation):
"""
Changes the value of unique_together to the target one.
Input value of unique_together must be a set of tuples.
"""
option_name = "unique_together"
def __init__(self, name, unique_together):
self.name = name
unique_together = normalize_together(unique_together)
self.unique_together = set(tuple(cons) for cons in unique_together)
@cached_property
def name_lower(self):
return self.name.lower()
def deconstruct(self):
kwargs = {
'name': self.name,
'unique_together': self.unique_together,
}
return (
self.__class__.__name__,
[],
kwargs
)
def state_forwards(self, app_label, state):
model_state = state.models[app_label, self.name_lower]
model_state.options[self.option_name] = self.unique_together
state.reload_model(app_label, self.name_lower)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
new_model = to_state.apps.get_model(app_label, self.name)
if self.allow_migrate_model(schema_editor.connection.alias, new_model):
old_model = from_state.apps.get_model(app_label, self.name)
schema_editor.alter_unique_together(
new_model,
getattr(old_model._meta, self.option_name, set()),
getattr(new_model._meta, self.option_name, set()),
)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
return self.database_forwards(app_label, schema_editor, from_state, to_state)
def references_model(self, name, app_label=None):
return name.lower() == self.name_lower
def references_field(self, model_name, name, app_label=None):
return (
self.references_model(model_name, app_label) and
(
not self.unique_together or
any((name in together) for together in self.unique_together)
)
)
def describe(self):
return "Alter %s for %s (%s constraint(s))" % (self.option_name, self.name, len(self.unique_together or ''))
class AlterIndexTogether(Operation):
"""
Changes the value of index_together to the target one.
Input value of index_together must be a set of tuples.
"""
option_name = "index_together"
def __init__(self, name, index_together):
self.name = name
index_together = normalize_together(index_together)
self.index_together = set(tuple(cons) for cons in index_together)
@cached_property
def name_lower(self):
return self.name.lower()
def deconstruct(self):
kwargs = {
'name': self.name,
'index_together': self.index_together,
}
return (
self.__class__.__name__,
[],
kwargs
)
def state_forwards(self, app_label, state):
model_state = state.models[app_label, self.name_lower]
model_state.options[self.option_name] = self.index_together
state.reload_model(app_label, self.name_lower)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
new_model = to_state.apps.get_model(app_label, self.name)
if self.allow_migrate_model(schema_editor.connection.alias, new_model):
old_model = from_state.apps.get_model(app_label, self.name)
schema_editor.alter_index_together(
new_model,
getattr(old_model._meta, self.option_name, set()),
getattr(new_model._meta, self.option_name, set()),
)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
return self.database_forwards(app_label, schema_editor, from_state, to_state)
def references_model(self, name, app_label=None):
return name.lower() == self.name_lower
def references_field(self, model_name, name, app_label=None):
return (
self.references_model(model_name, app_label) and
(
not self.index_together or
any((name in together) for together in self.index_together)
)
)
def describe(self):
return "Alter %s for %s (%s constraint(s))" % (self.option_name, self.name, len(self.index_together or ''))
class AlterOrderWithRespectTo(Operation):
"""
Represents a change with the order_with_respect_to option.
"""
def __init__(self, name, order_with_respect_to):
self.name = name
self.order_with_respect_to = order_with_respect_to
@cached_property
def name_lower(self):
return self.name.lower()
def deconstruct(self):
kwargs = {
'name': self.name,
'order_with_respect_to': self.order_with_respect_to,
}
return (
self.__class__.__name__,
[],
kwargs
)
def state_forwards(self, app_label, state):
model_state = state.models[app_label, self.name_lower]
model_state.options['order_with_respect_to'] = self.order_with_respect_to
state.reload_model(app_label, self.name_lower)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
to_model = to_state.apps.get_model(app_label, self.name)
if self.allow_migrate_model(schema_editor.connection.alias, to_model):
from_model = from_state.apps.get_model(app_label, self.name)
# Remove a field if we need to
if from_model._meta.order_with_respect_to and not to_model._meta.order_with_respect_to:
schema_editor.remove_field(from_model, from_model._meta.get_field("_order"))
            # Add a field if we need to (altering the column is left
            # untouched here, as it's likely a rename)
elif to_model._meta.order_with_respect_to and not from_model._meta.order_with_respect_to:
field = to_model._meta.get_field("_order")
if not field.has_default():
field.default = 0
schema_editor.add_field(
from_model,
field,
)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
self.database_forwards(app_label, schema_editor, from_state, to_state)
def references_model(self, name, app_label=None):
return name.lower() == self.name_lower
def references_field(self, model_name, name, app_label=None):
return (
self.references_model(model_name, app_label) and
(
self.order_with_respect_to is None or
name == self.order_with_respect_to
)
)
def describe(self):
return "Set order_with_respect_to on %s to %s" % (self.name, self.order_with_respect_to)
class AlterModelOptions(Operation):
"""
Sets new model options that don't directly affect the database schema
(like verbose_name, permissions, ordering). Python code in migrations
may still need them.
"""
# Model options we want to compare and preserve in an AlterModelOptions op
ALTER_OPTION_KEYS = [
"get_latest_by",
"managed",
"ordering",
"permissions",
"default_permissions",
"select_on_save",
"verbose_name",
"verbose_name_plural",
]
def __init__(self, name, options):
self.name = name
self.options = options
@cached_property
def name_lower(self):
return self.name.lower()
def deconstruct(self):
kwargs = {
'name': self.name,
'options': self.options,
}
return (
self.__class__.__name__,
[],
kwargs
)
def state_forwards(self, app_label, state):
model_state = state.models[app_label, self.name_lower]
model_state.options = dict(model_state.options)
model_state.options.update(self.options)
for key in self.ALTER_OPTION_KEYS:
if key not in self.options and key in model_state.options:
del model_state.options[key]
state.reload_model(app_label, self.name_lower)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
pass
def database_backwards(self, app_label, schema_editor, from_state, to_state):
pass
def references_model(self, name, app_label=None):
return name.lower() == self.name_lower
def describe(self):
return "Change Meta options on %s" % (self.name, )
class AlterModelManagers(Operation):
"""
    Alters the model's managers.
"""
serialization_expand_args = ['managers']
def __init__(self, name, managers):
self.name = name
self.managers = managers
@cached_property
def name_lower(self):
return self.name.lower()
def deconstruct(self):
return (
self.__class__.__name__,
[self.name, self.managers],
{}
)
def state_forwards(self, app_label, state):
model_state = state.models[app_label, self.name_lower]
model_state.managers = list(self.managers)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
pass
def database_backwards(self, app_label, schema_editor, from_state, to_state):
pass
def references_model(self, name, app_label=None):
return name.lower() == self.name_lower
def describe(self):
return "Change managers on %s" % (self.name, )
| bsd-3-clause |
SINGROUP/pycp2k | pycp2k/classes/_each423.py | 1 | 1114 | from pycp2k.inputsection import InputSection
class _each423(InputSection):
def __init__(self):
InputSection.__init__(self)
self.Just_energy = None
self.Powell_opt = None
self.Qs_scf = None
self.Xas_scf = None
self.Md = None
self.Pint = None
self.Metadynamics = None
self.Geo_opt = None
self.Rot_opt = None
self.Cell_opt = None
self.Band = None
self.Ep_lin_solver = None
self.Spline_find_coeffs = None
self.Replica_eval = None
self.Bsse = None
self.Shell_opt = None
self.Tddft_scf = None
self._name = "EACH"
self._keywords = {'Bsse': 'BSSE', 'Cell_opt': 'CELL_OPT', 'Just_energy': 'JUST_ENERGY', 'Band': 'BAND', 'Xas_scf': 'XAS_SCF', 'Rot_opt': 'ROT_OPT', 'Replica_eval': 'REPLICA_EVAL', 'Tddft_scf': 'TDDFT_SCF', 'Shell_opt': 'SHELL_OPT', 'Md': 'MD', 'Pint': 'PINT', 'Metadynamics': 'METADYNAMICS', 'Geo_opt': 'GEO_OPT', 'Spline_find_coeffs': 'SPLINE_FIND_COEFFS', 'Powell_opt': 'POWELL_OPT', 'Qs_scf': 'QS_SCF', 'Ep_lin_solver': 'EP_LIN_SOLVER'}
| lgpl-3.0 |
asttra/pysces | pysces/PyscesSBML.py | 1 | 29664 | """
PySCeS - Python Simulator for Cellular Systems (http://pysces.sourceforge.net)
Copyright (C) 2004-2015 B.G. Olivier, J.M. Rohwer, J.-H.S Hofmeyr all rights reserved,
Brett G. Olivier ([email protected])
Triple-J Group for Molecular Cell Physiology
Stellenbosch University, South Africa.
Permission to use, modify, and distribute this software is given under the
terms of the PySceS (BSD style) license. See LICENSE.txt that came with
this distribution for specifics.
NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
Brett G. Olivier
"""
from pysces.version import __version__
__doc__ = '''SBML reading/writing module - now replaced by PySCeS Core2'''
import os,sys
from time import sleep, strftime
from getpass import getuser
if sys.platform == 'win32':
try:
import pysces.libsbml.libsbmlinit as SBML
except Exception, e:
print 'Windows sbml load error',e
else:
try:
import libsbml as SBML
except Exception, e:
print 'Posix sbml load error',e
class PyscesSBML:
"""The PySCeS interface to libSBML and SBML utilities"""
mode_number_format = '%2.4e'
sbml_level = 2
_debug = 0
SBML = SBML
def SBML_buildBasicModel(self,mod,filename,slvl=2,dir=None,substance=(1,0),volume=(1,0),time=(1,0),arules=None,notes=None):
"""
SBML_buildBasicModel(mod,filename,slvl=2,dir=None)
Create a basic SBML model.
Arguments:
=========
mod: an active PySCeS model object
filename: the output SBML file name
slvl [default=2]: the SBML level that should be used
dir [default=None]: the output directory
substance [default=(1,0)]: the model substance unit - SBML default is "mole"
volume [default=(1,0)]: the model volume unit - SBML default is "litre"
time [default=(1,0)]: the model time unit - SBML default is "second"
"""
self.SBML_createModel(mod,filename,slvl,dir)
self.SBML_setCompartment()
self.SBML_setNotes(txt=notes)
self.SBML_setUnits(substance=substance,volume=volume,time=time)
if arules != None: self.SBML_setAssignmentRules(arules)
self.SBML_setSpecies()
self.SBML_setReactions()
self.SBML_setModel()
def write(self,mod,filename,slvl=2,dir=None,substance=(1,0),volume=(1,0),time=(1,0),arules=None,notes=None):
"""
write(mod,filename,slvl=2,dir=None)
Write a PySCeS model as an SBML file.
Arguments:
=========
mod: an active PySCeS model object
filename: the output SBML file name
slvl [default=2]: the SBML level that should be used
dir [default=None]: the output directory
substance [default=(1,0)]: the model substance unit - SBML default is "mole"
volume [default=(1,0)]: the model volume unit - SBML default is "litre"
time [default=(1,0)]: the model time unit - SBML default is "second"
"""
self.SBML_buildBasicModel(mod,filename,slvl,dir,substance,volume,time,arules,notes)
self.SBML_writeFile()
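    # Hedged usage sketch (the model file name is hypothetical):
    #
    #   import pysces
    #   mod = pysces.model('my_model.psc')
    #   PyscesSBML().write(mod, 'my_model_sbml')
    #
    # which should write 'my_model_sbml.xml' into mod.ModelOutput.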
def getSBML_document(self,mod,substance=(1,0),volume=(1,0),time=(1,0),arules=None,notes=None):
"""
Returns an SBML document object
Arguments:
=========
mod: an active PySCeS model object
substance [default=(1,0)]: the model substance unit - SBML default is "mole"
volume [default=(1,0)]: the model volume unit - SBML default is "litre"
time [default=(1,0)]: the model time unit - SBML default is "second"
"""
filename = 'tempXML'
slvl = 2
dir = None
self.SBML_buildBasicModel(mod,filename,slvl,dir,substance,volume,time,arules,notes)
return self.sbml_document
def getSBML_string(self,mod,substance=(1,0),volume=(1,0),time=(1,0),arules=None,notes=None):
"""
Returns an SBML file as a string
Arguments:
=========
mod: an active PySCeS model object
substance [default=(1,0)]: the model substance unit - SBML default is "mole"
volume [default=(1,0)]: the model volume unit - SBML default is "litre"
time [default=(1,0)]: the model time unit - SBML default is "second"
"""
filename = 'tempXML'
slvl = 2
dir = None
self.SBML_buildBasicModel(mod,filename,slvl,dir,substance,volume,time,arules,notes)
return self.sbml_document.toSBML()
def __cleanString__(self,s):
        return s.strip()
def parseForcingFunctions(self):
self.__forcing_function_dic__ = {}
ff = self.model_obj._Function_forced.split('\n')
for f in ff:
if f != '':
f = f.split('=')
f[0] = f[0].replace('self.','')
f[1] = f[1].replace('self.','')
self.__forcing_function_dic__.setdefault(self.__cleanString__(f[0]), self.__cleanString__(f[1]))
def getAssignmentRules(self):
self.parseForcingFunctions()
out = []
for key in self.__forcing_function_dic__.keys():
out.append((key, self.__forcing_function_dic__[key]))
return out
def SBML_createModel(self,mod,filename,slvl=2,dir=None):
"""
SBML_createModel(mod,filename,slvl=2,dir=None)
Set up an SBML document and extract the model NetworkDict
Arguments:
=========
mod: a PySCeS model object
filename: the output filename
slvl [default=2]: SBML level required
dir [default=None]: output directory
"""
if self._debug: print 'SBML_createModel'
self.model_obj = mod
        self.__nDict__ = self.model_obj.__nDict__
        # alias used by SBML_setReactions() below
        self.model_network_dict = self.__nDict__
self.model_filename = filename
if dir == None:
self.model_dir = self.model_obj.ModelOutput
else:
self.model_dir = dir
self.sbml_level = slvl
self.sbml_model = self.SBML.Model()
self.sbml_model.setName(self.model_obj.ModelFile[:-4])
self.sbml_document = self.SBML.SBMLDocument()
# new stuff
self.global_parameters = []
self.model_compartment_name = None
# create initdict
self.__InitStrings__ = [s.replace('self.','') for s in self.model_obj._PysMod__InitStrings]
self.__InitDict__ = {}
for ii in self.__InitStrings__:
l,r = ii.split('=')
self.__InitDict__.setdefault(self.__cleanString__(l), float(self.__cleanString__(r)))
# create forcing function dic
try:
self.parseForcingFunctions()
except:
print "No pre-defined forcing functions"
if self.sbml_level == 1:
if sys.platform == 'win32':
                print 'Due to a bug in libSBML for Windows, writing a level 1 file will crash your session; writing level 2 instead ... sorry'
self.sbml_document.setLevel(2)
else:
self.sbml_document.setLevel(self.sbml_level)
else:
self.sbml_document.setLevel(2)
def SBML_setCompartment(self,name=None,vol=1):
"""
SBML_setCompartment(name=None,vol=1)
Initialise SBML compartments (note PySCeS currently utilises a single compartment)
Arguments:
=========
name [default=None]: the compartment name, default is compartment1
vol [default=1]: the compartment volume
"""
if self._debug: print 'SBML_setCompartment'
comp_def = self.sbml_model.createCompartment()
if not name:
self.model_compartment_name = 'Cell'
else:
self.model_compartment_name = name
for char in [' ','.','-','*','?','!','\t','\n']:
self.model_compartment_name = self.model_compartment_name.replace(char,'_')
self.model_compartment_name = name
comp_def.setId(self.model_compartment_name)
comp_def.setVolume(vol)
def SBML_setNotes(self,txt=None):
notes = '<body xmlns="http://www.w3.org/1999/xhtml">'
if txt != None:
notes += '<span style="font-family: Courier New,Courier,monospace;">'
notes += txt
notes += '</span>'
notes += '</body>'
self.sbml_model.setNotes(notes)
def SBML_setUnits(self, **kwargs):
"""
SBML_setUnits(substance=(1,0), volume=(1,0), time=(1,0))
        Set the SBML default units. Note that the input here is the (factor, scale) pair multiplying
        the SBML default, so for example the default substance (1,0) is (1*10**0)*mole. So e.g.
        if you were specifying default units of millimoles and minutes you would
        set substance=(1,-3) and time=(60,0) i.e. (1*10**-3)*mole and (60*10**0)*second
Arguments:
=========
substance [default=(1,0)]: the model substance unit - SBML default is "mole"
volume [default=(1,0)]: the model volume unit - SBML default is "litre"
time [default=(1,0)]: the model time unit - SBML default is "second"
"""
for un in kwargs.keys():
vdef = self.sbml_model.createUnitDefinition()
vdef.setId(un)
vu = self.sbml_model.createUnit()
if un == 'substance': vu.setKind(self.SBML.UnitKind_forName('mole'))
elif un == 'volume': vu.setKind(self.SBML.UnitKind_forName('litre'))
elif un == 'time': vu.setKind(self.SBML.UnitKind_forName('second'))
vu.setMultiplier(kwargs[un][0])
vu.setScale(kwargs[un][1])
vu.setOffset(0)
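    # Hedged example of the convention described above: millimoles and minutes
    # would be requested as
    #
    #   self.SBML_setUnits(substance=(1, -3), time=(60, 0))
    #
    # i.e. (1*10**-3)*mole and (60*10**0)*second.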
def SBML_setSpecies(self):
"""
SBML_setSpecies()
Initialise and add species information to the SBML model
Arguments:
None
"""
if self._debug: print 'SBML_setSpecies'
reagList = self.model_obj.__species__ + self.model_obj.__fixed_species__
for reagent in range(len(reagList)):
s = self.sbml_model.createSpecies()
s.setId(reagList[reagent])
s.setName(reagList[reagent])
s.setCompartment(self.model_compartment_name)
if reagList[reagent] in self.model_obj.__fixed_species__:
s.setBoundaryCondition(True)
s.setConstant(True)
else:
s.setBoundaryCondition(False)
if reagent < len(self.model_obj.__species__ ):
reagName = reagList[reagent] + '_init'
else:
reagName = reagList[reagent]
if self.sbml_level == 1:
s.setInitialAmount(getattr(self.model_obj,reagName))
else:
s.setInitialConcentration(getattr(self.model_obj,reagName))
def SBML_setAssignmentRules(self, rules=[]):
for rule in rules:
print rule
self.global_parameters.append(rule[0])
p = self.sbml_model.createParameter()
p.setId(rule[0])
p.setValue(getattr(self.model_obj,rule[0]))
p.setConstant(False)
r = self.sbml_model.createAssignmentRule()
r.setVariable(rule[0])
r.setFormula(rule[1])
r.setMathFromFormula()
def SBML_setReactions(self):
"""
SBML_setReactions()
        Add kinetic rate laws to the SBML model
Arguments:
None
"""
if self._debug: print 'SBML_setReactions'
#TotSpecies = list(self.model_obj._PysMod__FixedReagents)+list(self.model_obj._PysMod__VarReagents)
reaction_params = []
for rxn in self.model_obj._PysMod__ReactionIDs:
print 'Adding reaction:', rxn
i = self.sbml_model.createReaction()
i.setId(rxn)
ndr = self.model_network_dict[rxn]
for reagent in ndr['Reagents']:
stoich = ndr['Reagents'][reagent]
species = self.SBML.SpeciesReference(reagent.replace('self.',''),abs(stoich))
if stoich < 0:
i.addReactant(species)
elif stoich > 0:
i.addProduct(species)
elif stoich == 0:
i.addModifier(species)
# add a volume to convert rate equation to kinetic law
kineticLaw = ndr['RateEq'].replace('self.','')
kineticLaw = kineticLaw.replace('scipy.','')
            if self.model_compartment_name not in self.model_obj.parameters:
                kineticLaw = self.model_compartment_name + ' * (' + kineticLaw + ')'
kineticLaw = self.SBML.KineticLaw(kineticLaw)
# local parameters retired in favour of globals
## for parameter in ndr['Params']:
## p = parameter.replace('self.','')
## if p not in self.model_obj.__fixed_species__ and p not in self.global_parameters:
## try:
## kineticLaw.addParameter(self.SBML.Parameter(p, getattr(self.model_obj,p)))
## reaction_params.append(p)
## except AttributeError,err :
## print '\n', err
## print "Parameter set error ... are there forcing functions??"
## sleep(0.5)
i.setKineticLaw(kineticLaw)
if ndr['Type'] == 'Rever':
rev = True
else:
rev = False
i.setReversible(rev)
# Add modifiers to reaction - brett 20050607
for reac in self.model_obj.__modifiers__:
if reac[0] == rxn:
for x in reac[1]:
print ' ' + reac[0] +' has modifier: ' + x
self.sbml_model.createModifier().setSpecies(x)
# add extra parameter initialised but not in reactions
# we have to do this in case the assignment rules are added after we build the model
hack = self.__forcing_function_dic__.keys()
not_xparams = self.global_parameters + reaction_params+\
list(self.model_obj.species)+\
list(self.model_obj.fixed_species) + [self.model_compartment_name] +hack
for k in self.__InitDict__.keys():
if k not in not_xparams:
print 'Adding parameter:', k
self.global_parameters.append(k)
p = self.sbml_model.createParameter()
p.setId(k)
p.setValue(getattr(self.model_obj, k))
def SBML_setModel(self):
"""
SBML_setModel()
Add the SBML model to the predefined SBML document
Arguments:
None
"""
if self._debug: print 'SBML_setModel'
self.sbml_document.setModel(self.sbml_model)
def SBML_writeFile(self):
"""
SBML_writeFile()
Write the SBML document to predefined output file
Arguments:
None
"""
self.SBML.writeSBML(self.sbml_document,'pysces_sbml_tmp.xml')
Fin = open('pysces_sbml_tmp.xml','r')
Fout = open(os.path.join(self.model_dir,self.model_filename+'.xml'),'w')
cntr = 0
try:
UseR = getuser()
except:
UseR = ''
for line in Fin:
if cntr == 1:
Fout.write('<!-- Created with PySCeS ('+ __version__ + ') on ' + strftime("%a, %d %b %Y %H:%M:%S") + ' by '+UseR+' -->\n'+line)
else:
Fout.write(line)
cntr += 1
Fout.close()
Fin.close()
os.remove('pysces_sbml_tmp.xml')
def convert2psc(self,filename,dir=None,dirOut=None):
"""
convert2psc(filename,dir=None,dirOut=None)
Convert an SBML file into a PySCeS input file
Arguments:
=========
filename: the SBML source file
dir [default=None]: specify the SBMl file directory
dirOut [default=None]: the PSC file output directory
"""
if dir == None:
dir = os.getcwd()
File = os.path.join(dir,filename)
assert os.path.exists(File), "Invalid path"
self.model_filename = filename
r = self.SBML.SBMLReader()
d = r.readSBML(File)
m = d.getModel()
def getName(i):
if d.getLevel() == 1:
return i.getName()
else:
return i.getId()
reactions = m.getListOfReactions()
ReactionIDs = []
for i in reactions:
ReactionIDs.append(getName(i))
init_fixed = []
init_var = []
init_par = []
parameters = []
for i in m.getListOfSpecies():
parName = getName(i)
# if a species is a BoundaryCondition or constant it becomes fixed - brett 20050111
if i.getBoundaryCondition() or i.getConstant():
if i.getConstant() and not i.getBoundaryCondition():
print parName, ' is set as constant, assuming: BoundaryCondition = True'
init_fixed.append((parName,i.getInitialConcentration()))
else:
init_var.append((parName,i.getInitialConcentration()))
NetworkDict = dict([(i,dict.fromkeys(['Params',
'RateEq',
'Reagents',
'Type'])) for i in ReactionIDs])
for i in reactions:
rDict = NetworkDict[getName(i)]
j = i.getKineticLaw()
par = []
try:
for k in j.getListOfParameters():
par.append(getName(k))
init_par.append((getName(k),k.getValue()))
parameters.append(getName(k))
rDict['Params'] = par
rDict['RateEq'] = j.getFormula()
if d.getLevel() == 1:
rDict['RateEq'] = rDict['RateEq'].replace(' ', '' )
rDict['RateEq'] = rDict['RateEq'].replace('^', '**')
except Exception, err:
rDict['Params'] = []
rDict['RateEq'] = ''
print err
Substrates = []
Products = []
for k in i.getListOfReactants():
species = k.getSpecies()
stoich = -k.getStoichiometry()
Substrates.append((species,stoich))
for k in i.getListOfProducts():
species = k.getSpecies()
stoich = k.getStoichiometry()
Products.append((species,stoich))
# this is to eliminate zero stoichiometries {0}xyz
badList = []
for sub in Substrates:
if sub[1] == 0:
badList.append(sub)
for bad in badList:
Substrates.pop(Substrates.index(bad))
badList = []
for prod in Products:
if prod[1] == 0:
badList.append(prod)
for bad in badList:
Products.pop(Products.index(bad))
# add source/sink pools to nasty substrate/productless reactions - brett 20050908
if len(Substrates) == 0:
Substrates.append(('$pool',-1.0))
if len(Products) == 0:
Products.append(('$pool',1.0))
# print Substrates
# print Products
rDict['Reagents'] = dict(Substrates+Products)
if i.getReversible() == True:
t = 'Rever'
else:
t = 'Irrev'
rDict['Type'] = t
NetworkDict[getName(i)].update(rDict)
# Add extra model parameters not defined in reactions (apparently)
if len(m.getListOfParameters()) > 0:
for x in m.getListOfParameters():
if getName(x) not in parameters:
#print getName(x)
init_par.append((getName(x),x.getValue()))
if dirOut == None:
self.model_filename = os.path.join(os.getcwd(),self.model_filename)
else:
self.model_filename = os.path.join(dirOut,self.model_filename)
#print 'init_par'
#print init_par
#print 'init_var'
#print init_var
#print 'init_fixed'
#print init_fixed
        # sometimes things just work nicely (replaced all the old show methods) - brett 20050913
outFile = file(self.model_filename+'.psc','w')
self.PSC_writeHeader(outFile)
self.PSC_writeFixedSpeciesList(outFile,init_fixed)
self.PSC_writeRateEquations(outFile,NetworkDict,number_format='%2.3f')
self.PSC_writeSpecies(outFile,init_var)
self.PSC_writeFixedSpecies(outFile,init_fixed)
self.PSC_writeParameters(outFile,init_par)
outFile.close()
# Initialise compartment volumes as a parameter - brett 20050908
compartmentList = []
for comp in m.getListOfCompartments():
#print comp
compartmentList.append((getName(comp),comp.getVolume()))
if len(compartmentList) > 1:
print '\nINFO: PySCeS models are assumed to have a single compartment'
if len(compartmentList) > 0:
F = open(self.model_filename+'.psc','a')
F.write('\n## Initialise compartment volumes')
for comp in compartmentList:
F.write('\n' + comp[0] + ' = ' + str(comp[1]))
## parameters.append(x[0])
## init_par.append(x)
F.write('\n')
F.close()
# Add assignment rules as forcing functions - brett 20050908
pscRules = []
for rule in m.getListOfRules():
pscRules.append((rule.getVariable(),rule.getFormula()))
if len(pscRules) > 0:
F = open(self.model_filename+'.psc','a')
F.write('\n## Assignment rules translated to forcing functions\n')
for rule in pscRules:
rule0 = 'self.' + rule[0]
rule1l = rule[1].split()
for word in range(len(rule1l)):
if rule1l[word].isalnum():
if rule1l[word] not in ['1','2','3','4','5','6','7','8','9']:
rule1l[word] = 'self.'+rule1l[word]
F.write('!F '+ rule0 + ' = ')
for word in rule1l:
F.write(word + ' ')
F.write('\n')
F.write('\n')
F.close()
if len(m.getNotes()) > 0:
F = open(self.model_filename+'.psc','a')
F.write('\n## Model notes' + m.getNotes().replace('\n','\n# ')+'\n\n')
F.close()
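    # Hedged usage sketch for convert2psc() (paths are hypothetical):
    #
    #   sbml = PyscesSBML()
    #   sbml.convert2psc('model.xml', dir='/data/sbml', dirOut='/data/psc')
    #
    # which should write '/data/psc/model.xml.psc'.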
def PSC_writeHeader(self,File):
"""
PSC_writeHeader(File)
Write a PSC file header to an open file object
Arguments:
=========
File: a writable open text file object
"""
try:
UseR = getuser()
except:
UseR = ''
header = ''
#header += '############################################################\n'
header += '# PySCeS (' + __version__ + ') model input file\n'
header += '# PySCeS can be found at http://pysces.sourceforge.net/\n'
#header += '###########################################################\n\n'
header += '# Original input file: ' + File.name.split('\\')[-1][:-4] + '\n'
header += '# This file generated: ' + strftime("%a, %d %b %Y %H:%M:%S") + ' by '+UseR+'\n\n'
File.write(header)
File.write('\n')
def PSC_writeSpecies(self,File,species):
"""
PSC_writeSpecies(File,species)
        Write out model species initialisations to file
        Arguments:
        =========
        File: a writable open file object
        species: a list of (species, value) pairs
"""
out_list = []
out_list.append('\n## Variable species initial values\n')
for x in range(len(species)):
out_list.append(species[x][0] + ' = ' + self.mode_number_format % species[x][1] + '\n')
for x in out_list:
File.write(x)
File.write('\n')
def PSC_writeFixedSpeciesList(self,File,fixed_species):
"""
PSC_writeFixedSpeciesList(File,fixed_species)
Write fixed species declaration to a PSC file
Arguments:
=========
File: open, writable file object
fixed_species: a list of (species,value) pairs
"""
File.write('## Fixed species\n')
if len(fixed_species) == 0:
File.write('# <none>')
else:
File.write('FIX: ')
for x in fixed_species:
File.write(x[0] + ' ')
File.write('\n\n')
def PSC_writeFixedSpecies(self,File,fixed_species):
"""
PSC_writeFixedSpecies(File,fixed_species)
Write fixed species initialisations to a PSC file
Arguments:
=========
File: open, writable file object
fixed_species: a list of (species,value) pairs
"""
out_list = []
out_list.append('\n## Fixed species\n')
for x in range(len(fixed_species)):
out_list.append(fixed_species[x][0] + ' = ' + self.mode_number_format % fixed_species[x][1] + '\n')
for x in out_list:
File.write(x)
File.write('\n')
def PSC_writeParameters(self,File,parameters):
"""
PSC_writeParameters(File,parameters)
        Write model parameter initialisations to a PSC file
Arguments:
=========
File: open, writable file object
parameters: a list of (parameter,value) pairs
"""
out_list = []
out_list.append('\n## Parameters\n')
for x in range(len(parameters)):
out_list.append(parameters[x][0] + ' = ' + self.mode_number_format % parameters[x][1] + '\n')
for x in out_list:
File.write(x)
File.write('\n')
def PSC_writeRateEquations(self,File,NetworkDict,number_format='%2.3f'):
"""
PSC_writeRateEquations(File,NetworkDict,number_format='%2.3f')
Write model rate equations to a PSC file
Arguments:
=========
File: open, writable file object
NetworkDict: a PySCeS network dictionary
number_format [default='%2.3f']: number formatting to use in rate laws
"""
out_list = []
out_list.append('\n## Reaction stoichiometry and rate equations\n')
for key in NetworkDict:
out_list.append(key + ':\n')
reagL = []
reagR = []
for reagent in NetworkDict[key]['Reagents']:
if NetworkDict[key]['Reagents'][reagent] > 0:
if NetworkDict[key]['Reagents'][reagent] == 1.0:
reagR.append(reagent.replace('self.',''))
else:
reagR.append('{' + number_format % abs(NetworkDict[key]['Reagents'][reagent]) + '}' + reagent.replace('self.',''))
elif NetworkDict[key]['Reagents'][reagent] < 0:
if NetworkDict[key]['Reagents'][reagent] == -1.0:
reagL.append(reagent.replace('self.',''))
else:
reagL.append('{' + number_format % abs(NetworkDict[key]['Reagents'][reagent]) + '}' + reagent.replace('self.',''))
elif NetworkDict[key]['Reagents'][reagent] == 0:
#reagL.append(reagent.replace('self.',''))
print NetworkDict[key]['Reagents']
                    raw_input('Zero stoichiometry encountered - please contact the developers')
if len(reagL) == 0:
print 'Zero pool substrate', File.name
reagL.append('$pool')
if len(reagR) == 0:
print 'Zero pool product', File.name
reagR.append('$pool')
substring = ''
count = 0
for x in reagL:
if count != 0:
substring += ' + '
substring += x.replace(' ','')
count += 1
prodstring = ''
count = 0
for x in reagR:
if count != 0:
prodstring += ' + '
prodstring += x.replace(' ','')
count += 1
if NetworkDict[key]['Type'] == 'Rever':
symbol = ' = '
else:
symbol = ' > '
out_list.append('\t' + substring + symbol + prodstring + '\n')
out_list.append('\t' + NetworkDict[key]['RateEq'].replace('self.','') + '\n\n')
for x in out_list:
File.write(x)
| bsd-3-clause |
rvalyi/geraldo | site/newsite/django_1_0/django/utils/html.py | 26 | 7416 | """HTML utilities suitable for global use."""
import re
import string
from django.utils.safestring import SafeData, mark_safe
from django.utils.encoding import force_unicode
from django.utils.functional import allow_lazy
from django.utils.http import urlquote
# Configuration for urlize() function.
LEADING_PUNCTUATION = ['(', '<', '<']
TRAILING_PUNCTUATION = ['.', ',', ')', '>', '\n', '>']
# List of possible strings used for bullets in bulleted lists.
DOTS = ['·', '*', '\xe2\x80\xa2', '•', '•', '•']
unencoded_ampersands_re = re.compile(r'&(?!(\w+|#\d+);)')
word_split_re = re.compile(r'(\s+)')
punctuation_re = re.compile('^(?P<lead>(?:%s)*)(?P<middle>.*?)(?P<trail>(?:%s)*)$' % \
('|'.join([re.escape(x) for x in LEADING_PUNCTUATION]),
'|'.join([re.escape(x) for x in TRAILING_PUNCTUATION])))
simple_email_re = re.compile(r'^\S+@[a-zA-Z0-9._-]+\.[a-zA-Z0-9._-]+$')
link_target_attribute_re = re.compile(r'(<a [^>]*?)target=[^\s>]+')
html_gunk_re = re.compile(r'(?:<br clear="all">|<i><\/i>|<b><\/b>|<em><\/em>|<strong><\/strong>|<\/?smallcaps>|<\/?uppercase>)', re.IGNORECASE)
hard_coded_bullets_re = re.compile(r'((?:<p>(?:%s).*?[a-zA-Z].*?</p>\s*)+)' % '|'.join([re.escape(x) for x in DOTS]), re.DOTALL)
trailing_empty_content_re = re.compile(r'(?:<p>(?: |\s|<br \/>)*?</p>\s*)+\Z')
del x # Temporary variable
def escape(html):
"""Returns the given HTML with ampersands, quotes and carets encoded."""
return mark_safe(force_unicode(html).replace('&', '&').replace('<', '<').replace('>', '>').replace('"', '"').replace("'", '''))
escape = allow_lazy(escape, unicode)
def conditional_escape(html):
"""
Similar to escape(), except that it doesn't operate on pre-escaped strings.
"""
if isinstance(html, SafeData):
return html
else:
return escape(html)
def linebreaks(value, autoescape=False):
"""Converts newlines into <p> and <br />s."""
value = re.sub(r'\r\n|\r|\n', '\n', force_unicode(value)) # normalize newlines
paras = re.split('\n{2,}', value)
if autoescape:
paras = [u'<p>%s</p>' % escape(p.strip()).replace('\n', '<br />') for p in paras]
else:
paras = [u'<p>%s</p>' % p.strip().replace('\n', '<br />') for p in paras]
return u'\n\n'.join(paras)
linebreaks = allow_lazy(linebreaks, unicode)
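# Hedged example for linebreaks() above: single newlines become <br />,
# blank lines separate paragraphs:
#
#   linebreaks(u'line one\nline two\n\nnext para')
#   -> u'<p>line one<br />line two</p>\n\n<p>next para</p>'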
def strip_tags(value):
"""Returns the given HTML with all tags stripped."""
return re.sub(r'<[^>]*?>', '', force_unicode(value))
strip_tags = allow_lazy(strip_tags)
def strip_spaces_between_tags(value):
"""Returns the given HTML with spaces between tags removed."""
return re.sub(r'>\s+<', '><', force_unicode(value))
strip_spaces_between_tags = allow_lazy(strip_spaces_between_tags, unicode)
def strip_entities(value):
"""Returns the given HTML with all entities (&something;) stripped."""
return re.sub(r'&(?:\w+|#\d+);', '', force_unicode(value))
strip_entities = allow_lazy(strip_entities, unicode)
def fix_ampersands(value):
"""Returns the given HTML with all unencoded ampersands encoded correctly."""
return unencoded_ampersands_re.sub('&', force_unicode(value))
fix_ampersands = allow_lazy(fix_ampersands, unicode)
def urlize(text, trim_url_limit=None, nofollow=False, autoescape=False):
"""
Converts any URLs in text into clickable links.
Works on http://, https://, www. links and links ending in .org, .net or
.com. Links can have trailing punctuation (periods, commas, close-parens)
and leading punctuation (opening parens) and it'll still do the right
thing.
If trim_url_limit is not None, the URLs in link text longer than this limit
    will be truncated to trim_url_limit-3 characters and appended with an ellipsis.
If nofollow is True, the URLs in link text will get a rel="nofollow"
attribute.
If autoescape is True, the link text and URLs will get autoescaped.
"""
trim_url = lambda x, limit=trim_url_limit: limit is not None and (len(x) > limit and ('%s...' % x[:max(0, limit - 3)])) or x
safe_input = isinstance(text, SafeData)
words = word_split_re.split(force_unicode(text))
nofollow_attr = nofollow and ' rel="nofollow"' or ''
for i, word in enumerate(words):
match = None
if '.' in word or '@' in word or ':' in word:
match = punctuation_re.match(word)
if match:
lead, middle, trail = match.groups()
# Make URL we want to point to.
url = None
if middle.startswith('http://') or middle.startswith('https://'):
url = urlquote(middle, safe='/&=:;#?+*')
elif middle.startswith('www.') or ('@' not in middle and \
middle and middle[0] in string.ascii_letters + string.digits and \
(middle.endswith('.org') or middle.endswith('.net') or middle.endswith('.com'))):
url = urlquote('http://%s' % middle, safe='/&=:;#?+*')
elif '@' in middle and not ':' in middle and simple_email_re.match(middle):
url = 'mailto:%s' % middle
nofollow_attr = ''
# Make link.
if url:
trimmed = trim_url(middle)
if autoescape and not safe_input:
lead, trail = escape(lead), escape(trail)
url, trimmed = escape(url), escape(trimmed)
middle = '<a href="%s"%s>%s</a>' % (url, nofollow_attr, trimmed)
words[i] = mark_safe('%s%s%s' % (lead, middle, trail))
else:
if safe_input:
words[i] = mark_safe(word)
elif autoescape:
words[i] = escape(word)
elif safe_input:
words[i] = mark_safe(word)
elif autoescape:
words[i] = escape(word)
return u''.join(words)
urlize = allow_lazy(urlize, unicode)
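# Hedged example for urlize() above (exact escaping depends on the flags):
#
#   urlize(u'Visit www.example.com today.')
#   -> u'Visit <a href="http://www.example.com">www.example.com</a> today.'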
def clean_html(text):
"""
Clean the given HTML. Specifically, do the following:
* Convert <b> and <i> to <strong> and <em>.
* Encode all ampersands correctly.
* Remove all "target" attributes from <a> tags.
* Remove extraneous HTML, such as presentational tags that open and
immediately close and <br clear="all">.
* Convert hard-coded bullets into HTML unordered lists.
* Remove stuff like "<p> </p>", but only if it's at the
bottom of the text.
"""
from django.utils.text import normalize_newlines
text = normalize_newlines(force_unicode(text))
text = re.sub(r'<(/?)\s*b\s*>', '<\\1strong>', text)
text = re.sub(r'<(/?)\s*i\s*>', '<\\1em>', text)
text = fix_ampersands(text)
# Remove all target="" attributes from <a> tags.
text = link_target_attribute_re.sub('\\1', text)
# Trim stupid HTML such as <br clear="all">.
text = html_gunk_re.sub('', text)
# Convert hard-coded bullets into HTML unordered lists.
def replace_p_tags(match):
s = match.group().replace('</p>', '</li>')
for d in DOTS:
s = s.replace('<p>%s' % d, '<li>')
return u'<ul>\n%s\n</ul>' % s
text = hard_coded_bullets_re.sub(replace_p_tags, text)
# Remove stuff like "<p> </p>", but only if it's at the bottom
# of the text.
text = trailing_empty_content_re.sub('', text)
return text
clean_html = allow_lazy(clean_html, unicode)
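# Hedged example for clean_html() above:
#
#   clean_html(u'<b>bold</b> & <i>italic</i>')
#   -> u'<strong>bold</strong> &amp; <em>italic</em>'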
| lgpl-3.0 |
fumen/gae-fumen | lib/tests/contrib/django_util/test_django_storage.py | 21 | 5763 | # Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the DjangoORM storage class."""
# Mock a Django environment
import datetime
import unittest
from django.db import models
import mock
from oauth2client import GOOGLE_TOKEN_URI
from oauth2client.client import OAuth2Credentials
from oauth2client.contrib.django_util.models import CredentialsField
from oauth2client.contrib.django_util.storage import (
DjangoORMStorage as Storage)
class TestStorage(unittest.TestCase):
def setUp(self):
access_token = 'foo'
client_id = 'some_client_id'
client_secret = 'cOuDdkfjxxnv+'
refresh_token = '1/0/a.df219fjls0'
token_expiry = datetime.datetime.utcnow()
user_agent = 'refresh_checker/1.0'
self.credentials = OAuth2Credentials(
access_token, client_id, client_secret,
refresh_token, token_expiry, GOOGLE_TOKEN_URI,
user_agent)
self.key_name = 'id'
self.key_value = '1'
self.property_name = 'credentials'
def test_constructor(self):
storage = Storage(FakeCredentialsModel, self.key_name,
self.key_value, self.property_name)
self.assertEqual(storage.model_class, FakeCredentialsModel)
self.assertEqual(storage.key_name, self.key_name)
self.assertEqual(storage.key_value, self.key_value)
self.assertEqual(storage.property_name, self.property_name)
@mock.patch('django.db.models')
def test_locked_get(self, djangoModel):
fake_model_with_credentials = FakeCredentialsModelMock()
entities = [
fake_model_with_credentials
]
filter_mock = mock.Mock(return_value=entities)
object_mock = mock.Mock()
object_mock.filter = filter_mock
FakeCredentialsModelMock.objects = object_mock
storage = Storage(FakeCredentialsModelMock, self.key_name,
self.key_value, self.property_name)
credential = storage.locked_get()
self.assertEqual(
credential, fake_model_with_credentials.credentials)
@mock.patch('django.db.models')
def test_locked_get_no_entities(self, djangoModel):
entities = []
filter_mock = mock.Mock(return_value=entities)
object_mock = mock.Mock()
object_mock.filter = filter_mock
FakeCredentialsModelMock.objects = object_mock
storage = Storage(FakeCredentialsModelMock, self.key_name,
self.key_value, self.property_name)
credential = storage.locked_get()
self.assertIsNone(credential)
@mock.patch('django.db.models')
def test_locked_get_no_set_store(self, djangoModel):
fake_model_with_credentials = FakeCredentialsModelMockNoSet()
entities = [
fake_model_with_credentials
]
filter_mock = mock.Mock(return_value=entities)
object_mock = mock.Mock()
object_mock.filter = filter_mock
FakeCredentialsModelMockNoSet.objects = object_mock
storage = Storage(FakeCredentialsModelMockNoSet, self.key_name,
self.key_value, self.property_name)
credential = storage.locked_get()
self.assertEqual(
credential, fake_model_with_credentials.credentials)
@mock.patch('django.db.models')
def test_locked_put(self, djangoModel):
entity_mock = mock.Mock(credentials=None)
objects = mock.Mock(
get_or_create=mock.Mock(return_value=(entity_mock, None)))
FakeCredentialsModelMock.objects = objects
storage = Storage(FakeCredentialsModelMock, self.key_name,
self.key_value, self.property_name)
storage.locked_put(self.credentials)
@mock.patch('django.db.models')
def test_locked_delete(self, djangoModel):
class FakeEntities(object):
def __init__(self):
self.deleted = False
def delete(self):
self.deleted = True
fake_entities = FakeEntities()
entities = fake_entities
filter_mock = mock.Mock(return_value=entities)
object_mock = mock.Mock()
object_mock.filter = filter_mock
FakeCredentialsModelMock.objects = object_mock
storage = Storage(FakeCredentialsModelMock, self.key_name,
self.key_value, self.property_name)
storage.locked_delete()
self.assertTrue(fake_entities.deleted)
class CredentialWithSetStore(CredentialsField):
def __init__(self):
self.model = CredentialWithSetStore
def set_store(self, storage):
pass
class FakeCredentialsModel(models.Model):
credentials = CredentialsField()
class FakeCredentialsModelMock(object):
def __init__(self, *args, **kwargs):
self.model = FakeCredentialsModelMock
self.saved = False
self.deleted = False
credentials = CredentialWithSetStore()
class FakeCredentialsModelMockNoSet(object):
def __init__(self, set_store=False, *args, **kwargs):
self.model = FakeCredentialsModelMock
self.saved = False
self.deleted = False
credentials = CredentialsField()
| bsd-3-clause |
waytai/odoo | addons/account_chart/__openerp__.py | 313 | 1451 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Template of Charts of Accounts',
'version': '1.1',
'category': 'Hidden/Dependency',
'description': """
Remove minimal account chart.
=============================
Deactivates minimal chart of accounts.
""",
'author': 'OpenERP SA',
'website': 'https://www.odoo.com/page/accounting',
'depends': ['account'],
'data': [],
'demo': [],
'installable': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
McNetic/CouchPotatoServer-de | libs/dateutil/tz.py | 217 | 32988 | """
Copyright (c) 2003-2007 Gustavo Niemeyer <[email protected]>
This module offers extensions to the standard Python
datetime module.
"""
__license__ = "Simplified BSD"
from six import string_types, PY3
import datetime
import struct
import time
import sys
import os
relativedelta = None
parser = None
rrule = None
__all__ = ["tzutc", "tzoffset", "tzlocal", "tzfile", "tzrange",
"tzstr", "tzical", "tzwin", "tzwinlocal", "gettz"]
try:
from dateutil.tzwin import tzwin, tzwinlocal
except (ImportError, OSError):
tzwin, tzwinlocal = None, None
def tzname_in_python2(myfunc):
"""Change unicode output into bytestrings in Python 2
tzname() API changed in Python 3. It used to return bytes, but was changed
to unicode strings
"""
def inner_func(*args, **kwargs):
if PY3:
return myfunc(*args, **kwargs)
else:
return myfunc(*args, **kwargs).encode()
return inner_func
ZERO = datetime.timedelta(0)
EPOCHORDINAL = datetime.datetime.utcfromtimestamp(0).toordinal()
class tzutc(datetime.tzinfo):
def utcoffset(self, dt):
return ZERO
def dst(self, dt):
return ZERO
@tzname_in_python2
def tzname(self, dt):
return "UTC"
def __eq__(self, other):
return (isinstance(other, tzutc) or
(isinstance(other, tzoffset) and other._offset == ZERO))
def __ne__(self, other):
return not self.__eq__(other)
def __repr__(self):
return "%s()" % self.__class__.__name__
__reduce__ = object.__reduce__
class tzoffset(datetime.tzinfo):
def __init__(self, name, offset):
self._name = name
self._offset = datetime.timedelta(seconds=offset)
def utcoffset(self, dt):
return self._offset
def dst(self, dt):
return ZERO
@tzname_in_python2
def tzname(self, dt):
return self._name
def __eq__(self, other):
return (isinstance(other, tzoffset) and
self._offset == other._offset)
def __ne__(self, other):
return not self.__eq__(other)
def __repr__(self):
return "%s(%s, %s)" % (self.__class__.__name__,
repr(self._name),
self._offset.days*86400+self._offset.seconds)
__reduce__ = object.__reduce__
class tzlocal(datetime.tzinfo):
_std_offset = datetime.timedelta(seconds=-time.timezone)
if time.daylight:
_dst_offset = datetime.timedelta(seconds=-time.altzone)
else:
_dst_offset = _std_offset
def utcoffset(self, dt):
if self._isdst(dt):
return self._dst_offset
else:
return self._std_offset
def dst(self, dt):
if self._isdst(dt):
return self._dst_offset-self._std_offset
else:
return ZERO
@tzname_in_python2
def tzname(self, dt):
return time.tzname[self._isdst(dt)]
def _isdst(self, dt):
# We can't use mktime here. It is unstable when deciding if
        # the hour near a transition is DST or not.
#
# timestamp = time.mktime((dt.year, dt.month, dt.day, dt.hour,
# dt.minute, dt.second, dt.weekday(), 0, -1))
# return time.localtime(timestamp).tm_isdst
#
# The code above yields the following result:
#
#>>> import tz, datetime
#>>> t = tz.tzlocal()
#>>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname()
#'BRDT'
#>>> datetime.datetime(2003,2,16,0,tzinfo=t).tzname()
#'BRST'
#>>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname()
#'BRST'
#>>> datetime.datetime(2003,2,15,22,tzinfo=t).tzname()
#'BRDT'
#>>> datetime.datetime(2003,2,15,23,tzinfo=t).tzname()
#'BRDT'
#
# Here is a more stable implementation:
#
timestamp = ((dt.toordinal() - EPOCHORDINAL) * 86400
+ dt.hour * 3600
+ dt.minute * 60
+ dt.second)
return time.localtime(timestamp+time.timezone).tm_isdst
def __eq__(self, other):
if not isinstance(other, tzlocal):
return False
        return (self._std_offset == other._std_offset and
                self._dst_offset == other._dst_offset)
def __ne__(self, other):
return not self.__eq__(other)
def __repr__(self):
return "%s()" % self.__class__.__name__
__reduce__ = object.__reduce__
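# Illustrative usage sketch (an editorial addition, not part of the
# original module): the tzinfo classes above plug straight into datetime.
#
#>>> import datetime
#>>> utc = tzutc()
#>>> local = tzlocal()
#>>> dt = datetime.datetime(2003, 2, 15, 23, tzinfo=local)
#>>> dt.astimezone(utc) # convert an aware local time to UTC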
class _ttinfo(object):
__slots__ = ["offset", "delta", "isdst", "abbr", "isstd", "isgmt"]
def __init__(self):
for attr in self.__slots__:
setattr(self, attr, None)
def __repr__(self):
l = []
for attr in self.__slots__:
value = getattr(self, attr)
if value is not None:
l.append("%s=%s" % (attr, repr(value)))
return "%s(%s)" % (self.__class__.__name__, ", ".join(l))
def __eq__(self, other):
if not isinstance(other, _ttinfo):
return False
return (self.offset == other.offset and
self.delta == other.delta and
self.isdst == other.isdst and
self.abbr == other.abbr and
self.isstd == other.isstd and
self.isgmt == other.isgmt)
def __ne__(self, other):
return not self.__eq__(other)
def __getstate__(self):
state = {}
for name in self.__slots__:
state[name] = getattr(self, name, None)
return state
def __setstate__(self, state):
for name in self.__slots__:
if name in state:
setattr(self, name, state[name])
class tzfile(datetime.tzinfo):
# http://www.twinsun.com/tz/tz-link.htm
# ftp://ftp.iana.org/tz/tz*.tar.gz
def __init__(self, fileobj):
if isinstance(fileobj, string_types):
self._filename = fileobj
fileobj = open(fileobj, 'rb')
elif hasattr(fileobj, "name"):
self._filename = fileobj.name
else:
self._filename = repr(fileobj)
# From tzfile(5):
#
# The time zone information files used by tzset(3)
# begin with the magic characters "TZif" to identify
# them as time zone information files, followed by
# sixteen bytes reserved for future use, followed by
# six four-byte values of type long, written in a
# ``standard'' byte order (the high-order byte
# of the value is written first).
if fileobj.read(4).decode() != "TZif":
raise ValueError("magic not found")
fileobj.read(16)
(
# The number of UTC/local indicators stored in the file.
ttisgmtcnt,
# The number of standard/wall indicators stored in the file.
ttisstdcnt,
# The number of leap seconds for which data is
# stored in the file.
leapcnt,
# The number of "transition times" for which data
# is stored in the file.
timecnt,
# The number of "local time types" for which data
# is stored in the file (must not be zero).
typecnt,
# The number of characters of "time zone
# abbreviation strings" stored in the file.
charcnt,
) = struct.unpack(">6l", fileobj.read(24))
# The above header is followed by tzh_timecnt four-byte
# values of type long, sorted in ascending order.
# These values are written in ``standard'' byte order.
# Each is used as a transition time (as returned by
# time(2)) at which the rules for computing local time
# change.
if timecnt:
self._trans_list = struct.unpack(">%dl" % timecnt,
fileobj.read(timecnt*4))
else:
self._trans_list = []
# Next come tzh_timecnt one-byte values of type unsigned
# char; each one tells which of the different types of
# ``local time'' types described in the file is associated
# with the same-indexed transition time. These values
# serve as indices into an array of ttinfo structures that
# appears next in the file.
if timecnt:
self._trans_idx = struct.unpack(">%dB" % timecnt,
fileobj.read(timecnt))
else:
self._trans_idx = []
# Each ttinfo structure is written as a four-byte value
# for tt_gmtoff of type long, in a standard byte
# order, followed by a one-byte value for tt_isdst
# and a one-byte value for tt_abbrind. In each
# structure, tt_gmtoff gives the number of
# seconds to be added to UTC, tt_isdst tells whether
# tm_isdst should be set by localtime(3), and
# tt_abbrind serves as an index into the array of
# time zone abbreviation characters that follow the
# ttinfo structure(s) in the file.
ttinfo = []
for i in range(typecnt):
ttinfo.append(struct.unpack(">lbb", fileobj.read(6)))
abbr = fileobj.read(charcnt).decode()
# Then there are tzh_leapcnt pairs of four-byte
# values, written in standard byte order; the
# first value of each pair gives the time (as
# returned by time(2)) at which a leap second
# occurs; the second gives the total number of
# leap seconds to be applied after the given time.
# The pairs of values are sorted in ascending order
# by time.
# Not used, for now
if leapcnt:
leap = struct.unpack(">%dl" % (leapcnt*2),
fileobj.read(leapcnt*8))
# Then there are tzh_ttisstdcnt standard/wall
# indicators, each stored as a one-byte value;
# they tell whether the transition times associated
# with local time types were specified as standard
# time or wall clock time, and are used when
# a time zone file is used in handling POSIX-style
# time zone environment variables.
if ttisstdcnt:
isstd = struct.unpack(">%db" % ttisstdcnt,
fileobj.read(ttisstdcnt))
# Finally, there are tzh_ttisgmtcnt UTC/local
# indicators, each stored as a one-byte value;
# they tell whether the transition times associated
# with local time types were specified as UTC or
# local time, and are used when a time zone file
# is used in handling POSIX-style time zone envi-
# ronment variables.
if ttisgmtcnt:
isgmt = struct.unpack(">%db" % ttisgmtcnt,
fileobj.read(ttisgmtcnt))
# ** Everything has been read **
# Build ttinfo list
self._ttinfo_list = []
for i in range(typecnt):
gmtoff, isdst, abbrind = ttinfo[i]
            # Round to full minutes if the offset is not already one. Python's
# datetime doesn't accept sub-minute timezones. Check
# http://python.org/sf/1447945 for some information.
gmtoff = (gmtoff+30)//60*60
tti = _ttinfo()
tti.offset = gmtoff
tti.delta = datetime.timedelta(seconds=gmtoff)
tti.isdst = isdst
tti.abbr = abbr[abbrind:abbr.find('\x00', abbrind)]
tti.isstd = (ttisstdcnt > i and isstd[i] != 0)
tti.isgmt = (ttisgmtcnt > i and isgmt[i] != 0)
self._ttinfo_list.append(tti)
# Replace ttinfo indexes for ttinfo objects.
trans_idx = []
for idx in self._trans_idx:
trans_idx.append(self._ttinfo_list[idx])
self._trans_idx = tuple(trans_idx)
# Set standard, dst, and before ttinfos. before will be
# used when a given time is before any transitions,
# and will be set to the first non-dst ttinfo, or to
# the first dst, if all of them are dst.
self._ttinfo_std = None
self._ttinfo_dst = None
self._ttinfo_before = None
if self._ttinfo_list:
if not self._trans_list:
self._ttinfo_std = self._ttinfo_first = self._ttinfo_list[0]
else:
for i in range(timecnt-1, -1, -1):
tti = self._trans_idx[i]
if not self._ttinfo_std and not tti.isdst:
self._ttinfo_std = tti
elif not self._ttinfo_dst and tti.isdst:
self._ttinfo_dst = tti
if self._ttinfo_std and self._ttinfo_dst:
break
else:
if self._ttinfo_dst and not self._ttinfo_std:
self._ttinfo_std = self._ttinfo_dst
for tti in self._ttinfo_list:
if not tti.isdst:
self._ttinfo_before = tti
break
else:
self._ttinfo_before = self._ttinfo_list[0]
# Now fix transition times to become relative to wall time.
#
# I'm not sure about this. In my tests, the tz source file
        # is set up to wall time, and in the binary file isstd and
# isgmt are off, so it should be in wall time. OTOH, it's
# always in gmt time. Let me know if you have comments
# about this.
laststdoffset = 0
self._trans_list = list(self._trans_list)
for i in range(len(self._trans_list)):
tti = self._trans_idx[i]
if not tti.isdst:
# This is std time.
self._trans_list[i] += tti.offset
laststdoffset = tti.offset
else:
# This is dst time. Convert to std.
self._trans_list[i] += laststdoffset
self._trans_list = tuple(self._trans_list)
def _find_ttinfo(self, dt, laststd=0):
timestamp = ((dt.toordinal() - EPOCHORDINAL) * 86400
+ dt.hour * 3600
+ dt.minute * 60
+ dt.second)
idx = 0
for trans in self._trans_list:
if timestamp < trans:
break
idx += 1
else:
return self._ttinfo_std
if idx == 0:
return self._ttinfo_before
if laststd:
while idx > 0:
tti = self._trans_idx[idx-1]
if not tti.isdst:
return tti
idx -= 1
else:
return self._ttinfo_std
else:
return self._trans_idx[idx-1]
def utcoffset(self, dt):
if not self._ttinfo_std:
return ZERO
return self._find_ttinfo(dt).delta
def dst(self, dt):
if not self._ttinfo_dst:
return ZERO
tti = self._find_ttinfo(dt)
if not tti.isdst:
return ZERO
# The documentation says that utcoffset()-dst() must
# be constant for every dt.
return tti.delta-self._find_ttinfo(dt, laststd=1).delta
# An alternative for that would be:
#
# return self._ttinfo_dst.offset-self._ttinfo_std.offset
#
# However, this class stores historical changes in the
        # dst offset, so I believe that this wouldn't be the right
# way to implement this.
@tzname_in_python2
def tzname(self, dt):
if not self._ttinfo_std:
return None
return self._find_ttinfo(dt).abbr
def __eq__(self, other):
if not isinstance(other, tzfile):
return False
return (self._trans_list == other._trans_list and
self._trans_idx == other._trans_idx and
self._ttinfo_list == other._ttinfo_list)
def __ne__(self, other):
return not self.__eq__(other)
def __repr__(self):
return "%s(%s)" % (self.__class__.__name__, repr(self._filename))
def __reduce__(self):
if not os.path.isfile(self._filename):
raise ValueError("Unpickable %s class" % self.__class__.__name__)
return (self.__class__, (self._filename,))
class tzrange(datetime.tzinfo):
def __init__(self, stdabbr, stdoffset=None,
dstabbr=None, dstoffset=None,
start=None, end=None):
global relativedelta
if not relativedelta:
from dateutil import relativedelta
self._std_abbr = stdabbr
self._dst_abbr = dstabbr
if stdoffset is not None:
self._std_offset = datetime.timedelta(seconds=stdoffset)
else:
self._std_offset = ZERO
if dstoffset is not None:
self._dst_offset = datetime.timedelta(seconds=dstoffset)
elif dstabbr and stdoffset is not None:
self._dst_offset = self._std_offset+datetime.timedelta(hours=+1)
else:
self._dst_offset = ZERO
if dstabbr and start is None:
self._start_delta = relativedelta.relativedelta(
hours=+2, month=4, day=1, weekday=relativedelta.SU(+1))
else:
self._start_delta = start
if dstabbr and end is None:
self._end_delta = relativedelta.relativedelta(
hours=+1, month=10, day=31, weekday=relativedelta.SU(-1))
else:
self._end_delta = end
def utcoffset(self, dt):
if self._isdst(dt):
return self._dst_offset
else:
return self._std_offset
def dst(self, dt):
if self._isdst(dt):
return self._dst_offset-self._std_offset
else:
return ZERO
@tzname_in_python2
def tzname(self, dt):
if self._isdst(dt):
return self._dst_abbr
else:
return self._std_abbr
def _isdst(self, dt):
if not self._start_delta:
return False
year = datetime.datetime(dt.year, 1, 1)
start = year+self._start_delta
end = year+self._end_delta
dt = dt.replace(tzinfo=None)
if start < end:
return dt >= start and dt < end
else:
return dt >= start or dt < end
def __eq__(self, other):
if not isinstance(other, tzrange):
return False
return (self._std_abbr == other._std_abbr and
self._dst_abbr == other._dst_abbr and
self._std_offset == other._std_offset and
self._dst_offset == other._dst_offset and
self._start_delta == other._start_delta and
self._end_delta == other._end_delta)
def __ne__(self, other):
return not self.__eq__(other)
def __repr__(self):
return "%s(...)" % self.__class__.__name__
__reduce__ = object.__reduce__
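# A minimal construction sketch (assumed values, not taken from the
# original source): offsets are seconds east of UTC, and omitting
# start/end falls back to the first-Sunday-of-April /
# last-Sunday-of-October defaults implemented above.
#
#>>> EST5EDT = tzrange("EST", -18000, "EDT", -14400)
#>>> datetime.datetime(2003, 7, 1, tzinfo=EST5EDT).tzname()
#'EDT'
#>>> datetime.datetime(2003, 1, 1, tzinfo=EST5EDT).tzname()
#'EST'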
class tzstr(tzrange):
def __init__(self, s):
global parser
if not parser:
from dateutil import parser
self._s = s
res = parser._parsetz(s)
if res is None:
raise ValueError("unknown string format")
# Here we break the compatibility with the TZ variable handling.
# GMT-3 actually *means* the timezone -3.
if res.stdabbr in ("GMT", "UTC"):
res.stdoffset *= -1
# We must initialize it first, since _delta() needs
# _std_offset and _dst_offset set. Use False in start/end
# to avoid building it two times.
tzrange.__init__(self, res.stdabbr, res.stdoffset,
res.dstabbr, res.dstoffset,
start=False, end=False)
if not res.dstabbr:
self._start_delta = None
self._end_delta = None
else:
self._start_delta = self._delta(res.start)
if self._start_delta:
self._end_delta = self._delta(res.end, isend=1)
def _delta(self, x, isend=0):
kwargs = {}
if x.month is not None:
kwargs["month"] = x.month
if x.weekday is not None:
kwargs["weekday"] = relativedelta.weekday(x.weekday, x.week)
if x.week > 0:
kwargs["day"] = 1
else:
kwargs["day"] = 31
elif x.day:
kwargs["day"] = x.day
elif x.yday is not None:
kwargs["yearday"] = x.yday
elif x.jyday is not None:
kwargs["nlyearday"] = x.jyday
if not kwargs:
# Default is to start on first sunday of april, and end
# on last sunday of october.
if not isend:
kwargs["month"] = 4
kwargs["day"] = 1
kwargs["weekday"] = relativedelta.SU(+1)
else:
kwargs["month"] = 10
kwargs["day"] = 31
kwargs["weekday"] = relativedelta.SU(-1)
if x.time is not None:
kwargs["seconds"] = x.time
else:
# Default is 2AM.
kwargs["seconds"] = 7200
if isend:
# Convert to standard time, to follow the documented way
# of working with the extra hour. See the documentation
# of the tzinfo class.
delta = self._dst_offset-self._std_offset
kwargs["seconds"] -= delta.seconds+delta.days*86400
return relativedelta.relativedelta(**kwargs)
def __repr__(self):
return "%s(%s)" % (self.__class__.__name__, repr(self._s))
class _tzicalvtzcomp(object):
def __init__(self, tzoffsetfrom, tzoffsetto, isdst,
tzname=None, rrule=None):
self.tzoffsetfrom = datetime.timedelta(seconds=tzoffsetfrom)
self.tzoffsetto = datetime.timedelta(seconds=tzoffsetto)
self.tzoffsetdiff = self.tzoffsetto-self.tzoffsetfrom
self.isdst = isdst
self.tzname = tzname
self.rrule = rrule
class _tzicalvtz(datetime.tzinfo):
def __init__(self, tzid, comps=[]):
self._tzid = tzid
self._comps = comps
self._cachedate = []
self._cachecomp = []
def _find_comp(self, dt):
if len(self._comps) == 1:
return self._comps[0]
dt = dt.replace(tzinfo=None)
try:
return self._cachecomp[self._cachedate.index(dt)]
except ValueError:
pass
lastcomp = None
lastcompdt = None
for comp in self._comps:
if not comp.isdst:
# Handle the extra hour in DST -> STD
compdt = comp.rrule.before(dt-comp.tzoffsetdiff, inc=True)
else:
compdt = comp.rrule.before(dt, inc=True)
if compdt and (not lastcompdt or lastcompdt < compdt):
lastcompdt = compdt
lastcomp = comp
if not lastcomp:
# RFC says nothing about what to do when a given
# time is before the first onset date. We'll look for the
# first standard component, or the first component, if
# none is found.
for comp in self._comps:
if not comp.isdst:
lastcomp = comp
break
else:
                lastcomp = self._comps[0]
self._cachedate.insert(0, dt)
self._cachecomp.insert(0, lastcomp)
if len(self._cachedate) > 10:
self._cachedate.pop()
self._cachecomp.pop()
return lastcomp
def utcoffset(self, dt):
return self._find_comp(dt).tzoffsetto
def dst(self, dt):
comp = self._find_comp(dt)
if comp.isdst:
return comp.tzoffsetdiff
else:
return ZERO
@tzname_in_python2
def tzname(self, dt):
return self._find_comp(dt).tzname
def __repr__(self):
return "<tzicalvtz %s>" % repr(self._tzid)
__reduce__ = object.__reduce__
class tzical(object):
def __init__(self, fileobj):
global rrule
if not rrule:
from dateutil import rrule
if isinstance(fileobj, string_types):
self._s = fileobj
fileobj = open(fileobj, 'r') # ical should be encoded in UTF-8 with CRLF
elif hasattr(fileobj, "name"):
self._s = fileobj.name
else:
self._s = repr(fileobj)
self._vtz = {}
self._parse_rfc(fileobj.read())
def keys(self):
return list(self._vtz.keys())
def get(self, tzid=None):
if tzid is None:
keys = list(self._vtz.keys())
if len(keys) == 0:
raise ValueError("no timezones defined")
elif len(keys) > 1:
raise ValueError("more than one timezone available")
tzid = keys[0]
return self._vtz.get(tzid)
def _parse_offset(self, s):
s = s.strip()
if not s:
raise ValueError("empty offset")
if s[0] in ('+', '-'):
signal = (-1, +1)[s[0]=='+']
s = s[1:]
else:
signal = +1
if len(s) == 4:
return (int(s[:2])*3600+int(s[2:])*60)*signal
elif len(s) == 6:
return (int(s[:2])*3600+int(s[2:4])*60+int(s[4:]))*signal
else:
raise ValueError("invalid offset: "+s)
def _parse_rfc(self, s):
lines = s.splitlines()
if not lines:
raise ValueError("empty string")
# Unfold
i = 0
while i < len(lines):
line = lines[i].rstrip()
if not line:
del lines[i]
elif i > 0 and line[0] == " ":
lines[i-1] += line[1:]
del lines[i]
else:
i += 1
tzid = None
comps = []
invtz = False
comptype = None
for line in lines:
if not line:
continue
name, value = line.split(':', 1)
parms = name.split(';')
if not parms:
raise ValueError("empty property name")
name = parms[0].upper()
parms = parms[1:]
if invtz:
if name == "BEGIN":
if value in ("STANDARD", "DAYLIGHT"):
# Process component
pass
else:
raise ValueError("unknown component: "+value)
comptype = value
founddtstart = False
tzoffsetfrom = None
tzoffsetto = None
rrulelines = []
tzname = None
elif name == "END":
if value == "VTIMEZONE":
if comptype:
raise ValueError("component not closed: "+comptype)
if not tzid:
raise ValueError("mandatory TZID not found")
if not comps:
raise ValueError("at least one component is needed")
# Process vtimezone
self._vtz[tzid] = _tzicalvtz(tzid, comps)
invtz = False
elif value == comptype:
if not founddtstart:
raise ValueError("mandatory DTSTART not found")
if tzoffsetfrom is None:
raise ValueError("mandatory TZOFFSETFROM not found")
if tzoffsetto is None:
raise ValueError("mandatory TZOFFSETFROM not found")
# Process component
rr = None
if rrulelines:
rr = rrule.rrulestr("\n".join(rrulelines),
compatible=True,
ignoretz=True,
cache=True)
comp = _tzicalvtzcomp(tzoffsetfrom, tzoffsetto,
(comptype == "DAYLIGHT"),
tzname, rr)
comps.append(comp)
comptype = None
else:
raise ValueError("invalid component end: "+value)
elif comptype:
if name == "DTSTART":
rrulelines.append(line)
founddtstart = True
elif name in ("RRULE", "RDATE", "EXRULE", "EXDATE"):
rrulelines.append(line)
elif name == "TZOFFSETFROM":
if parms:
raise ValueError("unsupported %s parm: %s "%(name, parms[0]))
tzoffsetfrom = self._parse_offset(value)
elif name == "TZOFFSETTO":
if parms:
raise ValueError("unsupported TZOFFSETTO parm: "+parms[0])
tzoffsetto = self._parse_offset(value)
elif name == "TZNAME":
if parms:
raise ValueError("unsupported TZNAME parm: "+parms[0])
tzname = value
elif name == "COMMENT":
pass
else:
raise ValueError("unsupported property: "+name)
else:
if name == "TZID":
if parms:
raise ValueError("unsupported TZID parm: "+parms[0])
tzid = value
elif name in ("TZURL", "LAST-MODIFIED", "COMMENT"):
pass
else:
raise ValueError("unsupported property: "+name)
elif name == "BEGIN" and value == "VTIMEZONE":
tzid = None
comps = []
invtz = True
def __repr__(self):
return "%s(%s)" % (self.__class__.__name__, repr(self._s))
if sys.platform != "win32":
TZFILES = ["/etc/localtime", "localtime"]
TZPATHS = ["/usr/share/zoneinfo", "/usr/lib/zoneinfo", "/etc/zoneinfo"]
else:
TZFILES = []
TZPATHS = []
def gettz(name=None):
tz = None
if not name:
try:
name = os.environ["TZ"]
except KeyError:
pass
if name is None or name == ":":
for filepath in TZFILES:
if not os.path.isabs(filepath):
filename = filepath
for path in TZPATHS:
filepath = os.path.join(path, filename)
if os.path.isfile(filepath):
break
else:
continue
if os.path.isfile(filepath):
try:
tz = tzfile(filepath)
break
except (IOError, OSError, ValueError):
pass
else:
tz = tzlocal()
else:
if name.startswith(":"):
            name = name[1:]
if os.path.isabs(name):
if os.path.isfile(name):
tz = tzfile(name)
else:
tz = None
else:
for path in TZPATHS:
filepath = os.path.join(path, name)
if not os.path.isfile(filepath):
filepath = filepath.replace(' ', '_')
if not os.path.isfile(filepath):
continue
try:
tz = tzfile(filepath)
break
except (IOError, OSError, ValueError):
pass
else:
tz = None
if tzwin:
try:
tz = tzwin(name)
except OSError:
pass
if not tz:
from dateutil.zoneinfo import gettz
tz = gettz(name)
if not tz:
for c in name:
# name must have at least one offset to be a tzstr
if c in "0123456789":
try:
tz = tzstr(name)
except ValueError:
pass
break
else:
if name in ("GMT", "UTC"):
tz = tzutc()
elif name in time.tzname:
tz = tzlocal()
return tz
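# Illustrative lookups (results depend on the local zoneinfo database and
# platform, so treat these as assumptions rather than guaranteed output):
#
#>>> gettz() # local zone, honouring $TZ when set
#>>> gettz("America/Sao_Paulo") # tzfile read from a TZPATHS entry
#>>> gettz("EST5EDT") # may fall back to tzstr parsing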
# vim:ts=4:sw=4:et
| gpl-3.0 |
tomkat83/PlexKodiConnect | resources/lib/library_sync/sections.py | 2 | 28788 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, unicode_literals
from logging import getLogger
import copy
from . import nodes
from ..plex_db import PlexDB
from ..plex_api import API
from .. import kodi_db
from .. import itemtypes, path_ops
from .. import plex_functions as PF, music, utils, variables as v, app
from ..utils import etree
LOG = getLogger('PLEX.sync.sections')
BATCH_SIZE = 500
# Need a way to interrupt our synching process
SHOULD_CANCEL = None
LIBRARY_PATH = path_ops.translate_path('special://profile/library/video/')
# The video library might not yet exist for this user - create it
if not path_ops.exists(LIBRARY_PATH):
path_ops.copy_tree(
src=path_ops.translate_path('special://xbmc/system/library/video'),
dst=LIBRARY_PATH,
preserve_mode=0) # dont copy permission bits so we have write access!
PLAYLISTS_PATH = path_ops.translate_path("special://profile/playlists/video/")
if not path_ops.exists(PLAYLISTS_PATH):
path_ops.makedirs(PLAYLISTS_PATH)
# Window variables we set for each node
WINDOW_ARGS = ('index', 'title', 'id', 'path', 'type', 'content', 'artwork')
class Section(object):
"""
Setting the attribute section_type will automatically set content and
sync_to_kodi
"""
def __init__(self, index=None, xml_element=None, section_db_element=None):
# Unique Plex id of this Plex library section
self._section_id = None # int
# Building block for window variable
self._node = None # unicode
# Index of this section (as section_id might not be subsequent)
        # This follows 1:1 the sequence in which the PMS returns the sections
self._index = None # Codacy-bug
self.index = index # int
# This section's name for the user to display
self.name = None # unicode
# Library type section (NOT the same as the KODI_TYPE_...)
# E.g. 'movies', 'tvshows', 'episodes'
self.content = None # unicode
# Setting the section_type WILL re_set sync_to_kodi!
self._section_type = None # unicode
# E.g. "season" or "movie" (translated)
self.section_type_text = None
# Do we sync all items of this section to the Kodi DB?
# This will be set with section_type!!
self.sync_to_kodi = None # bool
# For sections to be synched, the section name will be recorded as a
# tag. This is the corresponding id for this tag
self.kodi_tagid = None # int
# When was this section last successfully/completely synched to the
# Kodi database?
self.last_sync = None # int
# Path to the Kodi userdata library FOLDER for this section
self._path = None # unicode
# Path to the smart playlist for this section
self._playlist_path = None
# "Poster" for this section
self.icon = None # unicode
# Background image for this section
self.artwork = None
# Thumbnail for this section, similar for each section type
self.thumb = None
        # Order number in which xmls will be listed inside Kodi
self.order = None
# Original PMS xml for this section, including children
self.xml = None
        # A section_type possibly encompasses several plex_types! E.g. shows
# contain shows, seasons, episodes
self._plex_type = None
if xml_element is not None:
self.from_xml(xml_element)
elif section_db_element:
self.from_db_element(section_db_element)
def __repr__(self):
return ("{{"
"'index': {self.index}, "
"'name': '{self.name}', "
"'section_id': {self.section_id}, "
"'section_type': '{self.section_type}', "
"'sync_to_kodi': {self.sync_to_kodi}, "
"'last_sync': {self.last_sync}"
"}}").format(self=self).encode('utf-8')
__str__ = __repr__
def __nonzero__(self):
return (self.section_id is not None and
self.name is not None and
self.section_type is not None)
def __eq__(self, section):
"""
Sections compare equal if their section_id, name and plex_type (first
prio) OR section_type (if there is no plex_type is set) compare equal
"""
return (self.section_id == section.section_id and
self.name == section.name and
(self.plex_type == section.plex_type if self.plex_type else
self.section_type == section.section_type))
def __ne__(self, section):
return not self == section
@property
def section_id(self):
return self._section_id
@section_id.setter
def section_id(self, value):
self._section_id = value
self._path = path_ops.path.join(LIBRARY_PATH, 'Plex-%s' % value, '')
self._playlist_path = path_ops.path.join(PLAYLISTS_PATH,
'Plex %s.xsp' % value)
@property
def section_type(self):
return self._section_type
@section_type.setter
def section_type(self, value):
self._section_type = value
self.content = v.CONTENT_FROM_PLEX_TYPE[value]
# Default values whether we sync or not based on the Plex type
if value == v.PLEX_TYPE_PHOTO:
self.sync_to_kodi = False
elif not app.SYNC.enable_music and value == v.PLEX_TYPE_ARTIST:
self.sync_to_kodi = False
else:
self.sync_to_kodi = True
@property
def plex_type(self):
return self._plex_type
@plex_type.setter
def plex_type(self, value):
self._plex_type = value
self.section_type_text = utils.lang(v.TRANSLATION_FROM_PLEXTYPE[value])
@property
def index(self):
return self._index
@index.setter
def index(self, value):
self._index = value
self._node = 'Plex.nodes.%s' % value
@property
def node(self):
return self._node
@property
def path(self):
return self._path
@property
def playlist_path(self):
return self._playlist_path
def from_db_element(self, section_db_element):
self.section_id = section_db_element['section_id']
self.name = section_db_element['section_name']
self.section_type = section_db_element['plex_type']
self.kodi_tagid = section_db_element['kodi_tagid']
self.sync_to_kodi = section_db_element['sync_to_kodi']
self.last_sync = section_db_element['last_sync']
def from_xml(self, xml_element):
"""
Reads section from a PMS xml (Plex id, name, Plex type)
"""
api = API(xml_element)
self.section_id = utils.cast(int, xml_element.get('key'))
self.name = api.title()
self.section_type = api.plex_type
self.icon = api.one_artwork('composite')
self.artwork = api.one_artwork('art')
self.thumb = api.one_artwork('thumb')
self.xml = xml_element
def from_plex_db(self, section_id, plexdb=None):
"""
Reads section with id section_id from the plex.db
"""
if plexdb:
section = plexdb.section(section_id)
else:
with PlexDB(lock=False) as plexdb:
section = plexdb.section(section_id)
if section:
self.from_db_element(section)
def to_plex_db(self, plexdb=None):
"""
Writes this Section to the plex.db, potentially overwriting
(INSERT OR REPLACE)
"""
if not self:
raise RuntimeError('Section not clearly defined: %s' % self)
if plexdb:
plexdb.add_section(self.section_id,
self.name,
self.section_type,
self.kodi_tagid,
self.sync_to_kodi,
self.last_sync)
else:
with PlexDB(lock=False) as plexdb:
plexdb.add_section(self.section_id,
self.name,
self.section_type,
self.kodi_tagid,
self.sync_to_kodi,
self.last_sync)
def addon_path(self, args):
"""
Returns the plugin path pointing back to PKC for key in order to browse
args is a dict. Its values may contain string info of the form
{key: '{self.<Section attribute>}'}
"""
args = copy.deepcopy(args)
for key, value in args.iteritems():
args[key] = value.format(self=self)
return utils.extend_url('plugin://%s' % v.ADDON_ID, args)
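    # Hypothetical call for illustration (assuming section_id == 5):
    # addon_path({'mode': 'browseplex',
    #             'key': '/library/sections/{self.section_id}'})
    # would yield something like
    # plugin://plugin.video.plexkodiconnect/?mode=browseplex&key=/library/sections/5
    # (the exact query encoding is up to utils.extend_url).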
def to_kodi(self):
"""
Writes this section's nodes to the library folder in the Kodi userdata
directory
Won't do anything if self.sync_to_kodi is not True
"""
if self.index is None:
raise RuntimeError('Index not initialized')
# Main list entry for this section - which will show the different
# nodes as "submenus" once the user navigates into this section
if self.sync_to_kodi and self.section_type in v.PLEX_VIDEOTYPES:
# Node showing a menu for this section
args = {
'mode': 'show_section',
'section_index': self.index
}
index = utils.extend_url('plugin://%s' % v.ADDON_ID, args)
# Node directly displaying all content
path = 'library://video/Plex-{0}/{0}_all.xml'
path = path.format(self.section_id)
else:
# Node showing a menu for this section
args = {
'mode': 'browseplex',
'key': '/library/sections/%s' % self.section_id,
'section_id': unicode(self.section_id)
}
if not self.sync_to_kodi:
args['synched'] = 'false'
# No library xmls to speed things up
# Immediately show the PMS options for this section
index = self.addon_path(args)
# Node directly displaying all content
args = {
'mode': 'browseplex',
'key': '/library/sections/%s/all' % self.section_id,
'section_id': unicode(self.section_id)
}
if not self.sync_to_kodi:
args['synched'] = 'false'
path = self.addon_path(args)
utils.window('%s.index' % self.node, value=index)
utils.window('%s.title' % self.node, value=self.name)
utils.window('%s.type' % self.node, value=self.content)
utils.window('%s.content' % self.node, value=index)
# .path leads to all elements of this library
if self.section_type in v.PLEX_VIDEOTYPES:
utils.window('%s.path' % self.node,
value='ActivateWindow(videos,%s,return)' % path)
elif self.section_type == v.PLEX_TYPE_ARTIST:
utils.window('%s.path' % self.node,
value='ActivateWindow(music,%s,return)' % path)
else:
# Pictures
utils.window('%s.path' % self.node,
value='ActivateWindow(pictures,%s,return)' % path)
utils.window('%s.id' % self.node, value=str(self.section_id))
if not self.sync_to_kodi:
self.remove_files_from_kodi()
return
if self.section_type == v.PLEX_TYPE_ARTIST:
# Todo: Write window variables for music
return
if self.section_type == v.PLEX_TYPE_PHOTO:
# Todo: Write window variables for photos
return
# Create a dedicated directory for this section
if not path_ops.exists(self.path):
path_ops.makedirs(self.path)
# Create a tag just like the section name in the Kodi DB
with kodi_db.KodiVideoDB(lock=False) as kodidb:
self.kodi_tagid = kodidb.create_tag(self.name)
# The xmls are numbered in order of appearance
self.order = 0
if not path_ops.exists(path_ops.path.join(self.path, 'index.xml')):
LOG.debug('Creating index.xml for section %s', self.name)
xml = etree.Element('node',
attrib={'order': unicode(self.order)})
etree.SubElement(xml, 'label').text = self.name
etree.SubElement(xml, 'icon').text = self.icon or nodes.ICON_PATH
self._write_xml(xml, 'index.xml')
self.order += 1
# Create the one smart playlist for this section
if not path_ops.exists(self.playlist_path):
self._write_playlist()
# Now build all nodes for this section - potentially creating xmls
for node in nodes.NODE_TYPES[self.section_type]:
self._build_node(*node)
def _build_node(self, node_type, node_name, args, content, pms_node):
self.content = content
node_name = node_name.format(self=self)
if pms_node:
# Do NOT write a Kodi video library xml - can't use type="filter"
# to point back to plugin://plugin.video.plexkodiconnect
xml = nodes.node_pms(self, node_name, args)
args.pop('folder', None)
path = self.addon_path(args)
else:
# Write a Kodi video library xml
xml_name = '%s_%s.xml' % (self.section_id, node_type)
path = path_ops.path.join(self.path, xml_name)
if not path_ops.exists(path):
# Let's use Kodi's logic to sort/filter the Kodi library
xml = getattr(nodes, 'node_%s' % node_type)(self, node_name)
self._write_xml(xml, xml_name)
path = 'library://video/Plex-%s/%s' % (self.section_id, xml_name)
self.order += 1
self._window_node(path, node_name, node_type, pms_node)
def _write_xml(self, xml, xml_name):
LOG.debug('Creating xml for section %s: %s', self.name, xml_name)
utils.indent(xml)
etree.ElementTree(xml).write(path_ops.path.join(self.path, xml_name),
encoding='utf-8',
xml_declaration=True)
def _write_playlist(self):
LOG.debug('Creating smart playlist for section %s: %s',
self.name, self.playlist_path)
xml = etree.Element('smartplaylist',
attrib={'type': v.CONTENT_FROM_PLEX_TYPE[self.section_type]})
etree.SubElement(xml, 'name').text = self.name
etree.SubElement(xml, 'match').text = 'all'
rule = etree.SubElement(xml, 'rule', attrib={'field': 'tag',
'operator': 'is'})
etree.SubElement(rule, 'value').text = self.name
utils.indent(xml)
etree.ElementTree(xml).write(self.playlist_path, encoding='utf-8')
def _window_node(self, path, node_name, node_type, pms_node):
"""
Will save this section's node to the Kodi window variables
Uses the same conventions/logic as Emby for Kodi does
"""
if pms_node or not self.sync_to_kodi:
# Check: elif node_type in ('browse', 'homevideos', 'photos'):
window_path = path
elif self.section_type == v.PLEX_TYPE_ARTIST:
window_path = 'ActivateWindow(Music,%s,return)' % path
else:
window_path = 'ActivateWindow(Videos,%s,return)' % path
# if node_type == 'all':
# var = self.node
# utils.window('%s.index' % var,
# value=path.replace('%s_all.xml' % self.section_id, ''))
# utils.window('%s.title' % var, value=self.name)
# else:
var = '%s.%s' % (self.node, node_type)
utils.window('%s.index' % var, value=path)
utils.window('%s.title' % var, value=node_name)
utils.window('%s.id' % var, value=str(self.section_id))
utils.window('%s.path' % var, value=window_path)
utils.window('%s.type' % var, value=self.content)
utils.window('%s.content' % var, value=path)
utils.window('%s.artwork' % var, value=self.artwork)
def remove_files_from_kodi(self):
"""
Removes this sections from the Kodi userdata library folder (if appl.)
Also removes the smart playlist
"""
if self.section_type in (v.PLEX_TYPE_ARTIST, v.PLEX_TYPE_PHOTO):
# No files created for these types
return
if path_ops.exists(self.path):
path_ops.rmtree(self.path, ignore_errors=True)
if path_ops.exists(self.playlist_path):
try:
path_ops.remove(self.playlist_path)
except (OSError, IOError):
LOG.warn('Could not delete smart playlist for section %s: %s',
self.name, self.playlist_path)
def remove_window_vars(self):
"""
Removes all windows variables 'Plex.nodes.<section_id>.xxx'
"""
if self.index is not None:
_clear_window_vars(self.index)
def remove_from_plex(self, plexdb=None):
"""
Removes this sections completely from the Plex DB
"""
if plexdb:
plexdb.remove_section(self.section_id)
else:
with PlexDB(lock=False) as plexdb:
plexdb.remove_section(self.section_id)
def remove(self):
"""
Completely and utterly removes this section from Kodi and Plex DB
as well as from the window variables
"""
self.remove_files_from_kodi()
self.remove_window_vars()
self.remove_from_plex()
def _get_children(plex_type):
if plex_type == v.PLEX_TYPE_ALBUM:
return True
else:
return False
def get_sync_section(section, plex_type):
"""
Deep-copies section and adds certain arguments in order to prep section
for the library sync
"""
section = copy.deepcopy(section)
section.plex_type = plex_type
section.context = itemtypes.ITEMTYPE_FROM_PLEXTYPE[plex_type]
section.get_children = _get_children(plex_type)
# Some more init stuff
# Has sync for this section been successful?
section.sync_successful = True
# List of tuples: (collection index [as in an item's metadata with
# "Collection id"], collection plex id)
section.collection_match = None
# Dict with entries of the form <collection index>: <collection xml>
section.collection_xmls = {}
# Keep count during sync
section.count = 0
# Total number of items that we need to sync
section.number_of_items = 0
# Iterator to get one sync item after the other
section.iterator = None
return section
def force_full_sync():
"""
Resets the sync timestamp for all sections to 0, thus forcing a subsequent
full sync (not delta)
"""
LOG.info('Telling PKC to do a full sync instead of a delta sync')
with PlexDB() as plexdb:
plexdb.force_full_sync()
def _save_sections_to_plex_db(sections):
with PlexDB() as plexdb:
for section in sections:
section.to_plex_db(plexdb=plexdb)
def _retrieve_old_settings(sections, old_sections):
"""
Overwrites the PKC settings for sections, grabing them from old_sections
if a particular section is in both sections and old_sections
Thus sets to the old values:
section.last_sync
section.kodi_tagid
section.sync_to_kodi
section.last_sync
"""
for section in sections:
for old_section in old_sections:
if section == old_section:
section.last_sync = old_section.last_sync
section.kodi_tagid = old_section.kodi_tagid
section.sync_to_kodi = old_section.sync_to_kodi
section.last_sync = old_section.last_sync
def _delete_kodi_db_items(section):
if section.section_type == v.PLEX_TYPE_MOVIE:
kodi_context = kodi_db.KodiVideoDB
types = ((v.PLEX_TYPE_MOVIE, itemtypes.Movie), )
elif section.section_type == v.PLEX_TYPE_SHOW:
kodi_context = kodi_db.KodiVideoDB
types = ((v.PLEX_TYPE_SHOW, itemtypes.Show),
(v.PLEX_TYPE_SEASON, itemtypes.Season),
(v.PLEX_TYPE_EPISODE, itemtypes.Episode))
elif section.section_type == v.PLEX_TYPE_ARTIST:
kodi_context = kodi_db.KodiMusicDB
types = ((v.PLEX_TYPE_ARTIST, itemtypes.Artist),
(v.PLEX_TYPE_ALBUM, itemtypes.Album),
(v.PLEX_TYPE_SONG, itemtypes.Song))
else:
types = ()
LOG.debug('Skipping deletion of DB elements for section %s', section)
for plex_type, context in types:
while True:
with PlexDB() as plexdb:
plex_ids = list(plexdb.plexid_by_sectionid(section.section_id,
plex_type,
BATCH_SIZE))
with kodi_context(texture_db=True) as kodidb:
typus = context(None, plexdb=plexdb, kodidb=kodidb)
for plex_id in plex_ids:
if SHOULD_CANCEL():
return False
typus.remove(plex_id)
if len(plex_ids) < BATCH_SIZE:
break
return True
def _choose_libraries(sections):
"""
Displays a dialog for the user to select the libraries he wants synched
Returns True if the user chose new sections, False if he aborted
"""
import xbmcgui
selectable_sections = []
preselected = []
index = 0
for section in sections:
if not app.SYNC.enable_music and section.section_type == v.PLEX_TYPE_ARTIST:
LOG.info('Ignoring music section: %s', section)
continue
elif section.section_type == v.PLEX_TYPE_PHOTO:
# We won't ever show Photo sections
continue
else:
# Offer user the new section
selectable_sections.append(section.name)
# Sections have been either preselected by the user or they are new
if section.sync_to_kodi:
preselected.append(index)
index += 1
    # Don't ask the user again for this PMS even if the user cancels the sync dialog
utils.settings('sections_asked_for_machine_identifier',
value=app.CONN.machine_identifier)
# "Select Plex libraries to sync"
selected_sections = xbmcgui.Dialog().multiselect(utils.lang(30524),
selectable_sections,
preselect=preselected,
useDetails=False)
if selected_sections is None:
LOG.info('User chose not to select which libraries to sync')
return False
index = 0
for section in sections:
if not app.SYNC.enable_music and section.section_type == v.PLEX_TYPE_ARTIST:
continue
elif section.section_type == v.PLEX_TYPE_PHOTO:
continue
else:
section.sync_to_kodi = index in selected_sections
index += 1
return True
def delete_playlists():
"""
Clean up the playlists
"""
path = path_ops.translate_path('special://profile/playlists/video/')
for root, _, files in path_ops.walk(path):
for file in files:
if file.startswith('Plex'):
path_ops.remove(path_ops.path.join(root, file))
def delete_nodes():
"""
Clean up video nodes
"""
path = path_ops.translate_path("special://profile/library/video/")
for root, dirs, _ in path_ops.walk(path):
for directory in dirs:
if directory.startswith('Plex-'):
path_ops.rmtree(path_ops.path.join(root, directory))
break
def delete_files():
"""
Deletes both all the Plex-xxx video node xmls as well as smart playlists
"""
delete_nodes()
delete_playlists()
def sync_from_pms(parent_self, pick_libraries=False):
"""
Sync the Plex library sections.
pick_libraries=True will prompt the user the select the libraries he
wants to sync
"""
global SHOULD_CANCEL
LOG.info('Starting synching sections from the PMS')
SHOULD_CANCEL = parent_self.should_cancel
try:
return _sync_from_pms(pick_libraries)
finally:
SHOULD_CANCEL = None
LOG.info('Done synching sections from the PMS: %s', app.SYNC.sections)
def _sync_from_pms(pick_libraries):
    # Re-set value in order to make sure we got the latest user input
app.SYNC.enable_music = utils.settings('enableMusic') == 'true'
xml = PF.get_plex_sections()
if xml is None:
LOG.error("Error download PMS sections, abort")
return False
sections = []
old_sections = []
for i, xml_element in enumerate(xml.findall('Directory')):
api = API(xml_element)
if api.plex_type in v.UNSUPPORTED_PLEX_TYPES:
continue
sections.append(Section(index=i, xml_element=xml_element))
with PlexDB() as plexdb:
for section_db in plexdb.all_sections():
old_sections.append(Section(section_db_element=section_db))
# Update our latest PMS sections with info saved in the PMS DB
_retrieve_old_settings(sections, old_sections)
if (app.CONN.machine_identifier != utils.settings('sections_asked_for_machine_identifier') or
pick_libraries):
if not pick_libraries:
LOG.info('First time connecting to this PMS, choosing libraries')
_choose_libraries(sections)
# We got everything - save to Plex db in case Kodi restarts before we're
# done here
_save_sections_to_plex_db(sections)
# Tweak some settings so Kodi does NOT scan the music folders
if app.SYNC.direct_paths is True:
        # Will reboot Kodi if a new library is detected
music.excludefromscan_music_folders(sections)
# Delete all old sections that are obsolete
# This will also delete sections whose name (or type) have changed
for old_section in old_sections:
for section in sections:
if old_section == section:
break
else:
if not old_section.sync_to_kodi:
continue
LOG.info('Deleting entire section: %s', old_section)
# Remove all linked items
if not _delete_kodi_db_items(old_section):
return False
# Remove the section itself
old_section.remove()
# Clear all existing window vars because we did NOT remove them with the
# command section.remove()
clear_window_vars()
# Time to write the sections to Kodi
for section in sections:
section.to_kodi()
# Counter that tells us how many sections we have - e.g. for skins and
# listings
utils.window('Plex.nodes.total', str(len(sections)))
app.SYNC.sections = sections
return True
def _clear_window_vars(index):
node = 'Plex.nodes.%s' % index
utils.window('%s.index' % node, clear=True)
utils.window('%s.title' % node, clear=True)
utils.window('%s.type' % node, clear=True)
utils.window('%s.content' % node, clear=True)
utils.window('%s.path' % node, clear=True)
utils.window('%s.id' % node, clear=True)
# Just clear everything here, ignore the plex_type
for typus in (x[0] for y in nodes.NODE_TYPES.values() for x in y):
for kind in WINDOW_ARGS:
node = 'Plex.nodes.%s.%s.%s' % (index, typus, kind)
utils.window(node, clear=True)
def clear_window_vars():
"""
Removes all references to sections stored in window vars 'Plex.nodes...'
"""
LOG.debug('Clearing all the Plex video node variables')
number_of_nodes = int(utils.window('Plex.nodes.total') or 0)
utils.window('Plex.nodes.total', clear=True)
for index in range(number_of_nodes):
_clear_window_vars(index)
def delete_videonode_files():
"""
Removes all the PKC video node files under userdata/library/video that
start with 'Plex-'
"""
for root, dirs, _ in path_ops.walk(LIBRARY_PATH):
for directory in dirs:
if directory.startswith('Plex-'):
abs_path = path_ops.path.join(root, directory)
LOG.info('Removing video node directory %s', abs_path)
path_ops.rmtree(abs_path, ignore_errors=True)
break
| gpl-2.0 |
hamzehd/edx-platform | cms/djangoapps/contentstore/views/export_git.py | 146 | 1723 | """
This view handles exporting the course xml to a git repository if
the giturl attribute is set.
"""
import logging
from django.contrib.auth.decorators import login_required
from django.core.exceptions import PermissionDenied
from django.views.decorators.csrf import ensure_csrf_cookie
from django.utils.translation import ugettext as _
from student.auth import has_course_author_access
import contentstore.git_export_utils as git_export_utils
from edxmako.shortcuts import render_to_response
from xmodule.modulestore.django import modulestore
from opaque_keys.edx.keys import CourseKey
log = logging.getLogger(__name__)
@ensure_csrf_cookie
@login_required
def export_git(request, course_key_string):
"""
This method serves up the 'Export to Git' page
"""
course_key = CourseKey.from_string(course_key_string)
if not has_course_author_access(request.user, course_key):
raise PermissionDenied()
course_module = modulestore().get_course(course_key)
failed = False
log.debug('export_git course_module=%s', course_module)
msg = ""
if 'action' in request.GET and course_module.giturl:
if request.GET['action'] == 'push':
try:
git_export_utils.export_to_git(
course_module.id,
course_module.giturl,
request.user,
)
msg = _('Course successfully exported to git repository')
except git_export_utils.GitExportError as ex:
failed = True
msg = unicode(ex)
return render_to_response('export_git.html', {
'context_course': course_module,
'msg': msg,
'failed': failed,
})
| agpl-3.0 |
caot/intellij-community | plugins/hg4idea/testData/bin/hgext/largefiles/overrides.py | 90 | 46667 | # Copyright 2009-2010 Gregory P. Ward
# Copyright 2009-2010 Intelerad Medical Systems Incorporated
# Copyright 2010-2011 Fog Creek Software
# Copyright 2010-2011 Unity Technologies
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
'''Overridden Mercurial commands and functions for the largefiles extension'''
import os
import copy
from mercurial import hg, commands, util, cmdutil, scmutil, match as match_, \
node, archival, error, merge, discovery
from mercurial.i18n import _
from mercurial.node import hex
from hgext import rebase
import lfutil
import lfcommands
import basestore
# -- Utility functions: commonly/repeatedly needed functionality ---------------
def installnormalfilesmatchfn(manifest):
'''overrides scmutil.match so that the matcher it returns will ignore all
largefiles'''
oldmatch = None # for the closure
def overridematch(ctx, pats=[], opts={}, globbed=False,
default='relpath'):
match = oldmatch(ctx, pats, opts, globbed, default)
m = copy.copy(match)
notlfile = lambda f: not (lfutil.isstandin(f) or lfutil.standin(f) in
manifest)
m._files = filter(notlfile, m._files)
m._fmap = set(m._files)
m._always = False
origmatchfn = m.matchfn
m.matchfn = lambda f: notlfile(f) and origmatchfn(f) or None
return m
oldmatch = installmatchfn(overridematch)
def installmatchfn(f):
oldmatch = scmutil.match
setattr(f, 'oldmatch', oldmatch)
scmutil.match = f
return oldmatch
def restorematchfn():
'''restores scmutil.match to what it was before installnormalfilesmatchfn
was called. no-op if scmutil.match is its original function.
Note that n calls to installnormalfilesmatchfn will require n calls to
restore matchfn to reverse'''
scmutil.match = getattr(scmutil.match, 'oldmatch', scmutil.match)
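# The two helpers above are meant to be used as a pair, exactly as the
# command wrappers below do (sketch of the pattern, not new behaviour):
#
# installnormalfilesmatchfn(repo[None].manifest())
# result = orig(ui, repo, *pats, **opts)
# restorematchfn()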
def addlargefiles(ui, repo, *pats, **opts):
large = opts.pop('large', None)
lfsize = lfutil.getminsize(
ui, lfutil.islfilesrepo(repo), opts.pop('lfsize', None))
lfmatcher = None
if lfutil.islfilesrepo(repo):
lfpats = ui.configlist(lfutil.longname, 'patterns', default=[])
if lfpats:
lfmatcher = match_.match(repo.root, '', list(lfpats))
lfnames = []
m = scmutil.match(repo[None], pats, opts)
m.bad = lambda x, y: None
wctx = repo[None]
for f in repo.walk(m):
exact = m.exact(f)
lfile = lfutil.standin(f) in wctx
nfile = f in wctx
exists = lfile or nfile
# Don't warn the user when they attempt to add a normal tracked file.
# The normal add code will do that for us.
if exact and exists:
if lfile:
ui.warn(_('%s already a largefile\n') % f)
continue
if (exact or not exists) and not lfutil.isstandin(f):
wfile = repo.wjoin(f)
# In case the file was removed previously, but not committed
# (issue3507)
if not os.path.exists(wfile):
continue
abovemin = (lfsize and
os.lstat(wfile).st_size >= lfsize * 1024 * 1024)
if large or abovemin or (lfmatcher and lfmatcher(f)):
lfnames.append(f)
if ui.verbose or not exact:
ui.status(_('adding %s as a largefile\n') % m.rel(f))
bad = []
standins = []
# Need to lock, otherwise there could be a race condition between
# when standins are created and added to the repo.
wlock = repo.wlock()
try:
if not opts.get('dry_run'):
lfdirstate = lfutil.openlfdirstate(ui, repo)
for f in lfnames:
standinname = lfutil.standin(f)
lfutil.writestandin(repo, standinname, hash='',
executable=lfutil.getexecutable(repo.wjoin(f)))
standins.append(standinname)
if lfdirstate[f] == 'r':
lfdirstate.normallookup(f)
else:
lfdirstate.add(f)
lfdirstate.write()
bad += [lfutil.splitstandin(f)
for f in repo[None].add(standins)
if f in m.files()]
finally:
wlock.release()
return bad
def removelargefiles(ui, repo, *pats, **opts):
after = opts.get('after')
if not pats and not after:
raise util.Abort(_('no files specified'))
m = scmutil.match(repo[None], pats, opts)
try:
repo.lfstatus = True
s = repo.status(match=m, clean=True)
finally:
repo.lfstatus = False
manifest = repo[None].manifest()
modified, added, deleted, clean = [[f for f in list
if lfutil.standin(f) in manifest]
for list in [s[0], s[1], s[3], s[6]]]
def warn(files, msg):
for f in files:
ui.warn(msg % m.rel(f))
return int(len(files) > 0)
result = 0
if after:
remove, forget = deleted, []
result = warn(modified + added + clean,
_('not removing %s: file still exists\n'))
else:
remove, forget = deleted + clean, []
result = warn(modified, _('not removing %s: file is modified (use -f'
' to force removal)\n'))
result = warn(added, _('not removing %s: file has been marked for add'
' (use forget to undo)\n')) or result
for f in sorted(remove + forget):
if ui.verbose or not m.exact(f):
ui.status(_('removing %s\n') % m.rel(f))
# Need to lock because standin files are deleted then removed from the
# repository and we could race in-between.
wlock = repo.wlock()
try:
lfdirstate = lfutil.openlfdirstate(ui, repo)
for f in remove:
if not after:
# If this is being called by addremove, notify the user that we
# are removing the file.
if getattr(repo, "_isaddremove", False):
ui.status(_('removing %s\n') % f)
util.unlinkpath(repo.wjoin(f), ignoremissing=True)
lfdirstate.remove(f)
lfdirstate.write()
forget = [lfutil.standin(f) for f in forget]
remove = [lfutil.standin(f) for f in remove]
repo[None].forget(forget)
# If this is being called by addremove, let the original addremove
# function handle this.
if not getattr(repo, "_isaddremove", False):
for f in remove:
util.unlinkpath(repo.wjoin(f), ignoremissing=True)
repo[None].forget(remove)
finally:
wlock.release()
return result
# For overriding mercurial.hgweb.webcommands so that largefiles will
# appear at their right place in the manifests.
def decodepath(orig, path):
return lfutil.splitstandin(path) or path
# -- Wrappers: modify existing commands --------------------------------
# Add works by going through the files that the user wanted to add and
# checking if they should be added as largefiles. Then it makes a new
# matcher which matches only the normal files and runs the original
# version of add.
def overrideadd(orig, ui, repo, *pats, **opts):
normal = opts.pop('normal')
if normal:
if opts.get('large'):
raise util.Abort(_('--normal cannot be used with --large'))
return orig(ui, repo, *pats, **opts)
bad = addlargefiles(ui, repo, *pats, **opts)
installnormalfilesmatchfn(repo[None].manifest())
result = orig(ui, repo, *pats, **opts)
restorematchfn()
return (result == 1 or bad) and 1 or 0
def overrideremove(orig, ui, repo, *pats, **opts):
installnormalfilesmatchfn(repo[None].manifest())
result = orig(ui, repo, *pats, **opts)
restorematchfn()
return removelargefiles(ui, repo, *pats, **opts) or result
def overridestatusfn(orig, repo, rev2, **opts):
try:
repo._repo.lfstatus = True
return orig(repo, rev2, **opts)
finally:
repo._repo.lfstatus = False
def overridestatus(orig, ui, repo, *pats, **opts):
try:
repo.lfstatus = True
return orig(ui, repo, *pats, **opts)
finally:
repo.lfstatus = False
def overridedirty(orig, repo, ignoreupdate=False):
try:
repo._repo.lfstatus = True
return orig(repo, ignoreupdate)
finally:
repo._repo.lfstatus = False
def overridelog(orig, ui, repo, *pats, **opts):
def overridematch(ctx, pats=[], opts={}, globbed=False,
default='relpath'):
"""Matcher that merges root directory with .hglf, suitable for log.
It is still possible to match .hglf directly.
For any listed files run log on the standin too.
matchfn tries both the given filename and with .hglf stripped.
"""
match = oldmatch(ctx, pats, opts, globbed, default)
m = copy.copy(match)
standins = [lfutil.standin(f) for f in m._files]
m._files.extend(standins)
m._fmap = set(m._files)
m._always = False
origmatchfn = m.matchfn
def lfmatchfn(f):
lf = lfutil.splitstandin(f)
if lf is not None and origmatchfn(lf):
return True
r = origmatchfn(f)
return r
m.matchfn = lfmatchfn
return m
oldmatch = installmatchfn(overridematch)
try:
repo.lfstatus = True
return orig(ui, repo, *pats, **opts)
finally:
repo.lfstatus = False
restorematchfn()
def overrideverify(orig, ui, repo, *pats, **opts):
large = opts.pop('large', False)
all = opts.pop('lfa', False)
contents = opts.pop('lfc', False)
result = orig(ui, repo, *pats, **opts)
if large or all or contents:
result = result or lfcommands.verifylfiles(ui, repo, all, contents)
return result
def overridedebugstate(orig, ui, repo, *pats, **opts):
large = opts.pop('large', False)
if large:
lfcommands.debugdirstate(ui, repo)
else:
orig(ui, repo, *pats, **opts)
# Override needs to refresh standins so that update's normal merge
# will go through properly. Then the other update hook (overriding repo.update)
# will get the new files. Filemerge is also overridden so that the merge
# will merge standins correctly.
def overrideupdate(orig, ui, repo, *pats, **opts):
lfdirstate = lfutil.openlfdirstate(ui, repo)
s = lfdirstate.status(match_.always(repo.root, repo.getcwd()), [], False,
False, False)
(unsure, modified, added, removed, missing, unknown, ignored, clean) = s
# Need to lock between the standins getting updated and their
# largefiles getting updated
wlock = repo.wlock()
try:
if opts['check']:
mod = len(modified) > 0
for lfile in unsure:
standin = lfutil.standin(lfile)
if repo['.'][standin].data().strip() != \
lfutil.hashfile(repo.wjoin(lfile)):
mod = True
else:
lfdirstate.normal(lfile)
lfdirstate.write()
if mod:
raise util.Abort(_('uncommitted local changes'))
# XXX handle removed differently
if not opts['clean']:
for lfile in unsure + modified + added:
lfutil.updatestandin(repo, lfutil.standin(lfile))
finally:
wlock.release()
return orig(ui, repo, *pats, **opts)
# Before starting the manifest merge, merge.update will call
# _checkunknown to check if there are any files in the merged-in
# changeset that collide with unknown files in the working copy.
#
# The largefiles are seen as unknown, so this prevents us from merging
# in a file 'foo' if we already have a largefile with the same name.
#
# The overridden function filters the unknown files by removing any
# largefiles. This makes the merge proceed and we can then handle this
# case further in the overridden manifestmerge function below.
def overridecheckunknownfile(origfn, repo, wctx, mctx, f):
if lfutil.standin(repo.dirstate.normalize(f)) in wctx:
return False
return origfn(repo, wctx, mctx, f)
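# Illustrative sketch (hedged, file names hypothetical): if the working copy
# tracks "foo" as a largefile, its standin ".hglf/foo" is present in wctx, so
# an incoming normal file "foo" is not reported as a colliding unknown file
# here; that conflict is resolved by overridemanifestmerge below.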
# The manifest merge handles conflicts on the manifest level. We want
# to handle changes in largefile-ness of files at this level too.
#
# The strategy is to run the original manifestmerge and then process
# the action list it outputs. There are two cases we need to deal with:
#
# 1. Normal file in p1, largefile in p2. Here the largefile is
# detected via its standin file, which will enter the working copy
# with a "get" action. It is not "merge" since the standin is all
# Mercurial is concerned with at this level -- the link to the
# existing normal file is not relevant here.
#
# 2. Largefile in p1, normal file in p2. Here we get a "merge" action
# since the largefile will be present in the working copy and
# different from the normal file in p2. Mercurial therefore
# triggers a merge action.
#
# In both cases, we prompt the user and emit new actions to either
# remove the standin (if the normal file was kept) or to remove the
# normal file and get the standin (if the largefile was kept). The
# default prompt answer is to use the largefile version since it was
# presumably changed on purpose.
#
# Finally, the merge.applyupdates function will then take care of
# writing the files into the working copy and lfcommands.updatelfiles
# will update the largefiles.
def overridemanifestmerge(origfn, repo, p1, p2, pa, branchmerge, force,
partial, acceptremote=False):
overwrite = force and not branchmerge
actions = origfn(repo, p1, p2, pa, branchmerge, force, partial,
acceptremote)
processed = []
for action in actions:
if overwrite:
processed.append(action)
continue
f, m, args, msg = action
choices = (_('&Largefile'), _('&Normal file'))
splitstandin = lfutil.splitstandin(f)
if (m == "g" and splitstandin is not None and
splitstandin in p1 and f in p2):
# Case 1: normal file in the working copy, largefile in
# the second parent
lfile = splitstandin
standin = f
msg = _('%s has been turned into a largefile\n'
'use (l)argefile or keep as (n)ormal file?') % lfile
if repo.ui.promptchoice(msg, choices, 0) == 0:
processed.append((lfile, "r", None, msg))
processed.append((standin, "g", (p2.flags(standin),), msg))
else:
processed.append((standin, "r", None, msg))
elif m == "g" and lfutil.standin(f) in p1 and f in p2:
# Case 2: largefile in the working copy, normal file in
# the second parent
standin = lfutil.standin(f)
lfile = f
msg = _('%s has been turned into a normal file\n'
'keep as (l)argefile or use (n)ormal file?') % lfile
if repo.ui.promptchoice(msg, choices, 0) == 0:
processed.append((lfile, "r", None, msg))
else:
processed.append((standin, "r", None, msg))
processed.append((lfile, "g", (p2.flags(lfile),), msg))
else:
processed.append(action)
return processed
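# Illustrative rewriting performed above (hedged, names hypothetical): in
# case 1, keeping the largefile turns a ('.hglf/foo', 'g', ...) action into
# ('foo', 'r', None, msg) plus ('.hglf/foo', 'g', (flags,), msg), while
# keeping the normal file emits just ('.hglf/foo', 'r', None, msg).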
# Override filemerge to prompt the user about how they wish to merge
# largefiles. This will handle identical edits, and copy/rename +
# edit without prompting the user.
def overridefilemerge(origfn, repo, mynode, orig, fcd, fco, fca):
# Use better variable names here. Because this is a wrapper we cannot
# change the variable names in the function declaration.
fcdest, fcother, fcancestor = fcd, fco, fca
if not lfutil.isstandin(orig):
return origfn(repo, mynode, orig, fcdest, fcother, fcancestor)
else:
if not fcother.cmp(fcdest): # files identical?
return None
# backwards, use working dir parent as ancestor
if fcancestor == fcother:
fcancestor = fcdest.parents()[0]
if orig != fcother.path():
repo.ui.status(_('merging %s and %s to %s\n')
% (lfutil.splitstandin(orig),
lfutil.splitstandin(fcother.path()),
lfutil.splitstandin(fcdest.path())))
else:
repo.ui.status(_('merging %s\n')
% lfutil.splitstandin(fcdest.path()))
if fcancestor.path() != fcother.path() and fcother.data() == \
fcancestor.data():
return 0
if fcancestor.path() != fcdest.path() and fcdest.data() == \
fcancestor.data():
repo.wwrite(fcdest.path(), fcother.data(), fcother.flags())
return 0
if repo.ui.promptchoice(_('largefile %s has a merge conflict\n'
'keep (l)ocal or take (o)ther?') %
lfutil.splitstandin(orig),
(_('&Local'), _('&Other')), 0) == 0:
return 0
else:
repo.wwrite(fcdest.path(), fcother.data(), fcother.flags())
return 0
# Copy first changes the matchers to match standins instead of
# largefiles. Then it overrides util.copyfile in that function it
# checks if the destination largefile already exists. It also keeps a
# list of copied files so that the largefiles can be copied and the
# dirstate updated.
def overridecopy(orig, ui, repo, pats, opts, rename=False):
# doesn't remove largefile on rename
if len(pats) < 2:
# this isn't legal, let the original function deal with it
return orig(ui, repo, pats, opts, rename)
def makestandin(relpath):
path = scmutil.canonpath(repo.root, repo.getcwd(), relpath)
return os.path.join(repo.wjoin(lfutil.standin(path)))
fullpats = scmutil.expandpats(pats)
dest = fullpats[-1]
if os.path.isdir(dest):
if not os.path.isdir(makestandin(dest)):
os.makedirs(makestandin(dest))
# This could copy both lfiles and normal files in one command,
# but we don't want to do that. First replace their matcher to
# only match normal files and run it, then replace it to just
# match largefiles and run it again.
nonormalfiles = False
nolfiles = False
try:
try:
installnormalfilesmatchfn(repo[None].manifest())
result = orig(ui, repo, pats, opts, rename)
except util.Abort, e:
if str(e) != _('no files to copy'):
raise e
else:
nonormalfiles = True
result = 0
finally:
restorematchfn()
# The first rename can cause our current working directory to be removed.
# In that case there is nothing left to copy/rename so just quit.
try:
repo.getcwd()
except OSError:
return result
try:
try:
            # When we call orig below, it creates the standins, but we don't
            # add them to the dirstate until later, so lock during that time.
wlock = repo.wlock()
manifest = repo[None].manifest()
oldmatch = None # for the closure
def overridematch(ctx, pats=[], opts={}, globbed=False,
default='relpath'):
newpats = []
# The patterns were previously mangled to add the standin
# directory; we need to remove that now
for pat in pats:
if match_.patkind(pat) is None and lfutil.shortname in pat:
newpats.append(pat.replace(lfutil.shortname, ''))
else:
newpats.append(pat)
match = oldmatch(ctx, newpats, opts, globbed, default)
m = copy.copy(match)
lfile = lambda f: lfutil.standin(f) in manifest
m._files = [lfutil.standin(f) for f in m._files if lfile(f)]
m._fmap = set(m._files)
m._always = False
origmatchfn = m.matchfn
m.matchfn = lambda f: (lfutil.isstandin(f) and
(f in manifest) and
origmatchfn(lfutil.splitstandin(f)) or
None)
return m
oldmatch = installmatchfn(overridematch)
listpats = []
for pat in pats:
if match_.patkind(pat) is not None:
listpats.append(pat)
else:
listpats.append(makestandin(pat))
try:
origcopyfile = util.copyfile
copiedfiles = []
def overridecopyfile(src, dest):
if (lfutil.shortname in src and
dest.startswith(repo.wjoin(lfutil.shortname))):
destlfile = dest.replace(lfutil.shortname, '')
if not opts['force'] and os.path.exists(destlfile):
raise IOError('',
_('destination largefile already exists'))
copiedfiles.append((src, dest))
origcopyfile(src, dest)
util.copyfile = overridecopyfile
result += orig(ui, repo, listpats, opts, rename)
finally:
util.copyfile = origcopyfile
lfdirstate = lfutil.openlfdirstate(ui, repo)
for (src, dest) in copiedfiles:
if (lfutil.shortname in src and
dest.startswith(repo.wjoin(lfutil.shortname))):
srclfile = src.replace(repo.wjoin(lfutil.standin('')), '')
destlfile = dest.replace(repo.wjoin(lfutil.standin('')), '')
destlfiledir = os.path.dirname(repo.wjoin(destlfile)) or '.'
if not os.path.isdir(destlfiledir):
os.makedirs(destlfiledir)
if rename:
os.rename(repo.wjoin(srclfile), repo.wjoin(destlfile))
lfdirstate.remove(srclfile)
else:
util.copyfile(repo.wjoin(srclfile),
repo.wjoin(destlfile))
lfdirstate.add(destlfile)
lfdirstate.write()
except util.Abort, e:
if str(e) != _('no files to copy'):
raise e
else:
nolfiles = True
finally:
restorematchfn()
wlock.release()
if nolfiles and nonormalfiles:
raise util.Abort(_('no files to copy'))
return result
# When the user calls revert, we have to be careful to not revert any
# changes to other largefiles accidentally. This means we have to keep
# track of the largefiles that are being reverted so we only pull down
# the necessary largefiles.
#
# Standins are only updated (to match the hash of largefiles) before
# commits. Update the standins then run the original revert, changing
# the matcher to hit standins instead of largefiles. Based on the
# resulting standins update the largefiles. Then return the standins
# to their proper state
def overriderevert(orig, ui, repo, *pats, **opts):
# Because we put the standins in a bad state (by updating them)
# and then return them to a correct state we need to lock to
# prevent others from changing them in their incorrect state.
wlock = repo.wlock()
try:
lfdirstate = lfutil.openlfdirstate(ui, repo)
(modified, added, removed, missing, unknown, ignored, clean) = \
lfutil.lfdirstatestatus(lfdirstate, repo, repo['.'].rev())
lfdirstate.write()
for lfile in modified:
lfutil.updatestandin(repo, lfutil.standin(lfile))
for lfile in missing:
if (os.path.exists(repo.wjoin(lfutil.standin(lfile)))):
os.unlink(repo.wjoin(lfutil.standin(lfile)))
try:
ctx = scmutil.revsingle(repo, opts.get('rev'))
oldmatch = None # for the closure
def overridematch(ctx, pats=[], opts={}, globbed=False,
default='relpath'):
match = oldmatch(ctx, pats, opts, globbed, default)
m = copy.copy(match)
def tostandin(f):
if lfutil.standin(f) in ctx:
return lfutil.standin(f)
elif lfutil.standin(f) in repo[None]:
return None
return f
m._files = [tostandin(f) for f in m._files]
m._files = [f for f in m._files if f is not None]
m._fmap = set(m._files)
m._always = False
origmatchfn = m.matchfn
def matchfn(f):
if lfutil.isstandin(f):
# We need to keep track of what largefiles are being
# matched so we know which ones to update later --
# otherwise we accidentally revert changes to other
# largefiles. This is repo-specific, so duckpunch the
# repo object to keep the list of largefiles for us
# later.
if origmatchfn(lfutil.splitstandin(f)) and \
(f in repo[None] or f in ctx):
lfileslist = getattr(repo, '_lfilestoupdate', [])
lfileslist.append(lfutil.splitstandin(f))
repo._lfilestoupdate = lfileslist
return True
else:
return False
return origmatchfn(f)
m.matchfn = matchfn
return m
oldmatch = installmatchfn(overridematch)
matches = overridematch(repo[None], pats, opts)
orig(ui, repo, *pats, **opts)
finally:
restorematchfn()
lfileslist = getattr(repo, '_lfilestoupdate', [])
lfcommands.updatelfiles(ui, repo, filelist=lfileslist,
printmessage=False)
# empty out the largefiles list so we start fresh next time
repo._lfilestoupdate = []
for lfile in modified:
if lfile in lfileslist:
if os.path.exists(repo.wjoin(lfutil.standin(lfile))) and lfile\
in repo['.']:
lfutil.writestandin(repo, lfutil.standin(lfile),
repo['.'][lfile].data().strip(),
'x' in repo['.'][lfile].flags())
lfdirstate = lfutil.openlfdirstate(ui, repo)
for lfile in added:
standin = lfutil.standin(lfile)
if standin not in ctx and (standin in matches or opts.get('all')):
if lfile in lfdirstate:
lfdirstate.drop(lfile)
util.unlinkpath(repo.wjoin(standin))
lfdirstate.write()
finally:
wlock.release()
def hgupdaterepo(orig, repo, node, overwrite):
if not overwrite:
# Only call updatelfiles on the standins that have changed to save time
oldstandins = lfutil.getstandinsstate(repo)
result = orig(repo, node, overwrite)
filelist = None
if not overwrite:
newstandins = lfutil.getstandinsstate(repo)
filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
lfcommands.updatelfiles(repo.ui, repo, filelist=filelist)
return result
def hgmerge(orig, repo, node, force=None, remind=True):
result = orig(repo, node, force, remind)
lfcommands.updatelfiles(repo.ui, repo)
return result
# When we rebase a repository with remotely changed largefiles, we need to
# take some extra care so that the largefiles are correctly updated in the
# working copy
def overridepull(orig, ui, repo, source=None, **opts):
revsprepull = len(repo)
if not source:
source = 'default'
repo.lfpullsource = source
if opts.get('rebase', False):
repo._isrebasing = True
try:
if opts.get('update'):
del opts['update']
ui.debug('--update and --rebase are not compatible, ignoring '
'the update flag\n')
del opts['rebase']
cmdutil.bailifchanged(repo)
origpostincoming = commands.postincoming
def _dummy(*args, **kwargs):
pass
commands.postincoming = _dummy
try:
result = commands.pull(ui, repo, source, **opts)
finally:
commands.postincoming = origpostincoming
revspostpull = len(repo)
if revspostpull > revsprepull:
result = result or rebase.rebase(ui, repo)
finally:
repo._isrebasing = False
else:
result = orig(ui, repo, source, **opts)
revspostpull = len(repo)
lfrevs = opts.get('lfrev', [])
if opts.get('all_largefiles'):
lfrevs.append('pulled()')
if lfrevs and revspostpull > revsprepull:
numcached = 0
repo.firstpulled = revsprepull # for pulled() revset expression
try:
for rev in scmutil.revrange(repo, lfrevs):
ui.note(_('pulling largefiles for revision %s\n') % rev)
(cached, missing) = lfcommands.cachelfiles(ui, repo, rev)
numcached += len(cached)
finally:
del repo.firstpulled
ui.status(_("%d largefiles cached\n") % numcached)
return result
def pulledrevsetsymbol(repo, subset, x):
"""``pulled()``
    Changesets that have just been pulled.
Only available with largefiles from pull --lfrev expressions.
.. container:: verbose
Some examples:
- pull largefiles for all new changesets::
hg pull -lfrev "pulled()"
- pull largefiles for all new branch heads::
hg pull -lfrev "head(pulled()) and not closed()"
"""
try:
firstpulled = repo.firstpulled
except AttributeError:
raise util.Abort(_("pulled() only available in --lfrev"))
return [r for r in subset if r >= firstpulled]
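# E.g. (hedged): if the repo had revisions 0..9 before the pull, firstpulled
# is 10 and "pulled()" selects the newly pulled revisions 10 and above from
# the subset.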
def overrideclone(orig, ui, source, dest=None, **opts):
d = dest
if d is None:
d = hg.defaultdest(source)
if opts.get('all_largefiles') and not hg.islocal(d):
        raise util.Abort(_(
            '--all-largefiles is incompatible with non-local destination %s')
            % d)
return orig(ui, source, dest, **opts)
def hgclone(orig, ui, opts, *args, **kwargs):
result = orig(ui, opts, *args, **kwargs)
if result is not None:
sourcerepo, destrepo = result
repo = destrepo.local()
        # Caching is implicitly limited to the 'rev' option, since the dest
        # repo was truncated at that point. The user may expect a download
        # count with this option, so attempt the download whether or not this
        # is a largefile repo.
if opts.get('all_largefiles'):
success, missing = lfcommands.downloadlfiles(ui, repo, None)
if missing != 0:
return None
return result
def overriderebase(orig, ui, repo, **opts):
repo._isrebasing = True
try:
return orig(ui, repo, **opts)
finally:
repo._isrebasing = False
def overridearchive(orig, repo, dest, node, kind, decode=True, matchfn=None,
prefix=None, mtime=None, subrepos=None):
# No need to lock because we are only reading history and
# largefile caches, neither of which are modified.
lfcommands.cachelfiles(repo.ui, repo, node)
if kind not in archival.archivers:
raise util.Abort(_("unknown archive type '%s'") % kind)
ctx = repo[node]
if kind == 'files':
if prefix:
raise util.Abort(
_('cannot give prefix when archiving to files'))
else:
prefix = archival.tidyprefix(dest, kind, prefix)
def write(name, mode, islink, getdata):
if matchfn and not matchfn(name):
return
data = getdata()
if decode:
data = repo.wwritedata(name, data)
archiver.addfile(prefix + name, mode, islink, data)
archiver = archival.archivers[kind](dest, mtime or ctx.date()[0])
if repo.ui.configbool("ui", "archivemeta", True):
def metadata():
base = 'repo: %s\nnode: %s\nbranch: %s\n' % (
hex(repo.changelog.node(0)), hex(node), ctx.branch())
tags = ''.join('tag: %s\n' % t for t in ctx.tags()
if repo.tagtype(t) == 'global')
if not tags:
repo.ui.pushbuffer()
opts = {'template': '{latesttag}\n{latesttagdistance}',
'style': '', 'patch': None, 'git': None}
cmdutil.show_changeset(repo.ui, repo, opts).show(ctx)
ltags, dist = repo.ui.popbuffer().split('\n')
tags = ''.join('latesttag: %s\n' % t for t in ltags.split(':'))
tags += 'latesttagdistance: %s\n' % dist
return base + tags
write('.hg_archival.txt', 0644, False, metadata)
for f in ctx:
ff = ctx.flags(f)
getdata = ctx[f].data
if lfutil.isstandin(f):
path = lfutil.findfile(repo, getdata().strip())
if path is None:
raise util.Abort(
_('largefile %s not found in repo store or system cache')
% lfutil.splitstandin(f))
f = lfutil.splitstandin(f)
def getdatafn():
fd = None
try:
fd = open(path, 'rb')
return fd.read()
finally:
if fd:
fd.close()
getdata = getdatafn
write(f, 'x' in ff and 0755 or 0644, 'l' in ff, getdata)
if subrepos:
for subpath in sorted(ctx.substate):
sub = ctx.sub(subpath)
submatch = match_.narrowmatcher(subpath, matchfn)
sub.archive(repo.ui, archiver, prefix, submatch)
archiver.done()
def hgsubrepoarchive(orig, repo, ui, archiver, prefix, match=None):
repo._get(repo._state + ('hg',))
rev = repo._state[1]
ctx = repo._repo[rev]
lfcommands.cachelfiles(ui, repo._repo, ctx.node())
def write(name, mode, islink, getdata):
# At this point, the standin has been replaced with the largefile name,
# so the normal matcher works here without the lfutil variants.
        if match and not match(name):
return
data = getdata()
archiver.addfile(prefix + repo._path + '/' + name, mode, islink, data)
for f in ctx:
ff = ctx.flags(f)
getdata = ctx[f].data
if lfutil.isstandin(f):
path = lfutil.findfile(repo._repo, getdata().strip())
if path is None:
raise util.Abort(
_('largefile %s not found in repo store or system cache')
% lfutil.splitstandin(f))
f = lfutil.splitstandin(f)
def getdatafn():
fd = None
try:
fd = open(os.path.join(prefix, path), 'rb')
return fd.read()
finally:
if fd:
fd.close()
getdata = getdatafn
write(f, 'x' in ff and 0755 or 0644, 'l' in ff, getdata)
for subpath in sorted(ctx.substate):
sub = ctx.sub(subpath)
submatch = match_.narrowmatcher(subpath, match)
sub.archive(ui, archiver, os.path.join(prefix, repo._path) + '/',
submatch)
# If a largefile is modified, the change is not reflected in its
# standin until a commit. cmdutil.bailifchanged() raises an exception
# if the repo has uncommitted changes. Wrap it to also check if
# largefiles were changed. This is used by bisect and backout.
def overridebailifchanged(orig, repo):
orig(repo)
repo.lfstatus = True
modified, added, removed, deleted = repo.status()[:4]
repo.lfstatus = False
if modified or added or removed or deleted:
raise util.Abort(_('outstanding uncommitted changes'))
# Fetch doesn't use cmdutil.bailifchanged so override it to add the check
def overridefetch(orig, ui, repo, *pats, **opts):
repo.lfstatus = True
modified, added, removed, deleted = repo.status()[:4]
repo.lfstatus = False
if modified or added or removed or deleted:
raise util.Abort(_('outstanding uncommitted changes'))
return orig(ui, repo, *pats, **opts)
def overrideforget(orig, ui, repo, *pats, **opts):
installnormalfilesmatchfn(repo[None].manifest())
result = orig(ui, repo, *pats, **opts)
restorematchfn()
m = scmutil.match(repo[None], pats, opts)
try:
repo.lfstatus = True
s = repo.status(match=m, clean=True)
finally:
repo.lfstatus = False
forget = sorted(s[0] + s[1] + s[3] + s[6])
forget = [f for f in forget if lfutil.standin(f) in repo[None].manifest()]
for f in forget:
if lfutil.standin(f) not in repo.dirstate and not \
os.path.isdir(m.rel(lfutil.standin(f))):
ui.warn(_('not removing %s: file is already untracked\n')
% m.rel(f))
result = 1
for f in forget:
if ui.verbose or not m.exact(f):
ui.status(_('removing %s\n') % m.rel(f))
# Need to lock because standin files are deleted then removed from the
# repository and we could race in-between.
wlock = repo.wlock()
try:
lfdirstate = lfutil.openlfdirstate(ui, repo)
for f in forget:
if lfdirstate[f] == 'a':
lfdirstate.drop(f)
else:
lfdirstate.remove(f)
lfdirstate.write()
standins = [lfutil.standin(f) for f in forget]
for f in standins:
util.unlinkpath(repo.wjoin(f), ignoremissing=True)
repo[None].forget(standins)
finally:
wlock.release()
return result
def getoutgoinglfiles(ui, repo, dest=None, **opts):
dest = ui.expandpath(dest or 'default-push', dest or 'default')
dest, branches = hg.parseurl(dest, opts.get('branch'))
revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
if revs:
revs = [repo.lookup(rev) for rev in scmutil.revrange(repo, revs)]
try:
remote = hg.peer(repo, opts, dest)
except error.RepoError:
return None
outgoing = discovery.findcommonoutgoing(repo, remote.peer(), force=False)
if not outgoing.missing:
return outgoing.missing
o = repo.changelog.nodesbetween(outgoing.missing, revs)[0]
if opts.get('newest_first'):
o.reverse()
toupload = set()
for n in o:
parents = [p for p in repo.changelog.parents(n) if p != node.nullid]
ctx = repo[n]
files = set(ctx.files())
if len(parents) == 2:
mc = ctx.manifest()
mp1 = ctx.parents()[0].manifest()
mp2 = ctx.parents()[1].manifest()
for f in mp1:
if f not in mc:
files.add(f)
for f in mp2:
if f not in mc:
files.add(f)
for f in mc:
if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
files.add(f)
toupload = toupload.union(
set([f for f in files if lfutil.isstandin(f) and f in ctx]))
return sorted(toupload)
def overrideoutgoing(orig, ui, repo, dest=None, **opts):
result = orig(ui, repo, dest, **opts)
if opts.pop('large', None):
toupload = getoutgoinglfiles(ui, repo, dest, **opts)
if toupload is None:
ui.status(_('largefiles: No remote repo\n'))
elif not toupload:
ui.status(_('largefiles: no files to upload\n'))
else:
ui.status(_('largefiles to upload:\n'))
for file in toupload:
ui.status(lfutil.splitstandin(file) + '\n')
ui.status('\n')
return result
def overridesummary(orig, ui, repo, *pats, **opts):
try:
repo.lfstatus = True
orig(ui, repo, *pats, **opts)
finally:
repo.lfstatus = False
if opts.pop('large', None):
toupload = getoutgoinglfiles(ui, repo, None, **opts)
if toupload is None:
# i18n: column positioning for "hg summary"
ui.status(_('largefiles: (no remote repo)\n'))
elif not toupload:
# i18n: column positioning for "hg summary"
ui.status(_('largefiles: (no files to upload)\n'))
else:
# i18n: column positioning for "hg summary"
ui.status(_('largefiles: %d to upload\n') % len(toupload))
def scmutiladdremove(orig, repo, pats=[], opts={}, dry_run=None,
similarity=None):
if not lfutil.islfilesrepo(repo):
return orig(repo, pats, opts, dry_run, similarity)
# Get the list of missing largefiles so we can remove them
lfdirstate = lfutil.openlfdirstate(repo.ui, repo)
s = lfdirstate.status(match_.always(repo.root, repo.getcwd()), [], False,
False, False)
(unsure, modified, added, removed, missing, unknown, ignored, clean) = s
    # Call into the normal remove code, but let the original addremove handle
    # the removal of the standin itself. Monkey patching here makes sure we
    # don't remove the standin in the largefiles code, preventing a very
    # confused state later.
if missing:
m = [repo.wjoin(f) for f in missing]
repo._isaddremove = True
removelargefiles(repo.ui, repo, *m, **opts)
repo._isaddremove = False
# Call into the normal add code, and any files that *should* be added as
# largefiles will be
addlargefiles(repo.ui, repo, *pats, **opts)
# Now that we've handled largefiles, hand off to the original addremove
# function to take care of the rest. Make sure it doesn't do anything with
# largefiles by installing a matcher that will ignore them.
installnormalfilesmatchfn(repo[None].manifest())
result = orig(repo, pats, opts, dry_run, similarity)
restorematchfn()
return result
# Calling purge with --all will cause the largefiles to be deleted.
# Override repo.status to prevent this from happening.
def overridepurge(orig, ui, repo, *dirs, **opts):
# XXX large file status is buggy when used on repo proxy.
    # XXX this needs to be investigated.
repo = repo.unfiltered()
oldstatus = repo.status
def overridestatus(node1='.', node2=None, match=None, ignored=False,
clean=False, unknown=False, listsubrepos=False):
r = oldstatus(node1, node2, match, ignored, clean, unknown,
listsubrepos)
lfdirstate = lfutil.openlfdirstate(ui, repo)
modified, added, removed, deleted, unknown, ignored, clean = r
unknown = [f for f in unknown if lfdirstate[f] == '?']
ignored = [f for f in ignored if lfdirstate[f] == '?']
return modified, added, removed, deleted, unknown, ignored, clean
repo.status = overridestatus
orig(ui, repo, *dirs, **opts)
repo.status = oldstatus
def overriderollback(orig, ui, repo, **opts):
result = orig(ui, repo, **opts)
merge.update(repo, node=None, branchmerge=False, force=True,
partial=lfutil.isstandin)
wlock = repo.wlock()
try:
lfdirstate = lfutil.openlfdirstate(ui, repo)
lfiles = lfutil.listlfiles(repo)
oldlfiles = lfutil.listlfiles(repo, repo[None].parents()[0].rev())
for file in lfiles:
if file in oldlfiles:
lfdirstate.normallookup(file)
else:
lfdirstate.add(file)
lfdirstate.write()
finally:
wlock.release()
return result
def overridetransplant(orig, ui, repo, *revs, **opts):
try:
oldstandins = lfutil.getstandinsstate(repo)
repo._istransplanting = True
result = orig(ui, repo, *revs, **opts)
newstandins = lfutil.getstandinsstate(repo)
filelist = lfutil.getlfilestoupdate(oldstandins, newstandins)
lfcommands.updatelfiles(repo.ui, repo, filelist=filelist,
printmessage=True)
finally:
repo._istransplanting = False
return result
def overridecat(orig, ui, repo, file1, *pats, **opts):
ctx = scmutil.revsingle(repo, opts.get('rev'))
err = 1
notbad = set()
m = scmutil.match(ctx, (file1,) + pats, opts)
origmatchfn = m.matchfn
def lfmatchfn(f):
lf = lfutil.splitstandin(f)
if lf is None:
return origmatchfn(f)
notbad.add(lf)
return origmatchfn(lf)
m.matchfn = lfmatchfn
origbadfn = m.bad
def lfbadfn(f, msg):
if not f in notbad:
return origbadfn(f, msg)
m.bad = lfbadfn
for f in ctx.walk(m):
fp = cmdutil.makefileobj(repo, opts.get('output'), ctx.node(),
pathname=f)
lf = lfutil.splitstandin(f)
if lf is None:
# duplicating unreachable code from commands.cat
data = ctx[f].data()
if opts.get('decode'):
data = repo.wwritedata(f, data)
fp.write(data)
else:
hash = lfutil.readstandin(repo, lf, ctx.rev())
if not lfutil.inusercache(repo.ui, hash):
store = basestore._openstore(repo)
success, missing = store.get([(lf, hash)])
if len(success) != 1:
raise util.Abort(
_('largefile %s is not in cache and could not be '
'downloaded') % lf)
path = lfutil.usercachepath(repo.ui, hash)
fpin = open(path, "rb")
for chunk in util.filechunkiter(fpin, 128 * 1024):
fp.write(chunk)
fpin.close()
fp.close()
err = 0
return err
def mercurialsinkbefore(orig, sink):
sink.repo._isconverting = True
orig(sink)
def mercurialsinkafter(orig, sink):
sink.repo._isconverting = False
orig(sink)
| apache-2.0 |
mavit/ansible | test/units/module_utils/aws/test_aws_module.py | 12 | 6353 | # -*- coding: utf-8 -*-
# (c) 2017, Michael De La Rue
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
from pytest import importorskip
import unittest
from ansible.module_utils import basic
from ansible.module_utils.aws.core import AnsibleAWSModule
from ansible.module_utils._text import to_bytes
from ansible.compat.tests.mock import Mock, patch
import json
importorskip("boto3")
botocore = importorskip("botocore")
class AWSModuleTestCase(unittest.TestCase):
basic._ANSIBLE_ARGS = to_bytes(json.dumps({'ANSIBLE_MODULE_ARGS': {'_ansible_tmpdir': '/tmp/ansible-abc'}}))
def test_create_aws_module_should_set_up_params(self):
m = AnsibleAWSModule(argument_spec=dict(
win_string_arg=dict(type='list', default=['win'])
))
m_noretry_no_customargs = AnsibleAWSModule(
auto_retry=False, default_args=False,
argument_spec=dict(
success_string_arg=dict(type='list', default=['success'])
)
)
assert m, "module wasn't true!!"
assert m_noretry_no_customargs, "module wasn't true!!"
m_params = m.params
m_no_defs_params = m_noretry_no_customargs.params
assert 'region' in m_params
assert 'win' in m_params["win_string_arg"]
assert 'success' in m_no_defs_params["success_string_arg"]
assert 'aws_secret_key' not in m_no_defs_params
class ErrorReportingTestcase(unittest.TestCase):
def test_botocore_exception_reports_nicely_via_fail_json_aws(self):
basic._ANSIBLE_ARGS = to_bytes(json.dumps({'ANSIBLE_MODULE_ARGS': {'_ansible_tmpdir': '/tmp/ansible-abc'}}))
module = AnsibleAWSModule(argument_spec=dict(
fail_mode=dict(type='list', default=['success'])
))
fail_json_double = Mock()
err_msg = {'Error': {'Code': 'FakeClass.FakeError'}}
with patch.object(basic.AnsibleModule, 'fail_json', fail_json_double):
try:
raise botocore.exceptions.ClientError(err_msg, 'Could not find you')
except Exception as e:
print("exception is " + str(e))
module.fail_json_aws(e, msg="Fake failure for testing boto exception messages")
assert(len(fail_json_double.mock_calls) >
0), "failed to call fail_json when should have"
assert(len(fail_json_double.mock_calls) <
2), "called fail_json multiple times when once would do"
assert("test_botocore_exception_reports_nicely"
in fail_json_double.mock_calls[0][2]["exception"]), \
"exception traceback doesn't include correct function, fail call was actually: " \
+ str(fail_json_double.mock_calls[0])
assert("Fake failure for testing boto exception messages:"
in fail_json_double.mock_calls[0][2]["msg"]), \
"error message doesn't include the local message; was: " \
+ str(fail_json_double.mock_calls[0])
assert("Could not find you" in fail_json_double.mock_calls[0][2]["msg"]), \
"error message doesn't include the botocore exception message; was: " \
+ str(fail_json_double.mock_calls[0])
try:
fail_json_double.mock_calls[0][2]["error"]
except KeyError:
raise Exception("error was missing; call was: " + str(fail_json_double.mock_calls[0]))
assert("FakeClass.FakeError" == fail_json_double.mock_calls[0][2]["error"]["code"]), \
"Failed to find error/code; was: " + str(fail_json_double.mock_calls[0])
def test_botocore_exception_without_response_reports_nicely_via_fail_json_aws(self):
basic._ANSIBLE_ARGS = to_bytes(json.dumps({'ANSIBLE_MODULE_ARGS': {'_ansible_tmpdir': '/tmp/ansible-abc'}}))
module = AnsibleAWSModule(argument_spec=dict(
fail_mode=dict(type='list', default=['success'])
))
fail_json_double = Mock()
err_msg = None
with patch.object(basic.AnsibleModule, 'fail_json', fail_json_double):
try:
raise botocore.exceptions.ClientError(err_msg, 'Could not find you')
except Exception as e:
print("exception is " + str(e))
module.fail_json_aws(e, msg="Fake failure for testing boto exception messages")
assert(len(fail_json_double.mock_calls) > 0), "failed to call fail_json when should have"
assert(len(fail_json_double.mock_calls) < 2), "called fail_json multiple times"
assert("test_botocore_exception_without_response_reports_nicely_via_fail_json_aws"
in fail_json_double.mock_calls[0][2]["exception"]), \
"exception traceback doesn't include correct function, fail call was actually: " \
+ str(fail_json_double.mock_calls[0])
assert("Fake failure for testing boto exception messages"
in fail_json_double.mock_calls[0][2]["msg"]), \
"error message doesn't include the local message; was: " \
+ str(fail_json_double.mock_calls[0])
# I would have thought this should work, however the botocore exception comes back with
# "argument of type 'NoneType' is not iterable" so it's probably not really designed
# to handle "None" as an error response.
#
# assert("Could not find you" in fail_json_double.mock_calls[0][2]["msg"]), \
# "error message doesn't include the botocore exception message; was: " \
# + str(fail_json_double.mock_calls[0])
# TODO:
# - an exception without a message
# - plain boto exception
# - socket errors and other standard things.
| gpl-3.0 |
zding5/Microblog-Flask | flask/lib/python2.7/site-packages/whoosh/analysis/intraword.py | 92 | 18991 | # Copyright 2007 Matt Chaput. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY MATT CHAPUT ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
# EVENT SHALL MATT CHAPUT OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation are
# those of the authors and should not be interpreted as representing official
# policies, either expressed or implied, of Matt Chaput.
import re
from collections import deque
from whoosh.compat import u, text_type
from whoosh.compat import xrange
from whoosh.analysis.filters import Filter
class CompoundWordFilter(Filter):
"""Given a set of words (or any object with a ``__contains__`` method),
break any tokens in the stream that are composites of words in the word set
into their individual parts.
Given the correct set of words, this filter can break apart run-together
words and trademarks (e.g. "turbosquid", "applescript"). It can also be
useful for agglutinative languages such as German.
The ``keep_compound`` argument lets you decide whether to keep the
compound word in the token stream along with the word segments.
>>> cwf = CompoundWordFilter(wordset, keep_compound=True)
>>> analyzer = RegexTokenizer(r"\S+") | cwf
    >>> [t.text for t in analyzer("I do not like greeneggs and ham")]
["I", "do", "not", "like", "greeneggs", "green", "eggs", "and", "ham"]
>>> cwf.keep_compound = False
    >>> [t.text for t in analyzer("I do not like greeneggs and ham")]
["I", "do", "not", "like", "green", "eggs", "and", "ham"]
"""
def __init__(self, wordset, keep_compound=True):
"""
:param wordset: an object with a ``__contains__`` method, such as a
set, containing strings to look for inside the tokens.
:param keep_compound: if True (the default), the original compound
token will be retained in the stream before the subwords.
"""
self.wordset = wordset
self.keep_compound = keep_compound
def subwords(self, s, memo):
if s in self.wordset:
return [s]
if s in memo:
return memo[s]
for i in xrange(1, len(s)):
prefix = s[:i]
if prefix in self.wordset:
suffix = s[i:]
suffix_subs = self.subwords(suffix, memo)
if suffix_subs:
result = [prefix] + suffix_subs
memo[s] = result
return result
return None
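    # A quick sketch of the recursion above (hedged; the wordset contents are
    # hypothetical):
    #   with wordset = set(["green", "eggs"]):
    #     subwords("greeneggs", {}) -> ["green", "eggs"]
    #     subwords("greenham", {})  -> None (no complete decomposition)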
def __call__(self, tokens):
keep_compound = self.keep_compound
memo = {}
subwords = self.subwords
for t in tokens:
subs = subwords(t.text, memo)
if subs:
if len(subs) > 1 and keep_compound:
yield t
for subword in subs:
t.text = subword
yield t
else:
yield t
class BiWordFilter(Filter):
"""Merges adjacent tokens into "bi-word" tokens, so that for example::
"the", "sign", "of", "four"
becomes::
"the-sign", "sign-of", "of-four"
    This can be used to create fields for pseudo-phrase searching, where, if
    all the terms match, the document probably contains the phrase, but the
    searching is faster than actually doing a phrase search on individual word
    terms.
The ``BiWordFilter`` is much faster than using the otherwise equivalent
``ShingleFilter(2)``.
"""
def __init__(self, sep="-"):
self.sep = sep
def __call__(self, tokens):
sep = self.sep
prev_text = None
prev_startchar = None
prev_pos = None
atleastone = False
for token in tokens:
# Save the original text of this token
text = token.text
# Save the original position
positions = token.positions
if positions:
ps = token.pos
# Save the original start char
chars = token.chars
if chars:
sc = token.startchar
if prev_text is not None:
# Use the pos and startchar from the previous token
if positions:
token.pos = prev_pos
if chars:
token.startchar = prev_startchar
# Join the previous token text and the current token text to
# form the biword token
token.text = "".join((prev_text, sep, text))
yield token
atleastone = True
# Save the originals and the new "previous" values
prev_text = text
if chars:
prev_startchar = sc
if positions:
prev_pos = ps
# If no bi-words were emitted, that is, the token stream only had
# a single token, then emit that single token.
if not atleastone:
yield token
class ShingleFilter(Filter):
"""Merges a certain number of adjacent tokens into multi-word tokens, so
that for example::
"better", "a", "witty", "fool", "than", "a", "foolish", "wit"
with ``ShingleFilter(3, ' ')`` becomes::
'better a witty', 'a witty fool', 'witty fool than', 'fool than a',
'than a foolish', 'a foolish wit'
    This can be used to create fields for pseudo-phrase searching, where, if
    all the terms match, the document probably contains the phrase, but the
    searching is faster than actually doing a phrase search on individual word
    terms.
If you're using two-word shingles, you should use the functionally
equivalent ``BiWordFilter`` instead because it's faster than
``ShingleFilter``.
"""
def __init__(self, size=2, sep="-"):
self.size = size
self.sep = sep
def __call__(self, tokens):
size = self.size
sep = self.sep
buf = deque()
atleastone = False
def make_token():
tk = buf[0]
tk.text = sep.join([t.text for t in buf])
if tk.chars:
tk.endchar = buf[-1].endchar
return tk
for token in tokens:
if not token.stopped:
buf.append(token.copy())
if len(buf) == size:
atleastone = True
yield make_token()
buf.popleft()
# If no shingles were emitted, that is, the token stream had fewer than
# 'size' tokens, then emit a single token with whatever tokens there
# were
if not atleastone and buf:
yield make_token()
class IntraWordFilter(Filter):
"""Splits words into subwords and performs optional transformations on
    subword groups. This filter is functionally based on yonik's
WordDelimiterFilter in Solr, but shares no code with it.
* Split on intra-word delimiters, e.g. `Wi-Fi` -> `Wi`, `Fi`.
* When splitwords=True, split on case transitions,
e.g. `PowerShot` -> `Power`, `Shot`.
* When splitnums=True, split on letter-number transitions,
e.g. `SD500` -> `SD`, `500`.
* Leading and trailing delimiter characters are ignored.
    * Trailing possessive "'s" removed from subwords,
e.g. `O'Neil's` -> `O`, `Neil`.
The mergewords and mergenums arguments turn on merging of subwords.
When the merge arguments are false, subwords are not merged.
* `PowerShot` -> `0`:`Power`, `1`:`Shot` (where `0` and `1` are token
positions).
When one or both of the merge arguments are true, consecutive runs of
alphabetic and/or numeric subwords are merged into an additional token with
the same position as the last sub-word.
* `PowerShot` -> `0`:`Power`, `1`:`Shot`, `1`:`PowerShot`
* `A's+B's&C's` -> `0`:`A`, `1`:`B`, `2`:`C`, `2`:`ABC`
* `Super-Duper-XL500-42-AutoCoder!` -> `0`:`Super`, `1`:`Duper`, `2`:`XL`,
`2`:`SuperDuperXL`,
`3`:`500`, `4`:`42`, `4`:`50042`, `5`:`Auto`, `6`:`Coder`,
`6`:`AutoCoder`
When using this filter you should use a tokenizer that only splits on
whitespace, so the tokenizer does not remove intra-word delimiters before
this filter can see them, and put this filter before any use of
LowercaseFilter.
>>> rt = RegexTokenizer(r"\\S+")
>>> iwf = IntraWordFilter()
>>> lcf = LowercaseFilter()
>>> analyzer = rt | iwf | lcf
One use for this filter is to help match different written representations
of a concept. For example, if the source text contained `wi-fi`, you
probably want `wifi`, `WiFi`, `wi-fi`, etc. to match. One way of doing this
is to specify mergewords=True and/or mergenums=True in the analyzer used
for indexing, and mergewords=False / mergenums=False in the analyzer used
for querying.
>>> iwf_i = IntraWordFilter(mergewords=True, mergenums=True)
>>> iwf_q = IntraWordFilter(mergewords=False, mergenums=False)
>>> iwf = MultiFilter(index=iwf_i, query=iwf_q)
>>> analyzer = RegexTokenizer(r"\S+") | iwf | LowercaseFilter()
(See :class:`MultiFilter`.)
"""
is_morph = True
__inittypes__ = dict(delims=text_type, splitwords=bool, splitnums=bool,
mergewords=bool, mergenums=bool)
def __init__(self, delims=u("-_'\"()!@#$%^&*[]{}<>\|;:,./?`~=+"),
splitwords=True, splitnums=True,
mergewords=False, mergenums=False):
"""
:param delims: a string of delimiter characters.
:param splitwords: if True, split at case transitions,
e.g. `PowerShot` -> `Power`, `Shot`
:param splitnums: if True, split at letter-number transitions,
e.g. `SD500` -> `SD`, `500`
:param mergewords: merge consecutive runs of alphabetic subwords into
an additional token with the same position as the last subword.
:param mergenums: merge consecutive runs of numeric subwords into an
additional token with the same position as the last subword.
"""
from whoosh.support.unicode import digits, lowercase, uppercase
self.delims = re.escape(delims)
# Expression for text between delimiter characters
self.between = re.compile(u("[^%s]+") % (self.delims,), re.UNICODE)
# Expression for removing "'s" from the end of sub-words
dispat = u("(?<=[%s%s])'[Ss](?=$|[%s])") % (lowercase, uppercase,
self.delims)
self.possessive = re.compile(dispat, re.UNICODE)
# Expression for finding case and letter-number transitions
lower2upper = u("[%s][%s]") % (lowercase, uppercase)
letter2digit = u("[%s%s][%s]") % (lowercase, uppercase, digits)
digit2letter = u("[%s][%s%s]") % (digits, lowercase, uppercase)
if splitwords and splitnums:
splitpat = u("(%s|%s|%s)") % (lower2upper, letter2digit,
digit2letter)
self.boundary = re.compile(splitpat, re.UNICODE)
elif splitwords:
self.boundary = re.compile(text_type(lower2upper), re.UNICODE)
elif splitnums:
numpat = u("(%s|%s)") % (letter2digit, digit2letter)
self.boundary = re.compile(numpat, re.UNICODE)
self.splitting = splitwords or splitnums
self.mergewords = mergewords
self.mergenums = mergenums
def __eq__(self, other):
return other and self.__class__ is other.__class__\
and self.__dict__ == other.__dict__
def _split(self, string):
bound = self.boundary
# Yields (startchar, endchar) pairs for each indexable substring in
# the given string, e.g. "WikiWord" -> (0, 4), (4, 8)
# Whether we're splitting on transitions (case changes, letter -> num,
# num -> letter, etc.)
splitting = self.splitting
# Make a list (dispos, for "dispossessed") of (startchar, endchar)
# pairs for runs of text between "'s"
if "'" in string:
# Split on possessive 's
dispos = []
prev = 0
for match in self.possessive.finditer(string):
dispos.append((prev, match.start()))
prev = match.end()
if prev < len(string):
dispos.append((prev, len(string)))
else:
# Shortcut if there's no apostrophe in the string
dispos = ((0, len(string)),)
# For each run between 's
for sc, ec in dispos:
# Split on boundary characters
for part_match in self.between.finditer(string, sc, ec):
part_start = part_match.start()
part_end = part_match.end()
if splitting:
# The point to start splitting at
prev = part_start
# Find transitions (e.g. "iW" or "a0")
for bmatch in bound.finditer(string, part_start, part_end):
# The point in the middle of the transition
pivot = bmatch.start() + 1
# Yield from the previous match to the transition
yield (prev, pivot)
# Make the transition the new starting point
prev = pivot
# If there's leftover text at the end, yield it too
if prev < part_end:
yield (prev, part_end)
else:
# Not splitting on transitions, just yield the part
yield (part_start, part_end)
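    # Worked example (hedged, default settings): for "Super-Duper-XL500" the
    # delimiter pattern yields the runs "Super", "Duper", and "XL500", and
    # the letter-to-digit boundary then splits the last run, so the
    # (startchar, endchar) pairs cover "Super", "Duper", "XL", "500".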
def _merge(self, parts):
mergewords = self.mergewords
mergenums = self.mergenums
        # Current type (1=alpha, 2=digit)
last = 0
# Where to insert a merged term in the original list
insertat = 0
# Buffer for parts to merge
buf = []
# Iterate on a copy of the parts list so we can modify the original as
# we go
def insert_item(buf, at, newpos):
newtext = "".join(item[0] for item in buf)
newsc = buf[0][2] # start char of first item in buffer
newec = buf[-1][3] # end char of last item in buffer
            parts.insert(at, (newtext, newpos, newsc, newec))
for item in list(parts):
# item = (text, pos, startchar, endchar)
text = item[0]
pos = item[1]
# Set the type of this part
if text.isalpha():
this = 1
elif text.isdigit():
this = 2
else:
this = None
# Is this the same type as the previous part?
if (buf and (this == last == 1 and mergewords)
or (this == last == 2 and mergenums)):
# This part is the same type as the previous. Add it to the
# buffer of parts to merge.
buf.append(item)
else:
# This part is different than the previous.
if len(buf) > 1:
# If the buffer has at least two parts in it, merge them
# and add them to the original list of parts.
insert_item(buf, insertat, pos - 1)
insertat += 1
# Reset the buffer
buf = [item]
last = this
insertat += 1
# If there are parts left in the buffer at the end, merge them and add
# them to the original list.
if len(buf) > 1:
insert_item(buf, len(parts), pos)
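    # Sketch of the merging above (hedged): with mergewords=True, the alpha
    # parts of "A's+B's&C's" accumulate in the buffer as A, B, C, and the
    # final insert_item call appends a merged ("ABC", 2, startchar, endchar)
    # tuple sharing the last subword's position, as in the class docstring.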
def __call__(self, tokens):
mergewords = self.mergewords
mergenums = self.mergenums
# This filter renumbers tokens as it expands them. New position
# counter.
newpos = None
for t in tokens:
text = t.text
# If this is the first token we've seen, use it to set the new
# position counter
if newpos is None:
if t.positions:
newpos = t.pos
else:
# Token doesn't have positions, just use 0
newpos = 0
if ((text.isalpha() and (text.islower() or text.isupper()))
or text.isdigit()):
# Short-circuit the common cases of no delimiters, no case
# transitions, only digits, etc.
t.pos = newpos
yield t
newpos += 1
else:
# Split the token text on delimiters, word and/or number
# boundaries into a list of (text, pos, startchar, endchar)
# tuples
ranges = self._split(text)
parts = [(text[sc:ec], i + newpos, sc, ec)
for i, (sc, ec) in enumerate(ranges)]
# Did the split yield more than one part?
if len(parts) > 1:
# If the options are set, merge consecutive runs of all-
# letters and/or all-numbers.
if mergewords or mergenums:
self._merge(parts)
# Yield tokens for the parts
chars = t.chars
if chars:
base = t.startchar
for text, pos, startchar, endchar in parts:
t.text = text
t.pos = pos
if t.chars:
t.startchar = base + startchar
t.endchar = base + endchar
yield t
if parts:
# Set the new position counter based on the last part
newpos = parts[-1][1] + 1
| mit |
supunkamburugamuva/course-builder | models/utils.py | 4 | 2019 | # Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helper functions to work with various models."""
__author__ = 'Sean Lip ([email protected])'
import transforms
def set_answer(answers, assessment_name, answer):
"""Stores the answer array for the given student and assessment.
The caller must call answers.put() to commit.
This does not do any type-checking on 'answer'; it just stores whatever
is passed in.
Args:
answers: the StudentAnswers entity in which the answer should be stored.
assessment_name: the name of the assessment.
answer: an array containing the student's answers.
"""
if not answers.data:
score_dict = {}
else:
score_dict = transforms.loads(answers.data)
score_dict[assessment_name] = answer
answers.data = transforms.dumps(score_dict)
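# Typical call pattern (illustrative; 'answers' is a StudentAnswers entity
# and the assessment name is hypothetical):
#   set_answer(answers, 'midterm', [0, 2, 'b'])
#   answers.put()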
def set_score(student, assessment_name, score):
"""Stores the score for the given student and assessment.
The caller must call student.put() to commit.
This does not do any type-checking on 'score'; it just stores whatever
is passed in.
Args:
student: the student whose answer should be stored.
assessment_name: the name of the assessment.
score: the student's score.
"""
if not student.scores:
score_dict = {}
else:
score_dict = transforms.loads(student.scores)
score_dict[assessment_name] = score
student.scores = transforms.dumps(score_dict)
| apache-2.0 |
jwlawson/tensorflow | tensorflow/python/training/basic_loops_test.py | 142 | 3191 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for basic_loops.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import shutil
from tensorflow.python.framework import errors_impl
from tensorflow.python.framework import ops
from tensorflow.python.platform import test
from tensorflow.python.training import basic_loops
from tensorflow.python.training import supervisor
def _test_dir(test_name):
test_dir = os.path.join(test.get_temp_dir(), test_name)
if os.path.exists(test_dir):
shutil.rmtree(test_dir)
return test_dir
class BasicTrainLoopTest(test.TestCase):
def testBasicTrainLoop(self):
logdir = _test_dir("basic_train_loop")
sv = supervisor.Supervisor(logdir=logdir)
# Counts the number of calls.
num_calls = [0]
def train_fn(unused_sess, sv, y, a):
num_calls[0] += 1
self.assertEqual("y", y)
self.assertEqual("A", a)
if num_calls[0] == 3:
sv.request_stop()
with ops.Graph().as_default():
basic_loops.basic_train_loop(
sv, train_fn, args=(sv, "y"), kwargs={"a": "A"})
self.assertEqual(3, num_calls[0])
def testBasicTrainLoopExceptionAborts(self):
logdir = _test_dir("basic_train_loop_exception_aborts")
sv = supervisor.Supervisor(logdir=logdir)
def train_fn(unused_sess):
train_fn.counter += 1
if train_fn.counter == 3:
raise RuntimeError("Failed")
# Function attribute use to count the number of calls.
train_fn.counter = 0
with ops.Graph().as_default():
with self.assertRaisesRegexp(RuntimeError, "Failed"):
basic_loops.basic_train_loop(sv, train_fn)
def testBasicTrainLoopRetryOnAborted(self):
logdir = _test_dir("basic_train_loop_exception_aborts")
sv = supervisor.Supervisor(logdir=logdir)
class AbortAndRetry(object):
def __init__(self):
self.num_calls = 0
self.retries_left = 2
def train_fn(self, unused_sess):
self.num_calls += 1
if self.num_calls % 3 == 2:
self.retries_left -= 1
if self.retries_left > 0:
raise errors_impl.AbortedError(None, None, "Aborted here")
else:
raise RuntimeError("Failed Again")
with ops.Graph().as_default():
aar = AbortAndRetry()
with self.assertRaisesRegexp(RuntimeError, "Failed Again"):
basic_loops.basic_train_loop(sv, aar.train_fn)
self.assertEquals(0, aar.retries_left)
if __name__ == "__main__":
test.main()
| apache-2.0 |
open-synergy/server-tools | mail_environment/__openerp__.py | 17 | 2173 | # -*- coding: utf-8 -*-
##############################################################################
#
# Author: Nicolas Bessi
# Copyright 2012 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Server env config for mail + fetchmail',
'version': '8.0.0.1.0',
'category': 'Tools',
'description': """
Extend mail and fetchmail with the server environment module.
In config files, the sections outgoing_mail and incoming_mail provide default
values for all Outgoing Mail Servers and Fetchmail Servers.
For each server, you can (re)define values with a section named
"outgoing_mail.resource_name" where resource_name is the name of your server.
Example of a config file:
[outgoing_mail]
smtp_host = smtp.myserver.com
smtp_port = 587
smtp_user =
smtp_pass =
smtp_encryption = ssl
[outgoing_mail.openerp_smtp_server1]
smtp_user = openerp
smtp_pass = openerp
[incoming_mail.openerp_pop_mail1]
server = mail.myserver.com
port = 110
type = pop
is_ssl = 0
attach = 0
original = 0
user = [email protected]
password = openerp
""",
'author': "Camptocamp,Odoo Community Association (OCA)",
'license': 'AGPL-3',
'website': 'http://openerp.camptocamp.com',
'depends': ['mail',
'fetchmail',
'server_environment',
'server_environment_files',
],
'data': ['mail_view.xml'],
'demo': [],
'installable': True,
'active': False,
}
| agpl-3.0 |
2014c2g6/c2g6 | exts/wsgi/static/Brython2.1.0-20140419-113919/Lib/binascii.py | 103 | 24362 | """A pure Python implementation of binascii.
Rather slow and buggy in corner cases.
PyPy provides an RPython version too.
"""
# borrowed from https://bitbucket.org/pypy/pypy/src/f2bf94943a41/lib_pypy/binascii.py
class Error(Exception):
pass
class Done(Exception):
pass
class Incomplete(Exception):
pass
def a2b_uu(s):
if not s:
return ''
length = (ord(s[0]) - 0x20) % 64
def quadruplets_gen(s):
while s:
try:
yield ord(s[0]), ord(s[1]), ord(s[2]), ord(s[3])
except IndexError:
                s += '   '
yield ord(s[0]), ord(s[1]), ord(s[2]), ord(s[3])
return
s = s[4:]
try:
result = [''.join(
[chr((A - 0x20) << 2 | (((B - 0x20) >> 4) & 0x3)),
chr(((B - 0x20) & 0xf) << 4 | (((C - 0x20) >> 2) & 0xf)),
chr(((C - 0x20) & 0x3) << 6 | ((D - 0x20) & 0x3f))
]) for A, B, C, D in quadruplets_gen(s[1:].rstrip())]
except ValueError:
raise Error('Illegal char')
result = ''.join(result)
trailingdata = result[length:]
if trailingdata.strip('\x00'):
raise Error('Trailing garbage')
result = result[:length]
if len(result) < length:
result += ((length - len(result)) * '\x00')
return result
def b2a_uu(s):
length = len(s)
if length > 45:
raise Error('At most 45 bytes at once')
def triples_gen(s):
while s:
try:
yield ord(s[0]), ord(s[1]), ord(s[2])
except IndexError:
s += '\0\0'
yield ord(s[0]), ord(s[1]), ord(s[2])
return
s = s[3:]
result = [''.join(
[chr(0x20 + (( A >> 2 ) & 0x3F)),
chr(0x20 + (((A << 4) | ((B >> 4) & 0xF)) & 0x3F)),
chr(0x20 + (((B << 2) | ((C >> 6) & 0x3)) & 0x3F)),
chr(0x20 + (( C ) & 0x3F))])
for A, B, C in triples_gen(s)]
    return chr(ord(' ') + (length & 0o77)) + ''.join(result) + '\n'
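# Round-trip sanity check (illustrative, computed from the bit shuffling
# above using py2-style byte strings):
#   b2a_uu('abc') == '#86)C\n'
#   a2b_uu('#86)C\n') == 'abc'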
table_a2b_base64 = {
'A': 0,
'B': 1,
'C': 2,
'D': 3,
'E': 4,
'F': 5,
'G': 6,
'H': 7,
'I': 8,
'J': 9,
'K': 10,
'L': 11,
'M': 12,
'N': 13,
'O': 14,
'P': 15,
'Q': 16,
'R': 17,
'S': 18,
'T': 19,
'U': 20,
'V': 21,
'W': 22,
'X': 23,
'Y': 24,
'Z': 25,
'a': 26,
'b': 27,
'c': 28,
'd': 29,
'e': 30,
'f': 31,
'g': 32,
'h': 33,
'i': 34,
'j': 35,
'k': 36,
'l': 37,
'm': 38,
'n': 39,
'o': 40,
'p': 41,
'q': 42,
'r': 43,
's': 44,
't': 45,
'u': 46,
'v': 47,
'w': 48,
'x': 49,
'y': 50,
'z': 51,
'0': 52,
'1': 53,
'2': 54,
'3': 55,
'4': 56,
'5': 57,
'6': 58,
'7': 59,
'8': 60,
'9': 61,
'+': 62,
'/': 63,
'=': 0,
}
def a2b_base64(s):
if not isinstance(s, (str, unicode)):
raise TypeError("expected string or unicode, got %r" % (s,))
s = s.rstrip()
# clean out all invalid characters, this also strips the final '=' padding
# check for correct padding
def next_valid_char(s, pos):
for i in range(pos + 1, len(s)):
c = s[i]
if c < '\x7f':
try:
table_a2b_base64[c]
return c
except KeyError:
pass
return None
quad_pos = 0
leftbits = 0
leftchar = 0
res = []
for i, c in enumerate(s):
if c > '\x7f' or c == '\n' or c == '\r' or c == ' ':
continue
if c == '=':
if quad_pos < 2 or (quad_pos == 2 and next_valid_char(s, i) != '='):
continue
else:
leftbits = 0
break
try:
next_c = table_a2b_base64[c]
except KeyError:
continue
quad_pos = (quad_pos + 1) & 0x03
leftchar = (leftchar << 6) | next_c
leftbits += 6
if leftbits >= 8:
leftbits -= 8
res.append((leftchar >> leftbits & 0xff))
leftchar &= ((1 << leftbits) - 1)
if leftbits != 0:
raise Error('Incorrect padding')
return ''.join([chr(i) for i in res])
table_b2a_base64 = \
"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
def b2a_base64(s):
length = len(s)
final_length = length % 3
def triples_gen(s):
while s:
try:
yield ord(s[0]), ord(s[1]), ord(s[2])
except IndexError:
s += '\0\0'
yield ord(s[0]), ord(s[1]), ord(s[2])
return
s = s[3:]
a = triples_gen(s[ :length - final_length])
result = [''.join(
[table_b2a_base64[( A >> 2 ) & 0x3F],
table_b2a_base64[((A << 4) | ((B >> 4) & 0xF)) & 0x3F],
table_b2a_base64[((B << 2) | ((C >> 6) & 0x3)) & 0x3F],
table_b2a_base64[( C ) & 0x3F]])
for A, B, C in a]
final = s[length - final_length:]
if final_length == 0:
snippet = ''
elif final_length == 1:
a = ord(final[0])
snippet = table_b2a_base64[(a >> 2 ) & 0x3F] + \
table_b2a_base64[(a << 4 ) & 0x3F] + '=='
else:
a = ord(final[0])
b = ord(final[1])
snippet = table_b2a_base64[(a >> 2) & 0x3F] + \
table_b2a_base64[((a << 4) | (b >> 4) & 0xF) & 0x3F] + \
table_b2a_base64[(b << 2) & 0x3F] + '='
return ''.join(result) + snippet + '\n'
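# Quick sanity sketch for the base64 pair above (illustrative):
#
#     >>> b2a_base64('hello')
#     'aGVsbG8=\n'
#     >>> a2b_base64('aGVsbG8=\n')
#     'hello'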
def a2b_qp(s, header=False):
inp = 0
odata = []
while inp < len(s):
if s[inp] == '=':
inp += 1
if inp >= len(s):
break
# Soft line breaks
if (s[inp] == '\n') or (s[inp] == '\r'):
if s[inp] != '\n':
while inp < len(s) and s[inp] != '\n':
inp += 1
if inp < len(s):
inp += 1
elif s[inp] == '=':
# broken case from broken python qp
odata.append('=')
inp += 1
elif s[inp] in hex_numbers and s[inp + 1] in hex_numbers:
ch = chr(int(s[inp:inp+2], 16))
inp += 2
odata.append(ch)
else:
odata.append('=')
elif header and s[inp] == '_':
odata.append(' ')
inp += 1
else:
odata.append(s[inp])
inp += 1
return ''.join(odata)
def b2a_qp(data, quotetabs=False, istext=True, header=False):
"""quotetabs=True means that tab and space characters are always
quoted.
istext=False means that \r and \n are treated as regular characters
header=True encodes space characters with '_' and requires
real '_' characters to be quoted.
"""
MAXLINESIZE = 76
# See if this string is using CRLF line ends
lf = data.find('\n')
crlf = lf > 0 and data[lf-1] == '\r'
inp = 0
linelen = 0
odata = []
while inp < len(data):
c = data[inp]
if (c > '~' or
c == '=' or
(header and c == '_') or
(c == '.' and linelen == 0 and (inp+1 == len(data) or
data[inp+1] == '\n' or
data[inp+1] == '\r')) or
(not istext and (c == '\r' or c == '\n')) or
((c == '\t' or c == ' ') and (inp + 1 == len(data))) or
(c <= ' ' and c != '\r' and c != '\n' and
(quotetabs or (not quotetabs and (c != '\t' and c != ' '))))):
linelen += 3
if linelen >= MAXLINESIZE:
odata.append('=')
if crlf: odata.append('\r')
odata.append('\n')
linelen = 3
odata.append('=' + two_hex_digits(ord(c)))
inp += 1
else:
if (istext and
(c == '\n' or (inp+1 < len(data) and c == '\r' and
data[inp+1] == '\n'))):
linelen = 0
# Protect against whitespace on end of line
if (len(odata) > 0 and
(odata[-1] == ' ' or odata[-1] == '\t')):
ch = ord(odata[-1])
odata[-1] = '='
odata.append(two_hex_digits(ch))
if crlf: odata.append('\r')
odata.append('\n')
if c == '\r':
inp += 2
else:
inp += 1
else:
if (inp + 1 < len(data) and
data[inp+1] != '\n' and
(linelen + 1) >= MAXLINESIZE):
odata.append('=')
if crlf: odata.append('\r')
odata.append('\n')
linelen = 0
linelen += 1
if header and c == ' ':
c = '_'
odata.append(c)
inp += 1
return ''.join(odata)
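# Hedged example for the quoted-printable pair above, following the quoting
# rules from the docstring (a byte > '~' is emitted as '=' plus two hex digits):
#
#     >>> b2a_qp('caf\xe9')
#     'caf=E9'
#     >>> a2b_qp('caf=E9')
#     'caf\xe9'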
hex_numbers = '0123456789ABCDEF'
def hex(n):
if n == 0:
return '0'
if n < 0:
n = -n
sign = '-'
else:
sign = ''
arr = []
def hex_gen(n):
""" Yield a nibble at a time. """
while n:
yield n % 0x10
n = n / 0x10
for nibble in hex_gen(n):
arr = [hex_numbers[nibble]] + arr
return sign + ''.join(arr)
def two_hex_digits(n):
return hex_numbers[n / 0x10] + hex_numbers[n % 0x10]
def strhex_to_int(s):
i = 0
for c in s:
i = i * 0x10 + hex_numbers.index(c)
return i
hqx_encoding = '!"#$%&\'()*+,-012345689@ABCDEFGHIJKLMNPQRSTUVXYZ[`abcdefhijklmpqr'
DONE = 0x7f
SKIP = 0x7e
FAIL = 0x7d
table_a2b_hqx = [
#^@ ^A ^B ^C ^D ^E ^F ^G
FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL,
#\b \t \n ^K ^L \r ^N ^O
FAIL, FAIL, SKIP, FAIL, FAIL, SKIP, FAIL, FAIL,
#^P ^Q ^R ^S ^T ^U ^V ^W
FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL,
#^X ^Y ^Z ^[ ^\ ^] ^^ ^_
FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL,
# ! " # $ % & '
FAIL, 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06,
#( ) * + , - . /
0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0C, FAIL, FAIL,
#0 1 2 3 4 5 6 7
0x0D, 0x0E, 0x0F, 0x10, 0x11, 0x12, 0x13, FAIL,
#8 9 : ; < = > ?
0x14, 0x15, DONE, FAIL, FAIL, FAIL, FAIL, FAIL,
#@ A B C D E F G
0x16, 0x17, 0x18, 0x19, 0x1A, 0x1B, 0x1C, 0x1D,
#H I J K L M N O
0x1E, 0x1F, 0x20, 0x21, 0x22, 0x23, 0x24, FAIL,
#P Q R S T U V W
0x25, 0x26, 0x27, 0x28, 0x29, 0x2A, 0x2B, FAIL,
#X Y Z [ \ ] ^ _
0x2C, 0x2D, 0x2E, 0x2F, FAIL, FAIL, FAIL, FAIL,
#` a b c d e f g
0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, FAIL,
#h i j k l m n o
0x37, 0x38, 0x39, 0x3A, 0x3B, 0x3C, FAIL, FAIL,
#p q r s t u v w
0x3D, 0x3E, 0x3F, FAIL, FAIL, FAIL, FAIL, FAIL,
#x y z { | } ~ ^?
FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL,
FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL,
FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL,
FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL,
FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL,
FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL,
FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL,
FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL,
FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL,
FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL,
FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL,
FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL,
FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL,
FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL,
FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL,
FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL,
FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL, FAIL,
]
def a2b_hqx(s):
result = []
def quadruples_gen(s):
t = []
for c in s:
res = table_a2b_hqx[ord(c)]
if res == SKIP:
continue
elif res == FAIL:
raise Error('Illegal character')
elif res == DONE:
yield t
raise Done
else:
t.append(res)
if len(t) == 4:
yield t
t = []
yield t
done = 0
try:
for snippet in quadruples_gen(s):
length = len(snippet)
if length == 4:
result.append(chr(((snippet[0] & 0x3f) << 2) | (snippet[1] >> 4)))
result.append(chr(((snippet[1] & 0x0f) << 4) | (snippet[2] >> 2)))
result.append(chr(((snippet[2] & 0x03) << 6) | (snippet[3])))
elif length == 3:
result.append(chr(((snippet[0] & 0x3f) << 2) | (snippet[1] >> 4)))
result.append(chr(((snippet[1] & 0x0f) << 4) | (snippet[2] >> 2)))
elif length == 2:
result.append(chr(((snippet[0] & 0x3f) << 2) | (snippet[1] >> 4)))
except Done:
done = 1
except Error:
raise
return (''.join(result), done)
def b2a_hqx(s):
result =[]
def triples_gen(s):
while s:
try:
yield ord(s[0]), ord(s[1]), ord(s[2])
except IndexError:
yield tuple([ord(c) for c in s])
s = s[3:]
for snippet in triples_gen(s):
length = len(snippet)
if length == 3:
result.append(
hqx_encoding[(snippet[0] & 0xfc) >> 2])
result.append(hqx_encoding[
((snippet[0] & 0x03) << 4) | ((snippet[1] & 0xf0) >> 4)])
result.append(hqx_encoding[
(snippet[1] & 0x0f) << 2 | ((snippet[2] & 0xc0) >> 6)])
result.append(hqx_encoding[snippet[2] & 0x3f])
elif length == 2:
result.append(
hqx_encoding[(snippet[0] & 0xfc) >> 2])
result.append(hqx_encoding[
((snippet[0] & 0x03) << 4) | ((snippet[1] & 0xf0) >> 4)])
result.append(hqx_encoding[
(snippet[1] & 0x0f) << 2])
elif length == 1:
result.append(
hqx_encoding[(snippet[0] & 0xfc) >> 2])
result.append(hqx_encoding[
((snippet[0] & 0x03) << 4)])
return ''.join(result)
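# Tiny worked example for the hqx pair above (hand-checked against the
# tables; note that a2b_hqx returns a (data, done) tuple):
#
#     >>> b2a_hqx('A')
#     '33'
#     >>> a2b_hqx('33')
#     ('A', 0)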
crctab_hqx = [
0x0000, 0x1021, 0x2042, 0x3063, 0x4084, 0x50a5, 0x60c6, 0x70e7,
0x8108, 0x9129, 0xa14a, 0xb16b, 0xc18c, 0xd1ad, 0xe1ce, 0xf1ef,
0x1231, 0x0210, 0x3273, 0x2252, 0x52b5, 0x4294, 0x72f7, 0x62d6,
0x9339, 0x8318, 0xb37b, 0xa35a, 0xd3bd, 0xc39c, 0xf3ff, 0xe3de,
0x2462, 0x3443, 0x0420, 0x1401, 0x64e6, 0x74c7, 0x44a4, 0x5485,
0xa56a, 0xb54b, 0x8528, 0x9509, 0xe5ee, 0xf5cf, 0xc5ac, 0xd58d,
0x3653, 0x2672, 0x1611, 0x0630, 0x76d7, 0x66f6, 0x5695, 0x46b4,
0xb75b, 0xa77a, 0x9719, 0x8738, 0xf7df, 0xe7fe, 0xd79d, 0xc7bc,
0x48c4, 0x58e5, 0x6886, 0x78a7, 0x0840, 0x1861, 0x2802, 0x3823,
0xc9cc, 0xd9ed, 0xe98e, 0xf9af, 0x8948, 0x9969, 0xa90a, 0xb92b,
0x5af5, 0x4ad4, 0x7ab7, 0x6a96, 0x1a71, 0x0a50, 0x3a33, 0x2a12,
0xdbfd, 0xcbdc, 0xfbbf, 0xeb9e, 0x9b79, 0x8b58, 0xbb3b, 0xab1a,
0x6ca6, 0x7c87, 0x4ce4, 0x5cc5, 0x2c22, 0x3c03, 0x0c60, 0x1c41,
0xedae, 0xfd8f, 0xcdec, 0xddcd, 0xad2a, 0xbd0b, 0x8d68, 0x9d49,
0x7e97, 0x6eb6, 0x5ed5, 0x4ef4, 0x3e13, 0x2e32, 0x1e51, 0x0e70,
0xff9f, 0xefbe, 0xdfdd, 0xcffc, 0xbf1b, 0xaf3a, 0x9f59, 0x8f78,
0x9188, 0x81a9, 0xb1ca, 0xa1eb, 0xd10c, 0xc12d, 0xf14e, 0xe16f,
0x1080, 0x00a1, 0x30c2, 0x20e3, 0x5004, 0x4025, 0x7046, 0x6067,
0x83b9, 0x9398, 0xa3fb, 0xb3da, 0xc33d, 0xd31c, 0xe37f, 0xf35e,
0x02b1, 0x1290, 0x22f3, 0x32d2, 0x4235, 0x5214, 0x6277, 0x7256,
0xb5ea, 0xa5cb, 0x95a8, 0x8589, 0xf56e, 0xe54f, 0xd52c, 0xc50d,
0x34e2, 0x24c3, 0x14a0, 0x0481, 0x7466, 0x6447, 0x5424, 0x4405,
0xa7db, 0xb7fa, 0x8799, 0x97b8, 0xe75f, 0xf77e, 0xc71d, 0xd73c,
0x26d3, 0x36f2, 0x0691, 0x16b0, 0x6657, 0x7676, 0x4615, 0x5634,
0xd94c, 0xc96d, 0xf90e, 0xe92f, 0x99c8, 0x89e9, 0xb98a, 0xa9ab,
0x5844, 0x4865, 0x7806, 0x6827, 0x18c0, 0x08e1, 0x3882, 0x28a3,
0xcb7d, 0xdb5c, 0xeb3f, 0xfb1e, 0x8bf9, 0x9bd8, 0xabbb, 0xbb9a,
0x4a75, 0x5a54, 0x6a37, 0x7a16, 0x0af1, 0x1ad0, 0x2ab3, 0x3a92,
0xfd2e, 0xed0f, 0xdd6c, 0xcd4d, 0xbdaa, 0xad8b, 0x9de8, 0x8dc9,
0x7c26, 0x6c07, 0x5c64, 0x4c45, 0x3ca2, 0x2c83, 0x1ce0, 0x0cc1,
0xef1f, 0xff3e, 0xcf5d, 0xdf7c, 0xaf9b, 0xbfba, 0x8fd9, 0x9ff8,
0x6e17, 0x7e36, 0x4e55, 0x5e74, 0x2e93, 0x3eb2, 0x0ed1, 0x1ef0,
]
def crc_hqx(s, crc):
for c in s:
crc = ((crc << 8) & 0xff00) ^ crctab_hqx[((crc >> 8) & 0xff) ^ ord(c)]
return crc
def rlecode_hqx(s):
"""
Run length encoding for binhex4.
The CPython implementation does not do run length encoding
of \x90 characters. This implementation does.
"""
if not s:
return ''
result = []
prev = s[0]
count = 1
# Add a dummy character to get the loop to go one extra round.
# The dummy must be different from the last character of s.
# In the same step we remove the first character, which has
# already been stored in prev.
if s[-1] == '!':
s = s[1:] + '?'
else:
s = s[1:] + '!'
for c in s:
if c == prev and count < 255:
count += 1
else:
if count == 1:
if prev != '\x90':
result.append(prev)
else:
result.extend(['\x90', '\x00'])
elif count < 4:
if prev != '\x90':
result.extend([prev] * count)
else:
result.extend(['\x90', '\x00'] * count)
else:
if prev != '\x90':
result.extend([prev, '\x90', chr(count)])
else:
result.extend(['\x90', '\x00', '\x90', chr(count)])
count = 1
prev = c
return ''.join(result)
def rledecode_hqx(s):
s = s.split('\x90')
result = [s[0]]
prev = s[0]
for snippet in s[1:]:
count = ord(snippet[0])
if count > 0:
result.append(prev[-1] * (count-1))
prev = snippet
else:
result.append('\x90')
prev = '\x90'
result.append(snippet[1:])
return ''.join(result)
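# Round-trip sketch for the binhex4 RLE pair above: a run of six 'a's packs
# into the marker byte '\x90' followed by the run count (illustrative only):
#
#     >>> rlecode_hqx('a' * 6)
#     'a\x90\x06'
#     >>> rledecode_hqx('a\x90\x06')
#     'aaaaaa'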
crc_32_tab = [
0x00000000, 0x77073096, 0xee0e612c, 0x990951ba, 0x076dc419,
0x706af48f, 0xe963a535, 0x9e6495a3, 0x0edb8832, 0x79dcb8a4,
0xe0d5e91e, 0x97d2d988, 0x09b64c2b, 0x7eb17cbd, 0xe7b82d07,
0x90bf1d91, 0x1db71064, 0x6ab020f2, 0xf3b97148, 0x84be41de,
0x1adad47d, 0x6ddde4eb, 0xf4d4b551, 0x83d385c7, 0x136c9856,
0x646ba8c0, 0xfd62f97a, 0x8a65c9ec, 0x14015c4f, 0x63066cd9,
0xfa0f3d63, 0x8d080df5, 0x3b6e20c8, 0x4c69105e, 0xd56041e4,
0xa2677172, 0x3c03e4d1, 0x4b04d447, 0xd20d85fd, 0xa50ab56b,
0x35b5a8fa, 0x42b2986c, 0xdbbbc9d6, 0xacbcf940, 0x32d86ce3,
0x45df5c75, 0xdcd60dcf, 0xabd13d59, 0x26d930ac, 0x51de003a,
0xc8d75180, 0xbfd06116, 0x21b4f4b5, 0x56b3c423, 0xcfba9599,
0xb8bda50f, 0x2802b89e, 0x5f058808, 0xc60cd9b2, 0xb10be924,
0x2f6f7c87, 0x58684c11, 0xc1611dab, 0xb6662d3d, 0x76dc4190,
0x01db7106, 0x98d220bc, 0xefd5102a, 0x71b18589, 0x06b6b51f,
0x9fbfe4a5, 0xe8b8d433, 0x7807c9a2, 0x0f00f934, 0x9609a88e,
0xe10e9818, 0x7f6a0dbb, 0x086d3d2d, 0x91646c97, 0xe6635c01,
0x6b6b51f4, 0x1c6c6162, 0x856530d8, 0xf262004e, 0x6c0695ed,
0x1b01a57b, 0x8208f4c1, 0xf50fc457, 0x65b0d9c6, 0x12b7e950,
0x8bbeb8ea, 0xfcb9887c, 0x62dd1ddf, 0x15da2d49, 0x8cd37cf3,
0xfbd44c65, 0x4db26158, 0x3ab551ce, 0xa3bc0074, 0xd4bb30e2,
0x4adfa541, 0x3dd895d7, 0xa4d1c46d, 0xd3d6f4fb, 0x4369e96a,
0x346ed9fc, 0xad678846, 0xda60b8d0, 0x44042d73, 0x33031de5,
0xaa0a4c5f, 0xdd0d7cc9, 0x5005713c, 0x270241aa, 0xbe0b1010,
0xc90c2086, 0x5768b525, 0x206f85b3, 0xb966d409, 0xce61e49f,
0x5edef90e, 0x29d9c998, 0xb0d09822, 0xc7d7a8b4, 0x59b33d17,
0x2eb40d81, 0xb7bd5c3b, 0xc0ba6cad, 0xedb88320, 0x9abfb3b6,
0x03b6e20c, 0x74b1d29a, 0xead54739, 0x9dd277af, 0x04db2615,
0x73dc1683, 0xe3630b12, 0x94643b84, 0x0d6d6a3e, 0x7a6a5aa8,
0xe40ecf0b, 0x9309ff9d, 0x0a00ae27, 0x7d079eb1, 0xf00f9344,
0x8708a3d2, 0x1e01f268, 0x6906c2fe, 0xf762575d, 0x806567cb,
0x196c3671, 0x6e6b06e7, 0xfed41b76, 0x89d32be0, 0x10da7a5a,
0x67dd4acc, 0xf9b9df6f, 0x8ebeeff9, 0x17b7be43, 0x60b08ed5,
0xd6d6a3e8, 0xa1d1937e, 0x38d8c2c4, 0x4fdff252, 0xd1bb67f1,
0xa6bc5767, 0x3fb506dd, 0x48b2364b, 0xd80d2bda, 0xaf0a1b4c,
0x36034af6, 0x41047a60, 0xdf60efc3, 0xa867df55, 0x316e8eef,
0x4669be79, 0xcb61b38c, 0xbc66831a, 0x256fd2a0, 0x5268e236,
0xcc0c7795, 0xbb0b4703, 0x220216b9, 0x5505262f, 0xc5ba3bbe,
0xb2bd0b28, 0x2bb45a92, 0x5cb36a04, 0xc2d7ffa7, 0xb5d0cf31,
0x2cd99e8b, 0x5bdeae1d, 0x9b64c2b0, 0xec63f226, 0x756aa39c,
0x026d930a, 0x9c0906a9, 0xeb0e363f, 0x72076785, 0x05005713,
0x95bf4a82, 0xe2b87a14, 0x7bb12bae, 0x0cb61b38, 0x92d28e9b,
0xe5d5be0d, 0x7cdcefb7, 0x0bdbdf21, 0x86d3d2d4, 0xf1d4e242,
0x68ddb3f8, 0x1fda836e, 0x81be16cd, 0xf6b9265b, 0x6fb077e1,
0x18b74777, 0x88085ae6, 0xff0f6a70, 0x66063bca, 0x11010b5c,
0x8f659eff, 0xf862ae69, 0x616bffd3, 0x166ccf45, 0xa00ae278,
0xd70dd2ee, 0x4e048354, 0x3903b3c2, 0xa7672661, 0xd06016f7,
0x4969474d, 0x3e6e77db, 0xaed16a4a, 0xd9d65adc, 0x40df0b66,
0x37d83bf0, 0xa9bcae53, 0xdebb9ec5, 0x47b2cf7f, 0x30b5ffe9,
0xbdbdf21c, 0xcabac28a, 0x53b39330, 0x24b4a3a6, 0xbad03605,
0xcdd70693, 0x54de5729, 0x23d967bf, 0xb3667a2e, 0xc4614ab8,
0x5d681b02, 0x2a6f2b94, 0xb40bbe37, 0xc30c8ea1, 0x5a05df1b,
0x2d02ef8d
]
def crc32(s, crc=0):
result = 0
crc = ~int(crc) & 0xffffffff
#crc = ~long(crc) & 0xffffffffL
for c in s:
crc = crc_32_tab[(crc ^ int(ord(c))) & 0xff] ^ (crc >> 8)
#crc = crc_32_tab[(crc ^ long(ord(c))) & 0xffL] ^ (crc >> 8)
    # Note: (crc >> 8) MUST zero-fill on the left
result = crc ^ 0xffffffff
if result > 2**31:
result = ((result + 2**31) % 2**32) - 2**31
return result
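# Minimal check for crc32 above: with an empty input the preconditioned CRC
# is immediately inverted back, so the result is 0 (matches zlib.crc32('')):
#
#     >>> crc32('')
#     0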
def b2a_hex(s):
result = []
for char in s:
c = (ord(char) >> 4) & 0xf
if c > 9:
c = c + ord('a') - 10
else:
c = c + ord('0')
result.append(chr(c))
c = ord(char) & 0xf
if c > 9:
c = c + ord('a') - 10
else:
c = c + ord('0')
result.append(chr(c))
return ''.join(result)
hexlify = b2a_hex
table_hex = [
-1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1,
-1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1,
-1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1,
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,-1,-1, -1,-1,-1,-1,
-1,10,11,12, 13,14,15,-1, -1,-1,-1,-1, -1,-1,-1,-1,
-1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1,
-1,10,11,12, 13,14,15,-1, -1,-1,-1,-1, -1,-1,-1,-1,
-1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1
]
def a2b_hex(t):
result = []
def pairs_gen(s):
while s:
try:
yield table_hex[ord(s[0])], table_hex[ord(s[1])]
except IndexError:
if len(s):
raise TypeError('Odd-length string')
return
s = s[2:]
for a, b in pairs_gen(t):
if a < 0 or b < 0:
raise TypeError('Non-hexadecimal digit found')
result.append(chr((a << 4) + b))
return ''.join(result)
unhexlify = a2b_hex
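# Illustrative pair check for the hex codecs above:
#
#     >>> b2a_hex('\x01\xff')
#     '01ff'
#     >>> a2b_hex('01ff')
#     '\x01\xff'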
| gpl-2.0 |
benchmark-subsetting/cere | src/cere/regions_selector.py | 3 | 2931 | #!/usr/bin/env python
# This file is part of CERE.
#
# Copyright (c) 2013-2016, Universite de Versailles St-Quentin-en-Yvelines
#
# CERE is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License,
# or (at your option) any later version.
#
# CERE is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with CERE. If not, see <http://www.gnu.org/licenses/>.
import sys
import os
import cPickle as pickle
import networkx as nx
from graph_utils import *
import max_cov_update_graph as update_graph
import logging
import csv
import utils
import vars as var
logger = logging.getLogger('Max-Cov selector')
def solve(graph, max_error):
coverage = 0
nodes = []
for n, d in graph.nodes(data=True):
d["_selected"] = False
if d['_error'] <= max_error:
coverage = coverage + d['_self_coverage']
nodes.append(n)
d["_selected"] = True
if coverage > 100:
coverage = 100
return nodes, coverage
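# Hypothetical toy run of solve() (attribute names mirror the real graph;
# the node data below is invented for illustration):
#
#     import networkx as nx
#     g = nx.DiGraph()
#     g.add_node("a", _error=5, _self_coverage=40, _name="region_a")
#     g.add_node("b", _error=50, _self_coverage=30, _name="region_b")
#     solve(g, max_error=15)   # -> (["a"], 40); "b" exceeds the error budget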
def solve_with_best_granularity(args):
graph = load_graph()
if graph == None:
logger.critical("Can't load graph. did you run cere profile?")
return False
if( len(graph.nodes()) == 0):
logger.info('Graph is empty, nothing to select')
return True
    args.force = False
#Compute coverage for different error
error_filename = "{0}/table_error.csv".format(var.CERE_REPORT_PATH)
table = utils.Error_table()
args.max_error = 100
while args.max_error >= 5:
logger.info("Computing matching with a maximum error of {0}%".format(args.max_error))
update_graph.update(args)
graph = load_graph()
table_chosen, table_coverage = solve(graph, args.max_error)
table.complete_error_table(args.max_error, table_coverage)
args.max_error = args.max_error-5
table.write_table(error_filename)
args.max_error = 15
update_graph.update(args)
graph = load_graph()
padding = max([len(d['_name']) for n,d in graph.nodes(data=True)])
chosen, coverage = solve(graph, args.max_error)
if coverage == 0:
logger.error("Solution impossible")
else:
logger.info("Solved with coverage >= {0}".format(coverage))
graph.graph['coverage'] = 0
for c in chosen:
graph.graph['coverage'] = graph.graph['coverage'] + graph.node[c]['_self_coverage']
print >>sys.stderr, "> {0} {1}".format(graph.node[c]['_name'].ljust(padding), graph.node[c]['_self_coverage'])
graph.graph['selector'] = "MAX_COV"
save_graph(graph)
return True
| lgpl-3.0 |
ikoz/mitmproxy | mitmproxy/console/__init__.py | 2 | 22577 | from __future__ import absolute_import
import mailcap
import mimetypes
import tempfile
import os
import os.path
import shlex
import signal
import stat
import subprocess
import sys
import traceback
import urwid
import weakref
from netlib import tcp
from .. import controller, flow, script, contentviews
from . import flowlist, flowview, help, window, signals, options
from . import grideditor, palettes, statusbar, palettepicker
EVENTLOG_SIZE = 500
class ConsoleState(flow.State):
def __init__(self):
flow.State.__init__(self)
self.focus = None
self.follow_focus = None
self.default_body_view = contentviews.get("Auto")
self.flowsettings = weakref.WeakKeyDictionary()
self.last_search = None
def __setattr__(self, name, value):
self.__dict__[name] = value
signals.update_settings.send(self)
def add_flow_setting(self, flow, key, value):
d = self.flowsettings.setdefault(flow, {})
d[key] = value
def get_flow_setting(self, flow, key, default=None):
d = self.flowsettings.get(flow, {})
return d.get(key, default)
def add_flow(self, f):
super(ConsoleState, self).add_flow(f)
if self.focus is None:
self.set_focus(0)
elif self.follow_focus:
self.set_focus(len(self.view) - 1)
self.set_flow_marked(f, False)
return f
def update_flow(self, f):
super(ConsoleState, self).update_flow(f)
if self.focus is None:
self.set_focus(0)
return f
def set_limit(self, limit):
ret = flow.State.set_limit(self, limit)
self.set_focus(self.focus)
return ret
def get_focus(self):
if not self.view or self.focus is None:
return None, None
return self.view[self.focus], self.focus
def set_focus(self, idx):
if self.view:
if idx >= len(self.view):
idx = len(self.view) - 1
elif idx < 0:
idx = 0
self.focus = idx
else:
self.focus = None
def set_focus_flow(self, f):
self.set_focus(self.view.index(f))
def get_from_pos(self, pos):
if len(self.view) <= pos or pos < 0:
return None, None
return self.view[pos], pos
def get_next(self, pos):
return self.get_from_pos(pos + 1)
def get_prev(self, pos):
return self.get_from_pos(pos - 1)
def delete_flow(self, f):
if f in self.view and self.view.index(f) <= self.focus:
self.focus -= 1
if self.focus < 0:
self.focus = None
ret = flow.State.delete_flow(self, f)
self.set_focus(self.focus)
return ret
def clear(self):
marked_flows = []
for f in self.flows:
if self.flow_marked(f):
marked_flows.append(f)
super(ConsoleState, self).clear()
for f in marked_flows:
self.add_flow(f)
self.set_flow_marked(f, True)
if len(self.flows.views) == 0:
self.focus = None
else:
self.focus = 0
self.set_focus(self.focus)
def flow_marked(self, flow):
return self.get_flow_setting(flow, "marked", False)
def set_flow_marked(self, flow, marked):
self.add_flow_setting(flow, "marked", marked)
class Options(object):
attributes = [
"app",
"app_domain",
"app_ip",
"anticache",
"anticomp",
"client_replay",
"eventlog",
"follow",
"keepserving",
"kill",
"intercept",
"limit",
"no_server",
"refresh_server_playback",
"rfile",
"scripts",
"showhost",
"replacements",
"rheaders",
"setheaders",
"server_replay",
"stickycookie",
"stickyauth",
"stream_large_bodies",
"verbosity",
"wfile",
"nopop",
"palette",
"palette_transparent",
"no_mouse"
]
def __init__(self, **kwargs):
for k, v in kwargs.items():
setattr(self, k, v)
for i in self.attributes:
if not hasattr(self, i):
setattr(self, i, None)
class ConsoleMaster(flow.FlowMaster):
palette = []
def __init__(self, server, options):
flow.FlowMaster.__init__(self, server, ConsoleState())
self.stream_path = None
self.options = options
for i in options.replacements:
self.replacehooks.add(*i)
for i in options.setheaders:
self.setheaders.add(*i)
r = self.set_intercept(options.intercept)
if r:
print >> sys.stderr, "Intercept error:", r
sys.exit(1)
if options.limit:
self.set_limit(options.limit)
r = self.set_stickycookie(options.stickycookie)
if r:
print >> sys.stderr, "Sticky cookies error:", r
sys.exit(1)
r = self.set_stickyauth(options.stickyauth)
if r:
print >> sys.stderr, "Sticky auth error:", r
sys.exit(1)
self.set_stream_large_bodies(options.stream_large_bodies)
self.refresh_server_playback = options.refresh_server_playback
self.anticache = options.anticache
self.anticomp = options.anticomp
self.killextra = options.kill
self.rheaders = options.rheaders
self.nopop = options.nopop
self.showhost = options.showhost
self.palette = options.palette
self.palette_transparent = options.palette_transparent
self.eventlog = options.eventlog
self.eventlist = urwid.SimpleListWalker([])
self.follow = options.follow
if options.client_replay:
self.client_playback_path(options.client_replay)
if options.server_replay:
self.server_playback_path(options.server_replay)
if options.scripts:
for i in options.scripts:
err = self.load_script(i)
if err:
print >> sys.stderr, "Script load error:", err
sys.exit(1)
if options.outfile:
err = self.start_stream_to_path(
options.outfile[0],
options.outfile[1]
)
if err:
print >> sys.stderr, "Stream file error:", err
sys.exit(1)
self.view_stack = []
if options.app:
self.start_app(self.options.app_host, self.options.app_port)
signals.call_in.connect(self.sig_call_in)
signals.pop_view_state.connect(self.sig_pop_view_state)
signals.push_view_state.connect(self.sig_push_view_state)
signals.sig_add_event.connect(self.sig_add_event)
def __setattr__(self, name, value):
self.__dict__[name] = value
signals.update_settings.send(self)
def load_script(self, command, use_reloader=True):
# We default to using the reloader in the console ui.
super(ConsoleMaster, self).load_script(command, use_reloader)
def sig_add_event(self, sender, e, level):
needed = dict(error=0, info=1, debug=2).get(level, 1)
if self.options.verbosity < needed:
return
if level == "error":
e = urwid.Text(("error", str(e)))
else:
e = urwid.Text(str(e))
self.eventlist.append(e)
if len(self.eventlist) > EVENTLOG_SIZE:
self.eventlist.pop(0)
self.eventlist.set_focus(len(self.eventlist) - 1)
def add_event(self, e, level):
signals.add_event(e, level)
def sig_call_in(self, sender, seconds, callback, args=()):
def cb(*_):
return callback(*args)
self.loop.set_alarm_in(seconds, cb)
def sig_pop_view_state(self, sender):
if len(self.view_stack) > 1:
self.view_stack.pop()
self.loop.widget = self.view_stack[-1]
else:
signals.status_prompt_onekey.send(
self,
prompt = "Quit",
keys = (
("yes", "y"),
("no", "n"),
),
callback = self.quit,
)
def sig_push_view_state(self, sender, window):
self.view_stack.append(window)
self.loop.widget = window
self.loop.draw_screen()
def _run_script_method(self, method, s, f):
status, val = s.run(method, f)
if val:
if status:
signals.add_event("Method %s return: %s" % (method, val), "debug")
else:
signals.add_event(
"Method %s error: %s" %
(method, val[1]), "error")
def run_script_once(self, command, f):
if not command:
return
signals.add_event("Running script on flow: %s" % command, "debug")
try:
s = script.Script(command, script.ScriptContext(self))
except script.ScriptException as v:
signals.status_message.send(
message = "Error loading script."
)
signals.add_event("Error loading script:\n%s" % v.args[0], "error")
return
if f.request:
self._run_script_method("request", s, f)
if f.response:
self._run_script_method("response", s, f)
if f.error:
self._run_script_method("error", s, f)
s.unload()
signals.flow_change.send(self, flow = f)
def set_script(self, command):
if not command:
return
ret = self.load_script(command)
if ret:
signals.status_message.send(message=ret)
def toggle_eventlog(self):
self.eventlog = not self.eventlog
signals.pop_view_state.send(self)
self.view_flowlist()
def _readflows(self, path):
"""
        Utility function that reads a list of flows
or prints an error to the UI if that fails.
Returns
- None, if there was an error.
- a list of flows, otherwise.
"""
try:
return flow.read_flows_from_paths(path)
except flow.FlowReadError as e:
signals.status_message.send(message=e.strerror)
def client_playback_path(self, path):
if not isinstance(path, list):
path = [path]
flows = self._readflows(path)
if flows:
self.start_client_playback(flows, False)
def server_playback_path(self, path):
if not isinstance(path, list):
path = [path]
flows = self._readflows(path)
if flows:
self.start_server_playback(
flows,
self.killextra, self.rheaders,
False, self.nopop,
self.options.replay_ignore_params,
self.options.replay_ignore_content,
self.options.replay_ignore_payload_params,
self.options.replay_ignore_host
)
def spawn_editor(self, data):
fd, name = tempfile.mkstemp('', "mproxy")
os.write(fd, data)
os.close(fd)
c = os.environ.get("EDITOR")
# if no EDITOR is set, assume 'vi'
if not c:
c = "vi"
cmd = shlex.split(c)
cmd.append(name)
self.ui.stop()
try:
subprocess.call(cmd)
except:
signals.status_message.send(
message = "Can't start editor: %s" % " ".join(c)
)
else:
data = open(name, "rb").read()
self.ui.start()
os.unlink(name)
return data
def spawn_external_viewer(self, data, contenttype):
if contenttype:
contenttype = contenttype.split(";")[0]
ext = mimetypes.guess_extension(contenttype) or ""
else:
ext = ""
fd, name = tempfile.mkstemp(ext, "mproxy")
os.write(fd, data)
os.close(fd)
# read-only to remind the user that this is a view function
os.chmod(name, stat.S_IREAD)
cmd = None
shell = False
if contenttype:
c = mailcap.getcaps()
cmd, _ = mailcap.findmatch(c, contenttype, filename=name)
if cmd:
shell = True
if not cmd:
# hm which one should get priority?
c = os.environ.get("PAGER") or os.environ.get("EDITOR")
if not c:
c = "less"
cmd = shlex.split(c)
cmd.append(name)
self.ui.stop()
try:
subprocess.call(cmd, shell=shell)
except:
signals.status_message.send(
message="Can't start external viewer: %s" % " ".join(c)
)
self.ui.start()
os.unlink(name)
def set_palette(self, name):
self.palette = name
self.ui.register_palette(
palettes.palettes[name].palette(self.palette_transparent)
)
self.ui.clear()
def ticker(self, *userdata):
changed = self.tick(self.masterq, timeout=0)
if changed:
self.loop.draw_screen()
signals.update_settings.send()
self.loop.set_alarm_in(0.01, self.ticker)
def run(self):
self.ui = urwid.raw_display.Screen()
self.ui.set_terminal_properties(256)
self.set_palette(self.palette)
self.loop = urwid.MainLoop(
urwid.SolidFill("x"),
screen = self.ui,
handle_mouse = not self.options.no_mouse,
)
self.server.start_slave(
controller.Slave,
controller.Channel(self.masterq, self.should_exit)
)
if self.options.rfile:
ret = self.load_flows_path(self.options.rfile)
if ret and self.state.flow_count():
signals.add_event(
"File truncated or corrupted. "
"Loaded as many flows as possible.",
"error"
)
elif ret and not self.state.flow_count():
self.shutdown()
print >> sys.stderr, "Could not load file:", ret
sys.exit(1)
self.loop.set_alarm_in(0.01, self.ticker)
if self.server.config.http2 and not tcp.HAS_ALPN: # pragma: no cover
def http2err(*args, **kwargs):
signals.status_message.send(
message = "HTTP/2 disabled - OpenSSL 1.0.2+ required."
" Use --no-http2 to silence this warning.",
expire=5
)
self.loop.set_alarm_in(0.01, http2err)
# It's not clear why we need to handle this explicitly - without this,
# mitmproxy hangs on keyboard interrupt. Remove if we ever figure it
# out.
def exit(s, f):
raise urwid.ExitMainLoop
signal.signal(signal.SIGINT, exit)
self.loop.set_alarm_in(
0.0001,
lambda *args: self.view_flowlist()
)
try:
self.loop.run()
except Exception:
self.loop.stop()
sys.stdout.flush()
print >> sys.stderr, traceback.format_exc()
print >> sys.stderr, "mitmproxy has crashed!"
print >> sys.stderr, "Please lodge a bug report at:"
print >> sys.stderr, "\thttps://github.com/mitmproxy/mitmproxy"
print >> sys.stderr, "Shutting down..."
sys.stderr.flush()
self.shutdown()
def view_help(self, helpctx):
signals.push_view_state.send(
self,
window = window.Window(
self,
help.HelpView(helpctx),
None,
statusbar.StatusBar(self, help.footer),
None
)
)
def view_options(self):
for i in self.view_stack:
if isinstance(i["body"], options.Options):
return
signals.push_view_state.send(
self,
window = window.Window(
self,
options.Options(self),
None,
statusbar.StatusBar(self, options.footer),
options.help_context,
)
)
def view_palette_picker(self):
signals.push_view_state.send(
self,
window = window.Window(
self,
palettepicker.PalettePicker(self),
None,
statusbar.StatusBar(self, palettepicker.footer),
palettepicker.help_context,
)
)
def view_grideditor(self, ge):
signals.push_view_state.send(
self,
window = window.Window(
self,
ge,
None,
statusbar.StatusBar(self, grideditor.FOOTER),
ge.make_help()
)
)
def view_flowlist(self):
if self.ui.started:
self.ui.clear()
if self.state.follow_focus:
self.state.set_focus(self.state.flow_count())
if self.eventlog:
body = flowlist.BodyPile(self)
else:
body = flowlist.FlowListBox(self)
if self.follow:
self.toggle_follow_flows()
signals.push_view_state.send(
self,
window = window.Window(
self,
body,
None,
statusbar.StatusBar(self, flowlist.footer),
flowlist.help_context
)
)
def view_flow(self, flow, tab_offset=0):
self.state.set_focus_flow(flow)
signals.push_view_state.send(
self,
window = window.Window(
self,
flowview.FlowView(self, self.state, flow, tab_offset),
flowview.FlowViewHeader(self, flow),
statusbar.StatusBar(self, flowview.footer),
flowview.help_context
)
)
def _write_flows(self, path, flows):
if not path:
return
path = os.path.expanduser(path)
try:
f = file(path, "wb")
fw = flow.FlowWriter(f)
for i in flows:
fw.add(i)
f.close()
except IOError as v:
signals.status_message.send(message=v.strerror)
def save_one_flow(self, path, flow):
return self._write_flows(path, [flow])
def save_flows(self, path):
return self._write_flows(path, self.state.view)
def save_marked_flows(self, path):
marked_flows = []
for f in self.state.view:
if self.state.flow_marked(f):
marked_flows.append(f)
return self._write_flows(path, marked_flows)
def load_flows_callback(self, path):
if not path:
return
ret = self.load_flows_path(path)
return ret or "Flows loaded from %s" % path
def load_flows_path(self, path):
reterr = None
try:
flow.FlowMaster.load_flows_file(self, path)
except flow.FlowReadError as v:
reterr = str(v)
signals.flowlist_change.send(self)
return reterr
def accept_all(self):
self.state.accept_all(self)
def set_limit(self, txt):
v = self.state.set_limit(txt)
signals.flowlist_change.send(self)
return v
def set_intercept(self, txt):
return self.state.set_intercept(txt)
def change_default_display_mode(self, t):
v = contentviews.get_by_shortcut(t)
self.state.default_body_view = v
self.refresh_focus()
def edit_scripts(self, scripts):
commands = [x[0] for x in scripts] # remove outer array
if commands == [s.command for s in self.scripts]:
return
self.unload_scripts()
for command in commands:
self.load_script(command)
signals.update_settings.send(self)
def stop_client_playback_prompt(self, a):
if a != "n":
self.stop_client_playback()
def stop_server_playback_prompt(self, a):
if a != "n":
self.stop_server_playback()
def quit(self, a):
if a != "n":
raise urwid.ExitMainLoop
def shutdown(self):
self.state.killall(self)
flow.FlowMaster.shutdown(self)
def clear_flows(self):
self.state.clear()
signals.flowlist_change.send(self)
def toggle_follow_flows(self):
# toggle flow follow
self.state.follow_focus = not self.state.follow_focus
# jump to most recent flow if follow is now on
if self.state.follow_focus:
self.state.set_focus(self.state.flow_count())
signals.flowlist_change.send(self)
def delete_flow(self, f):
self.state.delete_flow(f)
signals.flowlist_change.send(self)
def refresh_focus(self):
if self.state.view:
signals.flow_change.send(
self,
flow = self.state.view[self.state.focus]
)
def process_flow(self, f):
if self.state.intercept and f.match(
self.state.intercept) and not f.request.is_replay:
f.intercept(self)
else:
# check if flow was intercepted within an inline script by flow.intercept()
if f.intercepted:
f.intercept(self)
else:
f.reply()
signals.flowlist_change.send(self)
signals.flow_change.send(self, flow = f)
def clear_events(self):
self.eventlist[:] = []
# Handlers
def handle_error(self, f):
f = flow.FlowMaster.handle_error(self, f)
if f:
self.process_flow(f)
return f
def handle_request(self, f):
f = flow.FlowMaster.handle_request(self, f)
if f:
self.process_flow(f)
return f
def handle_response(self, f):
f = flow.FlowMaster.handle_response(self, f)
if f:
self.process_flow(f)
return f
def handle_script_change(self, script):
if super(ConsoleMaster, self).handle_script_change(script):
signals.status_message.send(message='"{}" reloaded.'.format(script.filename))
else:
signals.status_message.send(message='Error reloading "{}".'.format(script.filename))
| mit |
bitcity/django | tests/admin_utils/tests.py | 45 | 13439 | from __future__ import unicode_literals
from datetime import datetime
from decimal import Decimal
from django import forms
from django.conf import settings
from django.contrib import admin
from django.contrib.admin import helpers
from django.contrib.admin.utils import (
NestedObjects, display_for_field, flatten, flatten_fieldsets,
label_for_field, lookup_field, quote,
)
from django.db import DEFAULT_DB_ALIAS, models
from django.test import SimpleTestCase, TestCase, override_settings
from django.utils import six
from django.utils.formats import localize
from django.utils.safestring import mark_safe
from .models import (
Article, Car, Count, Event, EventGuide, Location, Site, Vehicle,
)
class NestedObjectsTests(TestCase):
"""
Tests for ``NestedObject`` utility collection.
"""
def setUp(self):
self.n = NestedObjects(using=DEFAULT_DB_ALIAS)
self.objs = [Count.objects.create(num=i) for i in range(5)]
def _check(self, target):
self.assertEqual(self.n.nested(lambda obj: obj.num), target)
def _connect(self, i, j):
self.objs[i].parent = self.objs[j]
self.objs[i].save()
def _collect(self, *indices):
self.n.collect([self.objs[i] for i in indices])
def test_unrelated_roots(self):
self._connect(2, 1)
self._collect(0)
self._collect(1)
self._check([0, 1, [2]])
def test_siblings(self):
self._connect(1, 0)
self._connect(2, 0)
self._collect(0)
self._check([0, [1, 2]])
def test_non_added_parent(self):
self._connect(0, 1)
self._collect(0)
self._check([0])
def test_cyclic(self):
self._connect(0, 2)
self._connect(1, 0)
self._connect(2, 1)
self._collect(0)
self._check([0, [1, [2]]])
def test_queries(self):
self._connect(1, 0)
self._connect(2, 0)
# 1 query to fetch all children of 0 (1 and 2)
# 1 query to fetch all children of 1 and 2 (none)
# Should not require additional queries to populate the nested graph.
self.assertNumQueries(2, self._collect, 0)
def test_on_delete_do_nothing(self):
"""
Check that the nested collector doesn't query for DO_NOTHING objects.
"""
n = NestedObjects(using=DEFAULT_DB_ALIAS)
objs = [Event.objects.create()]
EventGuide.objects.create(event=objs[0])
with self.assertNumQueries(2):
# One for Location, one for Guest, and no query for EventGuide
n.collect(objs)
def test_relation_on_abstract(self):
"""
#21846 -- Check that `NestedObjects.collect()` doesn't trip
(AttributeError) on the special notation for relations on abstract
models (related_name that contains %(app_label)s and/or %(class)s).
"""
n = NestedObjects(using=DEFAULT_DB_ALIAS)
Car.objects.create()
n.collect([Vehicle.objects.first()])
class UtilsTests(SimpleTestCase):
empty_value = '-empty-'
def test_values_from_lookup_field(self):
"""
Regression test for #12654: lookup_field
"""
SITE_NAME = 'example.com'
TITLE_TEXT = 'Some title'
CREATED_DATE = datetime.min
ADMIN_METHOD = 'admin method'
SIMPLE_FUNCTION = 'function'
INSTANCE_ATTRIBUTE = 'attr'
class MockModelAdmin(object):
def get_admin_value(self, obj):
return ADMIN_METHOD
simple_function = lambda obj: SIMPLE_FUNCTION
site_obj = Site(domain=SITE_NAME)
article = Article(
site=site_obj,
title=TITLE_TEXT,
created=CREATED_DATE,
)
article.non_field = INSTANCE_ATTRIBUTE
verifications = (
('site', SITE_NAME),
('created', localize(CREATED_DATE)),
('title', TITLE_TEXT),
('get_admin_value', ADMIN_METHOD),
(simple_function, SIMPLE_FUNCTION),
('test_from_model', article.test_from_model()),
('non_field', INSTANCE_ATTRIBUTE)
)
mock_admin = MockModelAdmin()
for name, value in verifications:
field, attr, resolved_value = lookup_field(name, article, mock_admin)
if field is not None:
resolved_value = display_for_field(resolved_value, field, self.empty_value)
self.assertEqual(value, resolved_value)
def test_null_display_for_field(self):
"""
Regression test for #12550: display_for_field should handle None
value.
"""
display_value = display_for_field(None, models.CharField(), self.empty_value)
self.assertEqual(display_value, self.empty_value)
display_value = display_for_field(None, models.CharField(
choices=(
(None, "test_none"),
)
), self.empty_value)
self.assertEqual(display_value, "test_none")
display_value = display_for_field(None, models.DateField(), self.empty_value)
self.assertEqual(display_value, self.empty_value)
display_value = display_for_field(None, models.TimeField(), self.empty_value)
self.assertEqual(display_value, self.empty_value)
# Regression test for #13071: NullBooleanField has special
# handling.
display_value = display_for_field(None, models.NullBooleanField(), self.empty_value)
expected = '<img src="%sadmin/img/icon-unknown.gif" alt="None" />' % settings.STATIC_URL
self.assertHTMLEqual(display_value, expected)
display_value = display_for_field(None, models.DecimalField(), self.empty_value)
self.assertEqual(display_value, self.empty_value)
display_value = display_for_field(None, models.FloatField(), self.empty_value)
self.assertEqual(display_value, self.empty_value)
def test_number_formats_display_for_field(self):
display_value = display_for_field(12345.6789, models.FloatField(), self.empty_value)
self.assertEqual(display_value, '12345.6789')
display_value = display_for_field(Decimal('12345.6789'), models.DecimalField(), self.empty_value)
self.assertEqual(display_value, '12345.6789')
display_value = display_for_field(12345, models.IntegerField(), self.empty_value)
self.assertEqual(display_value, '12345')
@override_settings(USE_L10N=True, USE_THOUSAND_SEPARATOR=True)
def test_number_formats_with_thousand_seperator_display_for_field(self):
display_value = display_for_field(12345.6789, models.FloatField(), self.empty_value)
self.assertEqual(display_value, '12,345.6789')
display_value = display_for_field(Decimal('12345.6789'), models.DecimalField(), self.empty_value)
self.assertEqual(display_value, '12,345.6789')
display_value = display_for_field(12345, models.IntegerField(), self.empty_value)
self.assertEqual(display_value, '12,345')
def test_label_for_field(self):
"""
Tests for label_for_field
"""
self.assertEqual(
label_for_field("title", Article),
"title"
)
self.assertEqual(
label_for_field("title2", Article),
"another name"
)
self.assertEqual(
label_for_field("title2", Article, return_attr=True),
("another name", None)
)
self.assertEqual(
label_for_field("__unicode__", Article),
"article"
)
self.assertEqual(
label_for_field("__str__", Article),
str("article")
)
self.assertRaises(
AttributeError,
lambda: label_for_field("unknown", Article)
)
def test_callable(obj):
return "nothing"
self.assertEqual(
label_for_field(test_callable, Article),
"Test callable"
)
self.assertEqual(
label_for_field(test_callable, Article, return_attr=True),
("Test callable", test_callable)
)
self.assertEqual(
label_for_field("test_from_model", Article),
"Test from model"
)
self.assertEqual(
label_for_field("test_from_model", Article, return_attr=True),
("Test from model", Article.test_from_model)
)
self.assertEqual(
label_for_field("test_from_model_with_override", Article),
"not What you Expect"
)
self.assertEqual(
label_for_field(lambda x: "nothing", Article),
"--"
)
class MockModelAdmin(object):
def test_from_model(self, obj):
return "nothing"
test_from_model.short_description = "not Really the Model"
self.assertEqual(
label_for_field("test_from_model", Article, model_admin=MockModelAdmin),
"not Really the Model"
)
self.assertEqual(
label_for_field("test_from_model", Article,
model_admin=MockModelAdmin,
return_attr=True),
("not Really the Model", MockModelAdmin.test_from_model)
)
def test_label_for_property(self):
# NOTE: cannot use @property decorator, because of
# AttributeError: 'property' object has no attribute 'short_description'
class MockModelAdmin(object):
def my_property(self):
return "this if from property"
my_property.short_description = 'property short description'
test_from_property = property(my_property)
self.assertEqual(
label_for_field("test_from_property", Article, model_admin=MockModelAdmin),
'property short description'
)
def test_related_name(self):
"""
Regression test for #13963
"""
self.assertEqual(
label_for_field('location', Event, return_attr=True),
('location', None),
)
self.assertEqual(
label_for_field('event', Location, return_attr=True),
('awesome event', None),
)
self.assertEqual(
label_for_field('guest', Event, return_attr=True),
('awesome guest', None),
)
def test_logentry_unicode(self):
"""
Regression test for #15661
"""
log_entry = admin.models.LogEntry()
log_entry.action_flag = admin.models.ADDITION
self.assertTrue(
six.text_type(log_entry).startswith('Added ')
)
log_entry.action_flag = admin.models.CHANGE
self.assertTrue(
six.text_type(log_entry).startswith('Changed ')
)
log_entry.action_flag = admin.models.DELETION
self.assertTrue(
six.text_type(log_entry).startswith('Deleted ')
)
# Make sure custom action_flags works
log_entry.action_flag = 4
self.assertEqual(six.text_type(log_entry), 'LogEntry Object')
def test_safestring_in_field_label(self):
# safestring should not be escaped
class MyForm(forms.Form):
text = forms.CharField(label=mark_safe('<i>text</i>'))
cb = forms.BooleanField(label=mark_safe('<i>cb</i>'))
form = MyForm()
self.assertHTMLEqual(helpers.AdminField(form, 'text', is_first=False).label_tag(),
'<label for="id_text" class="required inline"><i>text</i>:</label>')
self.assertHTMLEqual(helpers.AdminField(form, 'cb', is_first=False).label_tag(),
'<label for="id_cb" class="vCheckboxLabel required inline"><i>cb</i></label>')
        # normal strings need to be escaped
class MyForm(forms.Form):
text = forms.CharField(label='&text')
cb = forms.BooleanField(label='&cb')
form = MyForm()
self.assertHTMLEqual(helpers.AdminField(form, 'text', is_first=False).label_tag(),
'<label for="id_text" class="required inline">&text:</label>')
self.assertHTMLEqual(helpers.AdminField(form, 'cb', is_first=False).label_tag(),
'<label for="id_cb" class="vCheckboxLabel required inline">&cb</label>')
def test_flatten(self):
flat_all = ['url', 'title', 'content', 'sites']
inputs = (
((), []),
(('url', 'title', ('content', 'sites')), flat_all),
(('url', 'title', 'content', 'sites'), flat_all),
((('url', 'title'), ('content', 'sites')), flat_all)
)
for orig, expected in inputs:
self.assertEqual(flatten(orig), expected)
def test_flatten_fieldsets(self):
"""
Regression test for #18051
"""
fieldsets = (
(None, {
'fields': ('url', 'title', ('content', 'sites'))
}),
)
self.assertEqual(flatten_fieldsets(fieldsets), ['url', 'title', 'content', 'sites'])
fieldsets = (
(None, {
'fields': ('url', 'title', ['content', 'sites'])
}),
)
self.assertEqual(flatten_fieldsets(fieldsets), ['url', 'title', 'content', 'sites'])
def test_quote(self):
self.assertEqual(quote('something\nor\nother'), 'something_0Aor_0Aother')
| bsd-3-clause |
davidbkemp/node-gyp | gyp/test/copies/gyptest-default.py | 264 | 1268 | #!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies file copies using the build tool default.
"""
import TestGyp
test = TestGyp.TestGyp()
test.run_gyp('copies.gyp', chdir='src')
test.relocate('src', 'relocate/src')
test.build('copies.gyp', chdir='relocate/src')
test.must_match(['relocate', 'src', 'copies-out', 'file1'], 'file1 contents\n')
test.built_file_must_match('copies-out/file2',
'file2 contents\n',
chdir='relocate/src')
test.built_file_must_match('copies-out/directory/file3',
'file3 contents\n',
chdir='relocate/src')
test.built_file_must_match('copies-out/directory/file4',
'file4 contents\n',
chdir='relocate/src')
test.built_file_must_match('copies-out/directory/subdir/file5',
'file5 contents\n',
chdir='relocate/src')
test.built_file_must_match('copies-out/subdir/file6',
'file6 contents\n',
chdir='relocate/src')
test.pass_test()
| mit |
scottpurdy/nupic.vision | src/nupic/vision/regions/ImageSensorFilters/MultipleScales.py | 2 | 2631 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
## @file
"""
from PIL import Image
from nupic.vision.regions.ImageSensorFilters.BaseFilter import BaseFilter
class MultipleScales(BaseFilter):
"""
** DEPRECATED ** Create scaled versions of the original image.
"""
def __init__(self, scales=[1], simultaneous=False):
"""
** DEPRECATED **
@param scales -- List of factors used for scaling. scales = [.5, 1] returns
two images, one half the size of the original in each dimension, and one
which is the original image.
@param simultaneous -- Whether the images should be sent out of the sensor
simultaneously.
"""
BaseFilter.__init__(self)
self.scales = scales
self.simultaneous = simultaneous
def process(self, image):
"""
@param image -- The image to process.
Returns a single image, or a list containing one or more images.
"""
BaseFilter.process(self, image)
sizes = [(int(round(image.size[0]*s)), int(round(image.size[1]*s)))
for s in self.scales]
resizedImages = []
for size in sizes:
if size < image.size:
resizedImages.append(image.resize(size,Image.ANTIALIAS))
else:
resizedImages.append(image.resize(size,Image.BICUBIC))
if not self.simultaneous:
return resizedImages
else:
return [resizedImages]
def getOutputCount(self):
"""
Return the number of images returned by each call to process().
If the filter creates multiple simultaneous outputs, return a tuple:
(outputCount, simultaneousOutputCount).
"""
if not self.simultaneous:
return len(self.scales)
else:
return (1, len(self.scales))
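# Hedged usage sketch (assumes a plain PIL image and default filter wiring):
#
#     from PIL import Image
#     f = MultipleScales(scales=[0.5, 1])
#     outputs = f.process(Image.new('L', (100, 100)))
#     # -> [50x50 image (ANTIALIAS branch), 100x100 image (BICUBIC branch,
#     #     since (100, 100) < (100, 100) is False)]; f.getOutputCount() == 2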
| agpl-3.0 |
alkyl1978/gnuradio | gr-filter/python/filter/qa_freq_xlating_fir_filter.py | 51 | 10944 | #!/usr/bin/env python
#
# Copyright 2008,2010,2012,2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
from gnuradio import gr, gr_unittest, filter, blocks
import cmath, math
def fir_filter(x, taps, decim=1):
y = []
x2 = (len(taps)-1)*[0,] + x
for i in range(0, len(x), decim):
yi = 0
for j in range(len(taps)):
yi += taps[len(taps)-1-j] * x2[i+j]
y.append(yi)
return y
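# Tiny worked example of the pure-Python reference filter above (no GNU Radio
# required): a 2-tap moving sum, with the newest sample weighted by taps[-1].
#
#     >>> fir_filter([1, 2, 3], [1, 1])
#     [1, 3, 5]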
def sig_source_s(samp_rate, freq, amp, N):
t = map(lambda x: float(x)/samp_rate, xrange(N))
y = map(lambda x: int(100*math.sin(2.*math.pi*freq*x)), t)
return y
def sig_source_c(samp_rate, freq, amp, N):
t = map(lambda x: float(x)/samp_rate, xrange(N))
y = map(lambda x: math.cos(2.*math.pi*freq*x) + \
1j*math.sin(2.*math.pi*freq*x), t)
return y
def mix(lo, data):
y = [lo_i*data_i for lo_i, data_i in zip(lo, data)]
return y
class test_freq_xlating_filter(gr_unittest.TestCase):
def setUp(self):
self.tb = gr.top_block ()
def tearDown(self):
self.tb = None
def generate_ccf_source(self):
self.fs = fs = 1
self.fc = fc = 0.3
self.bw = bw = 0.1
self.taps = filter.firdes.low_pass(1, fs, bw, bw/4)
times = xrange(100)
self.src_data = map(lambda t: cmath.exp(-2j*cmath.pi*fc/fs*(t/100.0)), times)
def generate_ccc_source(self):
self.fs = fs = 1
self.fc = fc = 0.3
self.bw = bw = 0.1
self.taps = filter.firdes.complex_band_pass(1, fs, -bw/2, bw/2, bw/4)
times = xrange(100)
self.src_data = map(lambda t: cmath.exp(-2j*cmath.pi*fc/fs*(t/100.0)), times)
def generate_fcf_source(self):
self.fs = fs = 1
self.fc = fc = 0.3
self.bw = bw = 0.1
self.taps = filter.firdes.low_pass(1, fs, bw, bw/4)
times = xrange(100)
self.src_data = map(lambda t: math.sin(2*cmath.pi*fc/fs*(t/100.0)), times)
def generate_fcc_source(self):
self.fs = fs = 1
self.fc = fc = 0.3
self.bw = bw = 0.1
self.taps = filter.firdes.complex_band_pass(1, fs, -bw/2, bw/2, bw/4)
times = xrange(100)
self.src_data = map(lambda t: math.sin(2*cmath.pi*fc/fs*(t/100.0)), times)
def generate_scf_source(self):
self.fs = fs = 1
self.fc = fc = 0.3
self.bw = bw = 0.12
self.taps = filter.firdes.low_pass(1, fs, bw, bw/4)
times = xrange(100)
self.src_data = map(lambda t: int(100*math.sin(2*cmath.pi*fc/fs*(t/100.0))), times)
def generate_scc_source(self):
self.fs = fs = 1
self.fc = fc = 0.3
self.bw = bw = 0.12
self.taps = filter.firdes.complex_band_pass(1, fs, -bw/2, bw/2, bw/4)
times = xrange(100)
self.src_data = map(lambda t: int(100*math.sin(2*cmath.pi*fc/fs*(t/100.0))), times)
def test_fir_filter_ccf_001(self):
self.generate_ccf_source()
decim = 1
lo = sig_source_c(self.fs, -self.fc, 1, len(self.src_data))
despun = mix(lo, self.src_data)
expected_data = fir_filter(despun, self.taps, decim)
src = blocks.vector_source_c(self.src_data)
op = filter.freq_xlating_fir_filter_ccf(decim, self.taps, self.fc, self.fs)
dst = blocks.vector_sink_c()
self.tb.connect(src, op, dst)
self.tb.run()
result_data = dst.data()
self.assertComplexTuplesAlmostEqual(expected_data, result_data, 5)
def test_fir_filter_ccf_002(self):
self.generate_ccf_source()
decim = 4
lo = sig_source_c(self.fs, -self.fc, 1, len(self.src_data))
despun = mix(lo, self.src_data)
expected_data = fir_filter(despun, self.taps, decim)
src = blocks.vector_source_c(self.src_data)
op = filter.freq_xlating_fir_filter_ccf(decim, self.taps, self.fc, self.fs)
dst = blocks.vector_sink_c()
self.tb.connect(src, op, dst)
self.tb.run()
result_data = dst.data()
self.assertComplexTuplesAlmostEqual(expected_data, result_data, 5)
def test_fir_filter_ccc_001(self):
self.generate_ccc_source()
decim = 1
lo = sig_source_c(self.fs, -self.fc, 1, len(self.src_data))
despun = mix(lo, self.src_data)
expected_data = fir_filter(despun, self.taps, decim)
src = blocks.vector_source_c(self.src_data)
op = filter.freq_xlating_fir_filter_ccc(decim, self.taps, self.fc, self.fs)
dst = blocks.vector_sink_c()
self.tb.connect(src, op, dst)
self.tb.run()
result_data = dst.data()
self.assertComplexTuplesAlmostEqual(expected_data, result_data, 5)
def test_fir_filter_ccc_002(self):
self.generate_ccc_source()
decim = 4
lo = sig_source_c(self.fs, -self.fc, 1, len(self.src_data))
despun = mix(lo, self.src_data)
expected_data = fir_filter(despun, self.taps, decim)
src = blocks.vector_source_c(self.src_data)
op = filter.freq_xlating_fir_filter_ccc(decim, self.taps, self.fc, self.fs)
dst = blocks.vector_sink_c()
self.tb.connect(src, op, dst)
self.tb.run()
result_data = dst.data()
self.assertComplexTuplesAlmostEqual(expected_data, result_data, 5)
def test_fir_filter_fcf_001(self):
self.generate_fcf_source()
decim = 1
lo = sig_source_c(self.fs, -self.fc, 1, len(self.src_data))
despun = mix(lo, self.src_data)
expected_data = fir_filter(despun, self.taps, decim)
src = blocks.vector_source_f(self.src_data)
op = filter.freq_xlating_fir_filter_fcf(decim, self.taps, self.fc, self.fs)
dst = blocks.vector_sink_c()
self.tb.connect(src, op, dst)
self.tb.run()
result_data = dst.data()
self.assertComplexTuplesAlmostEqual(expected_data, result_data, 5)
def test_fir_filter_fcf_002(self):
self.generate_fcf_source()
decim = 4
lo = sig_source_c(self.fs, -self.fc, 1, len(self.src_data))
despun = mix(lo, self.src_data)
expected_data = fir_filter(despun, self.taps, decim)
src = blocks.vector_source_f(self.src_data)
op = filter.freq_xlating_fir_filter_fcf(decim, self.taps, self.fc, self.fs)
dst = blocks.vector_sink_c()
self.tb.connect(src, op, dst)
self.tb.run()
result_data = dst.data()
self.assertComplexTuplesAlmostEqual(expected_data, result_data, 5)
def test_fir_filter_fcc_001(self):
self.generate_fcc_source()
decim = 1
lo = sig_source_c(self.fs, -self.fc, 1, len(self.src_data))
despun = mix(lo, self.src_data)
expected_data = fir_filter(despun, self.taps, decim)
src = blocks.vector_source_f(self.src_data)
op = filter.freq_xlating_fir_filter_fcc(decim, self.taps, self.fc, self.fs)
dst = blocks.vector_sink_c()
self.tb.connect(src, op, dst)
self.tb.run()
result_data = dst.data()
self.assertComplexTuplesAlmostEqual(expected_data, result_data, 5)
def test_fir_filter_fcc_002(self):
self.generate_fcc_source()
decim = 4
lo = sig_source_c(self.fs, -self.fc, 1, len(self.src_data))
despun = mix(lo, self.src_data)
expected_data = fir_filter(despun, self.taps, decim)
src = blocks.vector_source_f(self.src_data)
op = filter.freq_xlating_fir_filter_fcc(decim, self.taps, self.fc, self.fs)
dst = blocks.vector_sink_c()
self.tb.connect(src, op, dst)
self.tb.run()
result_data = dst.data()
self.assertComplexTuplesAlmostEqual(expected_data, result_data, 5)
def test_fir_filter_scf_001(self):
self.generate_scf_source()
decim = 1
lo = sig_source_c(self.fs, -self.fc, 1, len(self.src_data))
despun = mix(lo, self.src_data)
expected_data = fir_filter(despun, self.taps, decim)
src = blocks.vector_source_s(self.src_data)
op = filter.freq_xlating_fir_filter_scf(decim, self.taps, self.fc, self.fs)
dst = blocks.vector_sink_c()
self.tb.connect(src, op, dst)
self.tb.run()
result_data = dst.data()
self.assertComplexTuplesAlmostEqual(expected_data, result_data, 4)
def test_fir_filter_scf_002(self):
self.generate_scf_source()
decim = 4
lo = sig_source_c(self.fs, -self.fc, 1, len(self.src_data))
despun = mix(lo, self.src_data)
expected_data = fir_filter(despun, self.taps, decim)
src = blocks.vector_source_s(self.src_data)
op = filter.freq_xlating_fir_filter_scf(decim, self.taps, self.fc, self.fs)
dst = blocks.vector_sink_c()
self.tb.connect(src, op, dst)
self.tb.run()
result_data = dst.data()
self.assertComplexTuplesAlmostEqual(expected_data, result_data, 4)
def test_fir_filter_scc_001(self):
self.generate_scc_source()
decim = 1
lo = sig_source_c(self.fs, -self.fc, 1, len(self.src_data))
despun = mix(lo, self.src_data)
expected_data = fir_filter(despun, self.taps, decim)
src = blocks.vector_source_s(self.src_data)
op = filter.freq_xlating_fir_filter_scc(decim, self.taps, self.fc, self.fs)
dst = blocks.vector_sink_c()
self.tb.connect(src, op, dst)
self.tb.run()
result_data = dst.data()
self.assertComplexTuplesAlmostEqual(expected_data, result_data, 4)
def test_fir_filter_scc_002(self):
self.generate_scc_source()
decim = 4
lo = sig_source_c(self.fs, -self.fc, 1, len(self.src_data))
despun = mix(lo, self.src_data)
expected_data = fir_filter(despun, self.taps, decim)
src = blocks.vector_source_s(self.src_data)
op = filter.freq_xlating_fir_filter_scc(decim, self.taps, self.fc, self.fs)
dst = blocks.vector_sink_c()
self.tb.connect(src, op, dst)
self.tb.run()
result_data = dst.data()
self.assertComplexTuplesAlmostEqual(expected_data, result_data, 4)
if __name__ == '__main__':
gr_unittest.run(test_freq_xlating_filter, "test_freq_xlating_filter.xml")
| gpl-3.0 |
ludwiktrammer/odoo | addons/mail/models/res_config.py | 43 | 1546 | # -*- coding: utf-8 -*-
import urlparse
import datetime
from openerp import api, fields, models, tools
class BaseConfiguration(models.TransientModel):
""" Inherit the base settings to add a counter of failed email + configure
the alias domain. """
_inherit = 'base.config.settings'
fail_counter = fields.Integer('Fail Mail', readonly=True)
    alias_domain = fields.Char('Alias Domain', help="If you have set up a catch-all email domain redirected to "
"the Odoo server, enter the domain name here.")
@api.multi
def get_default_fail_counter(self):
previous_date = datetime.datetime.now() - datetime.timedelta(days=30)
return {
'fail_counter': self.env['mail.mail'].sudo().search_count([('date', '>=', previous_date.strftime(tools.DEFAULT_SERVER_DATETIME_FORMAT)), ('state', '=', 'exception')]),
}
@api.multi
def get_default_alias_domain(self):
alias_domain = self.env["ir.config_parameter"].get_param("mail.catchall.domain", default=None)
if alias_domain is None:
domain = self.env["ir.config_parameter"].get_param("web.base.url")
try:
alias_domain = urlparse.urlsplit(domain).netloc.split(':')[0]
except Exception:
pass
return {'alias_domain': alias_domain or False}
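    # Illustrative sketch (URL assumed, not from any Odoo config): with
    # web.base.url = 'http://mail.example.com:8069', urlsplit(...).netloc is
    # 'mail.example.com:8069', so the derived alias_domain would be
    # 'mail.example.com'.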
@api.multi
def set_alias_domain(self):
for record in self:
self.env['ir.config_parameter'].set_param("mail.catchall.domain", record.alias_domain or '')
| agpl-3.0 |
cpcloud/banyan | setup.py | 1 | 1717 | #!/usr/bin/env python
from __future__ import print_function
import os
from fnmatch import fnmatch
from setuptools import setup, Extension
def find_recursive(pattern, package='banyan'):
for r, _, fs in os.walk(package):
for f in fs:
if fnmatch(f, pattern):
yield os.path.join(r, f)
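# Illustrative only (the filename below is assumed, not read from the repo):
# with a file banyan/banyan_c.cpp on disk, list(find_recursive('*.cpp'))
# would yield ['banyan/banyan_c.cpp'].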
banyan_c = Extension('banyan_c',
include_dirs=['/usr/local/include', 'banyan'],
language='c++',
depends=list(find_recursive('*.hpp')),
sources=list(find_recursive('*.cpp')))
def read_text(filename):
with open(filename, 'rt') as f:
return f.read()
setup(
name='Banyan',
version='0.1.5',
author='Ami Tavory',
author_email='atavory at gmail.com',
maintainer='Phillip Cloud',
maintainer_email='cpcloud at gmail.com',
packages=['banyan'],
url='https://github.com/cpcloud/banyan',
license='BSD',
description=('Highly-optimized search trees (red-black, splay, and '
'sorted-list) with optional augmentation '
'(dynamic order statistics, interval trees, etc.)'),
long_description=read_text('README.rst'),
requires=['UnittestRandGenState'],
ext_modules=[banyan_c],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: C++',
'Topic :: Software Development :: Libraries :: Python Modules'])
| bsd-3-clause |
mfaruqui/crosslingual-cca | eval_cluster_embeddings.py | 2 | 2715 | import os
import re
import time
import io
import sys
import argparse
from collections import defaultdict
import random
def compute_shared_prefix_length(bitstring1, bitstring2):
common_prefix_length = 0
for i in xrange(min(len(bitstring1), len(bitstring2))):
if bitstring1[i] == bitstring2[i]:
common_prefix_length += 1
else:
break
return common_prefix_length
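# Worked example (bitstrings invented for illustration):
#   compute_shared_prefix_length('0010', '0011') == 3
# because the two strings agree on their first three characters.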
# parse/validate arguments
argparser = argparse.ArgumentParser()
argparser.add_argument("-c", "--clusters_filename", help='word types and their corresponding cluster bitstrings', required=True)
argparser.add_argument("-d", "--synonyms_filename", help='pairs of words which should fall in the same cluster', required=True)
argparser.add_argument("-s", "--sample_size", help='number of word samples to compare against for each synonym pair', default=10, type=float)
args = argparser.parse_args()
print "Reading word cluster bitstrings..."
word_to_bitstring = {}
with io.open(args.clusters_filename, encoding='utf8') as clusters_file:
for line in clusters_file:
word, bitstring = line.strip().split(' ')
word_to_bitstring[word] = bitstring
word_list = word_to_bitstring.keys()
print "Reading synonyms..."
word_to_synonyms = defaultdict(set)
with io.open(args.synonyms_filename, encoding='utf8') as synonyms_file:
for line in synonyms_file:
word1, word2 = line.strip().split(' ||| ')
if word1 not in word_to_bitstring or word2 not in word_to_bitstring: continue
word_to_synonyms[word1].add(word2)
word_to_synonyms[word2].add(word1)
print "Evaluating clusters..."
correct, incorrect = 0, 0
for word in word_to_synonyms.keys():
word_synonyms = list(word_to_synonyms[word])
word_nonsynonyms = random.sample(word_list, args.sample_size)
similarity_to_synonyms = 0.0
for word_synonym in word_synonyms:
similarity_to_synonyms += compute_shared_prefix_length(word_to_bitstring[word], word_to_bitstring[word_synonym])
average_similarity_to_synonyms = similarity_to_synonyms * 1.0 / len(word_synonyms)
similarity_to_nonsynonyms = 0.0
for word_nonsynonym in word_nonsynonyms:
similarity_to_nonsynonyms += compute_shared_prefix_length(word_to_bitstring[word], word_to_bitstring[word_nonsynonym])
average_similarity_to_nonsynonyms = similarity_to_nonsynonyms * 1.0 / len(word_nonsynonyms)
  print u'{}: {} (avg of {}) > {} (avg of {}) ?'.format(word, average_similarity_to_synonyms, len(word_synonyms), average_similarity_to_nonsynonyms, len(word_nonsynonyms))
if average_similarity_to_synonyms > average_similarity_to_nonsynonyms:
correct += 1
else:
incorrect += 1
print 'correct = {}, incorrect = {}, accuracy = {}'.format(correct, incorrect, 1.0 * correct / (correct + incorrect))
| gpl-2.0 |
jc0n/scrapy | scrapy/downloadermiddlewares/httpproxy.py | 10 | 2034 | import base64
from six.moves.urllib.request import getproxies, proxy_bypass
from six.moves.urllib.parse import unquote
try:
from urllib2 import _parse_proxy
except ImportError:
from urllib.request import _parse_proxy
from six.moves.urllib.parse import urlunparse
from scrapy.utils.httpobj import urlparse_cached
from scrapy.exceptions import NotConfigured
from scrapy.utils.python import to_bytes
class HttpProxyMiddleware(object):
def __init__(self, auth_encoding='latin-1'):
self.auth_encoding = auth_encoding
self.proxies = {}
for type, url in getproxies().items():
self.proxies[type] = self._get_proxy(url, type)
if not self.proxies:
raise NotConfigured
@classmethod
def from_crawler(cls, crawler):
auth_encoding = crawler.settings.get('HTTPPROXY_AUTH_ENCODING')
return cls(auth_encoding)
def _get_proxy(self, url, orig_type):
proxy_type, user, password, hostport = _parse_proxy(url)
proxy_url = urlunparse((proxy_type or orig_type, hostport, '', '', '', ''))
if user:
user_pass = to_bytes(
'%s:%s' % (unquote(user), unquote(password)),
encoding=self.auth_encoding)
creds = base64.b64encode(user_pass).strip()
else:
creds = None
return creds, proxy_url
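    # Illustrative sketch (URL assumed): for
    # 'http://user:[email protected]:3128', _parse_proxy is expected to
    # split out ('http', 'user', 'pass', 'proxy.example.com:3128'), so
    # proxy_url becomes 'http://proxy.example.com:3128' and creds holds the
    # base64-encoded 'user:pass'.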
def process_request(self, request, spider):
# ignore if proxy is already set
if 'proxy' in request.meta:
return
parsed = urlparse_cached(request)
scheme = parsed.scheme
# 'no_proxy' is only supported by http schemes
if scheme in ('http', 'https') and proxy_bypass(parsed.hostname):
return
if scheme in self.proxies:
self._set_proxy(request, scheme)
def _set_proxy(self, request, scheme):
creds, proxy = self.proxies[scheme]
request.meta['proxy'] = proxy
if creds:
request.headers['Proxy-Authorization'] = b'Basic ' + creds
| bsd-3-clause |
Mazecreator/tensorflow | tensorflow/python/debug/cli/readline_ui_test.py | 81 | 5646 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests of the readline-based CLI."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import tempfile
from tensorflow.python.debug.cli import debugger_cli_common
from tensorflow.python.debug.cli import readline_ui
from tensorflow.python.debug.cli import ui_factory
from tensorflow.python.framework import test_util
from tensorflow.python.platform import gfile
from tensorflow.python.platform import googletest
class MockReadlineUI(readline_ui.ReadlineUI):
"""Test subclass of ReadlineUI that bypasses terminal manipulations."""
def __init__(self, on_ui_exit=None, command_sequence=None):
readline_ui.ReadlineUI.__init__(self, on_ui_exit=on_ui_exit)
self._command_sequence = command_sequence
self._command_counter = 0
self.observers = {"screen_outputs": []}
def _get_user_command(self):
command = self._command_sequence[self._command_counter]
self._command_counter += 1
return command
def _display_output(self, screen_output):
self.observers["screen_outputs"].append(screen_output)
class CursesTest(test_util.TensorFlowTestCase):
def _babble(self, args, screen_info=None):
ap = argparse.ArgumentParser(
description="Do babble.", usage=argparse.SUPPRESS)
ap.add_argument(
"-n",
"--num_times",
dest="num_times",
type=int,
default=60,
help="How many times to babble")
parsed = ap.parse_args(args)
lines = ["bar"] * parsed.num_times
return debugger_cli_common.RichTextLines(lines)
def testUIFactoryCreatesReadlineUI(self):
ui = ui_factory.get_ui("readline")
self.assertIsInstance(ui, readline_ui.ReadlineUI)
def testUIFactoryRaisesExceptionOnInvalidUIType(self):
with self.assertRaisesRegexp(ValueError, "Invalid ui_type: 'foobar'"):
ui_factory.get_ui("foobar")
def testUIFactoryRaisesExceptionOnInvalidUITypeGivenAvailable(self):
with self.assertRaisesRegexp(ValueError, "Invalid ui_type: 'readline'"):
ui_factory.get_ui("readline", available_ui_types=["curses"])
def testRunUIExitImmediately(self):
"""Make sure that the UI can exit properly after launch."""
ui = MockReadlineUI(command_sequence=["exit"])
ui.run_ui()
# No screen output should have happened.
self.assertEqual(0, len(ui.observers["screen_outputs"]))
def testRunUIEmptyCommand(self):
"""Issue an empty command then exit."""
ui = MockReadlineUI(command_sequence=["", "exit"])
ui.run_ui()
self.assertEqual(1, len(ui.observers["screen_outputs"]))
def testRunUIWithInitCmd(self):
"""Run UI with an initial command specified."""
ui = MockReadlineUI(command_sequence=["exit"])
ui.register_command_handler("babble", self._babble, "")
ui.run_ui(init_command="babble")
screen_outputs = ui.observers["screen_outputs"]
self.assertEqual(1, len(screen_outputs))
self.assertEqual(["bar"] * 60, screen_outputs[0].lines)
def testRunUIWithValidUsersCommands(self):
"""Run UI with an initial command specified."""
ui = MockReadlineUI(command_sequence=["babble -n 3", "babble -n 6", "exit"])
ui.register_command_handler("babble", self._babble, "")
ui.run_ui()
screen_outputs = ui.observers["screen_outputs"]
self.assertEqual(2, len(screen_outputs))
self.assertEqual(["bar"] * 3, screen_outputs[0].lines)
self.assertEqual(["bar"] * 6, screen_outputs[1].lines)
def testRunUIWithInvalidUsersCommands(self):
"""Run UI with an initial command specified."""
ui = MockReadlineUI(command_sequence=["babble -n 3", "wobble", "exit"])
ui.register_command_handler("babble", self._babble, "")
ui.run_ui()
screen_outputs = ui.observers["screen_outputs"]
self.assertEqual(2, len(screen_outputs))
self.assertEqual(["bar"] * 3, screen_outputs[0].lines)
self.assertEqual(["ERROR: Invalid command prefix \"wobble\""],
screen_outputs[1].lines)
def testRunUIWithOnUIExitCallback(self):
observer = {"callback_invoked": False}
def callback_for_test():
observer["callback_invoked"] = True
ui = MockReadlineUI(on_ui_exit=callback_for_test, command_sequence=["exit"])
self.assertFalse(observer["callback_invoked"])
ui.run_ui()
self.assertEqual(0, len(ui.observers["screen_outputs"]))
self.assertTrue(observer["callback_invoked"])
def testIncompleteRedirectWorks(self):
output_path = tempfile.mktemp()
ui = MockReadlineUI(
command_sequence=["babble -n 2 > %s" % output_path, "exit"])
ui.register_command_handler("babble", self._babble, "")
ui.run_ui()
screen_outputs = ui.observers["screen_outputs"]
self.assertEqual(1, len(screen_outputs))
self.assertEqual(["bar"] * 2, screen_outputs[0].lines)
with gfile.Open(output_path, "r") as f:
self.assertEqual("bar\nbar\n", f.read())
if __name__ == "__main__":
googletest.main()
| apache-2.0 |
jfcarr/astro-now | client_gui.py | 1 | 1983 | #!/usr/bin/python3
import astro_now_lib as AL
from tkinter import *
# Dayton, Ohio
testLatitude = '39.759'
testLongitude = '-84.192'
myAstro = AL.CAstroNow(lat=testLatitude, long=testLongitude, prettyprint=True)
#myAstro = AL.CAstroNow(lat=testLatitude, long=testLongitude, prettyprint=True, calcdate="2016/01/17 20:00:00")
def DisplayCurrentConditions():
textOutput.delete(1.0, END)
textOutput.insert(END, myAstro.GetCurrentConditions())
def DisplaySun():
textOutput.delete(1.0, END)
textOutput.insert(END, "Sun")
textOutput.insert(END, "\n")
textOutput.insert(END, myAstro.GetSunInfo())
def DisplayMoon():
textOutput.delete(1.0, END)
textOutput.insert(END, "Moon")
textOutput.insert(END, "\n")
textOutput.insert(END, myAstro.GetMoonInfo())
def DisplayPlanet(planetName):
textOutput.delete(1.0, END)
textOutput.insert(END, myAstro.GetPlanetInfo(planetName))
def DisplayTwilight():
textOutput.delete(1.0, END)
textOutput.insert(END, myAstro.GetTwilight())
def DisplayKIC():
textOutput.delete(1.0, END)
textOutput.insert(END, myAstro.GetObjectInfo("KIC 8462852","20:6:15","44:27:25",11))
if __name__ == '__main__':
root = Tk()
menubar = Menu(root)
textOutput = Text(root)
menubar.add_command(label="Current Conditions", command=DisplayCurrentConditions)
menubar.add_command(label="Sun", command=DisplaySun)
menubar.add_command(label="Moon", command=DisplayMoon)
menubar.add_command(label="Venus", command= lambda: DisplayPlanet("Venus"))
menubar.add_command(label="Mars", command= lambda: DisplayPlanet("Mars"))
menubar.add_command(label="Jupiter", command= lambda: DisplayPlanet("Jupiter"))
menubar.add_command(label="Saturn", command= lambda: DisplayPlanet("Saturn"))
menubar.add_command(label="Twilight", command=DisplayTwilight)
menubar.add_command(label="KIC", command=DisplayKIC)
menubar.add_command(label="Exit", command=root.quit)
root.config(menu=menubar)
textOutput.insert(INSERT, "Ready...")
textOutput.pack()
root.mainloop()
| mit |
Oscarbralo/TopBlogCoder | Checkio/MooreNeightbourhood.py | 1 | 1787 | def count_neighbours(grid, row, col):
    """Count the live Moore neighbours (the 8 surrounding cells) of grid[row][col]."""
    neig = 0
    for dr in (-1, 0, 1):
        for dc in (-1, 0, 1):
            if dr == 0 and dc == 0:
                continue  # skip the cell itself
            r, c = row + dr, col + dc
            if 0 <= r < len(grid) and 0 <= c < len(grid[0]) and grid[r][c] == 1:
                neig += 1
    return neig
if __name__ == '__main__':
#These "asserts" using only for self-checking and not necessary for auto-testing
assert count_neighbours(((1, 0, 0, 1, 0),
(0, 1, 0, 0, 0),
(0, 0, 1, 0, 1),
(1, 0, 0, 0, 0),
(0, 0, 1, 0, 0),), 1, 2) == 3, "1st example"
assert count_neighbours(((1, 0, 0, 1, 0),
(0, 1, 0, 0, 0),
(0, 0, 1, 0, 1),
(1, 0, 0, 0, 0),
(0, 0, 1, 0, 0),), 0, 0) == 1, "2nd example"
assert count_neighbours(((1, 1, 1),
(1, 1, 1),
(1, 1, 1),), 0, 2) == 3, "Dense corner"
assert count_neighbours(((0, 0, 0),
(0, 1, 0),
(0, 0, 0),), 1, 1) == 0, "Single"
| mit |
mbr0wn/gnuradio | gr-qtgui/examples/pyqt_time_raster_b.py | 6 | 2126 | #!/usr/bin/env python
#
# Copyright 2012,2013,2015 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
#
from gnuradio import gr
from gnuradio import blocks
import sys
try:
from gnuradio import qtgui
from PyQt5 import QtWidgets, Qt
import sip
except ImportError:
print("Error: Program requires PyQt5 and gr-qtgui.")
sys.exit(1)
class dialog_box(QtWidgets.QWidget):
def __init__(self, display):
QtWidgets.QWidget.__init__(self, None)
self.setWindowTitle('PyQt Test GUI')
self.boxlayout = QtWidgets.QBoxLayout(QtWidgets.QBoxLayout.LeftToRight, self)
self.boxlayout.addWidget(display, 1)
self.resize(800, 500)
class my_top_block(gr.top_block):
def __init__(self):
gr.top_block.__init__(self)
self.qapp = QtWidgets.QApplication(sys.argv)
data0 = 10*[0,] + 40*[1,0] + 10*[0,]
data0 += 10*[0,] + 40*[0,1] + 10*[0,]
data1 = 20*[0,] + [0,0,0,1,1,1,0,0,0,0] + 70*[0,]
# Adjust these to change the layout of the plot.
# Can be set to fractions.
ncols = 100.25
nrows = 100
fs = 200
src0 = blocks.vector_source_b(data0, True)
src1 = blocks.vector_source_b(data1, True)
thr = blocks.throttle(gr.sizeof_char, 50000)
head = blocks.head(gr.sizeof_char, 10000000)
self.snk1 = qtgui.time_raster_sink_b(fs, nrows, ncols, [], [],
"Time Raster Example", 2, None)
self.connect(src0, thr, (self.snk1, 0))
self.connect(src1, (self.snk1, 1))
        # Get the reference pointer to the time raster sink's QWidget
pyQt = self.snk1.pyqwidget()
# Wrap the pointer as a PyQt SIP object
# This can now be manipulated as a PyQt5.QtWidgets.QWidget
pyWin = sip.wrapinstance(pyQt, QtWidgets.QWidget)
self.main_box = dialog_box(pyWin)
self.main_box.show()
if __name__ == "__main__":
tb = my_top_block();
tb.start()
tb.qapp.exec_()
tb.stop()
| gpl-3.0 |
SUNNYANDPJ/MongoAlchemy | mongoalchemy/fields/fields.py | 1 | 25282 | # The MIT License
#
# Copyright (c) 2010 Jeffrey Jenkins
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import print_function
from mongoalchemy.py3compat import *
from mongoalchemy.fields.base import *
class PrimitiveField(Field):
''' Primitive fields are fields where a single constructor can be used
for wrapping and unwrapping an object.'''
valid_modifiers = SCALAR_MODIFIERS
def __init__(self, constructor, **kwargs):
super(PrimitiveField, self).__init__(**kwargs)
self.constructor = constructor
def wrap(self, value):
self.validate_wrap(value)
return self.constructor(value)
def unwrap(self, value, session=None):
self.validate_unwrap(value)
return self.constructor(value)
class StringField(PrimitiveField):
''' Unicode Strings. ``unicode`` is used to wrap and unwrap values,
and any subclass of basestring is an acceptable input'''
def __init__(self, max_length=None, min_length=None, **kwargs):
''' :param max_length: maximum string length
:param min_length: minimum string length
:param kwargs: arguments for :class:`Field`
'''
self.max = max_length
self.min = min_length
super(StringField, self).__init__(constructor=unicode, **kwargs)
def validate_wrap(self, value):
''' Validates the type and length of ``value`` '''
if not isinstance(value, basestring):
self._fail_validation_type(value, basestring)
if self.max is not None and len(value) > self.max:
self._fail_validation(value, 'Value too long (%d)' % len(value))
if self.min is not None and len(value) < self.min:
self._fail_validation(value, 'Value too short (%d)' % len(value))
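# Usage sketch (values assumed): StringField(max_length=5).wrap(u'abc')
# returns u'abc', while wrapping u'abcdef' fails validation because the
# value exceeds max_length.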
class RegExStringField(PrimitiveField):
''' Unicode Strings. ``unicode`` is used to wrap and unwrap values,
and any subclass of basestring is an acceptable input, as long as
it matches the provided regex.'''
def __init__(self, regex, **kwargs):
''' :param regex: instance of :class: `RegexObject` to match against
:param kwargs: arguments for :class:`Field`
'''
self.regex = regex
super(RegExStringField, self).__init__(constructor=unicode, **kwargs)
def validate_wrap(self, value):
''' Validates the type and length of ``value`` '''
if not isinstance(value, basestring):
self._fail_validation_type(value, basestring)
if self.regex.match(value) is None:
self._fail_validation(value, 'Value does not match regular expression')
class BinaryField(PrimitiveField):
def __init__(self, **kwargs):
super(BinaryField, self).__init__(constructor=Binary, **kwargs)
def validate_wrap(self, value):
if not isinstance(value, bytes) and not isinstance(value, Binary):
self._fail_validation_type(value, str, Binary)
class BoolField(PrimitiveField):
''' ``True`` or ``False``.'''
def __init__(self, **kwargs):
super(BoolField, self).__init__(constructor=bool, **kwargs)
def validate_wrap(self, value):
if not isinstance(value, bool):
self._fail_validation_type(value, bool)
class NumberField(PrimitiveField):
''' Base class for numeric fields '''
valid_modifiers = NUMBER_MODIFIERS
def __init__(self, constructor, min_value=None, max_value=None, **kwargs):
''' :param max_value: maximum value
:param min_value: minimum value
:param kwargs: arguments for :class:`Field`
'''
super(NumberField, self).__init__(constructor=constructor, **kwargs)
self.min = min_value
self.max = max_value
def schema_json(self):
super_schema = super(NumberField, self).schema_json()
return dict(min_value=self.min,
max_value=self.max, **super_schema)
def validate_wrap(self, value, *types):
''' Validates the type and value of ``value`` '''
for type in types:
if isinstance(value, type):
break
else:
self._fail_validation_type(value, *types)
if self.min is not None and value < self.min:
self._fail_validation(value, 'Value too small')
if self.max is not None and value > self.max:
self._fail_validation(value, 'Value too large')
class IntField(NumberField):
''' Subclass of :class:`~NumberField` for ``int``'''
def __init__(self, **kwargs):
''' :param max_length: maximum value
:param min_length: minimum value
:param kwargs: arguments for :class:`Field`
'''
super(IntField, self).__init__(constructor=int, **kwargs)
def validate_wrap(self, value):
''' Validates the type and value of ``value`` '''
NumberField.validate_wrap(self, value, int, long)
class FloatField(NumberField):
''' Subclass of :class:`~NumberField` for ``float`` '''
def __init__(self, **kwargs):
''' :param max_value: maximum value
:param min_value: minimum value
:param kwargs: arguments for :class:`Field`
'''
super(FloatField, self).__init__(constructor=float, **kwargs)
def validate_wrap(self, value):
''' Validates the type and value of ``value`` '''
return NumberField.validate_wrap(self, value, float, int)
class DateTimeField(PrimitiveField):
''' Field for datetime objects. '''
has_autoload = True
def __init__(self, min_date=None, max_date=None, use_tz=False, **kwargs):
''' :param max_date: maximum date
:param min_date: minimum date
:param use_tz: Require a timezone-aware datetime (via pytz).
Values are converted to UTC before saving. min and max dates
are currently ignored when use_tz is on. You MUST pass a
timezone into the session
:param kwargs: arguments for :class:`Field`
'''
super(DateTimeField, self).__init__(lambda dt : dt, **kwargs)
self.min = min_date
self.max = max_date
self.use_tz = use_tz
if self.use_tz:
import pytz
self.utc = pytz.utc
assert self.min is None and self.max is None
def schema_json(self):
super_schema = super(DateTimeField, self).schema_json()
return dict(min_date=self.min,
max_date=self.max,
use_tz=self.use_tz, **super_schema)
    def wrap(self, value):
        self.validate_wrap(value)
        # Timezone-aware values are stored as-is; they are converted to UTC
        # when saved, so no branch on use_tz is needed here.
        return self.constructor(value)
def unwrap(self, value, session=None):
self.validate_unwrap(value)
value = self.constructor(value)
if value.tzinfo is not None:
import pytz
value = value.replace(tzinfo=pytz.utc)
if session and session.timezone:
value = value.astimezone(session.timezone)
return value
def localize(self, session, value):
if value is None or not self.use_tz:
return value
return value.astimezone(session.timezone)
def validate_wrap(self, value):
''' Validates the value's type as well as it being in the valid
date range'''
if not isinstance(value, datetime):
self._fail_validation_type(value, datetime)
if self.use_tz and value.tzinfo is None:
self._fail_validation(value, '''datetime is not timezone aware and use_tz is on. make sure timezone is set on the session''')
# if using timezone support it isn't clear how min and max should work,
# so the problem is being punted on for now.
if self.use_tz:
return
# min/max
if self.min is not None and value < self.min:
self._fail_validation(value, 'DateTime too old')
if self.max is not None and value > self.max:
self._fail_validation(value, 'DateTime too new')
class TupleField(Field):
''' Represents a field which is a tuple of a fixed size with specific
types for each element in the field.
**Examples** ``TupleField(IntField(), BoolField())`` would accept
``[19, False]`` as a value for both wrapping and unwrapping. '''
# uses scalar modifiers since it is not variable length
valid_modifiers = SCALAR_MODIFIERS
def __init__(self, *item_types, **kwargs):
''' :param item_types: instances of :class:`Field`, in the order they \
will appear in the tuples.
:param kwargs: arguments for :class:`Field`
'''
super(TupleField, self).__init__(**kwargs)
self.size = len(item_types)
self.types = item_types
def schema_json(self):
super_schema = super(TupleField, self).schema_json()
types = [t.schema_json() for t in self.types]
return dict(types=types, **super_schema)
def set_parent_on_subtypes(self, parent):
for type in self.types:
type._set_parent(parent)
def validate_wrap(self, value):
''' Checks that the correct number of elements are in ``value`` and that
        each element validates against the associated Field class
'''
if not isinstance(value, list) and not isinstance(value, tuple):
self._fail_validation_type(value, tuple, list)
for field, value in izip(self.types, list(value)):
field.validate_wrap(value)
def validate_unwrap(self, value):
''' Checks that the correct number of elements are in ``value`` and that
        each element validates against the associated Field class
'''
if not isinstance(value, list) and not isinstance(value, tuple):
self._fail_validation_type(value, tuple, list)
for field, value in izip(self.types, value):
field.validate_unwrap(value)
def wrap(self, value):
''' Validate and then wrap ``value`` for insertion.
:param value: the tuple (or list) to wrap
'''
self.validate_wrap(value)
ret = []
for field, value in izip(self.types, value):
ret.append(field.wrap(value))
return ret
def unwrap(self, value, session=None):
''' Validate and then unwrap ``value`` for object creation.
:param value: list returned from the database.
'''
self.validate_unwrap(value)
ret = []
for field, value in izip(self.types, value):
ret.append(field.unwrap(value, session=session))
return tuple(ret)
class GeoField(TupleField):
def __init__(self, **kwargs):
''' :param item_types: instances of :class:`Field`, in the order they \
will appear in the tuples.
:param kwargs: arguments for :class:`Field`
'''
super(GeoField, self).__init__(FloatField(), FloatField(), **kwargs)
def schema_json(self):
super_schema = super(GeoField, self).schema_json()
return dict(**super_schema)
class EnumField(Field):
''' Represents a single value out of a list of possible values, all
of the same type. == is used for comparison
**Example**: ``EnumField(IntField(), 4, 6, 7)`` would accept anything
in ``(4, 6, 7)`` as a value. It would not accept ``5``.
'''
valid_modifiers = SCALAR_MODIFIERS
def __init__(self, item_type, *values, **kwargs):
''' :param item_type: Instance of :class:`Field` to use for validation, and (un)wrapping
:param values: Possible values. ``item_type.is_valid_wrap(value)`` should be ``True``
'''
super(EnumField, self).__init__(**kwargs)
self.item_type = item_type
self.values = values
# Jan 22, 2011: Commenting this out. We already check that the value
# is the right type, and that it is equal to one of the enum values.
# If those are true, the enum values are the right type. If we do it
# now it causes validation issues in some cases with the
# string-reference document fields
#
# for value in values:
# self.item_type.validate_wrap(value)
def schema_json(self):
super_schema = super(EnumField, self).schema_json()
return dict(item_type=self.item_type.schema_json(),
values=[self.item_type.wrap(v) for v in self.values],
**super_schema)
def set_parent_on_subtypes(self, parent):
self.item_type._set_parent(parent)
def validate_wrap(self, value):
''' Checks that value is valid for `EnumField.item_type` and that
value is one of the values specified when the EnumField was
constructed '''
self.item_type.validate_wrap(value)
if value not in self.values:
self._fail_validation(value, 'Value was not in the enum values')
def validate_unwrap(self, value):
''' Checks that value is valid for `EnumField.item_type`.
.. note ::
Since checking the value itself is not possible until is is
actually unwrapped, that check is done in :func:`EnumField.unwrap`'''
self.item_type.validate_unwrap(value)
def wrap(self, value):
''' Validate and wrap value using the wrapping function from
``EnumField.item_type``
'''
self.validate_wrap(value)
return self.item_type.wrap(value)
def unwrap(self, value, session=None):
''' Unwrap value using the unwrap function from ``EnumField.item_type``.
Since unwrap validation could not happen in is_valid_wrap, it
happens in this function.'''
self.validate_unwrap(value)
value = self.item_type.unwrap(value, session=session)
for val in self.values:
if val == value:
return val
self._fail_validation(value, 'Value was not in the enum values')
class AnythingField(Field):
''' A field that passes through whatever is set with no validation. Useful
for free-form objects '''
valid_modifiers = ANY_MODIFIER
def schema_json(self):
return super(AnythingField, self).schema_json()
def wrap(self, value):
''' Always returns the value passed in'''
return value
def unwrap(self, value, session=None):
''' Always returns the value passed in'''
return value
def validate_unwrap(self, value):
''' Always passes'''
pass
def validate_wrap(self, value):
''' Always passes'''
pass
class ObjectIdField(Field):
    ''' pymongo ObjectId field. Both ObjectId instances and string/bytes
    representations of one are accepted. '''
valid_modifiers = SCALAR_MODIFIERS
def __init__(self, session=None, auto=False, **kwargs):
        self.auto = auto  # remembered so schema_json() can report it
        if auto:
            kwargs['default_f'] = lambda: ObjectId()
        super(ObjectIdField, self).__init__(**kwargs)
def schema_json(self):
super_schema = super(ObjectIdField, self).schema_json()
        return dict(auto=self.auto, **super_schema)
# def set_default(self, value):
# super(ObjectIdField, self).set_default(value)
# def get_default(self):
# if self.auto:
# self.set_default(ObjectId())
# return super(ObjectIdField, self).get_default()
# default = property(get_default, set_default)
def gen(self):
""" Helper method to create a new ObjectId """
return ObjectId()
def validate_wrap(self, value):
''' Checks that ``value`` is a pymongo ``ObjectId`` or a string
representation of one'''
if (not isinstance(value, ObjectId)
and not isinstance(value, basestring)
and not isinstance(value, bytes)
):
self._fail_validation_type(value, ObjectId)
if isinstance(value, ObjectId):
return
#: bytes
if len(value) == 12:
return
# hex
if len(value) != 24:
self._fail_validation(value, 'hex object ID is the wrong length')
def wrap(self, value, session=None):
''' Validates that ``value`` is an ObjectId (or hex representation
of one), then returns it '''
self.validate_wrap(value)
if isinstance(value, bytes) or isinstance(value, basestring):
return ObjectId(value)
return value
def unwrap(self, value, session=None):
''' Validates that ``value`` is an ObjectId, then returns it '''
self.validate_unwrap(value)
return value
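# Usage sketch (hex string invented): ObjectIdField().wrap('0123456789abcdef01234567')
# returns the corresponding ObjectId; anything other than 24 hex characters,
# 12 bytes, or an ObjectId instance fails validation.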
class ComputedField(Field):
''' A computed field is generated based on an object's other values. It
will generally be created with the @computed_field decorator, but
can be passed an arbitrary function.
The function should take a dict which will contains keys with the names
of the dependencies mapped to their values.
    The computed value is recalculated every time the field is accessed,
    unless the one_time field is set to True.
Example::
>>> class SomeDoc(Document):
... @computed_field
... def last_modified(obj):
... return datetime.datetime.utcnow()
.. warning::
The computed field interacts in an undefined way with partially loaded
documents right now. If using this class watch out for strange behaviour.
'''
valid_modifiers = SCALAR_MODIFIERS
auto = True
def __init__(self,
computed_type,
fun,
one_time=False,
deps=None,
**kwargs):
''' :param fun: the function to compute the value of the computed field
:param computed_type: the type to use when wrapping the computed field
:param deps: the names of fields on the current object which should be \
passed in to compute the value
'''
super(ComputedField, self).__init__(**kwargs)
self.computed_type = computed_type
if deps is None:
deps = set()
self.deps = set(deps)
self.fun = fun
self.one_time = one_time
self.__cached_value = UNSET
def schema_json(self):
super_schema = super(ComputedField, self).schema_json()
return dict(computed_type=self.computed_type.schema_json(),
one_time=self.one_time,
deps=list(self.deps), **super_schema)
def __get__(self, instance, owner):
# class method
if instance is None:
return QueryField(self)
obj_value = instance._values[self._name]
if obj_value.set and self.one_time:
return obj_value.value
computed_value = self.compute_value(instance)
if self.one_time:
self.set_value(instance, computed_value)
return computed_value
def __set__(self, instance, value):
obj_value = instance._values[self._name]
if obj_value.set and self.one_time:
raise BadValueException(self._name, value, 'Cannot set a one-time field once it has been set')
super(ComputedField, self).__set__(instance, value)
def set_parent_on_subtypes(self, parent):
self.computed_type._set_parent(parent)
def dirty_ops(self, instance):
dirty = False
for dep in self.deps:
dep_value = instance._values[dep._name]
if dep_value.dirty:
dirty = True
break
else:
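            # for/else: this branch runs only when the loop finished without
            # a break, i.e. no dependency was dirty; with declared deps that
            # means there is nothing to update.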
if len(self.deps) > 0:
return {}
# make sure we recompute if this is a recompute-on-save
value = getattr(instance, self._name)
return {
self.on_update : {
self._name : self.wrap(value)
}
}
def compute_value(self, doc):
args = {}
for dep in self.deps:
args[dep._name] = getattr(doc, dep._name)
value = self.fun(args)
try:
self.computed_type.validate_wrap(value)
except BadValueException as bve:
self._fail_validation(value, 'Computed Function return a bad value', cause=bve)
return value
def wrap_value(self, value):
        ''' Wrap a value used in a comparison by delegating to the
        computed type's own wrap_value. '''
return self.computed_type.wrap_value(value)
def validate_wrap(self, value):
''' Check that ``value`` is valid for unwrapping with ``ComputedField.computed_type``'''
try:
self.computed_type.validate_wrap(value)
except BadValueException as bve:
self._fail_validation(value, 'Bad value for computed field', cause=bve)
def validate_unwrap(self, value):
''' Check that ``value`` is valid for unwrapping with ``ComputedField.computed_type``'''
try:
self.computed_type.validate_unwrap(value)
except BadValueException as bve:
self._fail_validation(value, 'Bad value for computed field', cause=bve)
def wrap(self, value):
''' Validates ``value`` and wraps it with ``ComputedField.computed_type``'''
self.validate_wrap(value)
return self.computed_type.wrap(value)
def unwrap(self, value, session=None):
''' Validates ``value`` and unwraps it with ``ComputedField.computed_type``'''
self.validate_unwrap(value)
return self.computed_type.unwrap(value, session=session)
class computed_field(object):
def __init__(self, computed_type, deps=None, **kwargs):
self.computed_type = computed_type
self.deps = deps
self.kwargs = kwargs
def __call__(self, fun):
return ComputedField(self.computed_type, fun, deps=self.deps, **self.kwargs)
def CreatedField(name='created', tz_aware=False, **kwargs):
''' A shortcut field for creation time. It sets the current date and time
when it enters the database and then doesn't update on further saves.
If you've used the Django ORM, this is the equivalent of auto_now_add
:param tz_aware: If this is True, the value will be returned in the
local time of the session. It is always saved in UTC
'''
@computed_field(DateTimeField(), one_time=True, **kwargs)
def created(obj):
if tz_aware:
import pytz
return pytz.utc.localize(datetime.utcnow())
return datetime.utcnow()
created.__name__ = name
return created
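# Hypothetical usage (document and field names invented):
#
# class BlogPost(Document):
#     title = StringField()
#     created = CreatedField()  # set once, when first saved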
class ModifiedField(DateTimeField):
''' A shortcut field for modified time. It sets the current date and time
when it enters the database and then updates when the document is
saved or updated
If you've used the Django ORM, this is the equivalent of auto_now
**WARNINGS**: When this field's parent object is sent to the database
its modified time is set. The local copy is not updated for technical
reasons. Hopefully this will not be the case in the future.
:param tz_aware: If this is True, the value will be returned in the
local time of the session. It is always saved in UTC
'''
def __init__(self, tz_aware=False, **kwargs):
if 'use_tz' not in kwargs:
kwargs['use_tz'] = tz_aware
kwargs['default_f'] = lambda: self.__value()
super(ModifiedField, self).__init__(**kwargs)
def __value(self):
if self.use_tz:
import pytz
return pytz.utc.localize(datetime.utcnow())
return datetime.utcnow()
def wrap(self, obj):
value = self.__value()
return value
def __get__(self, instance, owner):
# class method
if instance is None:
return QueryField(self)
obj_value = instance._values[self._name]
if obj_value.set:
return obj_value.value
value = self.__value()
self.set_value(instance, value)
return value
| mit |
Cojacfar/Maker | comm/lib/python2.7/site-packages/django/http/utils.py | 40 | 3501 | """
Functions that modify an HTTP request or response in some way.
"""
# This group of functions are run as part of the response handling, after
# everything else, including all response middleware. Think of them as
# "compulsory response middleware". Be careful about what goes here, because
# it's a little fiddly to override this behavior, so they should be truly
# universally applicable.
def fix_location_header(request, response):
"""
Ensures that we always use an absolute URI in any location header in the
response. This is required by RFC 2616, section 14.30.
Code constructing response objects is free to insert relative paths, as
this function converts them to absolute paths.
"""
if 'Location' in response and request.get_host():
response['Location'] = request.build_absolute_uri(response['Location'])
return response
def conditional_content_removal(request, response):
"""
Removes the content of responses for HEAD requests, 1xx, 204 and 304
responses. Ensures compliance with RFC 2616, section 4.3.
"""
if 100 <= response.status_code < 200 or response.status_code in (204, 304):
if response.streaming:
response.streaming_content = []
else:
response.content = b''
response['Content-Length'] = '0'
if request.method == 'HEAD':
if response.streaming:
response.streaming_content = []
else:
response.content = b''
return response
def fix_IE_for_attach(request, response):
"""
This function will prevent Django from serving a Content-Disposition header
while expecting the browser to cache it (only when the browser is IE). This
leads to IE not allowing the client to download.
"""
useragent = request.META.get('HTTP_USER_AGENT', '').upper()
if 'MSIE' not in useragent and 'CHROMEFRAME' not in useragent:
return response
offending_headers = ('no-cache', 'no-store')
if response.has_header('Content-Disposition'):
try:
del response['Pragma']
except KeyError:
pass
if response.has_header('Cache-Control'):
cache_control_values = [value.strip() for value in
response['Cache-Control'].split(',')
if value.strip().lower() not in offending_headers]
if not len(cache_control_values):
del response['Cache-Control']
else:
response['Cache-Control'] = ', '.join(cache_control_values)
return response
def fix_IE_for_vary(request, response):
"""
This function will fix the bug reported at
http://support.microsoft.com/kb/824847/en-us?spid=8722&sid=global
by clearing the Vary header whenever the mime-type is not safe
enough for Internet Explorer to handle. Poor thing.
"""
useragent = request.META.get('HTTP_USER_AGENT', '').upper()
if 'MSIE' not in useragent and 'CHROMEFRAME' not in useragent:
return response
# These mime-types that are decreed "Vary-safe" for IE:
safe_mime_types = ('text/html', 'text/plain', 'text/sgml')
# The first part of the Content-Type field will be the MIME type,
# everything after ';', such as character-set, can be ignored.
mime_type = response.get('Content-Type', '').partition(';')[0]
if mime_type not in safe_mime_types:
try:
del response['Vary']
except KeyError:
pass
return response
| gpl-2.0 |
feredean/cs313 | notes/6_anagrams_prof.py | 1 | 2070 | def anagrams(phrase, shortest=2):
    return find_anagrams(phrase.replace(' ', ''), '', shortest)
def find_anagrams(letters, previous_word, shortest):
results = set()
for w in find_words(letters):
if len(w) >= shortest and w >= previous_word:
remainder = removed(letters, w)
if remainder:
for rest in find_anagrams(remainder, w, shortest):
results.add(w + ' ' + rest)
            else:
                results.add(w)
return results
def removed(letters, remove):
"Return a str of letters, but with each letter in remove removed once."
for L in remove:
letters = letters.replace(L, '', 1)
return letters
def find_words(letters):
return extend_prefix('', letters, set())
def extend_prefix(pre, letters, results):
if pre in WORDS: results.add(pre)
if pre in PREFIXES:
for L in letters:
extend_prefix(pre+L, letters.replace(L, '', 1), results)
return results
def prefixes(word):
"A list of the initial sequences of a word, not including the complete word."
return [word[:i] for i in range(len(word))]
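# e.g. prefixes('WORD') -> ['', 'W', 'WO', 'WOR']: the empty string is
# included (i == 0) and the complete word is excluded.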
def readwordlist(filename):
"Return a pair of sets: all the words in a file, and all the prefixes. (Uppercased.)"
wordset = set(open(filename).read().upper().split())
prefixset = set(p for word in wordset for p in prefixes(word))
return wordset, prefixset
WORDS, PREFIXES = readwordlist('words4k.txt')
def test():
assert 'DOCTOR WHO' in anagrams('ADMINISTRATION')
assert 'BOOK SEC TRY' in anagrams('OCTOBER SKY')
assert 'SEE THEY' in anagrams('THE EYES')
assert 'LIVES' in anagrams('ELVIS')
assert anagrams('PYTHONIC') == set([
'NTH PIC YO', 'NTH OY PIC', 'ON PIC THY', 'NO PIC THY', 'COY IN PHT',
'ICY NO PHT', 'ICY ON PHT', 'ICY NTH OP', 'COP IN THY', 'HYP ON TIC',
'CON PI THY', 'HYP NO TIC', 'COY NTH PI', 'CON HYP IT', 'COT HYP IN',
'CON HYP TI'])
return 'tests pass'
print anagrams('HOHO')
| mit |
tophatmonocle/django-tastypie | tests/core/tests/authentication.py | 4 | 8542 | import base64
import time
import warnings
from django.contrib.auth.models import User
from django.core import mail
from django.http import HttpRequest
from django.test import TestCase
from tastypie.authentication import Authentication, BasicAuthentication, ApiKeyAuthentication, DigestAuthentication, OAuthAuthentication
from tastypie.http import HttpUnauthorized
from tastypie.models import ApiKey, create_api_key
# Be tricky.
from tastypie.authentication import python_digest, oauth2, oauth_provider
if python_digest is None:
warnings.warn("Running tests without python_digest! Bad news!")
if oauth2 is None:
warnings.warn("Running tests without oauth2! Bad news!")
if oauth_provider is None:
warnings.warn("Running tests without oauth_provider! Bad news!")
class AuthenticationTestCase(TestCase):
def test_is_authenticated(self):
auth = Authentication()
request = HttpRequest()
# Doesn't matter. Always true.
self.assertTrue(auth.is_authenticated(None))
self.assertTrue(auth.is_authenticated(request))
def test_get_identifier(self):
auth = Authentication()
request = HttpRequest()
self.assertEqual(auth.get_identifier(request), 'noaddr_nohost')
request = HttpRequest()
request.META['REMOTE_ADDR'] = '127.0.0.1'
request.META['REMOTE_HOST'] = 'nebula.local'
self.assertEqual(auth.get_identifier(request), '127.0.0.1_nebula.local')
class BasicAuthenticationTestCase(TestCase):
fixtures = ['note_testdata.json']
def test_is_authenticated(self):
auth = BasicAuthentication()
request = HttpRequest()
# No HTTP Basic auth details should fail.
self.assertEqual(isinstance(auth.is_authenticated(request), HttpUnauthorized), True)
# HttpUnauthorized with auth type and realm
self.assertEqual(auth.is_authenticated(request)['WWW-Authenticate'], 'Basic Realm="django-tastypie"')
# Wrong basic auth details.
request.META['HTTP_AUTHORIZATION'] = 'abcdefg'
self.assertEqual(isinstance(auth.is_authenticated(request), HttpUnauthorized), True)
# No password.
request.META['HTTP_AUTHORIZATION'] = base64.b64encode('daniel')
self.assertEqual(isinstance(auth.is_authenticated(request), HttpUnauthorized), True)
# Wrong user/password.
request.META['HTTP_AUTHORIZATION'] = base64.b64encode('daniel:pass')
self.assertEqual(isinstance(auth.is_authenticated(request), HttpUnauthorized), True)
# Correct user/password.
john_doe = User.objects.get(username='johndoe')
john_doe.set_password('pass')
john_doe.save()
request.META['HTTP_AUTHORIZATION'] = 'Basic %s' % base64.b64encode('johndoe:pass')
self.assertEqual(auth.is_authenticated(request), True)
# Regression: Password with colon.
john_doe = User.objects.get(username='johndoe')
john_doe.set_password('pass:word')
john_doe.save()
request.META['HTTP_AUTHORIZATION'] = 'Basic %s' % base64.b64encode('johndoe:pass:word')
self.assertEqual(auth.is_authenticated(request), True)
class ApiKeyAuthenticationTestCase(TestCase):
fixtures = ['note_testdata.json']
def setUp(self):
super(ApiKeyAuthenticationTestCase, self).setUp()
ApiKey.objects.all().delete()
def test_is_authenticated(self):
auth = ApiKeyAuthentication()
request = HttpRequest()
# Simulate sending the signal.
john_doe = User.objects.get(username='johndoe')
create_api_key(User, instance=john_doe, created=True)
# No username/api_key details should fail.
self.assertEqual(isinstance(auth.is_authenticated(request), HttpUnauthorized), True)
# Wrong username details.
request.GET['username'] = 'foo'
self.assertEqual(isinstance(auth.is_authenticated(request), HttpUnauthorized), True)
# No api_key.
request.GET['username'] = 'daniel'
self.assertEqual(isinstance(auth.is_authenticated(request), HttpUnauthorized), True)
# Wrong user/api_key.
request.GET['username'] = 'daniel'
request.GET['api_key'] = 'foo'
self.assertEqual(isinstance(auth.is_authenticated(request), HttpUnauthorized), True)
# Correct user/api_key.
john_doe = User.objects.get(username='johndoe')
request.GET['username'] = 'johndoe'
request.GET['api_key'] = john_doe.api_key.key
self.assertEqual(auth.is_authenticated(request), True)
class DigestAuthenticationTestCase(TestCase):
fixtures = ['note_testdata.json']
def setUp(self):
super(DigestAuthenticationTestCase, self).setUp()
ApiKey.objects.all().delete()
def test_is_authenticated(self):
auth = DigestAuthentication()
request = HttpRequest()
# Simulate sending the signal.
john_doe = User.objects.get(username='johndoe')
create_api_key(User, instance=john_doe, created=True)
# No HTTP Basic auth details should fail.
auth_request = auth.is_authenticated(request)
self.assertEqual(isinstance(auth_request, HttpUnauthorized), True)
# HttpUnauthorized with auth type and realm
self.assertEqual(auth_request['WWW-Authenticate'].find('Digest'), 0)
self.assertEqual(auth_request['WWW-Authenticate'].find(' realm="django-tastypie"') > 0, True)
self.assertEqual(auth_request['WWW-Authenticate'].find(' opaque=') > 0, True)
self.assertEqual(auth_request['WWW-Authenticate'].find('nonce=') > 0, True)
# Wrong basic auth details.
request.META['HTTP_AUTHORIZATION'] = 'abcdefg'
auth_request = auth.is_authenticated(request)
self.assertEqual(isinstance(auth_request, HttpUnauthorized), True)
# No password.
request.META['HTTP_AUTHORIZATION'] = base64.b64encode('daniel')
auth_request = auth.is_authenticated(request)
self.assertEqual(isinstance(auth_request, HttpUnauthorized), True)
# Wrong user/password.
request.META['HTTP_AUTHORIZATION'] = base64.b64encode('daniel:pass')
auth_request = auth.is_authenticated(request)
self.assertEqual(isinstance(auth_request, HttpUnauthorized), True)
# Correct user/password.
john_doe = User.objects.get(username='johndoe')
request.META['HTTP_AUTHORIZATION'] = python_digest.build_authorization_request(
john_doe.username,
request.method,
'/', # uri
1, # nonce_count
digest_challenge=auth_request['WWW-Authenticate'],
password=john_doe.api_key.key
)
auth_request = auth.is_authenticated(request)
self.assertEqual(auth_request, True)
class OAuthAuthenticationTestCase(TestCase):
fixtures = ['note_testdata.json']
def test_is_authenticated(self):
from oauth_provider.models import Consumer, Token, Resource
auth = OAuthAuthentication()
request = HttpRequest()
request.META['SERVER_NAME'] = 'testsuite'
request.META['SERVER_PORT'] = '8080'
request.REQUEST = request.GET = {}
request.method = "GET"
# Invalid request.
resp = auth.is_authenticated(request)
self.assertEqual(resp.status_code, 401)
# No username/api_key details should fail.
request.REQUEST = request.GET = {
'oauth_consumer_key': '123',
'oauth_nonce': 'abc',
'oauth_signature': '&',
'oauth_signature_method': 'PLAINTEXT',
'oauth_timestamp': str(int(time.time())),
'oauth_token': 'foo',
}
user = User.objects.create_user('daniel', '[email protected]', 'password')
request.META['Authorization'] = 'OAuth ' + ','.join([key+'='+value for key, value in request.REQUEST.items()])
resource, _ = Resource.objects.get_or_create(url='test', defaults={
'name': 'Test Resource'
})
consumer, _ = Consumer.objects.get_or_create(key='123', defaults={
'name': 'Test',
'description': 'Testing...'
})
token, _ = Token.objects.get_or_create(key='foo', token_type=Token.ACCESS, defaults={
'consumer': consumer,
'resource': resource,
'secret': '',
'user': user,
})
resp = auth.is_authenticated(request)
self.assertEqual(resp, True)
self.assertEqual(request.user.pk, user.pk)
| bsd-3-clause |
shoyer/numpy | tools/swig/test/testVector.py | 5 | 15010 | #! /usr/bin/env python
from __future__ import division, absolute_import, print_function
# System imports
from distutils.util import get_platform
import os
import sys
import unittest
# Import NumPy
import numpy as np
major, minor = [ int(d) for d in np.__version__.split(".")[:2] ]
if major == 0: BadListError = TypeError
else: BadListError = ValueError
import Vector
######################################################################
class VectorTestCase(unittest.TestCase):
def __init__(self, methodName="runTest"):
unittest.TestCase.__init__(self, methodName)
self.typeStr = "double"
self.typeCode = "d"
# Test the (type IN_ARRAY1[ANY]) typemap
def testLength(self):
"Test length function"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
length = Vector.__dict__[self.typeStr + "Length"]
self.assertEqual(length([5, 12, 0]), 13)
# Test the (type IN_ARRAY1[ANY]) typemap
def testLengthBadList(self):
"Test length function with bad list"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
length = Vector.__dict__[self.typeStr + "Length"]
self.assertRaises(BadListError, length, [5, "twelve", 0])
# Test the (type IN_ARRAY1[ANY]) typemap
def testLengthWrongSize(self):
"Test length function with wrong size"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
length = Vector.__dict__[self.typeStr + "Length"]
self.assertRaises(TypeError, length, [5, 12])
# Test the (type IN_ARRAY1[ANY]) typemap
def testLengthWrongDim(self):
"Test length function with wrong dimensions"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
length = Vector.__dict__[self.typeStr + "Length"]
self.assertRaises(TypeError, length, [[1, 2], [3, 4]])
# Test the (type IN_ARRAY1[ANY]) typemap
def testLengthNonContainer(self):
"Test length function with non-container"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
length = Vector.__dict__[self.typeStr + "Length"]
self.assertRaises(TypeError, length, None)
# Test the (type* IN_ARRAY1, int DIM1) typemap
def testProd(self):
"Test prod function"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
prod = Vector.__dict__[self.typeStr + "Prod"]
self.assertEqual(prod([1, 2, 3, 4]), 24)
# Test the (type* IN_ARRAY1, int DIM1) typemap
def testProdBadList(self):
"Test prod function with bad list"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
prod = Vector.__dict__[self.typeStr + "Prod"]
self.assertRaises(BadListError, prod, [[1, "two"], ["e", "pi"]])
# Test the (type* IN_ARRAY1, int DIM1) typemap
def testProdWrongDim(self):
"Test prod function with wrong dimensions"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
prod = Vector.__dict__[self.typeStr + "Prod"]
self.assertRaises(TypeError, prod, [[1, 2], [8, 9]])
# Test the (type* IN_ARRAY1, int DIM1) typemap
def testProdNonContainer(self):
"Test prod function with non-container"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
prod = Vector.__dict__[self.typeStr + "Prod"]
self.assertRaises(TypeError, prod, None)
# Test the (int DIM1, type* IN_ARRAY1) typemap
def testSum(self):
"Test sum function"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
sum = Vector.__dict__[self.typeStr + "Sum"]
self.assertEqual(sum([5, 6, 7, 8]), 26)
# Test the (int DIM1, type* IN_ARRAY1) typemap
def testSumBadList(self):
"Test sum function with bad list"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
sum = Vector.__dict__[self.typeStr + "Sum"]
self.assertRaises(BadListError, sum, [3, 4, 5, "pi"])
# Test the (int DIM1, type* IN_ARRAY1) typemap
def testSumWrongDim(self):
"Test sum function with wrong dimensions"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
sum = Vector.__dict__[self.typeStr + "Sum"]
self.assertRaises(TypeError, sum, [[3, 4], [5, 6]])
# Test the (int DIM1, type* IN_ARRAY1) typemap
def testSumNonContainer(self):
"Test sum function with non-container"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
sum = Vector.__dict__[self.typeStr + "Sum"]
self.assertRaises(TypeError, sum, True)
# Test the (type INPLACE_ARRAY1[ANY]) typemap
def testReverse(self):
"Test reverse function"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
reverse = Vector.__dict__[self.typeStr + "Reverse"]
vector = np.array([1, 2, 4], self.typeCode)
reverse(vector)
self.assertEqual((vector == [4, 2, 1]).all(), True)
# Test the (type INPLACE_ARRAY1[ANY]) typemap
def testReverseWrongDim(self):
"Test reverse function with wrong dimensions"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
reverse = Vector.__dict__[self.typeStr + "Reverse"]
vector = np.array([[1, 2], [3, 4]], self.typeCode)
self.assertRaises(TypeError, reverse, vector)
# Test the (type INPLACE_ARRAY1[ANY]) typemap
def testReverseWrongSize(self):
"Test reverse function with wrong size"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
reverse = Vector.__dict__[self.typeStr + "Reverse"]
vector = np.array([9, 8, 7, 6, 5, 4], self.typeCode)
self.assertRaises(TypeError, reverse, vector)
# Test the (type INPLACE_ARRAY1[ANY]) typemap
def testReverseWrongType(self):
"Test reverse function with wrong type"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
reverse = Vector.__dict__[self.typeStr + "Reverse"]
vector = np.array([1, 2, 4], 'c')
self.assertRaises(TypeError, reverse, vector)
# Test the (type INPLACE_ARRAY1[ANY]) typemap
def testReverseNonArray(self):
"Test reverse function with non-array"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
reverse = Vector.__dict__[self.typeStr + "Reverse"]
self.assertRaises(TypeError, reverse, [2, 4, 6])
# Test the (type* INPLACE_ARRAY1, int DIM1) typemap
def testOnes(self):
"Test ones function"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
ones = Vector.__dict__[self.typeStr + "Ones"]
vector = np.zeros(5, self.typeCode)
ones(vector)
np.testing.assert_array_equal(vector, np.array([1, 1, 1, 1, 1]))
# Test the (type* INPLACE_ARRAY1, int DIM1) typemap
def testOnesWrongDim(self):
"Test ones function with wrong dimensions"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
ones = Vector.__dict__[self.typeStr + "Ones"]
vector = np.zeros((5, 5), self.typeCode)
self.assertRaises(TypeError, ones, vector)
# Test the (type* INPLACE_ARRAY1, int DIM1) typemap
def testOnesWrongType(self):
"Test ones function with wrong type"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
ones = Vector.__dict__[self.typeStr + "Ones"]
vector = np.zeros((5, 5), 'c')
self.assertRaises(TypeError, ones, vector)
# Test the (type* INPLACE_ARRAY1, int DIM1) typemap
def testOnesNonArray(self):
"Test ones function with non-array"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
ones = Vector.__dict__[self.typeStr + "Ones"]
self.assertRaises(TypeError, ones, [2, 4, 6, 8])
# Test the (int DIM1, type* INPLACE_ARRAY1) typemap
def testZeros(self):
"Test zeros function"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
zeros = Vector.__dict__[self.typeStr + "Zeros"]
vector = np.ones(5, self.typeCode)
zeros(vector)
np.testing.assert_array_equal(vector, np.array([0, 0, 0, 0, 0]))
# Test the (int DIM1, type* INPLACE_ARRAY1) typemap
def testZerosWrongDim(self):
"Test zeros function with wrong dimensions"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
zeros = Vector.__dict__[self.typeStr + "Zeros"]
vector = np.ones((5, 5), self.typeCode)
self.assertRaises(TypeError, zeros, vector)
# Test the (int DIM1, type* INPLACE_ARRAY1) typemap
def testZerosWrongType(self):
"Test zeros function with wrong type"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
zeros = Vector.__dict__[self.typeStr + "Zeros"]
vector = np.ones(6, 'c')
self.assertRaises(TypeError, zeros, vector)
# Test the (int DIM1, type* INPLACE_ARRAY1) typemap
def testZerosNonArray(self):
"Test zeros function with non-array"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
zeros = Vector.__dict__[self.typeStr + "Zeros"]
self.assertRaises(TypeError, zeros, [1, 3, 5, 7, 9])
# Test the (type ARGOUT_ARRAY1[ANY]) typemap
def testEOSplit(self):
"Test eoSplit function"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
eoSplit = Vector.__dict__[self.typeStr + "EOSplit"]
even, odd = eoSplit([1, 2, 3])
self.assertEqual((even == [1, 0, 3]).all(), True)
self.assertEqual((odd == [0, 2, 0]).all(), True)
# Test the (type* ARGOUT_ARRAY1, int DIM1) typemap
def testTwos(self):
"Test twos function"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
twos = Vector.__dict__[self.typeStr + "Twos"]
vector = twos(5)
self.assertEqual((vector == [2, 2, 2, 2, 2]).all(), True)
# Test the (type* ARGOUT_ARRAY1, int DIM1) typemap
def testTwosNonInt(self):
"Test twos function with non-integer dimension"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
twos = Vector.__dict__[self.typeStr + "Twos"]
self.assertRaises(TypeError, twos, 5.0)
# Test the (int DIM1, type* ARGOUT_ARRAY1) typemap
def testThrees(self):
"Test threes function"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
threes = Vector.__dict__[self.typeStr + "Threes"]
vector = threes(6)
self.assertEqual((vector == [3, 3, 3, 3, 3, 3]).all(), True)
    # Test the (int DIM1, type* ARGOUT_ARRAY1) typemap
def testThreesNonInt(self):
"Test threes function with non-integer dimension"
print(self.typeStr, "... ", end=' ', file=sys.stderr)
threes = Vector.__dict__[self.typeStr + "Threes"]
self.assertRaises(TypeError, threes, "threes")
######################################################################
class scharTestCase(VectorTestCase):
def __init__(self, methodName="runTest"):
VectorTestCase.__init__(self, methodName)
self.typeStr = "schar"
self.typeCode = "b"
######################################################################
class ucharTestCase(VectorTestCase):
def __init__(self, methodName="runTest"):
VectorTestCase.__init__(self, methodName)
self.typeStr = "uchar"
self.typeCode = "B"
######################################################################
class shortTestCase(VectorTestCase):
def __init__(self, methodName="runTest"):
VectorTestCase.__init__(self, methodName)
self.typeStr = "short"
self.typeCode = "h"
######################################################################
class ushortTestCase(VectorTestCase):
def __init__(self, methodName="runTest"):
VectorTestCase.__init__(self, methodName)
self.typeStr = "ushort"
self.typeCode = "H"
######################################################################
class intTestCase(VectorTestCase):
def __init__(self, methodName="runTest"):
VectorTestCase.__init__(self, methodName)
self.typeStr = "int"
self.typeCode = "i"
######################################################################
class uintTestCase(VectorTestCase):
def __init__(self, methodName="runTest"):
VectorTestCase.__init__(self, methodName)
self.typeStr = "uint"
self.typeCode = "I"
######################################################################
class longTestCase(VectorTestCase):
def __init__(self, methodName="runTest"):
VectorTestCase.__init__(self, methodName)
self.typeStr = "long"
self.typeCode = "l"
######################################################################
class ulongTestCase(VectorTestCase):
def __init__(self, methodName="runTest"):
VectorTestCase.__init__(self, methodName)
self.typeStr = "ulong"
self.typeCode = "L"
######################################################################
class longLongTestCase(VectorTestCase):
def __init__(self, methodName="runTest"):
VectorTestCase.__init__(self, methodName)
self.typeStr = "longLong"
self.typeCode = "q"
######################################################################
class ulongLongTestCase(VectorTestCase):
def __init__(self, methodName="runTest"):
VectorTestCase.__init__(self, methodName)
self.typeStr = "ulongLong"
self.typeCode = "Q"
######################################################################
class floatTestCase(VectorTestCase):
def __init__(self, methodName="runTest"):
VectorTestCase.__init__(self, methodName)
self.typeStr = "float"
self.typeCode = "f"
######################################################################
class doubleTestCase(VectorTestCase):
def __init__(self, methodName="runTest"):
VectorTestCase.__init__(self, methodName)
self.typeStr = "double"
self.typeCode = "d"
######################################################################
if __name__ == "__main__":
# Build the test suite
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite( scharTestCase))
suite.addTest(unittest.makeSuite( ucharTestCase))
suite.addTest(unittest.makeSuite( shortTestCase))
suite.addTest(unittest.makeSuite( ushortTestCase))
suite.addTest(unittest.makeSuite( intTestCase))
suite.addTest(unittest.makeSuite( uintTestCase))
suite.addTest(unittest.makeSuite( longTestCase))
suite.addTest(unittest.makeSuite( ulongTestCase))
suite.addTest(unittest.makeSuite( longLongTestCase))
suite.addTest(unittest.makeSuite(ulongLongTestCase))
suite.addTest(unittest.makeSuite( floatTestCase))
suite.addTest(unittest.makeSuite( doubleTestCase))
# Execute the test suite
print("Testing 1D Functions of Module Vector")
print("NumPy version", np.__version__)
print()
result = unittest.TextTestRunner(verbosity=2).run(suite)
sys.exit(bool(result.errors + result.failures))
| bsd-3-clause |
elssar/storm | filter_clause.py | 1 | 1640 | import datetime
class FilterClause(object):
FILTER_MAP = {
"eq": "=",
"neq": "!=",
"lt": "<",
"lte": "<=",
"gt": ">",
"gte": ">=",
}
def __init__(self, key=None, value=None):
self.key = key
self.value = value
def __and__(self, obj):
if isinstance(obj, FilterClause):
return "{0} AND {1}".format(self.to_query(), obj.to_query())
elif isinstance(obj, (str, unicode)):
return "{0} AND {1}".format(self.to_query(), obj)
raise TypeError
def __or__(self, obj):
if isinstance(obj, FilterClause):
return "{0} OR {1}".format(self.to_query(), obj.to_query())
elif isinstance(obj, (str, unicode)):
return "{0} OR {1}".format(self.to_query(), obj)
raise TypeError
    def _clean_value(self, value):
        # Quote strings and date/time values; numeric values pass through bare.
        if isinstance(value, (str, unicode,
                              datetime.datetime, datetime.date, datetime.time)):
            return "\"{0}\"".format(value)
        elif isinstance(value, (int, float)):
            return value
def to_query(self):
filter_key = self.key.split("__")
if len(filter_key) > 1:
filter_operator = self.FILTER_MAP.get(filter_key[-1])
if not filter_operator:
filter_operator = "="
else:
filter_key = filter_key[:-1]
else:
filter_operator = "="
return u'{}{}{}'.format(filter_key[0], filter_operator,
self._clean_value(self.value))
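# Usage sketch -- not part of the original module; it illustrates the
# Django-style "field__operator" key convention handled by to_query above.
if __name__ == "__main__":
    clause = FilterClause("age__gte", 18) & FilterClause("name", "bob")
    print(clause)  # age>=18 AND name="bob"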
| mit |
Alwnikrotikz/cortex-vfx | test/IECore/PolygonAlgoTest.py | 12 | 4478 | ##########################################################################
#
# Copyright (c) 2008-2009, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import unittest
from IECore import *
import math
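# polygonNormal follows the right-hand rule (counter-clockwise vertices in the
# XY plane give a +Z normal); in 3D, polygonWinding is judged relative to the
# supplied view/normal vector, as the cases below demonstrate.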
class PolygonAlgoTest( unittest.TestCase ) :
def testNormal( self ) :
p = V3fVectorData( [
V3f( 0, 0, 0 ),
V3f( 1, 0, 0 ),
V3f( 1, 1, 0 ),
V3f( 0, 1, 0 )
] )
self.assertEqual( polygonNormal( p ), V3f( 0, 0, 1 ) )
p = V3fVectorData( [
V3f( 0, 0, 0 ),
V3f( 0, 1, 0 ),
V3f( 1, 1, 0 ),
V3f( 1, 0, 0 ),
] )
self.assertEqual( polygonNormal( p ), V3f( 0, 0, -1 ) )
def testConcaveNormal( self ) :
p = V3fVectorData( [
V3f( 0, 0, 0 ),
V3f( 1, -1, 0 ),
V3f( 0.2, 0, 0 ),
V3f( 1, 1, 0 ),
] )
self.assertEqual( polygonNormal( p ), V3f( 0, 0, 1 ) )
p = V3fVectorData( [
V3f( 0, 0, 0 ),
V3f( 1, 1, 0 ),
V3f( 0.2, 0, 0 ),
V3f( 1, -1, 0 ),
] )
self.assertEqual( polygonNormal( p ), V3f( 0, 0, -1 ) )
def testWinding2D( self ) :
p = V2fVectorData( [
V2f( 0, 0 ),
V2f( 1, 0 ),
V2f( 1, 1 ),
V2f( 0, 1 ),
] )
self.assertEqual( polygonWinding( p ), Winding.CounterClockwise )
self.assertNotEqual( polygonWinding( p ), Winding.Clockwise )
p = V2fVectorData( [
V2f( 0, 0 ),
V2f( 0, 1 ),
V2f( 1, 1 ),
V2f( 1, 0 ),
] )
self.assertNotEqual( polygonWinding( p ), Winding.CounterClockwise )
self.assertEqual( polygonWinding( p ), Winding.Clockwise )
def testWinding3D( self ) :
p = V3fVectorData( [
V3f( 0, 0, 0 ),
V3f( 1, 0, 0 ),
V3f( 1, 1, 0 ),
V3f( 0, 1, 0 ),
] )
self.assertEqual( polygonWinding( p, V3f( 0, 0, -1 ) ), Winding.CounterClockwise )
self.assertNotEqual( polygonWinding( p, V3f( 0, 0, -1 ) ), Winding.Clockwise )
self.assertEqual( polygonWinding( p, V3f( 0, 0, 1 ) ), Winding.Clockwise )
self.assertNotEqual( polygonWinding( p, V3f( 0, 0, 1 ) ), Winding.CounterClockwise )
p = V3fVectorData( [
V3f( 0, 0, 0 ),
V3f( 0, 1, 0 ),
V3f( 1, 1, 0 ),
V3f( 1, 0, 0 ),
] )
self.assertNotEqual( polygonWinding( p, V3f( 0, 0, -1 ) ), Winding.CounterClockwise )
self.assertEqual( polygonWinding( p, V3f( 0, 0, -1 ) ), Winding.Clockwise )
self.assertEqual( polygonWinding( p, V3f( 0, 0, 1 ) ), Winding.CounterClockwise )
self.assertNotEqual( polygonWinding( p, V3f( 0, 0, 1 ) ), Winding.Clockwise )
def testBound( self ) :
p = V3fVectorData( [
V3f( 0, 0, 0 ),
V3f( 1, 0, 0 ),
V3f( 1, 1, 0 ),
V3f( 0, 1, 0 ),
] )
self.assertEqual( polygonBound( p ), Box3f( V3f( 0 ), V3f( 1, 1, 0 ) ) )
def testArea3D( self ) :
r = Rand32()
for i in range( 0, 1000 ) :
p = V3fVectorData( [ r.nextV3f(), r.nextV3f(), r.nextV3f() ] )
self.assertAlmostEqual( polygonArea( p ), triangleArea( p[0], p[1], p[2] ), 4 )
if __name__ == "__main__":
unittest.main()
| bsd-3-clause |
umlfri/umlfri2 | umlfri2/metamodel/projecttemplate/diagram.py | 1 | 1079 | from enum import Enum, unique
from .checkdata import check_any
@unique
class DiagramTemplateState(Enum):
closed = 1
opened = 2
locked = 3
class DiagramTemplate:
def __init__(self, type, data, elements, connections, parent_id, state=DiagramTemplateState.closed):
self.__type = type
self.__data = data
self.__elements = elements
self.__connections = connections
self.__parent_id = parent_id
self.__state = state
@property
def type(self):
return self.__type
@property
def data(self):
return self.__data
@property
def elements(self):
yield from self.__elements
@property
def connections(self):
yield from self.__connections
@property
def parent_id(self):
return self.__parent_id
@property
def state(self):
return self.__state
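    # Note: _compile mutates the template in place -- the string type id is
    # resolved to the metamodel's diagram type object and the raw data is
    # validated by check_any.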
def _compile(self, metamodel):
self.__type = metamodel.get_diagram_type(self.__type)
self.__data = check_any(self.__type.ufl_type, self.__data)
| gpl-3.0 |
danielharbor/openerp | openerp/report/print_xml.py | 338 | 11063 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from lxml import etree
import openerp
import openerp.tools as tools
from openerp.tools.safe_eval import safe_eval
import print_fnc
from openerp.osv.orm import BaseModel
class InheritDict(dict):
    # Might be useful when we're doing name lookup for call or eval.
def __init__(self, parent=None):
self.parent = parent
def __getitem__(self, name):
if name in self:
return super(InheritDict, self).__getitem__(name)
else:
if not self.parent:
raise KeyError
else:
return self.parent[name]
def tounicode(val):
if isinstance(val, str):
unicode_val = unicode(val, 'utf-8')
elif isinstance(val, unicode):
unicode_val = val
else:
unicode_val = unicode(val)
return unicode_val
class document(object):
def __init__(self, cr, uid, datas, func=False):
# create a new document
self.cr = cr
self.pool = openerp.registry(cr.dbname)
self.func = func or {}
self.datas = datas
self.uid = uid
self.bin_datas = {}
def node_attrs_get(self, node):
if len(node.attrib):
return node.attrib
return {}
def get_value(self, browser, field_path):
fields = field_path.split('.')
if not len(fields):
return ''
value = browser
for f in fields:
if isinstance(value, (BaseModel, list)):
if not value:
return ''
value = value[0]
value = value[f]
return value or ''
def get_value2(self, browser, field_path):
value = self.get_value(browser, field_path)
if isinstance(value, BaseModel):
return value.id
else:
return value
def eval(self, record, expr):
#TODO: support remote variables (eg address.title) in expr
# how to do that: parse the string, find dots, replace those dotted variables by temporary
# "simple ones", fetch the value of those variables and add them (temporarily) to the _data
# dictionary passed to eval
#FIXME: it wont work if the data hasn't been fetched yet... this could
# happen if the eval node is the first one using this Record
# the next line is a workaround for the problem: it causes the resource to be loaded
#Pinky: Why not this ? eval(expr, browser) ?
# name = browser.name
# data_dict = browser._data[self.get_value(browser, 'id')]
return safe_eval(expr, {}, {'obj': record})
def parse_node(self, node, parent, browser, datas=None):
attrs = self.node_attrs_get(node)
if 'type' in attrs:
if attrs['type']=='field':
value = self.get_value(browser, attrs['name'])
#TODO: test this
if value == '' and 'default' in attrs:
value = attrs['default']
el = etree.SubElement(parent, node.tag)
el.text = tounicode(value)
#TODO: test this
for key, value in attrs.iteritems():
if key not in ('type', 'name', 'default'):
el.set(key, value)
elif attrs['type']=='attachment':
model = browser._name
value = self.get_value(browser, attrs['name'])
ids = self.pool['ir.attachment'].search(self.cr, self.uid, [('res_model','=',model),('res_id','=',int(value))])
datas = self.pool['ir.attachment'].read(self.cr, self.uid, ids)
if len(datas):
# if there are several, pick first
datas = datas[0]
fname = str(datas['datas_fname'])
ext = fname.split('.')[-1].lower()
if ext in ('jpg','jpeg', 'png'):
import base64
from StringIO import StringIO
dt = base64.decodestring(datas['datas'])
fp = StringIO()
fp.write(dt)
i = str(len(self.bin_datas))
self.bin_datas[i] = fp
el = etree.SubElement(parent, node.tag)
el.text = i
elif attrs['type']=='data':
#TODO: test this
txt = self.datas.get('form', {}).get(attrs['name'], '')
el = etree.SubElement(parent, node.tag)
el.text = txt
elif attrs['type']=='function':
if attrs['name'] in self.func:
txt = self.func[attrs['name']](node)
else:
txt = print_fnc.print_fnc(attrs['name'], node)
el = etree.SubElement(parent, node.tag)
el.text = txt
elif attrs['type']=='eval':
value = self.eval(browser, attrs['expr'])
el = etree.SubElement(parent, node.tag)
el.text = str(value)
elif attrs['type']=='fields':
fields = attrs['name'].split(',')
vals = {}
for b in browser:
value = tuple([self.get_value2(b, f) for f in fields])
if not value in vals:
vals[value]=[]
vals[value].append(b)
keys = vals.keys()
keys.sort()
if 'order' in attrs and attrs['order']=='desc':
keys.reverse()
v_list = [vals[k] for k in keys]
for v in v_list:
el = etree.SubElement(parent, node.tag)
for el_cld in node:
self.parse_node(el_cld, el, v)
elif attrs['type']=='call':
if len(attrs['args']):
#TODO: test this
# fetches the values of the variables which names where passed in the args attribute
args = [self.eval(browser, arg) for arg in attrs['args'].split(',')]
else:
args = []
# get the object
if 'model' in attrs:
obj = self.pool[attrs['model']]
else:
obj = browser # the record(set) is an instance of the model
# get the ids
if 'ids' in attrs:
ids = self.eval(browser, attrs['ids'])
else:
                    ids = browser.ids
# call the method itself
newdatas = getattr(obj, attrs['name'])(self.cr, self.uid, ids, *args)
def parse_result_tree(node, parent, datas):
if not node.tag == etree.Comment:
el = etree.SubElement(parent, node.tag)
atr = self.node_attrs_get(node)
if 'value' in atr:
if not isinstance(datas[atr['value']], (str, unicode)):
txt = str(datas[atr['value']])
else:
txt = datas[atr['value']]
el.text = txt
else:
for el_cld in node:
parse_result_tree(el_cld, el, datas)
if not isinstance(newdatas, (BaseModel, list)):
newdatas = [newdatas]
for newdata in newdatas:
parse_result_tree(node, parent, newdata)
elif attrs['type']=='zoom':
value = self.get_value(browser, attrs['name'])
if value:
if not isinstance(value, (BaseModel, list)):
v_list = [value]
else:
v_list = value
for v in v_list:
el = etree.SubElement(parent, node.tag)
for el_cld in node:
self.parse_node(el_cld, el, v)
else:
# if there is no "type" attribute in the node, copy it to the xml data and parse its children
if not node.tag == etree.Comment:
if node.tag == parent.tag:
el = parent
else:
el = etree.SubElement(parent, node.tag)
for el_cld in node:
self.parse_node(el_cld,el, browser)
def xml_get(self):
return etree.tostring(self.doc,encoding="utf-8",xml_declaration=True,pretty_print=True)
def parse_tree(self, ids, model, context=None):
if not context:
context={}
browser = self.pool[model].browse(self.cr, self.uid, ids, context)
self.parse_node(self.dom, self.doc, browser)
def parse_string(self, xml, ids, model, context=None):
if not context:
context={}
# parses the xml template to memory
self.dom = etree.XML(xml)
# create the xml data from the xml template
self.parse_tree(ids, model, context)
def parse(self, filename, ids, model, context=None):
if not context:
context={}
# parses the xml template to memory
src_file = tools.file_open(filename)
try:
self.dom = etree.XML(src_file.read())
self.doc = etree.Element(self.dom.tag)
self.parse_tree(ids, model, context)
finally:
src_file.close()
def close(self):
self.doc = None
self.dom = None
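# Usage sketch (hypothetical caller; in practice the report services wire
# this up with a real cursor, uid and template path):
#
#     d = document(cr, uid, datas)
#     d.parse('module/report/template.xml', ids, 'res.partner', context)
#     xml = d.xml_get()
#     d.close()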
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
luckielordie/conan | conans/client/cmd/download.py | 1 | 2899 | import os
from conans.model.ref import PackageReference, ConanFileReference
from conans.client.output import ScopedOutput
from conans.errors import ConanException
from conans.client.source import complete_recipe_sources
def download(reference, package_ids, remote_name, recipe, registry, remote_manager,
client_cache, out, recorder, loader, plugin_manager):
assert(isinstance(reference, ConanFileReference))
output = ScopedOutput(str(reference), out)
remote = registry.remote(remote_name) if remote_name else registry.default_remote
package = remote_manager.search_recipes(remote, reference, None)
if not package: # Search the reference first, and raise if it doesn't exist
raise ConanException("'%s' not found in remote" % str(reference))
plugin_manager.execute("pre_download", reference=reference, remote=remote)
# First of all download package recipe
remote_manager.get_recipe(reference, remote)
registry.set_ref(reference, remote.name)
conan_file_path = client_cache.conanfile(reference)
conanfile = loader.load_class(conan_file_path)
if not recipe:
# Download the sources too, don't be lazy
complete_recipe_sources(remote_manager, client_cache, registry,
conanfile, reference)
if package_ids:
_download_binaries(reference, package_ids, client_cache, remote_manager,
remote, output, recorder, loader)
else:
output.info("Getting the complete package list "
"from '%s'..." % str(reference))
packages_props = remote_manager.search_packages(remote, reference, None)
if not packages_props:
output = ScopedOutput(str(reference), out)
output.warn("No remote binary packages found in remote")
else:
_download_binaries(reference, list(packages_props.keys()), client_cache,
remote_manager, remote, output, recorder, loader)
plugin_manager.execute("post_download", conanfile_path=conan_file_path, reference=reference,
remote=remote)
def _download_binaries(reference, package_ids, client_cache, remote_manager, remote, output,
recorder, loader):
conanfile_path = client_cache.conanfile(reference)
if not os.path.exists(conanfile_path):
raise Exception("Download recipe first")
conanfile = loader.load_class(conanfile_path)
short_paths = conanfile.short_paths
for package_id in package_ids:
package_ref = PackageReference(reference, package_id)
package_folder = client_cache.package(package_ref, short_paths=short_paths)
output.info("Downloading %s" % str(package_ref))
remote_manager.get_package(package_ref, package_folder, remote, output, recorder)
| mit |
irenedet/Maxwell-ATC | mygeometry_1.py | 1 | 3995 | from ngsolve import *
from netgen.csg import *
from ngsolve.internal import *
def ATCerror_brick_geometry(Rminus, Rplus, Rext, Rpml, delta, hmax):
geometry = CSGeometry()
o_ext = (Sphere(Pnt(0,0,0), Rext)).bc("outer")
pml = Sphere(Pnt(0,0,0),Rpml)
o_plus = Sphere(Pnt(0,0,0), Rplus).bc("interface")
#This is to define the two close surfaces for the thin layer:
box = OrthoBrick(Pnt(-Rminus,-Rminus,-Rminus),Pnt(Rminus,Rminus,Rminus+delta))
pl1 = Plane(Pnt(0,0,Rminus),Vec(0,0,-1)).bc("crack")
pl2 = Plane(Pnt(0,0,Rminus+delta),Vec(0,0,1))#.bc("top")
o_minus = (box - pl1)
geometry.Add ((box - pl1).mat("ominus"),bcmod=[(o_minus,"nocrack")])
geometry.Add ((o_ext - pml).mat("pml"))
geometry.Add ((pml-o_plus).mat("air"))
geometry.Add ((o_plus-box).mat("oplus").maxh(hmax))
geometry.Add ((box * pl1 * pl2).mat("olayer").maxh(hmax),bcmod=[(pl1,"crack"),(box,"sides"),(pl2,"top")])
#slices = [2**(-i) for i in reversed(range(1,6))]
geometry.CloseSurfaces(pl1,pl2)#,slices)
return geometry
def ATCerror_halfsphere_geometry(Rminus, Rplus, Rext, Rpml, delta, hmax):
geometry = CSGeometry()
o_ext = (Sphere(Pnt(0,0,0), Rext)).bc("outer")
pml = Sphere(Pnt(0,0,0),Rpml)
o_plus = Sphere(Pnt(0,0,0), Rplus+.2).bc("interface")
#This is to define the two close surfaces for the thin layer:
circle = Sphere(Pnt(0,0,0),Rminus)
small_cylinder = Cylinder(Pnt(0,0,1),Pnt(0,0,-1),Rminus*0.5)
pl1 = Plane(Pnt(0,0,0),Vec(0,0,-1))
pl2 = Plane(Pnt(0,0,delta),Vec(0,0,1))#.bc("top")
o_minus_with_layer = (circle*pl2)
o_minus = (circle - pl1).maxh(hmax)
no_olayer = ((pl1 * pl2 * circle) - small_cylinder)
yes_olayer =(pl1 * pl2 * small_cylinder)
geometry.Add ((o_ext - pml).mat("pml"))
geometry.Add ((pml-o_plus).mat("air"))
geometry.Add ((o_plus-o_minus_with_layer).mat("oplus").maxh(hmax))
geometry.Add ((o_minus).mat("ominus").maxh(hmax))
geometry.Add (yes_olayer.mat("olayer").maxh(hmax),bcmod=[(pl1,"crack")])
geometry.Add (no_olayer.mat("oplus").maxh(hmax))
slices = [2**(-i) for i in reversed(range(1,6))]
geometry.CloseSurfaces(pl1,pl2,slices)
return geometry
def brick_geometry(Rminus, Rplus, Rext, Rpml, delta, hsample, hmax):
geometry = CSGeometry()
o_ext = (Sphere(Pnt(0,0,0), Rext)).bc("outer")
pml = Sphere(Pnt(0,0,0),Rpml)
o_plus = Sphere(Pnt(0,0,0), Rplus).bc("interface")
#This is to define the two close surfaces for the thin layer:
box = OrthoBrick(Pnt(-Rminus,-Rminus,-Rminus),Pnt(Rminus,Rminus,Rminus))
pl1 = Plane(Pnt(0,0,Rminus),Vec(0,0,-1)).bc("crack")
pl2 = Plane(Pnt(0,0,Rminus+delta),Vec(0,0,1))#.bc("top")
o_minus = (box - pl1).maxh(hsample)
geometry.Add (o_minus.mat("ominus").maxh(hmax))
geometry.Add ((o_ext - pml).mat("pml"))
geometry.Add ((pml-o_plus).mat("air"))
geometry.Add ((o_plus-box).mat("oplus").maxh(hmax))
geometry.Add ((box * pl1 * pl2).mat("olayer").maxh(hmax))
#slices = [2**(-i) for i in reversed(range(1,6))]
geometry.CloseSurfaces(pl1,pl2)#,slices)
return geometry
def sphere_geometry(Rminus, Rplus, Rext, Rpml, Rother, c, delta, hsample, hmax):
geometry = CSGeometry()
o_ext = (Sphere(Pnt(0,0,0), Rext)).bc("outer")
pml = Sphere(Pnt(0,0,0),Rpml)
o_plus = Sphere(Pnt(0,0,0), Rplus).bc("interface")
#This is to define the two close surfaces for the thin layer:
o_minus = (Sphere(Pnt(0,0,0), Rminus)).maxh(hmax).mat("ominus").maxh(hsample)
other = Sphere(Pnt(0,c,0),Rother)
withCrack = (o_minus * other)
withoutCrack = (o_minus - other)
geometry.Add ((o_ext - pml).mat("air"))
geometry.Add ((pml-o_plus).mat("air"))
geometry.Add ((o_plus-o_minus-other).mat("oplus"))
geometry.Add ((other-o_minus).mat("olayer"))
geometry.Add (withCrack,bcmod=[(o_minus,"crack")])
geometry.Add (withoutCrack,bcmod=[(o_minus,"nocrack")])
return geometry
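# Example driver (a sketch; the radii and mesh sizes below are illustrative):
#
#     geo = brick_geometry(Rminus=0.5, Rplus=1.0, Rext=2.0, Rpml=1.5,
#                          delta=0.01, hsample=0.05, hmax=0.2)
#     mesh = Mesh(geo.GenerateMesh(maxh=0.3))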
| mit |
USGSDenverPychron/pychron | pychron/hardware/aerotech/aerotech_axis.py | 1 | 4812 | # ===============================================================================
# Copyright 2011 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
from traits.api import Int, Enum, CFloat
from traitsui.api import View, Item, Group
# ============= standard library imports ========================
# ============= local library imports ==========================
from pychron.hardware.axis import Axis
class AerotechAxis(Axis):
id = Int
home_direction_ccw = Enum('yes', 'no')
home_switch_normally_open = Enum('yes', 'no')
home_feedrate = CFloat
home_offset = CFloat
limit_switch_normally_open = Enum('yes', 'no')
def load(self, path):
for key, value in self._get_parameters(path):
setattr(self, key, value)
def _validate_velocity(self, v):
return self._validate_float(v)
def _set_velocity(self, v):
self.nominal_velocity = v
self.trait_set(_velocity=v, trait_change_notify=False)
def load_parameters(self):
# homing and limts 4.7
attrs = [('home_direction_ccw', 2),
('home_switch_normally_open', 3),
('home_feedrate', 4),
('home_offset', 6),
('limit_switch_normally_open', 9),
('limit_to_mechanical_stop', 10),
('ccw_software_limit', 22),
('cw_software_limit', 23),
('position_channel', 38),
('velocity_channel', 39),
('position_setup_code', 40),
('velocity_setup_code', 41),
('amplifier_type', 42),
('commutation_cycles_per_rev', 43),
('feedback_steps_per_rev', 44),
('commutation_phase_offset', 45),
('stepper_high_current', 46),
('stepper_low_current', 47),
('microstepping_resolution', 63),
('stepper_correction', 64),
('stepper_correction_speed', 65),
('base_speed', 66),
('base_speed_advance', 67),
('phase_speed', 68),
('phase_speed_advance', 69),
('primary_dac_offset', 79),
('secondary_dac_offset', 80),
('encoder_factor', 82),
('global_fault_mask', 55),
('disable', 56),
('interrupt', 57),
('aux_output', 58),
('halt_queue', 59),
('abort_motion', 60),
('enable_brake', 61),
('top_feedrate', 17),
('maximum_velocity_error', 18),
('maximum_position_error', 19),
('maximum_integral_error', 20),
('rms_current_trap', 48),
('rms_current_sample_time', 49),
('clamp_current_output', 53),
('aux_fault_output_bit', 54),
('amplifier_fault_active_low', 70)]
param_table = []
for name, code in attrs:
cmd = self._build_query(code)
rp = self.ask(cmd)
if rp is not None:
rp = rp.strip()
try:
setattr(self, name, rp)
param_table.append(rp)
except Exception:
self.warning('{} not set invalid value {}'.format(name, rp))
names, codes = zip(*attrs)
return names, codes, param_table
def _build_query(self, code):
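        # Read-parameter query: "RP" + axis id + two-digit parameter code,
        # e.g. axis 1 with code 4 yields "RP104".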
cmd = 'RP{}{:02}'.format(self.id, code)
return cmd
def simple_view(self):
home_grp = Group(Item('home_direction_ccw'),
Item('home_switch_normally_open'),
Item('home_feedrate'),
Item('home_offset'),
label='Home')
limits_grp = Group(Item('limit_switch_normally_open'),
label='Limits')
v = View(Item('id', style='readonly'),
home_grp,
limits_grp)
return v
# ============= EOF ====================================
| apache-2.0 |
GhostThrone/django | tests/utils_tests/test_decorators.py | 319 | 4870 | from django.http import HttpResponse
from django.template import engines
from django.template.response import TemplateResponse
from django.test import RequestFactory, SimpleTestCase
from django.utils.decorators import classproperty, decorator_from_middleware
class ProcessViewMiddleware(object):
def process_view(self, request, view_func, view_args, view_kwargs):
pass
process_view_dec = decorator_from_middleware(ProcessViewMiddleware)
@process_view_dec
def process_view(request):
return HttpResponse()
class ClassProcessView(object):
def __call__(self, request):
return HttpResponse()
class_process_view = process_view_dec(ClassProcessView())
class FullMiddleware(object):
def process_request(self, request):
request.process_request_reached = True
def process_view(self, request, view_func, view_args, view_kwargs):
request.process_view_reached = True
def process_template_response(self, request, response):
request.process_template_response_reached = True
return response
def process_response(self, request, response):
# This should never receive unrendered content.
request.process_response_content = response.content
request.process_response_reached = True
return response
full_dec = decorator_from_middleware(FullMiddleware)
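# Wrapping a view with full_dec runs process_request/process_view before the
# view and defers process_response until any TemplateResponse has been
# rendered, as the tests below verify.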
class DecoratorFromMiddlewareTests(SimpleTestCase):
"""
Tests for view decorators created using
``django.utils.decorators.decorator_from_middleware``.
"""
rf = RequestFactory()
def test_process_view_middleware(self):
"""
Test a middleware that implements process_view.
"""
process_view(self.rf.get('/'))
def test_callable_process_view_middleware(self):
"""
Test a middleware that implements process_view, operating on a callable class.
"""
class_process_view(self.rf.get('/'))
def test_full_dec_normal(self):
"""
Test that all methods of middleware are called for normal HttpResponses
"""
@full_dec
def normal_view(request):
template = engines['django'].from_string("Hello world")
return HttpResponse(template.render())
request = self.rf.get('/')
normal_view(request)
self.assertTrue(getattr(request, 'process_request_reached', False))
self.assertTrue(getattr(request, 'process_view_reached', False))
# process_template_response must not be called for HttpResponse
self.assertFalse(getattr(request, 'process_template_response_reached', False))
self.assertTrue(getattr(request, 'process_response_reached', False))
def test_full_dec_templateresponse(self):
"""
Test that all methods of middleware are called for TemplateResponses in
the right sequence.
"""
@full_dec
def template_response_view(request):
template = engines['django'].from_string("Hello world")
return TemplateResponse(request, template)
request = self.rf.get('/')
response = template_response_view(request)
self.assertTrue(getattr(request, 'process_request_reached', False))
self.assertTrue(getattr(request, 'process_view_reached', False))
self.assertTrue(getattr(request, 'process_template_response_reached', False))
# response must not be rendered yet.
self.assertFalse(response._is_rendered)
# process_response must not be called until after response is rendered,
# otherwise some decorators like csrf_protect and gzip_page will not
# work correctly. See #16004
self.assertFalse(getattr(request, 'process_response_reached', False))
response.render()
self.assertTrue(getattr(request, 'process_response_reached', False))
# Check that process_response saw the rendered content
self.assertEqual(request.process_response_content, b"Hello world")
class ClassPropertyTest(SimpleTestCase):
def test_getter(self):
class Foo(object):
foo_attr = 123
def __init__(self):
self.foo_attr = 456
@classproperty
def foo(cls):
return cls.foo_attr
class Bar(object):
bar = classproperty()
@bar.getter
def bar(cls):
return 123
self.assertEqual(Foo.foo, 123)
self.assertEqual(Foo().foo, 123)
self.assertEqual(Bar.bar, 123)
self.assertEqual(Bar().bar, 123)
def test_override_getter(self):
class Foo(object):
@classproperty
def foo(cls):
return 123
@foo.getter
def foo(cls):
return 456
self.assertEqual(Foo.foo, 456)
self.assertEqual(Foo().foo, 456)
| bsd-3-clause |