# ==== Lujeni/ansible :: lib/ansible/module_utils/hetzner.py (license: gpl-3.0) ====
# -*- coding: utf-8 -*-
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c), Felix Fontein <[email protected]>, 2019
#
# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
from ansible.module_utils.urls import fetch_url
from ansible.module_utils.six.moves.urllib.parse import urlencode
import time
HETZNER_DEFAULT_ARGUMENT_SPEC = dict(
hetzner_user=dict(type='str', required=True),
hetzner_password=dict(type='str', required=True, no_log=True),
)
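
# Illustrative sketch (editor's addition, not part of the upstream module):
# a Hetzner module would merge this shared spec into its own argument_spec.
# `failover_ip` below is a hypothetical module-specific option.
def _example_build_argument_spec():
    argument_spec = dict(
        failover_ip=dict(type='str', required=True),  # hypothetical option
    )
    argument_spec.update(HETZNER_DEFAULT_ARGUMENT_SPEC)
    return argument_spec
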
# The API endpoint is fixed.
BASE_URL = "https://robot-ws.your-server.de"
def fetch_url_json(module, url, method='GET', timeout=10, data=None, headers=None, accept_errors=None):
'''
    Make a general request to Hetzner's JSON robot API.
'''
module.params['url_username'] = module.params['hetzner_user']
module.params['url_password'] = module.params['hetzner_password']
resp, info = fetch_url(module, url, method=method, timeout=timeout, data=data, headers=headers)
try:
content = resp.read()
except AttributeError:
content = info.pop('body', None)
if not content:
module.fail_json(msg='Cannot retrieve content from {0}'.format(url))
try:
result = module.from_json(content.decode('utf8'))
if 'error' in result:
if accept_errors:
if result['error']['code'] in accept_errors:
return result, result['error']['code']
module.fail_json(msg='Request failed: {0} {1} ({2})'.format(
result['error']['status'],
result['error']['code'],
result['error']['message']
))
return result, None
except ValueError:
module.fail_json(msg='Cannot decode content retrieved from {0}'.format(url))
class CheckDoneTimeoutException(Exception):
def __init__(self, result, error):
super(CheckDoneTimeoutException, self).__init__()
self.result = result
self.error = error
def fetch_url_json_with_retries(module, url, check_done_callback, check_done_delay=10, check_done_timeout=180, skip_first=False, **kwargs):
'''
    Make a general request to Hetzner's JSON robot API, with retries until a condition is satisfied.
The condition is tested by calling ``check_done_callback(result, error)``. If it is not satisfied,
it will be retried with delays ``check_done_delay`` (in seconds) until a total timeout of
``check_done_timeout`` (in seconds) since the time the first request is started is reached.
    If ``skip_first`` is specified, the function assumes that a first call has already been made
    and starts directly with waiting.
'''
start_time = time.time()
if not skip_first:
result, error = fetch_url_json(module, url, **kwargs)
if check_done_callback(result, error):
return result, error
while True:
elapsed = (time.time() - start_time)
left_time = check_done_timeout - elapsed
time.sleep(max(min(check_done_delay, left_time), 0))
result, error = fetch_url_json(module, url, **kwargs)
if check_done_callback(result, error):
return result, error
if left_time < check_done_delay:
raise CheckDoneTimeoutException(result, error)
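
# Illustrative sketch (editor's addition, not part of the upstream module):
# a check_done_callback receives the decoded JSON result and the accepted
# error code (or None), and returns True once polling should stop. The
# 'ready' key is hypothetical; real callers inspect whatever field the
# endpoint they poll actually reports, e.g.:
#   result, error = fetch_url_json_with_retries(
#       module, BASE_URL + '/some/endpoint', _example_check_done)
def _example_check_done(result, error):
    if error is not None:
        return False
    return result.get('ready', False)
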
# #####################################################################################
# ## FAILOVER IP ######################################################################
def get_failover_record(module, ip):
'''
Get information record of failover IP.
See https://robot.your-server.de/doc/webservice/en.html#get-failover-failover-ip
'''
url = "{0}/failover/{1}".format(BASE_URL, ip)
result, error = fetch_url_json(module, url)
if 'failover' not in result:
module.fail_json(msg='Cannot interpret result: {0}'.format(result))
return result['failover']
def get_failover(module, ip):
'''
Get current routing target of failover IP.
The value ``None`` represents unrouted.
See https://robot.your-server.de/doc/webservice/en.html#get-failover-failover-ip
'''
return get_failover_record(module, ip)['active_server_ip']
def set_failover(module, ip, value, timeout=180):
'''
Set current routing target of failover IP.
Return a pair ``(value, changed)``. The value ``None`` for ``value`` represents unrouted.
See https://robot.your-server.de/doc/webservice/en.html#post-failover-failover-ip
and https://robot.your-server.de/doc/webservice/en.html#delete-failover-failover-ip
'''
url = "{0}/failover/{1}".format(BASE_URL, ip)
if value is None:
result, error = fetch_url_json(
module,
url,
method='DELETE',
timeout=timeout,
accept_errors=['FAILOVER_ALREADY_ROUTED']
)
else:
headers = {"Content-type": "application/x-www-form-urlencoded"}
data = dict(
active_server_ip=value,
)
result, error = fetch_url_json(
module,
url,
method='POST',
timeout=timeout,
data=urlencode(data),
headers=headers,
accept_errors=['FAILOVER_ALREADY_ROUTED']
)
if error is not None:
return value, False
else:
return result['failover']['active_server_ip'], True
def get_failover_state(value):
'''
Create result dictionary for failover IP's value.
The value ``None`` represents unrouted.
'''
return dict(
value=value,
state='routed' if value else 'unrouted'
)
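
# Illustrative sketch (editor's addition, not part of the upstream module):
# typical flow inside a module body. `module` is assumed to be an
# AnsibleModule built with HETZNER_DEFAULT_ARGUMENT_SPEC; the IP addresses
# are documentation placeholders.
def _example_reroute(module):
    current = get_failover(module, '192.0.2.1')  # None means unrouted
    value, changed = set_failover(module, '192.0.2.1', '198.51.100.7')
    return dict(before=get_failover_state(current),
                after=get_failover_state(value),
                changed=changed)
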
# ==== pando85/gourmet :: gourmet/gtk_extras/timeEntry.py (license: gpl-2.0) ====
### Copyright (C) 2005 Thomas M. Hinkle
### Copyright (C) 2009 Rolf Leggewie
###
### This library is free software; you can redistribute it and/or
### modify it under the terms of the GNU General Public License as
### published by the Free Software Foundation; either version 2 of the
### License, or (at your option) any later version.
###
### This library is distributed in the hope that it will be useful,
### but WITHOUT ANY WARRANTY; without even the implied warranty of
### MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
### General Public License for more details.
###
### You should have received a copy of the GNU General Public License
### along with this library; if not, write to the Free Software
### Foundation, 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
### USA
import gtk
from gettext import gettext as _
import gourmet.convert as convert
import validatingEntry
TIME_TO_READ = 1000
class TimeEntry (validatingEntry.ValidatingEntry):
__gtype_name__ = 'TimeEntry'
def __init__ (self, conv=None):
if not conv: self.conv = convert.get_converter()
else: self.conv = conv
validatingEntry.ValidatingEntry.__init__(self)
self.entry.get_value = self.get_value
self.entry.set_value = self.set_value
def find_errors_in_progress (self, txt):
if (not txt) or self.conv.timestring_to_seconds(txt):
return None
elif not convert.NUMBER_MATCHER.match(txt.split()[0]):
return _('Time must begin with a number or fraction followed by a unit (minutes, hours, etc.).')
else:
words = txt.split()
#if len(words) == 1:
# self._hide_warning_slowly()
# return
if convert.NUMBER_MATCHER.match(words[-1]):
return None
else:
partial_unit = words[-1]
for u in self.conv.unit_to_seconds.keys():
if u.lower().find(partial_unit.lower())==0:
return None
#self._hide_warning_slowly()
#return
return _('Invalid input.') + \
_('Time must be expressed in hours, minutes, seconds, etc.')
#else:
# self.set_warning_text("Invalid or incomplete time")
# self._show_warning()
def find_completed_errors (self,*args):
txt = self.entry.get_text()
if txt and not self.conv.timestring_to_seconds(txt):
return _('Invalid input.') + \
_('Time must be expressed in hours, minutes, seconds, etc.')
words = txt.split()
if len(words) == 1:
self._hide_warning_slowly()
return
elif convert.NUMBER_MATCHER.match(words[-1]):
return
else:
partial_unit = words[-1]
for u in self.conv.unit_to_seconds.keys():
if u.lower().find(partial_unit.lower())==0:
self._hide_warning_slowly()
return
self.valid = False
self.warn = True
        self.set_warning_text('Invalid input. ' + 'Time must be expressed in hours, minutes, seconds, etc.')
self._show_warning()
def set_value (self,seconds):
self.entry.set_text(
convert.seconds_to_timestring(seconds,
fractions=convert.FRACTIONS_ASCII)
)
def get_value (self):
return self.conv.timestring_to_seconds(self.entry.get_text())
def make_time_entry():
te=TimeEntry()
te.show()
return te
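
# Illustrative sketch (editor's addition, not part of the upstream module):
# round-trip through the widget's converter. set_value() renders seconds as
# a time string and get_value() parses it back; needs a working GTK setup.
def _example_round_trip():
    te = make_time_entry()
    te.set_value(5400)     # rendered as something like "1 1/2 hours"
    return te.get_value()  # parsed back to seconds
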
if __name__ == '__main__':
w=gtk.Window()
vb = gtk.VBox()
hb = gtk.HBox()
l=gtk.Label('_Label')
l.set_use_underline(True)
l.set_alignment(0,0.5)
hb.pack_start(l)
te=TimeEntry()
import sys
te.connect('changed',lambda w: sys.stderr.write('Time value: %s'%w.get_value()))
l.set_mnemonic_widget(te)
hb.pack_start(te,expand=False,fill=False)
vb.add(hb)
qb = gtk.Button(stock=gtk.STOCK_QUIT)
vb.add(qb)
l.show()
hb.show()
qb.show()
te.show()
vb.show()
qb.connect('clicked',lambda *args: w.hide() and gtk.main_quit() or gtk.main_quit())
w.add(vb)
w.show()
w.connect('delete_event',gtk.main_quit)
gtk.main()
# ==== devGregA/code :: scrapy/core/scraper.py (license: bsd-3-clause) ====
"""This module implements the Scraper component which parses responses and
extracts information from them"""
from collections import deque
from twisted.python.failure import Failure
from twisted.internet import defer
from scrapy.utils.defer import defer_result, defer_succeed, parallel, iter_errback
from scrapy.utils.spider import iterate_spider_output
from scrapy.utils.misc import load_object
from scrapy.exceptions import CloseSpider, DropItem, IgnoreRequest
from scrapy import signals
from scrapy.http import Request, Response
from scrapy.item import BaseItem
from scrapy.core.spidermw import SpiderMiddlewareManager
from scrapy import log
class Slot(object):
"""Scraper slot (one per running spider)"""
MIN_RESPONSE_SIZE = 1024
def __init__(self, max_active_size=5000000):
self.max_active_size = max_active_size
self.queue = deque()
self.active = set()
self.active_size = 0
self.itemproc_size = 0
self.closing = None
def add_response_request(self, response, request):
deferred = defer.Deferred()
self.queue.append((response, request, deferred))
if isinstance(response, Response):
self.active_size += max(len(response.body), self.MIN_RESPONSE_SIZE)
else:
self.active_size += self.MIN_RESPONSE_SIZE
return deferred
def next_response_request_deferred(self):
response, request, deferred = self.queue.popleft()
self.active.add(request)
return response, request, deferred
def finish_response(self, response, request):
self.active.remove(request)
if isinstance(response, Response):
self.active_size -= max(len(response.body), self.MIN_RESPONSE_SIZE)
else:
self.active_size -= self.MIN_RESPONSE_SIZE
def is_idle(self):
return not (self.queue or self.active)
def needs_backout(self):
return self.active_size > self.max_active_size
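
# Illustrative sketch (editor's addition, not part of the upstream module):
# the slot tracks the byte size of in-flight responses so the engine can
# back off when too much unprocessed data is queued.
def _example_slot_accounting():
    slot = Slot(max_active_size=2 * 1024)
    request = Request('http://example.com')
    response = Response('http://example.com', body='x' * 4096)
    slot.add_response_request(response, request)
    assert slot.needs_backout()  # 4096 bytes queued > 2048 allowed
    slot.next_response_request_deferred()
    slot.finish_response(response, request)
    assert slot.is_idle()
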
class Scraper(object):
def __init__(self, crawler):
self.slot = None
self.spidermw = SpiderMiddlewareManager.from_crawler(crawler)
itemproc_cls = load_object(crawler.settings['ITEM_PROCESSOR'])
self.itemproc = itemproc_cls.from_crawler(crawler)
self.concurrent_items = crawler.settings.getint('CONCURRENT_ITEMS')
self.crawler = crawler
self.signals = crawler.signals
self.logformatter = crawler.logformatter
@defer.inlineCallbacks
def open_spider(self, spider):
"""Open the given spider for scraping and allocate resources for it"""
self.slot = Slot()
yield self.itemproc.open_spider(spider)
def close_spider(self, spider):
"""Close a spider being scraped and release its resources"""
slot = self.slot
slot.closing = defer.Deferred()
slot.closing.addCallback(self.itemproc.close_spider)
self._check_if_closing(spider, slot)
return slot.closing
def is_idle(self):
"""Return True if there isn't any more spiders to process"""
return not self.slot
def _check_if_closing(self, spider, slot):
if slot.closing and slot.is_idle():
slot.closing.callback(spider)
def enqueue_scrape(self, response, request, spider):
slot = self.slot
dfd = slot.add_response_request(response, request)
def finish_scraping(_):
slot.finish_response(response, request)
self._check_if_closing(spider, slot)
self._scrape_next(spider, slot)
return _
dfd.addBoth(finish_scraping)
dfd.addErrback(log.err, 'Scraper bug processing %s' % request, \
spider=spider)
self._scrape_next(spider, slot)
return dfd
def _scrape_next(self, spider, slot):
while slot.queue:
response, request, deferred = slot.next_response_request_deferred()
self._scrape(response, request, spider).chainDeferred(deferred)
def _scrape(self, response, request, spider):
"""Handle the downloaded response or failure trough the spider
callback/errback"""
assert isinstance(response, (Response, Failure))
dfd = self._scrape2(response, request, spider) # returns spiders processed output
dfd.addErrback(self.handle_spider_error, request, response, spider)
dfd.addCallback(self.handle_spider_output, request, response, spider)
return dfd
def _scrape2(self, request_result, request, spider):
"""Handle the different cases of request's result been a Response or a
Failure"""
if not isinstance(request_result, Failure):
return self.spidermw.scrape_response(self.call_spider, \
request_result, request, spider)
else:
# FIXME: don't ignore errors in spider middleware
dfd = self.call_spider(request_result, request, spider)
return dfd.addErrback(self._log_download_errors, \
request_result, request, spider)
def call_spider(self, result, request, spider):
result.request = request
dfd = defer_result(result)
dfd.addCallbacks(request.callback or spider.parse, request.errback)
return dfd.addCallback(iterate_spider_output)
def handle_spider_error(self, _failure, request, response, spider):
exc = _failure.value
if isinstance(exc, CloseSpider):
self.crawler.engine.close_spider(spider, exc.reason or 'cancelled')
return
log.err(_failure, "Spider error processing %s" % request, spider=spider)
self.signals.send_catch_log(signal=signals.spider_error, failure=_failure, response=response, \
spider=spider)
self.crawler.stats.inc_value("spider_exceptions/%s" % _failure.value.__class__.__name__, \
spider=spider)
def handle_spider_output(self, result, request, response, spider):
if not result:
return defer_succeed(None)
it = iter_errback(result, self.handle_spider_error, request, response, spider)
dfd = parallel(it, self.concurrent_items,
self._process_spidermw_output, request, response, spider)
return dfd
def _process_spidermw_output(self, output, request, response, spider):
"""Process each Request/Item (given in the output parameter) returned
from the given spider
"""
if isinstance(output, Request):
self.crawler.engine.crawl(request=output, spider=spider)
elif isinstance(output, BaseItem):
self.slot.itemproc_size += 1
dfd = self.itemproc.process_item(output, spider)
dfd.addBoth(self._itemproc_finished, output, response, spider)
return dfd
elif output is None:
pass
else:
typename = type(output).__name__
log.msg(format='Spider must return Request, BaseItem or None, '
'got %(typename)r in %(request)s',
level=log.ERROR, spider=spider, request=request, typename=typename)
def _log_download_errors(self, spider_failure, download_failure, request, spider):
"""Log and silence errors that come from the engine (typically download
        errors that got propagated through here)
"""
if isinstance(download_failure, Failure) \
and not download_failure.check(IgnoreRequest):
if download_failure.frames:
log.err(download_failure, 'Error downloading %s' % request,
spider=spider)
else:
errmsg = download_failure.getErrorMessage()
if errmsg:
log.msg(format='Error downloading %(request)s: %(errmsg)s',
level=log.ERROR, spider=spider, request=request,
errmsg=errmsg)
if spider_failure is not download_failure:
return spider_failure
def _itemproc_finished(self, output, item, response, spider):
"""ItemProcessor finished for the given ``item`` and returned ``output``
"""
self.slot.itemproc_size -= 1
if isinstance(output, Failure):
ex = output.value
if isinstance(ex, DropItem):
logkws = self.logformatter.dropped(item, ex, response, spider)
log.msg(spider=spider, **logkws)
return self.signals.send_catch_log_deferred(signal=signals.item_dropped, \
item=item, response=response, spider=spider, exception=output.value)
else:
log.err(output, 'Error processing %s' % item, spider=spider)
else:
logkws = self.logformatter.scraped(output, response, spider)
log.msg(spider=spider, **logkws)
return self.signals.send_catch_log_deferred(signal=signals.item_scraped, \
item=output, response=response, spider=spider)
# ==== xen0l/ansible :: lib/ansible/plugins/inventory/script.py (license: gpl-3.0) ====
# Copyright (c) 2012-2014, Michael DeHaan <[email protected]>
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
inventory: script
version_added: "2.4"
short_description: Executes an inventory script that returns JSON
options:
cache:
description: Toggle the usage of the configured Cache plugin.
default: False
type: boolean
ini:
- section: inventory_plugin_script
key: cache
env:
- name: ANSIBLE_INVENTORY_PLUGIN_SCRIPT_CACHE
always_show_stderr:
description: Toggle display of stderr even when script was successful
version_added: "2.5.1"
default: True
type: boolean
ini:
- section: inventory_plugin_script
key: always_show_stderr
env:
- name: ANSIBLE_INVENTORY_PLUGIN_SCRIPT_STDERR
description:
- The source provided must be an executable that returns Ansible inventory JSON
- The source must accept C(--list) and C(--host <hostname>) as arguments.
C(--host) will only be used if no C(_meta) key is present.
This is a performance optimization as the script would be called per host otherwise.
notes:
- It takes the place of the previously hardcoded script inventory.
- In order to function, it requires being whitelisted in configuration, which is true by default.
'''
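
# Illustrative sketch (editor's addition, not part of the upstream plugin):
# the shape of the JSON an executable inventory script is expected to print
# for --list. With a populated "_meta.hostvars", the plugin never invokes
# the script once per host with --host. All names below are placeholders.
EXAMPLE_SCRIPT_OUTPUT = '''
{
    "webservers": {
        "hosts": ["web1.example.com"],
        "vars": {"http_port": 80},
        "children": ["atlanta"]
    },
    "_meta": {
        "hostvars": {
            "web1.example.com": {"ansible_host": "192.0.2.10"}
        }
    }
}
'''
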
import os
import subprocess
from collections import Mapping
from ansible.errors import AnsibleError, AnsibleParserError
from ansible.module_utils.basic import json_dict_bytes_to_unicode
from ansible.module_utils.six import iteritems
from ansible.module_utils._text import to_native, to_text
from ansible.plugins.inventory import BaseInventoryPlugin, Cacheable
class InventoryModule(BaseInventoryPlugin, Cacheable):
''' Host inventory parser for ansible using external inventory scripts. '''
NAME = 'script'
def __init__(self):
super(InventoryModule, self).__init__()
self._hosts = set()
def verify_file(self, path):
''' Verify if file is usable by this plugin, base does minimal accessibility check '''
valid = super(InventoryModule, self).verify_file(path)
if valid:
# not only accessible, file must be executable and/or have shebang
shebang_present = False
try:
with open(path, 'rb') as inv_file:
initial_chars = inv_file.read(2)
if initial_chars.startswith(b'#!'):
shebang_present = True
except Exception:
pass
if not os.access(path, os.X_OK) and not shebang_present:
valid = False
return valid
def parse(self, inventory, loader, path, cache=None):
super(InventoryModule, self).parse(inventory, loader, path)
self.set_options()
if cache is None:
cache = self.get_option('cache')
# Support inventory scripts that are not prefixed with some
# path information but happen to be in the current working
# directory when '.' is not in PATH.
cmd = [path, "--list"]
try:
cache_key = self._get_cache_prefix(path)
if not cache or cache_key not in self._cache:
try:
sp = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
except OSError as e:
raise AnsibleParserError("problem running %s (%s)" % (' '.join(cmd), to_native(e)))
(stdout, stderr) = sp.communicate()
path = to_native(path)
err = to_native(stderr or "")
if err and not err.endswith('\n'):
err += '\n'
if sp.returncode != 0:
raise AnsibleError("Inventory script (%s) had an execution error: %s " % (path, err))
# make sure script output is unicode so that json loader will output unicode strings itself
try:
data = to_text(stdout, errors="strict")
except Exception as e:
raise AnsibleError("Inventory {0} contained characters that cannot be interpreted as UTF-8: {1}".format(path, to_native(e)))
try:
self._cache[cache_key] = self.loader.load(data, file_name=path)
except Exception as e:
raise AnsibleError("failed to parse executable inventory script results from {0}: {1}\n{2}".format(path, to_native(e), err))
# if no other errors happened and you want to force displaying stderr, do so now
if stderr and self.get_option('always_show_stderr'):
self.display.error(msg=to_text(err))
processed = self._cache[cache_key]
if not isinstance(processed, Mapping):
raise AnsibleError("failed to parse executable inventory script results from {0}: needs to be a json dict\n{1}".format(path, err))
group = None
data_from_meta = None
# A "_meta" subelement may contain a variable "hostvars" which contains a hash for each host
# if this "hostvars" exists at all then do not call --host for each # host.
# This is for efficiency and scripts should still return data
# if called with --host for backwards compat with 1.2 and earlier.
for (group, gdata) in processed.items():
if group == '_meta':
if 'hostvars' in gdata:
data_from_meta = gdata['hostvars']
else:
self._parse_group(group, gdata)
for host in self._hosts:
got = {}
if data_from_meta is None:
got = self.get_host_variables(path, host)
else:
try:
got = data_from_meta.get(host, {})
except AttributeError as e:
raise AnsibleError("Improperly formatted host information for %s: %s" % (host, to_native(e)))
self._populate_host_vars([host], got)
except Exception as e:
raise AnsibleParserError(to_native(e))
def _parse_group(self, group, data):
self.inventory.add_group(group)
if not isinstance(data, dict):
data = {'hosts': data}
        # if not those subkeys, then simplified syntax, host with vars
elif not any(k in data for k in ('hosts', 'vars', 'children')):
data = {'hosts': [group], 'vars': data}
if 'hosts' in data:
if not isinstance(data['hosts'], list):
raise AnsibleError("You defined a group '%s' with bad data for the host list:\n %s" % (group, data))
for hostname in data['hosts']:
self._hosts.add(hostname)
self.inventory.add_host(hostname, group)
if 'vars' in data:
if not isinstance(data['vars'], dict):
raise AnsibleError("You defined a group '%s' with bad data for variables:\n %s" % (group, data))
for k, v in iteritems(data['vars']):
self.inventory.set_variable(group, k, v)
if group != '_meta' and isinstance(data, dict) and 'children' in data:
for child_name in data['children']:
self.inventory.add_group(child_name)
self.inventory.add_child(group, child_name)
def get_host_variables(self, path, host):
""" Runs <script> --host <hostname>, to determine additional host variables """
cmd = [path, "--host", host]
try:
sp = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
except OSError as e:
raise AnsibleError("problem running %s (%s)" % (' '.join(cmd), e))
(out, err) = sp.communicate()
if out.strip() == '':
return {}
try:
return json_dict_bytes_to_unicode(self.loader.load(out, file_name=path))
except ValueError:
raise AnsibleError("could not parse post variable response: %s, %s" % (cmd, out))
# ==== nullishzero/Portage :: pym/_emerge/EbuildMerge.py (license: gpl-2.0) ====
# Copyright 1999-2011 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
from _emerge.CompositeTask import CompositeTask
from portage import os
from portage.dbapi._MergeProcess import MergeProcess
class EbuildMerge(CompositeTask):
__slots__ = ("exit_hook", "find_blockers", "logger", "ldpath_mtimes",
"pkg", "pkg_count", "pkg_path", "pretend",
"settings", "tree", "world_atom")
def _start(self):
root_config = self.pkg.root_config
settings = self.settings
mycat = settings["CATEGORY"]
mypkg = settings["PF"]
pkgloc = settings["D"]
infloc = os.path.join(settings["PORTAGE_BUILDDIR"], "build-info")
myebuild = settings["EBUILD"]
mydbapi = root_config.trees[self.tree].dbapi
vartree = root_config.trees["vartree"]
background = (settings.get('PORTAGE_BACKGROUND') == '1')
logfile = settings.get('PORTAGE_LOG_FILE')
merge_task = MergeProcess(
mycat=mycat, mypkg=mypkg, settings=settings,
treetype=self.tree, vartree=vartree, scheduler=self.scheduler,
background=background, blockers=self.find_blockers, pkgloc=pkgloc,
infloc=infloc, myebuild=myebuild, mydbapi=mydbapi,
prev_mtimes=self.ldpath_mtimes, logfile=logfile)
self._start_task(merge_task, self._merge_exit)
def _merge_exit(self, merge_task):
if self._final_exit(merge_task) != os.EX_OK:
self.exit_hook(self)
self.wait()
return
pkg = self.pkg
self.world_atom(pkg)
pkg_count = self.pkg_count
pkg_path = self.pkg_path
logger = self.logger
if "noclean" not in self.settings.features:
short_msg = "emerge: (%s of %s) %s Clean Post" % \
(pkg_count.curval, pkg_count.maxval, pkg.cpv)
logger.log((" === (%s of %s) " + \
"Post-Build Cleaning (%s::%s)") % \
(pkg_count.curval, pkg_count.maxval, pkg.cpv, pkg_path),
short_msg=short_msg)
logger.log(" ::: completed emerge (%s of %s) %s to %s" % \
(pkg_count.curval, pkg_count.maxval, pkg.cpv, pkg.root))
self.exit_hook(self)
self.wait()
# ==== emilroz/openmicroscopy :: examples/Training/markup.py (license: gpl-2.0) ====
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2014 University of Dundee & Open Microscopy Environment.
# All Rights Reserved.
# Use is subject to license terms supplied in LICENSE.txt
#
"""
This script parses a list of text files and generates Wiki output.
E.g.
$ python markup.py # to stdout
$ python markup.py > wikiText.txt # to file
"""
import sys, re
import fileinput
def lines(file):
if file.name.endswith(".py"):
all = [line for line in file]
skip = check_header(all, quiet=True)
for line in all[skip:]:
yield line
else:
for line in file:
yield line
yield '\n'
def blocks(file):
block = []
for line in lines(file):
if line.strip() and not line.startswith("# ======================="):
block.append(line.rstrip())
elif block:
#yield ''.join(block).strip()
yield block
block = []
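
# Illustrative sketch (editor's addition, not part of the upstream script):
# blocks() yields runs of non-blank lines as lists, skipping the separator
# lines. lines() reads a .name attribute to decide whether to strip the
# Python license header, so the fake file below provides one.
def _example_blocks():
    class FakeFile(list):
        name = 'demo.txt'  # not .py, so no header check is attempted
    demo = FakeFile(['first\n', 'block\n', '\n', 'second block\n'])
    return list(blocks(demo))  # [['first', 'block'], ['second block']]
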
class Rule:
"""
Base class for all rules.
"""
def __init__(self, comment_char=None):
self.comment_char = comment_char
def action(self, block, handler):
handler.start(self.type)
handler.feed(block)
handler.end(self.type)
return True
class SubtitleRule(Rule):
"""
A single line that is a comment and follows a Code block
"""
type = 'subtitle'
afterCode = True
def condition(self, block):
if len(block) == 1 and block[0].startswith(self.comment_char) and self.afterCode:
block[0] = block[0].lstrip('%s ' % self.comment_char)
self.afterCode = False
return True
# see if this is a code block - if so, reset flag
for line in block:
if not line.startswith(self.comment_char):
self.afterCode = True
break
return False
class SphinxSubtitleRule(SubtitleRule):
""" Need this action to only start (all on one line - no feed) """
def action(self, block, handler):
handler.start(self.type, block)
return True
class CommentRule(Rule):
"""
A comment block is a block where every line starts with a comment character
"""
type = 'comment'
def condition(self, block):
for line in block:
if not line.startswith(self.comment_char):
return False
# since we have a comment (not code), lets remove all the '#'
for i in range(len(block)):
block[i] = block[i].lstrip('%s ' % self.comment_char)
return True
class SphinxCommentRule(CommentRule):
""" Need this action to feed without indent) """
def action(self, block, handler):
handler.start(self.type)
handler.feed(block, indent="")
handler.end(self.type)
return True
class CodeRule(Rule):
"""
A code block is simply a block that isn't covered by any of the
other rules. NB: Other rules will be tested before this one
"""
type = 'code'
def condition(self, block):
return True
class Handler:
"""
An object that handles method calls from the Parser.
The Parser will call the start() and end() methods at the
beginning of each block, with the proper block name as
parameter. The sub() method will be used in regular expression
substitution. When called with a name such as 'emphasis', it will
return a proper substitution function.
"""
def callback(self, prefix, name, *args):
method = getattr(self, prefix+name, None)
if callable(method): return method(*args)
def start(self, name, *args):
self.callback('start_', name, *args)
def end(self, name):
self.callback('end_', name)
def sub(self, name):
return lambda match: self.callback('sub_', name, match) or match.group(0)
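
# Illustrative sketch (editor's addition, not part of the upstream script):
# Handler.sub('url') builds a substitution callable for re.sub that
# dispatches to sub_url() on the concrete renderer, falling back to the
# unmodified match when no such method exists.
def _example_sub():
    renderer = SphinxRenderer()
    return re.sub(r'(http://[\.a-zA-Z0-9_/]+)', renderer.sub('url'),
                  'see http://example.org/docs for details')
    # -> 'see [http://example.org/docs] for details'
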
class SphinxRenderer(Handler):
"""
A specific handler used for rendering reStrunctured Text (sphinx Docs).
"""
def start_document(self, title):
print title
print "^" * len(title)
def end_document(self):
print ''
def start_code(self):
print '\n::\n'
def end_code(self):
print ''
def start_subtitle(self, block):
print "\n- **%s**" % block[0]
def end_subtitle(self):
print ""
def start_comment(self):
print '\n'
def end_comment(self):
print '\n'
def start_list(self):
print '\n'
def end_list(self):
print '\n'
def start_listitem(self):
print ' * '
def end_listitem(self):
print ''
def start_title(self):
print '='
def end_title(self):
print '='
def sub_emphasis(self, match):
return '**%s**' % match.group(1)
def sub_url(self, match):
return '[%s]' % (match.group(1))
def sub_mail(self, match):
return '<a href="mailto:%s">%s</a>' % (match.group(1), match.group(1))
def feed(self, block, indent=" "):
for i in range(len(block)-1):
print indent + block[i]
print indent + block[-1],
class WikiRenderer(Handler):
"""
A specific handler used for rendering Wiki.
"""
def start_document(self, title):
print '== %s ==' % title
def end_document(self):
print ''
def start_code(self):
print '\n{{{'
def end_code(self):
print '\n}}}'
def start_subtitle(self):
print " * ''' ",
def end_subtitle(self):
print " ''' "
def start_comment(self):
print '\n'
def end_comment(self):
print '\n'
def start_list(self):
print '\n'
def end_list(self):
print '\n'
def start_listitem(self):
print ' * '
def end_listitem(self):
print ''
def start_title(self):
print '='
def end_title(self):
print '='
def sub_emphasis(self, match):
return '**%s**' % match.group(1)
def sub_url(self, match):
return '[%s]' % (match.group(1))
def sub_mail(self, match):
return '<a href="mailto:%s">%s</a>' % (match.group(1), match.group(1))
def feed(self, block):
for i in range(len(block)-1):
print block[i]
print block[-1],
class Parser:
"""
A Parser reads a text file, applying rules and controlling a
handler.
"""
def __init__(self, handler):
self.handler = handler
self.rules = []
self.filters = []
def addRule(self, rule):
self.rules.append(rule)
def addFilter(self, pattern, name):
def filter(block, handler):
return re.sub(pattern, handler.sub(name), block)
self.filters.append(filter)
def parse(self, file, title):
self.handler.start('document', title)
for c, block in enumerate(blocks(file)):
if c == 0:
continue # don't output the first block (connection, imports etc)
for i in range(len(block)):
for filter in self.filters:
block[i] = filter(block[i], self.handler)
for rule in self.rules:
if rule.condition(block):
last = rule.action(block, self.handler)
if last: break
self.handler.end('document')
class PythonParser(Parser):
"""
A specific Parser that adds rules for Python
"""
def __init__(self, handler):
Parser.__init__(self, handler)
self.addRule(SphinxSubtitleRule('#'))
self.addRule(SphinxCommentRule('#'))
self.addRule(CodeRule())
#self.addFilter(r'\*(.+?)\*', 'emphasis')
self.addFilter(r'(http://[\.a-zA-Z0-9_/]+)', 'url')
class MatlabParser(Parser):
"""
A specific Parser that adds rules for Matlab
"""
def __init__(self, handler):
Parser.__init__(self, handler)
self.addRule(SphinxSubtitleRule('%'))
self.addRule(SphinxCommentRule('%'))
self.addRule(CodeRule())
#self.addFilter(r'\*(.+?)\*', 'emphasis')
self.addFilter(r'(http://[\.a-zA-Z_/]+)', 'url')
PYHEADER="""#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2014 University of Dundee & Open Microscopy Environment.
# All Rights Reserved.
# Use is subject to license terms supplied in LICENSE.txt
#
\"\"\"
FOR TRAINING PURPOSES ONLY!
\"\"\"""".split("\n")
def check_header(file_lines, quiet=False):
"""
Checks the first N lines of the file that they match
PYHEADER. Returns the number of lines which should be skipped.
"""
lines = []
for line in file_lines:
idx = len(lines)
lines.append(line)
try:
test = PYHEADER[idx]
if test.strip() != line.strip():
raise Exception("bad header. expected: '%s'. found: '%s'." % \
                    (test.strip(), line.strip()))
except IndexError:
if not quiet:
print "ok"
break
return len(lines)
if __name__ == "__main__":
pythonFiles = ['python/Connect_To_OMERO.py', 'python/Read_Data.py', 'python/Groups_Permissions.py', 'python/Raw_Data_Access.py',
'python/Write_Data.py', 'python/Tables.py', 'python/ROIs.py', 'python/Delete.py', 'python/Render_Images.py',
'python/Create_Image.py', 'python/Filesets.py']
titles = ['Connect to OMERO', 'Read data', 'Groups and permissions', 'Raw data access', 'Write data',
'OMERO tables', 'ROIs', 'Delete data', 'Render Images', 'Create Image', 'Filesets - New in OMERO 5']
if "--check_header" in sys.argv:
for py in pythonFiles:
print "check_header(%s)" % py,
check_header([x for x in open(py, "r")])
else:
#handler = HTMLRenderer()
handler = SphinxRenderer()
#parser.parse(sys.stdin)
print "\n\n------------------------------------------------PYTHON-------------------------------------------------------------\n\n"
parser = PythonParser(handler)
for f, name in zip(pythonFiles, titles):
read = open(f, 'r')
parser.parse(read, name)
matlabFiles = [ 'matlab/ConnectToOMERO.m', 'matlab/ReadData.m', 'matlab/RawDataAccess.m', \
'matlab/WriteData.m', 'matlab/ROIs.m', 'matlab/DeleteData.m', 'matlab/RenderImages.m']
mTitles = ['Connect to OMERO', 'Read data', 'Raw data access', 'Write data', \
'ROIs', 'Delete data', 'Render Images']
print "\n\n------------------------------------------------MATLAB-------------------------------------------------------------\n\n"
parser = MatlabParser(handler)
for f, name in zip(matlabFiles, mTitles):
read = open(f, 'r')
parser.parse(read, name)
# ==== mostateresnet/django-resnet-survey :: setup.py (license: mit) ====
import os
import sys
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='django-resnet-survey',
version='0.1',
description='Very simple survey app made with django',
long_description=read('README.md'),
author='ResNet, Missouri State University',
author_email='[email protected]',
license='MIT',
url='https://github.com/mostateresnet/django-resnet-survey',
packages=find_packages(),
include_package_data=True,
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Framework :: Django',
],
zip_safe=False,
install_requires=[
'django-autofixture',
'qrcode',
'xlwt',
],
setup_requires=[
'versiontools >= 1.6',
],
)
# ==== mixja/eap-sim-lab :: lib/pyscard-1.6.16/smartcard/Examples/framework/sample_ATR.py (license: mit) ====
#! /usr/bin/env python
"""
Sample script for the smartcard.ATR utility class.
__author__ = "http://www.gemalto.com"
Copyright 2001-2009 gemalto
Author: Jean-Daniel Aussel, mailto:[email protected]
This file is part of pyscard.
pyscard is free software; you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation; either version 2.1 of the License, or
(at your option) any later version.
pyscard is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with pyscard; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""
from smartcard.ATR import ATR
from smartcard.util import toHexString
atr = ATR([0x3B, 0x9E, 0x95, 0x80, 0x1F, 0xC3, 0x80, 0x31, 0xA0, 0x73,
0xBE, 0x21, 0x13, 0x67, 0x29, 0x02, 0x01, 0x01, 0x81,
0xCD, 0xB9])
print atr
print 'historical bytes: ', toHexString(atr.getHistoricalBytes())
print 'checksum: ', "0x%X" % atr.getChecksum()
print 'checksum OK: ', atr.checksumOK
print 'T0 supported: ', atr.isT0Supported()
print 'T1 supported: ', atr.isT1Supported()
print 'T15 supported: ', atr.isT15Supported()
# ==== xsteadfastx/subsonic-xbmc-addon :: plugin.audio.subsonic/resources/lib/requests/models.py (license: mit) ====
# -*- coding: utf-8 -*-
"""
requests.models
~~~~~~~~~~~~~~~
This module contains the primary objects that power Requests.
"""
import collections
import datetime
from io import BytesIO, UnsupportedOperation
from .hooks import default_hooks
from .structures import CaseInsensitiveDict
from .auth import HTTPBasicAuth
from .cookies import cookiejar_from_dict, get_cookie_header
from .packages.urllib3.fields import RequestField
from .packages.urllib3.filepost import encode_multipart_formdata
from .packages.urllib3.util import parse_url
from .packages.urllib3.exceptions import DecodeError
from .exceptions import (
HTTPError, RequestException, MissingSchema, InvalidURL,
ChunkedEncodingError, ContentDecodingError)
from .utils import (
guess_filename, get_auth_from_url, requote_uri,
stream_decode_response_unicode, to_key_val_list, parse_header_links,
iter_slices, guess_json_utf, super_len, to_native_string)
from .compat import (
cookielib, urlunparse, urlsplit, urlencode, str, bytes, StringIO,
is_py2, chardet, json, builtin_str, basestring, IncompleteRead)
from .status_codes import codes
#: The set of HTTP status codes that indicate an automatically
#: processable redirect.
REDIRECT_STATI = (
codes.moved, # 301
codes.found, # 302
codes.other, # 303
codes.temporary_redirect, # 307
codes.permanent_redirect, # 308
)
DEFAULT_REDIRECT_LIMIT = 30
CONTENT_CHUNK_SIZE = 10 * 1024
ITER_CHUNK_SIZE = 512
class RequestEncodingMixin(object):
@property
def path_url(self):
"""Build the path URL to use."""
url = []
p = urlsplit(self.url)
path = p.path
if not path:
path = '/'
url.append(path)
query = p.query
if query:
url.append('?')
url.append(query)
return ''.join(url)
@staticmethod
def _encode_params(data):
"""Encode parameters in a piece of data.
Will successfully encode parameters when passed as a dict or a list of
2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
if parameters are supplied as a dict.
"""
if isinstance(data, (str, bytes)):
return data
elif hasattr(data, 'read'):
return data
elif hasattr(data, '__iter__'):
result = []
for k, vs in to_key_val_list(data):
if isinstance(vs, basestring) or not hasattr(vs, '__iter__'):
vs = [vs]
for v in vs:
if v is not None:
result.append(
(k.encode('utf-8') if isinstance(k, str) else k,
v.encode('utf-8') if isinstance(v, str) else v))
return urlencode(result, doseq=True)
else:
return data
@staticmethod
def _encode_files(files, data):
"""Build the body for a multipart/form-data request.
Will successfully encode files when passed as a dict or a list of
2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
if parameters are supplied as a dict.
"""
if (not files):
raise ValueError("Files must be provided.")
elif isinstance(data, basestring):
raise ValueError("Data must not be a string.")
new_fields = []
fields = to_key_val_list(data or {})
files = to_key_val_list(files or {})
for field, val in fields:
if isinstance(val, basestring) or not hasattr(val, '__iter__'):
val = [val]
for v in val:
if v is not None:
# Don't call str() on bytestrings: in Py3 it all goes wrong.
if not isinstance(v, bytes):
v = str(v)
new_fields.append(
(field.decode('utf-8') if isinstance(field, bytes) else field,
v.encode('utf-8') if isinstance(v, str) else v))
for (k, v) in files:
# support for explicit filename
ft = None
fh = None
if isinstance(v, (tuple, list)):
if len(v) == 2:
fn, fp = v
elif len(v) == 3:
fn, fp, ft = v
else:
fn, fp, ft, fh = v
else:
fn = guess_filename(v) or k
fp = v
if isinstance(fp, str):
fp = StringIO(fp)
if isinstance(fp, bytes):
fp = BytesIO(fp)
rf = RequestField(name=k, data=fp.read(),
filename=fn, headers=fh)
rf.make_multipart(content_type=ft)
new_fields.append(rf)
body, content_type = encode_multipart_formdata(new_fields)
return body, content_type
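
# Illustrative sketch (editor's addition, not part of the upstream module):
# _encode_params accepts dicts or ordered lists of 2-tuples; strings and
# file-like objects pass through untouched.
def _example_encode_params():
    # Order is retained for a list of 2-tuples; a dict would be arbitrary.
    return RequestEncodingMixin._encode_params([('k', 'v1'), ('k', 'v2')])
    # -> 'k=v1&k=v2'
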
class RequestHooksMixin(object):
def register_hook(self, event, hook):
"""Properly register a hook."""
if event not in self.hooks:
raise ValueError('Unsupported event specified, with event name "%s"' % (event))
if isinstance(hook, collections.Callable):
self.hooks[event].append(hook)
elif hasattr(hook, '__iter__'):
self.hooks[event].extend(h for h in hook if isinstance(h, collections.Callable))
def deregister_hook(self, event, hook):
"""Deregister a previously registered hook.
Returns True if the hook existed, False if not.
"""
try:
self.hooks[event].remove(hook)
return True
except ValueError:
return False
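
# Illustrative sketch (editor's addition, not part of the upstream module):
# hooks registered on a Request travel into the PreparedRequest and are
# fired by the Session; 'response' is the only event dispatched by default.
def _example_response_hook():
    def log_status(response, **kwargs):  # hypothetical callback
        print(response.status_code)
        return response
    req = Request('GET', 'http://httpbin.org/get',
                  hooks={'response': log_status})
    return req.prepare()
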
class Request(RequestHooksMixin):
"""A user-created :class:`Request <Request>` object.
Used to prepare a :class:`PreparedRequest <PreparedRequest>`, which is sent to the server.
:param method: HTTP method to use.
:param url: URL to send.
:param headers: dictionary of headers to send.
:param files: dictionary of {filename: fileobject} files to multipart upload.
:param data: the body to attach the request. If a dictionary is provided, form-encoding will take place.
:param params: dictionary of URL parameters to append to the URL.
:param auth: Auth handler or (user, pass) tuple.
:param cookies: dictionary or CookieJar of cookies to attach to this request.
:param hooks: dictionary of callback hooks, for internal usage.
Usage::
>>> import requests
>>> req = requests.Request('GET', 'http://httpbin.org/get')
>>> req.prepare()
<PreparedRequest [GET]>
"""
def __init__(self,
method=None,
url=None,
headers=None,
files=None,
data=None,
params=None,
auth=None,
cookies=None,
hooks=None):
# Default empty dicts for dict params.
data = [] if data is None else data
files = [] if files is None else files
headers = {} if headers is None else headers
params = {} if params is None else params
hooks = {} if hooks is None else hooks
self.hooks = default_hooks()
for (k, v) in list(hooks.items()):
self.register_hook(event=k, hook=v)
self.method = method
self.url = url
self.headers = headers
self.files = files
self.data = data
self.params = params
self.auth = auth
self.cookies = cookies
def __repr__(self):
return '<Request [%s]>' % (self.method)
def prepare(self):
"""Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it."""
p = PreparedRequest()
p.prepare(
method=self.method,
url=self.url,
headers=self.headers,
files=self.files,
data=self.data,
params=self.params,
auth=self.auth,
cookies=self.cookies,
hooks=self.hooks,
)
return p
class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
"""The fully mutable :class:`PreparedRequest <PreparedRequest>` object,
containing the exact bytes that will be sent to the server.
Generated from either a :class:`Request <Request>` object or manually.
Usage::
>>> import requests
>>> req = requests.Request('GET', 'http://httpbin.org/get')
>>> r = req.prepare()
<PreparedRequest [GET]>
>>> s = requests.Session()
>>> s.send(r)
<Response [200]>
"""
def __init__(self):
#: HTTP verb to send to the server.
self.method = None
#: HTTP URL to send the request to.
self.url = None
#: dictionary of HTTP headers.
self.headers = None
# The `CookieJar` used to create the Cookie header will be stored here
# after prepare_cookies is called
self._cookies = None
#: request body to send to the server.
self.body = None
#: dictionary of callback hooks, for internal usage.
self.hooks = default_hooks()
def prepare(self, method=None, url=None, headers=None, files=None,
data=None, params=None, auth=None, cookies=None, hooks=None):
"""Prepares the entire request with the given parameters."""
self.prepare_method(method)
self.prepare_url(url, params)
self.prepare_headers(headers)
self.prepare_cookies(cookies)
self.prepare_body(data, files)
self.prepare_auth(auth, url)
# Note that prepare_auth must be last to enable authentication schemes
# such as OAuth to work on a fully prepared request.
# This MUST go after prepare_auth. Authenticators could add a hook
self.prepare_hooks(hooks)
def __repr__(self):
return '<PreparedRequest [%s]>' % (self.method)
def copy(self):
p = PreparedRequest()
p.method = self.method
p.url = self.url
p.headers = self.headers.copy() if self.headers is not None else None
p._cookies = self._cookies.copy() if self._cookies is not None else None
p.body = self.body
p.hooks = self.hooks
return p
def prepare_method(self, method):
"""Prepares the given HTTP method."""
self.method = method
if self.method is not None:
self.method = self.method.upper()
def prepare_url(self, url, params):
"""Prepares the given HTTP URL."""
#: Accept objects that have string representations.
try:
url = unicode(url)
except NameError:
# We're on Python 3.
url = str(url)
except UnicodeDecodeError:
pass
# Don't do any URL preparation for oddball schemes
if ':' in url and not url.lower().startswith('http'):
self.url = url
return
# Support for unicode domain names and paths.
scheme, auth, host, port, path, query, fragment = parse_url(url)
if not scheme:
raise MissingSchema("Invalid URL {0!r}: No schema supplied. "
"Perhaps you meant http://{0}?".format(url))
if not host:
raise InvalidURL("Invalid URL %r: No host supplied" % url)
# Only want to apply IDNA to the hostname
try:
host = host.encode('idna').decode('utf-8')
except UnicodeError:
raise InvalidURL('URL has an invalid label.')
# Carefully reconstruct the network location
netloc = auth or ''
if netloc:
netloc += '@'
netloc += host
if port:
netloc += ':' + str(port)
# Bare domains aren't valid URLs.
if not path:
path = '/'
if is_py2:
if isinstance(scheme, str):
scheme = scheme.encode('utf-8')
if isinstance(netloc, str):
netloc = netloc.encode('utf-8')
if isinstance(path, str):
path = path.encode('utf-8')
if isinstance(query, str):
query = query.encode('utf-8')
if isinstance(fragment, str):
fragment = fragment.encode('utf-8')
enc_params = self._encode_params(params)
if enc_params:
if query:
query = '%s&%s' % (query, enc_params)
else:
query = enc_params
url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment]))
self.url = url
def prepare_headers(self, headers):
"""Prepares the given HTTP headers."""
if headers:
self.headers = CaseInsensitiveDict((to_native_string(name), value) for name, value in headers.items())
else:
self.headers = CaseInsensitiveDict()
def prepare_body(self, data, files):
"""Prepares the given HTTP body data."""
# Check if file, fo, generator, iterator.
# If not, run through normal process.
# Nottin' on you.
body = None
content_type = None
length = None
is_stream = all([
hasattr(data, '__iter__'),
not isinstance(data, (basestring, list, tuple, dict))
])
try:
length = super_len(data)
except (TypeError, AttributeError, UnsupportedOperation):
length = None
if is_stream:
body = data
if files:
raise NotImplementedError('Streamed bodies and files are mutually exclusive.')
if length is not None:
self.headers['Content-Length'] = builtin_str(length)
else:
self.headers['Transfer-Encoding'] = 'chunked'
else:
# Multi-part file uploads.
if files:
(body, content_type) = self._encode_files(files, data)
else:
if data:
body = self._encode_params(data)
if isinstance(data, basestring) or hasattr(data, 'read'):
content_type = None
else:
content_type = 'application/x-www-form-urlencoded'
self.prepare_content_length(body)
# Add content-type if it wasn't explicitly provided.
if (content_type) and (not 'content-type' in self.headers):
self.headers['Content-Type'] = content_type
self.body = body
def prepare_content_length(self, body):
if hasattr(body, 'seek') and hasattr(body, 'tell'):
body.seek(0, 2)
self.headers['Content-Length'] = builtin_str(body.tell())
body.seek(0, 0)
elif body is not None:
l = super_len(body)
if l:
self.headers['Content-Length'] = builtin_str(l)
elif self.method not in ('GET', 'HEAD'):
self.headers['Content-Length'] = '0'
def prepare_auth(self, auth, url=''):
"""Prepares the given HTTP auth data."""
# If no Auth is explicitly provided, extract it from the URL first.
if auth is None:
url_auth = get_auth_from_url(self.url)
auth = url_auth if any(url_auth) else None
if auth:
if isinstance(auth, tuple) and len(auth) == 2:
# special-case basic HTTP auth
auth = HTTPBasicAuth(*auth)
# Allow auth to make its changes.
r = auth(self)
# Update self to reflect the auth changes.
self.__dict__.update(r.__dict__)
# Recompute Content-Length
self.prepare_content_length(self.body)
def prepare_cookies(self, cookies):
"""Prepares the given HTTP cookie data."""
if isinstance(cookies, cookielib.CookieJar):
self._cookies = cookies
else:
self._cookies = cookiejar_from_dict(cookies)
cookie_header = get_cookie_header(self._cookies, self)
if cookie_header is not None:
self.headers['Cookie'] = cookie_header
def prepare_hooks(self, hooks):
"""Prepares the given hooks."""
for event in hooks:
self.register_hook(event, hooks[event])
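
# Illustrative sketch (editor's addition, not part of the upstream module):
# manual preparation, mirroring what Session.prepare_request() does. Note
# that hooks must be an iterable here ({} for none) because prepare_hooks()
# iterates whatever it is given.
def _example_manual_prepare():
    p = PreparedRequest()
    p.prepare(method='GET',
              url='http://httpbin.org/get',
              headers={'User-Agent': 'demo'},
              params={'q': 'test'},
              hooks={})
    return p.url  # -> 'http://httpbin.org/get?q=test'
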
class Response(object):
"""The :class:`Response <Response>` object, which contains a
server's response to an HTTP request.
"""
__attrs__ = [
'_content',
'status_code',
'headers',
'url',
'history',
'encoding',
'reason',
'cookies',
'elapsed',
'request',
]
def __init__(self):
super(Response, self).__init__()
self._content = False
self._content_consumed = False
#: Integer Code of responded HTTP Status, e.g. 404 or 200.
self.status_code = None
#: Case-insensitive Dictionary of Response Headers.
#: For example, ``headers['content-encoding']`` will return the
#: value of a ``'Content-Encoding'`` response header.
self.headers = CaseInsensitiveDict()
#: File-like object representation of response (for advanced usage).
#: Use of ``raw`` requires that ``stream=True`` be set on the request.
# This requirement does not apply for use internally to Requests.
self.raw = None
#: Final URL location of Response.
self.url = None
#: Encoding to decode with when accessing r.text.
self.encoding = None
#: A list of :class:`Response <Response>` objects from
#: the history of the Request. Any redirect responses will end
#: up here. The list is sorted from the oldest to the most recent request.
self.history = []
#: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK".
self.reason = None
#: A CookieJar of Cookies the server sent back.
self.cookies = cookiejar_from_dict({})
#: The amount of time elapsed between sending the request
#: and the arrival of the response (as a timedelta)
self.elapsed = datetime.timedelta(0)
#: The :class:`PreparedRequest <PreparedRequest>` object to which this
#: is a response.
self.request = None
def __getstate__(self):
# Consume everything; accessing the content attribute makes
# sure the content has been fully read.
if not self._content_consumed:
self.content
return dict(
(attr, getattr(self, attr, None))
for attr in self.__attrs__
)
def __setstate__(self, state):
for name, value in state.items():
setattr(self, name, value)
# pickled objects do not have .raw
setattr(self, '_content_consumed', True)
setattr(self, 'raw', None)
def __repr__(self):
return '<Response [%s]>' % (self.status_code)
def __bool__(self):
"""Returns true if :attr:`status_code` is 'OK'."""
return self.ok
def __nonzero__(self):
"""Returns true if :attr:`status_code` is 'OK'."""
return self.ok
def __iter__(self):
"""Allows you to use a response as an iterator."""
return self.iter_content(128)
@property
def ok(self):
try:
self.raise_for_status()
except RequestException:
return False
return True
@property
def is_redirect(self):
"""True if this Response is a well-formed HTTP redirect that could have
been processed automatically (by :meth:`Session.resolve_redirects`).
"""
return ('location' in self.headers and self.status_code in REDIRECT_STATI)
@property
def is_permanent_redirect(self):
"""True if this Response one of the permanant versions of redirect"""
return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect))
@property
def apparent_encoding(self):
"""The apparent encoding, provided by the chardet library"""
return chardet.detect(self.content)['encoding']
def iter_content(self, chunk_size=1, decode_unicode=False):
"""Iterates over the response data. When stream=True is set on the
request, this avoids reading the content at once into memory for
large responses. The chunk size is the number of bytes it should
read into memory. This is not necessarily the length of each item
returned as decoding can take place.
If decode_unicode is True, content will be decoded using the best
available encoding based on the response.
"""
def generate():
try:
# Special case for urllib3.
try:
for chunk in self.raw.stream(chunk_size, decode_content=True):
yield chunk
except IncompleteRead as e:
raise ChunkedEncodingError(e)
except DecodeError as e:
raise ContentDecodingError(e)
except AttributeError:
# Standard file-like object.
while True:
chunk = self.raw.read(chunk_size)
if not chunk:
break
yield chunk
self._content_consumed = True
# simulate reading small chunks of the content
reused_chunks = iter_slices(self._content, chunk_size)
stream_chunks = generate()
chunks = reused_chunks if self._content_consumed else stream_chunks
if decode_unicode:
chunks = stream_decode_response_unicode(chunks, self)
return chunks
def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=None):
"""Iterates over the response data, one line at a time. When
stream=True is set on the request, this avoids reading the
content at once into memory for large responses.
"""
pending = None
for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode):
if pending is not None:
chunk = pending + chunk
lines = chunk.splitlines()
if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]:
pending = lines.pop()
else:
pending = None
for line in lines:
yield line
if pending is not None:
yield pending
@property
def content(self):
"""Content of the response, in bytes."""
if self._content is False:
# Read the contents.
try:
if self._content_consumed:
raise RuntimeError(
'The content for this response was already consumed')
if self.status_code == 0:
self._content = None
else:
self._content = bytes().join(self.iter_content(CONTENT_CHUNK_SIZE)) or bytes()
except AttributeError:
self._content = None
self._content_consumed = True
# don't need to release the connection; that's been handled by urllib3
# since we exhausted the data.
return self._content
@property
def text(self):
"""Content of the response, in unicode.
If Response.encoding is None, encoding will be guessed using
``chardet``.
The encoding of the response content is determined based solely on HTTP
headers, following RFC 2616 to the letter. If you can take advantage of
non-HTTP knowledge to make a better guess at the encoding, you should
set ``r.encoding`` appropriately before accessing this property.
"""
# Try charset from content-type
content = None
encoding = self.encoding
if not self.content:
return str('')
# Fallback to auto-detected encoding.
if self.encoding is None:
encoding = self.apparent_encoding
# Decode unicode from given encoding.
try:
content = str(self.content, encoding, errors='replace')
except (LookupError, TypeError):
# A LookupError is raised if the encoding was not found which could
# indicate a misspelling or similar mistake.
#
# A TypeError can be raised if encoding is None
#
# So we try blindly encoding.
content = str(self.content, errors='replace')
return content
def json(self, **kwargs):
"""Returns the json-encoded content of a response, if any.
:param \*\*kwargs: Optional arguments that ``json.loads`` takes.
"""
if not self.encoding and len(self.content) > 3:
# No encoding set. JSON RFC 4627 section 3 states we should expect
# UTF-8, -16 or -32. Detect which one to use; If the detection or
# decoding fails, fall back to `self.text` (using chardet to make
# a best guess).
encoding = guess_json_utf(self.content)
if encoding is not None:
try:
return json.loads(self.content.decode(encoding), **kwargs)
except UnicodeDecodeError:
# Wrong UTF codec detected; usually because it's not UTF-8
# but some other 8-bit codec. This is an RFC violation,
# and the server didn't bother to tell us what codec *was*
# used.
pass
return json.loads(self.text, **kwargs)
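    # Illustrative sketch (not part of the original module): `resp` is an
    # assumed Response carrying a JSON body; extra keyword arguments are
    # forwarded verbatim to json.loads:
    #
    #   from collections import OrderedDict
    #   data = resp.json(object_pairs_hook=OrderedDict)  # keep key order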
@property
def links(self):
"""Returns the parsed header links of the response, if any."""
header = self.headers.get('link')
# l = MultiDict()
l = {}
if header:
links = parse_header_links(header)
for link in links:
key = link.get('rel') or link.get('url')
l[key] = link
return l
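    # Illustrative sketch (not part of the original module): for a response
    # carrying the header
    #   Link: <https://example.com/?page=2>; rel="next"
    # the parsed dictionary is keyed by the rel value (or the url when rel is
    # missing), so pagination code can do:
    #
    #   next_url = resp.links['next']['url']  # `resp` is an assumed Response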
def raise_for_status(self):
"""Raises stored :class:`HTTPError`, if one occurred."""
http_error_msg = ''
if 400 <= self.status_code < 500:
http_error_msg = '%s Client Error: %s' % (self.status_code, self.reason)
elif 500 <= self.status_code < 600:
http_error_msg = '%s Server Error: %s' % (self.status_code, self.reason)
if http_error_msg:
raise HTTPError(http_error_msg, response=self)
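    # Illustrative sketch (not part of the original module): the typical
    # pattern converts 4xx/5xx statuses into exceptions near the call site;
    # `resp` and `log` are hypothetical names:
    #
    #   try:
    #       resp.raise_for_status()
    #   except HTTPError as exc:
    #       log.warning("request failed: %s", exc)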
def close(self):
"""Releases the connection back to the pool. Once this method has been
called the underlying ``raw`` object must not be accessed again.
*Note: Should not normally need to be called explicitly.*
"""
return self.raw.release_conn()
| mit | -220,950,383,986,727,650 | 32.142681 | 119 | 0.567267 | false |
rohitwaghchaure/frappe | frappe/tests/test_global_search.py | 1 | 4662 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import unittest
import frappe
from frappe.utils import global_search
from frappe.test_runner import make_test_objects
import frappe.utils
class TestGlobalSearch(unittest.TestCase):
def setUp(self):
global_search.setup_global_search_table()
self.assertTrue('__global_search' in frappe.db.get_tables())
doctype = "Event"
global_search.reset()
from frappe.custom.doctype.property_setter.property_setter import make_property_setter
make_property_setter(doctype, "subject", "in_global_search", 1, "Int")
make_property_setter(doctype, "event_type", "in_global_search", 1, "Int")
make_property_setter(doctype, "roles", "in_global_search", 1, "Int")
make_property_setter(doctype, "repeat_on", "in_global_search", 0, "Int")
def tearDown(self):
frappe.db.sql('delete from `tabProperty Setter` where doc_type="Event"')
frappe.clear_cache(doctype='Event')
frappe.db.sql('delete from `tabEvent`')
frappe.db.sql('delete from __global_search')
make_test_objects('Event')
frappe.db.commit()
def insert_test_events(self):
frappe.db.sql('delete from tabEvent')
phrases = ['"The Sixth Extinction II: Amor Fati" is the second episode of the seventh season of the American science fiction.',
'After Mulder awakens from his coma, he realizes his duty to prevent alien colonization. ',
'Carter explored themes of extraterrestrial involvement in ancient mass extinctions in this episode, the third in a trilogy.']
for text in phrases:
frappe.get_doc(dict(
doctype='Event',
subject=text,
repeat_on='Every Month',
starts_on=frappe.utils.now_datetime())).insert()
frappe.db.commit()
def test_search(self):
self.insert_test_events()
results = global_search.search('awakens')
self.assertTrue('After Mulder awakens from his coma, he realizes his duty to prevent alien colonization. ' in results[0].content)
results = global_search.search('extraterrestrial')
self.assertTrue('Carter explored themes of extraterrestrial involvement in ancient mass extinctions in this episode, the third in a trilogy.' in results[0].content)
def test_update_doc(self):
self.insert_test_events()
test_subject = 'testing global search'
event = frappe.get_doc('Event', frappe.get_all('Event')[0].name)
event.subject = test_subject
event.save()
frappe.db.commit()
results = global_search.search('testing global search')
self.assertTrue('testing global search' in results[0].content)
def test_update_fields(self):
self.insert_test_events()
results = global_search.search('Every Month')
self.assertEquals(len(results), 0)
doctype = "Event"
from frappe.custom.doctype.property_setter.property_setter import make_property_setter
make_property_setter(doctype, "repeat_on", "in_global_search", 1, "Int")
global_search.rebuild_for_doctype(doctype)
results = global_search.search('Every Month')
self.assertEquals(len(results), 3)
def test_delete_doc(self):
self.insert_test_events()
event_name = frappe.get_all('Event')[0].name
event = frappe.get_doc('Event', event_name)
test_subject = event.subject
results = global_search.search(test_subject)
self.assertEquals(len(results), 1)
frappe.delete_doc('Event', event_name)
results = global_search.search(test_subject)
self.assertEquals(len(results), 0)
def test_insert_child_table(self):
frappe.db.sql('delete from tabEvent')
phrases = ['Hydrus is a small constellation in the deep southern sky. ',
'It was first depicted on a celestial atlas by Johann Bayer in his 1603 Uranometria. ',
'The French explorer and astronomer Nicolas Louis de Lacaille charted the brighter stars and gave their Bayer designations in 1756. ',
'Its name means "male water snake", as opposed to Hydra, a much larger constellation that represents a female water snake. ',
'It remains below the horizon for most Northern Hemisphere observers.',
'The brightest star is the 2.8-magnitude Beta Hydri, also the closest reasonably bright star to the south celestial pole. ',
'Pulsating between magnitude 3.26 and 3.33, Gamma Hydri is a variable red giant some 60 times the diameter of our Sun. ',
'Lying near it is VW Hydri, one of the brightest dwarf novae in the heavens. ',
'Four star systems have been found to have exoplanets to date, most notably HD 10180, which could bear up to nine planetary companions.']
for text in phrases:
doc = frappe.get_doc({
'doctype':'Event',
'subject': text,
'starts_on': frappe.utils.now_datetime()
})
doc.insert()
frappe.db.commit() | mit | 4,174,737,366,077,080,600 | 40.633929 | 166 | 0.736165 | false |
philpep/testinfra | testinfra/backend/openshift.py | 1 | 1712 | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from testinfra.backend import base
class OpenShiftBackend(base.BaseBackend):
NAME = "openshift"
def __init__(self, name, *args, **kwargs):
self.name = name
self.container = kwargs.get('container')
self.namespace = kwargs.get('namespace')
self.kubeconfig = kwargs.get('kubeconfig')
super().__init__(self.name, *args, **kwargs)
def run(self, command, *args, **kwargs):
cmd = self.get_command(command, *args)
# `oc exec` does not support specifying the user to run as.
# See https://github.com/kubernetes/kubernetes/issues/30656
oscmd = 'oc '
oscmd_args = []
if self.kubeconfig is not None:
oscmd += '--kubeconfig="%s" '
oscmd_args.append(self.kubeconfig)
if self.namespace is not None:
oscmd += '-n %s '
oscmd_args.append(self.namespace)
if self.container is not None:
oscmd += '-c %s '
oscmd_args.append(self.container)
oscmd += 'exec %s -- /bin/sh -c %s'
oscmd_args.extend([self.name, cmd])
out = self.run_local(oscmd, *oscmd_args)
return out
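    # Illustrative sketch (added for clarity; not part of the original
    # module): with container='app', namespace='dev' and kubeconfig left
    # unset, the assembled command is roughly
    #
    #   oc -n dev -c app exec <pod-name> -- /bin/sh -c '<command>'
    #
    # where <pod-name> and <command> stand for the backend's name and the
    # quoted test command (placeholders, not literal values).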
| apache-2.0 | 362,022,122,710,024,450 | 37.909091 | 74 | 0.634346 | false |
aigamo/primecloud-controller | iaas-gw/src/iaasgw/client/vCloudiaasclient.py | 5 | 49263 | # coding: UTF-8
#
# Copyright 2014 by SCSK Corporation.
#
# This file is part of PrimeCloud Controller(TM).
#
# PrimeCloud Controller(TM) is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# PrimeCloud Controller(TM) is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PrimeCloud Controller(TM). If not, see <http://www.gnu.org/licenses/>.
#
from iaasgw.common.pccConnections import PCCVCloudConnection
from iaasgw.exception.iaasException import IaasException
from iaasgw.log.log import IaasLogger
from iaasgw.module.vcloud.vCloudModule import PccVMDisk, PccStorageProfile, \
PccVAppNetwork, PccVMNetwork
from iaasgw.module.vcloud.vcloudXMLWriter import RecomposeVAppXML_ADD_VM, \
RecomposeVAppXML_DEL_VM, InstantiateVAppXML, ComposeVAppXML, \
SetProductSectionListXML
from iaasgw.utils.propertyUtil import getVCloudInfo, getOtherProperty
from iaasgw.utils.stringUtils import isNotEmpty, isBit
from libcloud.compute.base import Node
from libcloud.compute.drivers.vcloud import VCloud_1_5_NodeDriver, get_url_path, \
fixxpath
from libcloud.utils.py3 import urlparse
from xml.etree import ElementTree as ET
import copy
import time
import traceback
urlparse = urlparse.urlparse
class VCloudIaasClient(VCloud_1_5_NodeDriver):
STOPPED = "Stopped"
RUNNING = "Running"
UNKNOWN = "Unknown"
RESOLVED = "Resolved"
WAITING = "Waiting"
RETRY_MAX = 3
NODE_STATE_MAP = {'-1': UNKNOWN,
'0': UNKNOWN,
'1': RESOLVED,
'2': WAITING,
'3': UNKNOWN,
'4': RUNNING,
'5': UNKNOWN,
'6': UNKNOWN,
'7': UNKNOWN,
'8': STOPPED,
'9': UNKNOWN,
'10': UNKNOWN,
'11': WAITING,
'12': WAITING,
'13': WAITING,
'14': WAITING,
'15': WAITING,
}
logger = IaasLogger()
platformNo = None
timeout = 600
username = None
vdc_name = None
defnet = None
connectionCls = PCCVCloudConnection
def __init__(self, platforminfo, username, key, secret=None):
self.platformNo = platforminfo["platformNo"]
self.username = username
self.logger.info(u"利用プラットフォーム" + str(self.platformNo))
#接続情報
vCloudInfo = getVCloudInfo(self.platformNo)
host = vCloudInfo["host"]
secure = vCloudInfo["secure"]
pltfmNotimeout = vCloudInfo["timeout"]
self.vdc_name = vCloudInfo["vdc"]
self.defnet = vCloudInfo["defnetwork"]
        # Build the access key
key = key + "@" + vCloudInfo["org"]
        # Timeouts
if pltfmNotimeout is not None:
            # Connection timeout
self.connectionCls.timeout = int(pltfmNotimeout)
            # Task-wait timeout
self.timeout = int(pltfmNotimeout)
        # Proxy usage
useProxy = platforminfo["proxy"]
if useProxy == 1:
useProxy = True
else:
useProxy = False
self.connectionCls.useProxy = useProxy
        # Protocol
if secure == 1:
secure = True
else:
secure = False
self.logger.info(u"接続情報==> "+host+": secure=" + str(secure))
VCloud_1_5_NodeDriver.__init__(self, key=key, secret=secret, secure=secure, host= host)
############################################################
#
    # Basic data
#
############################################################
#########################
#
    # Get the platform number
#
#########################
def getPlatformNo(self):
return self.platformNo
#########################
#
    # Get the user name
#
#########################
def getUsername(self):
return self.username
############################################################
#
    # Read operations (describe)
#
############################################################
#########################
#
    # Get the organization vDC
#
#########################
def getUseVdc(self):
return self._get_vdc(self.vdc_name)
#########################
#
    # Get the storage profiles configured for the vDC
#
#########################
def getVdcStorageprofiles(self, vdc):
res = self.requestLoop(get_url_path(vdc.id)).object
        # Storage profiles
storageprofiles = {}
for storageprofile in res.findall(fixxpath(res, 'VdcStorageProfiles/VdcStorageProfile')):
storageprofiles[storageprofile.get('name')] = PccStorageProfile(storageprofile.get('name'), storageprofile.get('href'))
return storageprofiles
#########################
#
    # Get the vDC networks
#
#########################
def describeVdcNetwork(self):
vdc = self.getUseVdc()
res = self.requestLoop(get_url_path(vdc.id)).object
        # vDC networks
vdcnetworks = []
for vdcnetworkconfig in res.findall(fixxpath(res, 'AvailableNetworks/Network')):
name = vdcnetworkconfig.get('name')
href = vdcnetworkconfig.get('href')
res2 = self.requestLoop(get_url_path(href)).object
vdcnetworks.append(self._makePccVAppNetwork(res2, name, href))
return vdcnetworks
#########################
#
    # Get a vApp
#
#########################
def describeMyCloud(self, vdc, vApp_name):
vAppName = vdc.name + "-" + vApp_name
return self.ex_find_node(vAppName, vdc)
#########################
#
    # Get the vApps in the vDC
#
#########################
def describeVdcMyCloud(self, vdc):
res = self.connection.request(get_url_path(vdc.id))
elms = res.object.findall(fixxpath(
res.object, "ResourceEntities/ResourceEntity")
)
vapps = [
(i.get('name'), i.get('href'))
for i in elms
if i.get('type')
== 'application/vnd.vmware.vcloud.vApp+xml'
and i.get('name')
]
nodes = []
for vapp_name, vapp_href in vapps:
try:
res = self.connection.request(
get_url_path(vapp_href),
headers={'Content-Type': 'application/vnd.vmware.vcloud.vApp+xml'}
)
nodes.append(self._to_node(res.object))
except Exception:
self.logger.error(traceback.format_exc())
raise
return nodes
#########################
#
    # Get the vApp networks
#
#########################
def describeVappNetwork(self, vapp):
vappnetworks = []
res = self.requestLoop(get_url_path(vapp.id)).object
for networkconfig in res.findall(fixxpath(res, 'NetworkConfigSection/NetworkConfig')):
name = networkconfig.get('networkName')
            # Ignore the placeholder network 'none'
if name == 'none':
continue
vappnetworks.append(self._makePccVAppNetwork(networkconfig, name))
return vappnetworks
#########################
#
    # Get VMs (all)
#
#########################
def describeInstances(self, vApp):
return vApp.extra['vms']
#########################
#
    # Get a VM (single)
#
#########################
def describeInstance(self, vApp, vm_name):
vms = vApp.extra['vms']
target = None
for vm in vms:
if vm["name"] == vm_name:
target = vm
return target
#########################
#
    # Get the VM networks
#
#########################
def describeVMNetwork(self, vm):
res = self.requestLoop('%s/networkConnectionSection' % get_url_path(vm["id"]))
        self.logger.info(ET.tostring(res.object))
primary_index = res.object.find(fixxpath(res.object, 'PrimaryNetworkConnectionIndex')).text
net_conns = res.object.findall(fixxpath(res.object, 'NetworkConnection'))
retNetworks = []
for item in net_conns:
name = item.get('network')
ipMode = item.find(fixxpath(item, 'IpAddressAllocationMode')).text
ipAddress = item.find(fixxpath(item, 'IpAddress')).text
index = item.find(fixxpath(item, 'NetworkConnectionIndex')).text
isPrimary = False
if index == primary_index:
isPrimary = True
retNetworks.append(PccVMNetwork(name, ipAddress, ipMode, index, isPrimary))
return retNetworks
#########################
#
    # Get a storage profile by name
#
#########################
def describeStorageProfile(self, vdc, sp_Name):
res = self.requestLoop(get_url_path(vdc.id)).object
for storageprofile in res.findall(fixxpath(res, 'VdcStorageProfiles/VdcStorageProfile')):
if storageprofile.get('name') == sp_Name:
return PccStorageProfile(storageprofile.get('name'), storageprofile.get('href'))
return None
#########################
#
    # Get the image names (list)
#
#########################
def describeImageNames(self, location=None):
imageNames = []
images = self.list_images()
for image in images:
res = self.requestLoop(image.id).object
res_ents = res.findall(fixxpath(res, "Children/Vm"))
for i in res_ents:
imageNames.append(i.get("name"))
return imageNames
#########################
#
    # Get the link (href) of an image
#
#########################
def describeImageHref(self, imagename):
images = self.list_images()
for image in images:
res = self.requestLoop(image.id).object
res_ents = res.findall(fixxpath(res, "Children/Vm"))
for i in res_ents:
if imagename == i.get("name"):
return i.get("href")
#########################
#
    # Get the disks attached to the VM
#
#########################
def describeVolumes(self, vm):
rasd_ns = '{http://schemas.dmtf.org/wbem/wscim/1/cim-schema/2/CIM_ResourceAllocationSettingData}'
# Get virtualHardwareSection/disks section
res = self.requestLoop('%s/virtualHardwareSection/disks' % get_url_path(vm["id"]))
disks =[]
for item in res.object.findall(fixxpath(res.object, 'Item')):
if item.find('%sHostResource' % rasd_ns) is not None:
name = item.find('%sInstanceID' % rasd_ns).text
size = item.find('%sHostResource' % rasd_ns).get(fixxpath(item, 'capacity'))
busType = item.find('%sHostResource' % rasd_ns).get(fixxpath(item, 'busType'))
unitNo = item.find('%sAddressOnParent' % rasd_ns).text
disks.append(PccVMDisk(name, size, busType, unitNo))
return disks
#########################
#
    # Get a disk attached to the VM (single)
#
#########################
def describeVolume(self, vm, deiskid):
disks = self.describeVolumes(vm)
for desk in disks:
if str(desk.name) == str(deiskid):
return desk
return None
#########################
#
    # Get the network names (list)
    # Use describeVdcNetwork instead
#
#########################
#def describeNetworkNames(self):
# networkNames = []
# for network in self.networks:
# networkNames.append(network.get("name"))
# return networkNames
#########################
#
    # Get the link (href) of a network
#
#########################
def describeNetworkHref(self, name):
for network in self.networks:
if name == network.get("name"):
return network.get("href")
#########################
#
    # Get the number of CPUs configured on the VM
#
#########################
def describeCPU(self, vm):
res = self.requestLoop('%s/virtualHardwareSection/cpu' % get_url_path(vm["id"]))
cpu =res.object.find('{http://schemas.dmtf.org/wbem/wscim/1/cim-schema/2/CIM_ResourceAllocationSettingData}VirtualQuantity').text
return cpu
#########################
#
    # Get the amount of memory configured on the VM
#
#########################
def describeMemory(self, vm):
res = self.requestLoop('%s/virtualHardwareSection/memory' % get_url_path(vm["id"]))
memory =res.object.find('{http://schemas.dmtf.org/wbem/wscim/1/cim-schema/2/CIM_ResourceAllocationSettingData}VirtualQuantity').text
return memory
#########################
#
    # Helper for parsing network configuration
#
#########################
def _makePccVAppNetwork(self, network_xml, name, href=None):
name = name
href = href
fencemode = None
gateway = None
netmask = None
dns1 = None
dns2 = None
iprangeF =None
iprangeT =None
primary = False
if name == getOtherProperty("vCloud.PCCNetwork"):
primary = True
if href is None:
link = network_xml.find(fixxpath(network_xml, 'Link'))
if link is not None:
href = link.get('href')
fence = network_xml.find(fixxpath(network_xml, 'Configuration/FenceMode'))
if fence is not None:
fencemode = fence.text
scope = network_xml.find(fixxpath(network_xml, 'Configuration/IpScopes/IpScope'))
for elem in scope:
if elem.tag == '{http://www.vmware.com/vcloud/v1.5}Gateway':
gateway = elem.text
if elem.tag == '{http://www.vmware.com/vcloud/v1.5}Netmask':
netmask = elem.text
if elem.tag == '{http://www.vmware.com/vcloud/v1.5}Dns1':
dns1 = elem.text
if elem.tag == '{http://www.vmware.com/vcloud/v1.5}Dns2':
dns2 = elem.text
ipranges = network_xml.findall(fixxpath(network_xml, 'Configuration/IpScopes/IpScope/IpRanges/IpRange'))
if ipranges is not None:
for iprange in ipranges:
for elem in iprange:
if elem.tag == '{http://www.vmware.com/vcloud/v1.5}StartAddress':
iprangeF = elem.text
if elem.tag == '{http://www.vmware.com/vcloud/v1.5}EndAddress':
iprangeT = elem.text
                    # Multiple IP ranges are not currently supported
break
return PccVAppNetwork(name, href, fencemode, gateway, netmask, dns1, dns2, iprangeF, iprangeT, primary)
############################################################
#
    # vApp operations
#
############################################################
#########################
#
    # Create an empty vApp
#
#########################
def createMyCloud(self, vdc, name, defNetworks):
try:
vappName = vdc.name + "-" + name
vapp_href = self._compose_MyCloud(vdc, vappName, defNetworks)
res = self.requestLoop(get_url_path(vapp_href))
node = self._to_node(res.object)
            # Execution log
self.logger.info(None, "IPROCESS-100701", [name])
return node
except Exception:
self.logger.error(traceback.format_exc())
raise IaasException("EPROCESS-000801", [name,])
#########################
#
    # Create a vApp from a template
    # Currently unused; kept for creating a my-cloud from a template
#
#########################
def createMyCloudByTemplate(self, vdc, name, template_name, defNetworks):
        # Locate the vApp template by name
template = None
templates = self.list_images()
for temp in templates:
if template_name == temp.name:
template = temp.id
try:
            vappName = vdc.name + "-" + name
vapp_href = self._instantiate_MyCloud(vdc, vappName, template, defNetworks)
res = self.requestLoop(get_url_path(vapp_href))
node = self._to_node(res.object)
return node
except Exception:
self.logger.error(traceback.format_exc())
raise IaasException("EPROCESS-000801", [name,])
#########################
#
    # Delete a vApp
#
#########################
def terminateMyCloud(self, myCloud):
try:
self.destroy_node(myCloud)
            # Execution log
self.logger.info(None, "IPROCESS-100702", [myCloud.name])
except Exception:
self.logger.error(traceback.format_exc())
raise IaasException("EPROCESS-000802", [myCloud.name,])
#########################
#
    # Helper for creating an empty vApp
#
#########################
def _compose_MyCloud(self, vdc, name, useNetworks):
compose_xml = ComposeVAppXML(
name=name,
useNetworks=useNetworks
)
self.logger.info(compose_xml.tostring())
# Instantiate VM and get identifier.
res = self.requestLoop(
'%s/action/composeVApp' % get_url_path(vdc.id),
data=compose_xml.tostring(),
method='POST',
headers={'Content-Type': 'application/vnd.vmware.vcloud.composeVAppParams+xml'}
)
vapp_href = res.object.get('href')
task_href = res.object.find(fixxpath(res.object, "Tasks/Task")).get('href')
self._wait_for_task_completion(task_href)
return vapp_href
#########################
#
    # Helper for creating a vApp from a template
    # Currently unused; kept for creating a my-cloud from a template
#
#########################
    def _instantiate_MyCloud(self, vdc, name, template, networks):
instantiate_xml = InstantiateVAppXML(
name=name,
template=template,
networks=networks
)
# Instantiate VM and get identifier.
res = self.requestLoop(
            '%s/action/instantiateVAppTemplate' % get_url_path(vdc.id),
data=instantiate_xml.tostring(),
method='POST',
headers={'Content-Type': 'application/vnd.vmware.vcloud.instantiateVAppTemplateParams+xml'}
)
vapp_href = res.object.get('href')
task_href = res.object.find(fixxpath(res.object, "Tasks/Task")).get('href')
self._wait_for_task_completion(task_href)
return vapp_href
#########################
#
    # Check the networks
#
#########################
def checkNetwork(self, vapp, instanceNw):
vappnetworks = self.describeVappNetwork(vapp)
        # Check for missing networks
for net in instanceNw:
isExist = False
for vappnet in vappnetworks:
if net["NETWORK_NAME"] == vappnet.name:
isExist = True
break
            # Not present; add it to the vApp
if not isExist:
self._add_vapp_nw(vapp, net)
#########################
#
    # Add a network (vApp)
#
#########################
def _add_vapp_nw(self, vapp, newNetwork):
rasd_ns = "{http://www.vmware.com/vcloud/v1.5}"
res = self.requestLoop('%s/networkConfigSection' % get_url_path(vapp.id))
network_config = ET.SubElement(res.object, "%sNetworkConfig" % rasd_ns)
# Don't set a custom vApp VM network name
network_config.set('networkName', newNetwork["NETWORK_NAME"])
configuration = ET.SubElement(network_config, '%sConfiguration' % rasd_ns)
for vdcnet in self.networks:
if vdcnet.get('name') == newNetwork["NETWORK_NAME"]:
ET.SubElement(configuration, '%sParentNetwork' % rasd_ns, {'href': vdcnet.get('href')})
ET.SubElement(configuration, '%sFenceMode' % rasd_ns).text = "bridged"
self.logger.info(ET.tostring(res.object))
        # Send the updated network configuration
res = self.requestLoop(
'%s/networkConfigSection' % get_url_path(vapp.id),
data=ET.tostring(res.object),
method='PUT',
headers={'Content-Type': 'application/vnd.vmware.vcloud.networkConfigSection+xml'}
)
self._wait_for_task_completion(res.object.get('href'))
############################################################
#
    # VM operations
#
############################################################
#########################
#
    # Create VMs
#
#########################
def createInstances(self, node, **kwargs):
image = kwargs['image']
vm_name = kwargs['vm_name']
vm_storage = kwargs['vm_storage']
vm_networks = kwargs.get('vm_networks')
vm_fqdn = kwargs.get('fqdn')
try:
self._add_vm(node, image, vm_name, vm_fqdn, vm_storage, vm_networks)
res = self.requestLoop(get_url_path(node.id))
            # Execution log
self.logger.info(None, "IPROCESS-100703", [vm_name,])
return self._to_node(res.object)
except Exception:
self.logger.error(traceback.format_exc())
raise IaasException("EPROCESS-000803", [vm_name,])
#########################
#
    # Start a VM
#
#########################
def startInstance(self, node, vm):
try:
node = self.ex_deploy_vm(node, vm)
            # Execution log
self.logger.info(None, "IPROCESS-100705", [vm["name"],])
return node
except Exception:
self.logger.error(traceback.format_exc())
raise IaasException("EPROCESS-000805", [vm["name"],])
#########################
#
    # Stop a VM
#
#########################
def stopInstance(self, node, vm):
try:
node = self.ex_undeploy_vm(node, vm)
            # Execution log
self.logger.info(None, "IPROCESS-100706", [vm["name"],])
return node
except Exception:
self.logger.error(traceback.format_exc())
raise IaasException("EPROCESS-000806", [vm["name"],])
#########################
#
    # Delete a VM
#
#########################
def terminateInstance(self, node, vm_name):
try:
vm = self.describeInstance(node, vm_name)
vm_harf = vm["id"]
self._del_vm(node, vm_harf)
            # Execution log
self.logger.info(None, "IPROCESS-100704", [vm_name,])
except Exception:
self.logger.error(traceback.format_exc())
raise IaasException("EPROCESS-000804", [vm_name,])
#########################
#
    # Edit a VM
#
#########################
def editInstance(self, vm, **kwargs):
try:
node = self.ex_edit_vm(vm["id"], **kwargs)
            # Execution log
self.logger.info(None, "IPROCESS-100710", [vm["name"],])
return node
except Exception:
self.logger.error(traceback.format_exc())
raise IaasException("EPROCESS-000810", [vm["name"],])
#########################
#
    # Helper for creating a VM
#
#########################
def _add_vm(self, node, image, vm_name, vm_fqdn, vm_storage, vm_networks):
add_xml = RecomposeVAppXML_ADD_VM(
name=node.name,
image=image,
vm_name=vm_name,
vm_storage = vm_storage,
vm_networks=vm_networks,
vm_fqdn=vm_fqdn
)
self.logger.info(add_xml.tostring())
# Instantiate VM and get identifier.
res = self.requestLoop(
'%s/action/recomposeVApp ' % get_url_path(node.id),
data=add_xml.tostring(),
method='POST',
headers={'Content-Type': 'application/vnd.vmware.vcloud.recomposeVAppParams+xml'}
)
task_href = res.object.get('href')
self._wait_for_task_completion(task_href)
#########################
#
    # Helper for deleting a VM
#
#########################
def _del_vm(self, node, vm_harf):
del_xml = RecomposeVAppXML_DEL_VM(
name=node.name,
vm_harf=vm_harf
)
self.logger.info(del_xml.tostring())
# Instantiate VM and get identifier.
res = self.requestLoop(
'%s/action/recomposeVApp ' % get_url_path(node.id),
data=del_xml.tostring(),
method='POST',
headers={'Content-Type': 'application/vnd.vmware.vcloud.recomposeVAppParams+xml'}
)
task_href = res.object.get('href')
self._wait_for_task_completion(task_href)
#########################
#
    # Helper for editing a VM
    # Currently only storage profile changes are supported
#
#########################
def ex_edit_vm(self, vm_harf, **kwargs):
storageProfile = kwargs.get('storageProfile')
res = self.requestLoop(get_url_path(vm_harf))
        # Change the storage profile
if storageProfile is not None:
res.object.find(fixxpath(res.object, "StorageProfile")).set('name', storageProfile.name)
res.object.find(fixxpath(res.object, "StorageProfile")).set('href', storageProfile.href)
self.logger.info(ET.tostring(res.object))
ress = self.requestLoop(get_url_path(vm_harf),
data=ET.tostring(res.object),
method='PUT',
headers={'Content-Type': 'application/vnd.vmware.vcloud.vm+xml'}
)
self._wait_for_task_completion(ress.object.get('href'))
#########################
#
    # Power operation control for a VM
    # Currently unused
#
#########################
def _perform_power_operation_vm(self, node, vm, operation):
res = self.requestLoop(
'%s/power/action/%s' % (get_url_path(vm["id"]), operation), method='POST')
        self._wait_for_task_completion(res.object.get('href'))
res = self.requestLoop(get_url_path(node.id))
return self._to_node(res.object)
#########################
#
    # Deploy (power-on) control
#
#########################
def ex_deploy_vm(self, node, vm):
deploy_xml = ET.Element('DeployVAppParams', {'powerOn': 'true', 'xmlns': 'http://www.vmware.com/vcloud/v1.5'})
res = self.requestLoop('%s/action/deploy' % get_url_path(vm["id"]),
data=ET.tostring(deploy_xml),
method='POST',
headers={'Content-Type': 'application/vnd.vmware.vcloud.deployVAppParams+xml'}
)
self._wait_for_task_completion(res.object.get('href'))
res = self.requestLoop(get_url_path(node.id))
return self._to_node(res.object)
#########################
#
    # Undeploy (shutdown) control
#
#########################
def ex_undeploy_vm(self, node, vm):
undeploy_xml = ET.Element('UndeployVAppParams', {'xmlns': 'http://www.vmware.com/vcloud/v1.5'})
undeploy_power_action_xml = ET.SubElement(undeploy_xml, 'UndeployPowerAction')
undeploy_power_action_xml.text = 'shutdown'
try:
res = self.requestLoop('%s/action/undeploy' % get_url_path(vm["id"]),
data=ET.tostring(undeploy_xml),
method='POST',
headers={'Content-Type': 'application/vnd.vmware.vcloud.undeployVAppParams+xml'}
)
self._wait_for_task_completion(res.object.get('href'))
except Exception:
undeploy_power_action_xml.text = 'powerOff'
res = self.requestLoop(
'%s/action/undeploy' % get_url_path(vm["id"]),
data=ET.tostring(undeploy_xml),
method='POST',
headers={'Content-Type': 'application/vnd.vmware.vcloud.undeployVAppParams+xml'}
)
self._wait_for_task_completion(res.object.get('href'))
res = self.requestLoop(get_url_path(node.id))
return self._to_node(res.object)
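    # Illustrative note (added for clarity; not part of the original module):
    # ex_undeploy_vm first asks vCloud for a graceful 'shutdown' undeploy and,
    # if that request fails (for example when VMware Tools is not running in
    # the guest), retries the same undeploy with a hard 'powerOff' action.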
############################################################
#
    # Volume operations
#
############################################################
#########################
#
    # Attach a disk
#
#########################
def attachVolume(self, vm, disk):
try:
self._validate_vm_disk_size(disk["SIZE"])
diskid = self._add_vm_disk(vm, disk)
            # Execution log
self.logger.info(None, "IPROCESS-100707", [vm["name"],])
return self.describeVolume(vm, diskid)
except Exception:
self.logger.error(traceback.format_exc())
raise IaasException("EPROCESS-000807", [vm["name"],])
#########################
#
    # Edit a disk
#
#########################
def editVolume(self, vm, disk):
try:
self._validate_vm_disk_size(disk["SIZE"])
diskid = self._edit_vm_disk(vm, disk)
            # Execution log
self.logger.info(None, "IPROCESS-100711", [vm["name"],])
return self.describeVolume(vm, diskid)
except Exception:
self.logger.error(traceback.format_exc())
raise IaasException("EPROCESS-000811", [vm["name"],])
#########################
#
    # Detach a disk
#
#########################
def detachVolume(self, vm, diskid):
try:
self._del_vm_disk(vm, diskid)
            # Execution log
self.logger.info(None, "IPROCESS-100708", [vm["name"],])
except Exception:
self.logger.error(traceback.format_exc())
raise IaasException("EPROCESS-000808", [vm["name"],])
#########################
#
    # Disk attach control
#
#########################
def _add_vm_disk(self, vm, vm_disk):
        # Do nothing if no disk is given
if vm_disk is None:
return
rasd_ns = '{http://schemas.dmtf.org/wbem/wscim/1/cim-schema/2/CIM_ResourceAllocationSettingData}'
        # Fetch virtualHardwareSection/disks
res = self.requestLoop('%s/virtualHardwareSection/disks' % get_url_path(vm["id"]))
existing_ids = []
new_disk = None
        # Inspect the existing disk entries
for item in res.object.findall(fixxpath(res.object, 'Item')):
for elem in item:
if elem.tag == '%sInstanceID' % rasd_ns:
existing_ids.append(int(elem.text))
if item.find('%sHostResource' % rasd_ns) is not None:
new_disk = item
        # Build the disk entry to add
new_disk = copy.deepcopy(new_disk)
for elem in new_disk:
            # Remove unneeded parameters
if elem.tag in ['%sAddressOnParent' % rasd_ns, '%sParent' % rasd_ns]:
new_disk.remove(elem)
disk_id = max(existing_ids) + 1
diskName = 'Hard Disk ' + str(disk_id)
#new_disk.find('%sAddressOnParent' % rasd_ns).text = str(vm_disk["UNIT_NO"])
new_disk.find('%sInstanceID' % rasd_ns).text = str(disk_id)
new_disk.find('%sElementName' % rasd_ns).text = diskName
new_disk.find('%sHostResource' % rasd_ns).set(fixxpath(new_disk, 'capacity'), str(int(vm_disk["SIZE"]) * 1024))
res.object.append(new_disk)
self.logger.info(ET.tostring(res.object))
res = self.requestLoop(
'%s/virtualHardwareSection/disks' % get_url_path(vm["id"]),
data=ET.tostring(res.object),
method='PUT',
headers={'Content-Type': 'application/vnd.vmware.vcloud.rasditemslist+xml'}
)
self._wait_for_task_completion(res.object.get('href'))
return disk_id
#########################
#
    # Disk edit control
#
#########################
def _edit_vm_disk(self, vm, disk):
        # Do nothing if no disk is given
if disk is None:
return
rasd_ns = '{http://schemas.dmtf.org/wbem/wscim/1/cim-schema/2/CIM_ResourceAllocationSettingData}'
        # Fetch virtualHardwareSection/disks
res = self.requestLoop('%s/virtualHardwareSection/disks' % get_url_path(vm["id"]))
for item in res.object.findall(fixxpath(res.object, 'Item')):
if item.find('%sInstanceID' % rasd_ns) is not None:
if str(item.find('%sInstanceID' % rasd_ns).text) == str(disk["DISK_ID"]):
item.find('%sHostResource' % rasd_ns).set(fixxpath(item, 'capacity'), str(int(disk["SIZE"]) * 1024))
self.logger.info(ET.tostring(res.object))
res = self.requestLoop(
'%s/virtualHardwareSection/disks' % get_url_path(vm["id"]),
data=ET.tostring(res.object),
method='PUT',
headers={'Content-Type': 'application/vnd.vmware.vcloud.rasditemslist+xml'}
)
self._wait_for_task_completion(res.object.get('href'))
#########################
#
    # Disk detach control
#
#########################
def _del_vm_disk(self, vm, disk_id):
        # Do nothing if no disk ID is given
if disk_id is None:
return
rasd_ns = '{http://schemas.dmtf.org/wbem/wscim/1/cim-schema/2/CIM_ResourceAllocationSettingData}'
        # Fetch virtualHardwareSection/disks
res = self.requestLoop('%s/virtualHardwareSection/disks' % get_url_path(vm["id"]))
for item in res.object.findall(fixxpath(res.object, 'Item')):
if item.find('%sHostResource' % rasd_ns) is not None:
name = item.find('%sInstanceID' % rasd_ns).text
if str(name) == str(disk_id):
res.object.remove(item)
self.logger.info(ET.tostring(res.object))
res = self.requestLoop(
'%s/virtualHardwareSection/disks' % get_url_path(vm["id"]),
data=ET.tostring(res.object),
method='PUT',
headers={'Content-Type': 'application/vnd.vmware.vcloud.rasditemslist+xml'}
)
self._wait_for_task_completion(res.object.get('href'))
############################################################
#
    # Address operations
#
############################################################
#########################
#
    # Change the networks (VM)
#
#########################
def _change_vm_nw(self, vm_id, vm_nw):
rasd_ns = "{http://www.vmware.com/vcloud/v1.5}"
indexList = ["0","1","2","3","4","5","6","7","8","9","10",]
        # Convert the list into maps keyed by network index / network number
editmap = {}
makemap = {}
for nw in vm_nw:
if nw["NETWORK_INDEX"] is not None:
editmap[str(nw["NETWORK_INDEX"])] = nw
else:
makemap[str(nw["NETWORK_NO"])] = nw
isEdit = False
vms = self._get_vm_elements(vm_id)
for vm in vms:
res = self.requestLoop('%s/networkConnectionSection' % get_url_path(vm.get('href')))
def_primary_index = res.object.find(fixxpath(res.object, 'PrimaryNetworkConnectionIndex')).text
primary_index = def_primary_index
net_conns = res.object.findall(fixxpath(res.object, 'NetworkConnection'))
for item in net_conns:
name = item.get('network')
index = item.find(fixxpath(item, 'NetworkConnectionIndex')).text
                # Look up the matching setting
if not editmap.has_key(index):
                    # If none found, remove this network
res.object.remove(item)
isEdit = True
else:
                    # Remove the used index from the free list
indexList.remove(index)
                    # If found, compare the settings
newNw = editmap.pop(index)
                    # Primary check
if isBit(newNw["IS_PRIMARY"]):
primary_index = newNw["NETWORK_INDEX"]
#IPMODE
if newNw["IP_MODE"] != item.find(fixxpath(item, 'IpAddressAllocationMode')).text:
item.find(fixxpath(item, 'IpAddressAllocationMode')).text = newNw["IP_MODE"]
isEdit = True
if newNw["IP_MODE"] == "MANUAL":
                        # When the mode is "MANUAL", also update the address
item.find(fixxpath(item, 'IpAddress')).text = newNw["IP_ADDRESS"]
else:
if newNw["IP_ADDRESS"] != item.find(fixxpath(item, 'IpAddress')).text:
                            # When only the IP address changes
item.find(fixxpath(item, 'IpAddress')).text = newNw["IP_ADDRESS"]
isEdit = True
        # Remove the Link tag temporarily since it gets in the way
link = res.object.find(fixxpath(res.object, 'Link'))
res.object.remove(link)
        # Register the additional networks
if len(makemap) > 0:
for key in makemap:
newNw = makemap[key]
networkConnection = ET.Element('%sNetworkConnection' % rasd_ns,
{'network': newNw["NETWORK_NAME"]}
)
ET.SubElement(networkConnection, '%sNetworkConnectionIndex' % rasd_ns).text = str(indexList[0])
                # Primary check
if isBit(newNw["IS_PRIMARY"]):
primary_index = indexList[0]
if isNotEmpty(newNw["IP_ADDRESS"]):
ET.SubElement(networkConnection, '%sIpAddress' % rasd_ns).text = newNw["IP_ADDRESS"]
ET.SubElement(networkConnection, '%sIsConnected' % rasd_ns).text = "true"
ET.SubElement(networkConnection, '%sIpAddressAllocationMode' % rasd_ns).text = newNw["IP_MODE"]
res.object.append(networkConnection)
isEdit = True
indexList.remove(indexList[0])
if str(def_primary_index) != str(primary_index):
            # Reset the primary index
res.object.find(fixxpath(res.object, 'PrimaryNetworkConnectionIndex')).text = str(primary_index)
isEdit = True
        # Put the removed Link tag back
res.object.append(link)
self.logger.info(ET.tostring(res.object))
        # Only send the request when a change was made
if isEdit:
self.logger.info(ET.tostring(res.object))
res = self.requestLoop(
'%s/networkConnectionSection' % get_url_path(vm.get('href')),
data=ET.tostring(res.object),
method='PUT',
headers={'Content-Type': 'application/vnd.vmware.vcloud.networkConnectionSection+xml'}
)
self._wait_for_task_completion(res.object.get('href'))
############################################################
#
    # KeyPair operations
#
############################################################
    # Nothing to control
############################################################
#
    # Snapshot operations
#
############################################################
    # Nothing to control
############################################################
#
    # Tags operations
#
############################################################
    # Nothing to control
############################################################
#
    # Miscellaneous
#
############################################################
#########################
#
    # Get password data (for Windows)
#
#########################
def getPasswordData(self, instanceId):
pass
#########################
#
    # Set user data
#
#########################
def setProductSections(self, vm, metadatas):
add_xml = SetProductSectionListXML(
metadatas=metadatas
)
self.logger.info(add_xml.tostring())
try:
# Instantiate VM and get identifier.
res = self.requestLoop(
'%s/productSections' % get_url_path(vm["id"]),
data=add_xml.tostring(),
method='PUT',
headers={'Content-Type': 'application/vnd.vmware.vcloud.productSections+xml'}
)
task_href = res.object.get('href')
self._wait_for_task_completion(task_href)
            # Execution log
self.logger.info(None, "IPROCESS-100709", [vm["name"],])
except Exception:
self.logger.error(traceback.format_exc())
raise IaasException("EPROCESS-000809", [])
############################################################
#
    # LoadBalancer operations
#
############################################################
    # Nothing to control
############################################################
#
    # libcloud overrides
#
############################################################
#########################
#
    # Retry failed requests up to RETRY_MAX times
#
#########################
def requestLoop(self, *args, **kwargs):
retry = 0
status = "not"
while status != 'go':
try:
res = self.connection.request(*args, **kwargs)
return res
except Exception:
if retry > self.RETRY_MAX:
raise
else:
retry = retry +1
time.sleep(5)
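    # Illustrative sketch (added for clarity; not part of the original
    # module): requestLoop is used as a drop-in for self.connection.request
    # wherever a transient failure should be retried, e.g.
    #
    #   res = self.requestLoop(get_url_path(vapp_href))  # vapp_href assumed
    #
    # The delay is a fixed 5 seconds per retry, which bounds the worst case
    # at roughly RETRY_MAX * 5 seconds per call; an exponential backoff would
    # be a possible alternative.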
#########################
#
    # Description
    # Modified so the task timeout can be changed to match the configured value
    # NOT an override
#
#
#########################
def _wait_for_task_completion(self, task_href):
#VCloud_1_5_NodeDriver._wait_for_task_completion(self, task_href, timeout = self.timeout)
start_time = time.time()
res = self.connection.request(get_url_path(task_href))
status = res.object.get('status')
retry = 0
while status != 'success':
if status == 'error':
if retry > self.RETRY_MAX:
# Get error reason from the response body
error_elem = res.object.find(fixxpath(res.object, 'Error'))
error_msg = "Unknown error"
if error_elem is not None:
error_msg = error_elem.get('message')
raise Exception("Error status returned by task %s.: %s" % (task_href, error_msg))
else:
retry = retry +1
if status == 'canceled':
raise Exception("Canceled status returned by task %s." % task_href)
if (time.time() - start_time >= self.timeout):
raise Exception("Timeout (%s sec) while waiting for task %s." % (self.timeout, task_href))
time.sleep(5)
res = self.connection.request(get_url_path(task_href))
status = res.object.get('status')
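    # Illustrative note (added for clarity; not part of the original module):
    # the method polls the task href every 5 seconds until its status becomes
    # 'success', raising after repeated 'error' statuses, on 'canceled', or
    # once self.timeout seconds have elapsed. Typical call pattern:
    #
    #   task_href = res.object.find(fixxpath(res.object, "Tasks/Task")).get('href')
    #   self._wait_for_task_completion(task_href)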
#########################
#
    # Description
    # Added criteria for deciding whether an IP address is public or private
    # Modified to keep the storage profile
#
#
#########################
def _to_node(self, node_elm):
publicNetworkName = getOtherProperty("vCloud.PCCNetwork")
# Parse VMs as extra field
vms = []
for vm_elem in node_elm.findall(fixxpath(node_elm, 'Children/Vm')):
public_ips = []
private_ips = []
other_ips = []
for connection in vm_elem.findall(fixxpath(vm_elem, 'NetworkConnectionSection/NetworkConnection')):
if connection.attrib['network'] != "none":
ip = connection.find(fixxpath(connection, "IpAddress"))
if connection.attrib['network'] == publicNetworkName:
public_ips.append(ip.text)
else:
if connection.attrib['network'] == self.defnet:
private_ips.append(ip.text)
else:
other_ips.append(ip.text)
            # When there is no default network (no private_ip),
            # use the first "other" address as the private_ip
if len(private_ips) == 0 and len(other_ips) > 0:
private_ips.append(other_ips[0])
########################################
            # Alternative approach for choosing the private IP
########################################
# primary_ips = []
# def_ips = []
# primary_index = vm_elem.object.find(fixxpath(vm_elem, 'NetworkConnectionSection/PrimaryNetworkConnectionIndex')).text
# for connection in vm_elem.findall(fixxpath(vm_elem, 'NetworkConnectionSection/NetworkConnection')):
# if connection.attrib['network'] != "none":
# ip = connection.find(fixxpath(connection, "IpAddress"))
# index = connection.find(fixxpath(connection, "NetworkConnectionIndex"))
# if connection.attrib['network'] == publicNetworkName:
# public_ips.append(ip.text)
# else:
# if index == primary_index:
# primary_ips.append(ip.text)
#
# if connection.attrib['network'] == self.defnet:
# def_ips.append(ip.text)
# else:
# other_ips.append(ip.text)
#
            # # Private IP selection order:
            # # 1: primary network (excluded when it is the PCC network)
            # # 2: default network
            # # 3: other networks
# if len(primary_ips) != 0:
# private_ips.append(primary_ips[0])
# else:
# if len(def_ips) != 0:
# private_ips.append(other_ips[0])
# elif len(other_ips) > 0:
# private_ips.append(other_ips[0])
            # Populate the VM entry
vm = {
'id': vm_elem.get('href'),
'name': vm_elem.get('name'),
'state': self.NODE_STATE_MAP[vm_elem.get('status')],
'public_ips': public_ips,
'private_ips': private_ips,
'storageprofile':vm_elem.find(fixxpath(vm_elem, 'StorageProfile')).get('name'),
}
vms.append(vm)
# Take the node IP addresses from all VMs
public_ips = []
private_ips = []
for vm in vms:
public_ips.extend(vm['public_ips'])
private_ips.extend(vm['private_ips'])
# Find vDC
vdc_id = next(link.get('href') for link in node_elm.findall(fixxpath(node_elm, 'Link'))
if link.get('type') == 'application/vnd.vmware.vcloud.vdc+xml')
vdc = next(vdc for vdc in self.vdcs if vdc.id == vdc_id)
# Find TASK
tasks = node_elm.findall(fixxpath(node_elm, 'Tasks/Task'))
isTask = False
if len(tasks) > 0:
isTask = True
node = Node(id=node_elm.get('href'),
name=node_elm.get('name'),
state=self.NODE_STATE_MAP[node_elm.get('status')],
public_ips=public_ips,
private_ips=private_ips,
driver=self.connection.driver,
extra={'vdc': vdc.name, 'vms': vms, 'task': isTask})
return node
| gpl-2.0 | -5,055,009,846,600,980,000 | 32.043018 | 140 | 0.496404 | false |
goddardl/cortex | test/IECore/CoordinateSystemTest.py | 12 | 5463 | ##########################################################################
#
# Copyright (c) 2008-2011, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import os
import unittest
import IECore
class CoordinateSystemTest( unittest.TestCase ) :
def test( self ) :
a = IECore.CoordinateSystem( "a" )
self.assertEqual( a.getName(), "a" )
a.setName( "b" )
self.assertEqual( a.getName(), "b" )
aa = a.copy()
self.assertEqual( a, aa )
IECore.ObjectWriter( a, "test/IECore/data/coordSys.cob" ).write()
aaa = IECore.ObjectReader( "test/IECore/data/coordSys.cob" ).read()
self.assertEqual( aaa, aa )
self.assertEqual( aaa.getName(), "b" )
def testHash( self ) :
a = IECore.CoordinateSystem( "a" )
b = IECore.CoordinateSystem( "a" )
self.assertEqual( a.hash(), b.hash() )
b.setName( "b" )
self.assertNotEqual( a.hash(), b.hash() )
b.setName( "a" )
self.assertEqual( a.hash(), b.hash() )
b.setTransform( IECore.MatrixTransform( IECore.M44f.createTranslated( IECore.V3f( 1 ) ) ) )
self.assertNotEqual( a.hash(), b.hash() )
def testTransform( self ) :
c = IECore.CoordinateSystem()
self.assertEqual( c.getTransform(), None )
c = IECore.CoordinateSystem( "test" )
self.assertEqual( c.getName(), "test" )
self.assertEqual( c.getTransform(), None )
self.assertEqual( c, c.copy() )
c = IECore.CoordinateSystem( "test", IECore.MatrixTransform( IECore.M44f() ) )
self.assertEqual( c.getName(), "test" )
self.assertEqual( c.getTransform(), IECore.MatrixTransform( IECore.M44f() ) )
self.assertEqual( c, c.copy() )
cc = c.copy()
self.assertEqual( cc.getTransform(), IECore.MatrixTransform( IECore.M44f() ) )
self.failIf( c.getTransform().isSame( cc.getTransform() ) )
c.setTransform( IECore.MatrixTransform( IECore.M44f.createTranslated( IECore.V3f( 1 ) ) ) )
self.assertEqual( c.getTransform(), IECore.MatrixTransform( IECore.M44f.createTranslated( IECore.V3f( 1 ) ) ) )
c.setTransform( None )
self.assertEqual( c.getTransform(), None )
cc = c.copy()
self.assertEqual( cc.getTransform(), None )
def testLoadCobFromBeforeTransforms( self ) :
c = IECore.ObjectReader( "test/IECore/data/cobFiles/coordinateSystemBeforeTransforms.cob" ).read()
self.assertEqual( c.getName(), "test" )
self.assertEqual( c.getTransform(), None )
def testLoadCobWithTransform( self ) :
c = IECore.CoordinateSystem( "test", IECore.MatrixTransform( IECore.M44f() ) )
IECore.ObjectWriter( c, "test/IECore/data/coordSys.cob" ).write()
c = IECore.ObjectReader( "test/IECore/data/coordSys.cob" ).read()
self.assertEqual( c.getTransform(), IECore.MatrixTransform( IECore.M44f() ) )
c = IECore.CoordinateSystem( "test", None )
IECore.ObjectWriter( c, "test/IECore/data/coordSys.cob" ).write()
c = IECore.ObjectReader( "test/IECore/data/coordSys.cob" ).read()
self.assertEqual( c.getTransform(), None )
def testEquality( self ) :
c1 = IECore.CoordinateSystem( "test" )
c2 = IECore.CoordinateSystem( "test" )
self.assertEqual( c1, c2 )
self.assertEqual( c2, c1 )
c1.setName( "test2" )
self.assertNotEqual( c1, c2 )
self.assertNotEqual( c2, c1 )
c1.setName( "test" )
c1.setTransform( IECore.MatrixTransform( IECore.M44f() ) )
self.assertNotEqual( c1, c2 )
self.assertNotEqual( c2, c1 )
c2.setTransform( IECore.MatrixTransform( IECore.M44f() ) )
self.assertEqual( c1, c2 )
self.assertEqual( c2, c1 )
def testMemoryUsage( self ) :
c = IECore.CoordinateSystem( "test" )
m = c.memoryUsage()
c.setTransform( IECore.MatrixTransform( IECore.M44f() ) )
self.failUnless( c.memoryUsage() > m )
def tearDown( self ) :
if os.path.exists( "test/IECore/data/coordSys.cob" ) :
os.remove( "test/IECore/data/coordSys.cob" )
if __name__ == "__main__":
unittest.main()
| bsd-3-clause | -598,794,855,232,643,600 | 33.796178 | 113 | 0.679846 | false |
jalilag/apspir | objedit/subindextable.py | 10 | 43958 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#This file is part of CanFestival, a library implementing CanOpen Stack.
#
#Copyright (C): Edouard TISSERANT, Francis DUPIN and Laurent BESSARD
#
#See COPYING file for copyrights details.
#
#This library is free software; you can redistribute it and/or
#modify it under the terms of the GNU Lesser General Public
#License as published by the Free Software Foundation; either
#version 2.1 of the License, or (at your option) any later version.
#
#This library is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
#Lesser General Public License for more details.
#
#You should have received a copy of the GNU Lesser General Public
#License along with this library; if not, write to the Free Software
#Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import wx
import wx.grid
from types import *
from commondialogs import *
from node import OD_Subindex, OD_MultipleSubindexes, OD_IdenticalSubindexes, OD_IdenticalIndexes
ColSizes = [75, 250, 150, 125, 100, 60, 250]
ColAlignements = [wx.ALIGN_CENTER, wx.ALIGN_LEFT, wx.ALIGN_CENTER, wx.ALIGN_RIGHT, wx.ALIGN_CENTER, wx.ALIGN_CENTER, wx.ALIGN_LEFT]
def GetAccessList(write=True):
_ = lambda x : x
if write:
return [_("Read Only"), _("Write Only"), _("Read/Write")]
return [_("Read Only"), _("Read/Write")]
AccessList = ",".join(map(_, GetAccessList()))
RAccessList = ",".join(map(_, GetAccessList(False)))
ACCESS_LIST_DICT = dict([(_(access), access) for access in GetAccessList()])
def GetBoolList():
_ = lambda x : x
return [_("True"), _("False")]
BoolList = ",".join(map(_, GetBoolList()))
BOOL_LIST_DICT = dict([(_(bool), bool) for bool in GetBoolList()])
def GetOptionList():
_ = lambda x : x
return [_("Yes"), _("No")]
OptionList = ",".join(map(_, GetOptionList()))
OPTION_LIST_DICT = dict([(_(option), option) for option in GetOptionList()])
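# Illustrative note (added for clarity; not part of the original module): the
# local `_ = lambda x: x` inside the Get*List helpers is a no-op marker that
# keeps the literals extractable for gettext while returning the raw values,
# so they stay usable as dictionary keys; the module-level `_()` calls then
# map translated display text back to those raw values, e.g.
#
#   GetOptionList()               # -> ["Yes", "No"]
#   OPTION_LIST_DICT[_("Yes")]    # -> "Yes"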
[USER_TYPE, SDO_SERVER, SDO_CLIENT,
PDO_TRANSMIT, PDO_RECEIVE, MAP_VARIABLE] = range(6)
INDEXCHOICE_OPTIONS = {
USER_TYPE: (_("User Type"), 0, "AddUserType"),
SDO_SERVER: (_("SDO Server"), 1, "AddSDOServerToCurrent"),
SDO_CLIENT: (_("SDO Client"), 1, "AddSDOClientToCurrent"),
PDO_RECEIVE: (_("PDO Receive"), 1, "AddPDOReceiveToCurrent"),
PDO_TRANSMIT: (_("PDO Transmit"), 1, "AddPDOTransmitToCurrent"),
MAP_VARIABLE: (_("Map Variable"), 0, "AddMapVariable")
}
INDEXCHOICE_OPTIONS_DICT = dict([(translation, option) for option, (translation, object, function) in INDEXCHOICE_OPTIONS.iteritems()])
INDEXCHOICE_SECTIONS = {
0 : [USER_TYPE],
2 : [SDO_SERVER, SDO_CLIENT],
3 : [PDO_RECEIVE],
4 : [PDO_RECEIVE],
5 : [PDO_TRANSMIT],
6 : [PDO_TRANSMIT],
8 : [MAP_VARIABLE],
}
def GetSubindexTableColnames():
_ = lambda x : x
return [_("subindex"), _("name"), _("type"), _("value"), _("access"), _("save"), _("comment")]
DictionaryOrganisation = [
{"minIndex" : 0x0001, "maxIndex" : 0x0FFF, "name" : "Data Type Definitions"},
{"minIndex" : 0x1000, "maxIndex" : 0x1029, "name" : "Communication Parameters"},
{"minIndex" : 0x1200, "maxIndex" : 0x12FF, "name" : "SDO Parameters"},
{"minIndex" : 0x1400, "maxIndex" : 0x15FF, "name" : "Receive PDO Parameters"},
{"minIndex" : 0x1600, "maxIndex" : 0x17FF, "name" : "Receive PDO Mapping"},
{"minIndex" : 0x1800, "maxIndex" : 0x19FF, "name" : "Transmit PDO Parameters"},
{"minIndex" : 0x1A00, "maxIndex" : 0x1BFF, "name" : "Transmit PDO Mapping"},
{"minIndex" : 0x1C00, "maxIndex" : 0x1FFF, "name" : "Other Communication Parameters"},
{"minIndex" : 0x2000, "maxIndex" : 0x5FFF, "name" : "Manufacturer Specific"},
{"minIndex" : 0x6000, "maxIndex" : 0x9FFF, "name" : "Standardized Device Profile"},
{"minIndex" : 0xA000, "maxIndex" : 0xBFFF, "name" : "Standardized Interface Profile"}]
IECTypeConversion = {
"BOOLEAN": "BOOL",
"INTEGER8": "SINT",
"INTEGER16": "INT",
"INTEGER32": "DINT",
"UNSIGNED8": "USINT",
"UNSIGNED16": "UINT",
"UNSIGNED32": "UDINT",
"REAL32": "REAL",
"VISIBLE_STRING": "STRING",
"OCTET_STRING": "STRING",
"UNICODE_STRING": "WSTRING",
"DOMAIN": "STRING",
"INTEGER24": "DINT",
"REAL64": "LREAL",
"INTEGER40": "LINT",
"INTEGER48": "LINT",
"INTEGER56": "LINT",
"INTEGER64": "LINT",
"UNSIGNED24": "UDINT",
"UNSIGNED40": "ULINT",
"UNSIGNED48": "ULINT",
"UNSIGNED56": "ULINT",
"UNSIGNED64": "ULINT",
}
SizeConversion = {1 : "X", 8 : "B", 16 : "W", 24 : "D", 32 : "D", 40 : "L", 48 : "L", 56 : "L", 64 : "L"}
class SubindexTable(wx.grid.PyGridTableBase):
"""
A custom wxGrid Table using user supplied data
"""
def __init__(self, parent, data, editors, colnames):
# The base class must be initialized *first*
wx.grid.PyGridTableBase.__init__(self)
self.data = data
self.editors = editors
self.CurrentIndex = 0
self.colnames = colnames
self.Parent = parent
self.Editable = True
# XXX
# we need to store the row length and collength to
# see if the table has changed size
self._rows = self.GetNumberRows()
self._cols = self.GetNumberCols()
def Disable(self):
self.Editable = False
def Enable(self):
self.Editable = True
def GetNumberCols(self):
return len(self.colnames)
def GetNumberRows(self):
return len(self.data)
def GetColLabelValue(self, col, translate=True):
if col < len(self.colnames):
if translate:
return _(self.colnames[col])
return self.colnames[col]
def GetRowLabelValues(self, row, translate=True):
return row
def GetValue(self, row, col, translate=True):
if row < self.GetNumberRows():
colname = self.GetColLabelValue(col, False)
value = unicode(self.data[row].get(colname, ""))
if translate and (colname == "access" or
self.editors[row][colname] in ["bool", "option"] or
self.editors[row][colname] == "map" and value == "None"):
value = _(value)
return value
def GetEditor(self, row, col):
if row < self.GetNumberRows():
return self.editors[row].get(self.GetColLabelValue(col, False), "")
def GetValueByName(self, row, colname):
return self.data[row].get(colname)
def SetValue(self, row, col, value):
if col < len(self.colnames):
colname = self.GetColLabelValue(col, False)
if colname == "access":
value = ACCESS_LIST_DICT[value]
elif self.editors[row][colname] == "bool":
value = BOOL_LIST_DICT[value]
elif self.editors[row][colname] == "option":
value = OPTION_LIST_DICT[value]
elif self.editors[row][colname] == "map" and value == _("None"):
value = "None"
self.data[row][colname] = value
def ResetView(self, grid):
"""
(wx.grid.Grid) -> Reset the grid view. Call this to
update the grid if rows and columns have been added or deleted
"""
grid.BeginBatch()
for current, new, delmsg, addmsg in [
(self._rows, self.GetNumberRows(), wx.grid.GRIDTABLE_NOTIFY_ROWS_DELETED, wx.grid.GRIDTABLE_NOTIFY_ROWS_APPENDED),
(self._cols, self.GetNumberCols(), wx.grid.GRIDTABLE_NOTIFY_COLS_DELETED, wx.grid.GRIDTABLE_NOTIFY_COLS_APPENDED),
]:
if new < current:
msg = wx.grid.GridTableMessage(self,delmsg,new,current-new)
grid.ProcessTableMessage(msg)
elif new > current:
msg = wx.grid.GridTableMessage(self,addmsg,new-current)
grid.ProcessTableMessage(msg)
self.UpdateValues(grid)
grid.EndBatch()
self._rows = self.GetNumberRows()
self._cols = self.GetNumberCols()
# update the column rendering scheme
self._updateColAttrs(grid)
# update the scrollbars and the displayed part of the grid
grid.AdjustScrollbars()
grid.ForceRefresh()
def UpdateValues(self, grid):
"""Update all displayed values"""
# This sends an event to the grid table to update all of the values
msg = wx.grid.GridTableMessage(self, wx.grid.GRIDTABLE_REQUEST_VIEW_GET_VALUES)
grid.ProcessTableMessage(msg)
def _updateColAttrs(self, grid):
"""
wx.grid.Grid -> update the column attributes to add the
appropriate renderer given the column name.
Otherwise default to the default renderer.
"""
for col in range(self.GetNumberCols()):
attr = wx.grid.GridCellAttr()
attr.SetAlignment(ColAlignements[col], wx.ALIGN_CENTRE)
grid.SetColAttr(col, attr)
grid.SetColMinimalWidth(col, ColSizes[col])
grid.AutoSizeColumn(col, False)
typelist = None
maplist = None
for row in range(self.GetNumberRows()):
editors = self.editors[row]
if wx.Platform == '__WXMSW__':
grid.SetRowMinimalHeight(row, 20)
else:
grid.SetRowMinimalHeight(row, 28)
grid.AutoSizeRow(row, False)
for col in range(self.GetNumberCols()):
editor = None
renderer = None
colname = self.GetColLabelValue(col, False)
editortype = editors[colname]
if editortype == "dcf":
editor = wx.grid.GridCellTextEditor()
renderer = wx.grid.GridCellStringRenderer()
elif editortype and self.Editable:
grid.SetReadOnly(row, col, False)
if editortype == "string":
editor = wx.grid.GridCellTextEditor()
renderer = wx.grid.GridCellStringRenderer()
if colname == "value" and "length" in editors:
editor.SetParameters(editors["length"])
elif editortype == "number":
editor = wx.grid.GridCellNumberEditor()
renderer = wx.grid.GridCellNumberRenderer()
if colname == "value" and "min" in editors and "max" in editors:
editor.SetParameters("%s,%s"%(editors["min"],editors["max"]))
elif editortype == "float":
editor = wx.grid.GridCellTextEditor()
renderer = wx.grid.GridCellStringRenderer()
elif editortype == "bool":
editor = wx.grid.GridCellChoiceEditor()
editor.SetParameters(BoolList)
elif editortype == "access":
editor = wx.grid.GridCellChoiceEditor()
editor.SetParameters(AccessList)
elif editortype == "raccess":
editor = wx.grid.GridCellChoiceEditor()
editor.SetParameters(RAccessList)
elif editortype == "option":
editor = wx.grid.GridCellChoiceEditor()
editor.SetParameters(OptionList)
elif editortype == "type":
editor = wx.grid.GridCellChoiceEditor()
                        if typelist is None:
typelist = self.Parent.Manager.GetCurrentTypeList()
editor.SetParameters(typelist)
elif editortype == "map":
editor = wx.grid.GridCellChoiceEditor()
                        if maplist is None:
maplist = self.Parent.Manager.GetCurrentMapList()
editor.SetParameters(maplist)
elif editortype == "time":
editor = wx.grid.GridCellTextEditor()
renderer = wx.grid.GridCellStringRenderer()
elif editortype == "domain":
editor = wx.grid.GridCellTextEditor()
renderer = wx.grid.GridCellStringRenderer()
else:
grid.SetReadOnly(row, col, True)
grid.SetCellEditor(row, col, editor)
grid.SetCellRenderer(row, col, renderer)
grid.SetCellBackgroundColour(row, col, wx.WHITE)
def SetData(self, data):
self.data = data
def SetEditors(self, editors):
self.editors = editors
def GetCurrentIndex(self):
return self.CurrentIndex
def SetCurrentIndex(self, index):
self.CurrentIndex = index
def AppendRow(self, row_content):
self.data.append(row_content)
def Empty(self):
self.data = []
self.editors = []
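# --- Hedged usage sketch (editorial addition, not part of the original source) ---
# A SubindexTable only takes effect once it is attached to a wx.grid.Grid and
# the view is resynchronised after each structural change; the helper below
# illustrates the call sequence (the argument names are assumptions for
# illustration; EditingPanel.__init__ further down does the same wiring).
def _attach_subindex_table(grid, parent, colnames):
    # create an empty table and hand it to the grid
    table = SubindexTable(parent, [], [], colnames)
    grid.SetTable(table)
    # ResetView must run whenever rows or columns were added/removed,
    # otherwise the grid keeps stale row/column counts
    table.ResetView(grid)
    return table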
[ID_EDITINGPANEL, ID_EDITINGPANELADDBUTTON, ID_EDITINGPANELINDEXCHOICE,
ID_EDITINGPANELINDEXLIST, ID_EDITINGPANELINDEXLISTPANEL, ID_EDITINGPANELPARTLIST,
ID_EDITINGPANELSECONDSPLITTER, ID_EDITINGPANELSUBINDEXGRID,
ID_EDITINGPANELSUBINDEXGRIDPANEL, ID_EDITINGPANELCALLBACKCHECK,
] = [wx.NewId() for _init_ctrls in range(10)]
[ID_EDITINGPANELINDEXLISTMENUITEMS0, ID_EDITINGPANELINDEXLISTMENUITEMS1,
ID_EDITINGPANELINDEXLISTMENUITEMS2,
] = [wx.NewId() for _init_coll_IndexListMenu_Items in range(3)]
[ID_EDITINGPANELMENU1ITEMS0, ID_EDITINGPANELMENU1ITEMS1,
ID_EDITINGPANELMENU1ITEMS3, ID_EDITINGPANELMENU1ITEMS4,
] = [wx.NewId() for _init_coll_SubindexGridMenu_Items in range(4)]
class EditingPanel(wx.SplitterWindow):
def _init_coll_AddToListSizer_Items(self, parent):
parent.AddWindow(self.AddButton, 0, border=0, flag=0)
parent.AddWindow(self.IndexChoice, 0, border=0, flag=wx.GROW)
def _init_coll_SubindexGridSizer_Items(self, parent):
parent.AddWindow(self.CallbackCheck, 0, border=0, flag=0)
parent.AddWindow(self.SubindexGrid, 0, border=0, flag=wx.GROW)
def _init_coll_IndexListSizer_Items(self, parent):
parent.AddWindow(self.IndexList, 0, border=0, flag=wx.GROW)
parent.AddSizer(self.AddToListSizer, 0, border=0, flag=wx.GROW)
def _init_coll_AddToListSizer_Growables(self, parent):
parent.AddGrowableCol(1)
def _init_coll_SubindexGridSizer_Growables(self, parent):
parent.AddGrowableCol(0)
parent.AddGrowableRow(1)
def _init_coll_IndexListSizer_Growables(self, parent):
parent.AddGrowableCol(0)
parent.AddGrowableRow(0)
def _init_coll_SubindexGridMenu_Items(self, parent):
parent.Append(help='', id=ID_EDITINGPANELMENU1ITEMS0,
kind=wx.ITEM_NORMAL, text=_('Add subindexes'))
parent.Append(help='', id=ID_EDITINGPANELMENU1ITEMS1,
kind=wx.ITEM_NORMAL, text=_('Delete subindexes'))
parent.AppendSeparator()
parent.Append(help='', id=ID_EDITINGPANELMENU1ITEMS3,
kind=wx.ITEM_NORMAL, text=_('Default value'))
if not self.Editable:
parent.Append(help='', id=ID_EDITINGPANELMENU1ITEMS4,
kind=wx.ITEM_NORMAL, text=_('Add to DCF'))
self.Bind(wx.EVT_MENU, self.OnAddSubindexMenu,
id=ID_EDITINGPANELMENU1ITEMS0)
self.Bind(wx.EVT_MENU, self.OnDeleteSubindexMenu,
id=ID_EDITINGPANELMENU1ITEMS1)
self.Bind(wx.EVT_MENU, self.OnDefaultValueSubindexMenu,
id=ID_EDITINGPANELMENU1ITEMS3)
if not self.Editable:
self.Bind(wx.EVT_MENU, self.OnAddToDCFSubindexMenu,
id=ID_EDITINGPANELMENU1ITEMS4)
def _init_coll_IndexListMenu_Items(self, parent):
parent.Append(help='', id=ID_EDITINGPANELINDEXLISTMENUITEMS0,
kind=wx.ITEM_NORMAL, text=_('Rename'))
parent.Append(help='', id=ID_EDITINGPANELINDEXLISTMENUITEMS2,
kind=wx.ITEM_NORMAL, text=_('Modify'))
parent.Append(help='', id=ID_EDITINGPANELINDEXLISTMENUITEMS1,
kind=wx.ITEM_NORMAL, text=_('Delete'))
self.Bind(wx.EVT_MENU, self.OnRenameIndexMenu,
id=ID_EDITINGPANELINDEXLISTMENUITEMS0)
self.Bind(wx.EVT_MENU, self.OnDeleteIndexMenu,
id=ID_EDITINGPANELINDEXLISTMENUITEMS1)
self.Bind(wx.EVT_MENU, self.OnModifyIndexMenu,
id=ID_EDITINGPANELINDEXLISTMENUITEMS2)
def _init_utils(self):
self.IndexListMenu = wx.Menu(title='')
self.SubindexGridMenu = wx.Menu(title='')
self._init_coll_IndexListMenu_Items(self.IndexListMenu)
self._init_coll_SubindexGridMenu_Items(self.SubindexGridMenu)
def _init_sizers(self):
self.IndexListSizer = wx.FlexGridSizer(cols=1, hgap=0, rows=2, vgap=0)
self.SubindexGridSizer = wx.FlexGridSizer(cols=1, hgap=0, rows=2, vgap=0)
self.AddToListSizer = wx.FlexGridSizer(cols=2, hgap=0, rows=1, vgap=0)
self._init_coll_IndexListSizer_Growables(self.IndexListSizer)
self._init_coll_IndexListSizer_Items(self.IndexListSizer)
self._init_coll_SubindexGridSizer_Growables(self.SubindexGridSizer)
self._init_coll_SubindexGridSizer_Items(self.SubindexGridSizer)
self._init_coll_AddToListSizer_Growables(self.AddToListSizer)
self._init_coll_AddToListSizer_Items(self.AddToListSizer)
self.SubindexGridPanel.SetSizer(self.SubindexGridSizer)
self.IndexListPanel.SetSizer(self.IndexListSizer)
def _init_ctrls(self, prnt):
wx.SplitterWindow.__init__(self, id=ID_EDITINGPANEL,
name='MainSplitter', parent=prnt, point=wx.Point(0, 0),
size=wx.Size(-1, -1), style=wx.SP_3D)
self._init_utils()
self.PartList = wx.ListBox(choices=[], id=ID_EDITINGPANELPARTLIST,
name='PartList', parent=self, pos=wx.Point(0, 0),
size=wx.Size(-1, -1), style=0)
self.PartList.Bind(wx.EVT_LISTBOX, self.OnPartListBoxClick,
id=ID_EDITINGPANELPARTLIST)
self.SecondSplitter = wx.SplitterWindow(id=ID_EDITINGPANELSECONDSPLITTER,
name='SecondSplitter', parent=self, point=wx.Point(0, 0),
size=wx.Size(-1, -1), style=wx.SP_3D)
self.SplitHorizontally(self.PartList, self.SecondSplitter, 110)
self.SetMinimumPaneSize(1)
self.SubindexGridPanel = wx.Panel(id=ID_EDITINGPANELSUBINDEXGRIDPANEL,
name='SubindexGridPanel', parent=self.SecondSplitter,
pos=wx.Point(0, 0), size=wx.Size(-1, -1), style=wx.TAB_TRAVERSAL)
self.IndexListPanel = wx.Panel(id=ID_EDITINGPANELINDEXLISTPANEL,
name='IndexListPanel', parent=self.SecondSplitter,
pos=wx.Point(0, 0), size=wx.Size(-1, -1), style=wx.TAB_TRAVERSAL)
self.SecondSplitter.SplitVertically(self.IndexListPanel, self.SubindexGridPanel, 280)
self.SecondSplitter.SetMinimumPaneSize(1)
self.SubindexGrid = wx.grid.Grid(id=ID_EDITINGPANELSUBINDEXGRID,
name='SubindexGrid', parent=self.SubindexGridPanel, pos=wx.Point(0,
0), size=wx.Size(-1, -1), style=0)
self.SubindexGrid.SetFont(wx.Font(12, wx.SWISS, wx.NORMAL, wx.NORMAL, False,
'Sans'))
self.SubindexGrid.SetLabelFont(wx.Font(10, wx.SWISS, wx.NORMAL, wx.NORMAL,
False, 'Sans'))
self.SubindexGrid.Bind(wx.grid.EVT_GRID_CELL_CHANGE,
self.OnSubindexGridCellChange)
self.SubindexGrid.Bind(wx.grid.EVT_GRID_CELL_RIGHT_CLICK,
self.OnSubindexGridRightClick)
self.SubindexGrid.Bind(wx.grid.EVT_GRID_SELECT_CELL,
self.OnSubindexGridSelectCell)
self.SubindexGrid.Bind(wx.grid.EVT_GRID_CELL_LEFT_CLICK,
self.OnSubindexGridCellLeftClick)
self.SubindexGrid.Bind(wx.grid.EVT_GRID_EDITOR_SHOWN,
self.OnSubindexGridEditorShown)
self.CallbackCheck = wx.CheckBox(id=ID_EDITINGPANELCALLBACKCHECK,
label=_('Have Callbacks'), name='CallbackCheck',
parent=self.SubindexGridPanel, pos=wx.Point(0, 0), size=wx.Size(152,
24), style=0)
self.CallbackCheck.Bind(wx.EVT_CHECKBOX, self.OnCallbackCheck,
id=ID_EDITINGPANELCALLBACKCHECK)
self.IndexList = wx.ListBox(choices=[], id=ID_EDITINGPANELINDEXLIST,
name='IndexList', parent=self.IndexListPanel, pos=wx.Point(0, 0),
size=wx.Size(-1, -1), style=0)
self.IndexList.Bind(wx.EVT_LISTBOX, self.OnIndexListClick,
id=ID_EDITINGPANELINDEXLIST)
self.IndexList.Bind(wx.EVT_RIGHT_UP, self.OnIndexListRightUp)
self.AddButton = wx.Button(id=ID_EDITINGPANELADDBUTTON, label=_('Add'),
name='AddButton', parent=self.IndexListPanel, pos=wx.Point(0, 0),
size=wx.DefaultSize, style=0)
self.AddButton.Bind(wx.EVT_BUTTON, self.OnAddButtonClick,
id=ID_EDITINGPANELADDBUTTON)
self.IndexChoice = wx.ComboBox(choices=[], id=ID_EDITINGPANELINDEXCHOICE,
name='IndexChoice', parent=self.IndexListPanel, pos=wx.Point(50,
0), size=wx.Size(-1, 30), style=wx.CB_READONLY)
self._init_sizers()
def __init__(self, parent, window, manager, editable = True):
self.Editable = editable
self._init_ctrls(parent)
self.ParentWindow = window
self.Manager = manager
self.ListIndex = []
self.ChoiceIndex = []
self.FirstCall = False
self.Index = None
for values in DictionaryOrganisation:
text = " 0x%04X-0x%04X %s"%(values["minIndex"], values["maxIndex"], values["name"])
self.PartList.Append(text)
self.Table = SubindexTable(self, [], [], GetSubindexTableColnames())
self.SubindexGrid.SetTable(self.Table)
self.SubindexGrid.SetRowLabelSize(0)
self.CallbackCheck.Disable()
self.Table.ResetView(self.SubindexGrid)
if not self.Editable:
self.AddButton.Disable()
self.IndexChoice.Disable()
self.CallbackCheck.Disable()
self.Table.Disable()
wx.CallAfter(self.SetSashPosition, 110)
wx.CallAfter(self.SecondSplitter.SetSashPosition, 280)
def GetIndex(self):
return self.Index
def SetIndex(self, index):
self.Index = index
def GetSelection(self):
selected = self.IndexList.GetSelection()
if selected != wx.NOT_FOUND:
index = self.ListIndex[selected]
subIndex = self.SubindexGrid.GetGridCursorRow()
return index, subIndex
return None
def OnSubindexGridCellLeftClick(self, event):
if not self.ParentWindow.ModeSolo:
col = event.GetCol()
if self.Editable and col == 0:
selected = self.IndexList.GetSelection()
if selected != wx.NOT_FOUND:
index = self.ListIndex[selected]
subindex = event.GetRow()
entry_infos = self.Manager.GetEntryInfos(index)
                    if not (entry_infos["struct"] & OD_MultipleSubindexes) or subindex != 0:
subentry_infos = self.Manager.GetSubentryInfos(index, subindex)
typeinfos = self.Manager.GetEntryInfos(subentry_infos["type"])
if typeinfos:
bus_id = '.'.join(map(str, self.ParentWindow.GetBusId()))
var_name = "%s_%04x_%02x" % (self.Manager.GetCurrentNodeName(), index, subindex)
size = typeinfos["size"]
data = wx.TextDataObject(str(
("%s%s.%d.%d"%(SizeConversion[size], bus_id, index, subindex),
"location",
IECTypeConversion.get(typeinfos["name"]),
var_name, "")))
dragSource = wx.DropSource(self.SubindexGrid)
dragSource.SetData(data)
dragSource.DoDragDrop()
return
elif col == 0:
selected = self.IndexList.GetSelection()
node_id = self.ParentWindow.GetCurrentNodeId()
if selected != wx.NOT_FOUND and node_id is not None:
index = self.ListIndex[selected]
subindex = event.GetRow()
entry_infos = self.Manager.GetEntryInfos(index)
                    if not (entry_infos["struct"] & OD_MultipleSubindexes) or subindex != 0:
subentry_infos = self.Manager.GetSubentryInfos(index, subindex)
typeinfos = self.Manager.GetEntryInfos(subentry_infos["type"])
if subentry_infos["pdo"] and typeinfos:
bus_id = '.'.join(map(str, self.ParentWindow.GetBusId()))
var_name = "%s_%04x_%02x" % (self.Manager.GetSlaveName(node_id), index, subindex)
size = typeinfos["size"]
data = wx.TextDataObject(str(
("%s%s.%d.%d.%d"%(SizeConversion[size], bus_id, node_id, index, subindex),
"location",
IECTypeConversion.get(typeinfos["name"]),
var_name, "")))
dragSource = wx.DropSource(self.SubindexGrid)
dragSource.SetData(data)
dragSource.DoDragDrop()
return
event.Skip()
def OnAddButtonClick(self, event):
if self.Editable:
self.SubindexGrid.SetGridCursor(0, 0)
selected = self.IndexChoice.GetStringSelection()
if selected != "":
choice = INDEXCHOICE_OPTIONS_DICT.get(selected, None)
if choice is not None:
if INDEXCHOICE_OPTIONS[choice][1] == 0:
getattr(self.ParentWindow, INDEXCHOICE_OPTIONS[choice][2])()
elif INDEXCHOICE_OPTIONS[choice][1] == 1:
getattr(self.Manager, INDEXCHOICE_OPTIONS[choice][2])()
elif selected in [menu for menu, indexes in self.Manager.GetCurrentSpecificMenu()]:
self.Manager.AddSpecificEntryToCurrent(selected)
else:
index = self.ChoiceIndex[self.IndexChoice.GetSelection()]
self.Manager.ManageEntriesOfCurrent([index], [])
self.ParentWindow.RefreshBufferState()
self.RefreshIndexList()
event.Skip()
def OnPartListBoxClick(self, event):
if not self.ParentWindow.IsClosing():
self.SubindexGrid.SetGridCursor(0, 0)
self.RefreshIndexList()
event.Skip()
def OnIndexListClick(self, event):
if not self.ParentWindow.IsClosing():
self.SubindexGrid.SetGridCursor(0, 0)
self.RefreshTable()
event.Skip()
def OnSubindexGridSelectCell(self, event):
if not self.ParentWindow.IsClosing():
wx.CallAfter(self.ParentWindow.RefreshStatusBar)
event.Skip()
#-------------------------------------------------------------------------------
# Refresh Functions
#-------------------------------------------------------------------------------
def RefreshIndexList(self):
selected = self.IndexList.GetSelection()
choice = self.IndexChoice.GetStringSelection()
choiceindex = self.IndexChoice.GetSelection()
if selected != wx.NOT_FOUND:
selectedindex = self.ListIndex[selected]
self.IndexList.Clear()
self.IndexChoice.Clear()
i = self.PartList.GetSelection()
if i < len(DictionaryOrganisation):
values = DictionaryOrganisation[i]
self.ListIndex = []
for name, index in self.Manager.GetCurrentValidIndexes(values["minIndex"], values["maxIndex"]):
self.IndexList.Append("0x%04X %s"%(index, name))
self.ListIndex.append(index)
if self.Editable:
self.ChoiceIndex = []
choices = INDEXCHOICE_SECTIONS.get(i, None)
if choices is not None:
for c in choices:
self.IndexChoice.Append(INDEXCHOICE_OPTIONS[c][0])
if len(choices) > 1:
if choiceindex != wx.NOT_FOUND and choice == self.IndexChoice.GetString(choiceindex):
self.IndexChoice.SetStringSelection(choice)
else:
self.IndexChoice.SetSelection(0)
else:
for name, index in self.Manager.GetCurrentValidChoices(values["minIndex"], values["maxIndex"]):
if index:
self.IndexChoice.Append("0x%04X %s"%(index, name))
else:
self.IndexChoice.Append(name)
self.ChoiceIndex.append(index)
if choiceindex != wx.NOT_FOUND and choiceindex < self.IndexChoice.GetCount() and choice == self.IndexChoice.GetString(choiceindex):
self.IndexChoice.SetStringSelection(choice)
if self.Editable:
self.IndexChoice.Enable(self.IndexChoice.GetCount() != 0)
self.AddButton.Enable(self.IndexChoice.GetCount() != 0)
if selected == wx.NOT_FOUND or selected >= len(self.ListIndex) or selectedindex != self.ListIndex[selected]:
self.Table.Empty()
self.CallbackCheck.SetValue(False)
self.CallbackCheck.Disable()
self.Table.ResetView(self.SubindexGrid)
self.ParentWindow.RefreshStatusBar()
else:
self.IndexList.SetSelection(selected)
self.RefreshTable()
def RefreshTable(self):
selected = self.IndexList.GetSelection()
if selected != wx.NOT_FOUND:
index = self.ListIndex[selected]
if index > 0x260 and self.Editable:
self.CallbackCheck.Enable()
self.CallbackCheck.SetValue(self.Manager.HasCurrentEntryCallbacks(index))
result = self.Manager.GetCurrentEntryValues(index)
            if result is not None:
self.Table.SetCurrentIndex(index)
data, editors = result
self.Table.SetData(data)
self.Table.SetEditors(editors)
self.Table.ResetView(self.SubindexGrid)
self.ParentWindow.RefreshStatusBar()
#-------------------------------------------------------------------------------
# Editing Table value function
#-------------------------------------------------------------------------------
def OnSubindexGridEditorShown(self, event):
row, col = event.GetRow(), event.GetCol()
if self.Table.GetEditor(row, col) == "dcf":
wx.CallAfter(self.ShowDCFEntryDialog, row, col)
event.Veto()
else:
event.Skip()
def ShowDCFEntryDialog(self, row, col):
if self.Editable or self.ParentWindow.GetCurrentNodeId() is None:
selected = self.IndexList.GetSelection()
if selected != wx.NOT_FOUND:
index = self.ListIndex[selected]
if self.Manager.IsCurrentEntry(index):
dialog = DCFEntryValuesDialog(self, self.Editable)
dialog.SetValues(self.Table.GetValue(row, col).decode("hex_codec"))
if dialog.ShowModal() == wx.ID_OK and self.Editable:
value = dialog.GetValues()
self.Manager.SetCurrentEntry(index, row, value, "value", "dcf")
self.ParentWindow.RefreshBufferState()
wx.CallAfter(self.RefreshTable)
def OnSubindexGridCellChange(self, event):
if self.Editable:
index = self.Table.GetCurrentIndex()
subIndex = event.GetRow()
col = event.GetCol()
name = self.Table.GetColLabelValue(col, False)
value = self.Table.GetValue(subIndex, col, False)
editor = self.Table.GetEditor(subIndex, col)
self.Manager.SetCurrentEntry(index, subIndex, value, name, editor)
self.ParentWindow.RefreshBufferState()
wx.CallAfter(self.RefreshTable)
event.Skip()
def OnCallbackCheck(self, event):
if self.Editable:
index = self.Table.GetCurrentIndex()
self.Manager.SetCurrentEntryCallbacks(index, self.CallbackCheck.GetValue())
self.ParentWindow.RefreshBufferState()
wx.CallAfter(self.RefreshTable)
event.Skip()
#-------------------------------------------------------------------------------
# Contextual Menu functions
#-------------------------------------------------------------------------------
def OnIndexListRightUp(self, event):
if self.Editable:
if not self.FirstCall:
self.FirstCall = True
selected = self.IndexList.GetSelection()
if selected != wx.NOT_FOUND:
index = self.ListIndex[selected]
if index < 0x260:
self.IndexListMenu.FindItemByPosition(0).Enable(False)
self.IndexListMenu.FindItemByPosition(1).Enable(True)
self.PopupMenu(self.IndexListMenu)
elif 0x1000 <= index <= 0x1FFF:
self.IndexListMenu.FindItemByPosition(0).Enable(False)
self.IndexListMenu.FindItemByPosition(1).Enable(False)
self.PopupMenu(self.IndexListMenu)
elif 0x2000 <= index <= 0x5FFF:
self.IndexListMenu.FindItemByPosition(0).Enable(True)
self.IndexListMenu.FindItemByPosition(1).Enable(False)
self.PopupMenu(self.IndexListMenu)
elif index >= 0x6000:
self.IndexListMenu.FindItemByPosition(0).Enable(False)
self.IndexListMenu.FindItemByPosition(1).Enable(False)
self.PopupMenu(self.IndexListMenu)
else:
self.FirstCall = False
event.Skip()
def OnSubindexGridRightClick(self, event):
self.SubindexGrid.SetGridCursor(event.GetRow(), event.GetCol())
if self.Editable:
selected = self.IndexList.GetSelection()
if selected != wx.NOT_FOUND:
index = self.ListIndex[selected]
if self.Manager.IsCurrentEntry(index):
showpopup = False
infos = self.Manager.GetEntryInfos(index)
                    if (0x2000 <= index <= 0x5FFF and infos["struct"] & OD_MultipleSubindexes) or infos["struct"] & OD_IdenticalSubindexes:
showpopup = True
self.SubindexGridMenu.FindItemByPosition(0).Enable(True)
self.SubindexGridMenu.FindItemByPosition(1).Enable(True)
else:
self.SubindexGridMenu.FindItemByPosition(0).Enable(False)
self.SubindexGridMenu.FindItemByPosition(1).Enable(False)
if self.Table.GetColLabelValue(event.GetCol(), False) == "value":
showpopup = True
self.SubindexGridMenu.FindItemByPosition(3).Enable(True)
else:
self.SubindexGridMenu.FindItemByPosition(3).Enable(False)
if showpopup:
self.PopupMenu(self.SubindexGridMenu)
elif self.Table.GetColLabelValue(event.GetCol(), False) == "value" and self.ParentWindow.GetCurrentNodeId() is not None:
selected = self.IndexList.GetSelection()
if selected != wx.NOT_FOUND:
index = self.ListIndex[selected]
if self.Manager.IsCurrentEntry(index):
infos = self.Manager.GetEntryInfos(index)
                    if not (infos["struct"] & OD_MultipleSubindexes) or event.GetRow() > 0:
self.SubindexGridMenu.FindItemByPosition(0).Enable(False)
self.SubindexGridMenu.FindItemByPosition(1).Enable(False)
self.SubindexGridMenu.FindItemByPosition(3).Enable(False)
self.SubindexGridMenu.FindItemByPosition(4).Enable(True)
self.PopupMenu(self.SubindexGridMenu)
event.Skip()
def OnAddToDCFSubindexMenu(self, event):
if not self.Editable:
selected = self.IndexList.GetSelection()
if selected != wx.NOT_FOUND:
index = self.ListIndex[selected]
subindex = self.SubindexGrid.GetGridCursorRow()
entry_infos = self.Manager.GetEntryInfos(index)
                if not (entry_infos["struct"] & OD_MultipleSubindexes) or subindex != 0:
subentry_infos = self.Manager.GetSubentryInfos(index, subindex)
typeinfos = self.Manager.GetEntryInfos(subentry_infos["type"])
if typeinfos:
node_id = self.ParentWindow.GetCurrentNodeId()
value = self.Table.GetValueByName(subindex, "value")
if value == "True":
value = 1
elif value == "False":
value = 0
elif value.isdigit():
value = int(value)
elif value.startswith("0x"):
value = int(value, 16)
else:
value = int(value.encode("hex_codec"), 16)
self.Manager.AddToMasterDCF(node_id, index, subindex, max(1, typeinfos["size"] / 8), value)
self.ParentWindow.OpenMasterDCFDialog(node_id)
def OpenDCFDialog(self, node_id):
self.PartList.SetSelection(7)
self.RefreshIndexList()
self.IndexList.SetSelection(self.ListIndex.index(0x1F22))
self.RefreshTable()
self.SubindexGrid.SetGridCursor(node_id, 3)
self.ShowDCFEntryDialog(node_id, 3)
def OnRenameIndexMenu(self, event):
if self.Editable:
selected = self.IndexList.GetSelection()
if selected != wx.NOT_FOUND:
index = self.ListIndex[selected]
if self.Manager.IsCurrentEntry(index):
infos = self.Manager.GetEntryInfos(index)
dialog = wx.TextEntryDialog(self, _("Give a new name for index 0x%04X")%index,
_("Rename an index"), infos["name"], wx.OK|wx.CANCEL)
if dialog.ShowModal() == wx.ID_OK:
self.Manager.SetCurrentEntryName(index, dialog.GetValue())
self.ParentWindow.RefreshBufferState()
self.RefreshIndexList()
dialog.Destroy()
def OnModifyIndexMenu(self, event):
if self.Editable:
selected = self.IndexList.GetSelection()
if selected != wx.NOT_FOUND:
index = self.ListIndex[selected]
if self.Manager.IsCurrentEntry(index) and index < 0x260:
values, valuetype = self.Manager.GetCustomisedTypeValues(index)
dialog = UserTypeDialog(self)
dialog.SetTypeList(self.Manager.GetCustomisableTypes(), values[1])
if valuetype == 0:
dialog.SetValues(min = values[2], max = values[3])
elif valuetype == 1:
dialog.SetValues(length = values[2])
                    if dialog.ShowModal() == wx.ID_OK:
                        type, min, max, length = dialog.GetValues()
                        self.Manager.SetCurrentUserType(index, type, min, max, length)
                        self.ParentWindow.RefreshBufferState()
                        self.RefreshIndexList()
                    dialog.Destroy()
def OnDeleteIndexMenu(self, event):
if self.Editable:
selected = self.IndexList.GetSelection()
if selected != wx.NOT_FOUND:
index = self.ListIndex[selected]
if self.Manager.IsCurrentEntry(index):
                    self.Manager.ManageEntriesOfCurrent([], [index])
self.ParentWindow.RefreshBufferState()
self.RefreshIndexList()
def OnAddSubindexMenu(self, event):
if self.Editable:
selected = self.IndexList.GetSelection()
if selected != wx.NOT_FOUND:
index = self.ListIndex[selected]
if self.Manager.IsCurrentEntry(index):
dialog = wx.TextEntryDialog(self, _("Number of subindexes to add:"),
_("Add subindexes"), "1", wx.OK|wx.CANCEL)
if dialog.ShowModal() == wx.ID_OK:
                        try:
                            number = int(dialog.GetValue())
                        except ValueError:
                            message = wx.MessageDialog(self, _("An integer is required!"), _("ERROR"), wx.OK|wx.ICON_ERROR)
                            message.ShowModal()
                            message.Destroy()
                        else:
                            self.Manager.AddSubentriesToCurrent(index, number)
                            self.ParentWindow.RefreshBufferState()
                            self.RefreshIndexList()
dialog.Destroy()
def OnDeleteSubindexMenu(self, event):
if self.Editable:
selected = self.IndexList.GetSelection()
if selected != wx.NOT_FOUND:
index = self.ListIndex[selected]
if self.Manager.IsCurrentEntry(index):
dialog = wx.TextEntryDialog(self, _("Number of subindexes to delete:"),
_("Delete subindexes"), "1", wx.OK|wx.CANCEL)
if dialog.ShowModal() == wx.ID_OK:
                        try:
                            number = int(dialog.GetValue())
                        except ValueError:
                            message = wx.MessageDialog(self, _("An integer is required!"), _("ERROR"), wx.OK|wx.ICON_ERROR)
                            message.ShowModal()
                            message.Destroy()
                        else:
                            self.Manager.RemoveSubentriesFromCurrent(index, number)
                            self.ParentWindow.RefreshBufferState()
                            self.RefreshIndexList()
dialog.Destroy()
def OnDefaultValueSubindexMenu(self, event):
if self.Editable:
selected = self.IndexList.GetSelection()
if selected != wx.NOT_FOUND:
index = self.ListIndex[selected]
if self.Manager.IsCurrentEntry(index):
row = self.SubindexGrid.GetGridCursorRow()
self.Manager.SetCurrentEntryToDefault(index, row)
self.ParentWindow.RefreshBufferState()
self.RefreshIndexList()
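# --- Hedged construction sketch (editorial addition, not part of the original source) ---
# ``frame`` below is an assumed host window that serves both as the wx parent
# and as the callback target (it must provide ModeSolo, RefreshBufferState,
# RefreshStatusBar, GetBusId, ...); ``manager`` is the object-dictionary
# manager used throughout the class above.
def _make_editing_panel(frame, manager, editable=True):
    panel = EditingPanel(frame, frame, manager, editable)
    # populate the index list for the initially selected dictionary part
    panel.RefreshIndexList()
    return panel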
| lgpl-2.1 | -4,548,165,522,337,977,300 | 45.813632 | 147 | 0.567087 | false |
youprofit/scikit-image | skimage/feature/tests/test_orb.py | 20 | 6215 | import numpy as np
from numpy.testing import assert_equal, assert_almost_equal, run_module_suite
from skimage.feature import ORB
from skimage import data
from skimage.color import rgb2gray
from skimage._shared.testing import test_parallel
img = rgb2gray(data.lena())
@test_parallel()
def test_keypoints_orb_desired_no_of_keypoints():
detector_extractor = ORB(n_keypoints=10, fast_n=12, fast_threshold=0.20)
detector_extractor.detect(img)
exp_rows = np.array([ 435. , 435.6 , 376. , 455. , 434.88, 269. ,
375.6 , 310.8 , 413. , 311.04])
exp_cols = np.array([ 180. , 180. , 156. , 176. , 180. , 111. ,
156. , 172.8, 70. , 172.8])
exp_scales = np.array([ 1. , 1.2 , 1. , 1. , 1.44 , 1. ,
1.2 , 1.2 , 1. , 1.728])
exp_orientations = np.array([-175.64733392, -167.94842949, -148.98350192,
-142.03599837, -176.08535837, -53.08162354,
-150.89208271, 97.7693776 , -173.4479964 ,
38.66312042])
exp_response = np.array([ 0.96770745, 0.81027306, 0.72376257,
0.5626413 , 0.5097993 , 0.44351774,
0.39154173, 0.39084861, 0.39063076,
0.37602487])
assert_almost_equal(exp_rows, detector_extractor.keypoints[:, 0])
assert_almost_equal(exp_cols, detector_extractor.keypoints[:, 1])
assert_almost_equal(exp_scales, detector_extractor.scales)
assert_almost_equal(exp_response, detector_extractor.responses)
assert_almost_equal(exp_orientations,
np.rad2deg(detector_extractor.orientations), 5)
detector_extractor.detect_and_extract(img)
assert_almost_equal(exp_rows, detector_extractor.keypoints[:, 0])
assert_almost_equal(exp_cols, detector_extractor.keypoints[:, 1])
def test_keypoints_orb_less_than_desired_no_of_keypoints():
detector_extractor = ORB(n_keypoints=15, fast_n=12,
fast_threshold=0.33, downscale=2, n_scales=2)
detector_extractor.detect(img)
exp_rows = np.array([ 67., 247., 269., 413., 435., 230., 264.,
330., 372.])
exp_cols = np.array([ 157., 146., 111., 70., 180., 136., 336.,
148., 156.])
exp_scales = np.array([ 1., 1., 1., 1., 1., 2., 2., 2., 2.])
exp_orientations = np.array([-105.76503839, -96.28973044, -53.08162354,
-173.4479964 , -175.64733392, -106.07927215,
-163.40016243, 75.80865813, -154.73195911])
exp_response = np.array([ 0.13197835, 0.24931321, 0.44351774,
0.39063076, 0.96770745, 0.04935129,
0.21431068, 0.15826555, 0.42403573])
assert_almost_equal(exp_rows, detector_extractor.keypoints[:, 0])
assert_almost_equal(exp_cols, detector_extractor.keypoints[:, 1])
assert_almost_equal(exp_scales, detector_extractor.scales)
assert_almost_equal(exp_response, detector_extractor.responses)
assert_almost_equal(exp_orientations,
np.rad2deg(detector_extractor.orientations), 5)
detector_extractor.detect_and_extract(img)
assert_almost_equal(exp_rows, detector_extractor.keypoints[:, 0])
assert_almost_equal(exp_cols, detector_extractor.keypoints[:, 1])
def test_descriptor_orb():
detector_extractor = ORB(fast_n=12, fast_threshold=0.20)
exp_descriptors = np.array([[ True, False, True, True, False, False, False, False, False, False],
[False, False, True, True, False, True, True, False, True, True],
[ True, False, False, False, True, False, True, True, True, False],
[ True, False, False, True, False, True, True, False, False, False],
[False, True, True, True, False, False, False, True, True, False],
[False, False, False, False, False, True, False, True, True, True],
[False, True, True, True, True, False, False, True, False, True],
[ True, True, True, False, True, True, True, True, False, False],
[ True, True, False, True, True, True, True, False, False, False],
[ True, False, False, False, False, True, False, False, True, True],
[ True, False, False, False, True, True, True, False, False, False],
[False, False, True, False, True, False, False, True, False, False],
[False, False, True, True, False, False, False, False, False, True],
[ True, True, False, False, False, True, True, True, True, True],
[ True, True, True, False, False, True, False, True, True, False],
[False, True, True, False, False, True, True, True, True, True],
[ True, True, True, False, False, False, False, True, True, True],
[False, False, False, False, True, False, False, True, True, False],
[False, True, False, False, True, False, False, False, True, True],
[ True, False, True, False, False, False, True, True, False, False]], dtype=bool)
detector_extractor.detect(img)
detector_extractor.extract(img, detector_extractor.keypoints,
detector_extractor.scales,
detector_extractor.orientations)
assert_equal(exp_descriptors,
detector_extractor.descriptors[100:120, 10:20])
detector_extractor.detect_and_extract(img)
assert_equal(exp_descriptors,
detector_extractor.descriptors[100:120, 10:20])
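def _orb_pipeline_sketch(image, n_keypoints=200):
    # Hedged illustration (editorial addition, not part of the upstream
    # suite): the detect-then-describe flow exercised by the tests above,
    # returning the keypoint coordinates together with their binary
    # descriptors.
    orb = ORB(n_keypoints=n_keypoints)
    orb.detect_and_extract(image)
    return orb.keypoints, orb.descriptors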
if __name__ == '__main__':
run_module_suite()
| bsd-3-clause | 494,025,806,176,672,700 | 53.043478 | 116 | 0.534352 | false |
CompMusic/essentia | test/src/unittest/spectral/test_spectralcontrast.py | 10 | 4457 | #!/usr/bin/env python
# Copyright (C) 2006-2013 Music Technology Group - Universitat Pompeu Fabra
#
# This file is part of Essentia
#
# Essentia is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation (FSF), either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the Affero GNU General Public License
# version 3 along with this program. If not, see http://www.gnu.org/licenses/
from essentia_test import *
class TestSpectralContrast(TestCase):
def testRegression(self):
# Simple regression test, comparing to reference values
audio = MonoLoader(filename = join(testdata.audio_dir, 'recorded/musicbox.wav'),
sampleRate = 44100)()
fft = Spectrum()
window = Windowing(type = 'hamming')
SC = SpectralContrast(sampleRate = 44100)
expected = 0
sc = []
valleys = []
for frame in FrameGenerator(audio, frameSize = 2048, hopSize = 512):
result = SC(fft(window(frame)))
self.assert_(not any(numpy.isnan(result[0])))
self.assert_(not any(numpy.isinf(result[1])))
sc += [result[0]]
valleys += [result[1]]
self.assertAlmostEqual(numpy.mean(sc), -0.604606057431, 1e-5)
self.assertAlmostEqual(numpy.mean(valleys), -8.55062127501, 1e-5)
def testZero(self):
SC = SpectralContrast(sampleRate = 44100)
sc, valleys = SC(zeros(1025))
self.assertAlmostEqual(numpy.mean(sc), -1)
self.assertAlmostEqual(numpy.mean(valleys), numpy.log(1e-30))
def testOnes(self):
SC = SpectralContrast(sampleRate = 44100)
sc, valleys = SC(ones(1025))
self.assertAlmostEqual(numpy.mean(sc), -1)
self.assertAlmostEqual(numpy.mean(valleys), 0)
def testConstant(self):
SC = SpectralContrast(sampleRate = 44100)
sc, valleys = SC([0.5]*1025)
self.assertAlmostEqual(numpy.mean(sc), -1)
        self.assertAlmostEqual(numpy.mean(valleys), -0.6931471825, 1e-7)
def testCompare(self):
spec0 = [1]*1025
spec1 = [1]*1015 + [0]*10
spec2 = [1]*10 + [0]*1015
sr = 44100
SC = SpectralContrast(sampleRate = sr, highFrequencyBound = sr/2)
sc0 = SC(spec0)
sc1 = SC(spec1)
sc2 = SC(spec2)
self.assertTrue(numpy.mean(sc1[0]) < numpy.mean(sc2[0]))
self.assertTrue(numpy.mean(sc0[0]) < numpy.mean(sc2[0]))
self.assertTrue(numpy.mean(sc0[0]) < numpy.mean(sc1[0]))
def testInvalidParam(self):
self.assertConfigureFails(SpectralContrast(), { 'frameSize': 0 })
self.assertConfigureFails(SpectralContrast(), { 'frameSize': 1 })
self.assertConfigureFails(SpectralContrast(), { 'sampleRate': 0 })
self.assertConfigureFails(SpectralContrast(), { 'numberBands': 0 })
self.assertConfigureFails(SpectralContrast(), { 'lowFrequencyBound': -1 })
self.assertConfigureFails(SpectralContrast(), { 'highFrequencyBound': 40000 })
self.assertConfigureFails(SpectralContrast(), { 'neighbourRatio': 1.5 })
self.assertConfigureFails(SpectralContrast(), { 'staticDistribution': 1.5 })
# lower bound cannot be larger than higher band:
self.assertConfigureFails(SpectralContrast(), { 'lowFrequencyBound': 11000,
'highFrequencyBound': 5000 })
def testEmpty(self):
SC = SpectralContrast(sampleRate = 44100)
self.assertComputeFails(SC, [])
def testOneElement(self):
# input spectrum must be 0.5*framesize
SC = SpectralContrast(sampleRate = 44100)
self.assertComputeFails(SC, [1])
def testSpectrumSizeSmallerThanNumberOfBands(self):
SC = SpectralContrast(sampleRate = 44100, frameSize = 4)
sc = SC([1,1,1])
self.assertAlmostEquals(numpy.mean(sc[0]), -2.7182817459)
self.assertAlmostEquals(numpy.mean(sc[1]), 0)
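def _spectral_contrast_sketch(spectrum, sample_rate=44100):
    # Hedged illustration (editorial addition, not part of the upstream
    # suite): SpectralContrast consumes a magnitude spectrum of length
    # frameSize/2 + 1 and returns the per-band contrast coefficients
    # together with the corresponding valley magnitudes.
    sc = SpectralContrast(sampleRate=sample_rate)
    contrast, valleys = sc(spectrum)
    return contrast, valleys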
suite = allTests(TestSpectralContrast)
if __name__ == '__main__':
TextTestRunner(verbosity=2).run(suite)
| agpl-3.0 | 3,425,264,114,211,224,000 | 37.756522 | 88 | 0.645501 | false |
mbarbon/wapiti | src/file/vulnerabilityxmlparser.py | 1 | 3757 | #!/usr/bin/env python
# XML Report Generator Module for Wapiti Project
# Wapiti Project (http://wapiti.sourceforge.net)
#
# David del Pozo
# Alberto Pastor
# Copyright (C) 2008 Informatica Gesfor
# ICT Romulus (http://www.ict-romulus.eu)
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
from xml.parsers import expat
from vulnerability import Vulnerability
class VulnerabilityXMLParser:
VULNERABILITY = "vulnerability"
VULNERABILITY_NAME = "name"
VULNERABILITY_DESCRIPTION = "description"
VULNERABILITY_SOLUTION = "solution"
VULNERABILITY_REFERENCE = "reference"
VULNERABILITY_REFERENCES = "references"
VULNERABILITY_REFERENCE_TITLE = "title"
VULNERABILITY_REFERENCE_URL = "url"
    def __init__(self):
        # per-instance state (kept out of the class body so that repeated
        # parses and multiple parser instances do not share or accumulate
        # results through mutable class attributes)
        self.vulnerabilities = []
        self.vul = None
        self.references = {}
        self.title = ""
        self.url = ""
        self.tag = ""
        self._parser = expat.ParserCreate()
        self._parser.StartElementHandler = self.start_element
        self._parser.EndElementHandler = self.end_element
        self._parser.CharacterDataHandler = self.char_data
def parse(self, fileName):
f = None
try:
f = open(fileName)
content = f.read()
self.feed(content)
finally:
if f is not None:
f.close()
def feed(self, data):
self._parser.Parse(data, 0)
def close(self):
self._parser.Parse("", 1)
del self._parser
def start_element(self, name, attrs):
if name == self.VULNERABILITY:
self.vul = Vulnerability()
self.vul.setName(attrs[self.VULNERABILITY_NAME])
elif name == self.VULNERABILITY_DESCRIPTION:
self.tag = self.VULNERABILITY_DESCRIPTION
elif name == self.VULNERABILITY_SOLUTION:
#self.tag = self.VULNERABILITY_SOLUTION
self.vul.setSolution(attrs["text"])
elif name == self.VULNERABILITY_REFERENCES:
self.references = {}
elif name == self.VULNERABILITY_REFERENCE:
self.tag = self.VULNERABILITY_REFERENCE
elif name == self.VULNERABILITY_REFERENCE_TITLE:
self.tag = self.VULNERABILITY_REFERENCE_TITLE
elif name == self.VULNERABILITY_REFERENCE_URL:
self.tag = self.VULNERABILITY_REFERENCE_URL
def end_element(self, name):
if name == self.VULNERABILITY:
self.vulnerabilities.append(self.vul)
elif name == self.VULNERABILITY_REFERENCE:
self.references[self.title] = self.url
elif name == self.VULNERABILITY_REFERENCES:
self.vul.setReferences(self.references)
def char_data(self, data):
if self.tag == self.VULNERABILITY_DESCRIPTION:
self.vul.setDescription(data)
# elif self.tag==self.VULNERABILITY_SOLUTION:
# self.vul.setSolution(data)
elif self.tag == self.VULNERABILITY_REFERENCE_TITLE:
self.title = data
elif self.tag == self.VULNERABILITY_REFERENCE_URL:
self.url = data
self.tag = ""
def getVulnerabilities(self):
return self.vulnerabilities
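# --- Hedged usage sketch (editorial addition, not part of the original source) ---
def _load_vulnerabilities(xml_path):
    # Parse one vulnerability description file and return the accumulated
    # Vulnerability objects; ``xml_path`` is an assumed example argument.
    parser = VulnerabilityXMLParser()
    parser.parse(xml_path)
    return parser.getVulnerabilities()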
| gpl-2.0 | -5,089,041,232,667,480,000 | 33.787037 | 76 | 0.656907 | false |
amenonsen/ansible | test/units/modules/network/f5/test_bigip_device_ha_group.py | 22 | 7470 | # -*- coding: utf-8 -*-
#
# Copyright: (c) 2018, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
import sys
if sys.version_info < (2, 7):
pytestmark = pytest.mark.skip("F5 Ansible modules require Python >= 2.7")
from ansible.module_utils.basic import AnsibleModule
try:
from library.modules.bigip_device_ha_group import ModuleParameters
from library.modules.bigip_device_ha_group import ModuleManager
from library.modules.bigip_device_ha_group import ArgumentSpec
# In Ansible 2.8, Ansible changed import paths.
from test.units.compat import unittest
from test.units.compat.mock import Mock
from test.units.compat.mock import patch
from test.units.modules.utils import set_module_args
except ImportError:
from ansible.modules.network.f5.bigip_device_ha_group import ModuleParameters
from ansible.modules.network.f5.bigip_device_ha_group import ModuleManager
from ansible.modules.network.f5.bigip_device_ha_group import ArgumentSpec
# Ansible 2.8 imports
from units.compat import unittest
from units.compat.mock import Mock
from units.compat.mock import patch
from units.modules.utils import set_module_args
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}
def load_fixture(name):
path = os.path.join(fixture_path, name)
if path in fixture_data:
return fixture_data[path]
with open(path) as f:
data = f.read()
try:
data = json.loads(data)
except Exception:
pass
fixture_data[path] = data
return data
class TestParameters(unittest.TestCase):
def test_module_parameters_v13(self):
args = dict(
name='foobar',
description='baz',
active_bonus=20,
enable='yes',
state='present',
pools=[
dict(
pool_name='fakepool',
attribute='percent-up-members',
weight=30,
minimum_threshold=2,
partition='Common'
)
],
trunks=[
dict(
trunk_name='faketrunk',
attribute='percent-up-members',
weight=30,
minimum_threshold=2
)
]
)
try:
self.p1 = patch('library.modules.bigip_device_ha_group.tmos_version')
self.m1 = self.p1.start()
self.m1.return_value = '13.1.0'
except Exception:
self.p1 = patch('ansible.modules.network.f5.bigip_device_ha_group.tmos_version')
self.m1 = self.p1.start()
self.m1.return_value = '13.1.0'
p = ModuleParameters(params=args)
assert p.name == 'foobar'
assert p.state == 'present'
assert p.active_bonus == 20
assert p.enabled is True
assert p.pools == [{'name': '/Common/fakepool', 'attribute': 'percent-up-members',
'weight': 30, 'minimumThreshold': 2}]
assert p.trunks == [{'name': 'faketrunk', 'attribute': 'percent-up-members',
'weight': 30, 'minimumThreshold': 2}]
self.p1.stop()
def test_module_parameters_v12(self):
args = dict(
name='foobar',
description='baz',
active_bonus=20,
enable='yes',
state='present',
pools=[
dict(
pool_name='fakepool',
attribute='percent-up-members',
weight=30,
minimum_threshold=2,
partition='Common'
)
],
trunks=[
dict(
trunk_name='faketrunk',
attribute='percent-up-members',
weight=20,
minimum_threshold=1
)
]
)
try:
self.p1 = patch('library.modules.bigip_device_ha_group.tmos_version')
self.m1 = self.p1.start()
self.m1.return_value = '12.1.0'
except Exception:
self.p1 = patch('ansible.modules.network.f5.bigip_device_ha_group.tmos_version')
self.m1 = self.p1.start()
self.m1.return_value = '12.1.0'
p = ModuleParameters(params=args)
assert p.name == 'foobar'
assert p.state == 'present'
assert p.active_bonus == 20
assert p.enabled is True
assert p.pools == [{'name': '/Common/fakepool', 'attribute': 'percent-up-members',
'weight': 30, 'threshold': 2}]
assert p.trunks == [{'name': 'faketrunk', 'attribute': 'percent-up-members',
'weight': 20, 'threshold': 1}]
self.p1.stop()
class TestManager(unittest.TestCase):
def setUp(self):
self.spec = ArgumentSpec()
try:
self.p1 = patch('library.modules.bigip_device_ha_group.tmos_version')
self.m1 = self.p1.start()
self.m1.return_value = '13.1.0'
except Exception:
self.p1 = patch('ansible.modules.network.f5.bigip_device_ha_group.tmos_version')
self.m1 = self.p1.start()
self.m1.return_value = '13.1.0'
def tearDown(self):
self.p1.stop()
def test_create_ha_group(self, *args):
set_module_args(dict(
name='fake_group',
state='present',
description='baz',
active_bonus=20,
enable='yes',
pools=[
dict(
pool_name='fakepool',
attribute='percent-up-members',
weight=30,
minimum_threshold=2,
partition='Common'
)
],
trunks=[
dict(
trunk_name='faketrunk',
attribute='percent-up-members',
weight=20,
minimum_threshold=1
)
],
provider=dict(
server='localhost',
password='password',
user='admin'
)
))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
# Override methods to force specific logic in the module to happen
mm = ModuleManager(module=module)
mm.exists = Mock(return_value=False)
mm.create_on_device = Mock(return_value=True)
results = mm.exec_module()
assert results['changed'] is True
assert results['name'] == 'fake_group'
assert results['description'] == 'baz'
assert results['active_bonus'] == 20
assert results['enable'] == 'yes'
assert results['pools'] == [{'pool_name': '/Common/fakepool', 'attribute': 'percent-up-members',
'weight': 30, 'minimum_threshold': 2}]
assert results['trunks'] == [{'trunk_name': 'faketrunk', 'attribute': 'percent-up-members',
'weight': 20, 'minimum_threshold': 1}]
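# Editorial note (not part of the upstream test): the try/except around
# ``patch(...)`` in setUp and in the parameter tests exists because the module
# under test resolves to ``library.modules...`` when run from the F5
# repository and to ``ansible.modules.network.f5...`` from an Ansible
# checkout; pinning ``tmos_version`` makes ModuleParameters emit the 13.x
# (``minimumThreshold``) or 12.x (``threshold``) pool/trunk keys
# deterministically, which is what the two parameter tests above assert.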
| gpl-3.0 | -6,894,789,936,368,118,000 | 31.478261 | 104 | 0.530522 | false |
davols/WhatManager2 | WhatManager2/management/commands/what_meta_fixer.py | 4 | 1349 | from django.core.management.base import BaseCommand
from django.db import transaction
from home.models import WhatTorrent
class Command(BaseCommand):
help = 'Fixes missing entries in what_meta by iterating `WhatTorrent`s.'
def handle(self, *args, **options):
print 'Running what_meta fixer...'
print 'Fixing WhatTorrent -> WhatTorrentGroup mapping'
what_torrent_ids = WhatTorrent.objects.filter(torrent_group=None).values_list(
'id', flat=True)
start = 0
page_size = 128
while start < len(what_torrent_ids):
print 'Updating objects {0}-{1}/{2}'.format(start, start + page_size,
len(what_torrent_ids))
bulk = WhatTorrent.objects.defer('torrent_file').in_bulk(
what_torrent_ids[start:start + page_size])
start += page_size
with transaction.atomic():
for torrent in bulk.values():
# Skip non-music torrents for now
if torrent.info_category_id != 1:
continue
try:
torrent.save()
except Exception as ex:
print 'Error updating what_id={0}: {1}'.format(torrent.id, ex)
print 'Completed.'
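# Editorial usage note (not part of the original source): as a Django
# management command this is invoked as, e.g.,
#
#     python manage.py what_meta_fixer
#
# Paging through the ids in fixed-size slices keeps memory bounded, and the
# per-page ``transaction.atomic()`` block commits each batch independently.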
| mit | 5,698,572,313,436,174,000 | 41.15625 | 86 | 0.548554 | false |
stumoodie/PathwayEditor | libs/antlr-3.4/runtime/Python/antlr3/tokens.py | 20 | 11959 | """ANTLR3 runtime package"""
# begin[licence]
#
# [The "BSD licence"]
# Copyright (c) 2005-2008 Terence Parr
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The name of the author may not be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# end[licence]
from antlr3.constants import EOF, DEFAULT_CHANNEL, INVALID_TOKEN_TYPE
############################################################################
#
# basic token interface
#
############################################################################
class Token(object):
"""@brief Abstract token baseclass."""
def getText(self):
"""@brief Get the text of the token.
Using setter/getter methods is deprecated. Use o.text instead.
"""
raise NotImplementedError
def setText(self, text):
"""@brief Set the text of the token.
Using setter/getter methods is deprecated. Use o.text instead.
"""
raise NotImplementedError
def getType(self):
"""@brief Get the type of the token.
Using setter/getter methods is deprecated. Use o.type instead."""
raise NotImplementedError
def setType(self, ttype):
"""@brief Get the type of the token.
Using setter/getter methods is deprecated. Use o.type instead."""
raise NotImplementedError
def getLine(self):
"""@brief Get the line number on which this token was matched
Lines are numbered 1..n
Using setter/getter methods is deprecated. Use o.line instead."""
raise NotImplementedError
def setLine(self, line):
"""@brief Set the line number on which this token was matched
Using setter/getter methods is deprecated. Use o.line instead."""
raise NotImplementedError
def getCharPositionInLine(self):
"""@brief Get the column of the tokens first character,
Columns are numbered 0..n-1
Using setter/getter methods is deprecated. Use o.charPositionInLine instead."""
raise NotImplementedError
def setCharPositionInLine(self, pos):
"""@brief Set the column of the tokens first character,
Using setter/getter methods is deprecated. Use o.charPositionInLine instead."""
raise NotImplementedError
def getChannel(self):
"""@brief Get the channel of the token
Using setter/getter methods is deprecated. Use o.channel instead."""
raise NotImplementedError
def setChannel(self, channel):
"""@brief Set the channel of the token
Using setter/getter methods is deprecated. Use o.channel instead."""
raise NotImplementedError
def getTokenIndex(self):
"""@brief Get the index in the input stream.
An index from 0..n-1 of the token object in the input stream.
This must be valid in order to use the ANTLRWorks debugger.
Using setter/getter methods is deprecated. Use o.index instead."""
raise NotImplementedError
def setTokenIndex(self, index):
"""@brief Set the index in the input stream.
Using setter/getter methods is deprecated. Use o.index instead."""
raise NotImplementedError
def getInputStream(self):
"""@brief From what character stream was this token created.
You don't have to implement but it's nice to know where a Token
comes from if you have include files etc... on the input."""
raise NotImplementedError
def setInputStream(self, input):
"""@brief From what character stream was this token created.
You don't have to implement but it's nice to know where a Token
comes from if you have include files etc... on the input."""
raise NotImplementedError
############################################################################
#
# token implementations
#
# Token
# +- CommonToken
# \- ClassicToken
#
############################################################################
class CommonToken(Token):
"""@brief Basic token implementation.
This implementation does not copy the text from the input stream upon
creation, but keeps start/stop pointers into the stream to avoid
unnecessary copy operations.
"""
def __init__(self, type=None, channel=DEFAULT_CHANNEL, text=None,
input=None, start=None, stop=None, oldToken=None):
Token.__init__(self)
if oldToken is not None:
self.type = oldToken.type
self.line = oldToken.line
self.charPositionInLine = oldToken.charPositionInLine
self.channel = oldToken.channel
self.index = oldToken.index
self._text = oldToken._text
self.input = oldToken.input
if isinstance(oldToken, CommonToken):
self.start = oldToken.start
self.stop = oldToken.stop
else:
self.type = type
self.input = input
self.charPositionInLine = -1 # set to invalid position
self.line = 0
self.channel = channel
#What token number is this from 0..n-1 tokens; < 0 implies invalid index
self.index = -1
# We need to be able to change the text once in a while. If
# this is non-null, then getText should return this. Note that
# start/stop are not affected by changing this.
self._text = text
# The char position into the input buffer where this token starts
self.start = start
# The char position into the input buffer where this token stops
# This is the index of the last char, *not* the index after it!
self.stop = stop
def getText(self):
if self._text is not None:
return self._text
if self.input is None:
return None
if self.start < self.input.size() and self.stop < self.input.size():
return self.input.substring(self.start, self.stop)
return '<EOF>'
def setText(self, text):
"""
Override the text for this token. getText() will return this text
rather than pulling from the buffer. Note that this does not mean
that start/stop indexes are not valid. It means that that input
was converted to a new string in the token object.
"""
self._text = text
text = property(getText, setText)
def getType(self):
return self.type
def setType(self, ttype):
self.type = ttype
def getTypeName(self):
return str(self.type)
typeName = property(lambda s: s.getTypeName())
def getLine(self):
return self.line
def setLine(self, line):
self.line = line
def getCharPositionInLine(self):
return self.charPositionInLine
def setCharPositionInLine(self, pos):
self.charPositionInLine = pos
def getChannel(self):
return self.channel
def setChannel(self, channel):
self.channel = channel
def getTokenIndex(self):
return self.index
def setTokenIndex(self, index):
self.index = index
def getInputStream(self):
return self.input
def setInputStream(self, input):
self.input = input
def __str__(self):
if self.type == EOF:
return "<EOF>"
channelStr = ""
if self.channel > 0:
channelStr = ",channel=" + str(self.channel)
txt = self.text
if txt is not None:
txt = txt.replace("\n","\\\\n")
txt = txt.replace("\r","\\\\r")
txt = txt.replace("\t","\\\\t")
else:
txt = "<no text>"
return "[@%d,%d:%d=%r,<%s>%s,%d:%d]" % (
self.index,
self.start, self.stop,
txt,
self.typeName, channelStr,
self.line, self.charPositionInLine
)
class ClassicToken(Token):
"""@brief Alternative token implementation.
A Token object like we'd use in ANTLR 2.x; has an actual string created
and associated with this object. These objects are needed for imaginary
tree nodes that have payload objects. We need to create a Token object
that has a string; the tree node will point at this token. CommonToken
has indexes into a char stream and hence cannot be used to introduce
new strings.
"""
def __init__(self, type=None, text=None, channel=DEFAULT_CHANNEL,
oldToken=None
):
Token.__init__(self)
if oldToken is not None:
self.text = oldToken.text
self.type = oldToken.type
self.line = oldToken.line
self.charPositionInLine = oldToken.charPositionInLine
self.channel = oldToken.channel
self.text = text
self.type = type
self.line = None
self.charPositionInLine = None
self.channel = channel
self.index = None
def getText(self):
return self.text
def setText(self, text):
self.text = text
def getType(self):
return self.type
def setType(self, ttype):
self.type = ttype
def getLine(self):
return self.line
def setLine(self, line):
self.line = line
def getCharPositionInLine(self):
return self.charPositionInLine
def setCharPositionInLine(self, pos):
self.charPositionInLine = pos
def getChannel(self):
return self.channel
def setChannel(self, channel):
self.channel = channel
def getTokenIndex(self):
return self.index
def setTokenIndex(self, index):
self.index = index
def getInputStream(self):
return None
def setInputStream(self, input):
pass
def toString(self):
channelStr = ""
if self.channel > 0:
channelStr = ",channel=" + str(self.channel)
txt = self.text
if txt is None:
txt = "<no text>"
return "[@%r,%r,<%r>%s,%r:%r]" % (self.index,
txt,
self.type,
channelStr,
self.line,
self.charPositionInLine
)
__str__ = toString
__repr__ = toString
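def _imaginary_token(token_type, text):
    # Hedged sketch (editorial addition, not part of the upstream runtime):
    # imaginary tree nodes need a token that owns its text instead of
    # pointing into a char stream, which is what ClassicToken provides
    # per its docstring above.
    return ClassicToken(type=token_type, text=text)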
INVALID_TOKEN = CommonToken(type=INVALID_TOKEN_TYPE)
# In an action, a lexer rule can set token to this SKIP_TOKEN and ANTLR
# will avoid creating a token for this symbol and try to fetch another.
SKIP_TOKEN = CommonToken(type=INVALID_TOKEN_TYPE)
| apache-2.0 | 638,587,864,638,592,100 | 27.610048 | 87 | 0.605402 | false |
Rhizi/rhizi | rhizi/rz_api_common.py | 5 | 3606 | # This file is part of rhizi, a collaborative knowledge graph editor.
# Copyright (C) 2014-2015 Rhizi
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Common public API logic:
- object sanitization for inbound data
- object validation for inbound data
- sanitize_input__XXX: concerned with sanitizing potentially corrupt data arriving
  from external sources.
- validate_obj__XXX: concerned with validating the logical state of an object
"""
from flask import current_app
import logging
log = logging.getLogger('rhizi')
class API_Exception__bad_request(Exception): # raised by input sanitation functions
def __init__(self, internal_err_msg, caller_err_msg=None):
super(API_Exception__bad_request, self).__init__(internal_err_msg)
        self.caller_err_msg = caller_err_msg  # short string error message which may be presented to the caller
def __sanitize_input(*args, **kw_args):
pass
def sanitize_input__node(n):
"""
provide a control point as to which node fields are persisted
"""
assert None != n.get('id'), 'invalid input: node: missing id'
def sanitize_input__link(l):
"""
provide a control point as to which link fields are persisted
"""
# expected prop assertions
assert None != l.get('id'), 'invalid input: link: missing id'
assert None != l.get('__src_id'), 'invalid input: link: missing src id'
assert None != l.get('__dst_id'), 'invalid input: link: missing dst id'
assert None != l.get('__type'), 'invalid input: link: missing type'
# unexpected prop assertions
assert None == l.get('name'), 'client is sending us name link property, it should not'
def sanitize_input__topo_diff(topo_diff):
for n in topo_diff.node_set_add:
sanitize_input__node(n)
for l in topo_diff.link_set_add:
sanitize_input__link(l)
def sanitize_input__attr_diff(attr_diff):
pass # TODO: impl
def sanitize_input__rzdoc_name(rzdoc_name_raw):
"""
sanitize rzdoc name raw input
"""
rzdoc_name = rzdoc_name_raw.strip() # make sure we ommit trailing white spaces from doc name
if None == rzdoc_name or 0 == len(rzdoc_name):
raise API_Exception__bad_request('rzdoc: open request: empty doc name')
if None != rzdoc_name and len(rzdoc_name) > current_app.rz_config.rzdoc__name__max_length:
raise API_Exception__bad_request('rzdoc: open request: doc name exceeds max doc name limit: %s' % (rzdoc_name))
# FIXME: fail on HTML escape codes, UTF handling, etc
return rzdoc_name
def validate_obj__attr_diff(attr_diff):
# check for name attr changes, which are currently forbidden
for n_id, node_attr_diff_set in attr_diff['__type_node'].items():
for attr_name in node_attr_diff_set['__attr_write'].keys():
if 'id' == attr_name:
raise Exception('validation error: Attr_Diff: forbidden attribute change: \'id\', n_id: ' + n_id)
| agpl-3.0 | 1,825,989,486,518,577,700 | 38.195652 | 122 | 0.679978 | false |
danieldc/cloudtunes | cloudtunes-server/cloudtunes/settings/defaults.py | 13 | 2703 | import sys
from os import path
import logging
from tornado.options import parse_command_line, options, define
from cloudtunes.log import CloudtunesLogFormatter
PACKAGE_ROOT = path.dirname(path.dirname(__file__))
ROOT = path.dirname(PACKAGE_ROOT)
define('port', default=8001, help='run on the given port', type=int)
parse_command_line()
PORT = options.port
SID_COOKIE = '_'
MONGODB = {
'host': 'localhost'
}
REDIS = {
'host': 'localhost'
}
#############################################################
# Logging
#############################################################
logger = logging.getLogger('cloudtunes')
# Hide our messages from the root logger configured by tornado.
# They would be logged twice. Kinda hacky.
logger.parent = None
logger.setLevel(logging.DEBUG)
handler = logging.StreamHandler(stream=sys.stdout)
handler.setLevel(logging.DEBUG)
handler.setFormatter(CloudtunesLogFormatter())
logger.addHandler(handler)
del handler
#############################################################
# Facebook <https://developers.facebook.com/apps>
#############################################################
FACEBOOK_APP_ID = None
FACEBOOK_APP_SECRET = None
# https://developers.facebook.com/docs/authentication/permissions/
# We don't really need any permissions ATM.
FACEBOOK_PERMISSIONS = [
# 'publish_actions',
# 'user_actions.music',
# 'friends_actions.music',
# 'email',
]
#############################################################
# Dropbox <https://www.dropbox.com/developers>
#############################################################
DROPBOX_API_ACCESS_TYPE = 'app_folder'
DROPBOX_API_APP_KEY = None
DROPBOX_API_APP_SECRET = None
#############################################################
# Last.fm <http://www.last.fm/api/account>
#############################################################
LASTFM_API_KEY = None
LASTFM_API_SECRET = None
#############################################################
# Tornado
#############################################################
HOMEPAGE_SITE_DIR = path.realpath(ROOT + '/homepage')
WEB_APP_DIR = path.realpath(ROOT + '/../cloudtunes-webapp/public')
TORNADO_APP = {
'cookie_secret': None,
'login_url': '/auth',
'template_path': path.join(PACKAGE_ROOT, 'templates'),
'static_path': WEB_APP_DIR,
'xsrf_cookies': False, # TODO: enable
'autoescape': 'xhtml_escape',
'socket_io_port': PORT,
'flash_policy_port': 10843,
'flash_policy_file': path.join(ROOT, 'flashpolicy.xml'),
}
FLASH_POLICY_PORT = 10843
FLASH_POLICY_FILE = path.join(ROOT, 'flashpolicy.xml')
#############################################################
del path, logging, sys
| bsd-3-clause | 4,062,958,205,558,327,300 | 24.990385 | 68 | 0.530152 | false |
JohnDenker/brython | www/tests/issues.py | 1 | 9780 | # issue 5
assert(isinstance(__debug__,bool))
# issue #6 : unknown encoding: windows-1250
s = "Dziś jeść ryby"
b = s.encode('windows-1250')
assert b == b'Dzi\x9c je\x9c\xe6 ryby'
assert b.decode('windows-1250') == "Dziś jeść ryby"
# issue #7 : attribute set on module is not available from inside the module
import inject_name_in_module
inject_name_in_module.xxx = 123
assert inject_name_in_module.xxx == 123
# XXX temporarily comment next line
#assert inject_name_in_module.yyy() == 246
# issue #15 in PierreQuentel/brython
class a(object):
def __init__(self):
self.x = 9
a.__init__
class b(a):
def __init__(s):
super().__init__()
assert s.x==9
z = b()
# issue 12
x = {'a':1}
assert 'a' in x
class ToDir:
def init(self):
pass
instanceToDir = ToDir()
dictToDir=({k: getattr(instanceToDir,k)
for k in dir(instanceToDir) if '__' not in k})
castdictToDir={str(k): getattr(instanceToDir,k)
for k in dir(instanceToDir) if '__' not in k}
assert 'init' in castdictToDir, 'init not in castdictToDir: %s' % list(dictToDir.keys())
assert castdictToDir["init"]==instanceToDir.init , 'init not init method: %s' % castdictToDir["init"]
assert 'init' in dictToDir, 'init not in dictToDir: %s' % list(dictToDir.keys())
assert dictToDir["init"]==instanceToDir.init , 'init not init method: %s' % dictToDir["init"]
# issue 32
assert 5 < 10 < 5 * 10 < 100
# issue 16 : isolate Python Namespacing
i = 5
def foo():
def bar():
return i
res = []
for i in range(5):
res.append(bar())
return res
assert foo() == [0, 1, 2, 3, 4]
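# bar() reads i from the enclosing scope at call time (late binding), so each
# loop iteration sees the current value of i rather than the value i had when
# bar was defined.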
# issues 62, 63 and 64
import test_sp
s = 'a = 3'
exec(s, test_sp.__dict__)
assert test_sp.a == 3
del test_sp.__dict__['a']
try:
test_sp.a
raise ValueError('should have raised AttributeError')
except AttributeError:
pass
except:
raise ValueError('should have raised AttributeError')
# issue 82 : Ellipsis literal (...) missing
def f():
...
#issue 83
import sys
assert sys.version_info > (3,0,0)
assert sys.version_info >= (3,0,0)
assert not sys.version_info == (3,0,0)
assert sys.version_info != (3,0,0)
assert not sys.version_info < (3,0,0)
assert not sys.version_info <= (3,0,0)
#issue 98
assert int.from_bytes(b'\xfc', 'big') == 252
assert int.from_bytes(bytearray([252,0]), 'big') == 64512
assert int.from_bytes(b'\x00\x10', byteorder='big') == 16
assert int.from_bytes(b'\x00\x10', byteorder='little') == 4096
assert int.from_bytes(b'\xfc\x00', byteorder='big', signed=True) == -1024
assert int.from_bytes(b'\xfc\x00', byteorder='big', signed=False) == 64512
assert int.from_bytes([255, 0, 0], byteorder='big') == 16711680
# issue #100
class A:
if True:
def aaa(self, x):
return x
class B(A):
if True:
def aaa(self, x):
return super().aaa(x)
b = B()
assert b.aaa(0)==0
# issue 108
def funcattrs(**kwds):
def decorate(func):
func.__dict__.update(kwds)
return func
return decorate
class C(object):
@funcattrs(abc=1, xyz="haha")
@funcattrs(booh=42)
def foo(self): return 42
assert C().foo() == 42
assert C.foo.abc == 1
assert C.foo.xyz == "haha"
assert C.foo.booh == 42
# issue 118
class A:
def toString(self):
return "whatever"
assert A().toString() == "whatever"
# issue 126
class MyType(type):
def __getattr__(cls, attr):
return "whatever"
class MyParent(metaclass=MyType):
pass
class MyClass(MyParent):
pass
assert MyClass.spam == "whatever"
assert MyParent.spam == "whatever"
#issue 127
assert "aaa+AAA".split("+") == ['aaa', 'AAA']
# issue 121
def recur(change_namespace=0):
if change_namespace:
x = 2
return
else:
x = 1
def nested():
return x
recur(change_namespace=1)
return nested()
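# Each call to recur() gets its own local namespace: the recursive call binds
# its own x = 2 and returns, leaving the outer call's x = 1 (the one captured
# by nested) untouched.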
assert recur() == 1
#issue 131
import time
import datetime
target = time.struct_time([1970, 1, 1, 0, 0, 0, 3, 1, 0])
assert time.gmtime(0).args == target.args
target = time.struct_time([1970, 1, 1, 0, 1, 40, 3, 1, 0])
assert time.gmtime(100).args == target.args
target = time.struct_time([2001, 9, 9, 1, 46, 40, 6, 252, 0])
assert time.gmtime(1000000000).args == target.args
target1 = datetime.datetime(1969, 12, 31, 12, 0)
target2 = datetime.datetime(1970, 1, 1, 12, 0)
## depending on timezone this could be any hour near midnight Jan 1st, 1970
assert target1 <= datetime.datetime.fromtimestamp(0) <= target2
try:
time.asctime(1)
except TypeError:
pass
except:
ValueError("Should have raised TypeError")
try:
time.asctime((1,2,3,4))
except TypeError:
pass
except:
ValueError("Should have raised TypeError")
assert time.asctime(time.gmtime(0)) == 'Thu Jan 1 00:00:00 1970'
tup = tuple(time.gmtime(0).args)
assert time.asctime(tup) == 'Thu Jan 1 00:00:00 1970'
# issue 137
codeobj = compile("3 + 4", "<example>", "eval")
assert eval(codeobj) == 7
x = 7
codeobj = compile("x + 4", "<example>", "eval")
assert eval(codeobj) == 11
# issue 154
class MyMetaClass(type):
def __str__(cls):
return "Hello"
class MyClass(metaclass=MyMetaClass):
pass
assert str(MyClass) == "Hello"
# issue 155
class MyMetaClass(type):
pass
class MyClass(metaclass=MyMetaClass):
pass
MyOtherClass = MyMetaClass("DirectlyCreatedClass", (), {})
assert isinstance(MyClass, MyMetaClass), type(MyClass)
assert isinstance(MyOtherClass, MyMetaClass), type(MyOtherClass)
# traceback objects
import sys
import types
try:
raise ValueError
except ValueError:
tb = sys.exc_info()[2]
assert isinstance(tb, types.TracebackType)
# issue 156
from collections import abc
assert isinstance(dict(one=1), abc.Mapping)
assert issubclass(dict, abc.Mapping)
# issue 169
from random import seed, shuffle
first = list(range(20))
seed(31416)
shuffle(first)
second = list(range(20))
seed(31416)
shuffle(second)
assert first == second, "Same seed does not produce same random results"
# True and False are instances of int
assert isinstance(True, int)
assert isinstance(False, int)
# repr of type(None)
assert repr(type(None)) == "<class 'NoneType'>"
# nonlocal
def f():
def g():
nonlocal t
return t
t = 1
return g
assert f()()==1
def f():
k = 1
def g():
def r():
nonlocal k
return k+1
return r()
return g()
assert f()==2
# setting __class__
class A:pass
class B:
x = 1
a = A()
assert not hasattr(a, 'x')
a.__class__ = B
assert a.x == 1
# hashable objects
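# Dict lookup requires both an equal hash *and* __eq__ equality; the three
# classes below vary __eq__ to exercise each half of that contract.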
class X:
def __hash__(self): return hash(1.0)
def __eq__(self, other): return other == 1
a = {1: 'a', X(): 'b'}
assert a=={1:'b'}
assert X() in a
assert a[X()]=='b'
class X:
def __hash__(self): return hash('u')
a = {'u': 'a', X(): 'b'}
assert set(a.values())=={'a', 'b'}
assert not X() in a
b = {'u':'a'}
assert not X() in b
class X:
def __hash__(self): return hash('u')
def __eq__(self, other): return other=='u'
pass
a = {'u': 'a', X(): 'b'}
assert a == {'u': 'b'}
assert X() in a
assert a[X()]=='b'
# issue 176
x = [1,2,3]
assert sum(-y for y in x) == -6
# issue 186
source = [0, 1, 2, 3]
total = sum(source.pop() for _ in range(len(source)))
assert total == 6, "expected 6 but instead was %d" % total
# issue 177
import sys
ModuleType=type(sys)
foo=ModuleType("foo", "foodoc")
assert foo.__name__=="foo"
assert foo.__doc__=="foodoc"
#assert type(foo.__dict__) == dict
# issue 183
x=4
cd=dict(globals())
cd.update(locals())
exec("x=x+4",cd)
assert x == 4
assert cd['x'] == 8
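# exec() wrote into the supplied dict cd; the module-level x is untouched.
# The same holds below when a compiled code object is exec'd against yd.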
y=5
yd=dict(globals())
yd.update(locals())
co=compile("y=y+4","","exec")
exec(co,yd)
assert yd['y'] == 9
assert y == 5
# issue 201
import json
d=json.loads("""{"a":1,"b":2.1}""")
assert d == {'a': 1, 'b': 2.1}
assert type(d['a']) == int
assert type(d['b']) == float
# issue 203
def f(z):
z += 1
return z
x = 1.0
assert x != f(x)
# issue 204
import math
m, e = math.frexp(abs(123.456))
assert m == 0.9645
assert m * (1 << 24) == 16181624.832
# issue 207 : bitwise ops on values at and beyond the 32-bit signed boundary
for x in range(0x7ffffff0, 0x8000000f):
assert x & x == x, "%s & %s == %s" % (hex(x), hex(x), hex(x & x))
assert x | x == x, "%s | %s == %s" % (hex(x), hex(x), hex(x | x))
for x in range(0x17ffffff0, 0x17fffffff):
assert x & x == x, "%s & %s == %s" % (hex(x), hex(x), hex(x & x))
assert x | x == x, "%s | %s == %s" % (hex(x), hex(x), hex(x | x))
# issue 208
a=5
assert globals().get('a') == 5
# not an official issue
class Cmp:
def __init__(self,arg):
self.arg = arg
def __repr__(self):
return '<Cmp %s>' % self.arg
def __eq__(self, other):
return self.arg == other
a=Cmp(1)
b=Cmp(1)
assert a == b
assert not (a != b)
# issue 218
a = [1,2,3]
a *= 2
assert a == [1, 2, 3, 1, 2, 3]
# bug with property setter
class Test:
@property
def clicked(self):
return self.func
@clicked.setter
def clicked(self, callback):
self.func = callback
t = Test()
t.clicked = lambda x: x+7 #"clicked"
assert t.clicked(7) == 14
# issue 249
x = [a.strip() for a in [
" foo ",
" bar ",
]]
assert x == ['foo', 'bar']
# issue 250
assert 2**3**4 == 2417851639229258349412352
# issue 258
a = [1, 2, 3]
b, *c = a
assert c == [2, 3]
# issue 261 (__slots__)
class A:
__slots__ = 'x',
A.x
a = A()
a.x = 9
assert a.x == 9
try:
a.y = 0
except AttributeError:
pass
except:
raise
# issue 274
import base64
b = bytearray(b'<Z\x00N')
b64 = base64.b64encode( b )
assert b64 == b'PFoATg=='
import base64
buf = bytearray(b'EZ\x86\xdd\xabN\x86\xdd\xabNE[\x86\xdd\xabN\x86\xdd\xabN')
b64 = base64.b64encode( buf )
assert b64 == b'RVqG3atOht2rTkVbht2rTobdq04='
# issue 279
x = 0
if False: x+=2;x+=3
for n in range(2): x+=1;x+=1
assert x==4
# issue 280
for n in range(5):
pass
assert n==4
print('passed all tests')
| bsd-3-clause | 1,729,866,972,702,748,200 | 19.111111 | 101 | 0.614487 | false |
hradec/gaffer | python/GafferTest/IECorePreviewTest/MessagesTest.py | 7 | 11804 | ##########################################################################
#
# Copyright (c) 2020, Cinesite VFX Ltd. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of Cinesite VFX Ltd. nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import unittest
import Gaffer
import GafferTest
import IECore
from Gaffer.Private.IECorePreview import Message
from Gaffer.Private.IECorePreview import Messages
from Gaffer.Private.IECorePreview import MessagesData
class MessagesTest( GafferTest.TestCase ) :
def testMessage( self ) :
m = Message( IECore.MessageHandler.Level.Debug, "context", "message" )
self.assertEqual( m.level, IECore.MessageHandler.Level.Debug )
self.assertEqual( m.context, "context" )
self.assertEqual( m.message, "message" )
with self.assertRaises( AttributeError ) :
m.level = IECore.MessageHandler.Level.Info
with self.assertRaises( AttributeError ) :
m.context = ""
with self.assertRaises( AttributeError ) :
m.message = ""
def testData( self ) :
m1 = Messages()
m1d = MessagesData( m1 )
self.assertEqual( repr(m1d), "Gaffer.Private.IECorePreview.MessagesData()" )
m2d = m1d.copy()
m2 = m2d.value
m2.add( Message( IECore.MessageHandler.Level.Info, "testData", "message" ) )
self.assertEqual( m1.size(), 0 )
self.assertEqual( m2.size(), 1 )
with self.assertRaises( IECore.Exception ) :
repr(m2d)
def testMessageEquality( self ) :
m1 = Message( IECore.MessageHandler.Level.Debug, "context", "message" )
m2 = Message( IECore.MessageHandler.Level.Debug, "context", "message" )
m3 = Message( IECore.MessageHandler.Level.Info, "context", "message" )
m4 = Message( IECore.MessageHandler.Level.Debug, "context2", "message" )
m5 = Message( IECore.MessageHandler.Level.Debug, "context", "message2" )
self.assertEqual( m1, m2 )
self.assertNotEqual( m1, m3 )
self.assertTrue( m1 == m2 )
self.assertTrue( m1 != m3 )
def testMessageHash( self ) :
h = IECore.MurmurHash()
h1 = IECore.MurmurHash()
m1 = Message( IECore.MessageHandler.Level.Debug, "context", "message" )
m1.hash( h1 )
h2 = IECore.MurmurHash()
m2 = Message( IECore.MessageHandler.Level.Info, "context", "message" )
m2.hash( h2 )
h3 = IECore.MurmurHash()
m3 = Message( IECore.MessageHandler.Level.Debug, "", "message" )
m3.hash( h3 )
h4 = IECore.MurmurHash()
m4 = Message( IECore.MessageHandler.Level.Debug, "context", "" )
m4.hash( h4 )
# Check hashes are unique
hashes = [ h, h1, h2, h3, h4 ]
self.assertEqual( len(set(hashes)), len(hashes) )
# Check are stable
h4b = IECore.MurmurHash()
m4.hash( h4b )
self.assertEqual( h4b, h4 )
h5 = IECore.MurmurHash()
m5 = Message( IECore.MessageHandler.Level.Debug, "context", "" )
m5.hash( h5 )
self.assertEqual( h4, h5 )
# Test python hashing
allMessages = [ m1, m2, m3, m4, m5 ]
differentMsgs = [ m1, m2, m3, m4 ]
self.assertEqual( len(set(allMessages)), len(differentMsgs) )
def testMessages( self ) :
m = Messages()
self.assertEqual( m.size(), 0 )
self.assertEqual( len(m), 0 )
Level = IECore.MessageHandler.Level
for l in ( Level.Error, Level.Warning, Level.Info, Level.Debug ) :
self.assertEqual( m.count( l ), 0 )
for i in range( 20 ) :
m.add( Message( IECore.MessageHandler.Level( i % 4 ), "testMessages", str(i) ) )
self.assertEqual( m.size(), i + 1 )
self.assertEqual( len(m), m.size() )
for i in range( 20 ) :
self.assertEqual( m[i].level, IECore.MessageHandler.Level( i % 4 ) )
self.assertEqual( m[i].context, "testMessages" )
self.assertEqual( m[i].message, str(i) )
m.clear()
self.assertEqual( m.size(), 0 )
def testIndexing( self ) :
messages = (
Message( IECore.MessageHandler.Level.Debug, "testIndexing", "message1" ),
Message( IECore.MessageHandler.Level.Info, "testIndexing", "message2" ),
Message( IECore.MessageHandler.Level.Warning, "testIndexing", "message3" ),
Message( IECore.MessageHandler.Level.Error, "testIndexing", "message4" )
)
m = Messages()
for msg in messages :
m.add( msg )
for i in range( len(messages) ) :
self.assertEqual( m[i], messages[i] )
if i > 0 :
self.assertEqual( m[-i], messages[-i] )
with self.assertRaises( IndexError ) :
m[ len(m) ]
with self.assertRaises( IndexError ) :
m[ - ( len(m) + 1 ) ]
def testMessagesCopy( self ) :
m1 = Messages()
for i in range( 11 ) :
m1.add( Message( IECore.MessageHandler.Level( i % 4 ), "testMessagesCopy", str(i) ) )
m2 = m1
m3 = Messages( m1 )
self.assertEqual( m1, m2 )
self.assertEqual( m1, m3 )
self.assertEqual( m2, m3 )
# Check copies are de-coupled
m2.add( Message( IECore.MessageHandler.Level.Info, "testMessagesCopy", "message" ) )
self.assertEqual( m1, m2 )
self.assertNotEqual( m2, m3 )
m3.add( Message( IECore.MessageHandler.Level.Error, "testMessagesCopy", "message" ) )
self.assertEqual( m1, m2 )
self.assertNotEqual( m2, m3 )
def testMessagesEquality( self ) :
messages = [
Message( IECore.MessageHandler.Level( i % 4 ), "testMessagesEquality", str(i) )
for i in range( 10 )
]
m1 = Messages()
m2 = Messages()
for msg in messages :
m1.add( msg )
m2.add( msg )
self.assertEqual( m1, m2 )
self.assertFalse( m1 != m2 )
m1.clear()
self.assertNotEqual( m1, m2 )
self.assertTrue( m1 != m2 )
def testMessagesHash( self ) :
m1 = Messages()
h = m1.hash()
lastHash = h
for i in range( 10 ) :
m1.add( Message( IECore.MessageHandler.Level.Debug, "testMessagesHash", "" ) )
newHash = m1.hash()
self.assertNotEqual( newHash, lastHash )
lastHash = newHash
# check stable
self.assertEqual( m1.hash(), lastHash )
m2 = Messages( m1 )
self.assertEqual( m2.hash(), m1.hash() )
m3 = Messages()
for i in range( 10 ) :
m3.add( Message( IECore.MessageHandler.Level.Debug, "testMessagesHash", "" ) )
self.assertEqual( len(set( ( m1, m2, m3 ) ) ), 1 )
m1.clear()
self.assertEqual( m1.hash(), h )
self.assertNotEqual( m1.hash(), m2.hash() )
def testMessagesCount( self ) :
Level = IECore.MessageHandler.Level
messageCounts = ( ( Level.Error, 1 ), ( Level.Warning, 2 ), ( Level.Info, 3 ), ( Level.Debug, 4 ) )
m = Messages()
self.assertEqual( { m.count(l) for l, c in messageCounts }, { 0 } )
self.assertEqual( m.count( Level.Invalid ), 0 )
for level, count in messageCounts :
for i in range( count ) :
m.add( Message( level, "testMessagesCount", "Message %d" % i ) )
self.assertEqual( [ m.count(l) for l, c in messageCounts ], [ c for l, c in messageCounts ] )
m.clear()
self.assertEqual( { m.count(l) for l, c in messageCounts }, { 0 } )
@GafferTest.TestRunner.PerformanceTestMethod()
def testFirstDifference( self ) :
def generateMessages( count, context ) :
m = Messages()
appendMessages( m, count, context )
return m
def appendMessages( messages, count, context ) :
for i in range( count ) :
messages.add( Message( IECore.MessageHandler.Level( i % 4 ), context, "message %d" % i ) )
# NB, bucketSize is 100 in the current implementation, we need to
# definitely verify the results of this method in multi-bucket
# scenarios, along with incomplete buckets.
# Test one empty
m1 = Messages()
m2 = generateMessages( 10, "m" )
self.assertIsNone( m1.firstDifference( m2 ) )
self.assertEqual( m2.firstDifference( m1 ), 0 )
# Test equal
m1 = generateMessages( 1234, "m" )
m2 = Messages( m1 )
self.assertIsNone( m1.firstDifference( m2 ) )
self.assertIsNone( m2.firstDifference( m1 ) )
# Test all different
m1 = generateMessages( 1234, "a" )
m2 = generateMessages( 1234, "b" )
self.assertEqual( m1.firstDifference( m2 ), 0 )
self.assertEqual( m2.firstDifference( m1 ), 0 )
# Test varying length
m1 = generateMessages( 1102, "a" )
m2 = Messages( m1 )
appendMessages( m2, 100, "a" )
self.assertIsNone( m1.firstDifference( m2 ) )
self.assertEqual( m2.firstDifference( m1 ), 1102 )
# Test some different
m1 = generateMessages( 47, "a" )
m2 = Messages( m1 )
appendMessages( m1, 2, "a" )
appendMessages( m2, 2, "b" )
self.assertEqual( m1.firstDifference( m2 ), 47 )
self.assertEqual( m2.firstDifference( m1 ), 47 )
m1 = generateMessages( 1030, "a" )
m2 = Messages( m1 )
appendMessages( m1, 300, "b" )
appendMessages( m2, 302, "a" )
self.assertEqual( m1.firstDifference( m2 ), 1030 )
self.assertEqual( m2.firstDifference( m1 ), 1030 )
# Test comparison optimisation
m1 = generateMessages( 30005, "a" )
m2 = Messages( m1 )
appendMessages( m1, 1, "a" )
appendMessages( m2, 1, "b" )
with GafferTest.TestRunner.PerformanceScope() :
self.assertEqual( m1.firstDifference( m2 ), 30005 )
@GafferTest.TestRunner.PerformanceTestMethod()
def testMessagesCopyPerformanceS( self ) :
numMessages = 5000
numCopies = 100000
m = Messages()
for i in range( numMessages ) :
m.add( Message( IECore.MessageHandler.Level( i % 4 ), "testMessagesCopyPerformanceS", str(i) ) )
with GafferTest.TestRunner.PerformanceScope() :
GafferTest.testMessagesCopyPerformance( m, numCopies )
@GafferTest.TestRunner.PerformanceTestMethod()
def testMessagesCopyPerformanceM( self ) :
numMessages = 50000
numCopies = 100000
m = Messages()
for i in range( numMessages ) :
m.add( Message( IECore.MessageHandler.Level( i % 4 ), "testMessagesCopyPerformanceM", str(i) ) )
with GafferTest.TestRunner.PerformanceScope() :
GafferTest.testMessagesCopyPerformance( m, numCopies )
@GafferTest.TestRunner.PerformanceTestMethod()
def testMessagesCopyPerformanceL( self ) :
numMessages = 500000
numCopies = 1000
m = Messages()
for i in range( numMessages ) :
m.add( Message( IECore.MessageHandler.Level( i % 4 ), "testMessagesCopyPerformanceL", str(i) ) )
with GafferTest.TestRunner.PerformanceScope() :
GafferTest.testMessagesCopyPerformance( m, numCopies )
@GafferTest.TestRunner.PerformanceTestMethod()
def testMessagesAddPerformance( self ) :
GafferTest.testMessagesAddPerformance( 1000000 )
def testMessagesValueReuse( self ):
GafferTest.testMessagesValueReuse()
def testMessagesConstness( self ) :
GafferTest.testMessagesConstness()
if __name__ == "__main__":
unittest.main()
| bsd-3-clause | -9,020,621,841,446,465,000 | 27.650485 | 101 | 0.674602 | false |
google/timesketch | contrib/gcs_importer.py | 1 | 7472 | # Copyright 2020 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Cloud Storage importer."""
import argparse
import time
import os
import sys
import uuid
import json
import logging
from werkzeug.exceptions import Forbidden
from timesketch.app import create_app
from timesketch.lib import tasks
from timesketch.models import db_session
from timesketch.models.sketch import SearchIndex
from timesketch.models.sketch import Sketch
from timesketch.models.sketch import Timeline
from timesketch.models.user import User
try:
from google.cloud import pubsub_v1
from google.cloud import storage
except ImportError:
sys.exit('ERROR: You are missing Google Cloud libraries')
# Create logger
logger = logging.getLogger('gcs_importer')
logger.setLevel(logging.DEBUG)
handler = logging.StreamHandler()
handler.setLevel(logging.DEBUG)
formatter = logging.Formatter("%(asctime)s [%(levelname)s] %(message)s")
handler.setFormatter(formatter)
logger.addHandler(handler)
def download_from_gcs(gcs_base_path, filename):
"""Download file from Google Cloud Storage (GCS).
Args:
gcs_base_path: (str) GCS bucket path
filename: (str) Filename of the file to download
Returns:
(str) Path to downloaded file
"""
storage_client = storage.Client(args.project)
bucket = storage_client.get_bucket(args.bucket)
gcs_full_path = os.path.join(gcs_base_path, filename)
local_path = os.path.join(args.output, filename)
blob = bucket.blob(gcs_full_path)
blob.download_to_filename(local_path)
logger.info('Downloaded file from GCS: {}'.format(local_path))
return local_path
def setup_sketch(timeline_name, index_name, username, sketch_id=None):
"""Use existing sketch or create a new sketch.
Args:
timeline_name: (str) Name of the Timeline
index_name: (str) Name of the index
username: (str) Who should own the timeline
sketch_id: (str) Optional sketch_id to add timeline to
Returns:
(tuple) sketch ID and timeline ID as integers
"""
with app.app_context():
user = User.get_or_create(username=username)
sketch = None
if sketch_id:
try:
sketch = Sketch.query.get_with_acl(sketch_id, user=user)
logger.info('Using existing sketch: {} ({})'.format(
sketch.name, sketch.id))
except Forbidden:
pass
if not (sketch or sketch_id):
# Create a new sketch.
sketch_name = 'Turbinia: {}'.format(timeline_name)
sketch = Sketch(
name=sketch_name, description=sketch_name, user=user)
# Need to commit here to be able to set permissions later.
db_session.add(sketch)
db_session.commit()
sketch.grant_permission(permission='read', user=user)
sketch.grant_permission(permission='write', user=user)
sketch.grant_permission(permission='delete', user=user)
sketch.status.append(sketch.Status(user=None, status='new'))
db_session.add(sketch)
db_session.commit()
logger.info('Created new sketch: {} ({})'.format(
sketch.name, sketch.id))
searchindex = SearchIndex.get_or_create(
name=timeline_name, description='Created by Turbinia.', user=user,
index_name=index_name)
searchindex.grant_permission(permission='read', user=user)
searchindex.grant_permission(permission='write', user=user)
searchindex.grant_permission(permission='delete', user=user)
searchindex.set_status('processing')
db_session.add(searchindex)
db_session.commit()
timeline = Timeline(
name=searchindex.name, description=searchindex.description,
sketch=sketch, user=user, searchindex=searchindex)
# If the user doesn't have write access to the sketch then create the
# timeline but don't attach it to the sketch.
if not sketch.has_permission(user, 'write'):
timeline.sketch = None
else:
sketch.timelines.append(timeline)
db_session.add(timeline)
db_session.commit()
timeline.set_status('processing')
return sketch.id, timeline.id
def callback(message):
"""Google PubSub callback.
This function is called on all incoming messages on the configured topic.
Args:
message: (dict) PubSub message
"""
message.ack()
gcs_full_path = message.attributes.get('objectId')
# Exit early if the file type is wrong.
if not gcs_full_path.endswith('.plaso.metadata.json'):
return
gcs_base_path = os.path.dirname(gcs_full_path)
gcs_metadata_filename = os.path.basename(gcs_full_path)
gcs_base_filename = gcs_metadata_filename.replace('.metadata.json', '')
gcs_plaso_filename = gcs_base_filename
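
    # Illustrative naming scheme (hypothetical objectId): a message for
    # 'output/foo.plaso.metadata.json' yields gcs_base_path 'output',
    # gcs_metadata_filename 'foo.plaso.metadata.json' and gcs_plaso_filename
    # 'foo.plaso'.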
# Download files from GCS
local_metadata_file = download_from_gcs(
gcs_base_path, gcs_metadata_filename)
local_plaso_file = download_from_gcs(gcs_base_path, gcs_plaso_filename)
with open(local_metadata_file, 'r') as metadata_file:
metadata = json.load(metadata_file)
username = metadata.get('requester')
sketch_id_from_metadata = metadata.get('sketch_id')
if not username:
logger.error('Missing username')
return
timeline_name = os.path.splitext(gcs_plaso_filename)[0]
index_name = uuid.uuid4().hex
    sketch_id, timeline_id = setup_sketch(
        timeline_name, index_name, username, sketch_id_from_metadata)
# Start indexing
with app.app_context():
pipeline = tasks.build_index_pipeline(
file_path=local_plaso_file, timeline_name=gcs_base_filename,
index_name=index_name, file_extension='plaso', sketch_id=sketch_id,
timeline_id=timeline_id)
pipeline.apply_async()
    logger.info('File sent for indexing: {}'.format(gcs_base_filename))
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='GCS importer')
parser.add_argument('--project', help='Google Cloud Project ID')
parser.add_argument('--bucket',
help='Google Cloud Storage bucket to monitor')
parser.add_argument('--subscription',
help='Google Cloud PubSub subscription')
parser.add_argument('--output', default='/tmp',
help='Directory for downloads')
args = parser.parse_args()
# Create flask app
app = create_app()
# Setup Google Cloud Pub/Sub
subscriber = pubsub_v1.SubscriberClient()
subscription_path = subscriber.subscription_path(
args.project, args.subscription)
subscriber.subscribe(subscription_path, callback=callback)
logger.info('Listening on PubSub queue: {}'.format(args.subscription))
while True:
time.sleep(10)
| apache-2.0 | -569,849,790,712,848,500 | 34.580952 | 79 | 0.662875 | false |
hellhovnd/django | django/contrib/admin/sites.py | 4 | 18932 | from functools import update_wrapper
from django.http import Http404, HttpResponseRedirect
from django.contrib.admin import ModelAdmin, actions
from django.contrib.admin.forms import AdminAuthenticationForm
from django.contrib.auth import logout as auth_logout, REDIRECT_FIELD_NAME
from django.contrib.contenttypes import views as contenttype_views
from django.views.decorators.csrf import csrf_protect
from django.db.models.base import ModelBase
from django.core.exceptions import ImproperlyConfigured
from django.core.urlresolvers import reverse, NoReverseMatch
from django.template.response import TemplateResponse
from django.utils import six
from django.utils.text import capfirst
from django.utils.translation import ugettext as _
from django.views.decorators.cache import never_cache
from django.conf import settings
LOGIN_FORM_KEY = 'this_is_the_login_form'
class AlreadyRegistered(Exception):
pass
class NotRegistered(Exception):
pass
class AdminSite(object):
"""
An AdminSite object encapsulates an instance of the Django admin application, ready
to be hooked in to your URLconf. Models are registered with the AdminSite using the
register() method, and the get_urls() method can then be used to access Django view
functions that present a full admin interface for the collection of registered
models.
"""
login_form = None
index_template = None
app_index_template = None
login_template = None
logout_template = None
password_change_template = None
password_change_done_template = None
def __init__(self, name='admin', app_name='admin'):
self._registry = {} # model_class class -> admin_class instance
self.name = name
self.app_name = app_name
self._actions = {'delete_selected': actions.delete_selected}
self._global_actions = self._actions.copy()
def register(self, model_or_iterable, admin_class=None, **options):
"""
Registers the given model(s) with the given admin class.
The model(s) should be Model classes, not instances.
If an admin class isn't given, it will use ModelAdmin (the default
admin options). If keyword arguments are given -- e.g., list_display --
they'll be applied as options to the admin class.
If a model is already registered, this will raise AlreadyRegistered.
If a model is abstract, this will raise ImproperlyConfigured.
"""
if not admin_class:
admin_class = ModelAdmin
# Don't import the humongous validation code unless required
if admin_class and settings.DEBUG:
from django.contrib.admin.validation import validate
else:
validate = lambda model, adminclass: None
if isinstance(model_or_iterable, ModelBase):
model_or_iterable = [model_or_iterable]
for model in model_or_iterable:
if model._meta.abstract:
raise ImproperlyConfigured('The model %s is abstract, so it '
'cannot be registered with admin.' % model.__name__)
if model in self._registry:
raise AlreadyRegistered('The model %s is already registered' % model.__name__)
# Ignore the registration if the model has been
# swapped out.
if not model._meta.swapped:
# If we got **options then dynamically construct a subclass of
# admin_class with those **options.
if options:
# For reasons I don't quite understand, without a __module__
# the created class appears to "live" in the wrong place,
# which causes issues later on.
options['__module__'] = __name__
admin_class = type("%sAdmin" % model.__name__, (admin_class,), options)
# Validate (which might be a no-op)
validate(admin_class, model)
# Instantiate the admin class to save in the registry
self._registry[model] = admin_class(model, self)
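
    # Illustrative usage of the **options path above (hypothetical model and
    # site names):
    #
    #     site.register(Author, list_display=('name',))
    #
    # builds a dynamic "AuthorAdmin" ModelAdmin subclass with that
    # list_display and registers Author against it.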
def unregister(self, model_or_iterable):
"""
Unregisters the given model(s).
If a model isn't already registered, this will raise NotRegistered.
"""
if isinstance(model_or_iterable, ModelBase):
model_or_iterable = [model_or_iterable]
for model in model_or_iterable:
if model not in self._registry:
raise NotRegistered('The model %s is not registered' % model.__name__)
del self._registry[model]
def add_action(self, action, name=None):
"""
Register an action to be available globally.
"""
name = name or action.__name__
self._actions[name] = action
self._global_actions[name] = action
def disable_action(self, name):
"""
Disable a globally-registered action. Raises KeyError for invalid names.
"""
del self._actions[name]
def get_action(self, name):
"""
Explicitly get a registered global action whether it's enabled or
not. Raises KeyError for invalid names.
"""
return self._global_actions[name]
@property
def actions(self):
"""
Get all the enabled actions as an iterable of (name, func).
"""
return six.iteritems(self._actions)
def has_permission(self, request):
"""
Returns True if the given HttpRequest has permission to view
*at least one* page in the admin site.
"""
return request.user.is_active and request.user.is_staff
def check_dependencies(self):
"""
Check that all things needed to run the admin have been correctly installed.
The default implementation checks that LogEntry, ContentType and the
auth context processor are installed.
"""
from django.contrib.admin.models import LogEntry
from django.contrib.contenttypes.models import ContentType
if not LogEntry._meta.installed:
raise ImproperlyConfigured("Put 'django.contrib.admin' in your "
"INSTALLED_APPS setting in order to use the admin application.")
if not ContentType._meta.installed:
raise ImproperlyConfigured("Put 'django.contrib.contenttypes' in "
"your INSTALLED_APPS setting in order to use the admin application.")
if not ('django.contrib.auth.context_processors.auth' in settings.TEMPLATE_CONTEXT_PROCESSORS or
'django.core.context_processors.auth' in settings.TEMPLATE_CONTEXT_PROCESSORS):
raise ImproperlyConfigured("Put 'django.contrib.auth.context_processors.auth' "
"in your TEMPLATE_CONTEXT_PROCESSORS setting in order to use the admin application.")
def admin_view(self, view, cacheable=False):
"""
Decorator to create an admin view attached to this ``AdminSite``. This
wraps the view and provides permission checking by calling
``self.has_permission``.
You'll want to use this from within ``AdminSite.get_urls()``:
class MyAdminSite(AdminSite):
def get_urls(self):
from django.conf.urls import patterns, url
urls = super(MyAdminSite, self).get_urls()
urls += patterns('',
url(r'^my_view/$', self.admin_view(some_view))
)
return urls
By default, admin_views are marked non-cacheable using the
``never_cache`` decorator. If the view can be safely cached, set
cacheable=True.
"""
def inner(request, *args, **kwargs):
if LOGIN_FORM_KEY in request.POST and request.user.is_authenticated():
auth_logout(request)
if not self.has_permission(request):
if request.path == reverse('admin:logout',
current_app=self.name):
index_path = reverse('admin:index', current_app=self.name)
return HttpResponseRedirect(index_path)
return self.login(request)
return view(request, *args, **kwargs)
if not cacheable:
inner = never_cache(inner)
# We add csrf_protect here so this function can be used as a utility
# function for any view, without having to repeat 'csrf_protect'.
if not getattr(view, 'csrf_exempt', False):
inner = csrf_protect(inner)
return update_wrapper(inner, view)
def get_urls(self):
from django.conf.urls import patterns, url, include
if settings.DEBUG:
self.check_dependencies()
def wrap(view, cacheable=False):
def wrapper(*args, **kwargs):
return self.admin_view(view, cacheable)(*args, **kwargs)
return update_wrapper(wrapper, view)
# Admin-site-wide views.
urlpatterns = patterns('',
url(r'^$',
wrap(self.index),
name='index'),
url(r'^logout/$',
wrap(self.logout),
name='logout'),
url(r'^password_change/$',
wrap(self.password_change, cacheable=True),
name='password_change'),
url(r'^password_change/done/$',
wrap(self.password_change_done, cacheable=True),
name='password_change_done'),
url(r'^jsi18n/$',
wrap(self.i18n_javascript, cacheable=True),
name='jsi18n'),
url(r'^r/(?P<content_type_id>\d+)/(?P<object_id>.+)/$',
wrap(contenttype_views.shortcut),
name='view_on_site'),
url(r'^(?P<app_label>\w+)/$',
wrap(self.app_index),
name='app_list')
)
# Add in each model's views.
for model, model_admin in six.iteritems(self._registry):
urlpatterns += patterns('',
url(r'^%s/%s/' % (model._meta.app_label, model._meta.model_name),
include(model_admin.urls))
)
return urlpatterns
@property
def urls(self):
return self.get_urls(), self.app_name, self.name
def password_change(self, request):
"""
Handles the "change password" task -- both form display and validation.
"""
from django.contrib.auth.views import password_change
url = reverse('admin:password_change_done', current_app=self.name)
defaults = {
'current_app': self.name,
'post_change_redirect': url
}
if self.password_change_template is not None:
defaults['template_name'] = self.password_change_template
return password_change(request, **defaults)
def password_change_done(self, request, extra_context=None):
"""
Displays the "success" page after a password change.
"""
from django.contrib.auth.views import password_change_done
defaults = {
'current_app': self.name,
'extra_context': extra_context or {},
}
if self.password_change_done_template is not None:
defaults['template_name'] = self.password_change_done_template
return password_change_done(request, **defaults)
def i18n_javascript(self, request):
"""
Displays the i18n JavaScript that the Django admin requires.
This takes into account the USE_I18N setting. If it's set to False, the
generated JavaScript will be leaner and faster.
"""
if settings.USE_I18N:
from django.views.i18n import javascript_catalog
else:
from django.views.i18n import null_javascript_catalog as javascript_catalog
return javascript_catalog(request, packages=['django.conf', 'django.contrib.admin'])
@never_cache
def logout(self, request, extra_context=None):
"""
Logs out the user for the given HttpRequest.
This should *not* assume the user is already logged in.
"""
from django.contrib.auth.views import logout
defaults = {
'current_app': self.name,
'extra_context': extra_context or {},
}
if self.logout_template is not None:
defaults['template_name'] = self.logout_template
return logout(request, **defaults)
@never_cache
def login(self, request, extra_context=None):
"""
Displays the login form for the given HttpRequest.
"""
from django.contrib.auth.views import login
context = {
'title': _('Log in'),
'app_path': request.get_full_path(),
REDIRECT_FIELD_NAME: request.get_full_path(),
}
context.update(extra_context or {})
defaults = {
'extra_context': context,
'current_app': self.name,
'authentication_form': self.login_form or AdminAuthenticationForm,
'template_name': self.login_template or 'admin/login.html',
}
return login(request, **defaults)
@never_cache
def index(self, request, extra_context=None):
"""
Displays the main admin index page, which lists all of the installed
apps that have been registered in this site.
"""
app_dict = {}
user = request.user
for model, model_admin in self._registry.items():
app_label = model._meta.app_label
has_module_perms = user.has_module_perms(app_label)
if has_module_perms:
perms = model_admin.get_model_perms(request)
# Check whether user has any perm for this module.
# If so, add the module to the model_list.
if True in perms.values():
info = (app_label, model._meta.model_name)
model_dict = {
'name': capfirst(model._meta.verbose_name_plural),
'object_name': model._meta.object_name,
'perms': perms,
}
if perms.get('change', False):
try:
model_dict['admin_url'] = reverse('admin:%s_%s_changelist' % info, current_app=self.name)
except NoReverseMatch:
pass
if perms.get('add', False):
try:
model_dict['add_url'] = reverse('admin:%s_%s_add' % info, current_app=self.name)
except NoReverseMatch:
pass
if app_label in app_dict:
app_dict[app_label]['models'].append(model_dict)
else:
app_dict[app_label] = {
'name': app_label.title(),
'app_label': app_label,
'app_url': reverse('admin:app_list', kwargs={'app_label': app_label}, current_app=self.name),
'has_module_perms': has_module_perms,
'models': [model_dict],
}
# Sort the apps alphabetically.
app_list = list(six.itervalues(app_dict))
app_list.sort(key=lambda x: x['name'])
# Sort the models alphabetically within each app.
for app in app_list:
app['models'].sort(key=lambda x: x['name'])
context = {
'title': _('Site administration'),
'app_list': app_list,
}
context.update(extra_context or {})
return TemplateResponse(request, self.index_template or
'admin/index.html', context,
current_app=self.name)
def app_index(self, request, app_label, extra_context=None):
user = request.user
has_module_perms = user.has_module_perms(app_label)
app_dict = {}
for model, model_admin in self._registry.items():
if app_label == model._meta.app_label:
if has_module_perms:
perms = model_admin.get_model_perms(request)
# Check whether user has any perm for this module.
# If so, add the module to the model_list.
if True in perms.values():
info = (app_label, model._meta.model_name)
model_dict = {
'name': capfirst(model._meta.verbose_name_plural),
'object_name': model._meta.object_name,
'perms': perms,
}
if perms.get('change', False):
try:
model_dict['admin_url'] = reverse('admin:%s_%s_changelist' % info, current_app=self.name)
except NoReverseMatch:
pass
if perms.get('add', False):
try:
model_dict['add_url'] = reverse('admin:%s_%s_add' % info, current_app=self.name)
except NoReverseMatch:
pass
if app_dict:
                            app_dict['models'].append(model_dict)
else:
# First time around, now that we know there's
# something to display, add in the necessary meta
# information.
app_dict = {
'name': app_label.title(),
'app_label': app_label,
'app_url': '',
'has_module_perms': has_module_perms,
'models': [model_dict],
}
if not app_dict:
raise Http404('The requested admin page does not exist.')
# Sort the models alphabetically within each app.
app_dict['models'].sort(key=lambda x: x['name'])
context = {
'title': _('%s administration') % capfirst(app_label),
'app_list': [app_dict],
}
context.update(extra_context or {})
return TemplateResponse(request, self.app_index_template or [
'admin/%s/app_index.html' % app_label,
'admin/app_index.html'
], context, current_app=self.name)
# This global object represents the default admin site, for the common case.
# You can instantiate AdminSite in your own code to create a custom admin site.
site = AdminSite()
| bsd-3-clause | 3,075,761,306,525,674,000 | 40.336245 | 121 | 0.5618 | false |
unseenlaser/python-for-android | python-modules/twisted/twisted/internet/_dumbwin32proc.py | 60 | 11110 | # -*- test-case-name: twisted.test.test_process -*-
# Copyright (c) 2001-2009 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
http://isometric.sixsided.org/_/gates_in_the_head/
"""
import os
# Win32 imports
import win32api
import win32con
import win32event
import win32file
import win32pipe
import win32process
import win32security
import pywintypes
# security attributes for pipes
PIPE_ATTRS_INHERITABLE = win32security.SECURITY_ATTRIBUTES()
PIPE_ATTRS_INHERITABLE.bInheritHandle = 1
from zope.interface import implements
from twisted.internet.interfaces import IProcessTransport, IConsumer, IProducer
from twisted.python.win32 import quoteArguments
from twisted.internet import error
from twisted.python import failure
from twisted.internet import _pollingfile
from twisted.internet._baseprocess import BaseProcess
def debug(msg):
import sys
print msg
sys.stdout.flush()
class _Reaper(_pollingfile._PollableResource):
def __init__(self, proc):
self.proc = proc
def checkWork(self):
if win32event.WaitForSingleObject(self.proc.hProcess, 0) != win32event.WAIT_OBJECT_0:
return 0
exitCode = win32process.GetExitCodeProcess(self.proc.hProcess)
self.deactivate()
self.proc.processEnded(exitCode)
return 0
def _findShebang(filename):
"""
Look for a #! line, and return the value following the #! if one exists, or
None if this file is not a script.
I don't know if there are any conventions for quoting in Windows shebang
lines, so this doesn't support any; therefore, you may not pass any
arguments to scripts invoked as filters. That's probably wrong, so if
somebody knows more about the cultural expectations on Windows, please feel
free to fix.
This shebang line support was added in support of the CGI tests;
appropriately enough, I determined that shebang lines are culturally
accepted in the Windows world through this page::
http://www.cgi101.com/learn/connect/winxp.html
@param filename: str representing a filename
@return: a str representing another filename.
"""
f = file(filename, 'rU')
if f.read(2) == '#!':
exe = f.readline(1024).strip('\n')
return exe
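
# Illustrative behaviour (hypothetical path): a script whose first line is
# '#!C:\Python27\python.exe' makes _findShebang() return
# 'C:\Python27\python.exe'; any file that does not start with '#!' falls
# through and returns None.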
def _invalidWin32App(pywinerr):
"""
Determine if a pywintypes.error is telling us that the given process is
'not a valid win32 application', i.e. not a PE format executable.
@param pywinerr: a pywintypes.error instance raised by CreateProcess
@return: a boolean
"""
# Let's do this better in the future, but I have no idea what this error
# is; MSDN doesn't mention it, and there is no symbolic constant in
# win32process module that represents 193.
return pywinerr.args[0] == 193
class Process(_pollingfile._PollingTimer, BaseProcess):
"""A process that integrates with the Twisted event loop.
If your subprocess is a python program, you need to:
- Run python.exe with the '-u' command line option - this turns on
unbuffered I/O. Buffering stdout/err/in can cause problems, see e.g.
http://support.microsoft.com/default.aspx?scid=kb;EN-US;q1903
- If you don't want Windows messing with data passed over
stdin/out/err, set the pipes to be in binary mode::
        import os, sys, msvcrt
msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY)
msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
"""
implements(IProcessTransport, IConsumer, IProducer)
closedNotifies = 0
def __init__(self, reactor, protocol, command, args, environment, path):
_pollingfile._PollingTimer.__init__(self, reactor)
BaseProcess.__init__(self, protocol)
# security attributes for pipes
sAttrs = win32security.SECURITY_ATTRIBUTES()
sAttrs.bInheritHandle = 1
# create the pipes which will connect to the secondary process
self.hStdoutR, hStdoutW = win32pipe.CreatePipe(sAttrs, 0)
self.hStderrR, hStderrW = win32pipe.CreatePipe(sAttrs, 0)
hStdinR, self.hStdinW = win32pipe.CreatePipe(sAttrs, 0)
win32pipe.SetNamedPipeHandleState(self.hStdinW,
win32pipe.PIPE_NOWAIT,
None,
None)
# set the info structure for the new process.
StartupInfo = win32process.STARTUPINFO()
StartupInfo.hStdOutput = hStdoutW
StartupInfo.hStdError = hStderrW
StartupInfo.hStdInput = hStdinR
StartupInfo.dwFlags = win32process.STARTF_USESTDHANDLES
# Create new handles whose inheritance property is false
currentPid = win32api.GetCurrentProcess()
tmp = win32api.DuplicateHandle(currentPid, self.hStdoutR, currentPid, 0, 0,
win32con.DUPLICATE_SAME_ACCESS)
win32file.CloseHandle(self.hStdoutR)
self.hStdoutR = tmp
tmp = win32api.DuplicateHandle(currentPid, self.hStderrR, currentPid, 0, 0,
win32con.DUPLICATE_SAME_ACCESS)
win32file.CloseHandle(self.hStderrR)
self.hStderrR = tmp
tmp = win32api.DuplicateHandle(currentPid, self.hStdinW, currentPid, 0, 0,
win32con.DUPLICATE_SAME_ACCESS)
win32file.CloseHandle(self.hStdinW)
self.hStdinW = tmp
# Add the specified environment to the current environment - this is
# necessary because certain operations are only supported on Windows
# if certain environment variables are present.
env = os.environ.copy()
env.update(environment or {})
cmdline = quoteArguments(args)
# TODO: error detection here.
def doCreate():
self.hProcess, self.hThread, self.pid, dwTid = win32process.CreateProcess(
command, cmdline, None, None, 1, 0, env, path, StartupInfo)
try:
doCreate()
except pywintypes.error, pwte:
if not _invalidWin32App(pwte):
# This behavior isn't _really_ documented, but let's make it
# consistent with the behavior that is documented.
raise OSError(pwte)
else:
# look for a shebang line. Insert the original 'command'
# (actually a script) into the new arguments list.
sheb = _findShebang(command)
if sheb is None:
raise OSError(
"%r is neither a Windows executable, "
"nor a script with a shebang line" % command)
else:
args = list(args)
args.insert(0, command)
cmdline = quoteArguments(args)
origcmd = command
command = sheb
try:
# Let's try again.
doCreate()
except pywintypes.error, pwte2:
# d'oh, failed again!
if _invalidWin32App(pwte2):
raise OSError(
"%r has an invalid shebang line: "
"%r is not a valid executable" % (
origcmd, sheb))
raise OSError(pwte2)
# close handles which only the child will use
win32file.CloseHandle(hStderrW)
win32file.CloseHandle(hStdoutW)
win32file.CloseHandle(hStdinR)
# set up everything
self.stdout = _pollingfile._PollableReadPipe(
self.hStdoutR,
lambda data: self.proto.childDataReceived(1, data),
self.outConnectionLost)
self.stderr = _pollingfile._PollableReadPipe(
self.hStderrR,
lambda data: self.proto.childDataReceived(2, data),
self.errConnectionLost)
self.stdin = _pollingfile._PollableWritePipe(
self.hStdinW, self.inConnectionLost)
for pipewatcher in self.stdout, self.stderr, self.stdin:
self._addPollableResource(pipewatcher)
# notify protocol
self.proto.makeConnection(self)
self._addPollableResource(_Reaper(self))
def signalProcess(self, signalID):
if self.pid is None:
raise error.ProcessExitedAlready()
if signalID in ("INT", "TERM", "KILL"):
win32process.TerminateProcess(self.hProcess, 1)
def _getReason(self, status):
if status == 0:
return error.ProcessDone(status)
return error.ProcessTerminated(status)
def write(self, data):
"""Write data to the process' stdin."""
self.stdin.write(data)
def writeSequence(self, seq):
"""Write data to the process' stdin."""
self.stdin.writeSequence(seq)
def closeChildFD(self, fd):
if fd == 0:
self.closeStdin()
elif fd == 1:
self.closeStdout()
elif fd == 2:
self.closeStderr()
else:
raise NotImplementedError("Only standard-IO file descriptors available on win32")
def closeStdin(self):
"""Close the process' stdin.
"""
self.stdin.close()
def closeStderr(self):
self.stderr.close()
def closeStdout(self):
self.stdout.close()
def loseConnection(self):
"""Close the process' stdout, in and err."""
self.closeStdin()
self.closeStdout()
self.closeStderr()
def outConnectionLost(self):
self.proto.childConnectionLost(1)
self.connectionLostNotify()
def errConnectionLost(self):
self.proto.childConnectionLost(2)
self.connectionLostNotify()
def inConnectionLost(self):
self.proto.childConnectionLost(0)
self.connectionLostNotify()
def connectionLostNotify(self):
"""
Will be called 3 times, by stdout/err threads and process handle.
"""
self.closedNotifies += 1
self.maybeCallProcessEnded()
def maybeCallProcessEnded(self):
if self.closedNotifies == 3 and self.lostProcess:
win32file.CloseHandle(self.hProcess)
win32file.CloseHandle(self.hThread)
self.hProcess = None
self.hThread = None
BaseProcess.maybeCallProcessEnded(self)
# IConsumer
def registerProducer(self, producer, streaming):
self.stdin.registerProducer(producer, streaming)
def unregisterProducer(self):
self.stdin.unregisterProducer()
# IProducer
def pauseProducing(self):
self._pause()
def resumeProducing(self):
self._unpause()
def stopProducing(self):
self.loseConnection()
def __repr__(self):
"""
Return a string representation of the process.
"""
return "<%s pid=%s>" % (self.__class__.__name__, self.pid)
| apache-2.0 | -6,333,346,795,396,061,000 | 31.676471 | 93 | 0.617012 | false |
espadrine/opera | chromium/src/third_party/WebKit/Source/core/scripts/action_makenames.py | 2 | 7453 | #!/usr/bin/python
#
# Copyright (C) 2009 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# action_makenames.py is a harness script to connect actions sections of
# gyp-based builds to make_names.pl.
#
# usage: action_makenames.py OUTPUTS -- INPUTS [-- OPTIONS]
#
# Multiple OUTPUTS, INPUTS, and OPTIONS may be listed. The sections are
# separated by -- arguments.
#
# The directory name of the first output is chosen as the directory in which
# make_names will run. If the directory name for any subsequent output is
# different, those files will be moved to the desired directory.
#
# Multiple INPUTS may be listed. An input with a basename matching
# "make_names.pl" is taken as the path to that script. Inputs with names
# ending in TagNames.in or tags.in are taken as tag inputs. Inputs with names
# ending in AttributeNames.in or attrs.in are taken as attribute inputs. There
# may be at most one tag input and one attribute input. A make_names.pl input
# is required and at least one tag or attribute input must be present.
#
# OPTIONS is a list of additional options to pass to make_names.pl. This
# section need not be present.
import os
import posixpath
import shutil
import subprocess
import sys
def SplitArgsIntoSections(args):
sections = []
while len(args) > 0:
if not '--' in args:
# If there is no '--' left, everything remaining is an entire section.
dashes = len(args)
else:
dashes = args.index('--')
sections.append(args[:dashes])
# Next time through the loop, look at everything after this '--'.
if dashes + 1 == len(args):
# If the '--' is at the end of the list, we won't come back through the
# loop again. Add an empty section now corresponding to the nothingness
# following the final '--'.
args = []
sections.append(args)
else:
args = args[dashes + 1:]
return sections
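
# For example, SplitArgsIntoSections(['out.h', '--', 'in.pl', '--']) returns
# [['out.h'], ['in.pl'], []]: a trailing '--' produces an empty final
# section, which main() then treats as an empty OPTIONS list.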
def main(args):
sections = SplitArgsIntoSections(args[1:])
assert len(sections) == 2 or len(sections) == 3
(outputs, inputs) = sections[:2]
if len(sections) == 3:
options = sections[2]
else:
options = []
# Make all output pathnames absolute so that they can be accessed after
# changing directory.
for index in xrange(0, len(outputs)):
outputs[index] = os.path.abspath(outputs[index])
outputDir = os.path.dirname(outputs[0])
# Look at the inputs and figure out which ones are make_names.pl, tags, and
# attributes. There can be at most one of each, and those are the only
# input types supported. make_names.pl is required and at least one of tags
# and attributes is required.
makeNamesInput = None
tagInput = None
attrInput = None
eventsInput = None
for input in inputs:
# Make input pathnames absolute so they can be accessed after changing
# directory. On Windows, convert \ to / for inputs to the perl script to
# work around the intermix of activepython + cygwin perl.
inputAbs = os.path.abspath(input)
inputAbsPosix = inputAbs.replace(os.path.sep, posixpath.sep)
inputBasename = os.path.basename(input)
if inputBasename in ('make_names.pl', 'make_event_factory.pl', 'make_dom_exceptions.pl', 'make_settings.pl'):
assert makeNamesInput == None
makeNamesInput = inputAbs
elif inputBasename.endswith('TagNames.in') or inputBasename.endswith('tags.in'):
assert tagInput == None
tagInput = inputAbsPosix
elif inputBasename.endswith('AttributeNames.in') or inputBasename.endswith('attrs.in'):
assert attrInput == None
attrInput = inputAbsPosix
elif (inputBasename.endswith('EventTargetFactory.in') or inputBasename.endswith('EventNames.in')
or inputBasename.endswith('DOMExceptions.in') or inputBasename.endswith('Settings.in')):
eventsInput = inputAbsPosix
elif inputBasename.endswith('Names.in'):
options.append(inputAbsPosix)
elif inputBasename.endswith('.pm'):
continue
else:
assert False
assert makeNamesInput != None
assert tagInput != None or attrInput != None or eventsInput != None or ('--fonts' in options)
# scriptsPath is a Perl include directory, located relative to
# makeNamesInput.
scriptsPath = os.path.normpath(
os.path.join(os.path.dirname(makeNamesInput), os.pardir, 'scripts'))
# Change to the output directory because make_names.pl puts output in its
# working directory.
os.chdir(outputDir)
# Build up the command.
command = ['perl', '-I', scriptsPath, makeNamesInput]
if tagInput != None:
command.extend(['--tags', tagInput])
if attrInput != None:
command.extend(['--attrs', attrInput])
if eventsInput != None:
command.extend(['--input', eventsInput])
command.extend(options)
# Do it. check_call is new in 2.5, so simulate its behavior with call and
# assert.
returnCode = subprocess.call(command)
assert returnCode == 0
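    # (Note added for context: on Python 2.5+ the two lines above are
    # equivalent to subprocess.check_call(command), which raises
    # CalledProcessError on a non-zero exit status instead of failing the
    # assert.)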
# Go through the outputs. Any output that belongs in a different directory
# is moved. Do a copy and delete instead of rename for maximum portability.
# Note that all paths used in this section are still absolute.
for output in outputs:
thisOutputDir = os.path.dirname(output)
if thisOutputDir != outputDir:
outputBasename = os.path.basename(output)
src = os.path.join(outputDir, outputBasename)
dst = os.path.join(thisOutputDir, outputBasename)
shutil.copyfile(src, dst)
os.unlink(src)
return returnCode
if __name__ == '__main__':
sys.exit(main(sys.argv))
| bsd-3-clause | -1,392,541,420,756,519,700 | 39.950549 | 117 | 0.686972 | false |
Nick-Hall/gramps | gramps/plugins/quickview/siblings.py | 10 | 2730 | #
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2000-2007 Donald N. Allingham
# Copyright (C) 2007-2008 Brian G. Matherly
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
#
"""
Display a person's siblings in a report window
"""
from gramps.gen.simple import SimpleAccess, SimpleDoc
from gramps.gui.plug.quick import QuickTable
from gramps.gen.relationship import get_relationship_calculator
from gramps.gen.const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
def run(database, document, person):
"""
    Loops through the families that the person is a child in, and displays
the information about the other children.
"""
# setup the simple access functions
sdb = SimpleAccess(database)
sdoc = SimpleDoc(document)
stab = QuickTable(sdb)
rel_class = get_relationship_calculator(glocale)
# display the title
# feature request 2356: avoid genitive form
sdoc.title(_("Siblings of %s") % sdb.name(person))
sdoc.paragraph("")
stab.columns(_("Sibling"), _("Gender"), _("Birth Date"), _("Type"))
# grab our current id (self):
gid = sdb.gid(person)
# loop through each family in which the person is a child
document.has_data = False
for family in sdb.child_in(person):
# loop through each child in the family
for child in sdb.children(family):
# only display if this child is not the active person
if sdb.gid(child) != gid:
rel_str = rel_class.get_sibling_relationship_string(
rel_class.get_sibling_type(database, person, child),
person.get_gender(), child.get_gender())
else:
rel_str = _('self')
# pass row the child object to make link:
stab.row(child,
sdb.gender(child),
sdb.birth_or_fallback(child),
rel_str)
document.has_data = True
if document.has_data:
stab.write(sdoc)
else:
sdoc.header1(_("Not found") + "\n")
| gpl-2.0 | -6,824,467,217,679,716,000 | 36.916667 | 79 | 0.66044 | false |
wemanuel/smry | smry/server-auth/ls/google-cloud-sdk/lib/apiclient/errors.py | 12 | 3444 | #!/usr/bin/python2.4
#
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Errors for the library.
All exceptions defined by the library
should be defined in this file.
"""
import json
from oauth2client import util
class Error(Exception):
"""Base error for this module."""
pass
class HttpError(Error):
"""HTTP data was invalid or unexpected."""
@util.positional(3)
def __init__(self, resp, content, uri=None):
self.resp = resp
self.content = content
self.uri = uri
def _get_reason(self):
"""Calculate the reason for the error from the response content."""
reason = self.resp.reason
try:
data = json.loads(self.content)
reason = data['error']['message']
except (ValueError, KeyError):
pass
if reason is None:
reason = ''
return reason
def __repr__(self):
if self.uri:
return '<HttpError %s when requesting %s returned "%s">' % (
self.resp.status, self.uri, self._get_reason().strip())
else:
return '<HttpError %s "%s">' % (self.resp.status, self._get_reason())
__str__ = __repr__
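# Illustrative note (added, not part of the original module): given a response
# body such as '{"error": {"message": "Daily Limit Exceeded"}}', _get_reason()
# above returns "Daily Limit Exceeded"; for a body that is not valid JSON it
# falls back to resp.reason.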
class InvalidJsonError(Error):
"""The JSON returned could not be parsed."""
pass
class UnknownFileType(Error):
"""File type unknown or unexpected."""
pass
class UnknownLinkType(Error):
"""Link type unknown or unexpected."""
pass
class UnknownApiNameOrVersion(Error):
"""No API with that name and version exists."""
pass
class UnacceptableMimeTypeError(Error):
"""That is an unacceptable mimetype for this operation."""
pass
class MediaUploadSizeError(Error):
"""Media is larger than the method can accept."""
pass
class ResumableUploadError(HttpError):
"""Error occured during resumable upload."""
pass
class InvalidChunkSizeError(Error):
"""The given chunksize is not valid."""
pass
class InvalidNotificationError(Error):
"""The channel Notification is invalid."""
pass
class BatchError(HttpError):
"""Error occured during batch operations."""
@util.positional(2)
def __init__(self, reason, resp=None, content=None):
self.resp = resp
self.content = content
self.reason = reason
def __repr__(self):
return '<BatchError %s "%s">' % (self.resp.status, self.reason)
__str__ = __repr__
class UnexpectedMethodError(Error):
"""Exception raised by RequestMockBuilder on unexpected calls."""
@util.positional(1)
def __init__(self, methodId=None):
"""Constructor for an UnexpectedMethodError."""
super(UnexpectedMethodError, self).__init__(
'Received unexpected call %s' % methodId)
class UnexpectedBodyError(Error):
"""Exception raised by RequestMockBuilder on unexpected bodies."""
def __init__(self, expected, provided):
"""Constructor for an UnexpectedMethodError."""
super(UnexpectedBodyError, self).__init__(
'Expected: [%s] - Provided: [%s]' % (expected, provided))
| apache-2.0 | 8,058,671,178,606,029,000 | 23.6 | 75 | 0.683798 | false |
jobovy/apogee | apogee/test/test_allStar.py | 1 | 11655 | import copy
import numpy
import apogee.tools.read as apread
from apogee.tools import bitmask, paramIndx, elemIndx
_DATA= apread.allStar(raw=True) #such that we can re-use it in different tests
from _util import known_failure
def test_telescope():
#Test the telescope tag against the APSTAR_ID
onemIndx= numpy.array(['apogee.apo1m' in s for s in _DATA['APSTAR_ID']])
telescopeIndx= numpy.array(['apo1m' in d for d in _DATA['TELESCOPE']],
dtype='bool')
assert numpy.sum(onemIndx*(True^telescopeIndx)) == 0,\
'TELESCOPE tag does not correspond to APSTAR_ID for 1m data'
return None
def test_targflags_apogee_target1():
# Test that TARGFLAGS corresponds to the bits in APOGEE_TARGET
targ1bits= range(31) #don't check 31, bc always set
targ1bits.pop(14) #14 not populated
for targbit in targ1bits:
name= bitmask.apogee_target1_string(targbit)
targindx= numpy.array([name in s for s in _DATA['TARGFLAGS']],
dtype='bool')
if targbit == 0:
targindx*= \
numpy.array([not 'APOGEE_FAINT_EXTRA' in s for s in _DATA['TARGFLAGS']],
dtype='bool')
badindx= ((_DATA['APOGEE_TARGET1'] & 2**targbit) != 0)*(True^targindx)
assert numpy.sum(badindx) == 0, 'Some objects with bit %i set in apogee_target1 do not have the corresponding flag name in TARGFLAGS set' % targbit
return None
def test_targflags_apogee_target2():
# Test that TARGFLAGS corresponds to the bits in APOGEE_TARGET
targ2bits= [1,2,3,4,9,10,11,12,13,14,15,16,17]
for targbit in targ2bits:
name= bitmask.apogee_target2_string(targbit)
targindx= numpy.array([name in s for s in _DATA['TARGFLAGS']],
dtype='bool')
badindx= ((_DATA['APOGEE_TARGET2'] & 2**targbit) != 0)*(True^targindx)
assert numpy.sum(badindx) == 0, 'Some objects with bit %i set in apogee_target2 do not have the corresponding flag name in TARGFLAGS set' % targbit
return None
def test_extratarg():
#Test that extratarg tag is
# 0 or 4 (duplicates) for main survey targets
# 1 for commissioning (bit 1)
# 2 for tellurics (bit 2)
# 3 1m (bit 3)
mainIndx= (((_DATA['APOGEE_TARGET1'] & 2**11) != 0)\
+((_DATA['APOGEE_TARGET1'] & 2**12) != 0)
+((_DATA['APOGEE_TARGET1'] & 2**13) != 0))
mainIndx*= (_DATA['EXTRATARG'] != 2**4) #rm duplicates
#Also rm commissioning
commIndx= _DATA['COMMISS'] == 1
mainIndx*= (True^commIndx)
assert numpy.sum(mainIndx*(_DATA['EXTRATARG'] != 0)) == 0, '%i main survey targets have EXTRATARG neq 0' % numpy.sum(mainIndx*_DATA['EXTRATARG'] > 0)
commBitSet= numpy.array([bitmask.bit_set(1,e) for e in _DATA['EXTRATARG']],
dtype='bool')
    assert numpy.sum(commIndx*(True^commBitSet)) == 0, '%i commissioning targets do not have bit 1 in EXTRATARG set' % numpy.sum(commIndx*(True^commBitSet))
tellIndx= (_DATA['APOGEE_TARGET2'] & 2**9) != 0
tellBitSet= numpy.array([bitmask.bit_set(2,e) for e in _DATA['EXTRATARG']],
dtype='bool')
#Rm the tellurics that are main targets
tellIndx*= (True^mainIndx)
assert numpy.sum(tellIndx*(True^tellBitSet)) == 0, '%i telluric targets do not have bit 2 in EXTRATARG set' % numpy.sum(tellIndx*(True^tellBitSet))
#1m
onemIndx= numpy.array(['apogee.apo1m' in s for s in _DATA['APSTAR_ID']])
onemBitSet= numpy.array([bitmask.bit_set(3,e) for e in _DATA['EXTRATARG']],
dtype='bool')
assert numpy.sum(onemIndx*(True^onemBitSet)) == 0, '%i 1m targets do not have bit 3 in EXTRATARG set' % numpy.sum(onemIndx*(True^onemBitSet))
return None
def test_params_named():
#Test that the named tags correspond to the correct values in param according to PARAM_SYMBOL
assert numpy.all(numpy.fabs(_DATA['PARAM'][:,paramIndx('teff')]
-_DATA['TEFF']) < 10.**-10.), 'PARAM TEFF does not correspond to tag TEFF'
assert numpy.all(numpy.fabs(_DATA['PARAM'][:,paramIndx('logg')]
-_DATA['LOGG']) < 10.**-10.), 'PARAM LOGG does not correspond to tag LOGG'
cnanIndx= (True^numpy.isnan(numpy.sqrt(_DATA['PARAM_COV'][:,paramIndx('teff'),paramIndx('teff')])))
if numpy.sum(cnanIndx) > 0:
assert numpy.all(numpy.fabs(numpy.sqrt(_DATA['PARAM_COV'][cnanIndx,paramIndx('teff'),paramIndx('teff')])
-_DATA['TEFF_ERR'][cnanIndx]) < 10.**-10.), 'PARAM_COV TEFF does not correspond to tag TEFF_ERR'
cnanIndx= (True^numpy.isnan(numpy.sqrt(_DATA['PARAM_COV'][:,paramIndx('logg'),paramIndx('logg')])))
if numpy.sum(cnanIndx) > 0:
assert numpy.all(numpy.fabs(numpy.sqrt(_DATA['PARAM_COV'][cnanIndx,paramIndx('logg'),paramIndx('logg')])
-_DATA['LOGG_ERR'][cnanIndx]) < 10.**-10.), 'PARAM_COV LOGG does not correspond to tag LOGG_ERR'
return None
def test_params_err():
#Test that the param errors (teff and logg) are not equal to -1
assert not numpy.all(_DATA['TEFF_ERR'] == -1), 'TEFF_ERR are all equal to -1'
assert not numpy.all(_DATA['LOGG_ERR'] == -1), 'LOGG_ERR are all equal to -1'
return None
def test_elem_named():
#Test that the named tags for the elements correspond to the correct values in elem according to ELEM_SYMBOL
from apogee.tools import _ELEM_SYMBOL
elems= [e.capitalize() for e in _ELEM_SYMBOL if e != 'ci' and e != 'tiii']
ferreOverM= ['C','N','O','Mg','Si','S','Ca','Ti']
for ii,elem in enumerate(elems):
if elem == 'C' or elem == 'N' or elem == 'O': continue
elemval= copy.copy(_DATA['ELEM'][:,elemIndx(elem)])
if elem in ferreOverM: elemval+= _DATA['FPARAM'][:,paramIndx('metals')]
#BOVY: What about the following?
goodIndx= (_DATA['FPARAM'][:,paramIndx('metals')] != -9999.)\
*(_DATA[elem.upper()+'_H'] != -9999.)
assert numpy.all(numpy.fabs(elemval[goodIndx]-_DATA[elem.upper()+'_H'][goodIndx]) < 10.**-10.), 'ELEM value for %s_H does not agree with named tag' % elem
return None
def test_elem_err_named_exclNaN():
#Test that the named tags for the elements correspond to the correct values in elem according to ELEM_SYMBOL , rm differences that are NaN
from apogee.tools import _ELEM_SYMBOL
elems= [e.capitalize() for e in _ELEM_SYMBOL if e != 'ci' and e != 'tiii']
for ii,elem in enumerate(elems):
errDiff= _DATA['ELEM_ERR'][:,elemIndx(elem)]\
-_DATA[elem.upper()+'_H_ERR']
cnanIndx= True^numpy.isnan(errDiff)
assert numpy.all(numpy.fabs(errDiff[cnanIndx]) < 10.**-10.), 'ELEM_ERR value for %s_H_ERR does not agree with named tag' % elem
return None
#@known_failure
def test_elem_err_named():
#Test that the named tags for the elements correspond to the correct values in elem according to ELEM_SYMBOL
from apogee.tools import _ELEM_SYMBOL
elems= [e.capitalize() for e in _ELEM_SYMBOL if e != 'ci' and e != 'tiii']
for ii,elem in enumerate(elems):
errDiff= _DATA['ELEM_ERR'][:,elemIndx(elem)]\
-_DATA[elem.upper()+'_H_ERR']
assert numpy.all(numpy.fabs(errDiff) < 10.**-10.), 'ELEM_ERR value for %s_H_ERR does not agree with named tag' % elem
return None
def test_elem_calib_outsiderange_giants():
#Test that the elem calibration does not extend outside of the calibration
#temperature range
from apogee.tools import _ELEM_SYMBOL
elems= [e.capitalize() for e in _ELEM_SYMBOL if e != 'ci' and e != 'tiii']
TeffMin= 3800.
TeffMax= 5250.
giants= (_DATA['FPARAM'][:,paramIndx('logg')] < (2./1300.\
*(_DATA['FPARAM'][:,paramIndx('teff')]-3500.)+2.))\
*(_DATA['FPARAM'][:,paramIndx('logg')] < 4.)\
*(_DATA['FPARAM'][:,paramIndx('teff')] < 7000.)
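    # (Clarifying note, added: the cut above is an approximate giant-branch
    # selection in the (Teff, logg) plane; test_elem_calib_outsiderange_dwarfs
    # below selects the complementary sample.)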
for elem in elems:
calibDiff= _DATA['FELEM'][:,elemIndx(elem)]\
-_DATA['ELEM'][:,elemIndx(elem)]
#Only consider good stars for this element
indx= ((_DATA['ASPCAPFLAG'] & 2**23) == 0)\
*(_DATA['FPARAM'][:,paramIndx('teff')] > -1000.)\
*giants\
*(_DATA['FELEM'][:,elemIndx(elem)] > -1000.)\
*(_DATA['ELEM'][:,elemIndx(elem)] > -1000.)
try:
loTIndx= numpy.argmin(numpy.fabs(_DATA['FPARAM'][indx,
paramIndx('teff')]
-TeffMin))
except ValueError:
pass
else:
assert numpy.all(numpy.fabs(calibDiff[indx][_DATA['FPARAM'][indx,paramIndx('teff')] < TeffMin]-calibDiff[indx][loTIndx]) < 10.**-3.), 'Calibration offset does not saturate below the minimum calibration temperature of %i for element %s' % (TeffMin,elem)
try:
hiTIndx= numpy.argmin(numpy.fabs(_DATA['FPARAM'][indx,
paramIndx('teff')]
-TeffMax))
except ValueError:
pass
else:
assert numpy.all(numpy.fabs(calibDiff[indx][_DATA['FPARAM'][indx,paramIndx('teff')] > TeffMax]-calibDiff[indx][hiTIndx]) < 10.**-2.), 'Calibration offset does not saturate above the maximum calibration temperature of %i for element %s' % (TeffMax,elem)
return None
def test_elem_calib_outsiderange_dwarfs():
#Test that the elem calibration does not extend outside of the calibration
#temperature range
from apogee.tools import _ELEM_SYMBOL
elems= [e.capitalize() for e in _ELEM_SYMBOL if e != 'ci' and e != 'tiii']
TeffMin= 3800.
TeffMax= 7500.
dwarfs= (_DATA['FPARAM'][:,paramIndx('logg')] >= (2./1300.\
*(_DATA['FPARAM'][:,paramIndx('teff')]-3500.)+2.))\
+(_DATA['FPARAM'][:,paramIndx('logg')] >= 4.)\
+(_DATA['FPARAM'][:,paramIndx('teff')] >= 7000.)
for elem in elems:
calibDiff= _DATA['FELEM'][:,elemIndx(elem)]\
-_DATA['ELEM'][:,elemIndx(elem)]
#Only consider good stars for this element
indx= ((_DATA['ASPCAPFLAG'] & 2**23) == 0)\
*(_DATA['FPARAM'][:,paramIndx('teff')] > -1000.)\
*dwarfs\
*(_DATA['FELEM'][:,elemIndx(elem)] > -1000.)\
*(_DATA['ELEM'][:,elemIndx(elem)] > -1000.)
try:
loTIndx= numpy.argmin(numpy.fabs(_DATA['FPARAM'][indx,
paramIndx('teff')]
-TeffMin))
except ValueError:
pass
else:
assert numpy.all(numpy.fabs(calibDiff[indx][_DATA['FPARAM'][indx,paramIndx('teff')] < TeffMin]-calibDiff[indx][loTIndx]) < 10.**-3.), 'Calibration offset does not saturate below the minimum calibration temperature of %i for element %s' % (TeffMin,elem)
try:
hiTIndx= numpy.argmin(numpy.fabs(_DATA['FPARAM'][indx,
paramIndx('teff')]
-TeffMax))
except ValueError:
pass
else:
assert numpy.all(numpy.fabs(calibDiff[indx][_DATA['FPARAM'][indx,paramIndx('teff')] > TeffMax]-calibDiff[indx][hiTIndx]) < 10.**-2.), 'Calibration offset does not saturate above the maximum calibration temperature of %i for element %s' % (TeffMax,elem)
return None
| bsd-3-clause | 4,388,152,785,872,560,600 | 55.304348 | 264 | 0.582154 | false |
lepmik/nest-simulator | pynest/nest/tests/test_sp/test_disconnect_multiple.py | 7 | 9713 | # -*- coding: utf-8 -*-
#
# test_disconnect_multiple.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
import nest
import unittest
__author__ = 'naveau'
class TestDisconnect(unittest.TestCase):
def setUp(self):
nest.ResetKernel()
nest.set_verbosity('M_ERROR')
self.exclude_synapse_model = [
'stdp_dopamine_synapse',
'stdp_dopamine_synapse_lbl',
'stdp_dopamine_synapse_hpc',
'stdp_dopamine_synapse_hpc_lbl',
'gap_junction',
'gap_junction_lbl',
'diffusion_connection',
'diffusion_connection_lbl',
'rate_connection_instantaneous',
'rate_connection_instantaneous_lbl',
'rate_connection_delayed',
'rate_connection_delayed_lbl'
]
def test_multiple_synapse_deletion_all_to_all(self):
for syn_model in nest.Models('synapses'):
if syn_model not in self.exclude_synapse_model:
nest.ResetKernel()
nest.CopyModel('static_synapse', 'my_static_synapse')
nest.SetDefaults(syn_model, {'delay': 0.5})
syn_dict = {
'model': syn_model,
'pre_synaptic_element': 'SE1',
'post_synaptic_element': 'SE2'
}
nest.SetKernelStatus({
'min_delay': 0.1,
'max_delay': 1.0,
'structural_plasticity_synapses': {'syn1': syn_dict}
})
neurons = nest.Create('iaf_psc_alpha', 10, {
'synaptic_elements': {
'SE1': {'z': 0.0, 'growth_rate': 0.0},
'SE2': {'z': 0.0, 'growth_rate': 0.0}
}
})
nest.Connect(neurons, neurons, "all_to_all", syn_dict)
# Test if the connected synaptic elements before the simulation
# are correct
status = nest.GetStatus(neurons, 'synaptic_elements')
for st_neuron in status:
self.assertEqual(10, st_neuron['SE1']['z_connected'])
self.assertEqual(10, st_neuron['SE2']['z_connected'])
srcId = range(0, 5)
targId = range(5, 10)
conns = nest.GetConnections(srcId, targId, syn_model)
assert conns
conndictionary = {'rule': 'all_to_all'}
syndictionary = {'model': syn_model}
nest.Disconnect(
[neurons[i] for i in srcId],
[neurons[i] for i in targId],
conndictionary,
syndictionary
)
status = nest.GetStatus(neurons, 'synaptic_elements')
for st_neuron in status[0:5]:
self.assertEqual(5, st_neuron['SE1']['z_connected'])
self.assertEqual(10, st_neuron['SE2']['z_connected'])
for st_neuron in status[5:10]:
self.assertEqual(10, st_neuron['SE1']['z_connected'])
self.assertEqual(5, st_neuron['SE2']['z_connected'])
def test_multiple_synapse_deletion_one_to_one(self):
for syn_model in nest.Models('synapses'):
if syn_model not in self.exclude_synapse_model:
nest.ResetKernel()
nest.CopyModel('static_synapse', 'my_static_synapse')
nest.SetDefaults(syn_model, {'delay': 0.5})
syn_dict = {
'model': syn_model,
'pre_synaptic_element': 'SE1',
'post_synaptic_element': 'SE2'
}
nest.SetKernelStatus({
'min_delay': 0.1,
'max_delay': 1.0,
'structural_plasticity_synapses': {'syn1': syn_dict}
})
neurons = nest.Create('iaf_psc_alpha', 10, {
'synaptic_elements': {
'SE1': {'z': 0.0, 'growth_rate': 0.0},
'SE2': {'z': 0.0, 'growth_rate': 0.0}
}
})
nest.Connect(neurons, neurons, "all_to_all", syn_dict)
# Test if the connected synaptic elements before the simulation
# are correct
status = nest.GetStatus(neurons, 'synaptic_elements')
for st_neuron in status:
self.assertEqual(10, st_neuron['SE1']['z_connected'])
self.assertEqual(10, st_neuron['SE2']['z_connected'])
srcId = range(0, 5)
targId = range(5, 10)
conns = nest.GetConnections(srcId, targId, syn_model)
assert conns
conndictionary = {'rule': 'one_to_one'}
syndictionary = {'model': syn_model}
nest.Disconnect(
[neurons[i] for i in srcId],
[neurons[i] for i in targId],
conndictionary,
syndictionary
)
status = nest.GetStatus(neurons, 'synaptic_elements')
for st_neuron in status[0:5]:
self.assertEqual(9, st_neuron['SE1']['z_connected'])
self.assertEqual(10, st_neuron['SE2']['z_connected'])
for st_neuron in status[5:10]:
self.assertEqual(10, st_neuron['SE1']['z_connected'])
self.assertEqual(9, st_neuron['SE2']['z_connected'])
def test_multiple_synapse_deletion_one_to_one_no_sp(self):
for syn_model in nest.Models('synapses'):
if syn_model not in self.exclude_synapse_model:
nest.ResetKernel()
nest.CopyModel('static_synapse', 'my_static_synapse')
neurons = nest.Create('iaf_psc_alpha', 10)
syn_dict = {'model': syn_model}
nest.Connect(neurons, neurons, "all_to_all", syn_dict)
srcId = range(0, 5)
targId = range(5, 10)
conns = nest.GetConnections(srcId, targId, syn_model)
assert len(conns) == 20
conndictionary = {'rule': 'one_to_one'}
syndictionary = {'model': syn_model}
nest.Disconnect(
[neurons[i] for i in srcId],
[neurons[i] for i in targId],
conndictionary,
syndictionary
)
conns = nest.GetConnections(srcId, targId, syn_model)
assert len(conns) == 16
def test_single_synapse_deletion_sp(self):
for syn_model in nest.Models('synapses'):
if syn_model not in self.exclude_synapse_model:
nest.ResetKernel()
nest.CopyModel('static_synapse', 'my_static_synapse')
syn_dict = {
'model': syn_model,
'pre_synaptic_element': 'SE1',
'post_synaptic_element': 'SE2'
}
# nest.SetKernelStatus(
# {'structural_plasticity_synapses': {'syn1': syn_dict}}
# )
neurons = nest.Create('iaf_psc_alpha', 2, {
'synaptic_elements': {
'SE1': {'z': 0.0, 'growth_rate': 0.0},
'SE2': {'z': 0.0, 'growth_rate': 0.0}
}
})
nest.Connect(neurons, neurons, "all_to_all", syn_dict)
nest.Connect(neurons, neurons, "all_to_all",
{'model': 'my_static_synapse'})
# Test if the connected synaptic elements before the simulation
# are correct
status = nest.GetStatus(neurons, 'synaptic_elements')
for st_neuron in status:
self.assertEqual(2, st_neuron['SE1']['z_connected'])
self.assertEqual(2, st_neuron['SE2']['z_connected'])
srcId = 0
targId = 1
conns = nest.GetConnections(
[neurons[srcId]], [neurons[targId]], syn_model)
assert conns
nest.DisconnectOneToOne(
neurons[srcId], neurons[targId], syn_dict)
status = nest.GetStatus(neurons, 'synaptic_elements')
self.assertEqual(1, status[srcId]['SE1']['z_connected'])
self.assertEqual(2, status[srcId]['SE2']['z_connected'])
self.assertEqual(2, status[targId]['SE1']['z_connected'])
self.assertEqual(1, status[targId]['SE2']['z_connected'])
conns = nest.GetConnections(
[neurons[srcId]], [neurons[targId]], syn_model)
assert not conns
def suite():
test_suite = unittest.makeSuite(TestDisconnect, 'test')
return test_suite
if __name__ == '__main__':
unittest.main()
| gpl-2.0 | -3,868,591,746,955,287,000 | 39.810924 | 79 | 0.498816 | false |
TobbeTripitaka/src | user/fomels/steepd.py | 3 | 1722 | #!/usr/bin/env python
'Steepest-descent method'
## Copyright (C) 2008 University of Texas at Austin
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import rsf.api as rsf
def steepd(oper,dat,x0,niter):
    'Steepest-descent algorithm for minimizing |oper x - dat|^2'
x = x0
R = oper(adj=0)[x]-dat
for iter in range(niter):
g = oper(adj=1)[R]
G = oper(adj=0)[g]
RG = R.dot(G)
print "iter %d: %g" % (iter+1,RG)
alpha = - RG/G.dot(G)
x = x+g*alpha
R = R+G*alpha
return x
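# Note added for clarity: for the quadratic objective |oper x - dat|^2 the
# step length alpha = -(R.G)/(G.G), with G = oper g, is the exact line-search
# minimizer along the gradient direction g; this is also why the residual can
# be updated incrementally as R <- R + alpha*G instead of being recomputed.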
if __name__ == "__main__":
# test matrix and data
matrix = rsf.File([[1,1,1,0],
[1,2,0,0],
[1,3,1,0],
[1,4,0,1],
[1,5,1,1]])
y = rsf.File([3,3,5,7,9])
x0 = rsf.File([0,0,0,0])
# matrix multiplication operator
matmult = rsf.matmult(mat=matrix)
# Using function above
x = steepd(matmult,y,x0,100)
y2 = matmult[x]
print x[:]
print y2[:]
| gpl-2.0 | 6,386,223,832,854,620,000 | 30.309091 | 78 | 0.590012 | false |
karidon/Nnm_club_parser | template.py | 1 | 2915 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
__author__ = 'karidon'
__email__ = '[email protected]'
__date__ = '2016-05-10'
def save_html(projects, path, mode='w'):
	'''
	Saves our list to an HTML file.
	:param projects: list of dicts (each with category, topic, link, size, seeders, img)
	:param path: str (output file path)
	:param mode: str (file write mode, 'w' or 'a')
	:return: writes an HTML file
	'''
	# TODO: add a table of contents
html_template_head = """
<!DOCTYPE HTML>
<html>
<head>
<meta charset="utf-8">
<title>New Films</title>
</head>
<body>
<h1 align='center'>{category}</h1>
<table border='1' align='center'>
<tr>
<th>№</td>
<th>Название</td>
<th>Размер</td>
<th>Раздатчики</td>
<th>Обложка</td>
</tr>
"""
html_template_table = """
<tr>
<td align='center'>{number}</td>
<td align='center'><a href={link}>{topic}</a></td>
<td align='center'>{size}</td>
<td align='center'>{seeders}</td>
<td align='center'><img src={img} width="189" height="255"></td>
</tr>
"""
html_template_footer = """
</table>
</body>
</html>
"""
my_file = open(path, mode, encoding='utf-8')
my_file.write(html_template_head.format(category=projects[0]['category']))
for num, project in enumerate(projects, 1):
my_file.write(html_template_table.format(number=num, topic=project['topic'], link=project['link'],
size=project['size'], seeders=project['seeders'],
img=project['img']))
my_file.write(html_template_footer)
my_file.close()
if __name__ == '__main__':
projects = [{'category': 'Зарубежный фильм', 'topic': 'Детпул', 'link': 'https://yandex.ru/', 'size': '2.34 GB',
'seeders': '2345',
'img': 'http://assets.nnm-club.ws/forum/image.php?link=http://s017.radikal.ru/i420/1601/56/affa088a60aa.jpg'},
{'category': 'Зарубежный фильм', 'topic': 'Детпул', 'link': 'https://yandex.ru/', 'size': '2.34 GB',
'seeders': '2345',
'img': 'http://assets.nnm-club.ws/forum/image.php?link=http://s017.radikal.ru/i420/1601/56/affa088a60aa.jpg'}
]
save_html(projects, 'test.html')
projects = [{'category': 'Наши фильмы', 'topic': 'Детпул', 'link': 'https://yandex.ru/', 'size': '2.34 GB',
'seeders': '2345',
'img': 'http://assets.nnm-club.ws/forum/image.php?link=http://s017.radikal.ru/i420/1601/56/affa088a60aa.jpg'},
{'category': 'Наши фильмыы', 'topic': 'Детпул', 'link': 'https://yandex.ru/', 'size': '2.34 GB',
'seeders': '2345',
'img': 'http://assets.nnm-club.ws/forum/image.php?link=http://s017.radikal.ru/i420/1601/56/affa088a60aa.jpg'}
]
save_html(projects, 'test.html', 'a')
| gpl-2.0 | -6,166,498,592,405,527,000 | 31.797619 | 124 | 0.566606 | false |
openstack/sahara-dashboard | sahara_dashboard/content/data_processing/jobs/jobs/tests.py | 1 | 3082 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.urls import reverse
from sahara_dashboard import api
from sahara_dashboard.test import helpers as test
from sahara_dashboard.test.helpers import IsHttpRequest
INDEX_URL = reverse('horizon:project:data_processing.jobs:index')
DETAILS_URL = reverse(
'horizon:project:data_processing.jobs:details', args=['id'])
class DataProcessingJobTests(test.TestCase):
@test.create_mocks({api.sahara: ('job_execution_list',
'plugin_list', 'job_binary_list',
'data_source_list',
'job_list')})
def test_index(self):
self.mock_job_execution_list.return_value = \
self.job_executions.list()
res = self.client.get(INDEX_URL)
self.mock_job_execution_list.assert_called_once_with(
IsHttpRequest(), {})
self.assertEqual(
"cluster-1",
(res.context_data["tab_group"]._tabs["jobs_tab"].
_tables["jobs"].data[0].cluster_name))
self.assertEqual(
"job-1",
(res.context_data["tab_group"]._tabs["jobs_tab"].
_tables["jobs"].data[0].job_name))
self.assertTemplateUsed(res, 'jobs/index.html')
self.assertContains(res, 'Jobs')
@test.create_mocks({api.sahara: ('job_execution_get',
'cluster_get', 'job_get',
'data_source_get')})
def test_details(self):
self.mock_job_execution_get.return_value = (
self.job_executions.first())
res = self.client.get(DETAILS_URL)
self.assertTemplateUsed(res, 'horizon/common/_detail.html')
self.assertContains(res, 'RUNNING')
@test.create_mocks({api.sahara: ('job_execution_list',
'job_execution_delete')})
def test_delete(self):
job_exec = self.job_executions.first()
self.mock_job_execution_list.return_value = self.job_executions.list()
self.mock_job_execution_delete.return_value = None
form_data = {'action': 'jobs__delete__%s' % job_exec.id}
res = self.client.post(INDEX_URL, form_data)
self.mock_job_execution_list.assert_called_once_with(
IsHttpRequest(), {})
self.mock_job_execution_delete.assert_called_once_with(
IsHttpRequest(), job_exec.id)
self.assertNoFormErrors(res)
self.assertRedirectsNoFollow(res, INDEX_URL)
self.assertMessageCount(success=1)
| apache-2.0 | -8,253,138,707,710,880,000 | 39.552632 | 78 | 0.616807 | false |
TimYi/pybuilder | src/main/python/pybuilder/plugins/python/pyfix_unittest_plugin.py | 7 | 1416 | # -*- coding: utf-8 -*-
#
# This file is part of PyBuilder
#
# Copyright 2011-2015 PyBuilder Team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pybuilder.core import init, task, description, use_plugin
__author__ = "Alexander Metzner"
use_plugin("python.core")
@init
def init_test_source_directory(project):
project.build_depends_on("pyfix")
project.set_property_if_unset("dir_source_unittest_python", "src/unittest/python")
project.set_property_if_unset("pyfix_unittest_module_glob", "*_pyfix_tests")
project.set_property_if_unset("pyfix_unittest_file_suffix", None) # deprecated, use pyfix_unittest_module_glob.
@task
@description("Runs unit tests written using the pyfix test framework")
def run_unit_tests(project, logger):
import pybuilder.plugins.python.pyfix_plugin_impl
pybuilder.plugins.python.pyfix_plugin_impl.run_unit_tests(project, logger)
| apache-2.0 | 671,588,175,894,485,200 | 34.4 | 116 | 0.735169 | false |
jinxiaoye1987/RyzomCore | nel/tools/build_gamedata/processes/sheet_id/2_build.py | 3 | 2057 | #!/usr/bin/python
#
# \file 2_build.py
# \brief Build sheet_id
# \date 2009-06-03 10:47GMT
# \author Jan Boon (Kaetemi)
# Python port of game data build pipeline.
# Build sheet_id
#
# NeL - MMORPG Framework <http://dev.ryzom.com/projects/nel/>
# Copyright (C) 2010 Winch Gate Property Limited
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import time, sys, os, shutil, subprocess, distutils.dir_util
sys.path.append("../../configuration")
if os.path.isfile("log.log"):
os.remove("log.log")
log = open("log.log", "w")
from scripts import *
from buildsite import *
from process import *
from tools import *
from directories import *
printLog(log, "")
printLog(log, "-------")
printLog(log, "--- Build sheet_id")
printLog(log, "-------")
printLog(log, time.strftime("%Y-%m-%d %H:%MGMT", time.gmtime(time.time())))
printLog(log, "")
# Find tools
MakeSheetId = findTool(log, ToolDirectories, MakeSheetIdTool, ToolSuffix)
printLog(log, "")
# For each sheet_id directory
printLog(log, ">>> Build sheet_id <<<")
if MakeSheetId == "":
toolLogFail(log, MakeSheetIdTool, ToolSuffix)
else:
mkPath(log, LeveldesignDirectory)
mkPath(log, LeveldesignWorldDirectory)
subprocess.call([ MakeSheetId, "-o" + LeveldesignDirectory + "/game_elem/sheet_id.bin", LeveldesignDirectory + "/game_elem", LeveldesignDirectory + "/game_element", LeveldesignWorldDirectory, DataShardDirectory + "mirror_sheets" ])
printLog(log, "")
log.close()
# end of file
| agpl-3.0 | 313,693,223,034,468,540 | 31.650794 | 232 | 0.724356 | false |
guewen/odoo | openerp/addons/base/tests/test_acl.py | 41 | 6032 | import unittest2
from lxml import etree
import openerp
from openerp.tools.misc import mute_logger
from openerp.tests import common
# test group that demo user should not have
GROUP_TECHNICAL_FEATURES = 'base.group_no_one'
class TestACL(common.TransactionCase):
def setUp(self):
super(TestACL, self).setUp()
self.res_currency = self.registry('res.currency')
self.res_partner = self.registry('res.partner')
self.res_users = self.registry('res.users')
_, self.demo_uid = self.registry('ir.model.data').get_object_reference(self.cr, self.uid, 'base', 'user_demo')
self.tech_group = self.registry('ir.model.data').get_object(self.cr, self.uid,
*(GROUP_TECHNICAL_FEATURES.split('.')))
def test_field_visibility_restriction(self):
"""Check that model-level ``groups`` parameter effectively restricts access to that
field for users who do not belong to one of the explicitly allowed groups"""
# Verify the test environment first
original_fields = self.res_currency.fields_get(self.cr, self.demo_uid, [])
form_view = self.res_currency.fields_view_get(self.cr, self.demo_uid, False, 'form')
view_arch = etree.fromstring(form_view.get('arch'))
has_tech_feat = self.res_users.has_group(self.cr, self.demo_uid, GROUP_TECHNICAL_FEATURES)
self.assertFalse(has_tech_feat, "`demo` user should not belong to the restricted group before the test")
self.assertTrue('accuracy' in original_fields, "'accuracy' field must be properly visible before the test")
self.assertNotEquals(view_arch.xpath("//field[@name='accuracy']"), [],
"Field 'accuracy' must be found in view definition before the test")
# Restrict access to the field and check it's gone
self.res_currency._columns['accuracy'].groups = GROUP_TECHNICAL_FEATURES
fields = self.res_currency.fields_get(self.cr, self.demo_uid, [])
form_view = self.res_currency.fields_view_get(self.cr, self.demo_uid, False, 'form')
view_arch = etree.fromstring(form_view.get('arch'))
self.assertFalse('accuracy' in fields, "'accuracy' field should be gone")
self.assertEquals(view_arch.xpath("//field[@name='accuracy']"), [],
"Field 'accuracy' must not be found in view definition")
# Make demo user a member of the restricted group and check that the field is back
self.tech_group.write({'users': [(4, self.demo_uid)]})
has_tech_feat = self.res_users.has_group(self.cr, self.demo_uid, GROUP_TECHNICAL_FEATURES)
fields = self.res_currency.fields_get(self.cr, self.demo_uid, [])
form_view = self.res_currency.fields_view_get(self.cr, self.demo_uid, False, 'form')
view_arch = etree.fromstring(form_view.get('arch'))
#import pprint; pprint.pprint(fields); pprint.pprint(form_view)
self.assertTrue(has_tech_feat, "`demo` user should now belong to the restricted group")
self.assertTrue('accuracy' in fields, "'accuracy' field must be properly visible again")
self.assertNotEquals(view_arch.xpath("//field[@name='accuracy']"), [],
"Field 'accuracy' must be found in view definition again")
#cleanup
self.tech_group.write({'users': [(3, self.demo_uid)]})
self.res_currency._columns['accuracy'].groups = False
@mute_logger('openerp.osv.orm')
def test_field_crud_restriction(self):
"Read/Write RPC access to restricted field should be forbidden"
# Verify the test environment first
has_tech_feat = self.res_users.has_group(self.cr, self.demo_uid, GROUP_TECHNICAL_FEATURES)
self.assertFalse(has_tech_feat, "`demo` user should not belong to the restricted group")
self.assert_(self.res_partner.read(self.cr, self.demo_uid, [1], ['bank_ids']))
self.assert_(self.res_partner.write(self.cr, self.demo_uid, [1], {'bank_ids': []}))
# Now restrict access to the field and check it's forbidden
self.res_partner._columns['bank_ids'].groups = GROUP_TECHNICAL_FEATURES
with self.assertRaises(openerp.osv.orm.except_orm):
self.res_partner.read(self.cr, self.demo_uid, [1], ['bank_ids'])
with self.assertRaises(openerp.osv.orm.except_orm):
self.res_partner.write(self.cr, self.demo_uid, [1], {'bank_ids': []})
# Add the restricted group, and check that it works again
self.tech_group.write({'users': [(4, self.demo_uid)]})
has_tech_feat = self.res_users.has_group(self.cr, self.demo_uid, GROUP_TECHNICAL_FEATURES)
self.assertTrue(has_tech_feat, "`demo` user should now belong to the restricted group")
self.assert_(self.res_partner.read(self.cr, self.demo_uid, [1], ['bank_ids']))
self.assert_(self.res_partner.write(self.cr, self.demo_uid, [1], {'bank_ids': []}))
#cleanup
self.tech_group.write({'users': [(3, self.demo_uid)]})
self.res_partner._columns['bank_ids'].groups = False
def test_fields_browse_restriction(self):
"""Test access to records having restricted fields"""
self.res_partner._columns['email'].groups = GROUP_TECHNICAL_FEATURES
try:
P = self.res_partner
pid = P.search(self.cr, self.demo_uid, [], limit=1)[0]
part = P.browse(self.cr, self.demo_uid, pid)
# accessing fields must no raise exceptions...
part.name
# ... except if they are restricted
with self.assertRaises(openerp.osv.orm.except_orm) as cm:
with mute_logger('openerp.osv.orm'):
part.email
self.assertEqual(cm.exception.args[0], 'Access Denied')
finally:
self.res_partner._columns['email'].groups = False
if __name__ == '__main__':
unittest2.main()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -4,600,665,028,816,539,600 | 54.33945 | 118 | 0.639755 | false |
390910131/Misago | misago/core/tests/test_momentjs.py | 8 | 1276 | from django.conf import settings
from django.test import TestCase
from misago.core.momentjs import list_available_locales, get_locale_path
class MomentJSTests(TestCase):
def test_list_available_locales(self):
"""list_available_locales returns list of locales"""
TEST_CASES = (
'af',
'ar-sa',
'de',
'et',
'pl',
'ru',
'pt-br',
'zh-tw'
)
locales = list_available_locales().keys()
for language in TEST_CASES:
self.assertIn(language, locales)
def test_get_locale_path(self):
"""get_locale_path returns path to locale or null if it doesnt exist"""
EXISTING_LOCALES = (
'af',
'ar-sa',
'ar-sasa',
'de',
'et',
'pl',
'pl-pl',
'ru',
'pt-br',
'zh-tw'
)
for language in EXISTING_LOCALES:
self.assertIsNotNone(get_locale_path(language))
NONEXISTING_LOCALES = (
'ga',
'en',
'en-us',
'martian',
)
for language in NONEXISTING_LOCALES:
self.assertIsNone(get_locale_path(language))
| gpl-2.0 | -7,768,394,679,325,975,000 | 23.538462 | 79 | 0.484326 | false |
tboyce1/home-assistant | homeassistant/components/light/scsgate.py | 19 | 3349 | """
Support for SCSGate lights.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/light.scsgate/
"""
import logging
import voluptuous as vol
import homeassistant.components.scsgate as scsgate
from homeassistant.components.light import (Light, PLATFORM_SCHEMA)
from homeassistant.const import (
ATTR_ENTITY_ID, ATTR_STATE, CONF_DEVICES, CONF_NAME)
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = ['scsgate']
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_DEVICES): vol.Schema({cv.slug: scsgate.SCSGATE_SCHEMA}),
})
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Set up the SCSGate switches."""
devices = config.get(CONF_DEVICES)
lights = []
logger = logging.getLogger(__name__)
if devices:
for _, entity_info in devices.items():
if entity_info[scsgate.CONF_SCS_ID] in scsgate.SCSGATE.devices:
continue
name = entity_info[CONF_NAME]
scs_id = entity_info[scsgate.CONF_SCS_ID]
logger.info("Adding %s scsgate.light", name)
light = SCSGateLight(name=name, scs_id=scs_id, logger=logger)
lights.append(light)
add_devices(lights)
scsgate.SCSGATE.add_devices_to_register(lights)
class SCSGateLight(Light):
"""Representation of a SCSGate light."""
def __init__(self, scs_id, name, logger):
"""Initialize the light."""
self._name = name
self._scs_id = scs_id
self._toggled = False
self._logger = logger
@property
def scs_id(self):
"""Return the SCS ID."""
return self._scs_id
@property
def should_poll(self):
"""No polling needed for a SCSGate light."""
return False
@property
def name(self):
"""Return the name of the device if any."""
return self._name
@property
def is_on(self):
"""Return true if light is on."""
return self._toggled
def turn_on(self, **kwargs):
"""Turn the device on."""
from scsgate.tasks import ToggleStatusTask
scsgate.SCSGATE.append_task(
ToggleStatusTask(target=self._scs_id, toggled=True))
self._toggled = True
self.schedule_update_ha_state()
def turn_off(self, **kwargs):
"""Turn the device off."""
from scsgate.tasks import ToggleStatusTask
scsgate.SCSGATE.append_task(
ToggleStatusTask(target=self._scs_id, toggled=False))
self._toggled = False
self.schedule_update_ha_state()
def process_event(self, message):
"""Handle a SCSGate message related with this light."""
if self._toggled == message.toggled:
self._logger.info(
"Light %s, ignoring message %s because state already active",
self._scs_id, message)
# Nothing changed, ignoring
return
self._toggled = message.toggled
self.schedule_update_ha_state()
command = "off"
if self._toggled:
command = "on"
self.hass.bus.fire(
'button_pressed', {
ATTR_ENTITY_ID: self._scs_id,
ATTR_STATE: command,
}
)
| apache-2.0 | 6,436,838,442,553,438,000 | 26.908333 | 78 | 0.610033 | false |
byt3bl33d3r/Veil-Evasion | modules/payloads/template.py | 4 | 1980 | """
Description of the payload.
Additional notes, sources, links, etc.
Author of the module.
"""
# framework import to access shellcode generation
from modules.common import shellcode
# framework import to access common helper methods, including randomization
from modules.common import helpers
# framework import to access encryption and source code obfuscation methods
from modules.common import encryption
# the main config file
import settings
# Main class must be titled "Payload"
class Payload:
def __init__(self):
# required options
self.description = "description"
self.language = "python/cs/powershell/whatever"
self.rating = "Poor/Normal/Good/Excellent"
self.extension = "py/cs/c/etc."
self.shellcode = shellcode.Shellcode()
        # options we require user interaction for - format is {Option : [Value, Description]}
        # the code logic will parse any of these out and require the user to input a value for them
self.required_options = {
"compile_to_exe" : ["Y", "Compile to an executable"],
"use_pyherion" : ["N", "Use the pyherion encrypter"]}
# an option note to be displayed to the user after payload generation
# i.e. additional compile notes, or usage warnings
self.notes = "...additional notes to user..."
# main method that returns the generated payload code
def generate(self):
# Generate Shellcode Using msfvenom
Shellcode = self.shellcode.generate()
# build our your payload sourcecode
PayloadCode = "..."
# add in a randomized string
PayloadCode += helpers.randomString()
# example of how to check the internal options
if self.required_options["use_pyherion"][0].lower() == "y":
PayloadCode = encryption.pyherion(PayloadCode)
# return everything
return PayloadCode
| gpl-3.0 | 6,042,473,314,929,948,000 | 30.428571 | 99 | 0.65 | false |
carvalhomb/tsmells | guess/src/Lib/xml/dom/DocumentFragment.py | 2 | 1418 | ########################################################################
#
# File Name: DocumentFragment.py
#
# Documentation: http://docs.4suite.com/4DOM/DocumentFragment.py.html
#
"""
WWW: http://4suite.com/4DOM e-mail: [email protected]
Copyright (c) 2000 Fourthought Inc, USA. All Rights Reserved.
See http://4suite.com/COPYRIGHT for license and copyright information
"""
from xml.dom import Node
from FtNode import FtNode
class DocumentFragment(FtNode):
nodeType = Node.DOCUMENT_FRAGMENT_NODE
_allowedChildren = [Node.ELEMENT_NODE,
Node.PROCESSING_INSTRUCTION_NODE,
Node.COMMENT_NODE,
Node.TEXT_NODE,
Node.CDATA_SECTION_NODE,
Node.ENTITY_REFERENCE_NODE]
def __init__(self, ownerDocument):
FtNode.__init__(self, ownerDocument)
self.__dict__['__nodeName'] = '#document-fragment'
### Overridden Methods ###
def __repr__(self):
return '<DocumentFragment Node at %x: with %d children>' % (
id(self),
len(self.childNodes),
)
### Helper Functions For Cloning ###
def _4dom_clone(self, owner):
return self.__class__(owner)
def __getinitargs__(self):
return (self.ownerDocument,
)
| gpl-2.0 | 6,940,060,919,489,865,000 | 29.511111 | 76 | 0.523272 | false |
cyberitsolutions/alloc-cli | alloccli/mbox.py | 1 | 2667 | """alloccli subcommand for downloading alloc comments to mbox file."""
import tempfile
import os
import subprocess
from sys import stdout
from contextlib import closing
from .alloc import alloc
class mbox(alloc):
"""Download a task's emails to an mbox file."""
# Setup the options that this cli can accept
ops = []
ops.append(('', 'help ', 'Show this help.'))
ops.append(('q', 'quiet ', 'Run with no output except errors.'))
ops.append(('t.', 'task=ID|NAME ', 'A task ID, or a fuzzy match for a task name.'))
# Specify some header and footer text for the help text
help_text = "Usage: %s [OPTIONS]\n"
help_text += __doc__
help_text += '''\n\n%s
This program will automatically run $MAILER on the mbox file, if outputting to a TTY.
Examples:
alloc mbox -t 1234
alloc mbox -t 1234 > file.mbox'''
def run(self, command_list):
"""Execute subcommand."""
# Get the command line arguments into a dictionary
o, remainder_ = self.get_args(command_list, self.ops, self.help_text)
self.quiet = o['quiet']
taskID = ''
# Got this far, then authenticate
self.authenticate()
# Get a taskID either passed via command line, or figured out from a
# task name
tops = {}
if self.is_num(o['task']):
taskID = o['task']
elif o['task']:
tops = {}
tops["taskName"] = o["task"]
tops["taskView"] = "prioritised"
taskID = self.search_for_task(tops)
if taskID:
s = ''
str0 = self.print_task(taskID, prependEmailHeader=True)
str1 = self.make_request(
{"method": "get_task_emails", "taskID": taskID})
str2 = self.make_request(
{"method": "get_timeSheetItem_comments", "taskID": taskID})
if str0:
s += str0 + "\n\n"
if str1:
s += str1 + "\n\n"
if str2:
s += str2
            # If we're redirecting stdout, e.g. alloc mbox -t 123 > task123.mbox
if not stdout.isatty():
print((str(s).encode('utf-8')))
else:
try:
fd, filepath = tempfile.mkstemp(
prefix="alloc-%s_" % taskID, suffix=".mbox")
with closing(os.fdopen(fd, 'wb')) as tf:
tf.write(str(s).encode('utf-8'))
subprocess.check_call(
[os.getenv("MAILER") or "mutt", "-f", filepath])
finally:
os.remove(filepath)
| agpl-3.0 | 8,695,259,717,809,778,000 | 30.376471 | 89 | 0.52231 | false |
tymofij/werkzeug | tests/contrib/test_iterio.py | 30 | 5488 | # -*- coding: utf-8 -*-
"""
tests.iterio
~~~~~~~~~~~~
Tests the iterio object.
:copyright: (c) 2014 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import pytest
from tests import strict_eq
from werkzeug.contrib.iterio import IterIO, greenlet
class TestIterO(object):
def test_basic_native(self):
io = IterIO(["Hello", "World", "1", "2", "3"])
assert io.tell() == 0
assert io.read(2) == "He"
assert io.tell() == 2
assert io.read(3) == "llo"
assert io.tell() == 5
io.seek(0)
assert io.read(5) == "Hello"
assert io.tell() == 5
assert io._buf == "Hello"
assert io.read() == "World123"
assert io.tell() == 13
io.close()
assert io.closed
io = IterIO(["Hello\n", "World!"])
assert io.readline() == 'Hello\n'
assert io._buf == 'Hello\n'
assert io.read() == 'World!'
assert io._buf == 'Hello\nWorld!'
assert io.tell() == 12
io.seek(0)
assert io.readlines() == ['Hello\n', 'World!']
io = IterIO(['Line one\nLine ', 'two\nLine three'])
assert list(io) == ['Line one\n', 'Line two\n', 'Line three']
io = IterIO(iter('Line one\nLine two\nLine three'))
assert list(io) == ['Line one\n', 'Line two\n', 'Line three']
io = IterIO(['Line one\nL', 'ine', ' two', '\nLine three'])
assert list(io) == ['Line one\n', 'Line two\n', 'Line three']
io = IterIO(["foo\n", "bar"])
io.seek(-4, 2)
assert io.read(4) == '\nbar'
pytest.raises(IOError, io.seek, 2, 100)
io.close()
pytest.raises(ValueError, io.read)
def test_basic_bytes(self):
io = IterIO([b"Hello", b"World", b"1", b"2", b"3"])
assert io.tell() == 0
assert io.read(2) == b"He"
assert io.tell() == 2
assert io.read(3) == b"llo"
assert io.tell() == 5
io.seek(0)
assert io.read(5) == b"Hello"
assert io.tell() == 5
assert io._buf == b"Hello"
assert io.read() == b"World123"
assert io.tell() == 13
io.close()
assert io.closed
io = IterIO([b"Hello\n", b"World!"])
assert io.readline() == b'Hello\n'
assert io._buf == b'Hello\n'
assert io.read() == b'World!'
assert io._buf == b'Hello\nWorld!'
assert io.tell() == 12
io.seek(0)
assert io.readlines() == [b'Hello\n', b'World!']
io = IterIO([b"foo\n", b"bar"])
io.seek(-4, 2)
assert io.read(4) == b'\nbar'
pytest.raises(IOError, io.seek, 2, 100)
io.close()
pytest.raises(ValueError, io.read)
def test_basic_unicode(self):
io = IterIO([u"Hello", u"World", u"1", u"2", u"3"])
assert io.tell() == 0
assert io.read(2) == u"He"
assert io.tell() == 2
assert io.read(3) == u"llo"
assert io.tell() == 5
io.seek(0)
assert io.read(5) == u"Hello"
assert io.tell() == 5
assert io._buf == u"Hello"
assert io.read() == u"World123"
assert io.tell() == 13
io.close()
assert io.closed
io = IterIO([u"Hello\n", u"World!"])
assert io.readline() == u'Hello\n'
assert io._buf == u'Hello\n'
assert io.read() == u'World!'
assert io._buf == u'Hello\nWorld!'
assert io.tell() == 12
io.seek(0)
assert io.readlines() == [u'Hello\n', u'World!']
io = IterIO([u"foo\n", u"bar"])
io.seek(-4, 2)
assert io.read(4) == u'\nbar'
pytest.raises(IOError, io.seek, 2, 100)
io.close()
pytest.raises(ValueError, io.read)
def test_sentinel_cases(self):
io = IterIO([])
strict_eq(io.read(), '')
io = IterIO([], b'')
strict_eq(io.read(), b'')
io = IterIO([], u'')
strict_eq(io.read(), u'')
io = IterIO([])
strict_eq(io.read(), '')
io = IterIO([b''])
strict_eq(io.read(), b'')
io = IterIO([u''])
strict_eq(io.read(), u'')
io = IterIO([])
strict_eq(io.readline(), '')
io = IterIO([], b'')
strict_eq(io.readline(), b'')
io = IterIO([], u'')
strict_eq(io.readline(), u'')
io = IterIO([])
strict_eq(io.readline(), '')
io = IterIO([b''])
strict_eq(io.readline(), b'')
io = IterIO([u''])
strict_eq(io.readline(), u'')
@pytest.mark.skipif(greenlet is None, reason='Greenlet is not installed.')
class TestIterI(object):
def test_basic(self):
def producer(out):
out.write('1\n')
out.write('2\n')
out.flush()
out.write('3\n')
iterable = IterIO(producer)
assert next(iterable) == '1\n2\n'
assert next(iterable) == '3\n'
pytest.raises(StopIteration, next, iterable)
def test_sentinel_cases(self):
def producer_dummy_flush(out):
out.flush()
iterable = IterIO(producer_dummy_flush)
strict_eq(next(iterable), '')
def producer_empty(out):
pass
iterable = IterIO(producer_empty)
pytest.raises(StopIteration, next, iterable)
iterable = IterIO(producer_dummy_flush, b'')
strict_eq(next(iterable), b'')
iterable = IterIO(producer_dummy_flush, u'')
strict_eq(next(iterable), u'')
| bsd-3-clause | -8,026,106,812,219,098,000 | 28.989071 | 74 | 0.501822 | false |
apyrgio/ganeti | lib/cmdlib/instance_query.py | 2 | 11500 | #
#
# Copyright (C) 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013 Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Logical units for querying instances."""
import itertools
from ganeti import compat
from ganeti import constants
from ganeti import locking
from ganeti.cmdlib.base import NoHooksLU
from ganeti.cmdlib.common import ShareAll, GetWantedInstances, \
CheckInstancesNodeGroups, AnnotateDiskParams
from ganeti.cmdlib.instance_utils import NICListToTuple
from ganeti.hypervisor import hv_base
class LUInstanceQueryData(NoHooksLU):
"""Query runtime instance data.
"""
REQ_BGL = False
def ExpandNames(self):
self.needed_locks = {}
# Use locking if requested or when non-static information is wanted
if not (self.op.static or self.op.use_locking):
self.LogWarning("Non-static data requested, locks need to be acquired")
self.op.use_locking = True
if self.op.instances or not self.op.use_locking:
# Expand instance names right here
(_, self.wanted_names) = GetWantedInstances(self, self.op.instances)
else:
# Will use acquired locks
self.wanted_names = None
if self.op.use_locking:
self.share_locks = ShareAll()
if self.wanted_names is None:
self.needed_locks[locking.LEVEL_INSTANCE] = locking.ALL_SET
else:
self.needed_locks[locking.LEVEL_INSTANCE] = self.wanted_names
self.needed_locks[locking.LEVEL_NODEGROUP] = []
self.needed_locks[locking.LEVEL_NODE] = []
self.needed_locks[locking.LEVEL_NETWORK] = []
self.recalculate_locks[locking.LEVEL_NODE] = constants.LOCKS_REPLACE
self.dont_collate_locks[locking.LEVEL_NODEGROUP] = True
self.dont_collate_locks[locking.LEVEL_NODE] = True
self.dont_collate_locks[locking.LEVEL_NETWORK] = True
def DeclareLocks(self, level):
if self.op.use_locking:
owned_instances = dict(self.cfg.GetMultiInstanceInfoByName(
self.owned_locks(locking.LEVEL_INSTANCE)))
if level == locking.LEVEL_NODEGROUP:
# Lock all groups used by instances optimistically; this requires going
# via the node before it's locked, requiring verification later on
self.needed_locks[locking.LEVEL_NODEGROUP] = \
frozenset(group_uuid
for instance_uuid in owned_instances.keys()
for group_uuid in
self.cfg.GetInstanceNodeGroups(instance_uuid))
elif level == locking.LEVEL_NODE:
self._LockInstancesNodes()
elif level == locking.LEVEL_NETWORK:
self.needed_locks[locking.LEVEL_NETWORK] = \
frozenset(net_uuid
for instance_uuid in owned_instances.keys()
for net_uuid in
self.cfg.GetInstanceNetworks(instance_uuid))
def CheckPrereq(self):
"""Check prerequisites.
This only checks the optional instance list against the existing names.
"""
owned_instances = frozenset(self.owned_locks(locking.LEVEL_INSTANCE))
owned_groups = frozenset(self.owned_locks(locking.LEVEL_NODEGROUP))
owned_node_uuids = frozenset(self.owned_locks(locking.LEVEL_NODE))
owned_networks = frozenset(self.owned_locks(locking.LEVEL_NETWORK))
if self.wanted_names is None:
assert self.op.use_locking, "Locking was not used"
self.wanted_names = owned_instances
instances = dict(self.cfg.GetMultiInstanceInfoByName(self.wanted_names))
if self.op.use_locking:
CheckInstancesNodeGroups(self.cfg, instances, owned_groups,
owned_node_uuids, None)
else:
assert not (owned_instances or owned_groups or
owned_node_uuids or owned_networks)
self.wanted_instances = instances.values()
def _ComputeBlockdevStatus(self, node_uuid, instance, dev):
"""Returns the status of a block device
"""
if self.op.static or not node_uuid:
return None
result = self.rpc.call_blockdev_find(node_uuid, (dev, instance))
if result.offline:
return None
result.Raise("Can't compute disk status for %s" % instance.name)
status = result.payload
if status is None:
return None
return (status.dev_path, status.major, status.minor,
status.sync_percent, status.estimated_time,
status.is_degraded, status.ldisk_status)
def _ComputeDiskStatus(self, instance, node_uuid2name_fn, dev):
"""Compute block device status.
"""
(anno_dev,) = AnnotateDiskParams(instance, [dev], self.cfg)
return self._ComputeDiskStatusInner(instance, None, node_uuid2name_fn,
anno_dev)
def _ComputeDiskStatusInner(self, instance, snode_uuid, node_uuid2name_fn,
dev):
"""Compute block device status.
@attention: The device has to be annotated already.
"""
drbd_info = None
if dev.dev_type in constants.DTS_DRBD:
# we change the snode then (otherwise we use the one passed in)
if dev.logical_id[0] == instance.primary_node:
snode_uuid = dev.logical_id[1]
snode_minor = dev.logical_id[4]
pnode_minor = dev.logical_id[3]
else:
snode_uuid = dev.logical_id[0]
snode_minor = dev.logical_id[3]
pnode_minor = dev.logical_id[4]
drbd_info = {
"primary_node": node_uuid2name_fn(instance.primary_node),
"primary_minor": pnode_minor,
"secondary_node": node_uuid2name_fn(snode_uuid),
"secondary_minor": snode_minor,
"port": dev.logical_id[2],
"secret": dev.logical_id[5],
}
dev_pstatus = self._ComputeBlockdevStatus(instance.primary_node,
instance, dev)
dev_sstatus = self._ComputeBlockdevStatus(snode_uuid, instance, dev)
if dev.children:
dev_children = map(compat.partial(self._ComputeDiskStatusInner,
instance, snode_uuid,
node_uuid2name_fn),
dev.children)
else:
dev_children = []
return {
"iv_name": dev.iv_name,
"dev_type": dev.dev_type,
"logical_id": dev.logical_id,
"drbd_info": drbd_info,
"pstatus": dev_pstatus,
"sstatus": dev_sstatus,
"children": dev_children,
"mode": dev.mode,
"size": dev.size,
"spindles": dev.spindles,
"name": dev.name,
"uuid": dev.uuid,
}
def Exec(self, feedback_fn):
"""Gather and return data"""
result = {}
cluster = self.cfg.GetClusterInfo()
node_uuids = itertools.chain(*(self.cfg.GetInstanceNodes(i.uuid)
for i in self.wanted_instances))
nodes = dict(self.cfg.GetMultiNodeInfo(node_uuids))
groups = dict(self.cfg.GetMultiNodeGroupInfo(node.group
for node in nodes.values()))
for instance in self.wanted_instances:
pnode = nodes[instance.primary_node]
hvparams = cluster.FillHV(instance, skip_globals=True)
if self.op.static or pnode.offline:
remote_state = None
if pnode.offline:
self.LogWarning("Primary node %s is marked offline, returning static"
" information only for instance %s" %
(pnode.name, instance.name))
else:
remote_info = self.rpc.call_instance_info(
instance.primary_node, instance.name, instance.hypervisor,
cluster.hvparams[instance.hypervisor])
remote_info.Raise("Error checking node %s" % pnode.name)
remote_info = remote_info.payload
allow_userdown = \
cluster.enabled_user_shutdown and \
(instance.hypervisor != constants.HT_KVM or
hvparams[constants.HV_KVM_USER_SHUTDOWN])
if remote_info and "state" in remote_info:
if hv_base.HvInstanceState.IsShutdown(remote_info["state"]):
if allow_userdown:
remote_state = "user down"
else:
remote_state = "down"
else:
remote_state = "up"
else:
if instance.admin_state == constants.ADMINST_UP:
remote_state = "down"
elif instance.admin_state == constants.ADMINST_DOWN:
if instance.admin_state_source == constants.USER_SOURCE:
remote_state = "user down"
else:
remote_state = "down"
else:
remote_state = "offline"
group2name_fn = lambda uuid: groups[uuid].name
node_uuid2name_fn = lambda uuid: nodes[uuid].name
disks = map(compat.partial(self._ComputeDiskStatus, instance,
node_uuid2name_fn),
self.cfg.GetInstanceDisks(instance.uuid))
secondary_nodes = self.cfg.GetInstanceSecondaryNodes(instance.uuid)
snodes_group_uuids = [nodes[snode_uuid].group
for snode_uuid in secondary_nodes]
result[instance.name] = {
"name": instance.name,
"config_state": instance.admin_state,
"run_state": remote_state,
"pnode": pnode.name,
"pnode_group_uuid": pnode.group,
"pnode_group_name": group2name_fn(pnode.group),
"snodes": map(node_uuid2name_fn, secondary_nodes),
"snodes_group_uuids": snodes_group_uuids,
"snodes_group_names": map(group2name_fn, snodes_group_uuids),
"os": instance.os,
# this happens to be the same format used for hooks
"nics": NICListToTuple(self, instance.nics),
"disk_template": instance.disk_template,
"disks": disks,
"hypervisor": instance.hypervisor,
"network_port": instance.network_port,
"hv_instance": instance.hvparams,
"hv_actual": hvparams,
"be_instance": instance.beparams,
"be_actual": cluster.FillBE(instance),
"os_instance": instance.osparams,
"os_actual": cluster.SimpleFillOS(instance.os, instance.osparams),
"serial_no": instance.serial_no,
"mtime": instance.mtime,
"ctime": instance.ctime,
"uuid": instance.uuid,
}
return result
| bsd-2-clause | 1,510,076,378,824,684,500 | 36.096774 | 79 | 0.637739 | false |
jimsimon/sky_engine | sky/tools/webkitpy/layout_tests/controllers/manager.py | 10 | 23875 | # Copyright (C) 2010 Google Inc. All rights reserved.
# Copyright (C) 2010 Gabor Rapcsanyi ([email protected]), University of Szeged
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
The Manager runs a series of tests (TestType interface) against a set
of test files. If a test file fails a TestType, it returns a list of TestFailure
objects to the Manager. The Manager then aggregates the TestFailures to
create a final report.
"""
import datetime
import json
import logging
import random
import sys
import time
from webkitpy.common.net.file_uploader import FileUploader
from webkitpy.layout_tests.controllers.layout_test_finder import LayoutTestFinder
from webkitpy.layout_tests.controllers.layout_test_runner import LayoutTestRunner
from webkitpy.layout_tests.controllers.test_result_writer import TestResultWriter
from webkitpy.layout_tests.layout_package import json_results_generator
from webkitpy.layout_tests.models import test_expectations
from webkitpy.layout_tests.models import test_failures
from webkitpy.layout_tests.models import test_run_results
from webkitpy.layout_tests.models.test_input import TestInput
_log = logging.getLogger(__name__)
# Builder base URL where we have the archived test results.
BUILDER_BASE_URL = "http://build.chromium.org/buildbot/layout_test_results/"
TestExpectations = test_expectations.TestExpectations
class Manager(object):
"""A class for managing running a series of tests on a series of layout
test files."""
def __init__(self, port, options, printer):
"""Initialize test runner data structures.
Args:
          port: an object implementing port-specific functionality
          options: a dictionary of command line options
          printer: a Printer object to record updates to.
"""
self._port = port
self._filesystem = port.host.filesystem
self._options = options
self._printer = printer
self._expectations = None
self.HTTP_SUBDIR = 'http' + port.TEST_PATH_SEPARATOR
self.PERF_SUBDIR = 'perf'
self.WEBSOCKET_SUBDIR = 'websocket' + port.TEST_PATH_SEPARATOR
self.LAYOUT_TESTS_DIRECTORY = 'tests'
self.ARCHIVED_RESULTS_LIMIT = 25
self._http_server_started = False
self._websockets_server_started = False
self._results_directory = self._port.results_directory()
self._finder = LayoutTestFinder(self._port, self._options)
self._runner = LayoutTestRunner(self._options, self._port, self._printer, self._results_directory, self._test_is_slow)
def _collect_tests(self, args):
return self._finder.find_tests(self._options, args)
def _is_http_test(self, test):
return self.HTTP_SUBDIR in test or self._is_websocket_test(test)
def _is_websocket_test(self, test):
return self.WEBSOCKET_SUBDIR in test
def _http_tests(self, test_names):
return set(test for test in test_names if self._is_http_test(test))
def _is_perf_test(self, test):
return self.PERF_SUBDIR == test or (self.PERF_SUBDIR + self._port.TEST_PATH_SEPARATOR) in test
def _prepare_lists(self, paths, test_names):
tests_to_skip = self._finder.skip_tests(paths, test_names, self._expectations, self._http_tests(test_names))
tests_to_run = [test for test in test_names if test not in tests_to_skip]
if not tests_to_run:
return tests_to_run, tests_to_skip
# Create a sorted list of test files so the subset chunk,
# if used, contains alphabetically consecutive tests.
if self._options.order == 'natural':
tests_to_run.sort(key=self._port.test_key)
elif self._options.order == 'random':
random.shuffle(tests_to_run)
elif self._options.order == 'random-seeded':
rnd = random.Random()
rnd.seed(4) # http://xkcd.com/221/
rnd.shuffle(tests_to_run)
tests_to_run, tests_in_other_chunks = self._finder.split_into_chunks(tests_to_run)
self._expectations.add_extra_skipped_tests(tests_in_other_chunks)
tests_to_skip.update(tests_in_other_chunks)
return tests_to_run, tests_to_skip
def _test_input_for_file(self, test_file):
return TestInput(test_file,
self._options.slow_time_out_ms if self._test_is_slow(test_file) else self._options.time_out_ms,
self._test_requires_lock(test_file),
should_add_missing_baselines=(self._options.new_test_results and not self._test_is_expected_missing(test_file)))
def _test_requires_lock(self, test_file):
"""Return True if the test needs to be locked when
running multiple copies of NRWTs. Perf tests are locked
because heavy load caused by running other tests in parallel
        might cause some of them to time out."""
return False
def _test_is_expected_missing(self, test_file):
expectations = self._expectations.model().get_expectations(test_file)
return test_expectations.MISSING in expectations or test_expectations.NEEDS_REBASELINE in expectations or test_expectations.NEEDS_MANUAL_REBASELINE in expectations
def _test_is_slow(self, test_file):
return test_expectations.SLOW in self._expectations.model().get_expectations(test_file)
def needs_servers(self, test_names):
return any(self._test_requires_lock(test_name) for test_name in test_names)
def _rename_results_folder(self):
try:
timestamp = time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime(self._filesystem.mtime(self._filesystem.join(self._results_directory, "results.html"))))
except OSError, e:
# It might be possible that results.html was not generated in previous run, because the test
# run was interrupted even before testing started. In those cases, don't archive the folder.
            # Simply overwrite the current folder contents with new results.
            import errno
            if e.errno == errno.ENOENT:
_log.warning("No results.html file found in previous run, skipping it.")
return None
archived_name = ''.join((self._filesystem.basename(self._results_directory), "_", timestamp))
archived_path = self._filesystem.join(self._filesystem.dirname(self._results_directory), archived_name)
self._filesystem.move(self._results_directory, archived_path)
def _clobber_old_archived_results(self):
results_directory_path = self._filesystem.dirname(self._results_directory)
file_list = self._filesystem.listdir(results_directory_path)
results_directories = []
for dir in file_list:
file_path = self._filesystem.join(results_directory_path, dir)
if self._filesystem.isdir(file_path):
results_directories.append(file_path)
results_directories.sort(key=lambda x: self._filesystem.mtime(x))
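        # The list is now sorted oldest-first, so the slice below removes
        # everything except the newest ARCHIVED_RESULTS_LIMIT directories.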
self._printer.write_update("Clobbering old archived results in %s" % results_directory_path)
for dir in results_directories[:-self.ARCHIVED_RESULTS_LIMIT]:
self._filesystem.rmtree(dir)
def _set_up_run(self, test_names):
self._printer.write_update("Checking build ...")
if self._options.build:
exit_code = self._port.check_build(self.needs_servers(test_names), self._printer)
if exit_code:
_log.error("Build check failed")
return exit_code
# This must be started before we check the system dependencies,
# since the helper may do things to make the setup correct.
if self._options.pixel_tests:
self._printer.write_update("Starting pixel test helper ...")
self._port.start_helper()
# Check that the system dependencies (themes, fonts, ...) are correct.
if not self._options.nocheck_sys_deps:
self._printer.write_update("Checking system dependencies ...")
exit_code = self._port.check_sys_deps(self.needs_servers(test_names))
if exit_code:
self._port.stop_helper()
return exit_code
if self._options.enable_versioned_results and self._filesystem.exists(self._results_directory):
if self._options.clobber_old_results:
_log.warning("Flag --enable_versioned_results overrides --clobber-old-results.")
self._clobber_old_archived_results()
# Rename the existing results folder for archiving.
self._rename_results_folder()
elif self._options.clobber_old_results:
self._clobber_old_results()
# Create the output directory if it doesn't already exist.
self._port.host.filesystem.maybe_make_directory(self._results_directory)
self._port.setup_test_run()
return test_run_results.OK_EXIT_STATUS
def run(self, args):
"""Run the tests and return a RunDetails object with the results."""
start_time = time.time()
self._printer.write_update("Collecting tests ...")
try:
paths, test_names = self._collect_tests(args)
except IOError:
# This is raised if --test-list doesn't exist
return test_run_results.RunDetails(exit_code=test_run_results.NO_TESTS_EXIT_STATUS)
self._printer.write_update("Parsing expectations ...")
self._expectations = test_expectations.TestExpectations(self._port, test_names)
tests_to_run, tests_to_skip = self._prepare_lists(paths, test_names)
self._printer.print_found(len(test_names), len(tests_to_run), self._options.repeat_each, self._options.iterations)
# Check to make sure we're not skipping every test.
if not tests_to_run:
_log.critical('No tests to run.')
return test_run_results.RunDetails(exit_code=test_run_results.NO_TESTS_EXIT_STATUS)
exit_code = self._set_up_run(tests_to_run)
if exit_code:
return test_run_results.RunDetails(exit_code=exit_code)
if self._options.retry_failures is None:
should_retry_failures = False
else:
should_retry_failures = self._options.retry_failures
enabled_pixel_tests_in_retry = False
try:
self._start_servers(tests_to_run)
initial_results = self._run_tests(tests_to_run, tests_to_skip, self._options.repeat_each, self._options.iterations,
self._port.num_workers(int(self._options.child_processes)), retrying=False)
# Don't retry failures when interrupted by user or failures limit exception.
should_retry_failures = should_retry_failures and not (initial_results.interrupted or initial_results.keyboard_interrupted)
tests_to_retry = self._tests_to_retry(initial_results)
if should_retry_failures and tests_to_retry:
enabled_pixel_tests_in_retry = self._force_pixel_tests_if_needed()
_log.info('')
_log.info("Retrying %d unexpected failure(s) ..." % len(tests_to_retry))
_log.info('')
retry_results = self._run_tests(tests_to_retry, tests_to_skip=set(), repeat_each=1, iterations=1,
num_workers=1, retrying=True)
if enabled_pixel_tests_in_retry:
self._options.pixel_tests = False
else:
retry_results = None
finally:
self._stop_servers()
self._clean_up_run()
# Some crash logs can take a long time to be written out so look
# for new logs after the test run finishes.
self._printer.write_update("looking for new crash logs")
self._look_for_new_crash_logs(initial_results, start_time)
if retry_results:
self._look_for_new_crash_logs(retry_results, start_time)
_log.debug("summarizing results")
summarized_full_results = test_run_results.summarize_results(self._port, self._expectations, initial_results, retry_results, enabled_pixel_tests_in_retry)
summarized_failing_results = test_run_results.summarize_results(self._port, self._expectations, initial_results, retry_results, enabled_pixel_tests_in_retry, only_include_failing=True)
exit_code = summarized_failing_results['num_regressions']
if exit_code > test_run_results.MAX_FAILURES_EXIT_STATUS:
_log.warning('num regressions (%d) exceeds max exit status (%d)' %
(exit_code, test_run_results.MAX_FAILURES_EXIT_STATUS))
exit_code = test_run_results.MAX_FAILURES_EXIT_STATUS
if not self._options.dry_run:
self._write_json_files(summarized_full_results, summarized_failing_results, initial_results)
if self._options.write_full_results_to:
self._filesystem.copyfile(self._filesystem.join(self._results_directory, "full_results.json"),
self._options.write_full_results_to)
self._upload_json_files()
results_path = self._filesystem.join(self._results_directory, "results.html")
self._copy_results_html_file(results_path)
if initial_results.keyboard_interrupted:
exit_code = test_run_results.INTERRUPTED_EXIT_STATUS
else:
if initial_results.interrupted:
exit_code = test_run_results.EARLY_EXIT_STATUS
if self._options.show_results and (exit_code or (self._options.full_results_html and initial_results.total_failures)):
self._port.show_results_html_file(results_path)
self._printer.print_results(time.time() - start_time, initial_results, summarized_failing_results)
return test_run_results.RunDetails(exit_code, summarized_full_results, summarized_failing_results, initial_results, retry_results, enabled_pixel_tests_in_retry)
def _run_tests(self, tests_to_run, tests_to_skip, repeat_each, iterations, num_workers, retrying):
test_inputs = []
for _ in xrange(iterations):
for test in tests_to_run:
for _ in xrange(repeat_each):
test_inputs.append(self._test_input_for_file(test))
return self._runner.run_tests(self._expectations, test_inputs, tests_to_skip, num_workers, retrying)
def _start_servers(self, tests_to_run):
if self._port.requires_sky_server() or any(self._is_http_test(test) for test in tests_to_run):
self._printer.write_update('Starting HTTP server ...')
self._port.start_sky_server(additional_dirs={}, number_of_drivers=self._options.max_locked_shards)
self._http_server_started = True
if any(self._is_websocket_test(test) for test in tests_to_run):
self._printer.write_update('Starting WebSocket server ...')
self._port.start_websocket_server()
self._websockets_server_started = True
def _stop_servers(self):
if self._http_server_started:
self._printer.write_update('Stopping HTTP server ...')
self._http_server_started = False
self._port.stop_sky_server()
if self._websockets_server_started:
self._printer.write_update('Stopping WebSocket server ...')
self._websockets_server_started = False
self._port.stop_websocket_server()
def _clean_up_run(self):
_log.debug("Flushing stdout")
sys.stdout.flush()
_log.debug("Flushing stderr")
sys.stderr.flush()
_log.debug("Stopping helper")
self._port.stop_helper()
_log.debug("Cleaning up port")
self._port.clean_up_test_run()
def _force_pixel_tests_if_needed(self):
if self._options.pixel_tests:
return False
_log.debug("Restarting helper")
self._port.stop_helper()
self._options.pixel_tests = True
self._port.start_helper()
return True
def _look_for_new_crash_logs(self, run_results, start_time):
"""Since crash logs can take a long time to be written out if the system is
        under stress, do a second pass at the end of the test run.
run_results: the results of the test run
start_time: time the tests started at. We're looking for crash
logs after that time.
"""
crashed_processes = []
for test, result in run_results.unexpected_results_by_name.iteritems():
if (result.type != test_expectations.CRASH):
continue
for failure in result.failures:
if not isinstance(failure, test_failures.FailureCrash):
continue
crashed_processes.append([test, failure.process_name, failure.pid])
sample_files = self._port.look_for_new_samples(crashed_processes, start_time)
if sample_files:
for test, sample_file in sample_files.iteritems():
writer = TestResultWriter(self._port._filesystem, self._port, self._port.results_directory(), test)
writer.copy_sample_file(sample_file)
crash_logs = self._port.look_for_new_crash_logs(crashed_processes, start_time)
if crash_logs:
for test, crash_log in crash_logs.iteritems():
writer = TestResultWriter(self._port._filesystem, self._port, self._port.results_directory(), test)
writer.write_crash_log(crash_log)
def _clobber_old_results(self):
# Just clobber the actual test results directories since the other
# files in the results directory are explicitly used for cross-run
# tracking.
self._printer.write_update("Clobbering old results in %s" %
self._results_directory)
layout_tests_dir = self._port.layout_tests_dir()
possible_dirs = self._port.test_dirs()
for dirname in possible_dirs:
if self._filesystem.isdir(self._filesystem.join(layout_tests_dir, dirname)):
self._filesystem.rmtree(self._filesystem.join(self._results_directory, dirname))
# Port specific clean-up.
self._port.clobber_old_port_specific_results()
def _tests_to_retry(self, run_results):
return [result.test_name for result in run_results.unexpected_results_by_name.values() if result.type != test_expectations.PASS]
def _write_json_files(self, summarized_full_results, summarized_failing_results, initial_results):
_log.debug("Writing JSON files in %s." % self._results_directory)
# FIXME: Upload stats.json to the server and delete times_ms.
times_trie = json_results_generator.test_timings_trie(initial_results.results_by_name.values())
times_json_path = self._filesystem.join(self._results_directory, "times_ms.json")
json_results_generator.write_json(self._filesystem, times_trie, times_json_path)
stats_trie = self._stats_trie(initial_results)
stats_path = self._filesystem.join(self._results_directory, "stats.json")
self._filesystem.write_text_file(stats_path, json.dumps(stats_trie))
full_results_path = self._filesystem.join(self._results_directory, "full_results.json")
json_results_generator.write_json(self._filesystem, summarized_full_results, full_results_path)
full_results_path = self._filesystem.join(self._results_directory, "failing_results.json")
# We write failing_results.json out as jsonp because we need to load it from a file url for results.html and Chromium doesn't allow that.
json_results_generator.write_json(self._filesystem, summarized_failing_results, full_results_path, callback="ADD_RESULTS")
_log.debug("Finished writing JSON files.")
def _upload_json_files(self):
if not self._options.test_results_server:
return
if not self._options.master_name:
_log.error("--test-results-server was set, but --master-name was not. Not uploading JSON files.")
return
_log.debug("Uploading JSON files for builder: %s", self._options.builder_name)
attrs = [("builder", self._options.builder_name),
("testtype", "Sky tests"),
("master", self._options.master_name)]
files = [(file, self._filesystem.join(self._results_directory, file)) for file in ["failing_results.json", "full_results.json", "times_ms.json"]]
url = "http://%s/testfile/upload" % self._options.test_results_server
# Set uploading timeout in case appengine server is having problems.
# 120 seconds are more than enough to upload test results.
uploader = FileUploader(url, 120)
try:
response = uploader.upload_as_multipart_form_data(self._filesystem, files, attrs)
if response:
if response.code == 200:
_log.debug("JSON uploaded.")
else:
_log.debug("JSON upload failed, %d: '%s'" % (response.code, response.read()))
else:
_log.error("JSON upload failed; no response returned")
except Exception, err:
_log.error("Upload failed: %s" % err)
def _copy_results_html_file(self, destination_path):
base_dir = self._port.path_from_webkit_base('tests', 'resources')
results_file = self._filesystem.join(base_dir, 'results.html')
# Note that the results.html template file won't exist when we're using a MockFileSystem during unit tests,
# so make sure it exists before we try to copy it.
if self._filesystem.exists(results_file):
self._filesystem.copyfile(results_file, destination_path)
def _stats_trie(self, initial_results):
def _worker_number(worker_name):
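            # Worker names typically look like 'worker/<n>'; -1 marks results
            # that were produced without a worker.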
return int(worker_name.split('/')[1]) if worker_name else -1
stats = {}
for result in initial_results.results_by_name.values():
if result.type != test_expectations.SKIP:
stats[result.test_name] = {'results': (_worker_number(result.worker_name), result.test_number, result.pid, int(result.test_run_time * 1000), int(result.total_run_time * 1000))}
stats_trie = {}
for name, value in stats.iteritems():
json_results_generator.add_path_to_trie(name, value, stats_trie)
return stats_trie
| bsd-3-clause | -5,684,303,257,549,471,000 | 48.226804 | 192 | 0.655204 | false |
JudoWill/ResearchNotebooks | ContestWin.py | 1 | 2452 | # -*- coding: utf-8 -*-
# <nbformat>3.0</nbformat>
# <codecell>
from selenium import webdriver
from selenium.common.exceptions import TimeoutException, StaleElementReferenceException
from selenium.webdriver.support.ui import WebDriverWait
import bs4
import urllib2
import time
# <codecell>
def get_current_artist():
station_url = 'http://p2.wrff-fm.ccomrcdn.com/player/player_dispatcher.html?section=radio&action=listen_live'
data = urllib2.urlopen(station_url).read()
soup = bs4.BeautifulSoup(data)
return soup.playercontent.justplayed.song.artist.attrs['name']
print get_current_artist()
# <codecell>
def send_txt_message(chrome_driver, txt_number, message):
new_message_button = chrome_driver.find_element_by_xpath('//*[@id="newSms"]/div[2]')
new_message_button.click()
    to_field = chrome_driver.find_element_by_xpath('//*[@id="selectContactForSingleCompose"]')
    to_field.send_keys(str(txt_number))
    mes_field = chrome_driver.find_element_by_xpath('//*[@id="send-one-text"]')
    mes_field.click()
    mes_field.send_keys(message)
    send_button = chrome_driver.find_element_by_xpath('//*[@id="send-button-single-text"]')
send_button.click()
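# Example call (illustrative; the number below is a placeholder, and a
# logged-in MightyText session must already be open in the passed driver):
#
#     send_txt_message(driver, 2155551234, 'hello from selenium')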
# <codecell>
#profile = webdriver.firefox.firefox_profile.FirefoxProfile('/home/will/.mozilla/firefox/fsg0yfdg.default/')
driver = webdriver.Chrome()
# <codecell>
driver.get('http://mightytext.net')
# <codecell>
tmp = driver.find_element_by_link_text('Login')
tmp.click()
time.sleep(10)
# <codecell>
ntmp = driver.find_element_by_xpath('//*[@id="Email"]')
ntmp.send_keys('judowill')
time.sleep(10)
otmp = driver.find_element_by_xpath('//*[@id="Passwd"]')
otmp.send_keys('judo8675309')
time.sleep(10)
rtmp = driver.find_element_by_xpath('//*[@id="signIn"]')
rtmp.click()
time.sleep(10)
# <codecell>
last_artist = ''
warned = False
while True:
time.sleep(1)
artist = get_current_artist()
if artist != last_artist:
print artist
last_artist = artist
if 'muse' in artist.lower():
print 'PLAYING SONG!'
send_txt_message(driver, 91045, 'Muse')
time.sleep(5)
if not warned:
print 'telling cat!'
try:
send_txt_message(driver, 2157405170, 'playing the Muse, send messages now!!!')
except:
pass
warned = True
time.sleep(5)
time.sleep(60*10)
warned = False
# <codecell>
# <codecell>
| mit | 7,580,593,206,299,301,000 | 24.278351 | 113 | 0.652529 | false |
alerta/python-alerta-client | setup.py | 2 | 1614 | #!/usr/bin/env python
import os
import setuptools
def read(filename):
return open(os.path.join(os.path.dirname(__file__), filename)).read()
setuptools.setup(
name='alerta',
version=read('VERSION'),
description='Alerta unified command-line tool and SDK',
long_description=read('README.md'),
long_description_content_type='text/markdown',
url='https://github.com/guardian/python-alerta',
license='Apache License 2.0',
author='Nick Satterly',
author_email='[email protected]',
packages=setuptools.find_packages(exclude=['tests']),
install_requires=[
'Click',
'requests',
'requests_hawk',
'tabulate',
'pytz'
],
include_package_data=True,
zip_safe=False,
entry_points={
'console_scripts': [
'alerta = alertaclient.cli:cli'
]
},
keywords='alerta client unified command line tool sdk',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Information Technology',
'Intended Audience :: System Administrators',
'Intended Audience :: Telecommunications Industry',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.6',
'Topic :: System :: Monitoring',
'Topic :: Software Development :: Libraries :: Python Modules'
],
python_requires='>=3.6'
)
| apache-2.0 | -4,274,909,027,068,212,000 | 29.45283 | 73 | 0.615861 | false |
platformio/platformio | tests/package/test_pack.py | 1 | 5781 | # Copyright (c) 2014-present PlatformIO <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os
import tarfile
import pytest
from platformio import fs
from platformio.compat import WINDOWS
from platformio.package.exception import UnknownManifestError
from platformio.package.pack import PackagePacker
def test_base(tmpdir_factory):
pkg_dir = tmpdir_factory.mktemp("package")
pkg_dir.join(".git").mkdir().join("file").write("")
pkg_dir.join(".gitignore").write("tests")
pkg_dir.join("._ignored").write("")
pkg_dir.join("main.cpp").write("#include <stdio.h>")
p = PackagePacker(str(pkg_dir))
# test missed manifest
with pytest.raises(UnknownManifestError):
p.pack()
# minimal package
pkg_dir.join("library.json").write('{"name": "foo", "version": "1.0.0"}')
pkg_dir.mkdir("include").join("main.h").write("#ifndef")
with fs.cd(str(pkg_dir)):
p.pack()
with tarfile.open(os.path.join(str(pkg_dir), "foo-1.0.0.tar.gz"), "r:gz") as tar:
assert set(tar.getnames()) == set(
[".gitignore", "include/main.h", "library.json", "main.cpp"]
)
def test_filters(tmpdir_factory):
pkg_dir = tmpdir_factory.mktemp("package")
src_dir = pkg_dir.mkdir("src")
src_dir.join("main.cpp").write("#include <stdio.h>")
src_dir.mkdir("util").join("helpers.cpp").write("void")
pkg_dir.mkdir("include").join("main.h").write("#ifndef")
test_dir = pkg_dir.mkdir("tests")
test_dir.join("test_1.h").write("")
test_dir.join("test_2.h").write("")
# test include with remap of root
pkg_dir.join("library.json").write(
json.dumps(dict(name="bar", version="1.2.3", export={"include": "src"}))
)
p = PackagePacker(str(pkg_dir))
with tarfile.open(p.pack(str(pkg_dir)), "r:gz") as tar:
assert set(tar.getnames()) == set(["util/helpers.cpp", "main.cpp"])
# test include "src" and "include"
pkg_dir.join("library.json").write(
json.dumps(
dict(name="bar", version="1.2.3", export={"include": ["src", "include"]})
)
)
p = PackagePacker(str(pkg_dir))
with tarfile.open(p.pack(str(pkg_dir)), "r:gz") as tar:
assert set(tar.getnames()) == set(
["include/main.h", "library.json", "src/main.cpp", "src/util/helpers.cpp"]
)
# test include & exclude
pkg_dir.join("library.json").write(
json.dumps(
dict(
name="bar",
version="1.2.3",
export={"include": ["src", "include"], "exclude": ["*/*.h"]},
)
)
)
p = PackagePacker(str(pkg_dir))
with tarfile.open(p.pack(str(pkg_dir)), "r:gz") as tar:
assert set(tar.getnames()) == set(
["library.json", "src/main.cpp", "src/util/helpers.cpp"]
)
def test_symlinks(tmpdir_factory):
# Windows does not support symbolic links
if WINDOWS:
return
pkg_dir = tmpdir_factory.mktemp("package")
src_dir = pkg_dir.mkdir("src")
src_dir.join("main.cpp").write("#include <stdio.h>")
pkg_dir.mkdir("include").join("main.h").write("#ifndef")
src_dir.join("main.h").mksymlinkto(os.path.join("..", "include", "main.h"))
pkg_dir.join("library.json").write('{"name": "bar", "version": "2.0.0"}')
tarball = pkg_dir.join("bar.tar.gz")
with tarfile.open(str(tarball), "w:gz") as tar:
for item in pkg_dir.listdir():
tar.add(str(item), str(item.relto(pkg_dir)))
p = PackagePacker(str(tarball))
assert p.pack(str(pkg_dir)).endswith("bar-2.0.0.tar.gz")
with tarfile.open(os.path.join(str(pkg_dir), "bar-2.0.0.tar.gz"), "r:gz") as tar:
assert set(tar.getnames()) == set(
["include/main.h", "library.json", "src/main.cpp", "src/main.h"]
)
m = tar.getmember("src/main.h")
assert m.issym()
def test_source_root(tmpdir_factory):
pkg_dir = tmpdir_factory.mktemp("package")
root_dir = pkg_dir.mkdir("root")
src_dir = root_dir.mkdir("src")
src_dir.join("main.cpp").write("#include <stdio.h>")
root_dir.join("library.json").write('{"name": "bar", "version": "2.0.0"}')
p = PackagePacker(str(pkg_dir))
with tarfile.open(p.pack(str(pkg_dir)), "r:gz") as tar:
assert set(tar.getnames()) == set(["library.json", "src/main.cpp"])
def test_manifest_uri(tmpdir_factory):
pkg_dir = tmpdir_factory.mktemp("package")
root_dir = pkg_dir.mkdir("root")
src_dir = root_dir.mkdir("src")
src_dir.join("main.cpp").write("#include <stdio.h>")
root_dir.join("library.json").write('{"name": "foo", "version": "1.0.0"}')
bar_dir = root_dir.mkdir("library").mkdir("bar")
bar_dir.join("library.json").write('{"name": "bar", "version": "2.0.0"}')
bar_dir.mkdir("include").join("bar.h").write("")
manifest_path = pkg_dir.join("remote_library.json")
manifest_path.write(
'{"name": "bar", "version": "3.0.0", "export": {"include": "root/library/bar"}}'
)
p = PackagePacker(str(pkg_dir), manifest_uri="file:%s" % manifest_path)
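    # The remote manifest only scopes packing to root/library/bar; the
    # library.json found inside that directory (version 2.0.0) still
    # determines the archive name asserted below.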
p.pack(str(pkg_dir))
with tarfile.open(os.path.join(str(pkg_dir), "bar-2.0.0.tar.gz"), "r:gz") as tar:
assert set(tar.getnames()) == set(["library.json", "include/bar.h"])
| apache-2.0 | -2,632,341,368,601,307,600 | 37.798658 | 88 | 0.61114 | false |
i2y/mochi | mochi/core/constants.py | 3 | 2266 | import ast
from .global_env import global_env
from mochi.parser import Symbol
# -----------------------------------------------
# EOF Class
class Eof(object):
def __repr__(self):
return "EOF"
def __str__(self):
return "EOF"
#-----------------------------------------------
# constants
EOF = Eof()  # original: EOF = "EOF"
QUOTE = Symbol('quote')
QUASIQUOTE = Symbol('quasiquote')
UNQUOTE = Symbol('unquote')
UNQUOTE_SPLICING = Symbol('unquote_splicing')
SPLICING = Symbol('splicing')
VARG = Symbol('&')
VKWARG = Symbol('&&')
VAL = Symbol('val')
GET = Symbol('get')
FN = Symbol('fn')
ARGS = Symbol('args')
UNDERSCORE = Symbol('_')
LEN = Symbol('len')
IF = Symbol('if')
ELSE = Symbol('else')
LPARA = Symbol('(')
RPARA = Symbol(')')
NONE_SYM = Symbol('None')
EMPTY = ()
EMPTY_SYM = Symbol('EMPTY')
TABLE = Symbol('table')
PMAP = Symbol('pmap')
DEF = Symbol('def')
MAKE_TUPLE = Symbol('make_tuple')
MAKE_LIST = Symbol('make_list')
MAKE_DICT = Symbol('dict*')
WITH_DECORATOR = Symbol('with_decorator')
RE_COMPILE = Symbol('re.compile')
GET_REF = Symbol('get!')
RECORD_SYM = Symbol('Record')
OBJECT_SYM = Symbol('object')
global_env['EMPTY'] = EMPTY
#-----------------------------------------------
class Comment(object):
def __str__(self):
return "comment"
def __repr__(self):
return "comment"
COMMENT = Comment()
class Char(object):
def __init__(self, str, lineno=0):
self.value = str
self.lineno = lineno
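# Maps Mochi's surface operator symbols to the Python ast nodes used when
# translating Mochi expressions into Python AST.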
op_ast_map = {'+': ast.Add(),
'-': ast.Sub(),
'*': ast.Mult(),
'/': ast.Div(),
'%': ast.Mod(),
'**': ast.Pow(),
'<<': ast.LShift(),
'>>': ast.RShift(),
'|': ast.BitOr(),
'^^': ast.BitXor(),
'&&': ast.BitAnd(),
'//': ast.FloorDiv(),
'==': ast.Eq(),
'!=': ast.NotEq(),
'<': ast.Lt(),
'<=': ast.LtE(),
'>': ast.Gt(),
'>=': ast.GtE(),
'is': ast.Is(),
'is_not': ast.IsNot(),
'in': ast.In(),
'not_in': ast.NotIn(),
'and': ast.And(),
'or': ast.Or()} | mit | 5,757,378,737,250,850,000 | 23.117021 | 49 | 0.4594 | false |
gening/markdown_note | markdown_note/note_rmdir.py | 1 | 1311 | # coding: utf-8
"""
authors: gening
date: 2017-08-15 18:45:24
version: 1.0.1
desc: delete an empty folder detached to its markdown file.
usage: ./note_rmdir.py <md_file_name>
"""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import os
import sys
sys.path.append(os.path.join(os.path.split(os.path.abspath(__file__))[0], '..'))
from markdown_note.note_lib import FOLDER_SUFFIX
from markdown_note.note_lib import parse_file_name
from markdown_note.note_lib import clean_empty_folder
from markdown_note.note_lib import str_decode_utf8
def note_rmdir(filename):
# validate filename name
if not os.path.exists(filename):
raise Exception('%s: No such file' % filename)
# clean folder
dir_path, base_name, ext_name = parse_file_name(filename)
folder_name = base_name + FOLDER_SUFFIX
clean_empty_folder(os.path.join(dir_path, folder_name))
def main():
if len(sys.argv) == 2:
try:
markdown_file = str_decode_utf8(sys.argv[1])
note_rmdir(markdown_file)
return 0
except Exception as e:
print(e)
else:
print('usage: note-rmdir <file>')
return 1
if __name__ == '__main__':
exit_code = main()
sys.exit(exit_code)
| mit | -8,507,560,112,559,617,000 | 25.755102 | 80 | 0.654462 | false |
offlinehacker/flumotion | flumotion/component/muxers/webm.py | 1 | 1434 | # -*- Mode: Python -*-
# vi:si:et:sw=4:sts=4:ts=4
#
# Flumotion - a streaming media server
# Copyright (C) 2004,2005,2006,2007 Fluendo, S.L. (www.fluendo.com).
# All rights reserved.
# This file may be distributed and/or modified under the terms of
# the GNU General Public License version 2 as published by
# the Free Software Foundation.
# This file is distributed without any warranty; without even the implied
# warranty of merchantability or fitness for a particular purpose.
# See "LICENSE.GPL" in the source distribution for more information.
# Licensees having purchased or holding a valid Flumotion Advanced
# Streaming Server license may use this file in accordance with the
# Flumotion Advanced Streaming Server Commercial License Agreement.
# See "LICENSE.Flumotion" in the source distribution for more information.
# Headers in this file shall remain intact.
from flumotion.common import messages
from flumotion.common.i18n import N_, gettexter
from flumotion.component import feedcomponent
from flumotion.worker.checks import check
__version__ = "$Rev$"
T_ = gettexter()
class WebM(feedcomponent.MuxerComponent):
checkTimestamp = True
def do_check(self):
return check.do_check(self, check.checkPlugin, 'matroska',
'gst-plugins-good', (0, 10, 24))
def get_muxer_string(self, properties):
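        # streamable=true keeps webmmux from seeking back to write cues and
        # duration, which non-seekable (live) output requires.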
muxer = 'webmmux name=muxer streamable=true'
return muxer
| gpl-2.0 | -1,925,629,782,845,716,200 | 34.85 | 74 | 0.736402 | false |
SimpleITK/SimpleITK | Examples/ImageRegistrationMethod1/ImageRegistrationMethod1.py | 4 | 2327 | #!/usr/bin/env python
# =========================================================================
#
# Copyright NumFOCUS
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0.txt
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# =========================================================================
import SimpleITK as sitk
import sys
import os
def command_iteration(method):
print(f"{method.GetOptimizerIteration():3} = {method.GetMetricValue():10.5f} : {method.GetOptimizerPosition()}")
if len(sys.argv) < 4:
print("Usage:", sys.argv[0], "<fixedImageFilter> <movingImageFile>",
"<outputTransformFile>")
sys.exit(1)
fixed = sitk.ReadImage(sys.argv[1], sitk.sitkFloat32)
moving = sitk.ReadImage(sys.argv[2], sitk.sitkFloat32)
R = sitk.ImageRegistrationMethod()
R.SetMetricAsMeanSquares()
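# The positional arguments below map to (learningRate=4.0, minStep=0.01,
# numberOfIterations=200) for the regular step gradient descent optimizer.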
R.SetOptimizerAsRegularStepGradientDescent(4.0, .01, 200)
R.SetInitialTransform(sitk.TranslationTransform(fixed.GetDimension()))
R.SetInterpolator(sitk.sitkLinear)
R.AddCommand(sitk.sitkIterationEvent, lambda: command_iteration(R))
outTx = R.Execute(fixed, moving)
print("-------")
print(outTx)
print(f"Optimizer stop condition: {R.GetOptimizerStopConditionDescription()}")
print(f" Iteration: {R.GetOptimizerIteration()}")
print(f" Metric value: {R.GetMetricValue()}")
sitk.WriteTransform(outTx, sys.argv[3])
if ("SITK_NOSHOW" not in os.environ):
resampler = sitk.ResampleImageFilter()
resampler.SetReferenceImage(fixed)
resampler.SetInterpolator(sitk.sitkLinear)
resampler.SetDefaultPixelValue(100)
resampler.SetTransform(outTx)
out = resampler.Execute(moving)
simg1 = sitk.Cast(sitk.RescaleIntensity(fixed), sitk.sitkUInt8)
simg2 = sitk.Cast(sitk.RescaleIntensity(out), sitk.sitkUInt8)
cimg = sitk.Compose(simg1, simg2, simg1 // 2. + simg2 // 2.)
sitk.Show(cimg, "ImageRegistration1 Composition")
| apache-2.0 | -1,651,407,721,188,440,800 | 33.731343 | 116 | 0.691018 | false |
CanonicalLtd/landscape-client | landscape/client/manager/tests/test_plugin.py | 1 | 4110 | from twisted.internet.defer import Deferred
from landscape.client.tests.helpers import LandscapeTest
from landscape.client.tests.helpers import ManagerHelper
from landscape.client.manager.plugin import ManagerPlugin, SUCCEEDED, FAILED
class BrokerPluginTest(LandscapeTest):
helpers = [ManagerHelper]
def test_call_with_operation_result_success(self):
"""
A helper method exists which calls a function and sends an
operation-result message based on the success of that method.
"""
plugin = ManagerPlugin()
plugin.register(self.manager)
broker_service = self.broker_service
broker_service.message_store.set_accepted_types(["operation-result"])
message = {"operation-id": 12312}
operation = (lambda: None)
def assert_messages(ignored):
messages = broker_service.message_store.get_pending_messages()
self.assertMessages(messages,
[{"type": "operation-result",
"status": SUCCEEDED,
"operation-id": 12312}])
result = plugin.call_with_operation_result(message, operation)
return result.addCallback(assert_messages)
def test_call_with_operation_result_error(self):
"""
The helper for operation-results sends an appropriate message when an
exception is raised from the given method.
"""
self.log_helper.ignore_errors(RuntimeError)
plugin = ManagerPlugin()
plugin.register(self.manager)
broker_service = self.broker_service
broker_service.message_store.set_accepted_types(["operation-result"])
message = {"operation-id": 12312}
def operation():
raise RuntimeError("What the crap!")
def assert_messages(ignored):
messages = broker_service.message_store.get_pending_messages()
self.assertMessages(messages,
[{"type": "operation-result", "status": FAILED,
"result-text": "RuntimeError: What the "
"crap!", "operation-id": 12312}])
logdata = self.logfile.getvalue()
self.assertTrue("RuntimeError: What the crap!" in logdata, logdata)
result = plugin.call_with_operation_result(message, operation)
return result.addCallback(assert_messages)
def test_call_with_operation_result_exchanges_urgently(self):
"""
Operation results are reported to the server as quickly as possible.
"""
plugin = ManagerPlugin()
plugin.register(self.manager)
broker_service = self.broker_service
broker_service.message_store.set_accepted_types(["operation-result"])
message = {"operation-id": 123}
operation = (lambda: None)
def assert_urgency(ignored):
self.assertTrue(broker_service.exchanger.is_urgent())
result = plugin.call_with_operation_result(message, operation)
return result.addCallback(assert_urgency)
def test_callable_returning_a_deferred(self):
"""
The callable parameter can return a C{Deferred}.
"""
plugin = ManagerPlugin()
plugin.register(self.manager)
broker_service = self.broker_service
broker_service.message_store.set_accepted_types(["operation-result"])
message = {"operation-id": 12312}
deferred = Deferred()
operation = (lambda: deferred)
def assert_messages(ignored):
messages = broker_service.message_store.get_pending_messages()
self.assertMessages(messages,
[{"type": "operation-result",
"result-text": "blah",
"status": SUCCEEDED,
"operation-id": 12312}])
result = plugin.call_with_operation_result(message, operation)
result.addCallback(assert_messages)
deferred.callback("blah")
return result
| gpl-2.0 | 551,846,246,184,726,800 | 39.693069 | 79 | 0.608029 | false |
RasaHQ/rasa_core | tests/core/test_dialogues.py | 1 | 1367 | import json
import jsonpickle
import pytest
from rasa.core import utils
from rasa.core.domain import Domain
from rasa.core.tracker_store import InMemoryTrackerStore
from tests.core.utilities import tracker_from_dialogue_file
from tests.core.conftest import TEST_DIALOGUES, EXAMPLE_DOMAINS
@pytest.mark.parametrize("filename", TEST_DIALOGUES)
def test_dialogue_serialisation(filename):
dialogue_json = utils.read_file(filename)
restored = json.loads(dialogue_json)
tracker = tracker_from_dialogue_file(filename)
en_de_coded = json.loads(jsonpickle.encode(tracker.as_dialogue()))
assert restored == en_de_coded
@pytest.mark.parametrize("pair", zip(TEST_DIALOGUES, EXAMPLE_DOMAINS))
def test_inmemory_tracker_store(pair):
filename, domainpath = pair
domain = Domain.load(domainpath)
tracker = tracker_from_dialogue_file(filename, domain)
tracker_store = InMemoryTrackerStore(domain)
tracker_store.save(tracker)
restored = tracker_store.retrieve(tracker.sender_id)
assert restored == tracker
def test_tracker_restaurant():
domain = Domain.load("examples/restaurantbot/domain.yml")
filename = 'data/test_dialogues/restaurantbot.json'
tracker = tracker_from_dialogue_file(filename, domain)
assert tracker.get_slot("price") == "lo"
assert tracker.get_slot("name") is None # slot doesn't exist!
| apache-2.0 | -4,248,341,281,015,927,000 | 34.973684 | 70 | 0.752743 | false |
ikargis/horizon_fod | horizon/test/urls.py | 3 | 1545 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
URL patterns for testing Horizon views.
"""
from django.conf.urls import include # noqa
from django.conf.urls import patterns # noqa
from django.conf.urls import url # noqa
from django.contrib.staticfiles.urls import staticfiles_urlpatterns # noqa
from django.views.generic import TemplateView # noqa
import horizon
urlpatterns = patterns('',
url(r'', include(horizon.urls)),
url(r"auth/login/", "django.contrib.auth.views.login",
{'template_name': "auth/login.html"},
name='login'),
url(r'auth/', include('django.contrib.auth.urls')),
url(r'^qunit/$',
TemplateView.as_view(template_name="horizon/qunit.html"),
name='qunit_tests')
)
urlpatterns += staticfiles_urlpatterns()
| apache-2.0 | 691,911,654,505,577,200 | 33.333333 | 78 | 0.718447 | false |
umitproject/umpa | umit/umpa/protocols/_layer4.py | 1 | 4489 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2008-2009 Adriano Monteiro Marques.
#
# Author: Bartosz SKOWRON <getxsick at gmail dot com>
#
# This library is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation; either version 2.1 of the License, or
# (at your option) any later version.
#
# This library is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this library; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
Useful classes for 4th-layer (transport) protocols.

TCP and UDP calculate their checksums over a special pseudo header; the
classes that provide it are defined here.
"""
from umit.umpa.protocols._protocols import Protocol
from umit.umpa.protocols.IP import IP
from umit.umpa.protocols._fields import IntField, IPv4AddrField, IPv6AddrField
class Layer4ChecksumField(IntField):
"""
    A checksum field for protocols of the 4th (transport) layer of the OSI
    model, notably UDP and TCP. The checksum is calculated from the pseudo
    header, the main header and the payload.
"""
bits = 16
auto = True
def _generate_value(self):
"""
        Generate a value for the field if it has not been defined yet.
@return: auto-generated value of the field.
"""
return 0
class PseudoHeader(Protocol):
"""
    This is the Pseudo Header.

    It is useful for protocols such as TCP and UDP: the pseudo header is
    prefixed to the real protocol header before their checksum is calculated.
"""
_ordered_fields = ('src', 'dst', 'reserved',
'protocol_id', 'total_length')
def __init__(self, protocol_id, total_length):
"""
Create a new PseudoHeader()
@type protocol_id: C{int}
        @param protocol_id: id of the protocol which uses PseudoHeader.
@type total_length: C{int}
@param total_length: length of the real header and payload.
"""
fields_list = [ IPv4AddrField("Source Address"),
IPv4AddrField("Destination Address"),
IntField("Reserved", 0, bits=8),
IntField("Protocol", protocol_id, bits=8),
IntField("Total Length", total_length, bits=16) ]
super(PseudoHeader, self).__init__(fields_list)
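        # Minimal usage sketch (assuming TCP, whose IP protocol number is 6;
        # the length names are placeholders):
        #
        #     ph = PseudoHeader(6, tcp_header_len + payload_len)
        #
        # _pre_raw() later fills src/dst from the enclosing IP header.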
def _pre_raw(self, raw_value, bit, protocol_container, protocol_bits):
"""
Handle with fields before calling fillout() for them.
Parse lower protocol (usually IP) to get source/destination address.
@type raw_value: C{int}
@param raw_value: currently raw value for the packet.
@type bit: C{int}
@param bit: currently length of the protocol.
@type protocol_container: C{tuple}
@param protocol_container: tuple of protocols included in the packet.
@type protocol_bits: C{int}
@param protocol_bits: currently length of the packet.
@return: C{raw_value, bit}
"""
        # assign localhost first, in case there is no IP instance below
self.src = "127.0.0.1"
self.dst = "127.0.0.1"
        # grab information from the IP header
it = iter(protocol_container)
for proto in it:
if isinstance(proto, IP):
self.src = proto.src
self.dst = proto.dst
break
return raw_value, bit
def _post_raw(self, raw_value, bit, protocol_container, protocol_bits):
"""
Handle with fields after calling fillout() for them.
Nothing to do for PseudoHeader class here. Return required vars.
@type raw_value: C{int}
@param raw_value: currently raw value for the packet.
@type bit: C{int}
@param bit: currently length of the protocol.
@type protocol_container: C{tuple}
@param protocol_container: tuple of protocols included in the packet.
@type protocol_bits: C{int}
@param protocol_bits: currently length of the packet.
@return: C{raw_value, bit}
"""
return raw_value, bit
| lgpl-2.1 | -1,053,505,410,817,154,600 | 31.528986 | 78 | 0.638672 | false |
wbyne/QGIS | python/plugins/processing/algs/qgis/LinesIntersection.py | 1 | 5177 | # -*- coding: utf-8 -*-
"""
***************************************************************************
LinesIntersection.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from qgis.PyQt.QtGui import QIcon
from qgis.core import Qgis, QgsFeatureRequest, QgsFeature, QgsGeometry, QgsWkbTypes
from processing.core.GeoAlgorithm import GeoAlgorithm
from processing.core.parameters import ParameterVector
from processing.core.parameters import ParameterTableField
from processing.core.outputs import OutputVector
from processing.tools import dataobjects, vector
pluginPath = os.path.split(os.path.split(os.path.dirname(__file__))[0])[0]
class LinesIntersection(GeoAlgorithm):
INPUT_A = 'INPUT_A'
INPUT_B = 'INPUT_B'
FIELD_A = 'FIELD_A'
FIELD_B = 'FIELD_B'
OUTPUT = 'OUTPUT'
def getIcon(self):
return QIcon(os.path.join(pluginPath, 'images', 'ftools', 'lines_intersection.png'))
def defineCharacteristics(self):
self.name, self.i18n_name = self.trAlgorithm('Line intersections')
self.group, self.i18n_group = self.trAlgorithm('Vector overlay tools')
self.addParameter(ParameterVector(self.INPUT_A,
self.tr('Input layer'), [dataobjects.TYPE_VECTOR_LINE]))
self.addParameter(ParameterVector(self.INPUT_B,
self.tr('Intersect layer'), [dataobjects.TYPE_VECTOR_LINE]))
self.addParameter(ParameterTableField(
self.FIELD_A,
self.tr('Input unique ID field'),
self.INPUT_A,
optional=True))
self.addParameter(ParameterTableField(
self.FIELD_B,
self.tr('Intersect unique ID field'),
self.INPUT_B,
optional=True))
self.addOutput(OutputVector(self.OUTPUT, self.tr('Intersections'), datatype=[dataobjects.TYPE_VECTOR_POINT]))
def processAlgorithm(self, progress):
layerA = dataobjects.getObjectFromUri(self.getParameterValue(self.INPUT_A))
layerB = dataobjects.getObjectFromUri(self.getParameterValue(self.INPUT_B))
fieldA = self.getParameterValue(self.FIELD_A)
fieldB = self.getParameterValue(self.FIELD_B)
idxA = layerA.fields().lookupField(fieldA)
idxB = layerB.fields().lookupField(fieldB)
fieldList = [layerA.fields()[idxA],
layerB.fields()[idxB]]
writer = self.getOutputFromName(self.OUTPUT).getVectorWriter(fieldList,
QgsWkbTypes.Point, layerA.crs())
spatialIndex = vector.spatialindex(layerB)
outFeat = QgsFeature()
features = vector.features(layerA)
        total = 100.0 / len(features) if len(features) > 0 else 0
hasIntersections = False
for current, inFeatA in enumerate(features):
inGeom = inFeatA.geometry()
hasIntersections = False
lines = spatialIndex.intersects(inGeom.boundingBox())
if len(lines) > 0:
hasIntersections = True
if hasIntersections:
for i in lines:
request = QgsFeatureRequest().setFilterFid(i)
inFeatB = next(layerB.getFeatures(request))
tmpGeom = inFeatB.geometry()
points = []
attrsA = inFeatA.attributes()
attrsB = inFeatB.attributes()
if inGeom.intersects(tmpGeom):
tempGeom = inGeom.intersection(tmpGeom)
if tempGeom.type() == QgsWkbTypes.PointGeometry:
if tempGeom.isMultipart():
points = tempGeom.asMultiPoint()
else:
points.append(tempGeom.asPoint())
for j in points:
                                outFeat.setGeometry(QgsGeometry.fromPoint(j))
outFeat.setAttributes([attrsA[idxA],
attrsB[idxB]])
writer.addFeature(outFeat)
progress.setPercentage(int(current * total))
del writer
| gpl-2.0 | 5,613,628,603,960,018,000 | 38.219697 | 117 | 0.532355 | false |
insiderr/insiderr-app | ios-patches/basemodules/twisted/words/protocols/irc.py | 26 | 115360 | # -*- test-case-name: twisted.words.test.test_irc -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Internet Relay Chat protocol for client and server.
Future Plans
============
The way the IRCClient class works here encourages people to implement
IRC clients by subclassing the ephemeral protocol class, and it tends
to end up with way more state than it should for an object which will
be destroyed as soon as the TCP transport drops. Someone oughta do
something about that, ya know?
The DCC support needs to have more hooks for the client for it to be
able to ask the user things like "Do you want to accept this session?"
and "Transfer #2 is 67% done." and otherwise manage the DCC sessions.
Test coverage needs to be better.
@var MAX_COMMAND_LENGTH: The maximum length of a command, as defined by RFC
2812 section 2.3.
@var attributes: Singleton instance of L{_CharacterAttributes}, used for
constructing formatted text information.
@author: Kevin Turner
@see: RFC 1459: Internet Relay Chat Protocol
@see: RFC 2812: Internet Relay Chat: Client Protocol
@see: U{The Client-To-Client-Protocol
<http://www.irchelp.org/irchelp/rfc/ctcpspec.html>}
"""
import errno, os, random, re, stat, struct, sys, time, types, traceback
import operator
import string, socket
import textwrap
import shlex
from os import path
from twisted.internet import reactor, protocol, task
from twisted.persisted import styles
from twisted.protocols import basic
from twisted.python import log, reflect, _textattributes
NUL = chr(0)
CR = chr(015)
NL = chr(012)
LF = NL
SPC = chr(040)
# This includes the CRLF terminator characters.
MAX_COMMAND_LENGTH = 512
CHANNEL_PREFIXES = '&#!+'
class IRCBadMessage(Exception):
pass
class IRCPasswordMismatch(Exception):
pass
class IRCBadModes(ValueError):
"""
A malformed mode was encountered while attempting to parse a mode string.
"""
def parsemsg(s):
"""Breaks a message from an IRC server into its prefix, command, and arguments.
"""
prefix = ''
trailing = []
if not s:
raise IRCBadMessage("Empty line.")
if s[0] == ':':
prefix, s = s[1:].split(' ', 1)
if s.find(' :') != -1:
s, trailing = s.split(' :', 1)
args = s.split()
args.append(trailing)
else:
args = s.split()
command = args.pop(0)
return prefix, command, args
def split(str, length=80):
"""
Split a string into multiple lines.
Whitespace near C{str[length]} will be preferred as a breaking point.
C{"\\n"} will also be used as a breaking point.
@param str: The string to split.
@type str: C{str}
@param length: The maximum length which will be allowed for any string in
the result.
@type length: C{int}
@return: C{list} of C{str}
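    For example (illustrative input)::
        >>> split('foo bar\\nbaz', length=7)
        ['foo bar', 'baz']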
"""
return [chunk
for line in str.split('\n')
for chunk in textwrap.wrap(line, length)]
def _intOrDefault(value, default=None):
"""
Convert a value to an integer if possible.
@rtype: C{int} or type of L{default}
@return: An integer when C{value} can be converted to an integer,
otherwise return C{default}
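    For example::
        >>> _intOrDefault('42'), _intOrDefault('x', default=0)
        (42, 0)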
"""
if value:
try:
return int(value)
except (TypeError, ValueError):
pass
return default
class UnhandledCommand(RuntimeError):
"""
A command dispatcher could not locate an appropriate command handler.
"""
class _CommandDispatcherMixin(object):
"""
Dispatch commands to handlers based on their name.
Command handler names should be of the form C{prefix_commandName},
where C{prefix} is the value specified by L{prefix}, and must
accept the parameters as given to L{dispatch}.
Attempting to mix this in more than once for a single class will cause
strange behaviour, due to L{prefix} being overwritten.
@type prefix: C{str}
@ivar prefix: Command handler prefix, used to locate handler attributes
"""
prefix = None
def dispatch(self, commandName, *args):
"""
Perform actual command dispatch.
"""
def _getMethodName(command):
return '%s_%s' % (self.prefix, command)
def _getMethod(name):
return getattr(self, _getMethodName(name), None)
method = _getMethod(commandName)
if method is not None:
return method(*args)
method = _getMethod('unknown')
if method is None:
raise UnhandledCommand("No handler for %r could be found" % (_getMethodName(commandName),))
return method(commandName, *args)
def parseModes(modes, params, paramModes=('', '')):
"""
Parse an IRC mode string.
The mode string is parsed into two lists of mode changes (added and
removed), with each mode change represented as C{(mode, param)} where mode
is the mode character, and param is the parameter passed for that mode, or
C{None} if no parameter is required.
@type modes: C{str}
@param modes: Modes string to parse.
@type params: C{list}
@param params: Parameters specified along with L{modes}.
@type paramModes: C{(str, str)}
@param paramModes: A pair of strings (C{(add, remove)}) that indicate which modes take
parameters when added or removed.
@returns: Two lists of mode changes, one for modes added and the other for
modes removed respectively, mode changes in each list are represented as
C{(mode, param)}.
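    For example (an illustrative mode string and parameters)::
        >>> parseModes('+ov-b', ['alice', 'bob', '*!*@example.com'],
        ...            paramModes=('bov', 'bov'))
        ([('o', 'alice'), ('v', 'bob')], [('b', '*!*@example.com')])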
"""
if len(modes) == 0:
raise IRCBadModes('Empty mode string')
if modes[0] not in '+-':
raise IRCBadModes('Malformed modes string: %r' % (modes,))
changes = ([], [])
direction = None
count = -1
for ch in modes:
if ch in '+-':
if count == 0:
raise IRCBadModes('Empty mode sequence: %r' % (modes,))
direction = '+-'.index(ch)
count = 0
else:
param = None
if ch in paramModes[direction]:
try:
param = params.pop(0)
except IndexError:
raise IRCBadModes('Not enough parameters: %r' % (ch,))
changes[direction].append((ch, param))
count += 1
if len(params) > 0:
raise IRCBadModes('Too many parameters: %r %r' % (modes, params))
if count == 0:
raise IRCBadModes('Empty mode sequence: %r' % (modes,))
return changes
class IRC(protocol.Protocol):
"""
Internet Relay Chat server protocol.
"""
buffer = ""
hostname = None
encoding = None
def connectionMade(self):
self.channels = []
if self.hostname is None:
self.hostname = socket.getfqdn()
def sendLine(self, line):
if self.encoding is not None:
if isinstance(line, unicode):
line = line.encode(self.encoding)
self.transport.write("%s%s%s" % (line, CR, LF))
def sendMessage(self, command, *parameter_list, **prefix):
"""
Send a line formatted as an IRC message.
First argument is the command, all subsequent arguments are parameters
to that command. If a prefix is desired, it may be specified with the
keyword argument 'prefix'.
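        For example, a hypothetical reply to a PING; this writes
        C{:irc.example.com PONG server.example.com} to the transport::
            self.sendMessage('PONG', 'server.example.com',
                             prefix='irc.example.com')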
"""
if not command:
raise ValueError("IRC message requires a command.")
if ' ' in command or command[0] == ':':
# Not the ONLY way to screw up, but provides a little
# sanity checking to catch likely dumb mistakes.
raise ValueError("Somebody screwed up, 'cuz this doesn't" \
" look like a command to me: %s" % command)
line = ' '.join([command] + list(parameter_list))
if 'prefix' in prefix:
line = ":%s %s" % (prefix['prefix'], line)
self.sendLine(line)
if len(parameter_list) > 15:
log.msg("Message has %d parameters (RFC allows 15):\n%s" %
(len(parameter_list), line))
def dataReceived(self, data):
"""
This hack is to support mIRC, which sends LF only, even though the RFC
says CRLF. (Also, the flexibility of LineReceiver to turn "line mode"
on and off was not required.)
"""
lines = (self.buffer + data).split(LF)
# Put the (possibly empty) element after the last LF back in the
# buffer
self.buffer = lines.pop()
for line in lines:
if len(line) <= 2:
# This is a blank line, at best.
continue
if line[-1] == CR:
line = line[:-1]
prefix, command, params = parsemsg(line)
# mIRC is a big pile of doo-doo
command = command.upper()
# DEBUG: log.msg( "%s %s %s" % (prefix, command, params))
self.handleCommand(command, prefix, params)
def handleCommand(self, command, prefix, params):
"""
Determine the function to call for the given command and call it with
the given arguments.
"""
method = getattr(self, "irc_%s" % command, None)
try:
if method is not None:
method(prefix, params)
else:
self.irc_unknown(prefix, command, params)
except:
log.deferr()
def irc_unknown(self, prefix, command, params):
"""
Called by L{handleCommand} on a command that doesn't have a defined
handler. Subclasses should override this method.
"""
raise NotImplementedError(command, prefix, params)
# Helper methods
def privmsg(self, sender, recip, message):
"""
Send a message to a channel or user
@type sender: C{str} or C{unicode}
@param sender: Who is sending this message. Should be of the form
username!ident@hostmask (unless you know better!).
@type recip: C{str} or C{unicode}
@param recip: The recipient of this message. If a channel, it must
start with a channel prefix.
@type message: C{str} or C{unicode}
@param message: The message being sent.
"""
self.sendLine(":%s PRIVMSG %s :%s" % (sender, recip, lowQuote(message)))
def notice(self, sender, recip, message):
"""
Send a "notice" to a channel or user.
Notices differ from privmsgs in that the RFC claims they are different.
Robots are supposed to send notices and not respond to them. Clients
typically display notices differently from privmsgs.
@type sender: C{str} or C{unicode}
@param sender: Who is sending this message. Should be of the form
username!ident@hostmask (unless you know better!).
@type recip: C{str} or C{unicode}
@param recip: The recipient of this message. If a channel, it must
start with a channel prefix.
@type message: C{str} or C{unicode}
@param message: The message being sent.
"""
self.sendLine(":%s NOTICE %s :%s" % (sender, recip, message))
def action(self, sender, recip, message):
"""
Send an action to a channel or user.
@type sender: C{str} or C{unicode}
@param sender: Who is sending this message. Should be of the form
username!ident@hostmask (unless you know better!).
@type recip: C{str} or C{unicode}
@param recip: The recipient of this message. If a channel, it must
start with a channel prefix.
@type message: C{str} or C{unicode}
@param message: The action being sent.
"""
self.sendLine(":%s ACTION %s :%s" % (sender, recip, message))
def topic(self, user, channel, topic, author=None):
"""
Send the topic to a user.
@type user: C{str} or C{unicode}
@param user: The user receiving the topic. Only their nick name, not
the full hostmask.
@type channel: C{str} or C{unicode}
@param channel: The channel for which this is the topic.
@type topic: C{str} or C{unicode} or C{None}
@param topic: The topic string, unquoted, or None if there is no topic.
@type author: C{str} or C{unicode}
@param author: If the topic is being changed, the full username and
hostmask of the person changing it.
"""
if author is None:
if topic is None:
self.sendLine(':%s %s %s %s :%s' % (
self.hostname, RPL_NOTOPIC, user, channel, 'No topic is set.'))
else:
self.sendLine(":%s %s %s %s :%s" % (
self.hostname, RPL_TOPIC, user, channel, lowQuote(topic)))
else:
self.sendLine(":%s TOPIC %s :%s" % (author, channel, lowQuote(topic)))
def topicAuthor(self, user, channel, author, date):
"""
Send the author of and time at which a topic was set for the given
channel.
This sends a 333 reply message, which is not part of the IRC RFC.
@type user: C{str} or C{unicode}
@param user: The user receiving the topic. Only their nick name, not
the full hostmask.
@type channel: C{str} or C{unicode}
@param channel: The channel for which this information is relevant.
@type author: C{str} or C{unicode}
@param author: The nickname (without hostmask) of the user who last set
the topic.
@type date: C{int}
@param date: A POSIX timestamp (number of seconds since the epoch) at
which the topic was last set.
"""
self.sendLine(':%s %d %s %s %s %d' % (
self.hostname, 333, user, channel, author, date))
def names(self, user, channel, names):
"""
Send the names of a channel's participants to a user.
@type user: C{str} or C{unicode}
@param user: The user receiving the name list. Only their nick name,
not the full hostmask.
@type channel: C{str} or C{unicode}
@param channel: The channel for which this is the namelist.
@type names: C{list} of C{str} or C{unicode}
@param names: The names to send.
"""
# XXX If unicode is given, these limits are not quite correct
prefixLength = len(channel) + len(user) + 10
namesLength = 512 - prefixLength
L = []
count = 0
for n in names:
if count + len(n) + 1 > namesLength:
self.sendLine(":%s %s %s = %s :%s" % (
self.hostname, RPL_NAMREPLY, user, channel, ' '.join(L)))
L = [n]
count = len(n)
else:
L.append(n)
count += len(n) + 1
if L:
self.sendLine(":%s %s %s = %s :%s" % (
self.hostname, RPL_NAMREPLY, user, channel, ' '.join(L)))
self.sendLine(":%s %s %s %s :End of /NAMES list" % (
self.hostname, RPL_ENDOFNAMES, user, channel))
def who(self, user, channel, memberInfo):
"""
Send a list of users participating in a channel.
@type user: C{str} or C{unicode}
@param user: The user receiving this member information. Only their
nick name, not the full hostmask.
@type channel: C{str} or C{unicode}
@param channel: The channel for which this is the member information.
@type memberInfo: C{list} of C{tuples}
@param memberInfo: For each member of the given channel, a 7-tuple
containing their username, their hostmask, the server to which they
are connected, their nickname, the letter "H" or "G" (standing for
"Here" or "Gone"), the hopcount from C{user} to this member, and
this member's real name.
"""
for info in memberInfo:
(username, hostmask, server, nickname, flag, hops, realName) = info
assert flag in ("H", "G")
self.sendLine(":%s %s %s %s %s %s %s %s %s :%d %s" % (
self.hostname, RPL_WHOREPLY, user, channel,
username, hostmask, server, nickname, flag, hops, realName))
self.sendLine(":%s %s %s %s :End of /WHO list." % (
self.hostname, RPL_ENDOFWHO, user, channel))
def whois(self, user, nick, username, hostname, realName, server, serverInfo, oper, idle, signOn, channels):
"""
Send information about the state of a particular user.
@type user: C{str} or C{unicode}
@param user: The user receiving this information. Only their nick name,
not the full hostmask.
@type nick: C{str} or C{unicode}
@param nick: The nickname of the user this information describes.
@type username: C{str} or C{unicode}
@param username: The user's username (eg, ident response)
@type hostname: C{str}
@param hostname: The user's hostmask
@type realName: C{str} or C{unicode}
@param realName: The user's real name
@type server: C{str} or C{unicode}
@param server: The name of the server to which the user is connected
@type serverInfo: C{str} or C{unicode}
@param serverInfo: A descriptive string about that server
@type oper: C{bool}
@param oper: Indicates whether the user is an IRC operator
@type idle: C{int}
@param idle: The number of seconds since the user last sent a message
@type signOn: C{int}
@param signOn: A POSIX timestamp (number of seconds since the epoch)
indicating the time the user signed on
@type channels: C{list} of C{str} or C{unicode}
@param channels: A list of the channels which the user is participating in
"""
self.sendLine(":%s %s %s %s %s %s * :%s" % (
self.hostname, RPL_WHOISUSER, user, nick, username, hostname, realName))
self.sendLine(":%s %s %s %s %s :%s" % (
self.hostname, RPL_WHOISSERVER, user, nick, server, serverInfo))
if oper:
self.sendLine(":%s %s %s %s :is an IRC operator" % (
self.hostname, RPL_WHOISOPERATOR, user, nick))
self.sendLine(":%s %s %s %s %d %d :seconds idle, signon time" % (
self.hostname, RPL_WHOISIDLE, user, nick, idle, signOn))
self.sendLine(":%s %s %s %s :%s" % (
self.hostname, RPL_WHOISCHANNELS, user, nick, ' '.join(channels)))
self.sendLine(":%s %s %s %s :End of WHOIS list." % (
self.hostname, RPL_ENDOFWHOIS, user, nick))
def join(self, who, where):
"""
Send a join message.
@type who: C{str} or C{unicode}
@param who: The name of the user joining. Should be of the form
username!ident@hostmask (unless you know better!).
@type where: C{str} or C{unicode}
@param where: The channel the user is joining.
"""
self.sendLine(":%s JOIN %s" % (who, where))
def part(self, who, where, reason=None):
"""
Send a part message.
@type who: C{str} or C{unicode}
@param who: The name of the user joining. Should be of the form
username!ident@hostmask (unless you know better!).
@type where: C{str} or C{unicode}
@param where: The channel the user is joining.
@type reason: C{str} or C{unicode}
@param reason: A string describing the misery which caused this poor
soul to depart.
"""
if reason:
self.sendLine(":%s PART %s :%s" % (who, where, reason))
else:
self.sendLine(":%s PART %s" % (who, where))
def channelMode(self, user, channel, mode, *args):
"""
Send information about the mode of a channel.
@type user: C{str} or C{unicode}
@param user: The user receiving the name list. Only their nick name,
not the full hostmask.
@type channel: C{str} or C{unicode}
@param channel: The channel for which this is the namelist.
@type mode: C{str}
@param mode: A string describing this channel's modes.
@param args: Any additional arguments required by the modes.
"""
self.sendLine(":%s %s %s %s %s %s" % (
self.hostname, RPL_CHANNELMODEIS, user, channel, mode, ' '.join(args)))
class ServerSupportedFeatures(_CommandDispatcherMixin):
"""
Handle ISUPPORT messages.
Feature names match those in the ISUPPORT RFC draft identically.
Information regarding the specifics of ISUPPORT was gleaned from
<http://www.irc.org/tech_docs/draft-brocklesby-irc-isupport-03.txt>.
"""
prefix = 'isupport'
def __init__(self):
self._features = {
'CHANNELLEN': 200,
'CHANTYPES': tuple('#&'),
'MODES': 3,
'NICKLEN': 9,
'PREFIX': self._parsePrefixParam('(ovh)@+%'),
# The ISUPPORT draft explicitly says that there is no default for
# CHANMODES, but we're defaulting it here to handle the case where
# the IRC server doesn't send us any ISUPPORT information, since
# IRCClient.getChannelModeParams relies on this value.
'CHANMODES': self._parseChanModesParam(['b', '', 'lk'])}
def _splitParamArgs(cls, params, valueProcessor=None):
"""
Split ISUPPORT parameter arguments.
Values can optionally be processed by C{valueProcessor}.
For example::
>>> ServerSupportedFeatures._splitParamArgs(['A:1', 'B:2'])
(('A', '1'), ('B', '2'))
@type params: C{iterable} of C{str}
@type valueProcessor: C{callable} taking {str}
@param valueProcessor: Callable to process argument values, or C{None}
to perform no processing
@rtype: C{list} of C{(str, object)}
@return: Sequence of C{(name, processedValue)}
"""
if valueProcessor is None:
valueProcessor = lambda x: x
def _parse():
for param in params:
if ':' not in param:
param += ':'
a, b = param.split(':', 1)
yield a, valueProcessor(b)
return list(_parse())
_splitParamArgs = classmethod(_splitParamArgs)
def _unescapeParamValue(cls, value):
"""
Unescape an ISUPPORT parameter.
The only form of supported escape is C{\\xHH}, where HH must be a valid
2-digit hexadecimal number.
@rtype: C{str}
"""
def _unescape():
parts = value.split('\\x')
            # The first part can never be preceded by the escape.
yield parts.pop(0)
for s in parts:
octet, rest = s[:2], s[2:]
try:
octet = int(octet, 16)
except ValueError:
raise ValueError('Invalid hex octet: %r' % (octet,))
yield chr(octet) + rest
if '\\x' not in value:
return value
return ''.join(_unescape())
_unescapeParamValue = classmethod(_unescapeParamValue)
def _splitParam(cls, param):
"""
Split an ISUPPORT parameter.
@type param: C{str}
@rtype: C{(str, list)}
        @return: C{(key, arguments)}
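        For example::
            >>> ServerSupportedFeatures._splitParam('TARGMAX=PRIVMSG:4,NOTICE:3')
            ('TARGMAX', ['PRIVMSG:4', 'NOTICE:3'])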
"""
if '=' not in param:
param += '='
key, value = param.split('=', 1)
return key, map(cls._unescapeParamValue, value.split(','))
_splitParam = classmethod(_splitParam)
def _parsePrefixParam(cls, prefix):
"""
Parse the ISUPPORT "PREFIX" parameter.
The order in which the parameter arguments appear is significant, the
earlier a mode appears the more privileges it gives.
@rtype: C{dict} mapping C{str} to C{(str, int)}
@return: A dictionary mapping a mode character to a two-tuple of
            C{(symbol, priority)}, the lower a priority (the lowest being
C{0}) the more privileges it gives
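        For example (the C{(ov)@+} value is illustrative)::
            >>> sorted(ServerSupportedFeatures._parsePrefixParam('(ov)@+').items())
            [('o', ('@', 0)), ('v', ('+', 1))]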
"""
if not prefix:
return None
        if prefix[0] != '(' or ')' not in prefix:
raise ValueError('Malformed PREFIX parameter')
modes, symbols = prefix.split(')', 1)
symbols = zip(symbols, xrange(len(symbols)))
modes = modes[1:]
return dict(zip(modes, symbols))
_parsePrefixParam = classmethod(_parsePrefixParam)
    def _parseChanModesParam(cls, params):
"""
Parse the ISUPPORT "CHANMODES" parameter.
See L{isupport_CHANMODES} for a detailed explanation of this parameter.
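        For example (an illustrative parameter value)::
            >>> sorted(ServerSupportedFeatures._parseChanModesParam(
            ...     ['b', 'k', 'l', 'imnpst']).items())
            [('addressModes', 'b'), ('noParam', 'imnpst'), ('param', 'k'), ('setParam', 'l')]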
"""
names = ('addressModes', 'param', 'setParam', 'noParam')
if len(params) > len(names):
raise ValueError(
'Expecting a maximum of %d channel mode parameters, got %d' % (
len(names), len(params)))
items = map(lambda key, value: (key, value or ''), names, params)
return dict(items)
_parseChanModesParam = classmethod(_parseChanModesParam)
def getFeature(self, feature, default=None):
"""
Get a server supported feature's value.
A feature with the value C{None} is equivalent to the feature being
unsupported.
@type feature: C{str}
@param feature: Feature name
@type default: C{object}
@param default: The value to default to, assuming that C{feature}
is not supported
@return: Feature value
"""
return self._features.get(feature, default)
def hasFeature(self, feature):
"""
Determine whether a feature is supported or not.
@rtype: C{bool}
"""
return self.getFeature(feature) is not None
def parse(self, params):
"""
Parse ISUPPORT parameters.
If an unknown parameter is encountered, it is simply added to the
dictionary, keyed by its name, as a tuple of the parameters provided.
@type params: C{iterable} of C{str}
@param params: Iterable of ISUPPORT parameters to parse
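        For example (illustrative parameters)::
            >>> supported = ServerSupportedFeatures()
            >>> supported.parse(['NICKLEN=31', 'NETWORK=Example'])
            >>> supported.getFeature('NICKLEN'), supported.getFeature('NETWORK')
            (31, 'Example')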
"""
for param in params:
key, value = self._splitParam(param)
if key.startswith('-'):
self._features.pop(key[1:], None)
else:
self._features[key] = self.dispatch(key, value)
def isupport_unknown(self, command, params):
"""
Unknown ISUPPORT parameter.
"""
return tuple(params)
def isupport_CHANLIMIT(self, params):
"""
The maximum number of each channel type a user may join.
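        For example (illustrative parameters)::
            >>> ServerSupportedFeatures().isupport_CHANLIMIT(['#:25', '&:10'])
            [('#', 25), ('&', 10)]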
"""
return self._splitParamArgs(params, _intOrDefault)
def isupport_CHANMODES(self, params):
"""
Available channel modes.
There are 4 categories of channel mode::
addressModes - Modes that add or remove an address to or from a
list, these modes always take a parameter.
param - Modes that change a setting on a channel, these modes
always take a parameter.
setParam - Modes that change a setting on a channel, these modes
only take a parameter when being set.
noParam - Modes that change a setting on a channel, these modes
never take a parameter.
"""
try:
return self._parseChanModesParam(params)
except ValueError:
return self.getFeature('CHANMODES')
def isupport_CHANNELLEN(self, params):
"""
Maximum length of a channel name a client may create.
"""
return _intOrDefault(params[0], self.getFeature('CHANNELLEN'))
def isupport_CHANTYPES(self, params):
"""
Valid channel prefixes.
"""
return tuple(params[0])
def isupport_EXCEPTS(self, params):
"""
Mode character for "ban exceptions".
The presence of this parameter indicates that the server supports
this functionality.
"""
return params[0] or 'e'
def isupport_IDCHAN(self, params):
"""
Safe channel identifiers.
The presence of this parameter indicates that the server supports
this functionality.
"""
return self._splitParamArgs(params)
def isupport_INVEX(self, params):
"""
Mode character for "invite exceptions".
The presence of this parameter indicates that the server supports
this functionality.
"""
return params[0] or 'I'
def isupport_KICKLEN(self, params):
"""
Maximum length of a kick message a client may provide.
"""
return _intOrDefault(params[0])
def isupport_MAXLIST(self, params):
"""
Maximum number of "list modes" a client may set on a channel at once.
List modes are identified by the "addressModes" key in CHANMODES.
"""
return self._splitParamArgs(params, _intOrDefault)
def isupport_MODES(self, params):
"""
Maximum number of modes accepting parameters that may be sent, by a
client, in a single MODE command.
"""
return _intOrDefault(params[0])
def isupport_NETWORK(self, params):
"""
IRC network name.
"""
return params[0]
def isupport_NICKLEN(self, params):
"""
Maximum length of a nickname the client may use.
"""
return _intOrDefault(params[0], self.getFeature('NICKLEN'))
def isupport_PREFIX(self, params):
"""
Mapping of channel modes that clients may have to status flags.
"""
try:
return self._parsePrefixParam(params[0])
except ValueError:
return self.getFeature('PREFIX')
def isupport_SAFELIST(self, params):
"""
Flag indicating that a client may request a LIST without being
disconnected due to the large amount of data generated.
"""
return True
def isupport_STATUSMSG(self, params):
"""
        The server supports sending messages only to clients on a channel
with a specific status.
"""
return params[0]
def isupport_TARGMAX(self, params):
"""
Maximum number of targets allowable for commands that accept multiple
targets.
"""
return dict(self._splitParamArgs(params, _intOrDefault))
def isupport_TOPICLEN(self, params):
"""
Maximum length of a topic that may be set.
"""
return _intOrDefault(params[0])
class IRCClient(basic.LineReceiver):
"""
Internet Relay Chat client protocol, with sprinkles.
In addition to providing an interface for an IRC client protocol,
this class also contains reasonable implementations of many common
CTCP methods.
TODO
====
- Limit the length of messages sent (because the IRC server probably
does).
- Add flood protection/rate limiting for my CTCP replies.
- NickServ cooperation. (a mix-in?)
@ivar nickname: Nickname the client will use.
@ivar password: Password used to log on to the server. May be C{None}.
@ivar realname: Supplied to the server during login as the "Real name"
or "ircname". May be C{None}.
@ivar username: Supplied to the server during login as the "User name".
May be C{None}
@ivar userinfo: Sent in reply to a C{USERINFO} CTCP query. If C{None}, no
USERINFO reply will be sent.
"This is used to transmit a string which is settable by
the user (and never should be set by the client)."
@ivar fingerReply: Sent in reply to a C{FINGER} CTCP query. If C{None}, no
FINGER reply will be sent.
@type fingerReply: Callable or String
@ivar versionName: CTCP VERSION reply, client name. If C{None}, no VERSION
reply will be sent.
@type versionName: C{str}, or None.
@ivar versionNum: CTCP VERSION reply, client version.
@type versionNum: C{str}, or None.
@ivar versionEnv: CTCP VERSION reply, environment the client is running in.
@type versionEnv: C{str}, or None.
@ivar sourceURL: CTCP SOURCE reply, a URL where the source code of this
client may be found. If C{None}, no SOURCE reply will be sent.
@ivar lineRate: Minimum delay between lines sent to the server. If
C{None}, no delay will be imposed.
@type lineRate: Number of Seconds.
@ivar motd: Either L{None} or, between receipt of I{RPL_MOTDSTART} and
I{RPL_ENDOFMOTD}, a L{list} of L{str}, each of which is the content
of an I{RPL_MOTD} message.
@ivar erroneousNickFallback: Default nickname assigned when an unregistered
client triggers an C{ERR_ERRONEUSNICKNAME} while trying to register
with an illegal nickname.
@type erroneousNickFallback: C{str}
@ivar _registered: Whether or not the user is registered. It becomes True
once a welcome has been received from the server.
@type _registered: C{bool}
@ivar _attemptedNick: The nickname that will try to get registered. It may
change if it is illegal or already taken. L{nickname} becomes the
L{_attemptedNick} that is successfully registered.
@type _attemptedNick: C{str}
@type supported: L{ServerSupportedFeatures}
@ivar supported: Available ISUPPORT features on the server
@type hostname: C{str}
@ivar hostname: Host name of the IRC server the client is connected to.
Initially the host name is C{None} and later is set to the host name
from which the I{RPL_WELCOME} message is received.
@type _heartbeat: L{task.LoopingCall}
@ivar _heartbeat: Looping call to perform the keepalive by calling
L{IRCClient._sendHeartbeat} every L{heartbeatInterval} seconds, or
C{None} if there is no heartbeat.
@type heartbeatInterval: C{float}
@ivar heartbeatInterval: Interval, in seconds, to send I{PING} messages to
the server as a form of keepalive, defaults to 120 seconds. Use C{None}
to disable the heartbeat.
"""
hostname = None
motd = None
nickname = 'irc'
password = None
realname = None
username = None
### Responses to various CTCP queries.
userinfo = None
# fingerReply is a callable returning a string, or a str()able object.
fingerReply = None
versionName = None
versionNum = None
versionEnv = None
sourceURL = "http://twistedmatrix.com/downloads/"
dcc_destdir = '.'
dcc_sessions = None
# If this is false, no attempt will be made to identify
# ourself to the server.
performLogin = 1
lineRate = None
_queue = None
_queueEmptying = None
delimiter = '\n' # '\r\n' will also work (see dataReceived)
__pychecker__ = 'unusednames=params,prefix,channel'
_registered = False
_attemptedNick = ''
erroneousNickFallback = 'defaultnick'
_heartbeat = None
heartbeatInterval = 120
def _reallySendLine(self, line):
return basic.LineReceiver.sendLine(self, lowQuote(line) + '\r')
def sendLine(self, line):
if self.lineRate is None:
self._reallySendLine(line)
else:
self._queue.append(line)
if not self._queueEmptying:
self._sendLine()
def _sendLine(self):
if self._queue:
self._reallySendLine(self._queue.pop(0))
self._queueEmptying = reactor.callLater(self.lineRate,
self._sendLine)
else:
self._queueEmptying = None
def connectionLost(self, reason):
basic.LineReceiver.connectionLost(self, reason)
self.stopHeartbeat()
def _createHeartbeat(self):
"""
Create the heartbeat L{LoopingCall}.
"""
return task.LoopingCall(self._sendHeartbeat)
def _sendHeartbeat(self):
"""
Send a I{PING} message to the IRC server as a form of keepalive.
"""
self.sendLine('PING ' + self.hostname)
def stopHeartbeat(self):
"""
Stop sending I{PING} messages to keep the connection to the server
alive.
@since: 11.1
"""
if self._heartbeat is not None:
self._heartbeat.stop()
self._heartbeat = None
def startHeartbeat(self):
"""
Start sending I{PING} messages every L{IRCClient.heartbeatInterval}
seconds to keep the connection to the server alive during periods of no
activity.
@since: 11.1
"""
self.stopHeartbeat()
if self.heartbeatInterval is None:
return
self._heartbeat = self._createHeartbeat()
self._heartbeat.start(self.heartbeatInterval, now=False)
### Interface level client->user output methods
###
### You'll want to override these.
### Methods relating to the server itself
def created(self, when):
"""
Called with creation date information about the server, usually at logon.
@type when: C{str}
@param when: A string describing when the server was created, probably.
"""
def yourHost(self, info):
"""
Called with daemon information about the server, usually at logon.
@type info: C{str}
        @param info: A string describing what software the server is running, probably.
"""
def myInfo(self, servername, version, umodes, cmodes):
"""
Called with information about the server, usually at logon.
@type servername: C{str}
@param servername: The hostname of this server.
@type version: C{str}
@param version: A description of what software this server runs.
@type umodes: C{str}
@param umodes: All the available user modes.
@type cmodes: C{str}
@param cmodes: All the available channel modes.
"""
def luserClient(self, info):
"""
Called with information about the number of connections, usually at logon.
@type info: C{str}
@param info: A description of the number of clients and servers
connected to the network, probably.
"""
def bounce(self, info):
"""
Called with information about where the client should reconnect.
@type info: C{str}
@param info: A plaintext description of the address that should be
connected to.
"""
def isupport(self, options):
"""
Called with various information about what the server supports.
@type options: C{list} of C{str}
@param options: Descriptions of features or limits of the server, possibly
in the form "NAME=VALUE".
"""
def luserChannels(self, channels):
"""
        Called with the number of channels that exist on the server.
@type channels: C{int}
"""
def luserOp(self, ops):
"""
Called with the number of ops logged on to the server.
@type ops: C{int}
"""
def luserMe(self, info):
"""
Called with information about the server connected to.
@type info: C{str}
@param info: A plaintext string describing the number of users and servers
connected to this server.
"""
### Methods involving me directly
def privmsg(self, user, channel, message):
"""
Called when I have a message from a user to me or a channel.
"""
pass
def joined(self, channel):
"""
Called when I finish joining a channel.
channel has the starting character (C{'#'}, C{'&'}, C{'!'}, or C{'+'})
intact.
"""
def left(self, channel):
"""
Called when I have left a channel.
channel has the starting character (C{'#'}, C{'&'}, C{'!'}, or C{'+'})
intact.
"""
def noticed(self, user, channel, message):
"""
Called when I have a notice from a user to me or a channel.
If the client makes any automated replies, it must not do so in
response to a NOTICE message, per the RFC::
The difference between NOTICE and PRIVMSG is that
automatic replies MUST NEVER be sent in response to a
NOTICE message. [...] The object of this rule is to avoid
loops between clients automatically sending something in
response to something it received.
"""
def modeChanged(self, user, channel, set, modes, args):
"""
Called when users or channel's modes are changed.
@type user: C{str}
@param user: The user and hostmask which instigated this change.
@type channel: C{str}
        @param channel: The channel on which the modes are changed. For a
        user mode change this is the nickname of the affected user, and for
        a server-level change it could be equal to C{user}.
@type set: C{bool} or C{int}
@param set: True if the mode(s) is being added, False if it is being
removed. If some modes are added and others removed at the same time
this function will be called twice, the first time with all the added
modes, the second with the removed ones. (To change this behaviour
override the irc_MODE method)
@type modes: C{str}
@param modes: The mode or modes which are being changed.
@type args: C{tuple}
@param args: Any additional information required for the mode
change.
"""
def pong(self, user, secs):
"""
Called with the results of a CTCP PING query.
"""
pass
def signedOn(self):
"""
        Called after successfully signing on to the server.
"""
pass
def kickedFrom(self, channel, kicker, message):
"""
Called when I am kicked from a channel.
"""
pass
def nickChanged(self, nick):
"""
Called when my nick has been changed.
"""
self.nickname = nick
### Things I observe other people doing in a channel.
def userJoined(self, user, channel):
"""
Called when I see another user joining a channel.
"""
pass
def userLeft(self, user, channel):
"""
Called when I see another user leaving a channel.
"""
pass
def userQuit(self, user, quitMessage):
"""
Called when I see another user disconnect from the network.
"""
pass
def userKicked(self, kickee, channel, kicker, message):
"""
Called when I observe someone else being kicked from a channel.
"""
pass
def action(self, user, channel, data):
"""
Called when I see a user perform an ACTION on a channel.
"""
pass
def topicUpdated(self, user, channel, newTopic):
"""
In channel, user changed the topic to newTopic.
Also called when first joining a channel.
"""
pass
def userRenamed(self, oldname, newname):
"""
A user changed their name from oldname to newname.
"""
pass
### Information from the server.
def receivedMOTD(self, motd):
"""
I received a message-of-the-day banner from the server.
        motd is a list of strings, where each string was sent as a separate
message from the server. To display, you might want to use::
'\\n'.join(motd)
to get a nicely formatted string.
"""
pass
### user input commands, client->server
### Your client will want to invoke these.
def join(self, channel, key=None):
"""
Join a channel.
@type channel: C{str}
@param channel: The name of the channel to join. If it has no prefix,
C{'#'} will be prepended to it.
@type key: C{str}
@param key: If specified, the key used to join the channel.
"""
if channel[0] not in CHANNEL_PREFIXES:
channel = '#' + channel
if key:
self.sendLine("JOIN %s %s" % (channel, key))
else:
self.sendLine("JOIN %s" % (channel,))
def leave(self, channel, reason=None):
"""
Leave a channel.
@type channel: C{str}
@param channel: The name of the channel to leave. If it has no prefix,
C{'#'} will be prepended to it.
@type reason: C{str}
@param reason: If given, the reason for leaving.
"""
if channel[0] not in CHANNEL_PREFIXES:
channel = '#' + channel
if reason:
self.sendLine("PART %s :%s" % (channel, reason))
else:
self.sendLine("PART %s" % (channel,))
def kick(self, channel, user, reason=None):
"""
Attempt to kick a user from a channel.
@type channel: C{str}
@param channel: The name of the channel to kick the user from. If it has
no prefix, C{'#'} will be prepended to it.
@type user: C{str}
@param user: The nick of the user to kick.
@type reason: C{str}
@param reason: If given, the reason for kicking the user.
"""
if channel[0] not in CHANNEL_PREFIXES:
channel = '#' + channel
if reason:
self.sendLine("KICK %s %s :%s" % (channel, user, reason))
else:
self.sendLine("KICK %s %s" % (channel, user))
part = leave
def invite(self, user, channel):
"""
Attempt to invite user to channel
@type user: C{str}
@param user: The user to invite
@type channel: C{str}
@param channel: The channel to invite the user too
@since: 11.0
"""
if channel[0] not in CHANNEL_PREFIXES:
channel = '#' + channel
self.sendLine("INVITE %s %s" % (user, channel))
def topic(self, channel, topic=None):
"""
Attempt to set the topic of the given channel, or ask what it is.
        If topic is None, then I send a topic query instead of trying to set the
topic. The server should respond with a TOPIC message containing the
current topic of the given channel.
@type channel: C{str}
@param channel: The name of the channel to change the topic on. If it
has no prefix, C{'#'} will be prepended to it.
@type topic: C{str}
@param topic: If specified, what to set the topic to.
"""
# << TOPIC #xtestx :fff
if channel[0] not in CHANNEL_PREFIXES:
channel = '#' + channel
        if topic is not None:
self.sendLine("TOPIC %s :%s" % (channel, topic))
else:
self.sendLine("TOPIC %s" % (channel,))
def mode(self, chan, set, modes, limit = None, user = None, mask = None):
"""
Change the modes on a user or channel.
The C{limit}, C{user}, and C{mask} parameters are mutually exclusive.
@type chan: C{str}
@param chan: The name of the channel to operate on.
@type set: C{bool}
@param set: True to give the user or channel permissions and False to
remove them.
@type modes: C{str}
@param modes: The mode flags to set on the user or channel.
@type limit: C{int}
        @param limit: In conjunction with the C{'l'} mode flag, limits the
number of users on the channel.
@type user: C{str}
@param user: The user to change the mode on.
@type mask: C{str}
        @param mask: In conjunction with the C{'b'} mode flag, sets a mask of
users to be banned from the channel.
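        For example, to give channel operator status to a hypothetical user
        C{'alice'} in C{'#chan'} (this sends C{MODE #chan +o alice})::
            self.mode('#chan', True, 'o', user='alice')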
"""
if set:
line = 'MODE %s +%s' % (chan, modes)
else:
line = 'MODE %s -%s' % (chan, modes)
if limit is not None:
line = '%s %d' % (line, limit)
elif user is not None:
line = '%s %s' % (line, user)
elif mask is not None:
line = '%s %s' % (line, mask)
self.sendLine(line)
def say(self, channel, message, length=None):
"""
Send a message to a channel
@type channel: C{str}
@param channel: The channel to say the message on. If it has no prefix,
C{'#'} will be prepended to it.
@type message: C{str}
@param message: The message to say.
@type length: C{int}
@param length: The maximum number of octets to send at a time. This has
the effect of turning a single call to C{msg()} into multiple
commands to the server. This is useful when long messages may be
sent that would otherwise cause the server to kick us off or
silently truncate the text we are sending. If None is passed, the
            entire message is always sent in one command.
"""
if channel[0] not in CHANNEL_PREFIXES:
channel = '#' + channel
self.msg(channel, message, length)
def _safeMaximumLineLength(self, command):
"""
Estimate a safe maximum line length for the given command.
This is done by assuming the maximum values for nickname length,
realname and hostname combined with the command that needs to be sent
and some guessing. A theoretical maximum value is used because it is
possible that our nickname, username or hostname changes (on the server
side) while the length is still being calculated.
"""
# :nickname!realname@hostname COMMAND ...
theoretical = ':%s!%s@%s %s' % (
'a' * self.supported.getFeature('NICKLEN'),
# This value is based on observation.
'b' * 10,
# See <http://tools.ietf.org/html/rfc2812#section-2.3.1>.
'c' * 63,
command)
# Fingers crossed.
fudge = 10
return MAX_COMMAND_LENGTH - len(theoretical) - fudge
def msg(self, user, message, length=None):
"""
Send a message to a user or channel.
The message will be split into multiple commands to the server if:
- The message contains any newline characters
- Any span between newline characters is longer than the given
line-length.
@param user: Username or channel name to which to direct the
message.
@type user: C{str}
@param message: Text to send.
@type message: C{str}
@param length: Maximum number of octets to send in a single
command, including the IRC protocol framing. If C{None} is given
then L{IRCClient._safeMaximumLineLength} is used to determine a
value.
@type length: C{int}
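        For example (an illustrative call; spans longer than roughly
        C{length} octets are split across multiple PRIVMSG commands)::
            self.msg('#twisted', 'Hello there', length=512)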
"""
fmt = 'PRIVMSG %s :' % (user,)
if length is None:
length = self._safeMaximumLineLength(fmt)
# Account for the line terminator.
minimumLength = len(fmt) + 2
if length <= minimumLength:
raise ValueError("Maximum length must exceed %d for message "
"to %s" % (minimumLength, user))
for line in split(message, length - minimumLength):
self.sendLine(fmt + line)
def notice(self, user, message):
"""
Send a notice to a user.
Notices are like normal message, but should never get automated
replies.
@type user: C{str}
@param user: The user to send a notice to.
@type message: C{str}
@param message: The contents of the notice to send.
"""
self.sendLine("NOTICE %s :%s" % (user, message))
def away(self, message=''):
"""
Mark this client as away.
@type message: C{str}
@param message: If specified, the away message.
"""
self.sendLine("AWAY :%s" % message)
def back(self):
"""
Clear the away status.
"""
# An empty away marks us as back
self.away()
def whois(self, nickname, server=None):
"""
Retrieve user information about the given nick name.
@type nickname: C{str}
@param nickname: The nick name about which to retrieve information.
@since: 8.2
"""
if server is None:
self.sendLine('WHOIS ' + nickname)
else:
self.sendLine('WHOIS %s %s' % (server, nickname))
def register(self, nickname, hostname='foo', servername='bar'):
"""
Login to the server.
@type nickname: C{str}
@param nickname: The nickname to register.
@type hostname: C{str}
@param hostname: If specified, the hostname to logon as.
@type servername: C{str}
@param servername: If specified, the servername to logon as.
"""
if self.password is not None:
self.sendLine("PASS %s" % self.password)
self.setNick(nickname)
if self.username is None:
self.username = nickname
self.sendLine("USER %s %s %s :%s" % (self.username, hostname, servername, self.realname))
def setNick(self, nickname):
"""
Set this client's nickname.
@type nickname: C{str}
@param nickname: The nickname to change to.
"""
self._attemptedNick = nickname
self.sendLine("NICK %s" % nickname)
def quit(self, message = ''):
"""
Disconnect from the server
@type message: C{str}
@param message: If specified, the message to give when quitting the
server.
"""
self.sendLine("QUIT :%s" % message)
### user input commands, client->client
def describe(self, channel, action):
"""
Strike a pose.
@type channel: C{str}
@param channel: The name of the channel to have an action on. If it
has no prefix, it is sent to the user of that name.
@type action: C{str}
        @param action: The action to perform.
@since: 9.0
"""
self.ctcpMakeQuery(channel, [('ACTION', action)])
_pings = None
_MAX_PINGRING = 12
def ping(self, user, text = None):
"""
Measure round-trip delay to another IRC client.
"""
if self._pings is None:
self._pings = {}
if text is None:
chars = string.letters + string.digits + string.punctuation
key = ''.join([random.choice(chars) for i in range(12)])
else:
key = str(text)
self._pings[(user, key)] = time.time()
self.ctcpMakeQuery(user, [('PING', key)])
if len(self._pings) > self._MAX_PINGRING:
# Remove some of the oldest entries.
byValue = [(v, k) for (k, v) in self._pings.items()]
byValue.sort()
            excess = len(self._pings) - self._MAX_PINGRING
for i in xrange(excess):
del self._pings[byValue[i][1]]
def dccSend(self, user, file):
        if isinstance(file, types.StringType):
file = open(file, 'r')
size = fileSize(file)
name = getattr(file, "name", "file@%s" % (id(file),))
factory = DccSendFactory(file)
port = reactor.listenTCP(0, factory, 1)
        raise NotImplementedError(
"XXX!!! Help! I need to bind a socket, have it listen, and tell me its address. "
"(and stop accepting once we've made a single connection.)")
my_address = struct.pack("!I", my_address)
args = ['SEND', name, my_address, str(port)]
        if size is not None:
args.append(size)
args = ' '.join(args)
self.ctcpMakeQuery(user, [('DCC', args)])
def dccResume(self, user, fileName, port, resumePos):
"""
Send a DCC RESUME request to another user.
"""
self.ctcpMakeQuery(user, [
('DCC', ['RESUME', fileName, port, resumePos])])
def dccAcceptResume(self, user, fileName, port, resumePos):
"""
Send a DCC ACCEPT response to clients who have requested a resume.
"""
self.ctcpMakeQuery(user, [
('DCC', ['ACCEPT', fileName, port, resumePos])])
### server->client messages
### You might want to fiddle with these,
### but it is safe to leave them alone.
def irc_ERR_NICKNAMEINUSE(self, prefix, params):
"""
Called when we try to register or change to a nickname that is already
taken.
"""
self._attemptedNick = self.alterCollidedNick(self._attemptedNick)
self.setNick(self._attemptedNick)
def alterCollidedNick(self, nickname):
"""
Generate an altered version of a nickname that caused a collision in an
effort to create an unused related name for subsequent registration.
@param nickname: The nickname a user is attempting to register.
@type nickname: C{str}
@returns: A string that is in some way different from the nickname.
@rtype: C{str}
"""
return nickname + '_'
def irc_ERR_ERRONEUSNICKNAME(self, prefix, params):
"""
Called when we try to register or change to an illegal nickname.
The server should send this reply when the nickname contains any
disallowed characters. The bot will stall, waiting for RPL_WELCOME, if
we don't handle this during sign-on.
@note: The method uses the spelling I{erroneus}, as it appears in
the RFC, section 6.1.
"""
if not self._registered:
self.setNick(self.erroneousNickFallback)
def irc_ERR_PASSWDMISMATCH(self, prefix, params):
"""
Called when the login was incorrect.
"""
raise IRCPasswordMismatch("Password Incorrect.")
def irc_RPL_WELCOME(self, prefix, params):
"""
Called when we have received the welcome from the server.
"""
self.hostname = prefix
self._registered = True
self.nickname = self._attemptedNick
self.signedOn()
self.startHeartbeat()
def irc_JOIN(self, prefix, params):
"""
Called when a user joins a channel.
"""
nick = prefix.split('!')[0]
channel = params[-1]
if nick == self.nickname:
self.joined(channel)
else:
self.userJoined(nick, channel)
def irc_PART(self, prefix, params):
"""
Called when a user leaves a channel.
"""
nick = prefix.split('!')[0]
channel = params[0]
if nick == self.nickname:
self.left(channel)
else:
self.userLeft(nick, channel)
def irc_QUIT(self, prefix, params):
"""
Called when a user has quit.
"""
nick = prefix.split('!')[0]
self.userQuit(nick, params[0])
def irc_MODE(self, user, params):
"""
Parse a server mode change message.
"""
channel, modes, args = params[0], params[1], params[2:]
if modes[0] not in '-+':
modes = '+' + modes
if channel == self.nickname:
# This is a mode change to our individual user, not a channel mode
# that involves us.
paramModes = self.getUserModeParams()
else:
paramModes = self.getChannelModeParams()
try:
added, removed = parseModes(modes, args, paramModes)
except IRCBadModes:
            log.err(None, 'An error occurred while parsing the following '
'MODE message: MODE %s' % (' '.join(params),))
else:
if added:
modes, params = zip(*added)
self.modeChanged(user, channel, True, ''.join(modes), params)
if removed:
modes, params = zip(*removed)
self.modeChanged(user, channel, False, ''.join(modes), params)
def irc_PING(self, prefix, params):
"""
        Called when someone has pinged us.
"""
self.sendLine("PONG %s" % params[-1])
def irc_PRIVMSG(self, prefix, params):
"""
Called when we get a message.
"""
user = prefix
channel = params[0]
message = params[-1]
if not message:
            # Don't raise an exception if we get a blank message.
return
if message[0] == X_DELIM:
m = ctcpExtract(message)
if m['extended']:
self.ctcpQuery(user, channel, m['extended'])
if not m['normal']:
return
message = ' '.join(m['normal'])
self.privmsg(user, channel, message)
def irc_NOTICE(self, prefix, params):
"""
Called when a user gets a notice.
"""
user = prefix
channel = params[0]
message = params[-1]
        if not message:
            # Don't raise an exception if we get a blank notice.
            return
        if message[0] == X_DELIM:
m = ctcpExtract(message)
if m['extended']:
self.ctcpReply(user, channel, m['extended'])
if not m['normal']:
return
message = ' '.join(m['normal'])
self.noticed(user, channel, message)
def irc_NICK(self, prefix, params):
"""
Called when a user changes their nickname.
"""
nick = prefix.split('!', 1)[0]
if nick == self.nickname:
self.nickChanged(params[0])
else:
self.userRenamed(nick, params[0])
def irc_KICK(self, prefix, params):
"""
Called when a user is kicked from a channel.
"""
kicker = prefix.split('!')[0]
channel = params[0]
kicked = params[1]
message = params[-1]
if kicked.lower() == self.nickname.lower():
# Yikes!
self.kickedFrom(channel, kicker, message)
else:
self.userKicked(kicked, channel, kicker, message)
def irc_TOPIC(self, prefix, params):
"""
Someone in the channel set the topic.
"""
user = prefix.split('!')[0]
channel = params[0]
newtopic = params[1]
self.topicUpdated(user, channel, newtopic)
def irc_RPL_TOPIC(self, prefix, params):
"""
Called when the topic for a channel is initially reported or when it
subsequently changes.
"""
user = prefix.split('!')[0]
channel = params[1]
newtopic = params[2]
self.topicUpdated(user, channel, newtopic)
def irc_RPL_NOTOPIC(self, prefix, params):
user = prefix.split('!')[0]
channel = params[1]
newtopic = ""
self.topicUpdated(user, channel, newtopic)
def irc_RPL_MOTDSTART(self, prefix, params):
if params[-1].startswith("- "):
params[-1] = params[-1][2:]
self.motd = [params[-1]]
def irc_RPL_MOTD(self, prefix, params):
if params[-1].startswith("- "):
params[-1] = params[-1][2:]
if self.motd is None:
self.motd = []
self.motd.append(params[-1])
def irc_RPL_ENDOFMOTD(self, prefix, params):
"""
I{RPL_ENDOFMOTD} indicates the end of the message of the day
messages. Deliver the accumulated lines to C{receivedMOTD}.
"""
motd = self.motd
self.motd = None
self.receivedMOTD(motd)
def irc_RPL_CREATED(self, prefix, params):
self.created(params[1])
def irc_RPL_YOURHOST(self, prefix, params):
self.yourHost(params[1])
def irc_RPL_MYINFO(self, prefix, params):
info = params[1].split(None, 3)
while len(info) < 4:
info.append(None)
self.myInfo(*info)
def irc_RPL_BOUNCE(self, prefix, params):
self.bounce(params[1])
def irc_RPL_ISUPPORT(self, prefix, params):
args = params[1:-1]
# Several ISUPPORT messages, in no particular order, may be sent
# to the client at any given point in time (usually only on connect,
# though.) For this reason, ServerSupportedFeatures.parse is intended
# to mutate the supported feature list.
self.supported.parse(args)
self.isupport(args)
def irc_RPL_LUSERCLIENT(self, prefix, params):
self.luserClient(params[1])
def irc_RPL_LUSEROP(self, prefix, params):
try:
self.luserOp(int(params[1]))
except ValueError:
pass
def irc_RPL_LUSERCHANNELS(self, prefix, params):
try:
self.luserChannels(int(params[1]))
except ValueError:
pass
def irc_RPL_LUSERME(self, prefix, params):
self.luserMe(params[1])
def irc_unknown(self, prefix, command, params):
pass
### Receiving a CTCP query from another party
### It is safe to leave these alone.
def ctcpQuery(self, user, channel, messages):
"""
Dispatch method for any CTCP queries received.
Duplicated CTCP queries are ignored and no dispatch is
made. Unrecognized CTCP queries invoke L{IRCClient.ctcpUnknownQuery}.
"""
seen = set()
for tag, data in messages:
method = getattr(self, 'ctcpQuery_%s' % tag, None)
if tag not in seen:
if method is not None:
method(user, channel, data)
else:
self.ctcpUnknownQuery(user, channel, tag, data)
seen.add(tag)
def ctcpUnknownQuery(self, user, channel, tag, data):
"""
Fallback handler for unrecognized CTCP queries.
No CTCP I{ERRMSG} reply is made to remove a potential denial of service
avenue.
"""
log.msg('Unknown CTCP query from %r: %r %r' % (user, tag, data))
def ctcpQuery_ACTION(self, user, channel, data):
self.action(user, channel, data)
def ctcpQuery_PING(self, user, channel, data):
nick = user.split('!')[0]
self.ctcpMakeReply(nick, [("PING", data)])
def ctcpQuery_FINGER(self, user, channel, data):
if data is not None:
self.quirkyMessage("Why did %s send '%s' with a FINGER query?"
% (user, data))
if not self.fingerReply:
return
if callable(self.fingerReply):
reply = self.fingerReply()
else:
reply = str(self.fingerReply)
nick = user.split('!')[0]
self.ctcpMakeReply(nick, [('FINGER', reply)])
def ctcpQuery_VERSION(self, user, channel, data):
if data is not None:
self.quirkyMessage("Why did %s send '%s' with a VERSION query?"
% (user, data))
if self.versionName:
nick = user.split('!')[0]
self.ctcpMakeReply(nick, [('VERSION', '%s:%s:%s' %
(self.versionName,
self.versionNum or '',
self.versionEnv or ''))])
def ctcpQuery_SOURCE(self, user, channel, data):
if data is not None:
self.quirkyMessage("Why did %s send '%s' with a SOURCE query?"
% (user, data))
if self.sourceURL:
nick = user.split('!')[0]
# The CTCP document (Zeuge, Rollo, Mesander 1994) says that SOURCE
# replies should be responded to with the location of an anonymous
# FTP server in host:directory:file format. I'm taking the liberty
# of bringing it into the 21st century by sending a URL instead.
self.ctcpMakeReply(nick, [('SOURCE', self.sourceURL),
('SOURCE', None)])
def ctcpQuery_USERINFO(self, user, channel, data):
if data is not None:
self.quirkyMessage("Why did %s send '%s' with a USERINFO query?"
% (user, data))
if self.userinfo:
nick = user.split('!')[0]
self.ctcpMakeReply(nick, [('USERINFO', self.userinfo)])
def ctcpQuery_CLIENTINFO(self, user, channel, data):
"""
A master index of what CTCP tags this client knows.
If no arguments are provided, respond with a list of known tags.
If an argument is provided, provide human-readable help on
the usage of that tag.
"""
nick = user.split('!')[0]
if not data:
# XXX: prefixedMethodNames gets methods from my *class*,
# but it's entirely possible that this *instance* has more
# methods.
names = reflect.prefixedMethodNames(self.__class__,
'ctcpQuery_')
self.ctcpMakeReply(nick, [('CLIENTINFO', ' '.join(names))])
else:
args = data.split()
method = getattr(self, 'ctcpQuery_%s' % (args[0],), None)
if not method:
self.ctcpMakeReply(nick, [('ERRMSG',
"CLIENTINFO %s :"
"Unknown query '%s'"
% (data, args[0]))])
return
doc = getattr(method, '__doc__', '')
self.ctcpMakeReply(nick, [('CLIENTINFO', doc)])
def ctcpQuery_ERRMSG(self, user, channel, data):
# Yeah, this seems strange, but that's what the spec says to do
# when faced with an ERRMSG query (not a reply).
nick = user.split('!')[0]
self.ctcpMakeReply(nick, [('ERRMSG',
"%s :No error has occoured." % data)])
def ctcpQuery_TIME(self, user, channel, data):
if data is not None:
self.quirkyMessage("Why did %s send '%s' with a TIME query?"
% (user, data))
nick = user.split('!')[0]
self.ctcpMakeReply(nick,
[('TIME', ':%s' %
time.asctime(time.localtime(time.time())))])
def ctcpQuery_DCC(self, user, channel, data):
"""Initiate a Direct Client Connection
"""
if not data: return
dcctype = data.split(None, 1)[0].upper()
handler = getattr(self, "dcc_" + dcctype, None)
if handler:
if self.dcc_sessions is None:
self.dcc_sessions = []
data = data[len(dcctype)+1:]
handler(user, channel, data)
else:
nick = user.split('!')[0]
self.ctcpMakeReply(nick, [('ERRMSG',
"DCC %s :Unknown DCC type '%s'"
% (data, dcctype))])
self.quirkyMessage("%s offered unknown DCC type %s"
% (user, dcctype))
def dcc_SEND(self, user, channel, data):
# Use shlex.split for those who send files with spaces in the names.
data = shlex.split(data)
if len(data) < 3:
raise IRCBadMessage("malformed DCC SEND request: %r" % (data,))
(filename, address, port) = data[:3]
address = dccParseAddress(address)
try:
port = int(port)
except ValueError:
raise IRCBadMessage("Indecipherable port %r" % (port,))
size = -1
if len(data) >= 4:
try:
size = int(data[3])
except ValueError:
pass
# XXX Should we bother passing this data?
self.dccDoSend(user, address, port, filename, size, data)
def dcc_ACCEPT(self, user, channel, data):
data = shlex.split(data)
if len(data) < 3:
raise IRCBadMessage("malformed DCC SEND ACCEPT request: %r" % (
data,))
(filename, port, resumePos) = data[:3]
try:
port = int(port)
resumePos = int(resumePos)
except ValueError:
return
self.dccDoAcceptResume(user, filename, port, resumePos)
def dcc_RESUME(self, user, channel, data):
data = shlex.split(data)
if len(data) < 3:
raise IRCBadMessage("malformed DCC SEND RESUME request: %r" % (
data,))
(filename, port, resumePos) = data[:3]
try:
port = int(port)
resumePos = int(resumePos)
except ValueError:
return
self.dccDoResume(user, filename, port, resumePos)
def dcc_CHAT(self, user, channel, data):
data = shlex.split(data)
if len(data) < 3:
raise IRCBadMessage("malformed DCC CHAT request: %r" % (data,))
(filename, address, port) = data[:3]
address = dccParseAddress(address)
try:
port = int(port)
except ValueError:
raise IRCBadMessage("Indecipherable port %r" % (port,))
self.dccDoChat(user, channel, address, port, data)
### The dccDo methods are the slightly higher-level siblings of
### common dcc_ methods; the arguments have been parsed for them.
def dccDoSend(self, user, address, port, fileName, size, data):
"""
Called when I receive a DCC SEND offer from a client.
By default, I do nothing here.
"""
## filename = path.basename(arg)
## protocol = DccFileReceive(filename, size,
## (user,channel,data),self.dcc_destdir)
## reactor.clientTCP(address, port, protocol)
## self.dcc_sessions.append(protocol)
pass
def dccDoResume(self, user, file, port, resumePos):
"""
Called when a client is trying to resume an offered file
via DCC send. It should be either replied to with a DCC
ACCEPT or ignored (default).
"""
pass
def dccDoAcceptResume(self, user, file, port, resumePos):
"""
Called when a client has verified and accepted a DCC resume
request made by us. By default it will do nothing.
"""
pass
def dccDoChat(self, user, channel, address, port, data):
pass
#factory = DccChatFactory(self, queryData=(user, channel, data))
#reactor.connectTCP(address, port, factory)
#self.dcc_sessions.append(factory)
#def ctcpQuery_SED(self, user, data):
# """Simple Encryption Doodoo
#
# Feel free to implement this, but no specification is available.
# """
# raise NotImplementedError
def ctcpMakeReply(self, user, messages):
"""
Send one or more C{extended messages} as a CTCP reply.
@type messages: a list of extended messages. An extended
message is a (tag, data) tuple, where 'data' may be C{None}.
"""
self.notice(user, ctcpStringify(messages))
### client CTCP query commands
def ctcpMakeQuery(self, user, messages):
"""
Send one or more C{extended messages} as a CTCP query.
@type messages: a list of extended messages. An extended
message is a (tag, data) tuple, where 'data' may be C{None}.
"""
self.msg(user, ctcpStringify(messages))
### Receiving a response to a CTCP query (presumably to one we made)
### You may want to add methods here, or override UnknownReply.
def ctcpReply(self, user, channel, messages):
"""
Dispatch method for any CTCP replies received.
"""
for m in messages:
method = getattr(self, "ctcpReply_%s" % m[0], None)
if method:
method(user, channel, m[1])
else:
self.ctcpUnknownReply(user, channel, m[0], m[1])
def ctcpReply_PING(self, user, channel, data):
nick = user.split('!', 1)[0]
        if (not self._pings) or ((nick, data) not in self._pings):
            raise IRCBadMessage(
                "Bogus PING response from %s: %s" % (user, data))
t0 = self._pings[(nick, data)]
self.pong(user, time.time() - t0)
def ctcpUnknownReply(self, user, channel, tag, data):
"""Called when a fitting ctcpReply_ method is not found.
XXX: If the client makes arbitrary CTCP queries,
this method should probably show the responses to
        them instead of treating them as anomalies.
"""
log.msg("Unknown CTCP reply from %s: %s %s\n"
% (user, tag, data))
### Error handlers
### You may override these with something more appropriate to your UI.
def badMessage(self, line, excType, excValue, tb):
"""
When I get a message that's so broken I can't use it.
"""
log.msg(line)
log.msg(''.join(traceback.format_exception(excType, excValue, tb)))
def quirkyMessage(self, s):
"""This is called when I receive a message which is peculiar,
but not wholly indecipherable.
"""
log.msg(s + '\n')
    ### Protocol methods
def connectionMade(self):
self.supported = ServerSupportedFeatures()
self._queue = []
if self.performLogin:
self.register(self.nickname)
def dataReceived(self, data):
basic.LineReceiver.dataReceived(self, data.replace('\r', ''))
def lineReceived(self, line):
line = lowDequote(line)
try:
prefix, command, params = parsemsg(line)
if command in numeric_to_symbolic:
command = numeric_to_symbolic[command]
self.handleCommand(command, prefix, params)
except IRCBadMessage:
self.badMessage(line, *sys.exc_info())
def getUserModeParams(self):
"""
Get user modes that require parameters for correct parsing.
@rtype: C{[str, str]}
@return C{[add, remove]}
"""
return ['', '']
def getChannelModeParams(self):
"""
Get channel modes that require parameters for correct parsing.
@rtype: C{[str, str]}
@return C{[add, remove]}
"""
# PREFIX modes are treated as "type B" CHANMODES, they always take
# parameter.
params = ['', '']
prefixes = self.supported.getFeature('PREFIX', {})
params[0] = params[1] = ''.join(prefixes.iterkeys())
chanmodes = self.supported.getFeature('CHANMODES')
if chanmodes is not None:
params[0] += chanmodes.get('addressModes', '')
params[0] += chanmodes.get('param', '')
params[1] = params[0]
params[0] += chanmodes.get('setParam', '')
return params
def handleCommand(self, command, prefix, params):
"""Determine the function to call for the given command and call
it with the given arguments.
"""
method = getattr(self, "irc_%s" % command, None)
try:
if method is not None:
method(prefix, params)
else:
self.irc_unknown(prefix, command, params)
except:
log.deferr()
def __getstate__(self):
dct = self.__dict__.copy()
dct['dcc_sessions'] = None
dct['_pings'] = None
return dct
def dccParseAddress(address):
if '.' in address:
pass
else:
try:
address = long(address)
except ValueError:
            raise IRCBadMessage(
                "Indecipherable address %r" % (address,))
else:
address = (
(address >> 24) & 0xFF,
(address >> 16) & 0xFF,
(address >> 8) & 0xFF,
address & 0xFF,
)
address = '.'.join(map(str,address))
return address
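# A hedged sketch of dccParseAddress behaviour (the sample values below are
# assumptions for illustration, not taken from any spec):
#   dccParseAddress('127.0.0.1')   # -> '127.0.0.1' (dotted quads pass through)
#   dccParseAddress('2130706433')  # -> '127.0.0.1' (packed 32-bit int unpacked)
#   dccParseAddress('bogus')       # raises IRCBadMessage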
class DccFileReceiveBasic(protocol.Protocol, styles.Ephemeral):
"""Bare protocol to receive a Direct Client Connection SEND stream.
This does enough to keep the other guy talking, but you'll want to
extend my dataReceived method to *do* something with the data I get.
"""
bytesReceived = 0
def __init__(self, resumeOffset=0):
self.bytesReceived = resumeOffset
self.resume = (resumeOffset != 0)
def dataReceived(self, data):
"""Called when data is received.
Warning: This just acknowledges to the remote host that the
data has been received; it doesn't *do* anything with the
data, so you'll want to override this.
"""
self.bytesReceived = self.bytesReceived + len(data)
self.transport.write(struct.pack('!i', self.bytesReceived))
class DccSendProtocol(protocol.Protocol, styles.Ephemeral):
"""Protocol for an outgoing Direct Client Connection SEND.
"""
blocksize = 1024
file = None
bytesSent = 0
completed = 0
connected = 0
def __init__(self, file):
if type(file) is types.StringType:
self.file = open(file, 'r')
def connectionMade(self):
self.connected = 1
self.sendBlock()
def dataReceived(self, data):
# XXX: Do we need to check to see if len(data) != fmtsize?
        bytesShesGot = struct.unpack("!I", data)[0]  # unpack returns a tuple; take the int
if bytesShesGot < self.bytesSent:
# Wait for her.
# XXX? Add some checks to see if we've stalled out?
return
elif bytesShesGot > self.bytesSent:
# self.transport.log("DCC SEND %s: She says she has %d bytes "
# "but I've only sent %d. I'm stopping "
# "this screwy transfer."
# % (self.file,
# bytesShesGot, self.bytesSent))
self.transport.loseConnection()
return
self.sendBlock()
def sendBlock(self):
block = self.file.read(self.blocksize)
if block:
self.transport.write(block)
self.bytesSent = self.bytesSent + len(block)
else:
# Nothing more to send, transfer complete.
self.transport.loseConnection()
self.completed = 1
def connectionLost(self, reason):
self.connected = 0
if hasattr(self.file, "close"):
self.file.close()
class DccSendFactory(protocol.Factory):
protocol = DccSendProtocol
def __init__(self, file):
self.file = file
def buildProtocol(self, connection):
p = self.protocol(self.file)
p.factory = self
return p
def fileSize(file):
"""I'll try my damndest to determine the size of this file object.
"""
size = None
if hasattr(file, "fileno"):
fileno = file.fileno()
try:
stat_ = os.fstat(fileno)
size = stat_[stat.ST_SIZE]
except:
pass
else:
return size
if hasattr(file, "name") and path.exists(file.name):
try:
size = path.getsize(file.name)
except:
pass
else:
return size
if hasattr(file, "seek") and hasattr(file, "tell"):
try:
try:
file.seek(0, 2)
size = file.tell()
finally:
file.seek(0, 0)
except:
pass
else:
return size
return size
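# Hedged usage sketch for fileSize -- StringIO here is just a stand-in object
# that lacks fileno() and name, exercising the seek/tell fallback:
#   from StringIO import StringIO
#   fileSize(open('/etc/hosts'))  # size via os.fstat on the descriptor
#   fileSize(StringIO('abcdef'))  # -> 6 via the seek/tell fallback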
class DccChat(basic.LineReceiver, styles.Ephemeral):
"""Direct Client Connection protocol type CHAT.
DCC CHAT is really just your run o' the mill basic.LineReceiver
protocol. This class only varies from that slightly, accepting
    either LF or CR LF for a line delimiter for incoming messages
while always using CR LF for outgoing.
The lineReceived method implemented here uses the DCC connection's
'client' attribute (provided upon construction) to deliver incoming
lines from the DCC chat via IRCClient's normal privmsg interface.
That's something of a spoof, which you may well want to override.
"""
queryData = None
delimiter = CR + NL
client = None
remoteParty = None
buffer = ""
def __init__(self, client, queryData=None):
"""Initialize a new DCC CHAT session.
queryData is a 3-tuple of
(fromUser, targetUserOrChannel, data)
as received by the CTCP query.
(To be honest, fromUser is the only thing that's currently
used here. targetUserOrChannel is potentially useful, while
        the 'data' argument is solely for informational purposes.)
"""
self.client = client
if queryData:
self.queryData = queryData
self.remoteParty = self.queryData[0]
def dataReceived(self, data):
self.buffer = self.buffer + data
lines = self.buffer.split(LF)
# Put the (possibly empty) element after the last LF back in the
# buffer
self.buffer = lines.pop()
for line in lines:
            if line and line[-1] == CR:
line = line[:-1]
self.lineReceived(line)
def lineReceived(self, line):
log.msg("DCC CHAT<%s> %s" % (self.remoteParty, line))
self.client.privmsg(self.remoteParty,
self.client.nickname, line)
class DccChatFactory(protocol.ClientFactory):
protocol = DccChat
noisy = 0
def __init__(self, client, queryData):
self.client = client
self.queryData = queryData
def buildProtocol(self, addr):
p = self.protocol(client=self.client, queryData=self.queryData)
p.factory = self
return p
def clientConnectionFailed(self, unused_connector, unused_reason):
self.client.dcc_sessions.remove(self)
def clientConnectionLost(self, unused_connector, unused_reason):
self.client.dcc_sessions.remove(self)
def dccDescribe(data):
"""Given the data chunk from a DCC query, return a descriptive string.
"""
orig_data = data
data = data.split()
if len(data) < 4:
return orig_data
(dcctype, arg, address, port) = data[:4]
if '.' in address:
pass
else:
try:
address = long(address)
except ValueError:
pass
else:
address = (
(address >> 24) & 0xFF,
(address >> 16) & 0xFF,
(address >> 8) & 0xFF,
address & 0xFF,
)
address = '.'.join(map(str, address))
if dcctype == 'SEND':
filename = arg
size_txt = ''
if len(data) >= 5:
try:
size = int(data[4])
size_txt = ' of size %d bytes' % (size,)
except ValueError:
pass
dcc_text = ("SEND for file '%s'%s at host %s, port %s"
% (filename, size_txt, address, port))
elif dcctype == 'CHAT':
dcc_text = ("CHAT for host %s, port %s"
% (address, port))
else:
dcc_text = orig_data
return dcc_text
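# Hedged example of dccDescribe output (host, port and size are made up):
#   dccDescribe('SEND notes.txt 2130706433 8000 1024')
#   # -> "SEND for file 'notes.txt' of size 1024 bytes at host 127.0.0.1, port 8000"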
class DccFileReceive(DccFileReceiveBasic):
"""Higher-level coverage for getting a file from DCC SEND.
I allow you to change the file's name and destination directory.
I won't overwrite an existing file unless I've been told it's okay
to do so. If passed the resumeOffset keyword argument I will attempt to
resume the file from that amount of bytes.
XXX: I need to let the client know when I am finished.
XXX: I need to decide how to keep a progress indicator updated.
XXX: Client needs a way to tell me "Do not finish until I say so."
XXX: I need to make sure the client understands if the file cannot be written.
"""
filename = 'dcc'
fileSize = -1
destDir = '.'
overwrite = 0
fromUser = None
queryData = None
def __init__(self, filename, fileSize=-1, queryData=None,
destDir='.', resumeOffset=0):
DccFileReceiveBasic.__init__(self, resumeOffset=resumeOffset)
self.filename = filename
self.destDir = destDir
self.fileSize = fileSize
if queryData:
self.queryData = queryData
self.fromUser = self.queryData[0]
def set_directory(self, directory):
"""Set the directory where the downloaded file will be placed.
May raise OSError if the supplied directory path is not suitable.
"""
if not path.exists(directory):
raise OSError(errno.ENOENT, "You see no directory there.",
directory)
if not path.isdir(directory):
raise OSError(errno.ENOTDIR, "You cannot put a file into "
"something which is not a directory.",
directory)
if not os.access(directory, os.X_OK | os.W_OK):
raise OSError(errno.EACCES,
"This directory is too hard to write in to.",
directory)
self.destDir = directory
def set_filename(self, filename):
"""Change the name of the file being transferred.
This replaces the file name provided by the sender.
"""
self.filename = filename
def set_overwrite(self, boolean):
"""May I overwrite existing files?
"""
self.overwrite = boolean
# Protocol-level methods.
def connectionMade(self):
dst = path.abspath(path.join(self.destDir,self.filename))
exists = path.exists(dst)
if self.resume and exists:
# I have been told I want to resume, and a file already
# exists - Here we go
self.file = open(dst, 'ab')
log.msg("Attempting to resume %s - starting from %d bytes" %
(self.file, self.file.tell()))
elif self.overwrite or not exists:
self.file = open(dst, 'wb')
else:
raise OSError(errno.EEXIST,
"There's a file in the way. "
"Perhaps that's why you cannot open it.",
dst)
def dataReceived(self, data):
self.file.write(data)
DccFileReceiveBasic.dataReceived(self, data)
# XXX: update a progress indicator here?
def connectionLost(self, reason):
"""When the connection is lost, I close the file.
"""
self.connected = 0
logmsg = ("%s closed." % (self,))
if self.fileSize > 0:
logmsg = ("%s %d/%d bytes received"
% (logmsg, self.bytesReceived, self.fileSize))
if self.bytesReceived == self.fileSize:
pass # Hooray!
elif self.bytesReceived < self.fileSize:
logmsg = ("%s (Warning: %d bytes short)"
% (logmsg, self.fileSize - self.bytesReceived))
else:
logmsg = ("%s (file larger than expected)"
% (logmsg,))
else:
logmsg = ("%s %d bytes received"
% (logmsg, self.bytesReceived))
if hasattr(self, 'file'):
logmsg = "%s and written to %s.\n" % (logmsg, self.file.name)
if hasattr(self.file, 'close'): self.file.close()
# self.transport.log(logmsg)
def __str__(self):
if not self.connected:
return "<Unconnected DccFileReceive object at %x>" % (id(self),)
from_ = self.transport.getPeer()
if self.fromUser:
from_ = "%s (%s)" % (self.fromUser, from_)
s = ("DCC transfer of '%s' from %s" % (self.filename, from_))
return s
def __repr__(self):
s = ("<%s at %x: GET %s>"
% (self.__class__, id(self), self.filename))
return s
_OFF = '\x0f'
_BOLD = '\x02'
_COLOR = '\x03'
_REVERSE_VIDEO = '\x16'
_UNDERLINE = '\x1f'
# Mapping of IRC color names to their color values.
_IRC_COLORS = dict(
zip(['white', 'black', 'blue', 'green', 'lightRed', 'red', 'magenta',
'orange', 'yellow', 'lightGreen', 'cyan', 'lightCyan', 'lightBlue',
'lightMagenta', 'gray', 'lightGray'], range(16)))
# Mapping of IRC color values to their color names.
_IRC_COLOR_NAMES = dict((code, name) for name, code in _IRC_COLORS.items())
class _CharacterAttributes(_textattributes.CharacterAttributesMixin):
"""
Factory for character attributes, including foreground and background color
and non-color attributes such as bold, reverse video and underline.
Character attributes are applied to actual text by using object
indexing-syntax (C{obj['abc']}) after accessing a factory attribute, for
example::
attributes.bold['Some text']
These can be nested to mix attributes::
attributes.bold[attributes.underline['Some text']]
And multiple values can be passed::
attributes.normal[attributes.bold['Some'], ' text']
Non-color attributes can be accessed by attribute name, available
attributes are:
- bold
- reverseVideo
- underline
Available colors are:
0. white
1. black
2. blue
3. green
4. light red
5. red
6. magenta
7. orange
8. yellow
9. light green
10. cyan
11. light cyan
12. light blue
13. light magenta
14. gray
15. light gray
@ivar fg: Foreground colors accessed by attribute name, see above
for possible names.
@ivar bg: Background colors accessed by attribute name, see above
for possible names.
@since: 13.1
"""
fg = _textattributes._ColorAttribute(
_textattributes._ForegroundColorAttr, _IRC_COLORS)
bg = _textattributes._ColorAttribute(
_textattributes._BackgroundColorAttr, _IRC_COLORS)
attrs = {
'bold': _BOLD,
'reverseVideo': _REVERSE_VIDEO,
'underline': _UNDERLINE}
attributes = _CharacterAttributes()
class _FormattingState(_textattributes._FormattingStateMixin):
"""
Formatting state/attributes of a single character.
Attributes include:
- Formatting nullifier
- Bold
- Underline
- Reverse video
- Foreground color
- Background color
@since: 13.1
"""
compareAttributes = (
'off', 'bold', 'underline', 'reverseVideo', 'foreground', 'background')
def __init__(self, off=False, bold=False, underline=False,
reverseVideo=False, foreground=None, background=None):
self.off = off
self.bold = bold
self.underline = underline
self.reverseVideo = reverseVideo
self.foreground = foreground
self.background = background
def toMIRCControlCodes(self):
"""
Emit a mIRC control sequence that will set up all the attributes this
formatting state has set.
@return: A string containing mIRC control sequences that mimic this
formatting state.
"""
attrs = []
if self.bold:
attrs.append(_BOLD)
if self.underline:
attrs.append(_UNDERLINE)
if self.reverseVideo:
attrs.append(_REVERSE_VIDEO)
if self.foreground is not None or self.background is not None:
c = ''
if self.foreground is not None:
c += '%02d' % (self.foreground,)
if self.background is not None:
c += ',%02d' % (self.background,)
attrs.append(_COLOR + c)
return _OFF + ''.join(map(str, attrs))
def _foldr(f, z, xs):
"""
Apply a function of two arguments cumulatively to the items of
a sequence, from right to left, so as to reduce the sequence to
a single value.
@type f: C{callable} taking 2 arguments
@param z: Initial value.
@param xs: Sequence to reduce.
@return: Single value resulting from reducing C{xs}.
"""
return reduce(lambda x, y: f(y, x), reversed(xs), z)
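# Quick illustrative check of the right fold (values are arbitrary):
#   _foldr(lambda x, y: x - y, 0, [1, 2, 3])  # 1 - (2 - (3 - 0)) == 2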
class _FormattingParser(_CommandDispatcherMixin):
"""
A finite-state machine that parses formatted IRC text.
Currently handled formatting includes: bold, reverse, underline,
mIRC color codes and the ability to remove all current formatting.
@see: U{http://www.mirc.co.uk/help/color.txt}
@type _formatCodes: C{dict} mapping C{str} to C{str}
@cvar _formatCodes: Mapping of format code values to names.
@type state: C{str}
@ivar state: Current state of the finite-state machine.
@type _buffer: C{str}
@ivar _buffer: Buffer, containing the text content, of the formatting
sequence currently being parsed, the buffer is used as the content for
L{_attrs} before being added to L{_result} and emptied upon calling
L{emit}.
@type _attrs: C{set}
@ivar _attrs: Set of the applicable formatting states (bold, underline,
etc.) for the current L{_buffer}, these are applied to L{_buffer} when
calling L{emit}.
@type foreground: L{_ForegroundColorAttr}
@ivar foreground: Current foreground color attribute, or C{None}.
@type background: L{_BackgroundColorAttr}
@ivar background: Current background color attribute, or C{None}.
@ivar _result: Current parse result.
"""
prefix = 'state'
_formatCodes = {
_OFF: 'off',
_BOLD: 'bold',
_COLOR: 'color',
_REVERSE_VIDEO: 'reverseVideo',
_UNDERLINE: 'underline'}
def __init__(self):
self.state = 'TEXT'
self._buffer = ''
self._attrs = set()
self._result = None
self.foreground = None
self.background = None
def process(self, ch):
"""
Handle input.
@type ch: C{str}
@param ch: A single character of input to process
"""
self.dispatch(self.state, ch)
def complete(self):
"""
Flush the current buffer and return the final parsed result.
@return: Structured text and attributes.
"""
self.emit()
if self._result is None:
self._result = attributes.normal
return self._result
def emit(self):
"""
Add the currently parsed input to the result.
"""
if self._buffer:
attrs = [getattr(attributes, name) for name in self._attrs]
attrs.extend(filter(None, [self.foreground, self.background]))
if not attrs:
attrs.append(attributes.normal)
attrs.append(self._buffer)
attr = _foldr(operator.getitem, attrs.pop(), attrs)
if self._result is None:
self._result = attr
else:
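                # Indexing a _textattributes attribute appears to append the
                # operand as a child and return the attribute itself, so this
                # expression mutates _result in place; it is not the no-op it
                # reads like.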
self._result[attr]
self._buffer = ''
def state_TEXT(self, ch):
"""
Handle the "text" state.
Along with regular text, single token formatting codes are handled
in this state too.
@param ch: The character being processed.
"""
formatName = self._formatCodes.get(ch)
if formatName == 'color':
self.emit()
self.state = 'COLOR_FOREGROUND'
else:
if formatName is None:
self._buffer += ch
else:
self.emit()
if formatName == 'off':
self._attrs = set()
self.foreground = self.background = None
else:
self._attrs.symmetric_difference_update([formatName])
def state_COLOR_FOREGROUND(self, ch):
"""
Handle the foreground color state.
Foreground colors can consist of up to two digits and may optionally
end in a I{,}. Any non-digit or non-comma characters are treated as
invalid input and result in the state being reset to "text".
@param ch: The character being processed.
"""
# Color codes may only be a maximum of two characters.
if ch.isdigit() and len(self._buffer) < 2:
self._buffer += ch
else:
if self._buffer:
# Wrap around for color numbers higher than we support, like
# most other IRC clients.
col = int(self._buffer) % len(_IRC_COLORS)
self.foreground = getattr(attributes.fg, _IRC_COLOR_NAMES[col])
else:
# If there were no digits, then this has been an empty color
# code and we can reset the color state.
self.foreground = self.background = None
if ch == ',' and self._buffer:
# If there's a comma and it's not the first thing, move on to
# the background state.
self._buffer = ''
self.state = 'COLOR_BACKGROUND'
else:
# Otherwise, this is a bogus color code, fall back to text.
self._buffer = ''
self.state = 'TEXT'
self.emit()
self.process(ch)
def state_COLOR_BACKGROUND(self, ch):
"""
Handle the background color state.
Background colors can consist of up to two digits and must occur after
a foreground color and must be preceded by a I{,}. Any non-digit
character is treated as invalid input and results in the state being
set to "text".
@param ch: The character being processed.
"""
# Color codes may only be a maximum of two characters.
if ch.isdigit() and len(self._buffer) < 2:
self._buffer += ch
else:
if self._buffer:
# Wrap around for color numbers higher than we support, like
# most other IRC clients.
col = int(self._buffer) % len(_IRC_COLORS)
self.background = getattr(attributes.bg, _IRC_COLOR_NAMES[col])
self._buffer = ''
self.emit()
self.state = 'TEXT'
self.process(ch)
def parseFormattedText(text):
"""
Parse text containing IRC formatting codes into structured information.
Color codes are mapped from 0 to 15 and wrap around if greater than 15.
@type text: C{str}
@param text: Formatted text to parse.
@return: Structured text and attributes.
@since: 13.1
"""
state = _FormattingParser()
for ch in text:
state.process(ch)
return state.complete()
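# Hedged example ('\x02' is the bold toggle defined above as _BOLD):
#   parseFormattedText('\x02hi\x02 there')
#   # -> structured text roughly equivalent to
#   #    attributes.normal[attributes.bold['hi'], ' there']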
def assembleFormattedText(formatted):
"""
Assemble formatted text from structured information.
Currently handled formatting includes: bold, reverse, underline,
mIRC color codes and the ability to remove all current formatting.
It is worth noting that assembled text will always begin with the control
code to disable other attributes for the sake of correctness.
For example::
from twisted.words.protocols.irc import attributes as A
assembleFormattedText(
A.normal[A.bold['Time: '], A.fg.lightRed['Now!']])
Would produce "Time: " in bold formatting, followed by "Now!" with a
foreground color of light red and without any additional formatting.
Available attributes are:
- bold
- reverseVideo
- underline
Available colors are:
0. white
1. black
2. blue
3. green
4. light red
5. red
6. magenta
7. orange
8. yellow
9. light green
10. cyan
11. light cyan
12. light blue
13. light magenta
14. gray
15. light gray
@see: U{http://www.mirc.co.uk/help/color.txt}
@param formatted: Structured text and attributes.
@rtype: C{str}
@return: String containing mIRC control sequences that mimic those
specified by L{formatted}.
@since: 13.1
"""
return _textattributes.flatten(
formatted, _FormattingState(), 'toMIRCControlCodes')
def stripFormatting(text):
"""
Remove all formatting codes from C{text}, leaving only the text.
@type text: C{str}
@param text: Formatted text to parse.
@rtype: C{str}
@return: Plain text without any control sequences.
@since: 13.1
"""
formatted = parseFormattedText(text)
return _textattributes.flatten(
formatted, _textattributes.DefaultFormattingState())
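# Hedged example: control codes vanish, plain text remains.
#   stripFormatting('\x02bold\x02 and \x034red\x03 text')  # -> 'bold and red text'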
# CTCP constants and helper functions
X_DELIM = chr(001)
def ctcpExtract(message):
"""
Extract CTCP data from a string.
@return: A C{dict} containing two keys:
- C{'extended'}: A list of CTCP (tag, data) tuples.
- C{'normal'}: A list of strings which were not inside a CTCP delimiter.
"""
extended_messages = []
normal_messages = []
retval = {'extended': extended_messages,
'normal': normal_messages }
messages = message.split(X_DELIM)
odd = 0
    # X1 extended data X2 normal data X3 extended data X4 normal...
while messages:
if odd:
extended_messages.append(messages.pop(0))
else:
normal_messages.append(messages.pop(0))
odd = not odd
extended_messages[:] = filter(None, extended_messages)
normal_messages[:] = filter(None, normal_messages)
extended_messages[:] = map(ctcpDequote, extended_messages)
for i in xrange(len(extended_messages)):
m = extended_messages[i].split(SPC, 1)
tag = m[0]
if len(m) > 1:
data = m[1]
else:
data = None
extended_messages[i] = (tag, data)
return retval
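# Hedged example (X_DELIM, defined above, is '\x01'):
#   ctcpExtract('hello \x01ACTION waves\x01 bye')
#   # -> {'extended': [('ACTION', 'waves')], 'normal': ['hello ', ' bye']}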
# CTCP escaping
M_QUOTE = chr(020)
mQuoteTable = {
NUL: M_QUOTE + '0',
NL: M_QUOTE + 'n',
CR: M_QUOTE + 'r',
M_QUOTE: M_QUOTE + M_QUOTE
}
mDequoteTable = {}
for k, v in mQuoteTable.items():
mDequoteTable[v[-1]] = k
del k, v
mEscape_re = re.compile('%s.' % (re.escape(M_QUOTE),), re.DOTALL)
def lowQuote(s):
for c in (M_QUOTE, NUL, NL, CR):
s = s.replace(c, mQuoteTable[c])
return s
def lowDequote(s):
def sub(matchobj, mDequoteTable=mDequoteTable):
s = matchobj.group()[1]
try:
s = mDequoteTable[s]
except KeyError:
s = s
return s
return mEscape_re.sub(sub, s)
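# Hedged round-trip check for the low-level quoting pair:
#   s = 'line one\nline two\x10'
#   lowDequote(lowQuote(s)) == s  # expected True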
X_QUOTE = '\\'
xQuoteTable = {
X_DELIM: X_QUOTE + 'a',
X_QUOTE: X_QUOTE + X_QUOTE
}
xDequoteTable = {}
for k, v in xQuoteTable.items():
xDequoteTable[v[-1]] = k
xEscape_re = re.compile('%s.' % (re.escape(X_QUOTE),), re.DOTALL)
def ctcpQuote(s):
for c in (X_QUOTE, X_DELIM):
s = s.replace(c, xQuoteTable[c])
return s
def ctcpDequote(s):
def sub(matchobj, xDequoteTable=xDequoteTable):
s = matchobj.group()[1]
try:
s = xDequoteTable[s]
except KeyError:
s = s
return s
return xEscape_re.sub(sub, s)
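# Hedged round-trip check for CTCP-level quoting:
#   t = 'mixed \x01 and \\ content'
#   ctcpDequote(ctcpQuote(t)) == t  # expected True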
def ctcpStringify(messages):
"""
@type messages: a list of extended messages. An extended
message is a (tag, data) tuple, where 'data' may be C{None}, a
string, or a list of strings to be joined with whitespace.
@returns: String
"""
coded_messages = []
for (tag, data) in messages:
if data:
if not isinstance(data, types.StringType):
try:
# data as list-of-strings
data = " ".join(map(str, data))
except TypeError:
                    # No? Then use its %s representation.
pass
m = "%s %s" % (tag, data)
else:
m = str(tag)
m = ctcpQuote(m)
m = "%s%s%s" % (X_DELIM, m, X_DELIM)
coded_messages.append(m)
line = ''.join(coded_messages)
return line
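# Hedged example: tags with and without data, each wrapped in X_DELIM:
#   ctcpStringify([('VERSION', None), ('PING', '12345')])
#   # -> '\x01VERSION\x01\x01PING 12345\x01'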
# Constants (from RFC 2812)
RPL_WELCOME = '001'
RPL_YOURHOST = '002'
RPL_CREATED = '003'
RPL_MYINFO = '004'
RPL_ISUPPORT = '005'
RPL_BOUNCE = '010'
RPL_USERHOST = '302'
RPL_ISON = '303'
RPL_AWAY = '301'
RPL_UNAWAY = '305'
RPL_NOWAWAY = '306'
RPL_WHOISUSER = '311'
RPL_WHOISSERVER = '312'
RPL_WHOISOPERATOR = '313'
RPL_WHOISIDLE = '317'
RPL_ENDOFWHOIS = '318'
RPL_WHOISCHANNELS = '319'
RPL_WHOWASUSER = '314'
RPL_ENDOFWHOWAS = '369'
RPL_LISTSTART = '321'
RPL_LIST = '322'
RPL_LISTEND = '323'
RPL_UNIQOPIS = '325'
RPL_CHANNELMODEIS = '324'
RPL_NOTOPIC = '331'
RPL_TOPIC = '332'
RPL_INVITING = '341'
RPL_SUMMONING = '342'
RPL_INVITELIST = '346'
RPL_ENDOFINVITELIST = '347'
RPL_EXCEPTLIST = '348'
RPL_ENDOFEXCEPTLIST = '349'
RPL_VERSION = '351'
RPL_WHOREPLY = '352'
RPL_ENDOFWHO = '315'
RPL_NAMREPLY = '353'
RPL_ENDOFNAMES = '366'
RPL_LINKS = '364'
RPL_ENDOFLINKS = '365'
RPL_BANLIST = '367'
RPL_ENDOFBANLIST = '368'
RPL_INFO = '371'
RPL_ENDOFINFO = '374'
RPL_MOTDSTART = '375'
RPL_MOTD = '372'
RPL_ENDOFMOTD = '376'
RPL_YOUREOPER = '381'
RPL_REHASHING = '382'
RPL_YOURESERVICE = '383'
RPL_TIME = '391'
RPL_USERSSTART = '392'
RPL_USERS = '393'
RPL_ENDOFUSERS = '394'
RPL_NOUSERS = '395'
RPL_TRACELINK = '200'
RPL_TRACECONNECTING = '201'
RPL_TRACEHANDSHAKE = '202'
RPL_TRACEUNKNOWN = '203'
RPL_TRACEOPERATOR = '204'
RPL_TRACEUSER = '205'
RPL_TRACESERVER = '206'
RPL_TRACESERVICE = '207'
RPL_TRACENEWTYPE = '208'
RPL_TRACECLASS = '209'
RPL_TRACERECONNECT = '210'
RPL_TRACELOG = '261'
RPL_TRACEEND = '262'
RPL_STATSLINKINFO = '211'
RPL_STATSCOMMANDS = '212'
RPL_ENDOFSTATS = '219'
RPL_STATSUPTIME = '242'
RPL_STATSOLINE = '243'
RPL_UMODEIS = '221'
RPL_SERVLIST = '234'
RPL_SERVLISTEND = '235'
RPL_LUSERCLIENT = '251'
RPL_LUSEROP = '252'
RPL_LUSERUNKNOWN = '253'
RPL_LUSERCHANNELS = '254'
RPL_LUSERME = '255'
RPL_ADMINME = '256'
# RFC 2812 defines 257 and 258 as two distinct admin-location replies.
RPL_ADMINLOC1 = '257'
RPL_ADMINLOC2 = '258'
RPL_ADMINEMAIL = '259'
RPL_TRYAGAIN = '263'
ERR_NOSUCHNICK = '401'
ERR_NOSUCHSERVER = '402'
ERR_NOSUCHCHANNEL = '403'
ERR_CANNOTSENDTOCHAN = '404'
ERR_TOOMANYCHANNELS = '405'
ERR_WASNOSUCHNICK = '406'
ERR_TOOMANYTARGETS = '407'
ERR_NOSUCHSERVICE = '408'
ERR_NOORIGIN = '409'
ERR_NORECIPIENT = '411'
ERR_NOTEXTTOSEND = '412'
ERR_NOTOPLEVEL = '413'
ERR_WILDTOPLEVEL = '414'
ERR_BADMASK = '415'
ERR_UNKNOWNCOMMAND = '421'
ERR_NOMOTD = '422'
ERR_NOADMININFO = '423'
ERR_FILEERROR = '424'
ERR_NONICKNAMEGIVEN = '431'
ERR_ERRONEUSNICKNAME = '432'
ERR_NICKNAMEINUSE = '433'
ERR_NICKCOLLISION = '436'
ERR_UNAVAILRESOURCE = '437'
ERR_USERNOTINCHANNEL = '441'
ERR_NOTONCHANNEL = '442'
ERR_USERONCHANNEL = '443'
ERR_NOLOGIN = '444'
ERR_SUMMONDISABLED = '445'
ERR_USERSDISABLED = '446'
ERR_NOTREGISTERED = '451'
ERR_NEEDMOREPARAMS = '461'
ERR_ALREADYREGISTRED = '462'
ERR_NOPERMFORHOST = '463'
ERR_PASSWDMISMATCH = '464'
ERR_YOUREBANNEDCREEP = '465'
ERR_YOUWILLBEBANNED = '466'
ERR_KEYSET = '467'
ERR_CHANNELISFULL = '471'
ERR_UNKNOWNMODE = '472'
ERR_INVITEONLYCHAN = '473'
ERR_BANNEDFROMCHAN = '474'
ERR_BADCHANNELKEY = '475'
ERR_BADCHANMASK = '476'
ERR_NOCHANMODES = '477'
ERR_BANLISTFULL = '478'
ERR_NOPRIVILEGES = '481'
ERR_CHANOPRIVSNEEDED = '482'
ERR_CANTKILLSERVER = '483'
ERR_RESTRICTED = '484'
ERR_UNIQOPPRIVSNEEDED = '485'
ERR_NOOPERHOST = '491'
ERR_NOSERVICEHOST = '492'
ERR_UMODEUNKNOWNFLAG = '501'
ERR_USERSDONTMATCH = '502'
# And hey, as long as the strings are already intern'd...
symbolic_to_numeric = {
"RPL_WELCOME": '001',
"RPL_YOURHOST": '002',
"RPL_CREATED": '003',
"RPL_MYINFO": '004',
"RPL_ISUPPORT": '005',
"RPL_BOUNCE": '010',
"RPL_USERHOST": '302',
"RPL_ISON": '303',
"RPL_AWAY": '301',
"RPL_UNAWAY": '305',
"RPL_NOWAWAY": '306',
"RPL_WHOISUSER": '311',
"RPL_WHOISSERVER": '312',
"RPL_WHOISOPERATOR": '313',
"RPL_WHOISIDLE": '317',
"RPL_ENDOFWHOIS": '318',
"RPL_WHOISCHANNELS": '319',
"RPL_WHOWASUSER": '314',
"RPL_ENDOFWHOWAS": '369',
"RPL_LISTSTART": '321',
"RPL_LIST": '322',
"RPL_LISTEND": '323',
"RPL_UNIQOPIS": '325',
"RPL_CHANNELMODEIS": '324',
"RPL_NOTOPIC": '331',
"RPL_TOPIC": '332',
"RPL_INVITING": '341',
"RPL_SUMMONING": '342',
"RPL_INVITELIST": '346',
"RPL_ENDOFINVITELIST": '347',
"RPL_EXCEPTLIST": '348',
"RPL_ENDOFEXCEPTLIST": '349',
"RPL_VERSION": '351',
"RPL_WHOREPLY": '352',
"RPL_ENDOFWHO": '315',
"RPL_NAMREPLY": '353',
"RPL_ENDOFNAMES": '366',
"RPL_LINKS": '364',
"RPL_ENDOFLINKS": '365',
"RPL_BANLIST": '367',
"RPL_ENDOFBANLIST": '368',
"RPL_INFO": '371',
"RPL_ENDOFINFO": '374',
"RPL_MOTDSTART": '375',
"RPL_MOTD": '372',
"RPL_ENDOFMOTD": '376',
"RPL_YOUREOPER": '381',
"RPL_REHASHING": '382',
"RPL_YOURESERVICE": '383',
"RPL_TIME": '391',
"RPL_USERSSTART": '392',
"RPL_USERS": '393',
"RPL_ENDOFUSERS": '394',
"RPL_NOUSERS": '395',
"RPL_TRACELINK": '200',
"RPL_TRACECONNECTING": '201',
"RPL_TRACEHANDSHAKE": '202',
"RPL_TRACEUNKNOWN": '203',
"RPL_TRACEOPERATOR": '204',
"RPL_TRACEUSER": '205',
"RPL_TRACESERVER": '206',
"RPL_TRACESERVICE": '207',
"RPL_TRACENEWTYPE": '208',
"RPL_TRACECLASS": '209',
"RPL_TRACERECONNECT": '210',
"RPL_TRACELOG": '261',
"RPL_TRACEEND": '262',
"RPL_STATSLINKINFO": '211',
"RPL_STATSCOMMANDS": '212',
"RPL_ENDOFSTATS": '219',
"RPL_STATSUPTIME": '242',
"RPL_STATSOLINE": '243',
"RPL_UMODEIS": '221',
"RPL_SERVLIST": '234',
"RPL_SERVLISTEND": '235',
"RPL_LUSERCLIENT": '251',
"RPL_LUSEROP": '252',
"RPL_LUSERUNKNOWN": '253',
"RPL_LUSERCHANNELS": '254',
"RPL_LUSERME": '255',
"RPL_ADMINME": '256',
"RPL_ADMINLOC": '257',
"RPL_ADMINLOC": '258',
"RPL_ADMINEMAIL": '259',
"RPL_TRYAGAIN": '263',
"ERR_NOSUCHNICK": '401',
"ERR_NOSUCHSERVER": '402',
"ERR_NOSUCHCHANNEL": '403',
"ERR_CANNOTSENDTOCHAN": '404',
"ERR_TOOMANYCHANNELS": '405',
"ERR_WASNOSUCHNICK": '406',
"ERR_TOOMANYTARGETS": '407',
"ERR_NOSUCHSERVICE": '408',
"ERR_NOORIGIN": '409',
"ERR_NORECIPIENT": '411',
"ERR_NOTEXTTOSEND": '412',
"ERR_NOTOPLEVEL": '413',
"ERR_WILDTOPLEVEL": '414',
"ERR_BADMASK": '415',
"ERR_UNKNOWNCOMMAND": '421',
"ERR_NOMOTD": '422',
"ERR_NOADMININFO": '423',
"ERR_FILEERROR": '424',
"ERR_NONICKNAMEGIVEN": '431',
"ERR_ERRONEUSNICKNAME": '432',
"ERR_NICKNAMEINUSE": '433',
"ERR_NICKCOLLISION": '436',
"ERR_UNAVAILRESOURCE": '437',
"ERR_USERNOTINCHANNEL": '441',
"ERR_NOTONCHANNEL": '442',
"ERR_USERONCHANNEL": '443',
"ERR_NOLOGIN": '444',
"ERR_SUMMONDISABLED": '445',
"ERR_USERSDISABLED": '446',
"ERR_NOTREGISTERED": '451',
"ERR_NEEDMOREPARAMS": '461',
"ERR_ALREADYREGISTRED": '462',
"ERR_NOPERMFORHOST": '463',
"ERR_PASSWDMISMATCH": '464',
"ERR_YOUREBANNEDCREEP": '465',
"ERR_YOUWILLBEBANNED": '466',
"ERR_KEYSET": '467',
"ERR_CHANNELISFULL": '471',
"ERR_UNKNOWNMODE": '472',
"ERR_INVITEONLYCHAN": '473',
"ERR_BANNEDFROMCHAN": '474',
"ERR_BADCHANNELKEY": '475',
"ERR_BADCHANMASK": '476',
"ERR_NOCHANMODES": '477',
"ERR_BANLISTFULL": '478',
"ERR_NOPRIVILEGES": '481',
"ERR_CHANOPRIVSNEEDED": '482',
"ERR_CANTKILLSERVER": '483',
"ERR_RESTRICTED": '484',
"ERR_UNIQOPPRIVSNEEDED": '485',
"ERR_NOOPERHOST": '491',
"ERR_NOSERVICEHOST": '492',
"ERR_UMODEUNKNOWNFLAG": '501',
"ERR_USERSDONTMATCH": '502',
}
numeric_to_symbolic = {}
for k, v in symbolic_to_numeric.items():
numeric_to_symbolic[v] = k
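# Hedged sanity check of the two mappings:
#   symbolic_to_numeric['ERR_NICKNAMEINUSE']  # -> '433'
#   numeric_to_symbolic['001']                # -> 'RPL_WELCOME'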
| gpl-3.0 | -250,563,550,933,231,500 | 29.713525 | 112 | 0.577575 | false |
dpac-vlsi/SynchroTrace | util/configs/example/ruby_direct_test.py | 8 | 4459 | # Copyright (c) 2006-2007 The Regents of The University of Michigan
# Copyright (c) 2009 Advanced Micro Devices, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Ron Dreslinski
# Brad Beckmann
import m5
from m5.objects import *
from m5.defines import buildEnv
from m5.util import addToPath
import os, optparse, sys
addToPath('../common')
addToPath('../ruby')
import Options
import Ruby
# Get paths we might need. It's expected this file is in m5/configs/example.
config_path = os.path.dirname(os.path.abspath(__file__))
config_root = os.path.dirname(config_path)
m5_root = os.path.dirname(config_root)
parser = optparse.OptionParser()
Options.addCommonOptions(parser)
parser.add_option("-l", "--requests", metavar="N", default=100,
help="Stop after N requests")
parser.add_option("-f", "--wakeup_freq", metavar="N", default=10,
help="Wakeup every N cycles")
parser.add_option("--test-type", type="string", default="SeriesGetx",
help="SeriesGetx|SeriesGets|Invalidate")
#
# Add the ruby specific and protocol specific options
#
Ruby.define_options(parser)
execfile(os.path.join(config_root, "common", "Options.py"))
(options, args) = parser.parse_args()
if args:
print "Error: script doesn't take any positional arguments"
sys.exit(1)
#
# Select the direct test generator
#
if options.test_type == "SeriesGetx":
generator = SeriesRequestGenerator(num_cpus = options.num_cpus,
issue_writes = True)
elif options.test_type == "SeriesGets":
generator = SeriesRequestGenerator(num_cpus = options.num_cpus,
issue_writes = False)
elif options.test_type == "Invalidate":
generator = InvalidateGenerator(num_cpus = options.num_cpus)
else:
print "Error: unknown direct test generator"
sys.exit(1)
#
# Create the M5 system. Note that the Memory Object isn't
# actually used by the rubytester, but is included to support the
# M5 memory size == Ruby memory size checks
#
system = System(physmem = SimpleMemory())
#
# Create the ruby random tester
#
system.tester = RubyDirectedTester(requests_to_complete = \
options.requests,
generator = generator)
Ruby.create_system(options, system)
assert(options.num_cpus == len(system.ruby._cpu_ruby_ports))
for ruby_port in system.ruby._cpu_ruby_ports:
#
# Tie the ruby tester ports to the ruby cpu ports
#
system.tester.cpuPort = ruby_port.slave
# -----------------------
# run simulation
# -----------------------
root = Root( full_system = False, system = system )
root.system.mem_mode = 'timing'
# Not much point in this being higher than the L1 latency
m5.ticks.setGlobalFrequency('1ns')
# instantiate configuration
m5.instantiate()
# simulate until program terminates
exit_event = m5.simulate(options.maxtick)
print 'Exiting @ tick', m5.curTick(), 'because', exit_event.getCause()
| bsd-3-clause | 7,195,518,720,051,425,000 | 34.672 | 77 | 0.709352 | false |
dcneeme/droidcontroller | pid.py | 1 | 15883 | #-------------------------------------------------------------------------------
# PID.py
# A simple implementation of a PID controller and also a three-step motor control
#-------------------------------------------------------------------------------
# Heavily modified PID source from the book "Real-World Instrumentation with Python"
# by J. M. Hughes, published by O'Reilly Media, December 2010,
# ISBN 978-0-596-80956-0.
#-------------------------------------------------------------------------------
# modified and ThreeStep class added by droid4control.com 2014
#
# usage example:
# from pid import *
# f=PID(setpoint=20, min=-100, max=100)
# f.output(11) # returns output, p, i, d, e, onLimit
# or
# f=ThreeStep(setpoint=3)
# print f.output(10)
# last change 02.03.2014 by neeme
import time
class PID:
""" Simple PID control.
This class implements a simplistic PID control algorithm.
"""
def __init__(self, setpoint = 0, P = 1.0, I = 0.01, D = 0.0, min = None, max = None): # initialize gains
self.setSetpoint(setpoint)
self.setKp(P)
self.setKi(I)
self.setKd(D)
self.setMin(min)
self.setMax(max)
self.Initialize()
def setSetpoint(self, invar):
""" Set the goal for the actual value """
self.setPoint = invar
def setKp(self, invar):
""" Sets proportional gain """
self.Kp = invar
def setKi(self, invar):
""" Set integral gain and modify integral accordingly to avoid related jumps """
try:
#print('trying to set new setKi '+str(invar)+' while existing Ki='+str(self.Ki)) # debug
if self.Ki > 0 and invar > 0 and self.Ki != invar:
print('setKi with initialize')
self.Ki = invar
self.Initialize()
else:
self.Ki = invar
except:
self.Ki = invar
def resetIntegral(self):
""" reset integral part """
self.Ci = 0
def setKd(self, invar):
""" Set derivative gain """
self.Kd = invar
def setPrevErr(self, invar):
""" Set previous error value """
self.prev_err = invar
def setMin(self, invar):
""" Set lower limit for output """
try:
#print('pid: trying to set new outMin '+str(invar)+' while outMax='+str(self.outMax)) # debug
if self.Ki > 0 and invar != None and self.outMin != invar:
print('pid: setMin with initialize')
self.outMin = invar
self.Initialize()
else:
self.outMin = invar
except:
self.outMin = invar
def setMax(self, invar):
""" Set upper limit for output """
try:
#print('pid: trying to set new outMax '+str(invar)+' while outMin='+str(self.outMin)) # debug
if self.Ki > 0 and invar != None and self.outMax != invar:
print('pid: setMax with initialize')
self.outMax = invar
self.Initialize()
else:
self.outMax = invar
except:
self.outMax = invar
def Initialize(self):
""" initialize delta t variables """
self.currtm = time.time()
self.prevtm = self.currtm
self.prev_err = 0
        self.onLimit = 0 # value 0 means between limits, -1 on lo limit, 1 on hi limit
# term result variables
self.Cp = 0
if self.Ki >0 and self.outMin != None and self.outMax != None:
self.Ci=(2 * self.outMin + self.outMax) / (3 * self.Ki) # to avoid long integration to normal level, set int between outmin and outmax
print('pid: integral biased to '+str(round(self.Ci))+' while Ki='+str(self.Ki))
else:
self.Ci = 0
self.Cd = 0
print('pid: initialized')
def output(self, invar):
""" Performs a PID computation and returns a control value based on
the elapsed time (dt) and the difference between actual value and setpoint.
"""
dir=['down','','up'] # up or down
try:
error=self.setPoint - invar # error value
except:
            error=0 # for the case of invalid actual
            msg='invalid actual '+repr(invar)+' for pid error calculation, error zero used!'
            print(msg)
self.currtm = time.time() # get t
dt = self.currtm - self.prevtm # get delta t
de = error - self.prev_err # get delta error
self.Cp = self.Kp * error # proportional term
if self.Ki > 0:
if (self.onLimit == 0 or (self.onLimit == -1 and error > 0) or (self.onLimit == 1 and error < 0)):
#integration is only allowed if Ki not zero and no limit reached or when output is moving away from limit
self.onLimit = 0
self.Ci += error * dt # integral term
#print('pid: integration done, new Ci='+str(round(self.Ci)))
else:
print('pid: integration '+dir[self.onLimit+1]+' forbidden due to saturation, onLimit '+str(self.onLimit)+', error '+str(error)) # debug
self.Cd = 0
if dt > 0: # no div by zero
self.Cd = de/dt # derivative term
self.prevtm = self.currtm # save t for next pass
self.prev_err = error # save t-1 error
out=self.Cp + (self.Ki * self.Ci) + (self.Kd * self.Cd) # sum the terms and return the result
if self.outMax is not None and self.outMin is not None:
if not self.outMax > self.outMin: # avoid faulty limits
print('pid: illegal outmin, outmax values:',self.outMin,self.outMax) # important notice!
if self.outMax is not None:
if out > self.outMax:
self.onLimit = 1 # reached hi limit
out = self.outMax
if self.outMin is not None:
if out < self.outMin:
self.onLimit = -1 # reached lo limit
out = self.outMin
if self.outMin is not None and self.outMax is not None: # to be sure about onLimit, double check
if out > self.outMin and out < self.outMax:
if self.onLimit != 0:
print('pid: fixing onLimit error value '+str(self.onLimit)+' to zero!')
self.onLimit = 0 # fix possible error
if out == self.outMax and self.onLimit == -1: # swapped min/max and onlimit values for some reason?
print('pid: hi out and onlimit values do not match! out',out,', outMin',self.outMin,', outMax',self.outMax,', onlimit',self.onLimit)
#self.onLimit = 1 # fix possible error
elif out == self.outMin and self.onLimit == 1:
print('pid: lo out and onlimit values do not match! out',out,', outMin',self.outMin,', outMax',self.outMax,', onlimit',self.onLimit)
#self.onLimit = -1 # fix possible error
print('pid sp',round(self.setPoint),', actual',invar,', out',round(out),', p i d',round(self.Cp), round(self.Ki * self.Ci), round(self.Kd * self.Cd),', onlimit',self.onLimit) # debug
return out, self.Cp, (self.Ki * self.Ci), (self.Kd * self.Cd), error, self.onLimit
class ThreeStep:
""" Three-step motor control.
Outputs pulse length to run the motor in one or another direction
"""
def __init__(self, setpoint = 0, motortime = 100, maxpulse = 10, maxerror = 100, minpulse =1 , minerror = 1, runperiod = 20):
self.setSetpoint(setpoint)
self.setMotorTime(motortime)
self.setMaxpulseLength(maxpulse)
self.setMaxpulseError(maxerror)
self.setMinpulseLength(minpulse)
self.setMinpulseError(minerror)
self.setRunPeriod(runperiod)
self.Initialize()
def setSetpoint(self, invar):
""" Set the setpoint for the actual value """
self.Setpoint = invar
def setMotorTime(self, invar):
""" Sets motor running time in seconds to travel from one limit to another
(give the bigger value if the travel times are different in different directions)
"""
self.MotorTime = abs(invar)
def setMaxpulseLength(self, invar):
""" Sets maximum pulse time in seconds to use """
self.MaxpulseLength = abs(invar)
def setMaxpulseError(self, invar):
""" Ties maximum error to maximum pulse length in seconds to use.
That also defines the 'sensitivity' of the relation between the error and the motor reaction
"""
self.MaxpulseError = abs(invar)
def setMinpulseLength(self, invar):
""" Sets minimum pulse length in seconds to use """
self.MinpulseLength = abs(invar)
def setMinpulseError(self, invar):
""" Ties the minimum pulse length to the error level. This also sets the dead zone,
where there is no output (motor run) below this (absolute) value on either direction """
self.MinpulseError = abs(invar)
def setRunPeriod(self, invar):
""" Sets the time for no new pulse to be started """
self.RunPeriod = abs(invar)
def Initialize(self):
""" initialize time dependant variables
"""
self.currtime = time.time()
#self.prevtime = self.currtime
self.last_start = self.currtime # - self.RunPeriod - 1 # this way we are ready to start a new pulse if needed - this is NOT GOOD! better wait.
self.last_length = 0 # positive or negative value means signal to start pulse with given length in seconds. 0 means no pulse start
self.last_state = 0 # +1 or -1 value means signal to start pulse with given length in seconds
self.last_limit = 0 # value 0 for means travel position between limits, +1 on hi limit, -1 on lo limit
self.runtime = 0 # cumulative runtime towards up - low
self.onLimit = 0
def interpolate(self, x, x1 = 0, y1 = 0, x2 = 0, y2 = 0):
""" Returns linearly interpolated value y based on x and two known points defined by x1y1 and x2y2 """
if y1 != y2: # valid data to avoid division by zero
return y1+(x-x1)*(y2-y1)/(x2-x1)
else:
return None
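    # Hedged example of the linear mapping (note Py2 integer division floors
    # when all arguments are ints):
    #   interpolate(5, x1=0, y1=0, x2=10, y2=100)  # -> 50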
def output(self, invar): # actual as parameter
""" Performs pulse generation if needed and if no previous pulse is currently active.
Returns output values for pulse length, running state and reaching the travel limit.
All output values can be either positive or negative depending on the direction towards higher or lower limit.
If error gets smaller than minpulse during the nonzero output, zero the output state.
"""
try:
error=self.Setpoint - invar # error value
except:
error=0 # for the case of invalid actual
msg='invalid actual '+repr(invar)+' for 3step error calculation, error zero used!'
print(msg)
#error=self.Setpoint - invar # current error value
self.currtime = time.time() # get current time
state=0 # pulse level, not known yet
#current state, need to stop? level control happens by calling only!
if self.currtime > self.last_start + abs(self.last_length) and self.last_state != 0: # need to stop ########## STOP ##############
#print('need to stop ongoing pulse due to pulse time (',abs(self.last_length),') s out') # debug
#if self.onLimit == 0 or (self.onLimit == -1 and error > 0) or (self.onLimit == 1 and error < 0): # modify running time
# self.runtime = self.runtime + self.last_state*(self.currtime - self.last_start) # sign via state is important
state = 0 # stop the run
self.last_state = state
print('3step: stopped pulse, cumulative travel time',round(self.runtime))
if self.runtime > self.MotorTime: # limit
self.onLimit = 1 # reached hi limit
self.runtime = self.MotorTime
print('reached hi limit') # debug
if self.runtime < -self.MotorTime: # limit
self.onLimit = -1 # reached lo limit
self.runtime = -self.MotorTime
print('reached lo limit') # debug
#need to start a new pulse? chk runPeriod
if self.currtime > self.last_start + self.RunPeriod and self.last_state == 0: # free to start next pulse (no ongoing)
#print('no ongoing pulse, last_state',self.last_state,'time from previous start',int(self.currtime - self.last_start)) # debug
if abs(error) > self.MinpulseError: # pulse is needed
print('3step: new pulse needed due to error vs minpulseerror',error,self.MinpulseError) # debug
if error > 0 and error > self.MinpulseError: # pulse to run higher needed
length = self.interpolate(error, self.MinpulseError, self.MinpulseLength, self.MaxpulseError, self.MaxpulseLength)
if length > self.MaxpulseLength:
length = self.MaxpulseLength
self.last_length = length
self.last_start = self.currtime
state = 1
#print('3step: started pulse w len',round(length)) # debug
elif error < 0 and error < -self.MinpulseError: # pulse to run lower needed
length = self.interpolate(error, -self.MinpulseError, -self.MinpulseLength, -self.MaxpulseError, -self.MaxpulseLength)
if length < -self.MaxpulseLength:
length = -self.MaxpulseLength
self.last_length = length
self.last_start = self.currtime
state = -1
print('3step: STARTED PULSE w len '+str(length)) # debug
self.runtime = self.runtime+length # new cumulative
else: # no need for a new pulse
length = 0
#print('no need for a pulse due to error vs minpulseerror',error,self.MinpulseError) # debug
else: # no new pulse yet or pulse already active
length = 0
state = self.last_state
msg='3step: pulse last start '+str(int(self.currtime - self.last_start))+' s ago, runperiod '+str(self.RunPeriod)+', cumulative travel time '+str(round(self.runtime)) # debug
print(msg)
#syslog(msg) # debug
#if abs(error) < self.MinpulseError and state != 0: # stop the ongoing pulse - not strictly needed
# state = 0 # if the actual drive to the motor happens via timer controlled by length previously output, this does not have any effect
# print('stop the ongoing pulse') # debug
pulseleft=int(self.last_start + abs(self.last_length) - self.currtime)
if state != 0 and pulseleft > 0:
msg='ongoing pulse time left '+str(pulseleft)+', state (direction) '+str(state) # debug
print(msg)
#syslog(msg) # debug
if state != self.last_state:
self.last_state = state
msg='3step ERROR '+str(round(error))+', minerror '+str(self.MinpulseError)+', maxerror '+str(self.MaxpulseError)+', LENGTH '+str(round(length))+', minpulse '+str(self.MinpulseLength)+', maxpulse '+str(self.MaxpulseLength) # debug
print(msg)
#syslog(msg)
return length, state, self.onLimit, int(self.runtime) | gpl-3.0 | -1,855,959,136,888,601,900 | 46.414925 | 237 | 0.568029 | false |
virtualnobi/MediaFiler | nobi/wx/Menu.py | 1 | 1806 | """
(c) by nobisoft 2016-
"""
# Imports
## Standard
## Contributed
import wx
## nobi
## Project
# Class
class Menu(wx.Menu):
"""An extension of wx.Menu
    - it allows inserting a menu item after another one identified by ID
    - it is intended to eventually collapse consecutive separators into one automatically
"""
# Setters
def insertAfterId(self, anchorId, newText=None, newId=None, newMenu=None):
"""Add an item after the item with anchorId.
If neither newId or newMenu are given, a separator is inserted.
Raise ValueError if both newId and newMenu are given, or either is given without newText.
Raise KeyError if anchorId does not exist.
Number anchorId is the id of the item after which the new one shall be inserted
String newText is the text shown in the menu
Number newId, if given, is the function ID of the new item
wx.Menu newMenu, if given, is the next-level menu
"""
if (newId and newMenu):
raise ValueError
if ((not newText)
and (newId or newMenu)):
raise ValueError
items = self.GetMenuItems()
itemNo = 0
for item in items:
if (item.GetId() == anchorId):
break
itemNo = (itemNo + 1)
if (len(items) <= itemNo):
raise KeyError
else:
if (newId):
self.Insert((itemNo + 1), newId, newText, kind=wx.ITEM_NORMAL)
elif (newMenu):
# self.InsertMenu((itemNo + 1), 0, newText, newMenu) # invent an ID for wxPython
self.Insert((itemNo + 1), 0, newText, newMenu) # invent an ID for wxPython # wxPython 4
else:
self.InsertSeparator(itemNo + 1)
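# Hedged usage sketch (the IDs and labels are assumptions for illustration):
#   menu = Menu()
#   menu.Append(wx.ID_OPEN, 'Open')
#   menu.insertAfterId(wx.ID_OPEN, newText='Close', newId=wx.ID_CLOSE)
#   menu.insertAfterId(wx.ID_OPEN)  # no newText/newId/newMenu -> separator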
| gpl-3.0 | 4,786,442,484,183,353,000 | 30.137931 | 105 | 0.583056 | false |
jamesbdunlop/defaultMayaLibrary | xml_export/uv_getUVs.py | 2 | 14895 | from apps.app_logger import log
import maya.api.OpenMaya as om
import maya.cmds as cmds
import config_constants as configCONST
def _uvMapIndex(pathToGeo, mapName):
x = 0
p = True
maps = {}
while p:
if cmds.getAttr('%s.uvSet[%s].uvSetName' % (pathToGeo, x)):
maps[cmds.getAttr('%s.uvSet[%s].uvSetName' % (pathToGeo, x))] = x
x = x + 1
else:
p = False
#print maps
return maps[mapName]
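# Hedged usage sketch (shape and uv-set names are assumptions):
#   idx = _uvMapIndex('pCubeShape1', 'map1')  # -> integer uvSet index, e.g. 0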
def _getPerFaceUVInfo(shapeFn, map):
"""
Function used to fetch the information for the assignUV approach
REQUIRED:
# (faceId, vertexIndex, uvId, uvSet='') -> self
# Assigns a UV coordinate from a uvSet to a specified vertex of a face.
"""
getFaceCount = shapeFn.numPolygons
#log(None, method = 'uv_getUVs._getPerFaceUVInfo', message = '{0:<10}{1}'.format('getFaceCount:', getFaceCount), outputToLogFile = False, verbose = configCONST.DEBUGGING)
perFaceUVInfo = {}
for x in range(0, getFaceCount):
#print
#log(None, method = 'uv_getUVs._getPerFaceUVInfo', message = '{0:<10}{1}'.format('FaceID:', x), outputToLogFile = False, verbose = configCONST.DEBUGGING)
## Now setup an array to take the face vert indexes
myVertexIntArray = []
## Now setup an array to take the face vert indexes uvIds
myUVID_IntArray = []
## Now get the verts for this faceIndex
verts = cmds.polyListComponentConversion( '%s.f[%s]' % (shapeFn.name(), x), fromFace=True, toVertex=True)
#log(None, method = 'uv_getUVs._getPerFaceUVInfo', message = '{0:<10}{1}'.format('verts:', verts), outputToLogFile = False, verbose = configCONST.DEBUGGING)
for eachVert in verts:
vertID = eachVert.split('[')[-1].split(']')[0]
if ':' not in vertID:
if int(vertID) not in myVertexIntArray:
myVertexIntArray.append(int(vertID))
else:
for y in range(int(vertID.split(':')[0]), int(vertID.split(':')[-1])+1):
if int(y) not in myVertexIntArray:
myVertexIntArray.append(int(y))
#[myVertexIntArray.append(int(eachVert.split('[')[-1].split(']')[0])) for eachVert in verts if ':' not in eachVert]
#log(None, method = 'uv_getUVs._getPerFaceUVInfo', message = '{0:<10}{1}'.format('myVertexIntArray:', myVertexIntArray), outputToLogFile = False, verbose = configCONST.DEBUGGING)
#try: ## there are no : split verts...
# splitVerts = [[int(vertID) for vertID in eachVert.split('[')[-1].split(']')[0].split(':')] for eachVert in verts if ':' in eachVert]
# #log(None, method = 'uv_getUVs._getPerFaceUVInfo', message = '{0:<10}{1}'.format('splitVerts:', splitVerts), outputToLogFile = False, verbose = configCONST.DEBUGGING)
# [myVertexIntArray.append(int(y)) for y in range(splitVerts[0][0], splitVerts[0][1] + 1)]
# #log(None, method = 'uv_getUVs._getPerFaceUVInfo', message = '{0:<10}{1}'.format('myVertexIntArray:', myVertexIntArray), outputToLogFile = False, verbose = configCONST.DEBUGGING)
#except:
# pass
## Now get the face relative ids
vertCount = len(list(set(myVertexIntArray)))
#log(None, method = 'uv_getUVs._getPerFaceUVInfo', message = '{0:<10}{1}'.format('vertCountAFTER:', vertCount), outputToLogFile = False, verbose = configCONST.DEBUGGING)
if vertCount:
try:
[myUVID_IntArray.append(shapeFn.getPolygonUVid(x, s, map)) for s in range(0, vertCount)]
except:
for s in range(0, vertCount):
try:
uv_id = shapeFn.getPolygonUVid(x, s, map)
#log(None, method = 'uv_getUVs._getPerFaceUVInfo', message = '{0:<10}{1}'.format('uv_id:', uv_id), outputToLogFile = False, verbose = configCONST.DEBUGGING)
myUVID_IntArray.append(uv_id)
except:
myUVID_IntArray.append(None)
#log(None, method = 'uv_getUVs._getPerFaceUVInfo', message = '{0:<10}{1}'.format('myVertexIntArray:', myVertexIntArray), outputToLogFile = False, verbose = configCONST.DEBUGGING)
#log(None, method = 'uv_getUVs._getPerFaceUVInfo', message = '{0:<10}{1}'.format('myUVID_IntArray:', myUVID_IntArray), outputToLogFile = False, verbose = configCONST.DEBUGGING)
perFaceUVInfo[x] = [myVertexIntArray, myUVID_IntArray]
return perFaceUVInfo
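## Hedged sketch of replaying the returned data through the assignUV API
## described above (shapeFn and uvSet are assumed to exist already):
##   for faceId, (vertIds, uvIds) in perFaceUVInfo.items():
##       for vertexIndex, uvId in enumerate(uvIds):
##           if uvId is not None:
##               shapeFn.assignUV(faceId, vertexIndex, uvId, uvSet)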
def _getUandV(shapeFn, map):
"""
Function to return the u and v arrays
"""
myU, myV = shapeFn.getUVs(map)
#log(None, method = 'uv_getUVs._getUandV', message = 'u: {0} \nv:{1}'.format(myU, myV), outputToLogFile = False, verbose = configCONST.DEBUGGING)
return myU, myV
def _getUVSets(shapeFn):
"""
Function to return the current uvSets IN USE from a MFnMesh node
"""
#log(None, method = 'uv_getUVs._getUVSets', message = 'Fetching uvsets now..', outputToLogFile = False, verbose = configCONST.DEBUGGING)
uvsets = []
getFaceCount = shapeFn.numPolygons
try:
[[uvsets.extend([eachUVset]) for eachUVset in shapeFn.getFaceUVSetNames(x) if eachUVset not in uvsets] for x in range(0, getFaceCount) ]
except:
uvsets = None
# for eachUVSet in shapeFn.getUVSetNames():
# for x in range(0, getFaceCount):
# _getUVSets = shapeFn.getFaceUVSetNames(x)
# for eachUVset in _getUVSets:
# if eachUVset not in uvsets:
# uvsets.extend([eachUVset])
#log(None, method = 'uv_getUVs._getUVSets', message = '{0:<10}{1}'.format('uvsets:', uvsets), outputToLogFile = False, verbose = configCONST.DEBUGGING)
    return uvsets
def _getUVShells(shapeFn, map, perFaceInfo):
"""
Function to call the number of shells and return the array of the uv's shell numbers
"""
getShells = shapeFn.getUvShellsIds(map)
#log(None, method = 'uv_getUVs._getUVShells', message = '{0:<10}{1}'.format('getShells:', getShells), outputToLogFile = False, verbose = configCONST.DEBUGGING)
## The index of this list is also the UVIDs from 0 -> end of list
## Need to go through this list and get each index and shell relationship
shellUVs = {}
for x, uvShell in enumerate(getShells[1]):
if not uvShell in shellUVs.keys():
shellUVs[uvShell] = [x]
else:
shellUVs[uvShell].append(x)
#log(None, method = 'uv_getUVs._getUVShells', message = '{0:<10}{1}'.format('shellUVs:', shellUVs), outputToLogFile = False, verbose = configCONST.DEBUGGING)
shellUVsCount = {}
for eachShell, eachUVIDList in shellUVs.items():
#log(None, method = 'uv_getUVs._getUVShells', message = '{0:<10}{1}'.format('shellID:', eachShell), outputToLogFile = False, verbose = configCONST.DEBUGGING)
#log(None, method = 'uv_getUVs._getUVShells', message = '{0:<10}{1}'.format('uvidList:', eachUVIDList), outputToLogFile = False, verbose = configCONST.DEBUGGING)
#log(None, method = 'uv_getUVs._getUVShells', message = '{0:<10}{1}'.format('len:', len(eachUVIDList)), outputToLogFile = False, verbose = configCONST.DEBUGGING)
shellCount = om.MIntArray()
for eachUVID in eachUVIDList:
for eachFace, eachFaceInfo in perFaceInfo.items(): ## eachFaceID, eachFace [VertList] [UVIDList]
if eachUVID in eachFaceInfo[1]: ## If the id is in the shells id list
count = len(eachFaceInfo[0]) ## Get the face vert count
shellCount.append(count) ## Append this to the array
if eachShell not in shellUVsCount.keys():
shellUVsCount[eachShell] = [shellCount]
else:
shellUVsCount[eachShell].append(shellCount)
#log(None, method = 'uv_getUVs._getUVShells', message = '{0:<10}{1}'.format('shellCountLEN:', len(shellCount)), outputToLogFile = False, verbose = configCONST.DEBUGGING)
#log(None, method = 'uv_getUVs._getUVShells', message = '{0:<10}{1}'.format('shellUVsCount:', shellUVsCount), outputToLogFile = False, verbose = configCONST.DEBUGGING)
## Trying to return the array first as opposed to a new list.
return getShells, shellUVs, shellUVsCount
def getUVs(geoName = None, multiOnly = False):
"""
Function to get as much info about the mesh uvs for use later on as possible
"""
### Create dictionary for storing final uv data
#log(None, method = 'uv_getUVs.getUVs', message = '{0:<10}{1}'.format('geoName:', geoName), outputToLogFile = False, verbose = configCONST.DEBUGGING)
uvSetData = {}
shapeFn = None
## Full path to the geo for writing out later.
fullPathToName = cmds.ls(geoName, l = True)[0]
#log(None, method = 'uv_getUVs.getUVs', message = '{0:<10}{1}'.format('fullPathToName:', fullPathToName), outputToLogFile = False, verbose = configCONST.DEBUGGING)
## make sure this is a mesh
getChildren = cmds.listRelatives(fullPathToName, children = True)[0]
#log(None, method = 'uv_getUVs.getUVs', message = '{0:<10}{1}'.format('getChildren:', getChildren), outputToLogFile = False, verbose = configCONST.DEBUGGING)
if getChildren:
#log(None, method = 'uv_getUVs.getUVs', message = 'Shape has a child...', outputToLogFile = False, verbose = configCONST.DEBUGGING)
if cmds.nodeType(getChildren) == 'mesh':
selectionList = om.MSelectionList()
selectionList.add(fullPathToName)
nodeDagPath = selectionList.getDagPath(0)
shapeFn = om.MFnMesh(nodeDagPath)
## Now fetch data from shapeFn
shapeName = shapeFn.name()
#log(None, method = 'uv_getUVs.getUVs', message = '{0:<10}{1}'.format('shapeName:', shapeName), outputToLogFile = False, verbose = configCONST.DEBUGGING)
currentUVSets = shapeFn.getUVSetNames()
## Now we find the UV sets for the mesh into a valid list.
## We're looking through each face to see what uvSets are assigned to them to find valid uv sets.
uvsets = _getUVSets(shapeFn)### VALID UV SETS WILL BE RETURNED IF THE ARTIST HAS CREATED AN EMPTY UV SET IT WILL BE DISCARDED
#log(None, method = 'uv_getUVs.getUVs', message = '{0:<10}{1}'.format('uvSets:', uvsets), outputToLogFile = False, verbose = configCONST.DEBUGGING)
            ## Check to see if the flag for multi uv sets only is on
            if multiOnly:
                ## _getUVSets can return None, so guard before calling len()
                export = bool(uvsets) and len(uvsets) > 1
            else:
                export = True
            if export and uvsets:
#log(None, method = 'uv_getUVs.getUVs', message = '{0:<10}{1}'.format('Processing: ', geoName), outputToLogFile = False, verbose = configCONST.DEBUGGING)
#log(None, method = 'uv_getUVs.getUVs', message = '{0:<10}{1}'.format('len(currentUVSets): ', len(currentUVSets)), outputToLogFile = False, verbose = configCONST.DEBUGGING)
for eachUVSet in uvsets:
data = []
## Add the uvset name....
shapeFn.setCurrentUVSetName(eachUVSet)
## Add the path to the geo
## Returns |path|to|geo
data.extend([fullPathToName])
#log(None, method = 'uv_getUVs.getUVs', message = '{0:<10}{1}'.format('fullPathToName:', fullPathToName), outputToLogFile = False, verbose = configCONST.DEBUGGING)
## Add the name
## Returns nameofUvSet
data.extend([eachUVSet])
#log(None, method = 'uv_getUVs.getUVs', message = '{0:<10}{1}'.format('uvSetName:', eachUVSet), outputToLogFile = False, verbose = configCONST.DEBUGGING)
## Add the u and v from the straight foward fetch
## Returns [uArray], [vArray]
getUVArrays = _getUandV(shapeFn, eachUVSet)
data.extend([getUVArrays[0]])
data.extend([getUVArrays[1]])
#log(None, method = 'uv_getUVs.getUVs', message = '{0:<10}{1}'.format('getUVArraysU:', getUVArrays[0]), outputToLogFile = False, verbose = configCONST.DEBUGGING)
#log(None, method = 'uv_getUVs.getUVs', message = '{0:<10}{1}'.format('getUVArraysV:', getUVArrays[1]), outputToLogFile = False, verbose = configCONST.DEBUGGING)
## Get the perFace info in case we need it for rebuilding later
## Returns {faceId: [myVertixIntArray, myUVID_IntArray]}
faceUVInfo = _getPerFaceUVInfo(shapeFn, eachUVSet)
#log(None, method = 'uv_getUVs.getUVs', message = '{0:<10}{1}'.format('faceUVInfo:', faceUVInfo), outputToLogFile = False, verbose = configCONST.DEBUGGING)
data.extend([faceUVInfo])
## Add the num of uvshells and the shell list
## Returns (shellCount, [vertIndexShellAssociationList]), {shell: [shellUVs]}, [ShellUVsCount]
#getShells = _getUVShells(shapeFn, eachUVSet, faceUVInfo)
##log(None, method = 'uv_getUVs.getUVs', message = '{0:<10}{1}'.format('getShells:', getShells), outputToLogFile = False, verbose = configCONST.DEBUGGING)
#data.extend([getShells])
## The uvName index
#print 'processing uvindex for %s' % eachUVSet
data.extend([_uvMapIndex(pathToGeo = fullPathToName, mapName = eachUVSet)])
#log(None, method = 'uv_getUVs.getUVs', message = '{0:<10}{1}'.format('mapIndex:', _uvMapIndex(pathToGeo = fullPathToName, mapName = eachUVSet)), outputToLogFile = False, verbose = configCONST.DEBUGGING)
uvSetData[eachUVSet] = data
#log(None, method = 'uv_getUVs.getUVs', message = '{0:<10}{1}'.format('uvSetData:', uvSetData), outputToLogFile = False, verbose = configCONST.DEBUGGING)
## Forcing this back to map1 to see if I can avoid crashes
shapeFn.setCurrentUVSetName('map1')
#print 'Data stored for %s' % geoName
if uvSetData:
return [geoName, uvSetData]
else:
return None | apache-2.0 | -8,915,309,298,227,669,000 | 57.1875 | 223 | 0.591675 | false |
chromium/chromium | third_party/blink/renderer/build/scripts/writer_test_util.py | 6 | 3534 | # Copyright 2021 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import contextlib
import difflib
import filecmp
import os
import shutil
import tempfile
import unittest
from json5_generator import Json5File, Writer
@contextlib.contextmanager
def tmp_dir():
tmp = tempfile.mkdtemp()
try:
yield tmp
finally:
shutil.rmtree(tmp)
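# For example:
#
#   with tmp_dir() as tmp:
#       ...  # use the temporary directory; it is removed on exit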
def path_to_test_file(*path):
return os.path.join(os.path.dirname(__file__), 'tests', *path)
def diff(filename1, filename2):
with open(filename1) as file1:
file1_lines = file1.readlines()
with open(filename2) as file2:
file2_lines = file2.readlines()
# Use Python's difflib module so that diffing works across platforms
return ''.join(difflib.context_diff(file1_lines, file2_lines))
def is_identical_file(reference_filename, output_filename):
reference_basename = os.path.basename(reference_filename)
if not os.path.isfile(reference_filename):
        print('Missing reference file!')
        print('(if adding new test, update reference files)')
        print(reference_basename)
        print()
return False
if not filecmp.cmp(reference_filename, output_filename):
# cmp is much faster than diff, and usual case is "no difference",
# so only run diff if cmp detects a difference
        print('FAIL: %s' % reference_basename)
        print(diff(reference_filename, output_filename))
return False
return True
def compare_output_dir(reference_dir, output_dir):
"""
Compares output files in both reference_dir and output_dir.
Note: this function ignores subdirectory content in both reference
dir and output_dir.
Note: reference_dir should have all ref files ending with .ref suffix.
'.ref' suffix is added to bypass code formatter on reference files.
    :returns {bool}: Whether files in output_dir match files in reference_dir
"""
ref_content = {
f[:-4]
for f in os.listdir(reference_dir) if f.endswith('.ref')
}
output_content = set(os.listdir(output_dir))
if ref_content != output_content:
        print('Output files do not match.')
        print('Following files are extra: {}'.format(output_content -
                                                     ref_content))
        print('Following files are missing: {}'.format(ref_content -
                                                       output_content))
return False
for file_name in ref_content:
ref_file = os.path.join(reference_dir, file_name) + '.ref'
output_file = os.path.join(output_dir, file_name)
if os.path.isdir(ref_file) and os.path.isdir(output_file):
continue
elif os.path.isdir(ref_file) or os.path.isdir(output_file):
return False
elif not is_identical_file(ref_file, output_file):
return False
return True
class WriterTest(unittest.TestCase):
def _test_writer(self, writer_class, json5_files, reference_dir):
"""
:param writer_class {Writer}: a subclass to Writer
:param json5_files {List[str]}: json5 test input files
:param reference_dir {str}: directory to expected output files
"""
with tmp_dir() as tmp:
writer = writer_class(json5_files, tmp)
writer.write_files(tmp)
writer.cleanup_files(tmp)
self.assertTrue(compare_output_dir(reference_dir, tmp))
| bsd-3-clause | -8,408,895,125,960,388,000 | 30.837838 | 74 | 0.644312 | false |
Zerknechterer/pyload | module/plugins/hoster/SendspaceCom.py | 1 | 2311 | # -*- coding: utf-8 -*-
import re
from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
class SendspaceCom(SimpleHoster):
__name__ = "SendspaceCom"
__type__ = "hoster"
__version__ = "0.17"
__pattern__ = r'https?://(?:www\.)?sendspace\.com/file/\w+'
__config__ = [("use_premium", "bool", "Use premium account if available", True)]
__description__ = """Sendspace.com hoster plugin"""
__license__ = "GPLv3"
__authors__ = [("zoidberg", "[email protected]")]
NAME_PATTERN = r'<h2 class="bgray">\s*<(?:b|strong)>(?P<N>[^<]+)</'
SIZE_PATTERN = r'<div class="file_description reverse margin_center">\s*<b>File Size:</b>\s*(?P<S>[\d.,]+)(?P<U>[\w^_]+)\s*</div>'
OFFLINE_PATTERN = r'<div class="msg error" style="cursor: default">Sorry, the file you requested is not available.</div>'
LINK_FREE_PATTERN = r'<a id="download_button" href="(.+?)"'
CAPTCHA_PATTERN = r'<td><img src="(/captchas/captcha\.php?captcha=(.+?))"></td>'
USER_CAPTCHA_PATTERN = r'<td><img src="/captchas/captcha\.php?user=(.+?))"></td>'
def handleFree(self, pyfile):
params = {}
for _i in xrange(3):
m = re.search(self.LINK_FREE_PATTERN, self.html)
if m:
if 'captcha_hash' in params:
self.correctCaptcha()
self.link = m.group(1)
break
m = re.search(self.CAPTCHA_PATTERN, self.html)
if m:
if 'captcha_hash' in params:
self.invalidCaptcha()
captcha_url1 = "http://www.sendspace.com/" + m.group(1)
m = re.search(self.USER_CAPTCHA_PATTERN, self.html)
captcha_url2 = "http://www.sendspace.com/" + m.group(1)
params = {'captcha_hash': m.group(2),
'captcha_submit': 'Verify',
'captcha_answer': self.decryptCaptcha(captcha_url1) + " " + self.decryptCaptcha(captcha_url2)}
else:
params = {'download': "Regular Download"}
self.logDebug(params)
self.html = self.load(pyfile.url, post=params)
else:
self.fail(_("Download link not found"))
getInfo = create_getInfo(SendspaceCom)
| gpl-3.0 | -340,626,205,837,711,300 | 37.516667 | 137 | 0.539593 | false |
hanvo/MusicCloud | Crawler/Install Files/pygame/test/test_utils/png.py | 19 | 155395 | #!/usr/bin/env python
# $URL: http://pypng.googlecode.com/svn/trunk/code/png.py $
# $Rev: 228 $
# png.py - PNG encoder/decoder in pure Python
#
# Modified for Pygame in Oct., 2012 to work with Python 3.x.
#
# Copyright (C) 2006 Johann C. Rocholl <[email protected]>
# Portions Copyright (C) 2009 David Jones <[email protected]>
# And probably portions Copyright (C) 2006 Nicko van Someren <[email protected]>
#
# Original concept by Johann C. Rocholl.
#
# LICENSE (The MIT License)
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# Changelog (recent first):
# 2009-03-11 David: interlaced bit depth < 8 (writing).
# 2009-03-10 David: interlaced bit depth < 8 (reading).
# 2009-03-04 David: Flat and Boxed pixel formats.
# 2009-02-26 David: Palette support (writing).
# 2009-02-23 David: Bit-depths < 8; better PNM support.
# 2006-06-17 Nicko: Reworked into a class, faster interlacing.
# 2006-06-17 Johann: Very simple prototype PNG decoder.
# 2006-06-17 Nicko: Test suite with various image generators.
# 2006-06-17 Nicko: Alpha-channel, grey-scale, 16-bit/plane support.
# 2006-06-15 Johann: Scanline iterator interface for large input files.
# 2006-06-09 Johann: Very simple prototype PNG encoder.
# Incorporated into Bangai-O Development Tools by drj on 2009-02-11 from
# http://trac.browsershots.org/browser/trunk/pypng/lib/png.py?rev=2885
# Incorporated into pypng by drj on 2009-03-12 from
# //depot/prj/bangaio/master/code/png.py#67
"""
Pure Python PNG Reader/Writer
This Python module implements support for PNG images (see PNG
specification at http://www.w3.org/TR/2003/REC-PNG-20031110/ ). It reads
and writes PNG files with all allowable bit depths (1/2/4/8/16/24/32/48/64
bits per pixel) and colour combinations: greyscale (1/2/4/8/16 bit); RGB,
RGBA, LA (greyscale with alpha) with 8/16 bits per channel; colour mapped
images (1/2/4/8 bit). Adam7 interlacing is supported for reading and
writing. A number of optional chunks can be specified (when writing)
and understood (when reading): ``tRNS``, ``bKGD``, ``gAMA``.
For help, type ``import png; help(png)`` in your python interpreter.
A good place to start is the :class:`Reader` and :class:`Writer` classes.
Requires Python 2.3. Limited support is available for Python 2.2, but
not everything works. Best with Python 2.4 and higher. Installation is
trivial, but see the ``README.txt`` file (with the source distribution)
for details.
This file can also be used as a command-line utility to convert
`Netpbm <http://netpbm.sourceforge.net/>`_ PNM files to PNG, and the reverse conversion from PNG to
PNM. The interface is similar to that of the ``pnmtopng`` program from
Netpbm. Type ``python png.py --help`` at the shell prompt
for usage and a list of options.
A note on spelling and terminology
----------------------------------
Generally British English spelling is used in the documentation. So
that's "greyscale" and "colour". This not only matches the author's
native language, it's also used by the PNG specification.
The major colour models supported by PNG (and hence by PyPNG) are:
greyscale, RGB, greyscale--alpha, RGB--alpha. These are sometimes
referred to using the abbreviations: L, RGB, LA, RGBA. In this case
each letter abbreviates a single channel: *L* is for Luminance or Luma or
Lightness which is the channel used in greyscale images; *R*, *G*, *B* stand
for Red, Green, Blue, the components of a colour image; *A* stands for
Alpha, the opacity channel (used for transparency effects, but higher
values are more opaque, so it makes sense to call it opacity).
A note on formats
-----------------
When getting pixel data out of this module (reading) and presenting
data to this module (writing) there are a number of ways the data could
be represented as a Python value. Generally this module uses one of
three formats called "flat row flat pixel", "boxed row flat pixel", and
"boxed row boxed pixel". Basically the concern is whether each pixel
and each row comes in its own little tuple (box), or not.
Consider an image that is 3 pixels wide by 2 pixels high, and each pixel
has RGB components:
Boxed row flat pixel::
list([R,G,B, R,G,B, R,G,B],
[R,G,B, R,G,B, R,G,B])
Each row appears as its own list, but the pixels are flattened so that
three values for one pixel simply follow the three values for the previous
pixel. This is the most common format used, because it provides a good
compromise between space and convenience. PyPNG regards itself as
at liberty to replace any sequence type with any sufficiently compatible
other sequence type; in practice each row is an array (from the array
module), and the outer list is sometimes an iterator rather than an
explicit list (so that streaming is possible).
Flat row flat pixel::
[R,G,B, R,G,B, R,G,B,
R,G,B, R,G,B, R,G,B]
The entire image is one single giant sequence of colour values.
Generally an array will be used (to save space), not a list.
Boxed row boxed pixel::
list([ (R,G,B), (R,G,B), (R,G,B) ],
[ (R,G,B), (R,G,B), (R,G,B) ])
Each row appears in its own list, but each pixel also appears in its own
tuple. A serious memory burn in Python.
In all cases the top row comes first, and for each row the pixels are
ordered from left-to-right. Within a pixel the values appear in the
order, R-G-B-A (or L-A for greyscale--alpha).
There is a fourth format, mentioned because it is used internally,
is close to what lies inside a PNG file itself, and has some support
from the public API. This format is called packed. When packed,
each row is a sequence of bytes (integers from 0 to 255), just as
it is before PNG scanline filtering is applied. When the bit depth
is 8 this is essentially the same as boxed row flat pixel; when the
bit depth is less than 8, several pixels are packed into each byte;
when the bit depth is 16 (the only value more than 8 that is supported
by the PNG image format) each pixel value is decomposed into 2 bytes
(and `packed` is a misnomer). This format is used by the
:meth:`Writer.write_packed` method. It isn't usually a convenient
format, but may be just right if the source data for the PNG image
comes from something that uses a similar format (for example, 1-bit
BMPs, or another PNG file).
And now, my famous members
--------------------------
"""
# http://www.python.org/doc/2.2.3/whatsnew/node5.html
from __future__ import generators
__version__ = "$URL: http://pypng.googlecode.com/svn/trunk/code/png.py $ $Rev: 228 $"
from pygame.compat import geterror, next_, imap_
from array import array
try: # See :pyver:old
import itertools
except:
pass
import math
# http://www.python.org/doc/2.4.4/lib/module-operator.html
import operator
import struct
import sys
import zlib
# reduce() lives in functools on Python 3 (it is a builtin on Python 2);
# it is needed below when packing bit depths < 8.
try:
    from functools import reduce
except ImportError:
    pass
# http://www.python.org/doc/2.4.4/lib/module-warnings.html
import warnings
__all__ = ['Image', 'Reader', 'Writer', 'write_chunks', 'from_array']
# The PNG signature.
# http://www.w3.org/TR/PNG/#5PNG-file-signature
_signature = struct.pack('8B', 137, 80, 78, 71, 13, 10, 26, 10)
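# Each tuple below is one Adam7 interlace pass: (xstart, ystart, xstep, ystep).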
_adam7 = ((0, 0, 8, 8),
(4, 0, 8, 8),
(0, 4, 4, 8),
(2, 0, 4, 4),
(0, 2, 2, 4),
(1, 0, 2, 2),
(0, 1, 1, 2))
def group(s, n):
# See
# http://www.python.org/doc/2.6/library/functions.html#zip
return zip(*[iter(s)]*n)
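# For example, list(group([1, 2, 3, 4], 2)) == [(1, 2), (3, 4)].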
def isarray(x):
"""Same as ``isinstance(x, array)`` except on Python 2.2, where it
always returns ``False``. This helps PyPNG work on Python 2.2.
"""
try:
return isinstance(x, array)
except:
return False
try: # see :pyver:old
array.tostring
except:
def tostring(row):
l = len(row)
return struct.pack('%dB' % l, *row)
else:
def tostring(row):
"""Convert row of bytes to string. Expects `row` to be an
``array``.
"""
return row.tostring()
# Conditionally convert to bytes. Works on Python 2 and Python 3.
try:
bytes('', 'ascii')
def strtobytes(x): return bytes(x, 'iso8859-1')
def bytestostr(x): return str(x, 'iso8859-1')
except:
strtobytes = str
bytestostr = str
def interleave_planes(ipixels, apixels, ipsize, apsize):
"""
Interleave (colour) planes, e.g. RGB + A = RGBA.
Return an array of pixels consisting of the `ipsize` elements of data
from each pixel in `ipixels` followed by the `apsize` elements of data
from each pixel in `apixels`. Conventionally `ipixels` and
`apixels` are byte arrays so the sizes are bytes, but it actually
works with any arrays of the same type. The returned array is the
same type as the input arrays which should be the same type as each other.
"""
itotal = len(ipixels)
atotal = len(apixels)
newtotal = itotal + atotal
newpsize = ipsize + apsize
# Set up the output buffer
# See http://www.python.org/doc/2.4.4/lib/module-array.html#l2h-1356
out = array(ipixels.typecode)
# It's annoying that there is no cheap way to set the array size :-(
out.extend(ipixels)
out.extend(apixels)
# Interleave in the pixel data
for i in range(ipsize):
out[i:newtotal:newpsize] = ipixels[i:itotal:ipsize]
for i in range(apsize):
out[i+ipsize:newtotal:newpsize] = apixels[i:atotal:apsize]
return out
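# A small worked example (ordinary Python arrays):
#
#   rgb = array('B', [1, 2, 3, 4, 5, 6])  # two RGB pixels
#   a = array('B', [9, 10])               # two alpha values
#   interleave_planes(rgb, a, 3, 1)       # -> array('B', [1,2,3,9, 4,5,6,10])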
def check_palette(palette):
"""Check a palette argument (to the :class:`Writer` class) for validity.
Returns the palette as a list if okay; raises an exception otherwise.
"""
# None is the default and is allowed.
if palette is None:
return None
p = list(palette)
if not (0 < len(p) <= 256):
raise ValueError("a palette must have between 1 and 256 entries")
seen_triple = False
for i,t in enumerate(p):
if len(t) not in (3,4):
raise ValueError(
"palette entry %d: entries must be 3- or 4-tuples." % i)
if len(t) == 3:
seen_triple = True
if seen_triple and len(t) == 4:
raise ValueError(
"palette entry %d: all 4-tuples must precede all 3-tuples" % i)
for x in t:
if int(x) != x or not(0 <= x <= 255):
raise ValueError(
"palette entry %d: values must be integer: 0 <= x <= 255" % i)
return p
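# For example, [(255, 0, 0, 128), (0, 255, 0)] is a valid palette (the
# 4-tuple precedes the 3-tuple); reversing the two entries raises ValueError.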
class Error(Exception):
prefix = 'Error'
def __str__(self):
return self.prefix + ': ' + ' '.join(self.args)
class FormatError(Error):
"""Problem with input file format. In other words, PNG file does
not conform to the specification in some way and is invalid.
"""
prefix = 'FormatError'
class ChunkError(FormatError):
prefix = 'ChunkError'
class Writer:
"""
PNG encoder in pure Python.
"""
def __init__(self, width=None, height=None,
size=None,
greyscale=False,
alpha=False,
bitdepth=8,
palette=None,
transparent=None,
background=None,
gamma=None,
compression=None,
interlace=False,
bytes_per_sample=None, # deprecated
planes=None,
colormap=None,
maxval=None,
chunk_limit=2**20):
"""
Create a PNG encoder object.
Arguments:
width, height
Image size in pixels, as two separate arguments.
size
Image size (w,h) in pixels, as single argument.
greyscale
Input data is greyscale, not RGB.
alpha
Input data has alpha channel (RGBA or LA).
bitdepth
Bit depth: from 1 to 16.
palette
Create a palette for a colour mapped image (colour type 3).
transparent
Specify a transparent colour (create a ``tRNS`` chunk).
background
Specify a default background colour (create a ``bKGD`` chunk).
gamma
Specify a gamma value (create a ``gAMA`` chunk).
compression
zlib compression level (1-9).
interlace
Create an interlaced image.
chunk_limit
Write multiple ``IDAT`` chunks to save memory.
The image size (in pixels) can be specified either by using the
`width` and `height` arguments, or with the single `size`
argument. If `size` is used it should be a pair (*width*,
*height*).
`greyscale` and `alpha` are booleans that specify whether
an image is greyscale (or colour), and whether it has an
alpha channel (or not).
`bitdepth` specifies the bit depth of the source pixel values.
Each source pixel value must be an integer between 0 and
``2**bitdepth-1``. For example, 8-bit images have values
between 0 and 255. PNG only stores images with bit depths of
1,2,4,8, or 16. When `bitdepth` is not one of these values,
the next highest valid bit depth is selected, and an ``sBIT``
(significant bits) chunk is generated that specifies the original
precision of the source image. In this case the supplied pixel
values will be rescaled to fit the range of the selected bit depth.
The details of which bit depth / colour model combinations the
PNG file format supports directly, are somewhat arcane
(refer to the PNG specification for full details). Briefly:
"small" bit depths (1,2,4) are only allowed with greyscale and
colour mapped images; colour mapped images cannot have bit depth
16.
For colour mapped images (in other words, when the `palette`
argument is specified) the `bitdepth` argument must match one of
the valid PNG bit depths: 1, 2, 4, or 8. (It is valid to have a
PNG image with a palette and an ``sBIT`` chunk, but the meaning
is slightly different; it would be awkward to press the
`bitdepth` argument into service for this.)
The `palette` option, when specified, causes a colour mapped image
to be created: the PNG colour type is set to 3; greyscale
must not be set; alpha must not be set; transparent must
not be set; the bit depth must be 1,2,4, or 8. When a colour
mapped image is created, the pixel values are palette indexes
and the `bitdepth` argument specifies the size of these indexes
(not the size of the colour values in the palette).
The palette argument value should be a sequence of 3- or
4-tuples. 3-tuples specify RGB palette entries; 4-tuples
specify RGBA palette entries. If both 4-tuples and 3-tuples
appear in the sequence then all the 4-tuples must come
before all the 3-tuples. A ``PLTE`` chunk is created; if there
are 4-tuples then a ``tRNS`` chunk is created as well. The
``PLTE`` chunk will contain all the RGB triples in the same
sequence; the ``tRNS`` chunk will contain the alpha channel for
all the 4-tuples, in the same sequence. Palette entries
are always 8-bit.
If specified, the `transparent` and `background` parameters must
be a tuple with three integer values for red, green, blue, or
a simple integer (or singleton tuple) for a greyscale image.
If specified, the `gamma` parameter must be a positive number
(generally, a float). A ``gAMA`` chunk will be created. Note that
this will not change the values of the pixels as they appear in
the PNG file, they are assumed to have already been converted
appropriately for the gamma specified.
The `compression` argument specifies the compression level
to be used by the ``zlib`` module. Higher values are likely
to compress better, but will be slower to compress. The
default for this argument is ``None``; this does not mean
no compression, rather it means that the default from the
``zlib`` module is used (which is generally acceptable).
If `interlace` is true then an interlaced image is created
        (using PNG's so far only interlace method, *Adam7*). This does not
affect how the pixels should be presented to the encoder, rather
it changes how they are arranged into the PNG file. On slow
connexions interlaced images can be partially decoded by the
browser to give a rough view of the image that is successively
refined as more image data appears.
.. note ::
Enabling the `interlace` option requires the entire image
to be processed in working memory.
`chunk_limit` is used to limit the amount of memory used whilst
compressing the image. In order to avoid using large amounts of
memory, multiple ``IDAT`` chunks may be created.
"""
# At the moment the `planes` argument is ignored;
# its purpose is to act as a dummy so that
# ``Writer(x, y, **info)`` works, where `info` is a dictionary
# returned by Reader.read and friends.
# Ditto for `colormap`.
# A couple of helper functions come first. Best skipped if you
# are reading through.
def isinteger(x):
try:
return int(x) == x
except:
return False
def check_color(c, which):
"""Checks that a colour argument for transparent or
background options is the right form. Also "corrects" bare
integers to 1-tuples.
"""
if c is None:
return c
if greyscale:
try:
l = len(c)
except TypeError:
c = (c,)
if len(c) != 1:
raise ValueError("%s for greyscale must be 1-tuple" %
which)
if not isinteger(c[0]):
raise ValueError(
"%s colour for greyscale must be integer" %
which)
else:
if not (len(c) == 3 and
isinteger(c[0]) and
isinteger(c[1]) and
isinteger(c[2])):
raise ValueError(
"%s colour must be a triple of integers" %
which)
return c
if size:
if len(size) != 2:
raise ValueError(
"size argument should be a pair (width, height)")
if width is not None and width != size[0]:
raise ValueError(
"size[0] (%r) and width (%r) should match when both are used."
% (size[0], width))
if height is not None and height != size[1]:
raise ValueError(
"size[1] (%r) and height (%r) should match when both are used."
% (size[1], height))
width,height = size
del size
if width <= 0 or height <= 0:
raise ValueError("width and height must be greater than zero")
if not isinteger(width) or not isinteger(height):
raise ValueError("width and height must be integers")
# http://www.w3.org/TR/PNG/#7Integers-and-byte-order
if width > 2**32-1 or height > 2**32-1:
raise ValueError("width and height cannot exceed 2**32-1")
if alpha and transparent is not None:
raise ValueError(
"transparent colour not allowed with alpha channel")
if bytes_per_sample is not None:
warnings.warn('please use bitdepth instead of bytes_per_sample',
DeprecationWarning)
if bytes_per_sample not in (0.125, 0.25, 0.5, 1, 2):
raise ValueError(
"bytes per sample must be .125, .25, .5, 1, or 2")
bitdepth = int(8*bytes_per_sample)
del bytes_per_sample
if not isinteger(bitdepth) or bitdepth < 1 or 16 < bitdepth:
raise ValueError("bitdepth (%r) must be a postive integer <= 16" %
bitdepth)
self.rescale = None
if palette:
if bitdepth not in (1,2,4,8):
raise ValueError("with palette, bitdepth must be 1, 2, 4, or 8")
if transparent is not None:
raise ValueError("transparent and palette not compatible")
if alpha:
raise ValueError("alpha and palette not compatible")
if greyscale:
raise ValueError("greyscale and palette not compatible")
else:
# No palette, check for sBIT chunk generation.
if alpha or not greyscale:
if bitdepth not in (8,16):
targetbitdepth = (8,16)[bitdepth > 8]
self.rescale = (bitdepth, targetbitdepth)
bitdepth = targetbitdepth
del targetbitdepth
else:
assert greyscale
assert not alpha
if bitdepth not in (1,2,4,8,16):
if bitdepth > 8:
targetbitdepth = 16
elif bitdepth == 3:
targetbitdepth = 4
else:
assert bitdepth in (5,6,7)
targetbitdepth = 8
self.rescale = (bitdepth, targetbitdepth)
bitdepth = targetbitdepth
del targetbitdepth
if bitdepth < 8 and (alpha or not greyscale and not palette):
raise ValueError(
"bitdepth < 8 only permitted with greyscale or palette")
if bitdepth > 8 and palette:
raise ValueError(
"bit depth must be 8 or less for images with palette")
transparent = check_color(transparent, 'transparent')
background = check_color(background, 'background')
# It's important that the true boolean values (greyscale, alpha,
# colormap, interlace) are converted to bool because Iverson's
# convention is relied upon later on.
self.width = width
self.height = height
self.transparent = transparent
self.background = background
self.gamma = gamma
self.greyscale = bool(greyscale)
self.alpha = bool(alpha)
self.colormap = bool(palette)
self.bitdepth = int(bitdepth)
self.compression = compression
self.chunk_limit = chunk_limit
self.interlace = bool(interlace)
self.palette = check_palette(palette)
self.color_type = 4*self.alpha + 2*(not greyscale) + 1*self.colormap
assert self.color_type in (0,2,3,4,6)
self.color_planes = (3,1)[self.greyscale or self.colormap]
self.planes = self.color_planes + self.alpha
# :todo: fix for bitdepth < 8
self.psize = (self.bitdepth/8) * self.planes
def make_palette(self):
"""Create the byte sequences for a ``PLTE`` and if necessary a
``tRNS`` chunk. Returned as a pair (*p*, *t*). *t* will be
``None`` if no ``tRNS`` chunk is necessary.
"""
p = array('B')
t = array('B')
for x in self.palette:
p.extend(x[0:3])
if len(x) > 3:
t.append(x[3])
p = tostring(p)
t = tostring(t)
if t:
return p,t
return p,None
def write(self, outfile, rows):
"""Write a PNG image to the output file. `rows` should be
an iterable that yields each row in boxed row flat pixel format.
The rows should be the rows of the original image, so there
should be ``self.height`` rows of ``self.width * self.planes`` values.
If `interlace` is specified (when creating the instance), then
an interlaced PNG file will be written. Supply the rows in the
normal image order; the interlacing is carried out internally.
.. note ::
Interlacing will require the entire image to be in working memory.
"""
if self.interlace:
fmt = 'BH'[self.bitdepth > 8]
a = array(fmt, itertools.chain(*rows))
return self.write_array(outfile, a)
else:
nrows = self.write_passes(outfile, rows)
if nrows != self.height:
raise ValueError(
"rows supplied (%d) does not match height (%d)" %
(nrows, self.height))
def write_passes(self, outfile, rows, packed=False):
"""
Write a PNG image to the output file.
Most users are expected to find the :meth:`write` or
:meth:`write_array` method more convenient.
The rows should be given to this method in the order that
they appear in the output file. For straightlaced images,
this is the usual top to bottom ordering, but for interlaced
images the rows should have already been interlaced before
passing them to this function.
`rows` should be an iterable that yields each row. When
`packed` is ``False`` the rows should be in boxed row flat pixel
format; when `packed` is ``True`` each row should be a packed
sequence of bytes.
"""
# http://www.w3.org/TR/PNG/#5PNG-file-signature
outfile.write(_signature)
# http://www.w3.org/TR/PNG/#11IHDR
write_chunk(outfile, 'IHDR',
struct.pack("!2I5B", self.width, self.height,
self.bitdepth, self.color_type,
0, 0, self.interlace))
# See :chunk:order
# http://www.w3.org/TR/PNG/#11gAMA
if self.gamma is not None:
write_chunk(outfile, 'gAMA',
struct.pack("!L", int(round(self.gamma*1e5))))
# See :chunk:order
# http://www.w3.org/TR/PNG/#11sBIT
if self.rescale:
write_chunk(outfile, 'sBIT',
struct.pack('%dB' % self.planes,
*[self.rescale[0]]*self.planes))
# :chunk:order: Without a palette (PLTE chunk), ordering is
# relatively relaxed. With one, gAMA chunk must precede PLTE
# chunk which must precede tRNS and bKGD.
# See http://www.w3.org/TR/PNG/#5ChunkOrdering
if self.palette:
p,t = self.make_palette()
write_chunk(outfile, 'PLTE', p)
if t:
# tRNS chunk is optional. Only needed if palette entries
# have alpha.
write_chunk(outfile, 'tRNS', t)
# http://www.w3.org/TR/PNG/#11tRNS
if self.transparent is not None:
if self.greyscale:
write_chunk(outfile, 'tRNS',
struct.pack("!1H", *self.transparent))
else:
write_chunk(outfile, 'tRNS',
struct.pack("!3H", *self.transparent))
# http://www.w3.org/TR/PNG/#11bKGD
if self.background is not None:
if self.greyscale:
write_chunk(outfile, 'bKGD',
struct.pack("!1H", *self.background))
else:
write_chunk(outfile, 'bKGD',
struct.pack("!3H", *self.background))
# http://www.w3.org/TR/PNG/#11IDAT
if self.compression is not None:
compressor = zlib.compressobj(self.compression)
else:
compressor = zlib.compressobj()
# Choose an extend function based on the bitdepth. The extend
# function packs/decomposes the pixel values into bytes and
# stuffs them onto the data array.
data = array('B')
if self.bitdepth == 8 or packed:
extend = data.extend
elif self.bitdepth == 16:
# Decompose into bytes
def extend(sl):
fmt = '!%dH' % len(sl)
data.extend(array('B', struct.pack(fmt, *sl)))
else:
# Pack into bytes
assert self.bitdepth < 8
# samples per byte
spb = int(8/self.bitdepth)
def extend(sl):
a = array('B', sl)
# Adding padding bytes so we can group into a whole
# number of spb-tuples.
l = float(len(a))
extra = math.ceil(l / float(spb))*spb - l
a.extend([0]*int(extra))
# Pack into bytes
l = group(a, spb)
l = map(lambda e: reduce(lambda x,y:
(x << self.bitdepth) + y, e), l)
data.extend(l)
if self.rescale:
oldextend = extend
factor = \
float(2**self.rescale[1]-1) / float(2**self.rescale[0]-1)
def extend(sl):
oldextend(map(lambda x: int(round(factor*x)), sl))
# Build the first row, testing mostly to see if we need to
# changed the extend function to cope with NumPy integer types
# (they cause our ordinary definition of extend to fail, so we
# wrap it). See
# http://code.google.com/p/pypng/issues/detail?id=44
enumrows = enumerate(rows)
del rows
# First row's filter type.
data.append(0)
# :todo: Certain exceptions in the call to ``.next()`` or the
# following try would indicate no row data supplied.
# Should catch.
i,row = next_(enumrows)
try:
# If this fails...
extend(row)
except:
# ... try a version that converts the values to int first.
# Not only does this work for the (slightly broken) NumPy
# types, there are probably lots of other, unknown, "nearly"
# int types it works for.
def wrapmapint(f):
return lambda sl: f(map(int, sl))
extend = wrapmapint(extend)
del wrapmapint
extend(row)
for i,row in enumrows:
# Add "None" filter type. Currently, it's essential that
# this filter type be used for every scanline as we do not
# mark the first row of a reduced pass image; that means we
# could accidentally compute the wrong filtered scanline if
# we used "up", "average", or "paeth" on such a line.
data.append(0)
extend(row)
if len(data) > self.chunk_limit:
compressed = compressor.compress(tostring(data))
if len(compressed):
# print >> sys.stderr, len(data), len(compressed)
write_chunk(outfile, 'IDAT', compressed)
# Because of our very witty definition of ``extend``,
# above, we must re-use the same ``data`` object. Hence
# we use ``del`` to empty this one, rather than create a
# fresh one (which would be my natural FP instinct).
del data[:]
if len(data):
compressed = compressor.compress(tostring(data))
else:
            compressed = strtobytes('')
flushed = compressor.flush()
if len(compressed) or len(flushed):
# print >> sys.stderr, len(data), len(compressed), len(flushed)
write_chunk(outfile, 'IDAT', compressed + flushed)
# http://www.w3.org/TR/PNG/#11IEND
write_chunk(outfile, 'IEND')
return i+1
def write_array(self, outfile, pixels):
"""
Write an array in flat row flat pixel format as a PNG file on
the output file. See also :meth:`write` method.
"""
if self.interlace:
self.write_passes(outfile, self.array_scanlines_interlace(pixels))
else:
self.write_passes(outfile, self.array_scanlines(pixels))
def write_packed(self, outfile, rows):
"""
Write PNG file to `outfile`. The pixel data comes from `rows`
which should be in boxed row packed format. Each row should be
a sequence of packed bytes.
Technically, this method does work for interlaced images but it
is best avoided. For interlaced images, the rows should be
presented in the order that they appear in the file.
This method should not be used when the source image bit depth
is not one naturally supported by PNG; the bit depth should be
1, 2, 4, 8, or 16.
"""
if self.rescale:
raise Error("write_packed method not suitable for bit depth %d" %
self.rescale[0])
return self.write_passes(outfile, rows, packed=True)
def convert_pnm(self, infile, outfile):
"""
Convert a PNM file containing raw pixel data into a PNG file
with the parameters set in the writer object. Works for
(binary) PGM, PPM, and PAM formats.
"""
if self.interlace:
pixels = array('B')
pixels.fromfile(infile,
(self.bitdepth/8) * self.color_planes *
self.width * self.height)
self.write_passes(outfile, self.array_scanlines_interlace(pixels))
else:
self.write_passes(outfile, self.file_scanlines(infile))
def convert_ppm_and_pgm(self, ppmfile, pgmfile, outfile):
"""
Convert a PPM and PGM file containing raw pixel data into a
PNG outfile with the parameters set in the writer object.
"""
pixels = array('B')
pixels.fromfile(ppmfile,
(self.bitdepth/8) * self.color_planes *
self.width * self.height)
apixels = array('B')
apixels.fromfile(pgmfile,
(self.bitdepth/8) *
self.width * self.height)
pixels = interleave_planes(pixels, apixels,
(self.bitdepth/8) * self.color_planes,
(self.bitdepth/8))
if self.interlace:
self.write_passes(outfile, self.array_scanlines_interlace(pixels))
else:
self.write_passes(outfile, self.array_scanlines(pixels))
def file_scanlines(self, infile):
"""
Generates boxed rows in flat pixel format, from the input file
`infile`. It assumes that the input file is in a "Netpbm-like"
binary format, and is positioned at the beginning of the first
pixel. The number of pixels to read is taken from the image
dimensions (`width`, `height`, `planes`) and the number of bytes
per value is implied by the image `bitdepth`.
"""
# Values per row
vpr = self.width * self.planes
row_bytes = vpr
if self.bitdepth > 8:
assert self.bitdepth == 16
row_bytes *= 2
fmt = '>%dH' % vpr
def line():
return array('H', struct.unpack(fmt, infile.read(row_bytes)))
else:
def line():
scanline = array('B', infile.read(row_bytes))
return scanline
for y in range(self.height):
yield line()
def array_scanlines(self, pixels):
"""
Generates boxed rows (flat pixels) from flat rows (flat pixels)
in an array.
"""
# Values per row
vpr = self.width * self.planes
stop = 0
for y in range(self.height):
start = stop
stop = start + vpr
yield pixels[start:stop]
def array_scanlines_interlace(self, pixels):
"""
Generator for interlaced scanlines from an array. `pixels` is
the full source image in flat row flat pixel format. The
generator yields each scanline of the reduced passes in turn, in
boxed row flat pixel format.
"""
# http://www.w3.org/TR/PNG/#8InterlaceMethods
# Array type.
fmt = 'BH'[self.bitdepth > 8]
# Value per row
vpr = self.width * self.planes
for xstart, ystart, xstep, ystep in _adam7:
if xstart >= self.width:
continue
# Pixels per row (of reduced image)
ppr = int(math.ceil((self.width-xstart)/float(xstep)))
# number of values in reduced image row.
row_len = ppr*self.planes
for y in range(ystart, self.height, ystep):
if xstep == 1:
offset = y * vpr
yield pixels[offset:offset+vpr]
else:
row = array(fmt)
# There's no easier way to set the length of an array
row.extend(pixels[0:row_len])
offset = y * vpr + xstart * self.planes
end_offset = (y+1) * vpr
skip = self.planes * xstep
for i in range(self.planes):
row[i::self.planes] = \
pixels[offset+i:end_offset:skip]
yield row
def write_chunk(outfile, tag, data=strtobytes('')):
"""
Write a PNG chunk to the output file, including length and
checksum.
"""
# http://www.w3.org/TR/PNG/#5Chunk-layout
outfile.write(struct.pack("!I", len(data)))
tag = strtobytes(tag)
outfile.write(tag)
outfile.write(data)
checksum = zlib.crc32(tag)
checksum = zlib.crc32(data, checksum)
checksum &= 2**32-1
outfile.write(struct.pack("!I", checksum))
def write_chunks(out, chunks):
"""Create a PNG file by writing out the chunks."""
out.write(_signature)
for chunk in chunks:
write_chunk(out, *chunk)
def filter_scanline(type, line, fo, prev=None):
"""Apply a scanline filter to a scanline. `type` specifies the
filter type (0 to 4); `line` specifies the current (unfiltered)
scanline as a sequence of bytes; `prev` specifies the previous
(unfiltered) scanline as a sequence of bytes. `fo` specifies the
filter offset; normally this is size of a pixel in bytes (the number
of bytes per sample times the number of channels), but when this is
< 1 (for bit depths < 8) then the filter offset is 1.
"""
assert 0 <= type < 5
# The output array. Which, pathetically, we extend one-byte at a
# time (fortunately this is linear).
out = array('B', [type])
def sub():
ai = -fo
for x in line:
if ai >= 0:
x = (x - line[ai]) & 0xff
out.append(x)
ai += 1
def up():
for i,x in enumerate(line):
x = (x - prev[i]) & 0xff
out.append(x)
def average():
ai = -fo
for i,x in enumerate(line):
if ai >= 0:
x = (x - ((line[ai] + prev[i]) >> 1)) & 0xff
else:
x = (x - (prev[i] >> 1)) & 0xff
out.append(x)
ai += 1
def paeth():
# http://www.w3.org/TR/PNG/#9Filter-type-4-Paeth
ai = -fo # also used for ci
for i,x in enumerate(line):
a = 0
b = prev[i]
c = 0
if ai >= 0:
a = line[ai]
c = prev[ai]
p = a + b - c
pa = abs(p - a)
pb = abs(p - b)
pc = abs(p - c)
if pa <= pb and pa <= pc: Pr = a
elif pb <= pc: Pr = b
else: Pr = c
x = (x - Pr) & 0xff
out.append(x)
ai += 1
if not prev:
# We're on the first line. Some of the filters can be reduced
# to simpler cases which makes handling the line "off the top"
# of the image simpler. "up" becomes "none"; "paeth" becomes
# "left" (non-trivial, but true). "average" needs to be handled
# specially.
if type == 2: # "up"
return line # type = 0
elif type == 3:
prev = [0]*len(line)
elif type == 4: # "paeth"
type = 1
if type == 0:
out.extend(line)
elif type == 1:
sub()
elif type == 2:
up()
elif type == 3:
average()
else: # type == 4
paeth()
return out
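# A small worked example: the "sub" filter (type 1) on one row of three RGB
# pixels (fo == 3 bytes per pixel), with no previous scanline:
#
#   filter_scanline(1, [10, 20, 30, 13, 24, 35, 16, 28, 40], 3)
#   # -> array('B', [1, 10, 20, 30, 3, 4, 5, 3, 4, 5])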
def from_array(a, mode=None, info={}):
"""Create a PNG :class:`Image` object from a 2- or 3-dimensional array.
One application of this function is easy PIL-style saving:
``png.from_array(pixels, 'L').save('foo.png')``.
.. note :
The use of the term *3-dimensional* is for marketing purposes
only. It doesn't actually work. Please bear with us. Meanwhile
enjoy the complimentary snacks (on request) and please use a
2-dimensional array.
Unless they are specified using the *info* parameter, the PNG's
height and width are taken from the array size. For a 3 dimensional
array the first axis is the height; the second axis is the width;
and the third axis is the channel number. Thus an RGB image that is
16 pixels high and 8 wide will use an array that is 16x8x3. For 2
dimensional arrays the first axis is the height, but the second axis
is ``width*channels``, so an RGB image that is 16 pixels high and 8
wide will use a 2-dimensional array that is 16x24 (each row will be
8*3==24 sample values).
*mode* is a string that specifies the image colour format in a
PIL-style mode. It can be:
``'L'``
greyscale (1 channel)
``'LA'``
greyscale with alpha (2 channel)
``'RGB'``
colour image (3 channel)
``'RGBA'``
colour image with alpha (4 channel)
The mode string can also specify the bit depth (overriding how this
function normally derives the bit depth, see below). Appending
``';16'`` to the mode will cause the PNG to be 16 bits per channel;
any decimal from 1 to 16 can be used to specify the bit depth.
When a 2-dimensional array is used *mode* determines how many
channels the image has, and so allows the width to be derived from
the second array dimension.
The array is expected to be a ``numpy`` array, but it can be any
suitable Python sequence. For example, a list of lists can be used:
``png.from_array([[0, 255, 0], [255, 0, 255]], 'L')``. The exact
rules are: ``len(a)`` gives the first dimension, height;
``len(a[0])`` gives the second dimension; ``len(a[0][0])`` gives the
third dimension, unless an exception is raised in which case a
2-dimensional array is assumed. It's slightly more complicated than
that because an iterator of rows can be used, and it all still
works. Using an iterator allows data to be streamed efficiently.
The bit depth of the PNG is normally taken from the array element's
datatype (but if *mode* specifies a bitdepth then that is used
instead). The array element's datatype is determined in a way which
is supposed to work both for ``numpy`` arrays and for Python
``array.array`` objects. A 1 byte datatype will give a bit depth of
8, a 2 byte datatype will give a bit depth of 16. If the datatype
does not have an implicit size, for example it is a plain Python
list of lists, as above, then a default of 8 is used.
The *info* parameter is a dictionary that can be used to specify
metadata (in the same style as the arguments to the
:class:``png.Writer`` class). For this function the keys that are
useful are:
height
overrides the height derived from the array dimensions and allows
*a* to be an iterable.
width
overrides the width derived from the array dimensions.
bitdepth
overrides the bit depth derived from the element datatype (but
must match *mode* if that also specifies a bit depth).
Generally anything specified in the
*info* dictionary will override any implicit choices that this
function would otherwise make, but must match any explicit ones.
For example, if the *info* dictionary has a ``greyscale`` key then
this must be true when mode is ``'L'`` or ``'LA'`` and false when
mode is ``'RGB'`` or ``'RGBA'``.
"""
# We abuse the *info* parameter by modifying it. Take a copy here.
# (Also typechecks *info* to some extent).
info = dict(info)
# Syntax check mode string.
bitdepth = None
try:
mode = mode.split(';')
if len(mode) not in (1,2):
raise Error()
if mode[0] not in ('L', 'LA', 'RGB', 'RGBA'):
raise Error()
if len(mode) == 2:
try:
bitdepth = int(mode[1])
except:
raise Error()
except Error:
raise Error("mode string should be 'RGB' or 'L;16' or similar.")
mode = mode[0]
# Get bitdepth from *mode* if possible.
if bitdepth:
if info.get('bitdepth') and bitdepth != info['bitdepth']:
raise Error("mode bitdepth (%d) should match info bitdepth (%d)." %
(bitdepth, info['bitdepth']))
info['bitdepth'] = bitdepth
# Fill in and/or check entries in *info*.
# Dimensions.
if 'size' in info:
# Check width, height, size all match where used.
for dimension,axis in [('width', 0), ('height', 1)]:
if dimension in info:
if info[dimension] != info['size'][axis]:
raise Error(
"info[%r] shhould match info['size'][%r]." %
(dimension, axis))
info['width'],info['height'] = info['size']
if 'height' not in info:
try:
l = len(a)
except:
raise Error(
"len(a) does not work, supply info['height'] instead.")
info['height'] = l
# Colour format.
if 'greyscale' in info:
if bool(info['greyscale']) != ('L' in mode):
raise Error("info['greyscale'] should match mode.")
info['greyscale'] = 'L' in mode
if 'alpha' in info:
if bool(info['alpha']) != ('A' in mode):
raise Error("info['alpha'] should match mode.")
info['alpha'] = 'A' in mode
planes = len(mode)
if 'planes' in info:
if info['planes'] != planes:
raise Error("info['planes'] should match mode.")
# In order to work out whether we the array is 2D or 3D we need its
# first row, which requires that we take a copy of its iterator.
# We may also need the first row to derive width and bitdepth.
a,t = itertools.tee(a)
row = next_(t)
del t
try:
row[0][0]
threed = True
testelement = row[0]
except:
threed = False
testelement = row
if 'width' not in info:
if threed:
width = len(row)
else:
width = len(row) // planes
info['width'] = width
# Not implemented yet
assert not threed
if 'bitdepth' not in info:
try:
dtype = testelement.dtype
# goto the "else:" clause. Sorry.
except:
try:
# Try a Python array.array.
bitdepth = 8 * testelement.itemsize
except:
# We can't determine it from the array element's
# datatype, use a default of 8.
bitdepth = 8
else:
# If we got here without exception, we now assume that
# the array is a numpy array.
if dtype.kind == 'b':
bitdepth = 1
else:
bitdepth = 8 * dtype.itemsize
info['bitdepth'] = bitdepth
for thing in 'width height bitdepth greyscale alpha'.split():
assert thing in info
return Image(a, info)
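# For example, from_array([[0, 255, 0], [255, 0, 255]], 'L').save('tiny.png')
# writes a 3x2 greyscale PNG (the filename is hypothetical).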
# So that refugees from PIL feel more at home. Not documented.
fromarray = from_array
class Image:
"""A PNG image.
You can create an :class:`Image` object from an array of pixels by calling
:meth:`png.from_array`. It can be saved to disk with the
:meth:`save` method."""
def __init__(self, rows, info):
"""
.. note ::
The constructor is not public. Please do not call it.
"""
self.rows = rows
self.info = info
def save(self, file):
"""Save the image to *file*. If *file* looks like an open file
descriptor then it is used, otherwise it is treated as a
filename and a fresh file is opened.
In general, you can only call this method once; after it has
been called the first time and the PNG image has been saved, the
source data will have been streamed, and cannot be streamed
again.
"""
w = Writer(**self.info)
try:
file.write
def close(): pass
except:
file = open(file, 'wb')
def close(): file.close()
try:
w.write(file, self.rows)
finally:
close()
class _readable:
"""
A simple file-like interface for strings and arrays.
"""
def __init__(self, buf):
self.buf = buf
self.offset = 0
def read(self, n):
r = self.buf[self.offset:self.offset+n]
if isarray(r):
r = r.tostring()
self.offset += n
return r
class Reader:
"""
PNG decoder in pure Python.
"""
def __init__(self, _guess=None, **kw):
"""
Create a PNG decoder object.
The constructor expects exactly one keyword argument. If you
supply a positional argument instead, it will guess the input
type. You can choose among the following keyword arguments:
filename
Name of input file (a PNG file).
file
A file-like object (object with a read() method).
bytes
``array`` or ``string`` with PNG data.
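        For example (a sketch; the filename is illustrative)::

            r = png.Reader(filename='picture.png')
            r = png.Reader(file=open('picture.png', 'rb'))
            r = png.Reader(bytes=open('picture.png', 'rb').read())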
"""
if ((_guess is not None and len(kw) != 0) or
(_guess is None and len(kw) != 1)):
raise TypeError("Reader() takes exactly 1 argument")
# Will be the first 8 bytes, later on. See validate_signature.
self.signature = None
self.transparent = None
# A pair of (len,type) if a chunk has been read but its data and
# checksum have not (in other words the file position is just
# past the 4 bytes that specify the chunk type). See preamble
# method for how this is used.
self.atchunk = None
if _guess is not None:
if isarray(_guess):
kw["bytes"] = _guess
elif isinstance(_guess, str):
kw["filename"] = _guess
elif isinstance(_guess, file):
kw["file"] = _guess
if "filename" in kw:
self.file = open(kw["filename"], "rb")
elif "file" in kw:
self.file = kw["file"]
elif "bytes" in kw:
self.file = _readable(kw["bytes"])
else:
raise TypeError("expecting filename, file or bytes array")
def chunk(self, seek=None):
"""
Read the next PNG chunk from the input file; returns a
(*type*,*data*) tuple. *type* is the chunk's type as a string
(all PNG chunk types are 4 characters long). *data* is the
chunk's data content, as a string.
If the optional `seek` argument is
specified then it will keep reading chunks until it either runs
out of file or finds the type specified by the argument. Note
that in general the order of chunks in PNGs is unspecified, so
using `seek` can cause you to miss chunks.
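        A short sketch (assumes `r` is a :class:`Reader`)::

            chunk_type, data = r.chunk()    # normally 'IHDR' comes first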
"""
self.validate_signature()
while True:
# http://www.w3.org/TR/PNG/#5Chunk-layout
if not self.atchunk:
self.atchunk = self.chunklentype()
length,type = self.atchunk
self.atchunk = None
data = self.file.read(length)
if len(data) != length:
raise ChunkError('Chunk %s too short for required %i octets.'
% (type, length))
checksum = self.file.read(4)
if len(checksum) != 4:
                raise ChunkError('Chunk %s too short for checksum.' % type)
if seek and type != seek:
continue
verify = zlib.crc32(strtobytes(type))
verify = zlib.crc32(data, verify)
# Whether the output from zlib.crc32 is signed or not varies
# according to hideous implementation details, see
# http://bugs.python.org/issue1202 .
# We coerce it to be positive here (in a way which works on
# Python 2.3 and older).
verify &= 2**32 - 1
verify = struct.pack('!I', verify)
if checksum != verify:
# print repr(checksum)
(a, ) = struct.unpack('!I', checksum)
(b, ) = struct.unpack('!I', verify)
raise ChunkError(
"Checksum error in %s chunk: 0x%08X != 0x%08X." %
(type, a, b))
return type, data
def chunks(self):
"""Return an iterator that will yield each chunk as a
(*chunktype*, *content*) pair.
"""
while True:
t,v = self.chunk()
yield t,v
if t == 'IEND':
break
def undo_filter(self, filter_type, scanline, previous):
"""Undo the filter for a scanline. `scanline` is a sequence of
bytes that does not include the initial filter type byte.
        `previous` is the decoded previous scanline (for straightlaced
images this is the previous pixel row, but for interlaced
images, it is the previous scanline in the reduced image, which
in general is not the previous pixel row in the final image).
When there is no previous scanline (the first row of a
straightlaced image, or the first row in one of the passes in an
interlaced image), then this argument should be ``None``.
The scanline will have the effects of filtering removed, and the
result will be returned as a fresh sequence of bytes.
"""
# :todo: Would it be better to update scanline in place?
# Create the result byte array. It seems that the best way to
# create the array to be the right size is to copy from an
# existing sequence. *sigh*
# If we fill the result with scanline, then this allows a
# micro-optimisation in the "null" and "sub" cases.
result = array('B', scanline)
if filter_type == 0:
# And here, we _rely_ on filling the result with scanline,
# above.
return result
if filter_type not in (1,2,3,4):
raise FormatError('Invalid PNG Filter Type.'
' See http://www.w3.org/TR/2003/REC-PNG-20031110/#9Filters .')
        # Filter unit.  The stride from one byte to the corresponding
        # byte of the previous pixel.  Normally this is the pixel size
        # in bytes, but when the pixel size is smaller than 1 byte, the
        # immediately previous byte is used instead.
fu = max(1, self.psize)
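        # Worked example: for 8-bit RGB, psize is 3 so fu is 3 (the
        # same channel of the pixel to the left); for 2-bit greyscale,
        # psize is 0.25 so fu is 1 (the immediately previous byte).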
# For the first line of a pass, synthesize a dummy previous
# line. An alternative approach would be to observe that on the
# first line 'up' is the same as 'null', 'paeth' is the same
# as 'sub', with only 'average' requiring any special case.
if not previous:
previous = array('B', [0]*len(scanline))
def sub():
"""Undo sub filter."""
ai = 0
            # Loop starts at index fu.  Observe that the initial part
# of the result is already filled in correctly with
# scanline.
for i in range(fu, len(result)):
x = scanline[i]
a = result[ai]
result[i] = (x + a) & 0xff
ai += 1
def up():
"""Undo up filter."""
for i in range(len(result)):
x = scanline[i]
b = previous[i]
result[i] = (x + b) & 0xff
def average():
"""Undo average filter."""
ai = -fu
for i in range(len(result)):
x = scanline[i]
if ai < 0:
a = 0
else:
a = result[ai]
b = previous[i]
result[i] = (x + ((a + b) >> 1)) & 0xff
ai += 1
def paeth():
"""Undo Paeth filter."""
# Also used for ci.
ai = -fu
for i in range(len(result)):
x = scanline[i]
if ai < 0:
a = c = 0
else:
a = result[ai]
c = previous[ai]
b = previous[i]
p = a + b - c
pa = abs(p - a)
pb = abs(p - b)
pc = abs(p - c)
if pa <= pb and pa <= pc:
pr = a
elif pb <= pc:
pr = b
else:
pr = c
result[i] = (x + pr) & 0xff
ai += 1
# Call appropriate filter algorithm. Note that 0 has already
# been dealt with.
(None, sub, up, average, paeth)[filter_type]()
return result
def deinterlace(self, raw):
"""
Read raw pixel data, undo filters, deinterlace, and flatten.
Return in flat row flat pixel format.
"""
# print >> sys.stderr, ("Reading interlaced, w=%s, r=%s, planes=%s," +
# " bpp=%s") % (self.width, self.height, self.planes, self.bps)
# Values per row (of the target image)
vpr = self.width * self.planes
# Make a result array, and make it big enough. Interleaving
# writes to the output array randomly (well, not quite), so the
# entire output array must be in memory.
fmt = 'BH'[self.bitdepth > 8]
a = array(fmt, [0]*vpr*self.height)
source_offset = 0
for xstart, ystart, xstep, ystep in _adam7:
# print >> sys.stderr, "Adam7: start=%s,%s step=%s,%s" % (
# xstart, ystart, xstep, ystep)
if xstart >= self.width:
continue
# The previous (reconstructed) scanline. None at the
# beginning of a pass to indicate that there is no previous
# line.
recon = None
# Pixels per row (reduced pass image)
ppr = int(math.ceil((self.width-xstart)/float(xstep)))
# Row size in bytes for this pass.
row_size = int(math.ceil(self.psize * ppr))
for y in range(ystart, self.height, ystep):
filter_type = raw[source_offset]
source_offset += 1
scanline = raw[source_offset:source_offset+row_size]
source_offset += row_size
recon = self.undo_filter(filter_type, scanline, recon)
# Convert so that there is one element per pixel value
flat = self.serialtoflat(recon, ppr)
if xstep == 1:
assert xstart == 0
offset = y * vpr
a[offset:offset+vpr] = flat
else:
offset = y * vpr + xstart * self.planes
end_offset = (y+1) * vpr
skip = self.planes * xstep
for i in range(self.planes):
a[offset+i:end_offset:skip] = \
flat[i::self.planes]
return a
def iterboxed(self, rows):
"""Iterator that yields each scanline in boxed row flat pixel
format. `rows` should be an iterator that yields the bytes of
each row in turn.
"""
def asvalues(raw):
"""Convert a row of raw bytes into a flat row. Result may
or may not share with argument"""
if self.bitdepth == 8:
return raw
if self.bitdepth == 16:
raw = tostring(raw)
return array('H', struct.unpack('!%dH' % (len(raw)//2), raw))
assert self.bitdepth < 8
width = self.width
# Samples per byte
spb = 8//self.bitdepth
out = array('B')
mask = 2**self.bitdepth - 1
shifts = map(self.bitdepth.__mul__, reversed(range(spb)))
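            # Worked example for bitdepth 2: spb is 4, mask is 0b11,
            # shifts is [6, 4, 2, 0], so the byte 0xe4 (0b11100100)
            # unpacks to the samples [3, 2, 1, 0].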
for o in raw:
out.extend(map(lambda i: mask&(o>>i), shifts))
return out[:width]
return imap_(asvalues, rows)
def serialtoflat(self, bytes, width=None):
"""Convert serial format (byte stream) pixel data to flat row
flat pixel.
"""
if self.bitdepth == 8:
return bytes
if self.bitdepth == 16:
bytes = tostring(bytes)
return array('H',
struct.unpack('!%dH' % (len(bytes)//2), bytes))
assert self.bitdepth < 8
if width is None:
width = self.width
# Samples per byte
spb = 8//self.bitdepth
out = array('B')
mask = 2**self.bitdepth - 1
shifts = map(self.bitdepth.__mul__, reversed(range(spb)))
l = width
for o in bytes:
out.extend([(mask&(o>>s)) for s in shifts][:l])
l -= spb
if l <= 0:
l = width
return out
def iterstraight(self, raw):
"""Iterator that undoes the effect of filtering, and yields each
row in serialised format (as a sequence of bytes). Assumes input
is straightlaced. `raw` should be an iterable that yields the
raw bytes in chunks of arbitrary size."""
# length of row, in bytes
rb = self.row_bytes
a = array('B')
# The previous (reconstructed) scanline. None indicates first
# line of image.
recon = None
for some in raw:
a.extend(some)
while len(a) >= rb + 1:
filter_type = a[0]
scanline = a[1:rb+1]
del a[:rb+1]
recon = self.undo_filter(filter_type, scanline, recon)
yield recon
if len(a) != 0:
# :file:format We get here with a file format error: when the
# available bytes (after decompressing) do not pack into exact
# rows.
raise FormatError(
'Wrong size for decompressed IDAT chunk.')
assert len(a) == 0
def validate_signature(self):
"""If signature (header) has not been read then read and
validate it; otherwise do nothing.
"""
if self.signature:
return
self.signature = self.file.read(8)
if self.signature != _signature:
raise FormatError("PNG file has invalid signature.")
def preamble(self):
"""
Extract the image metadata by reading the initial part of the PNG
file up to the start of the ``IDAT`` chunk. All the chunks that
precede the ``IDAT`` chunk are read and either processed for
metadata or discarded.
"""
self.validate_signature()
while True:
if not self.atchunk:
self.atchunk = self.chunklentype()
if self.atchunk is None:
raise FormatError(
'This PNG file has no IDAT chunks.')
if self.atchunk[1] == 'IDAT':
return
self.process_chunk()
def chunklentype(self):
"""Reads just enough of the input to determine the next
chunk's length and type, returned as a (*length*, *type*) pair
where *type* is a string. If there are no more chunks, ``None``
is returned.
"""
x = self.file.read(8)
if not x:
return None
if len(x) != 8:
raise FormatError(
'End of file whilst reading chunk length and type.')
length,type = struct.unpack('!I4s', x)
type = bytestostr(type)
if length > 2**31-1:
raise FormatError('Chunk %s is too large: %d.' % (type,length))
return length,type
def process_chunk(self):
"""Process the next chunk and its data. This only processes the
following chunk types, all others are ignored: ``IHDR``,
``PLTE``, ``bKGD``, ``tRNS``, ``gAMA``, ``sBIT``.
"""
type, data = self.chunk()
if type == 'IHDR':
# http://www.w3.org/TR/PNG/#11IHDR
if len(data) != 13:
raise FormatError('IHDR chunk has incorrect length.')
(self.width, self.height, self.bitdepth, self.color_type,
self.compression, self.filter,
self.interlace) = struct.unpack("!2I5B", data)
# Check that the header specifies only valid combinations.
if self.bitdepth not in (1,2,4,8,16):
raise Error("invalid bit depth %d" % self.bitdepth)
if self.color_type not in (0,2,3,4,6):
raise Error("invalid colour type %d" % self.color_type)
# Check indexed (palettized) images have 8 or fewer bits
# per pixel; check only indexed or greyscale images have
# fewer than 8 bits per pixel.
if ((self.color_type & 1 and self.bitdepth > 8) or
(self.bitdepth < 8 and self.color_type not in (0,3))):
raise FormatError("Illegal combination of bit depth (%d)"
" and colour type (%d)."
" See http://www.w3.org/TR/2003/REC-PNG-20031110/#table111 ."
% (self.bitdepth, self.color_type))
if self.compression != 0:
raise Error("unknown compression method %d" % self.compression)
if self.filter != 0:
raise FormatError("Unknown filter method %d,"
" see http://www.w3.org/TR/2003/REC-PNG-20031110/#9Filters ."
% self.filter)
if self.interlace not in (0,1):
raise FormatError("Unknown interlace method %d,"
" see http://www.w3.org/TR/2003/REC-PNG-20031110/#8InterlaceMethods ."
% self.interlace)
# Derived values
# http://www.w3.org/TR/PNG/#6Colour-values
colormap = bool(self.color_type & 1)
greyscale = not (self.color_type & 2)
alpha = bool(self.color_type & 4)
color_planes = (3,1)[greyscale or colormap]
planes = color_planes + alpha
self.colormap = colormap
self.greyscale = greyscale
self.alpha = alpha
self.color_planes = color_planes
self.planes = planes
self.psize = float(self.bitdepth)/float(8) * planes
if int(self.psize) == self.psize:
self.psize = int(self.psize)
self.row_bytes = int(math.ceil(self.width * self.psize))
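            # For example, a 32 pixel wide 1-bit greyscale image has
            # psize 0.125 and row_bytes ceil(32 * 0.125) = 4.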
# Stores PLTE chunk if present, and is used to check
# chunk ordering constraints.
self.plte = None
# Stores tRNS chunk if present, and is used to check chunk
# ordering constraints.
self.trns = None
# Stores sbit chunk if present.
self.sbit = None
elif type == 'PLTE':
# http://www.w3.org/TR/PNG/#11PLTE
if self.plte:
warnings.warn("Multiple PLTE chunks present.")
self.plte = data
if len(data) % 3 != 0:
raise FormatError(
"PLTE chunk's length should be a multiple of 3.")
if len(data) > (2**self.bitdepth)*3:
raise FormatError("PLTE chunk is too long.")
if len(data) == 0:
raise FormatError("Empty PLTE is not allowed.")
elif type == 'bKGD':
try:
if self.colormap:
if not self.plte:
warnings.warn(
"PLTE chunk is required before bKGD chunk.")
self.background = struct.unpack('B', data)
else:
self.background = struct.unpack("!%dH" % self.color_planes,
data)
except struct.error:
raise FormatError("bKGD chunk has incorrect length.")
elif type == 'tRNS':
# http://www.w3.org/TR/PNG/#11tRNS
self.trns = data
if self.colormap:
if not self.plte:
warnings.warn("PLTE chunk is required before tRNS chunk.")
else:
if len(data) > len(self.plte)/3:
# Was warning, but promoted to Error as it
# would otherwise cause pain later on.
raise FormatError("tRNS chunk is too long.")
else:
if self.alpha:
raise FormatError(
"tRNS chunk is not valid with colour type %d." %
self.color_type)
try:
self.transparent = \
struct.unpack("!%dH" % self.color_planes, data)
except struct.error:
raise FormatError("tRNS chunk has incorrect length.")
elif type == 'gAMA':
try:
self.gamma = struct.unpack("!L", data)[0] / 100000.0
except struct.error:
raise FormatError("gAMA chunk has incorrect length.")
elif type == 'sBIT':
self.sbit = data
if (self.colormap and len(data) != 3 or
not self.colormap and len(data) != self.planes):
raise FormatError("sBIT chunk has incorrect length.")
def read(self):
"""
Read the PNG file and decode it. Returns (`width`, `height`,
`pixels`, `metadata`).
May use excessive memory.
`pixels` are returned in boxed row flat pixel format.
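        A short usage sketch (the filename is illustrative)::

            r = png.Reader(filename='picture.png')
            w, h, pixels, meta = r.read()
            for row in pixels:
                pass    # each row is a sequence of pixel values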
"""
def iteridat():
"""Iterator that yields all the ``IDAT`` chunks as strings."""
while True:
try:
type, data = self.chunk()
except ValueError:
e = geterror()
raise ChunkError(e.args[0])
if type == 'IEND':
# http://www.w3.org/TR/PNG/#11IEND
break
if type != 'IDAT':
continue
# type == 'IDAT'
# http://www.w3.org/TR/PNG/#11IDAT
if self.colormap and not self.plte:
warnings.warn("PLTE chunk is required before IDAT chunk")
yield data
def iterdecomp(idat):
"""Iterator that yields decompressed strings. `idat` should
be an iterator that yields the ``IDAT`` chunk data.
"""
            # Currently, with no max_length parameter to decompress, this
# routine will do one yield per IDAT chunk. So not very
# incremental.
d = zlib.decompressobj()
# Each IDAT chunk is passed to the decompressor, then any
# remaining state is decompressed out.
for data in idat:
# :todo: add a max_length argument here to limit output
# size.
yield array('B', d.decompress(data))
yield array('B', d.flush())
self.preamble()
raw = iterdecomp(iteridat())
if self.interlace:
raw = array('B', itertools.chain(*raw))
arraycode = 'BH'[self.bitdepth>8]
# Like :meth:`group` but producing an array.array object for
# each row.
pixels = imap_(lambda *row: array(arraycode, row),
*[iter(self.deinterlace(raw))]*self.width*self.planes)
else:
pixels = self.iterboxed(self.iterstraight(raw))
meta = dict()
for attr in 'greyscale alpha planes bitdepth interlace'.split():
meta[attr] = getattr(self, attr)
meta['size'] = (self.width, self.height)
for attr in 'gamma transparent background'.split():
a = getattr(self, attr, None)
if a is not None:
meta[attr] = a
return self.width, self.height, pixels, meta
def read_flat(self):
"""
Read a PNG file and decode it into flat row flat pixel format.
Returns (*width*, *height*, *pixels*, *metadata*).
May use excessive memory.
`pixels` are returned in flat row flat pixel format.
See also the :meth:`read` method which returns pixels in the
more stream-friendly boxed row flat pixel format.
"""
x, y, pixel, meta = self.read()
arraycode = 'BH'[meta['bitdepth']>8]
pixel = array(arraycode, itertools.chain(*pixel))
return x, y, pixel, meta
def palette(self, alpha='natural'):
"""Returns a palette that is a sequence of 3-tuples or 4-tuples,
synthesizing it from the ``PLTE`` and ``tRNS`` chunks. These
chunks should have already been processed (for example, by
calling the :meth:`preamble` method). All the tuples are the
same size: 3-tuples if there is no ``tRNS`` chunk, 4-tuples when
there is a ``tRNS`` chunk. Assumes that the image is colour type
3 and therefore a ``PLTE`` chunk is required.
If the `alpha` argument is ``'force'`` then an alpha channel is
always added, forcing the result to be a sequence of 4-tuples.
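        A sketch (assumes `r` is a :class:`Reader` for a colour type 3
        image)::

            r.preamble()
            plte = r.palette()    # sequence of (R,G,B) or (R,G,B,A) tuples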
"""
if not self.plte:
raise FormatError(
"Required PLTE chunk is missing in colour type 3 image.")
plte = group(array('B', self.plte), 3)
if self.trns or alpha == 'force':
trns = array('B', self.trns or '')
trns.extend([255]*(len(plte)-len(trns)))
plte = map(operator.add, plte, group(trns, 1))
return plte
def asDirect(self):
"""Returns the image data as a direct representation of an
``x * y * planes`` array. This method is intended to remove the
need for callers to deal with palettes and transparency
themselves. Images with a palette (colour type 3)
are converted to RGB or RGBA; images with transparency (a
``tRNS`` chunk) are converted to LA or RGBA as appropriate.
When returned in this format the pixel values represent the
colour value directly without needing to refer to palettes or
transparency information.
Like the :meth:`read` method this method returns a 4-tuple:
(*width*, *height*, *pixels*, *meta*)
This method normally returns pixel values with the bit depth
they have in the source image, but when the source PNG has an
``sBIT`` chunk it is inspected and can reduce the bit depth of
the result pixels; pixel values will be reduced according to
the bit depth specified in the ``sBIT`` chunk (PNG nerds should
note a single result bit depth is used for all channels; the
maximum of the ones specified in the ``sBIT`` chunk. An RGB565
image will be rescaled to 6-bit RGB666).
The *meta* dictionary that is returned reflects the `direct`
format and not the original source image. For example, an RGB
source image with a ``tRNS`` chunk to represent a transparent
colour, will have ``planes=3`` and ``alpha=False`` for the
source image, but the *meta* dictionary returned by this method
will have ``planes=4`` and ``alpha=True`` because an alpha
channel is synthesized and added.
*pixels* is the pixel data in boxed row flat pixel format (just
like the :meth:`read` method).
All the other aspects of the image data are not changed.
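        A sketch of typical use::

            w, h, pixels, meta = r.asDirect()
            # meta describes the returned pixels, free of palette and
            # tRNS indirection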
"""
self.preamble()
# Simple case, no conversion necessary.
if not self.colormap and not self.trns and not self.sbit:
return self.read()
x,y,pixels,meta = self.read()
if self.colormap:
meta['colormap'] = False
meta['alpha'] = bool(self.trns)
meta['bitdepth'] = 8
meta['planes'] = 3 + bool(self.trns)
plte = self.palette()
def iterpal(pixels):
for row in pixels:
row = map(plte.__getitem__, row)
yield array('B', itertools.chain(*row))
pixels = iterpal(pixels)
elif self.trns:
# It would be nice if there was some reasonable way of doing
# this without generating a whole load of intermediate tuples.
# But tuples does seem like the easiest way, with no other way
# clearly much simpler or much faster. (Actually, the L to LA
# conversion could perhaps go faster (all those 1-tuples!), but
# I still wonder whether the code proliferation is worth it)
it = self.transparent
maxval = 2**meta['bitdepth']-1
planes = meta['planes']
meta['alpha'] = True
meta['planes'] += 1
typecode = 'BH'[meta['bitdepth']>8]
def itertrns(pixels):
for row in pixels:
# For each row we group it into pixels, then form a
# characterisation vector that says whether each pixel
# is opaque or not. Then we convert True/False to
# 0/maxval (by multiplication), and add it as the extra
# channel.
row = group(row, planes)
opa = map(it.__ne__, row)
opa = map(maxval.__mul__, opa)
opa = zip(opa) # convert to 1-tuples
yield array(typecode,
itertools.chain(*map(operator.add, row, opa)))
pixels = itertrns(pixels)
targetbitdepth = None
if self.sbit:
sbit = struct.unpack('%dB' % len(self.sbit), self.sbit)
targetbitdepth = max(sbit)
if targetbitdepth > meta['bitdepth']:
raise Error('sBIT chunk %r exceeds bitdepth %d' %
                    (sbit, meta['bitdepth']))
if min(sbit) <= 0:
raise Error('sBIT chunk %r has a 0-entry' % sbit)
if targetbitdepth == meta['bitdepth']:
targetbitdepth = None
if targetbitdepth:
shift = meta['bitdepth'] - targetbitdepth
meta['bitdepth'] = targetbitdepth
def itershift(pixels):
for row in pixels:
yield map(shift.__rrshift__, row)
pixels = itershift(pixels)
return x,y,pixels,meta
def asFloat(self, maxval=1.0):
"""Return image pixels as per :meth:`asDirect` method, but scale
all pixel values to be floating point values between 0.0 and
*maxval*.
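        A sketch::

            w, h, pixels, meta = r.asFloat()
            # sample values are now floats in [0.0, 1.0]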
"""
x,y,pixels,info = self.asDirect()
sourcemaxval = 2**info['bitdepth']-1
del info['bitdepth']
info['maxval'] = float(maxval)
factor = float(maxval)/float(sourcemaxval)
def iterfloat():
for row in pixels:
yield map(factor.__mul__, row)
return x,y,iterfloat(),info
def _as_rescale(self, get, targetbitdepth):
"""Helper used by :meth:`asRGB8` and :meth:`asRGBA8`."""
width,height,pixels,meta = get()
maxval = 2**meta['bitdepth'] - 1
targetmaxval = 2**targetbitdepth - 1
factor = float(targetmaxval) / float(maxval)
meta['bitdepth'] = targetbitdepth
def iterscale():
for row in pixels:
yield map(lambda x: int(round(x*factor)), row)
return width, height, iterscale(), meta
def asRGB8(self):
"""Return the image data as an RGB pixels with 8-bits per
sample. This is like the :meth:`asRGB` method except that
this method additionally rescales the values so that they
are all between 0 and 255 (8-bit). In the case where the
source image has a bit depth < 8 the transformation preserves
all the information; where the source image has bit depth
> 8, then rescaling to 8-bit values loses precision. No
dithering is performed. Like :meth:`asRGB`, an alpha channel
in the source image will raise an exception.
This function returns a 4-tuple:
(*width*, *height*, *pixels*, *metadata*).
*width*, *height*, *metadata* are as per the :meth:`read` method.
*pixels* is the pixel data in boxed row flat pixel format.
"""
return self._as_rescale(self.asRGB, 8)
def asRGBA8(self):
"""Return the image data as RGBA pixels with 8-bits per
sample. This method is similar to :meth:`asRGB8` and
:meth:`asRGBA`: The result pixels have an alpha channel, *and*
values are rescaled to the range 0 to 255. The alpha channel is
synthesized if necessary (with a small speed penalty).
"""
return self._as_rescale(self.asRGBA, 8)
def asRGB(self):
"""Return image as RGB pixels. RGB colour images are passed
through unchanged; greyscales are expanded into RGB
triplets (there is a small speed overhead for doing this).
An alpha channel in the source image will raise an
exception.
The return values are as for the :meth:`read` method
except that the *metadata* reflect the returned pixels, not the
source image. In particular, for this method
``metadata['greyscale']`` will be ``False``.
"""
width,height,pixels,meta = self.asDirect()
if meta['alpha']:
raise Error("will not convert image with alpha channel to RGB")
if not meta['greyscale']:
return width,height,pixels,meta
meta['greyscale'] = False
typecode = 'BH'[meta['bitdepth'] > 8]
def iterrgb():
for row in pixels:
a = array(typecode, [0]) * 3 * width
for i in range(3):
a[i::3] = row
yield a
return width,height,iterrgb(),meta
def asRGBA(self):
"""Return image as RGBA pixels. Greyscales are expanded into
RGB triplets; an alpha channel is synthesized if necessary.
The return values are as for the :meth:`read` method
except that the *metadata* reflect the returned pixels, not the
source image. In particular, for this method
``metadata['greyscale']`` will be ``False``, and
``metadata['alpha']`` will be ``True``.
"""
width,height,pixels,meta = self.asDirect()
if meta['alpha'] and not meta['greyscale']:
return width,height,pixels,meta
typecode = 'BH'[meta['bitdepth'] > 8]
maxval = 2**meta['bitdepth'] - 1
def newarray():
return array(typecode, [0]) * 4 * width
if meta['alpha'] and meta['greyscale']:
# LA to RGBA
def convert():
for row in pixels:
# Create a fresh target row, then copy L channel
# into first three target channels, and A channel
# into fourth channel.
a = newarray()
for i in range(3):
a[i::4] = row[0::2]
a[3::4] = row[1::2]
yield a
elif meta['greyscale']:
# L to RGBA
def convert():
for row in pixels:
a = newarray()
for i in range(3):
a[i::4] = row
a[3::4] = array(typecode, [maxval]) * width
yield a
else:
assert not meta['alpha'] and not meta['greyscale']
# RGB to RGBA
def convert():
for row in pixels:
a = newarray()
for i in range(3):
a[i::4] = row[i::3]
a[3::4] = array(typecode, [maxval]) * width
yield a
meta['alpha'] = True
meta['greyscale'] = False
return width,height,convert(),meta
# === Legacy Version Support ===
# :pyver:old: PyPNG works on Python versions 2.3 and 2.2, but not
# without some awkward problems. Really PyPNG works on Python 2.4 (and
# above); it works on Pythons 2.3 and 2.2 by virtue of fixing up
# problems here. It's a bit ugly (which is why it's hidden down here).
#
# Generally the strategy is one of pretending that we're running on
# Python 2.4 (or above), and patching up the library support on earlier
# versions so that it looks enough like Python 2.4. When it comes to
# Python 2.2 there is one thing we cannot patch: extended slices
# http://www.python.org/doc/2.3/whatsnew/section-slices.html.
# Instead we simply declare that features that are implemented using
# extended slices will not work on Python 2.2.
#
# In order to work on Python 2.3 we fix up a recurring annoyance involving
# the array type. In Python 2.3 an array cannot be initialised with an
# array, and it cannot be extended with a list (or other sequence).
# Both of those are repeated issues in the code. Whilst I would not
# normally tolerate this sort of behaviour, here we "shim" a replacement
# for array into place (and hope no-one notices).  You never read this.
#
# In an amusing case of warty hacks on top of warty hacks... the array
# shimming we try and do only works on Python 2.3 and above (you can't
# subclass array.array in Python 2.2). So to get it working on Python
# 2.2 we go for something much simpler and (probably) way slower.
try:
array('B').extend([])
array('B', array('B'))
except:
# Expect to get here on Python 2.3
try:
class _array_shim(array):
true_array = array
def __new__(cls, typecode, init=None):
super_new = super(_array_shim, cls).__new__
it = super_new(cls, typecode)
if init is None:
return it
it.extend(init)
return it
def extend(self, extension):
super_extend = super(_array_shim, self).extend
if isinstance(extension, self.true_array):
return super_extend(extension)
if not isinstance(extension, (list, str)):
# Convert to list. Allows iterators to work.
extension = list(extension)
return super_extend(self.true_array(self.typecode, extension))
array = _array_shim
except:
# Expect to get here on Python 2.2
def array(typecode, init=()):
if type(init) == str:
return map(ord, init)
return list(init)
# Further hacks to get it limping along on Python 2.2
try:
enumerate
except:
def enumerate(seq):
i=0
for x in seq:
yield i,x
i += 1
try:
reversed
except:
def reversed(l):
l = list(l)
l.reverse()
for x in l:
yield x
try:
itertools
except:
class _dummy_itertools:
pass
itertools = _dummy_itertools()
def _itertools_imap(f, seq):
for x in seq:
yield f(x)
itertools.imap = _itertools_imap
def _itertools_chain(*iterables):
for it in iterables:
for element in it:
yield element
itertools.chain = _itertools_chain
# === Internal Test Support ===
# This section comprises the tests that are internally validated (as
# opposed to tests which produce output files that are externally
# validated). Primarily they are unittests.
# Note that it is difficult to internally validate the results of
# writing a PNG file. The only thing we can do is read it back in
# again, which merely checks consistency, not that the PNG file we
# produce is valid.
# Run the tests from the command line:
# python -c 'import png;png.test()'
# (For an in-memory binary file IO object) We use BytesIO where
# available, otherwise we use StringIO, but name it BytesIO.
try:
from io import BytesIO
except:
from StringIO import StringIO as BytesIO
import tempfile
# http://www.python.org/doc/2.4.4/lib/module-unittest.html
import unittest
def test():
unittest.main(__name__)
def topngbytes(name, rows, x, y, **k):
"""Convenience function for creating a PNG file "in memory" as a
string. Creates a :class:`Writer` instance using the keyword arguments,
then passes `rows` to its :meth:`Writer.write` method. The resulting
PNG file is returned as a string. `name` is used to identify the file for
debugging.
"""
import os
print (name)
f = BytesIO()
w = Writer(x, y, **k)
w.write(f, rows)
if os.environ.get('PYPNG_TEST_TMP'):
w = open(name, 'wb')
w.write(f.getvalue())
w.close()
return f.getvalue()
def testWithIO(inp, out, f):
"""Calls the function `f` with ``sys.stdin`` changed to `inp`
and ``sys.stdout`` changed to `out`. They are restored when `f`
returns. This function returns whatever `f` returns.
"""
import os
try:
oldin,sys.stdin = sys.stdin,inp
oldout,sys.stdout = sys.stdout,out
x = f()
finally:
sys.stdin = oldin
sys.stdout = oldout
if os.environ.get('PYPNG_TEST_TMP') and hasattr(out,'getvalue'):
name = mycallersname()
if name:
w = open(name+'.png', 'wb')
w.write(out.getvalue())
w.close()
return x
def mycallersname():
"""Returns the name of the caller of the caller of this function
(hence the name of the caller of the function in which
"mycallersname()" textually appears). Returns None if this cannot
be determined."""
# http://docs.python.org/library/inspect.html#the-interpreter-stack
import inspect
frame = inspect.currentframe()
if not frame:
return None
frame_,filename_,lineno_,funname,linelist_,listi_ = (
inspect.getouterframes(frame)[2])
return funname
def seqtobytes(s):
"""Convert a sequence of integers to a *bytes* instance. Good for
plastering over Python 2 / Python 3 cracks.
"""
return strtobytes(''.join(chr(x) for x in s))
class Test(unittest.TestCase):
# This member is used by the superclass. If we don't define a new
# class here then when we use self.assertRaises() and the PyPNG code
# raises an assertion then we get no proper traceback. I can't work
# out why, but defining a new class here means we get a proper
# traceback.
class failureException(Exception):
pass
def helperLN(self, n):
mask = (1 << n) - 1
# Use small chunk_limit so that multiple chunk writing is
# tested. Making it a test for Issue 20.
w = Writer(15, 17, greyscale=True, bitdepth=n, chunk_limit=99)
f = BytesIO()
w.write_array(f, array('B', map(mask.__and__, range(1, 256))))
r = Reader(bytes=f.getvalue())
x,y,pixels,meta = r.read()
self.assertEqual(x, 15)
self.assertEqual(y, 17)
self.assertEqual(list(itertools.chain(*pixels)),
map(mask.__and__, range(1,256)))
def testL8(self):
return self.helperLN(8)
def testL4(self):
return self.helperLN(4)
def testL2(self):
"Also tests asRGB8."
w = Writer(1, 4, greyscale=True, bitdepth=2)
f = BytesIO()
w.write_array(f, array('B', range(4)))
r = Reader(bytes=f.getvalue())
x,y,pixels,meta = r.asRGB8()
self.assertEqual(x, 1)
self.assertEqual(y, 4)
for i,row in enumerate(pixels):
self.assertEqual(len(row), 3)
self.assertEqual(list(row), [0x55*i]*3)
def testP2(self):
"2-bit palette."
a = (255,255,255)
b = (200,120,120)
c = (50,99,50)
w = Writer(1, 4, bitdepth=2, palette=[a,b,c])
f = BytesIO()
w.write_array(f, array('B', (0,1,1,2)))
r = Reader(bytes=f.getvalue())
x,y,pixels,meta = r.asRGB8()
self.assertEqual(x, 1)
self.assertEqual(y, 4)
self.assertEqual(list(pixels), map(list, [a, b, b, c]))
def testPtrns(self):
"Test colour type 3 and tRNS chunk (and 4-bit palette)."
a = (50,99,50,50)
b = (200,120,120,80)
c = (255,255,255)
d = (200,120,120)
e = (50,99,50)
w = Writer(3, 3, bitdepth=4, palette=[a,b,c,d,e])
f = BytesIO()
w.write_array(f, array('B', (4, 3, 2, 3, 2, 0, 2, 0, 1)))
r = Reader(bytes=f.getvalue())
x,y,pixels,meta = r.asRGBA8()
self.assertEqual(x, 3)
self.assertEqual(y, 3)
c = c+(255,)
d = d+(255,)
e = e+(255,)
boxed = [(e,d,c),(d,c,a),(c,a,b)]
flat = map(lambda row: itertools.chain(*row), boxed)
self.assertEqual(map(list, pixels), map(list, flat))
def testRGBtoRGBA(self):
"asRGBA8() on colour type 2 source."""
# Test for Issue 26
r = Reader(bytes=_pngsuite['basn2c08'])
x,y,pixels,meta = r.asRGBA8()
# Test the pixels at row 9 columns 0 and 1.
row9 = list(pixels)[9]
self.assertEqual(row9[0:8],
[0xff, 0xdf, 0xff, 0xff, 0xff, 0xde, 0xff, 0xff])
def testLtoRGBA(self):
"asRGBA() on grey source."""
# Test for Issue 60
r = Reader(bytes=_pngsuite['basi0g08'])
x,y,pixels,meta = r.asRGBA()
row9 = list(list(pixels)[9])
self.assertEqual(row9[0:8],
[222, 222, 222, 255, 221, 221, 221, 255])
def testCtrns(self):
"Test colour type 2 and tRNS chunk."
# Test for Issue 25
r = Reader(bytes=_pngsuite['tbrn2c08'])
x,y,pixels,meta = r.asRGBA8()
# I just happen to know that the first pixel is transparent.
# In particular it should be #7f7f7f00
row0 = list(pixels)[0]
self.assertEqual(tuple(row0[0:4]), (0x7f, 0x7f, 0x7f, 0x00))
def testAdam7read(self):
"""Adam7 interlace reading.
Specifically, test that for images in the PngSuite that
have both an interlaced and straightlaced pair that both
images from the pair produce the same array of pixels."""
for candidate in _pngsuite:
if not candidate.startswith('basn'):
continue
candi = candidate.replace('n', 'i')
if candi not in _pngsuite:
continue
print ('adam7 read %s' % (candidate,))
straight = Reader(bytes=_pngsuite[candidate])
adam7 = Reader(bytes=_pngsuite[candi])
# Just compare the pixels. Ignore x,y (because they're
# likely to be correct?); metadata is ignored because the
# "interlace" member differs. Lame.
straight = straight.read()[2]
adam7 = adam7.read()[2]
self.assertEqual(map(list, straight), map(list, adam7))
def testAdam7write(self):
"""Adam7 interlace writing.
For each test image in the PngSuite, write an interlaced
and a straightlaced version. Decode both, and compare results.
"""
# Not such a great test, because the only way we can check what
# we have written is to read it back again.
for name,bytes in _pngsuite.items():
# Only certain colour types supported for this test.
if name[3:5] not in ['n0', 'n2', 'n4', 'n6']:
continue
it = Reader(bytes=bytes)
x,y,pixels,meta = it.read()
pngi = topngbytes('adam7wn'+name+'.png', pixels,
x=x, y=y, bitdepth=it.bitdepth,
greyscale=it.greyscale, alpha=it.alpha,
transparent=it.transparent,
interlace=False)
x,y,ps,meta = Reader(bytes=pngi).read()
it = Reader(bytes=bytes)
x,y,pixels,meta = it.read()
pngs = topngbytes('adam7wi'+name+'.png', pixels,
x=x, y=y, bitdepth=it.bitdepth,
greyscale=it.greyscale, alpha=it.alpha,
transparent=it.transparent,
interlace=True)
x,y,pi,meta = Reader(bytes=pngs).read()
self.assertEqual(map(list, ps), map(list, pi))
def testPGMin(self):
"""Test that the command line tool can read PGM files."""
def do():
return _main(['testPGMin'])
s = BytesIO()
s.write(strtobytes('P5 2 2 3\n'))
s.write(strtobytes('\x00\x01\x02\x03'))
s.flush()
s.seek(0)
o = BytesIO()
testWithIO(s, o, do)
r = Reader(bytes=o.getvalue())
x,y,pixels,meta = r.read()
self.assertTrue(r.greyscale)
self.assertEqual(r.bitdepth, 2)
def testPAMin(self):
"""Test that the command line tool can read PAM file."""
def do():
return _main(['testPAMin'])
s = BytesIO()
s.write(strtobytes('P7\nWIDTH 3\nHEIGHT 1\nDEPTH 4\nMAXVAL 255\n'
'TUPLTYPE RGB_ALPHA\nENDHDR\n'))
# The pixels in flat row flat pixel format
flat = [255,0,0,255, 0,255,0,120, 0,0,255,30]
asbytes = seqtobytes(flat)
s.write(asbytes)
s.flush()
s.seek(0)
o = BytesIO()
testWithIO(s, o, do)
r = Reader(bytes=o.getvalue())
x,y,pixels,meta = r.read()
self.assertTrue(r.alpha)
self.assertTrue(not r.greyscale)
self.assertEqual(list(itertools.chain(*pixels)), flat)
def testLA4(self):
"""Create an LA image with bitdepth 4."""
bytes = topngbytes('la4.png', [[5, 12]], 1, 1,
greyscale=True, alpha=True, bitdepth=4)
sbit = Reader(bytes=bytes).chunk('sBIT')[1]
self.assertEqual(sbit, strtobytes('\x04\x04'))
def testPNMsbit(self):
"""Test that PNM files can generates sBIT chunk."""
def do():
return _main(['testPNMsbit'])
s = BytesIO()
s.write(strtobytes('P6 8 1 1\n'))
for pixel in range(8):
s.write(struct.pack('<I', (0x4081*pixel)&0x10101)[:3])
s.flush()
s.seek(0)
o = BytesIO()
testWithIO(s, o, do)
r = Reader(bytes=o.getvalue())
sbit = r.chunk('sBIT')[1]
self.assertEqual(sbit, strtobytes('\x01\x01\x01'))
def testLtrns0(self):
"""Create greyscale image with tRNS chunk."""
return self.helperLtrns(0)
def testLtrns1(self):
"""Using 1-tuple for transparent arg."""
return self.helperLtrns((0,))
def helperLtrns(self, transparent):
"""Helper used by :meth:`testLtrns*`."""
pixels = zip([0x00, 0x38, 0x4c, 0x54, 0x5c, 0x40, 0x38, 0x00])
o = BytesIO()
w = Writer(8, 8, greyscale=True, bitdepth=1, transparent=transparent)
w.write_packed(o, pixels)
r = Reader(bytes=o.getvalue())
x,y,pixels,meta = r.asDirect()
self.assertTrue(meta['alpha'])
self.assertTrue(meta['greyscale'])
self.assertEqual(meta['bitdepth'], 1)
def testWinfo(self):
"""Test the dictionary returned by a `read` method can be used
as args for :meth:`Writer`.
"""
r = Reader(bytes=_pngsuite['basn2c16'])
info = r.read()[3]
w = Writer(**info)
def testPackedIter(self):
"""Test iterator for row when using write_packed.
Indicative for Issue 47.
"""
w = Writer(16, 2, greyscale=True, alpha=False, bitdepth=1)
o = BytesIO()
w.write_packed(o, [itertools.chain([0x0a], [0xaa]),
itertools.chain([0x0f], [0xff])])
r = Reader(bytes=o.getvalue())
x,y,pixels,info = r.asDirect()
pixels = list(pixels)
self.assertEqual(len(pixels), 2)
self.assertEqual(len(pixels[0]), 16)
def testInterlacedArray(self):
"""Test that reading an interlaced PNG yields each row as an
array."""
r = Reader(bytes=_pngsuite['basi0g08'])
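        # The bare .tostring attribute access (no call) asserts that
        # the row really is an array.array; a plain list would raise
        # AttributeError.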
list(r.read()[2])[0].tostring
def testTrnsArray(self):
"""Test that reading a type 2 PNG with tRNS chunk yields each
row as an array (using asDirect)."""
r = Reader(bytes=_pngsuite['tbrn2c08'])
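        # As above: the bare .tostring access asserts the row is an
        # array.array.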
list(r.asDirect()[2])[0].tostring
# Invalid file format tests. These construct various badly
# formatted PNG files, then feed them into a Reader. When
# everything is working properly, we should get FormatError
# exceptions raised.
def testEmpty(self):
"""Test empty file."""
r = Reader(bytes='')
self.assertRaises(FormatError, r.asDirect)
def testSigOnly(self):
"""Test file containing just signature bytes."""
r = Reader(bytes=_signature)
self.assertRaises(FormatError, r.asDirect)
def testExtraPixels(self):
"""Test file that contains too many pixels."""
def eachchunk(chunk):
if chunk[0] != 'IDAT':
return chunk
data = zlib.decompress(chunk[1])
data += strtobytes('\x00garbage')
data = zlib.compress(data)
chunk = (chunk[0], data)
return chunk
self.assertRaises(FormatError, self.helperFormat, eachchunk)
def testNotEnoughPixels(self):
def eachchunk(chunk):
if chunk[0] != 'IDAT':
return chunk
# Remove last byte.
data = zlib.decompress(chunk[1])
data = data[:-1]
data = zlib.compress(data)
return (chunk[0], data)
self.assertRaises(FormatError, self.helperFormat, eachchunk)
def helperFormat(self, f):
r = Reader(bytes=_pngsuite['basn0g01'])
o = BytesIO()
def newchunks():
for chunk in r.chunks():
yield f(chunk)
write_chunks(o, newchunks())
r = Reader(bytes=o.getvalue())
return list(r.asDirect()[2])
def testBadFilter(self):
def eachchunk(chunk):
if chunk[0] != 'IDAT':
return chunk
data = zlib.decompress(chunk[1])
# Corrupt the first filter byte
data = strtobytes('\x99') + data[1:]
data = zlib.compress(data)
return (chunk[0], data)
self.assertRaises(FormatError, self.helperFormat, eachchunk)
def testFlat(self):
"""Test read_flat."""
import hashlib
r = Reader(bytes=_pngsuite['basn0g02'])
x,y,pixel,meta = r.read_flat()
d = hashlib.md5(seqtobytes(pixel)).digest()
self.assertEqual(_enhex(d), '255cd971ab8cd9e7275ff906e5041aa0')
def testfromarray(self):
img = from_array([[0, 0x33, 0x66], [0xff, 0xcc, 0x99]], 'L')
img.save('testfromarray.png')
def testfromarrayL16(self):
img = from_array(group(range(2**16), 256), 'L;16')
img.save('testL16.png')
def testfromarrayRGB(self):
img = from_array([[0,0,0, 0,0,1, 0,1,0, 0,1,1],
[1,0,0, 1,0,1, 1,1,0, 1,1,1]], 'RGB;1')
o = BytesIO()
img.save(o)
def testfromarrayIter(self):
import itertools
i = itertools.islice(itertools.count(10), 20)
i = imap_(lambda x: [x, x, x], i)
img = from_array(i, 'RGB;5', dict(height=20))
f = open('testiter.png', 'wb')
img.save(f)
f.close()
# numpy dependent tests. These are skipped (with a message to
# sys.stderr) if numpy cannot be imported.
def testNumpyuint16(self):
"""numpy uint16."""
try:
import numpy
except ImportError:
sys.stderr.write("skipping numpy test\n")
return
rows = [map(numpy.uint16, range(0,0x10000,0x5555))]
b = topngbytes('numpyuint16.png', rows, 4, 1,
greyscale=True, alpha=False, bitdepth=16)
def testNumpyuint8(self):
"""numpy uint8."""
try:
import numpy
except ImportError:
sys.stderr.write("skipping numpy test\n")
return
rows = [map(numpy.uint8, range(0,0x100,0x55))]
b = topngbytes('numpyuint8.png', rows, 4, 1,
greyscale=True, alpha=False, bitdepth=8)
def testNumpybool(self):
"""numpy bool."""
try:
import numpy
except ImportError:
sys.stderr.write("skipping numpy test\n")
return
rows = [map(numpy.bool, [0,1])]
b = topngbytes('numpybool.png', rows, 2, 1,
greyscale=True, alpha=False, bitdepth=1)
def testNumpyarray(self):
"""numpy array."""
try:
import numpy
except ImportError:
sys.stderr.write("skipping numpy test\n")
return
pixels = numpy.array([[0,0x5555],[0x5555,0xaaaa]], numpy.uint16)
img = from_array(pixels, 'L')
img.save('testnumpyL16.png')
# === Command Line Support ===
def _dehex(s):
"""Liberally convert from hex string to binary string."""
import re
import binascii
# Remove all non-hexadecimal digits
s = re.sub(r'[^a-fA-F\d]', '', s)
    # binascii.unhexlify works in Python 2 and Python 3 (unlike
# thing.decode('hex')).
return binascii.unhexlify(strtobytes(s))
def _enhex(s):
"""Convert from binary string (bytes) to hex string (str)."""
import binascii
return bytestostr(binascii.hexlify(s))
# Copies of PngSuite test files taken
# from http://www.schaik.com/pngsuite/pngsuite_bas_png.html
# on 2009-02-19 by drj and converted to hex.
# Some of these are not actually in PngSuite (but maybe they should
# be?), they use the same naming scheme, but start with a capital
# letter.
_pngsuite = {
'basi0g01': _dehex("""
89504e470d0a1a0a0000000d49484452000000200000002001000000012c0677
cf0000000467414d41000186a031e8965f0000009049444154789c2d8d310ec2
300c45dfc682c415187a00a42e197ab81e83b127e00c5639001363a580d8582c
65c910357c4b78b0bfbfdf4f70168c19e7acb970a3f2d1ded9695ce5bf5963df
d92aaf4c9fd927ea449e6487df5b9c36e799b91bdf082b4d4bd4014fe4014b01
ab7a17aee694d28d328a2d63837a70451e1648702d9a9ff4a11d2f7a51aa21e5
a18c7ffd0094e3511d661822f20000000049454e44ae426082
"""),
'basi0g02': _dehex("""
89504e470d0a1a0a0000000d49484452000000200000002002000000016ba60d
1f0000000467414d41000186a031e8965f0000005149444154789c635062e860
00e17286bb609c93c370ec189494960631366e4467b3ae675dcf10f521ea0303
90c1ca006444e11643482064114a4852c710baea3f18c31918020c30410403a6
0ac1a09239009c52804d85b6d97d0000000049454e44ae426082
"""),
'basi0g04': _dehex("""
89504e470d0a1a0a0000000d4948445200000020000000200400000001e4e6f8
bf0000000467414d41000186a031e8965f000000ae49444154789c658e5111c2
301044171c141c141c041c843a287510ea20d441c041c141c141c04191102454
03994998cecd7edcecedbb9bdbc3b2c2b6457545fbc4bac1be437347f7c66a77
3c23d60db15e88f5c5627338a5416c2e691a9b475a89cd27eda12895ae8dfdab
43d61e590764f5c83a226b40d669bec307f93247701687723abf31ff83a2284b
a5b4ae6b63ac6520ad730ca4ed7b06d20e030369bd6720ed383290360406d24e
13811f2781eba9d34d07160000000049454e44ae426082
"""),
'basi0g08': _dehex("""
89504e470d0a1a0a0000000d4948445200000020000000200800000001211615
be0000000467414d41000186a031e8965f000000b549444154789cb5905d0ac2
3010849dbac81c42c47bf843cf253e8878b0aa17110f214bdca6be240f5d21a5
94ced3e49bcd322c1624115515154998aa424822a82a5624a1aa8a8b24c58f99
999908130989a04a00d76c2c09e76cf21adcb209393a6553577da17140a2c59e
70ecbfa388dff1f03b82fb82bd07f05f7cb13f80bb07ad2fd60c011c3c588eef
f1f4e03bbec7ce832dca927aea005e431b625796345307b019c845e6bfc3bb98
769d84f9efb02ea6c00f9bb9ff45e81f9f280000000049454e44ae426082
"""),
'basi0g16': _dehex("""
89504e470d0a1a0a0000000d49484452000000200000002010000000017186c9
fd0000000467414d41000186a031e8965f000000e249444154789cb5913b0ec2
301044c7490aa8f85d81c3e4301c8f53a4ca0da8902c8144b3920b4043111282
23bc4956681a6bf5fc3c5a3ba0448912d91a4de2c38dd8e380231eede4c4f7a1
4677700bec7bd9b1d344689315a3418d1a6efbe5b8305ba01f8ff4808c063e26
c60d5c81edcf6c58c535e252839e93801b15c0a70d810ae0d306b205dc32b187
272b64057e4720ff0502154034831520154034c3df81400510cdf0015c86e5cc
5c79c639fddba9dcb5456b51d7980eb52d8e7d7fa620a75120d6064641a05120
b606771a05626b401a05f1f589827cf0fe44c1f0bae0055698ee8914fffffe00
00000049454e44ae426082
"""),
'basi2c08': _dehex("""
89504e470d0a1a0a0000000d49484452000000200000002008020000018b1fdd
350000000467414d41000186a031e8965f000000f249444154789cd59341aa04
210c44abc07b78133d59d37333bd89d76868b566d10cf4675af8596431a11662
7c5688919280e312257dd6a0a4cf1a01008ee312a5f3c69c37e6fcc3f47e6776
a07f8bdaf5b40feed2d33e025e2ff4fe2d4a63e1a16d91180b736d8bc45854c5
6d951863f4a7e0b66dcf09a900f3ffa2948d4091e53ca86c048a64390f662b50
4a999660ced906182b9a01a8be00a56404a6ede182b1223b4025e32c4de34304
63457680c93aada6c99b73865aab2fc094920d901a203f5ddfe1970d28456783
26cffbafeffcd30654f46d119be4793f827387fc0d189d5bc4d69a3c23d45a7f
db803146578337df4d0a3121fc3d330000000049454e44ae426082
"""),
'basi2c16': _dehex("""
89504e470d0a1a0a0000000d4948445200000020000000201002000001db8f01
760000000467414d41000186a031e8965f0000020a49444154789cd5962173e3
3010853fcf1838cc61a1818185a53e56787fa13fa130852e3b5878b4b0b03081
b97f7030070b53e6b057a0a8912bbb9163b9f109ececbc59bd7dcf2b45492409
d66f00eb1dd83cb5497d65456aeb8e1040913b3b2c04504c936dd5a9c7e2c6eb
b1b8f17a58e8d043da56f06f0f9f62e5217b6ba3a1b76f6c9e99e8696a2a72e2
c4fb1e4d452e92ec9652b807486d12b6669be00db38d9114b0c1961e375461a5
5f76682a85c367ad6f682ff53a9c2a353191764b78bb07d8ddc3c97c1950f391
6745c7b9852c73c2f212605a466a502705c8338069c8b9e84efab941eb393a97
d4c9fd63148314209f1c1d3434e847ead6380de291d6f26a25c1ebb5047f5f24
d85c49f0f22cc1d34282c72709cab90477bf25b89d49f0f351822297e0ea9704
f34c82bc94002448ede51866e5656aef5d7c6a385cb4d80e6a538ceba04e6df2
480e9aa84ddedb413bb5c97b3838456df2d4fec2c7a706983e7474d085fae820
a841776a83073838973ac0413fea2f1dc4a06e71108fda73109bdae48954ad60
bf867aac3ce44c7c1589a711cf8a81df9b219679d96d1cec3d8bbbeaa2012626
df8c7802eda201b2d2e0239b409868171fc104ba8b76f10b4da09f6817ffc609
c413ede267fd1fbab46880c90f80eccf0013185eb48b47ba03df2bdaadef3181
cb8976f18e13188768170f98c0f844bb78cb04c62ddac59d09fc3fa25dfc1da4
14deb3df1344f70000000049454e44ae426082
"""),
'basi3p08': _dehex("""
89504e470d0a1a0a0000000d494844520000002000000020080300000133a3ba
500000000467414d41000186a031e8965f00000300504c5445224400f5ffed77
ff77cbffff110a003a77002222ffff11ff110000222200ffac5566ff66ff6666
ff01ff221200dcffffccff994444ff005555220000cbcbff44440055ff55cbcb
00331a00ffecdcedffffe4ffcbffdcdc44ff446666ff330000442200ededff66
6600ffa444ffffaaeded0000cbcbfefffffdfffeffff0133ff33552a000101ff
8888ff00aaaa010100440000888800ffe4cbba5b0022ff22663200ffff99aaaa
ff550000aaaa00cb630011ff11d4ffaa773a00ff4444dc6b0066000001ff0188
4200ecffdc6bdc00ffdcba00333300ed00ed7300ffff88994a0011ffff770000
ff8301ffbabafe7b00fffeff00cb00ff999922ffff880000ffff77008888ffdc
ff1a33000000aa33ffff009900990000000001326600ffbaff44ffffffaaff00
770000fefeaa00004a9900ffff66ff22220000998bff1155ffffff0101ff88ff
005500001111fffffefffdfea4ff4466ffffff66ff003300ffff55ff77770000
88ff44ff00110077ffff006666ffffed000100fff5ed1111ffffff44ff22ffff
eded11110088ffff00007793ff2200dcdc3333fffe00febabaff99ffff333300
63cb00baba00acff55ffffdcffff337bfe00ed00ed5555ffaaffffdcdcff5555
00000066dcdc00dc00dc83ff017777fffefeffffffcbff5555777700fefe00cb
00cb0000fe010200010000122200ffff220044449bff33ffd4aa0000559999ff
999900ba00ba2a5500ffcbcbb4ff66ff9b33ffffbaaa00aa42880053aa00ffaa
aa0000ed00babaffff1100fe00000044009999990099ffcc99ba000088008800
dc00ff93220000dcfefffeaa5300770077020100cb0000000033ffedff00ba00
ff3333edffedffc488bcff7700aa00660066002222dc0000ffcbffdcffdcff8b
110000cb00010155005500880000002201ffffcbffcbed0000ff88884400445b
ba00ffbc77ff99ff006600baffba00777773ed00fe00003300330000baff77ff
004400aaffaafffefe000011220022c4ff8800eded99ff99ff55ff002200ffb4
661100110a1100ff1111dcffbabaffff88ff88010001ff33ffb98ed362000002
a249444154789c65d0695c0b001806f03711a9904a94d24dac63292949e5a810
d244588a14ca5161d1a1323973252242d62157d12ae498c8124d25ca3a11398a
16e55a3cdffab0ffe7f77d7fcff3528645349b584c3187824d9d19d4ec2e3523
9eb0ae975cf8de02f2486d502191841b42967a1ad49e5ddc4265f69a899e26b5
e9e468181baae3a71a41b95669da8df2ea3594c1b31046d7b17bfb86592e4cbe
d89b23e8db0af6304d756e60a8f4ad378bdc2552ae5948df1d35b52143141533
33bbbbababebeb3b3bc9c9c9c6c6c0c0d7b7b535323225a5aa8a02024a4bedec
0a0a2a2bcdcd7d7cf2f3a9a9c9cdcdd8b8adcdd5b5ababa828298982824a4ab2
b21212acadbdbc1414e2e24859b9a72730302f4f49292c4c57373c9c0a0b7372
8c8c1c1c3a3a92936d6dfdfd293e3e26262a4a4eaea2424b4b5fbfbc9c323278
3c0b0ba1303abaae8ecdeeed950d6669a9a7a7a141d4de9e9d5d5cdcd2229b94
c572716132f97cb1d8db9bc3110864a39795d9db6b6a26267a7a9a98d4d6a6a7
cb76090ef6f030354d4d75766e686030545464cb393a1a1ac6c68686eae8f8f9
a9aa4644c8b66d6e1689dcdd2512a994cb35330b0991ad9f9b6b659596a6addd
d8282fafae5e5323fb8f41d01f76c22fd8061be01bfc041a0323e1002c81cd30
0b9ec027a0c930014ec035580fc3e112bc069a0b53e11c0c8095f00176c163a0
e5301baec06a580677600ddc05ba0f13e120bc81a770133ec355a017300d4ec2
0c7800bbe1219c02fa08f3e13c1c85dbb00a2ec05ea0dff00a6ec15a98027360
070c047a06d7e1085c84f1b014f6c03fa0b33018b6c0211801ebe018fc00da0a
6f61113c877eb01d4ec317a085700f26c130f80efbe132bc039a0733e106fc81
f7f017f6c10aa0d1300a0ec374780943e1382c06fa0a9b60238c83473016cec0
02f80f73fefe1072afc1e50000000049454e44ae426082
"""),
'basi6a08': _dehex("""
89504e470d0a1a0a0000000d4948445200000020000000200806000001047d4a
620000000467414d41000186a031e8965f0000012049444154789cc595414ec3
3010459fa541b8bbb26641b8069b861e8b4d12c1c112c1452a710a2a65d840d5
949041fc481ec98ae27c7f3f8d27e3e4648047600fec0d1f390fbbe2633a31e2
9389e4e4ea7bfdbf3d9a6b800ab89f1bd6b553cfcbb0679e960563d72e0a9293
b7337b9f988cc67f5f0e186d20e808042f1c97054e1309da40d02d7e27f92e03
6cbfc64df0fc3117a6210a1b6ad1a00df21c1abcf2a01944c7101b0cb568a001
909c9cf9e399cf3d8d9d4660a875405d9a60d000b05e2de55e25780b7a5268e0
622118e2399aab063a815808462f1ab86890fc2e03e48bb109ded7d26ce4bf59
0db91bac0050747fec5015ce80da0e5700281be533f0ce6d5900b59bcb00ea6d
200314cf801faab200ea752803a8d7a90c503a039f824a53f4694e7342000000
0049454e44ae426082
"""),
'basn0g01': _dehex("""
89504e470d0a1a0a0000000d49484452000000200000002001000000005b0147
590000000467414d41000186a031e8965f0000005b49444154789c2dccb10903
300c05d1ebd204b24a200b7a346f90153c82c18d0a61450751f1e08a2faaead2
a4846ccea9255306e753345712e211b221bf4b263d1b427325255e8bdab29e6f
6aca30692e9d29616ee96f3065f0bf1f1087492fd02f14c90000000049454e44
ae426082
"""),
'basn0g02': _dehex("""
89504e470d0a1a0a0000000d49484452000000200000002002000000001ca13d
890000000467414d41000186a031e8965f0000001f49444154789c6360085df5
1f8cf1308850c20053868f0133091f6390b90700bd497f818b0989a900000000
49454e44ae426082
"""),
# A version of basn0g04 dithered down to 3 bits.
'Basn0g03': _dehex("""
89504e470d0a1a0a0000000d494844520000002000000020040000000093e1c8
2900000001734249540371d88211000000fd49444154789c6d90d18906210c84
c356f22356b2889588604301b112112b11d94a96bb495cf7fe87f32d996f2689
44741cc658e39c0b118f883e1f63cc89dafbc04c0f619d7d898396c54b875517
83f3a2e7ac09a2074430e7f497f00f1138a5444f82839c5206b1f51053cca968
63258821e7f2b5438aac16fbecc052b646e709de45cf18996b29648508728612
952ca606a73566d44612b876845e9a347084ea4868d2907ff06be4436c4b41a3
a3e1774285614c5affb40dbd931a526619d9fa18e4c2be420858de1df0e69893
a0e3e5523461be448561001042b7d4a15309ce2c57aef2ba89d1c13794a109d7
b5880aa27744fc5c4aecb5e7bcef5fe528ec6293a930690000000049454e44ae
426082
"""),
'basn0g04': _dehex("""
89504e470d0a1a0a0000000d494844520000002000000020040000000093e1c8
290000000467414d41000186a031e8965f0000004849444154789c6360601014
545232367671090d4d4b2b2f6720430095dbd1418e002a77e64c720450b9ab56
912380caddbd9b1c0154ee9933e408a072efde25470095fbee1d1902001f14ee
01eaff41fa0000000049454e44ae426082
"""),
'basn0g08': _dehex("""
89504e470d0a1a0a0000000d4948445200000020000000200800000000561125
280000000467414d41000186a031e8965f0000004149444154789c6364602400
1408c8b30c05058c0f0829f8f71f3f6079301c1430ca11906764a2795c0c0605
8c8ff0cafeffcff887e67131181430cae0956564040050e5fe7135e2d8590000
000049454e44ae426082
"""),
'basn0g16': _dehex("""
89504e470d0a1a0a0000000d49484452000000200000002010000000000681f9
6b0000000467414d41000186a031e8965f0000005e49444154789cd5d2310ac0
300c4351395bef7fc6dca093c0287b32d52a04a3d98f3f3880a7b857131363a0
3a82601d089900dd82f640ca04e816dc06422640b7a03d903201ba05b7819009
d02d680fa44c603f6f07ec4ff41938cf7f0016d84bd85fae2b9fd70000000049
454e44ae426082
"""),
'basn2c08': _dehex("""
89504e470d0a1a0a0000000d4948445200000020000000200802000000fc18ed
a30000000467414d41000186a031e8965f0000004849444154789cedd5c10900
300c024085ec91fdb772133b442bf4a1f8cee12bb40d043b800a14f81ca0ede4
7d4c784081020f4a871fc284071428f0a0743823a94081bb7077a3c00182b1f9
5e0f40cf4b0000000049454e44ae426082
"""),
'basn2c16': _dehex("""
89504e470d0a1a0a0000000d4948445200000020000000201002000000ac8831
e00000000467414d41000186a031e8965f000000e549444154789cd596c10a83
301044a7e0417fcb7eb7fdadf6961e06039286266693cc7a188645e43dd6a08f
1042003e2fe09aef6472737e183d27335fcee2f35a77b702ebce742870a23397
f3edf2705dd10160f3b2815fe8ecf2027974a6b0c03f74a6e4192843e75c6c03
35e8ec3202f5e84c0181bbe8cca967a00d9df3491bb040671f2e6087ce1c2860
8d1e05f8c7ee0f1d00b667e70df44467ef26d01fbd9bc028f42860f71d188bce
fb8d3630039dbd59601e7ab3c06cf428507f0634d039afdc80123a7bb1801e7a
b1802a7a14c89f016d74ce331bf080ce9e08f8414f04bca133bfe642fe5e07bb
c4ec0000000049454e44ae426082
"""),
'basn6a08': _dehex("""
89504e470d0a1a0a0000000d4948445200000020000000200806000000737a7a
f40000000467414d41000186a031e8965f0000006f49444154789cedd6310a80
300c46e12764684fa1f73f55048f21c4ddc545781d52e85028fc1f4d28d98a01
305e7b7e9cffba33831d75054703ca06a8f90d58a0074e351e227d805c8254e3
1bb0420f5cdc2e0079208892ffe2a00136a07b4007943c1004d900195036407f
011bf00052201a9c160fb84c0000000049454e44ae426082
"""),
'cs3n3p08': _dehex("""
89504e470d0a1a0a0000000d494844520000002000000020080300000044a48a
c60000000467414d41000186a031e8965f0000000373424954030303a392a042
00000054504c544592ff0000ff9200ffff00ff0000dbff00ff6dffb600006dff
b6ff00ff9200dbff000049ffff2400ff000024ff0049ff0000ffdb00ff4900ff
b6ffff0000ff2400b6ffffdb000092ffff6d000024ffff49006dff00df702b17
0000004b49444154789c85cac70182000000b1b3625754b0edbfa72324ef7486
184ed0177a437b680bcdd0031c0ed00ea21f74852ed00a1c9ed0086da0057487
6ed0121cd6d004bda0013a421ff803224033e177f4ae260000000049454e44ae
426082
"""),
's09n3p02': _dehex("""
89504e470d0a1a0a0000000d49484452000000090000000902030000009dffee
830000000467414d41000186a031e8965f000000037342495404040477f8b5a3
0000000c504c544500ff000077ffff00ffff7700ff5600640000001f49444154
789c63600002fbff0c0c56ab19182ca381581a4283f82071200000696505c36a
437f230000000049454e44ae426082
"""),
'tbgn3p08': _dehex("""
89504e470d0a1a0a0000000d494844520000002000000020080300000044a48a
c60000000467414d41000186a031e8965f00000207504c54457f7f7fafafafab
abab110000222200737300999999510d00444400959500959595e6e600919191
8d8d8d620d00898989666600b7b700911600000000730d007373736f6f6faaaa
006b6b6b676767c41a00cccc0000f30000ef00d51e0055555567670000dd0051
515100d1004d4d4de61e0038380000b700160d0d00ab00560d00090900009500
009100008d003333332f2f2f2f2b2f2b2b000077007c7c001a05002b27000073
002b2b2b006f00bb1600272727780d002323230055004d4d00cc1e00004d00cc
1a000d00003c09006f6f00002f003811271111110d0d0d55554d090909001100
4d0900050505000d00e2e200000900000500626200a6a6a6a2a2a29e9e9e8484
00fb00fbd5d500801100800d00ea00ea555500a6a600e600e6f7f700e200e233
0500888888d900d9848484c01a007777003c3c05c8c8008080804409007c7c7c
bb00bbaa00aaa600a61e09056262629e009e9a009af322005e5e5e05050000ee
005a5a5adddd00a616008d008d00e20016050027270088110078780000c40078
00787300736f006f44444400aa00c81e004040406600663c3c3c090000550055
1a1a00343434d91e000084004d004d007c004500453c3c00ea1e00222222113c
113300331e1e1efb22001a1a1a004400afaf00270027003c001616161e001e0d
160d2f2f00808000001e00d1d1001100110d000db7b7b7090009050005b3b3b3
6d34c4230000000174524e530040e6d86600000001624b474402660b7c640000
01f249444154789c6360c0048c8c58049100575f215ee92e6161ef109cd2a15e
4b9645ce5d2c8f433aa4c24f3cbd4c98833b2314ab74a186f094b9c2c27571d2
6a2a58e4253c5cda8559057a392363854db4d9d0641973660b0b0bb76bb16656
06970997256877a07a95c75a1804b2fbcd128c80b482a0b0300f8a824276a9a8
ec6e61612b3e57ee06fbf0009619d5fac846ac5c60ed20e754921625a2daadc6
1967e29e97d2239c8aec7e61fdeca9cecebef54eb36c848517164514af16169e
866444b2b0b7b55534c815cc2ec22d89cd1353800a8473100a4485852d924a6a
412adc74e7ad1016ceed043267238c901716f633a812022998a4072267c4af02
92127005c0f811b62830054935ce017b38bf0948cc5c09955f030a24617d9d46
63371fd940b0827931cbfdf4956076ac018b592f72d45594a9b1f307f3261b1a
084bc2ad50018b1900719ba6ba4ca325d0427d3f6161449486f981144cf3100e
2a5f2a1ce8683e4ddf1b64275240c8438d98af0c729bbe07982b8a1c94201dc2
b3174c9820bcc06201585ad81b25b64a2146384e3798290c05ad280a18c0a62e
e898260c07fca80a24c076cc864b777131a00190cdfa3069035eccbc038c30e1
3e88b46d16b6acc5380d6ac202511c392f4b789aa7b0b08718765990111606c2
9e854c38e5191878fbe471e749b0112bb18902008dc473b2b2e8e72700000000
49454e44ae426082
"""),
'Tp2n3p08': _dehex("""
89504e470d0a1a0a0000000d494844520000002000000020080300000044a48a
c60000000467414d41000186a031e8965f00000300504c544502ffff80ff05ff
7f0703ff7f0180ff04ff00ffff06ff000880ff05ff7f07ffff06ff000804ff00
0180ff02ffff03ff7f02ffff80ff0503ff7f0180ffff0008ff7f0704ff00ffff
06ff000802ffffff7f0704ff0003ff7fffff0680ff050180ff04ff000180ffff
0008ffff0603ff7f80ff05ff7f0702ffffff000880ff05ffff0603ff7f02ffff
ff7f070180ff04ff00ffff06ff000880ff050180ffff7f0702ffff04ff0003ff
7fff7f0704ff0003ff7f0180ffffff06ff000880ff0502ffffffff0603ff7fff
7f0702ffff04ff000180ff80ff05ff0008ff7f07ffff0680ff0504ff00ff0008
0180ff03ff7f02ffff02ffffffff0604ff0003ff7f0180ffff000880ff05ff7f
0780ff05ff00080180ff02ffffff7f0703ff7fffff0604ff00ff7f07ff0008ff
ff0680ff0504ff0002ffff0180ff03ff7fff0008ffff0680ff0504ff000180ff
02ffff03ff7fff7f070180ff02ffff04ff00ffff06ff0008ff7f0780ff0503ff
7fffff06ff0008ff7f0780ff0502ffff03ff7f0180ff04ff0002ffffff7f07ff
ff0604ff0003ff7fff00080180ff80ff05ffff0603ff7f0180ffff000804ff00
80ff0502ffffff7f0780ff05ffff0604ff000180ffff000802ffffff7f0703ff
7fff0008ff7f070180ff03ff7f02ffff80ff05ffff0604ff00ff0008ffff0602
ffff0180ff04ff0003ff7f80ff05ff7f070180ff04ff00ff7f0780ff0502ffff
ff000803ff7fffff0602ffffff7f07ffff0680ff05ff000804ff0003ff7f0180
ff02ffff0180ffff7f0703ff7fff000804ff0080ff05ffff0602ffff04ff00ff
ff0603ff7fff7f070180ff80ff05ff000803ff7f0180ffff7f0702ffffff0008
04ff00ffff0680ff0503ff7f0180ff04ff0080ff05ffff06ff000802ffffff7f
0780ff05ff0008ff7f070180ff03ff7f04ff0002ffffffff0604ff00ff7f07ff
000880ff05ffff060180ff02ffff03ff7f80ff05ffff0602ffff0180ff03ff7f
04ff00ff7f07ff00080180ffff000880ff0502ffff04ff00ff7f0703ff7fffff
06ff0008ffff0604ff00ff7f0780ff0502ffff03ff7f0180ffdeb83387000000
f874524e53000000000000000008080808080808081010101010101010181818
1818181818202020202020202029292929292929293131313131313131393939
393939393941414141414141414a4a4a4a4a4a4a4a52525252525252525a5a5a
5a5a5a5a5a62626262626262626a6a6a6a6a6a6a6a73737373737373737b7b7b
7b7b7b7b7b83838383838383838b8b8b8b8b8b8b8b94949494949494949c9c9c
9c9c9c9c9ca4a4a4a4a4a4a4a4acacacacacacacacb4b4b4b4b4b4b4b4bdbdbd
bdbdbdbdbdc5c5c5c5c5c5c5c5cdcdcdcdcdcdcdcdd5d5d5d5d5d5d5d5dedede
dededededee6e6e6e6e6e6e6e6eeeeeeeeeeeeeeeef6f6f6f6f6f6f6f6b98ac5
ca0000012c49444154789c6360e7169150d230b475f7098d4ccc28a96ced9e32
63c1da2d7b8e9fb97af3d1fb8f3f18e8a0808953544a4dd7c4c2c9233c2621bf
b4aab17fdacce5ab36ee3a72eafaad87efbefea68702362e7159652d031b07cf
c0b8a4cce28aa68e89f316aedfb4ffd0b92bf79fbcfcfe931e0a183904e55435
8decdcbcc22292b3caaadb7b27cc5db67af3be63e72fdf78fce2d31f7a2860e5
119356d037b374f10e8a4fc92eaa6fee99347fc9caad7b0f9ebd74f7c1db2fbf
e8a180995f484645dbdccad12f38363dafbcb6a573faeca5ebb6ed3e7ce2c29d
e76fbefda38702063e0149751d537b67ff80e8d4dcc29a86bea97316add9b0e3
c0e96bf79ebdfafc971e0a587885e515f58cad5d7d43a2d2720aeadaba26cf5a
bc62fbcea3272fde7efafac37f3a28000087c0fe101bc2f85f0000000049454e
44ae426082
"""),
'tbbn1g04': _dehex("""
89504e470d0a1a0a0000000d494844520000002000000020040000000093e1c8
290000000467414d41000186a031e8965f0000000274524e530007e8f7589b00
000002624b47440000aa8d23320000013e49444154789c55d1cd4b024118c7f1
efbe6419045b6a48a72d352808b435284f9187ae9b098627a1573a19945beba5
e8129e8222af11d81e3a4545742de8ef6af6d5762e0fbf0fc33c33f36085cb76
bc4204778771b867260683ee57e13f0c922df5c719c2b3b6c6c25b2382cea4b9
9f7d4f244370746ac71f4ca88e0f173a6496749af47de8e44ba8f3bf9bdfa98a
0faf857a7dd95c7dc8d7c67c782c99727997f41eb2e3c1e554152465bb00fe8e
b692d190b718d159f4c0a45c4435915a243c58a7a4312a7a57913f05747594c6
46169866c57101e4d4ce4d511423119c419183a3530cc63db88559ae28e7342a
1e9c8122b71139b8872d6e913153224bc1f35b60e4445bd4004e20ed6682c759
1d9873b3da0fbf50137dc5c9bde84fdb2ec8bde1189e0448b63584735993c209
7a601bd2710caceba6158797285b7f2084a2f82c57c01a0000000049454e44ae
426082
"""),
'tbrn2c08': _dehex("""
89504e470d0a1a0a0000000d4948445200000020000000200802000000fc18ed
a30000000467414d41000186a031e8965f0000000674524e53007f007f007f8a
33334f00000006624b474400ff0000000033277cf3000004d649444154789cad
965f68537714c73fd912d640235e692f34d0406fa0c1663481045ab060065514
56660a295831607df0a1488715167060840a1614e6431e9cb34fd2c00a762c85
f6a10f816650c13b0cf40612e1822ddc4863bd628a8924d23d6464f9d3665dd9
f7e977ce3dbff3cd3939bfdfef6bb87dfb364782dbed065ebe7cd93acc78b4ec
a228debd7bb7bfbfbfbbbbfb7f261045311a8d261209405194274f9ea4d3e916
f15f1c3eb5dd6e4fa5fecce526239184a2b0b8486f6f617171b1f5ae4311381c
8e57af5e5dbd7a351088150a78bd389d44222c2f93cdfe66b7db8f4ee07038b6
b6b6bebf766d7e7e7e60a06432313b4ba984c3c1c4049a46b95c5a58583822c1
dbb76f27272733d1b9df853c3030c0f232562b9108cf9eb1b888d7cbf030abab
31abd5fa1f08dc6ef7e7cf9f1f3f7e1c8944745d4f1400c62c001313acad21cb
b8dd2c2c603271eb1640341aad4c6d331aa7e8c48913a150a861307ecc11e964
74899919bc5e14e56fffc404f1388502f178dceff7ef4bf0a5cfe7abb533998c
e5f9ea2f1dd88c180d64cb94412df3dd57e83a6b3b3c7a84c98420100c72fd3a
636348bae726379fe69e8e8d8dbd79f3a6558b0607079796965256479b918085
7b02db12712b6181950233023f3f647494ee6e2e5ea45864cce5b8a7fe3acffc
3aebb22c2bd5d20e22d0757d7b7bbbbdbd3d94a313bed1b0aa3cd069838b163a
8d4c59585f677292d0b84d9a995bd337def3fe6bbe5e6001989b9b6bfe27ea08
36373781542ab56573248b4c5bc843ac4048c7ab21aa24ca00534c25482828a3
8c9ee67475bbaaaab22cb722c8e57240a150301a8d219de94e44534d7d90e885
87acb0e2c4f9800731629b6c5ee14a35a6b9887d2a0032994cb9cf15dbe59650
ff7b46a04c9a749e7cc5112214266cc65c31354d5b5d5d3d90209bcd5616a552
a95c2e87f2a659bd9ee01c2cd73964e438f129a6aa9e582c363838b80f81d7eb
5555b56a2a8ad2d9d7affd0409f8015c208013fea00177b873831b0282c964f2
783c1e8fa7582cee5f81a669b5e6eeeeaee58e8559b0c233d8843c7c0b963a82
34e94b5cb2396d7d7d7db22c8ba258fb0afd43f0e2c58b919191ba9de9b4d425
118329b0c3323c8709d02041b52b4ea7f39de75d2a934a2693c0a953a76a93d4
5d157ebf7f6565a5542a553df97c5e10045dd731c130b86113cc300cbd489224
08422a952a140a95788fc763b1d41558d7a2d7af5f5fb870a1d6a3aaaacd6603
18802da84c59015bd2e6897b745d9765b99a1df0f97c0daf74e36deaf7fbcd66
73ad2797cb89a2c839880188a2e8743a8bc5a22ccbba5e376466b3b9bdbdbd21
6123413a9d0e0402b51e4dd3bababa788eb022b85caeb6b6364551b6b7b76942
43f7f727007a7a7a04a1ee8065b3595fde2768423299ac1ec6669c3973e65004
c0f8f878ad69341a33994ced2969c0d0d0502412f9f8f163f3a7fd654b474787
288ad53e74757535df6215b85cae60302849d2410aecc037f9f2e5cbd5b5c160
680eb0dbede170381c0e7ff8f0a185be3b906068684892a4ca7a6f6faff69328
8ad3d3d3f7efdfdfdbdbfb57e96868a14d0d0643381c96242997cbe5f3794010
84603078fcf8f1d6496bd14a3aba5c2ea7d369341a5555b5582c8140e0fcf9f3
1b1b1b87cf4eeb0a8063c78e45a3d19e9e1ebfdfdf5a831e844655d18093274f
9e3d7bf6d3a74f3b3b3b47c80efc05ff7af28fefb70d9b0000000049454e44ae
426082
"""),
'basn6a16': _dehex("""
89504e470d0a1a0a0000000d494844520000002000000020100600000023eaa6
b70000000467414d41000186a031e8965f00000d2249444154789cdd995f6c1c
d775c67ff38fb34b724d2ee55a8e4b04a0ac87049100cab4dbd8c6528902cb4d
10881620592e52d4325ac0905bc98a94025e71fd622cb5065ac98a0c283050c0
728a00b6e542a1d126885cd3298928891d9a0444037e904434951d4b90b84b2f
c9dde1fcebc33977a95555348f411e16dfce9d3b77ee77eebde77ce78c95a669
0ad07c17009a13edd898b87dfb1fcb7d2b4d1bff217f33df80deb1e6267df0ff
c1e6e6dfafdf1f5a7fd30f9aef66b6d546dd355bf02c40662e3307f9725a96c6
744c3031f83782f171c148dbc3bf1774f5dad1e79d6f095a3f54d4fbec5234ef
d9a2f8d73afe4f14f57ef4f42def7b44f19060f06b45bddf1c5534d77fd922be
2973a15a82e648661c6e3240aa3612ead952b604bde57458894f29deaf133bac
13d2766f5227a4a3b8cf08da7adfd6fbd6bd8a4fe9dbb43d35e3dfa3f844fbf8
9119bf4f7144094fb56333abf8a86063ca106f94b3a3b512343765e60082097f
1bb86ba72439a653519b09f5cee1ce61c897d37eedf5553580ae60f4af8af33a
b14fd400b6a0f34535c0434afc0b3a9f07147527a5fa7ca218ff56c74d74dc3f
155cfd3325fc278acf2ae1cb4a539f5f9937c457263b0bd51234c732a300cdd1
cc1840f0aaff54db0e4874ed5a9b5d6d27d4bb36746d80de72baa877ff4b275a
d7895ed1897ea4139b5143fcbb1a62560da1ed9662aaed895ec78a91c18795b8
5e07ab4af8ba128e95e682e0728bf8f2e5ae815a091a53d902ac1920d8e05f06
589de8d8d66680789f4e454fb9d9ec66cd857af796ee2d902fa73fd5bba775a2
153580ae44705ed0d37647d15697cb8f14bfa3e3e8fdf8031d47af571503357c
f30d25acedcbbf135c9a35c49766ba07ab255859e8ec03684e66860182dff8f7
0304bff6ff1c20fc81b7afdd00a71475539a536e36bb5973a19e3b923b02bde5
e4efd4003ac170eb2d13fe274157afedbd82d6fb3a9a1e85e4551d47cf7078f8
9671fe4289ebf5f2bf08d63f37c4eb4773c55a0996efeefa0ca011671d8060ca
2f0004c7fcc300e166ef0240f825efe3361f106d57d423d0723f7acacd66376b
2ed47b7a7a7a205f4ef4ac4691e0aad9aa0d41cf13741c3580a506487574ddca
61a8c403c1863ebfbcac3475168b2de28b8b3d77544bb05ce92a02aceced3c0d
d0cc65ea371b201cf1c601c24dde1c4078cedbdeb60322f50126a019bf6edc9b
39e566b39b3517eaf97c3e0fbde5e4491d45bd74537145d155b476aa0176e868
c6abebf30dbd5e525c54ac8e18e2d56abeb756827a3d970358a97416019a6f64
f60004fdfe1580d5c98e618070cc1b05887eee7e0d209a70db7d8063029889b4
c620ead78d7b33a7dc6c76b3e6427ddddbebde867c393aa7845e5403e8ca794a
d0d6fb897af5f03525fe5782f5e7046bdaef468bf88d1debc6ab25583cd17310
6079b9ab0ba059c914018245bf076075b5a303200c3c1f209a733701444fbbaf
00c4134ebb016c5d0b23614c243701cdf875e3decce9349bddacb9505fbf7dfd
76e82d87736a00f5d2b5ffd4b7dce2719a4d25ae717ee153c1abef18e257cfad
7fa45682da48ef38c052b53b0fd06864b300c151ff08c0ea431de701a287dd5f
004497dc7b01a253ee3e80b8c7f91c20f967fb6fdb7c80ada7d8683723614c24
3701cdf875e3decc29379bddacb950ef3fd47f08f2e5a61ea4aa2a3eb757cd55
13345efcfa59c12b2f19e2578ef77fb75a82854ffbee01a83f977b11a031931d
040802df07082b5e11207cc17b1e209a770700e2df0a83e409fb7580f827c230
99b06fd901fb058d6835dacd481813c94d40337eddb83773cacd66376b2ed437
bebcf165e82d2f4e4beb7f3fa6e652c2d7ee10bc78c010bfb87fe3c95a09ae9f
bd732740bd2fb700d0f865f64180e059ff044018ca0ca28a5b04883f701e0088
bfec7c0c909cb71f0448c6ec518074b375012079d9dedf66004bcfbc51eb2dd1
aadacd481813c94d40337eddb83773cacd66376b2ed487868686205fbe7c49ef
5605a73f34c4a7a787eeab96e0da81bb4e022c15ba27019a5b339300e16bf286
a8eae601e25866907cdf3e0890acb36f00245fb57f05904e59c300e92561946e
b2e600d209ab7d07f04d458dfb46ad1bd16ab49b913026929b8066fcba716fe6
949bcd6ed65ca8ef7e7cf7e3d05b7e7c8f217ee6cdddbb6a25a856f37980e0c7
fe4e80a82623c48193014846ec7180f4acf518409aca0cd28a5504e03b32c374
de1a00608a0240faaa327a4b19fe946fb6f90054dbb5f2333d022db56eb4966a
3723614c243701cdf8f556bea8a7dc6c76b3e66bd46584ddbbcebc0990cf4b0f
ff4070520c282338a7e26700ec725202b01e4bcf0258963c6f1d4d8f0030cb20
805549c520930c03584fa522b676f11600ffc03fde3e1b3489a9c9054c9aa23b
c08856a3dd8c843191dc0434e3d78d7b33a75c36fb993761f7ae5a69f72ef97f
e6ad336fed7e1c60e8bee96980bbdebbb60da07b7069062033d9dc0ae03d296f
70ab511ec071640676252902d833c916007b3e1900b0a6d2028035968e025861
ea01581369fb11488c34d18cbc95989afccca42baad65ba2d5683723614c24d7
8066fcbab8b7e96918baaf5aaa56219f975fb50a43f7c9bde90fa73f1c1a02d8
78f2e27e803b77ca08b90519315b6fe400fc1392097a9eccc0ad444500e70199
a1331f0f00d8934901c07e5d526ceb87c2d07e2579badd005a2b31a5089391b7
1253358049535a6add8856dd0146c298482e01ede27ed878b256ba7600ee3a09
c18fc1df09fe01084ec25defc1b56db0f1a4f4bd78e0e2818d2f0334e7330300
7df7c888b917e50dd9c1c60c80efcb0cbc63e1f700bce7c31700dccbd1060027
8add9b0de06c8e2f00d84962b7d7030e2a61538331b98051f92631bd253f336a
dd8856a3dd44c25c390efddfad96ae9f853b77c25201ba27c533b8bdf28b6ad0
3d084b33d2e7fa59099e9901b8f2d29597fa0f01848f78e70082117f1ca07b76
6910209b9519f895a008d031bbba05c09d8f06005c5b18b8fba25300cea6780e
c03e911c6ccf06d507b48a4fa606634a114609de929f9934c5a87511ad57cfc1
fa476aa5854fa1ef1e3910b905686e85cc24c40138198915f133d2d6dc2a7dea
7df2ccc2a752faf2cec1d577aebeb37e3b4034eeee0008dff3be0e6b923773b4
7904c0ef9119767cb4fa1500ef1361e08e452500f71561e84cc4ed3e20fab6a2
c905f40cb76a3026bf3319b91ac2e46792a6dcd801ebc6aba5da08f48ecb81c8
bd088d5f42f6417191de93908c803d0e76199292b485af41b60e8d9c3c537f0e
8211f0c7211a077707dc18b931b2ee6d80a4d7ae024491ebc24d4a708ff70680
7f25e807e8785f1878e322d6ddaf453f0770ff2dfa769b01423dbbad72a391b6
5a7c3235985629423372494cab55c8f7d64a8b27a0e7202c55a13b0f8d19c80e
4ae9ca3f015115dc3ca467c17a4c7ee95970ab10e5a54ff0ac3cd39881ee5958
1a84f03df0be0e492fd855a8d6aa35d10b4962dbb0a604a3d3ee5e80a8eee600
a24977f8660378bf0bbf00e01d0a8fb7f980f04b8aa6ce6aca8d5a7533c52753
839152c4e222f4dc512dd5eb90cbc981e8ea12cf90cd8a8bf47d89159e2741d3
7124f65b96fcd254dae258fa84a13c13043246a32129574787e49eae2b49b86d
c3e2e78b9ff7f4002415bb08907c66df0d103b4e0c104db90500ff70700c203a
ee1e82dba4c3e16e256c0acca6ceaae9afd1f612d7eb472157ac95962bd05594
7dd1598466053245088e827f44628657942a825b84e4fb601f84b4025611aca3
901e01bb024911dc0a4445f08e41f83df02b10142173149ab71baf027611ea95
7a257704201d14cd9af4d90b00f194530088cb4e09c0df1c5c0088f7393f6833
c0aa3ac156655de3bca9b34ab9716906ba07aba5e5bba1eb3358d90b9da7c533
64f6888bf47b60f521e8380fe10be03d2feac17900927560df40f4e48f805960
50328d648bf4893f9067c217a0631656b7c898c122847bc07b03a2d3e0ee85e4
33b0ef867450c4fad2ecd26cf7168074c0ba0c904cdac300c9cfec4701924df6
1cdca61e10685c6f7d52d0caba1498972f43d740adb4b2009d7d7220b20e3473
90a943d00ffe959bb6eac3e0fe42ea49ee00c45f06e76329b1dabf127d690d80
5581b408f63c2403e0cc433c00ee658836803b0fd100747c04ab5f917704fd10
d5c1cd41ec801343d207f602a403605d86e5f9e5f9ae0d00e994556833806685
c931fb709b0f08b4e869bea5c827859549e82c544b8d29c816a0390999613920
7e610d5727a16318c2003c1fa24be0de2b32caf92224e7c17e5004b6350c4c01
05601218066b0ad28224e149019c086257ca315102de2712903bde97b8144d82
3b2c6ac52d403c054e019249b087f53d0558995a99ea946c70cc927458b3c1ff
550f30050df988d4284376b4566a8e416654cc921985e037e0df0fc131f00f4b
acf0c6211c036f14a239703741740adc7da227edd7e56b833d0ae92549b4d357
25dfb49ed2ff63908e6adf27d6d0dda7638d4154d2778daca17f58e61297c129
41f233b01f5dc3740cac51688c35c6b22580f48224fee9b83502569a66b629f1
09f3713473413e2666e7fe6f6c6efefdfafda1f56f6e06f93496d9d67cb7366a
9964b6f92e64b689196ec6c604646fd3fe4771ff1bf03f65d8ecc3addbb5f300
00000049454e44ae426082
"""),
}
def test_suite(options, args):
"""
Create a PNG test image and write the file to stdout.
"""
# Below is a big stack of test image generators.
# They're all really tiny, so PEP 8 rules are suspended.
def test_gradient_horizontal_lr(x, y): return x
def test_gradient_horizontal_rl(x, y): return 1-x
def test_gradient_vertical_tb(x, y): return y
def test_gradient_vertical_bt(x, y): return 1-y
def test_radial_tl(x, y): return max(1-math.sqrt(x*x+y*y), 0.0)
def test_radial_center(x, y): return test_radial_tl(x-0.5, y-0.5)
def test_radial_tr(x, y): return test_radial_tl(1-x, y)
def test_radial_bl(x, y): return test_radial_tl(x, 1-y)
def test_radial_br(x, y): return test_radial_tl(1-x, 1-y)
def test_stripe(x, n): return float(int(x*n) & 1)
def test_stripe_h_2(x, y): return test_stripe(x, 2)
def test_stripe_h_4(x, y): return test_stripe(x, 4)
def test_stripe_h_10(x, y): return test_stripe(x, 10)
def test_stripe_v_2(x, y): return test_stripe(y, 2)
def test_stripe_v_4(x, y): return test_stripe(y, 4)
def test_stripe_v_10(x, y): return test_stripe(y, 10)
def test_stripe_lr_10(x, y): return test_stripe(x+y, 10)
def test_stripe_rl_10(x, y): return test_stripe(1+x-y, 10)
def test_checker(x, y, n): return float((int(x*n) & 1) ^ (int(y*n) & 1))
def test_checker_8(x, y): return test_checker(x, y, 8)
def test_checker_15(x, y): return test_checker(x, y, 15)
def test_zero(x, y): return 0
def test_one(x, y): return 1
test_patterns = {
'GLR': test_gradient_horizontal_lr,
'GRL': test_gradient_horizontal_rl,
'GTB': test_gradient_vertical_tb,
'GBT': test_gradient_vertical_bt,
'RTL': test_radial_tl,
'RTR': test_radial_tr,
'RBL': test_radial_bl,
'RBR': test_radial_br,
'RCTR': test_radial_center,
'HS2': test_stripe_h_2,
'HS4': test_stripe_h_4,
'HS10': test_stripe_h_10,
'VS2': test_stripe_v_2,
'VS4': test_stripe_v_4,
'VS10': test_stripe_v_10,
'LRS': test_stripe_lr_10,
'RLS': test_stripe_rl_10,
'CK8': test_checker_8,
'CK15': test_checker_15,
'ZERO': test_zero,
'ONE': test_one,
}
def test_pattern(width, height, bitdepth, pattern):
"""Create a single plane (monochrome) test pattern. Returns a
flat row flat pixel array.
"""
maxval = 2**bitdepth-1
if maxval > 255:
a = array('H')
else:
a = array('B')
fw = float(width)
fh = float(height)
pfun = test_patterns[pattern]
for y in range(height):
fy = float(y)/fh
for x in range(width):
a.append(int(round(pfun(float(x)/fw, fy) * maxval)))
return a
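    # For example (a sketch; exact values depend on round() behaviour):
    #   test_pattern(4, 1, 8, 'GLR')  ->  array('B', [0, 64, 128, 191])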
def test_rgba(size=256, bitdepth=8,
red="GTB", green="GLR", blue="RTL", alpha=None):
"""
Create a test image. Each channel is generated from the
specified pattern; any channel apart from red can be set to
None, which will cause it not to be in the image. It
is possible to create all PNG channel types (L, RGB, LA, RGBA),
as well as non PNG channel types (RGA, and so on).
"""
i = test_pattern(size, size, bitdepth, red)
psize = 1
for channel in (green, blue, alpha):
if channel:
c = test_pattern(size, size, bitdepth, channel)
i = interleave_planes(i, c, psize, 1)
psize += 1
return i
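    # For example, test_rgba(2, 8, red='ONE', green='ZERO', blue=None,
    # alpha=None) interleaves just two planes -- an RG image, one of the
    # "non PNG channel types" mentioned in the docstring above.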
def pngsuite_image(name):
"""
Create a test image by reading an internal copy of the files
from the PngSuite. Returned in flat row flat pixel format.
"""
if name not in _pngsuite:
raise NotImplementedError("cannot find PngSuite file %s (use -L for a list)" % name)
r = Reader(bytes=_pngsuite[name])
w,h,pixels,meta = r.asDirect()
assert w == h
# LAn for n < 8 is a special case for which we need to rescale
# the data.
if meta['greyscale'] and meta['alpha'] and meta['bitdepth'] < 8:
factor = 255 // (2**meta['bitdepth']-1)
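            # e.g. bitdepth 4 gives factor 255 // 15 == 17, mapping
            # samples 0..15 onto 0..255.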
def rescale(data):
for row in data:
yield map(factor.__mul__, row)
pixels = rescale(pixels)
meta['bitdepth'] = 8
arraycode = 'BH'[meta['bitdepth']>8]
return w, array(arraycode, itertools.chain(*pixels)), meta
# The body of test_suite()
size = 256
if options.test_size:
size = options.test_size
options.bitdepth = options.test_depth
    options.greyscale = bool(options.test_black)
kwargs = {}
if options.test_red:
kwargs["red"] = options.test_red
if options.test_green:
kwargs["green"] = options.test_green
if options.test_blue:
kwargs["blue"] = options.test_blue
if options.test_alpha:
kwargs["alpha"] = options.test_alpha
if options.greyscale:
if options.test_red or options.test_green or options.test_blue:
raise ValueError("cannot specify colours (R, G, B) when greyscale image (black channel, K) is specified")
kwargs["red"] = options.test_black
kwargs["green"] = None
kwargs["blue"] = None
options.alpha = bool(options.test_alpha)
if not args:
pixels = test_rgba(size, options.bitdepth, **kwargs)
else:
size,pixels,meta = pngsuite_image(args[0])
for k in ['bitdepth', 'alpha', 'greyscale']:
setattr(options, k, meta[k])
writer = Writer(size, size,
bitdepth=options.bitdepth,
transparent=options.transparent,
background=options.background,
gamma=options.gamma,
greyscale=options.greyscale,
alpha=options.alpha,
compression=options.compression,
interlace=options.interlace)
writer.write_array(sys.stdout, pixels)
def read_pam_header(infile):
"""
Read (the rest of a) PAM header. `infile` should be positioned
immediately after the initial 'P7' line (at the beginning of the
second line). Returns are as for `read_pnm_header`.
"""
# Unlike PBM, PGM, and PPM, we can read the header a line at a time.
header = dict()
while True:
l = infile.readline().strip()
if l == strtobytes('ENDHDR'):
break
if not l:
raise EOFError('PAM ended prematurely')
if l[0] == strtobytes('#'):
continue
l = l.split(None, 1)
if l[0] not in header:
header[l[0]] = l[1]
else:
header[l[0]] += strtobytes(' ') + l[1]
required = ['WIDTH', 'HEIGHT', 'DEPTH', 'MAXVAL']
required = [strtobytes(x) for x in required]
WIDTH,HEIGHT,DEPTH,MAXVAL = required
present = [x for x in required if x in header]
if len(present) != len(required):
raise Error('PAM file must specify WIDTH, HEIGHT, DEPTH, and MAXVAL')
width = int(header[WIDTH])
height = int(header[HEIGHT])
depth = int(header[DEPTH])
maxval = int(header[MAXVAL])
if (width <= 0 or
height <= 0 or
depth <= 0 or
maxval <= 0):
raise Error(
'WIDTH, HEIGHT, DEPTH, MAXVAL must all be positive integers')
return 'P7', width, height, depth, maxval
def read_pnm_header(infile, supported=('P5','P6')):
"""
Read a PNM header, returning (format,width,height,depth,maxval).
`width` and `height` are in pixels. `depth` is the number of
channels in the image; for PBM and PGM it is synthesized as 1, for
PPM as 3; for PAM images it is read from the header. `maxval` is
synthesized (as 1) for PBM images.
"""
# Generally, see http://netpbm.sourceforge.net/doc/ppm.html
# and http://netpbm.sourceforge.net/doc/pam.html
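    # For example, a 2x2 colour PPM starts "P6 2 2 255" and parses to
    # ('P6', 2, 2, 3, 255) -- depth 3 is synthesized for PPM, and the
    # type is really a byte string via strtobytes.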
supported = [strtobytes(x) for x in supported]
# Technically 'P7' must be followed by a newline, so by using
# rstrip() we are being liberal in what we accept. I think this
# is acceptable.
type = infile.read(3).rstrip()
if type not in supported:
raise NotImplementedError('file format %s not supported' % type)
if type == strtobytes('P7'):
# PAM header parsing is completely different.
return read_pam_header(infile)
# Expected number of tokens in header (3 for P4, 4 for P6)
expected = 4
pbm = ('P1', 'P4')
if type in pbm:
expected = 3
header = [type]
# We have to read the rest of the header byte by byte because the
# final whitespace character (immediately following the MAXVAL in
# the case of P6) may not be a newline. Of course all PNM files in
# the wild use a newline at this point, so it's tempting to use
# readline; but it would be wrong.
def getc():
c = infile.read(1)
if not c:
raise Error('premature EOF reading PNM header')
return c
c = getc()
while True:
# Skip whitespace that precedes a token.
while c.isspace():
c = getc()
        # Skip comments; also consume the whitespace that follows a
        # comment, so that the terminating newline does not trip the
        # digit check below.
        while c == '#':
            while c not in '\n\r':
                c = getc()
            while c.isspace():
                c = getc()
if not c.isdigit():
raise Error('unexpected character %s found in header' % c)
# According to the specification it is legal to have comments
# that appear in the middle of a token.
# This is bonkers; I've never seen it; and it's a bit awkward to
# code good lexers in Python (no goto). So we break on such
# cases.
token = strtobytes('')
while c.isdigit():
token += c
c = getc()
# Slight hack. All "tokens" are decimal integers, so convert
# them here.
header.append(int(token))
if len(header) == expected:
break
# Skip comments (again)
while c == '#':
while c not in '\n\r':
c = getc()
if not c.isspace():
raise Error('expected header to end with whitespace, not %s' % c)
if type in pbm:
# synthesize a MAXVAL
header.append(1)
depth = (1,3)[type == strtobytes('P6')]
return header[0], header[1], header[2], depth, header[3]
def write_pnm(file, width, height, pixels, meta):
"""Write a Netpbm PNM/PAM file."""
bitdepth = meta['bitdepth']
maxval = 2**bitdepth - 1
# Rudely, the number of image planes can be used to determine
# whether we are L (PGM), LA (PAM), RGB (PPM), or RGBA (PAM).
planes = meta['planes']
# Can be an assert as long as we assume that pixels and meta came
# from a PNG file.
assert planes in (1,2,3,4)
if planes in (1,3):
if 1 == planes:
# PGM
# Could generate PBM if maxval is 1, but we don't (for one
# thing, we'd have to convert the data, not just blat it
# out).
fmt = 'P5'
else:
# PPM
fmt = 'P6'
file.write('%s %d %d %d\n' % (fmt, width, height, maxval))
if planes in (2,4):
# PAM
# See http://netpbm.sourceforge.net/doc/pam.html
if 2 == planes:
tupltype = 'GRAYSCALE_ALPHA'
else:
tupltype = 'RGB_ALPHA'
file.write('P7\nWIDTH %d\nHEIGHT %d\nDEPTH %d\nMAXVAL %d\n'
'TUPLTYPE %s\nENDHDR\n' %
(width, height, planes, maxval, tupltype))
# Values per row
vpr = planes * width
# struct format
fmt = '>%d' % vpr
if maxval > 0xff:
fmt = fmt + 'H'
else:
fmt = fmt + 'B'
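    # e.g. 3 RGB pixels per row gives vpr == 9, so each row packs as
    # '>9B' (or '>9H' once maxval exceeds 0xff).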
for row in pixels:
file.write(struct.pack(fmt, *row))
file.flush()
def color_triple(color):
"""
Convert a command line colour value to a RGB triple of integers.
FIXME: Somewhere we need support for greyscale backgrounds etc.
"""
if color.startswith('#') and len(color) == 4:
return (int(color[1], 16),
int(color[2], 16),
int(color[3], 16))
if color.startswith('#') and len(color) == 7:
return (int(color[1:3], 16),
int(color[3:5], 16),
int(color[5:7], 16))
elif color.startswith('#') and len(color) == 13:
return (int(color[1:5], 16),
int(color[5:9], 16),
int(color[9:13], 16))
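    # Examples:
    #   color_triple('#fff')          -> (15, 15, 15)
    #   color_triple('#ff8000')       -> (255, 128, 0)
    #   color_triple('#ffff80000000') -> (65535, 32768, 0)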
def _main(argv):
"""
Run the PNG encoder with options from the command line.
"""
# Parse command line arguments
from optparse import OptionParser
import re
version = '%prog ' + re.sub(r'( ?\$|URL: |Rev:)', '', __version__)
parser = OptionParser(version=version)
parser.set_usage("%prog [options] [imagefile]")
parser.add_option('-r', '--read-png', default=False,
action='store_true',
help='Read PNG, write PNM')
parser.add_option("-i", "--interlace",
default=False, action="store_true",
help="create an interlaced PNG file (Adam7)")
parser.add_option("-t", "--transparent",
action="store", type="string", metavar="color",
help="mark the specified colour (#RRGGBB) as transparent")
parser.add_option("-b", "--background",
action="store", type="string", metavar="color",
help="save the specified background colour")
parser.add_option("-a", "--alpha",
action="store", type="string", metavar="pgmfile",
help="alpha channel transparency (RGBA)")
parser.add_option("-g", "--gamma",
action="store", type="float", metavar="value",
help="save the specified gamma value")
parser.add_option("-c", "--compression",
action="store", type="int", metavar="level",
help="zlib compression level (0-9)")
parser.add_option("-T", "--test",
default=False, action="store_true",
help="create a test image (a named PngSuite image if an argument is supplied)")
parser.add_option('-L', '--list',
default=False, action='store_true',
help="print list of named test images")
parser.add_option("-R", "--test-red",
action="store", type="string", metavar="pattern",
help="test pattern for the red image layer")
parser.add_option("-G", "--test-green",
action="store", type="string", metavar="pattern",
help="test pattern for the green image layer")
parser.add_option("-B", "--test-blue",
action="store", type="string", metavar="pattern",
help="test pattern for the blue image layer")
parser.add_option("-A", "--test-alpha",
action="store", type="string", metavar="pattern",
help="test pattern for the alpha image layer")
parser.add_option("-K", "--test-black",
action="store", type="string", metavar="pattern",
help="test pattern for greyscale image")
parser.add_option("-d", "--test-depth",
default=8, action="store", type="int",
metavar='NBITS',
help="create test PNGs that are NBITS bits per channel")
parser.add_option("-S", "--test-size",
action="store", type="int", metavar="size",
help="width and height of the test image")
(options, args) = parser.parse_args(args=argv[1:])
# Convert options
if options.transparent is not None:
options.transparent = color_triple(options.transparent)
if options.background is not None:
options.background = color_triple(options.background)
if options.list:
names = list(_pngsuite)
names.sort()
for name in names:
            print(name)
return
# Run regression tests
if options.test:
return test_suite(options, args)
# Prepare input and output files
if len(args) == 0:
infilename = '-'
infile = sys.stdin
elif len(args) == 1:
infilename = args[0]
infile = open(infilename, 'rb')
else:
parser.error("more than one input file")
outfile = sys.stdout
if options.read_png:
# Encode PNG to PPM
png = Reader(file=infile)
width,height,pixels,meta = png.asDirect()
write_pnm(outfile, width, height, pixels, meta)
else:
# Encode PNM to PNG
format, width, height, depth, maxval = \
read_pnm_header(infile, ('P5','P6','P7'))
# When it comes to the variety of input formats, we do something
# rather rude. Observe that L, LA, RGB, RGBA are the 4 colour
# types supported by PNG and that they correspond to 1, 2, 3, 4
# channels respectively. So we use the number of channels in
# the source image to determine which one we have. We do not
# care about TUPLTYPE.
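        # depth 1 -> L, 2 -> LA, 3 -> RGB, 4 -> RGBA; the two flags
        # below encode exactly that mapping.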
greyscale = depth <= 2
pamalpha = depth in (2,4)
supported = map(lambda x: 2**x-1, range(1,17))
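        # supported == [1, 3, 7, ..., 65535]; e.g. maxval 255 sits at
        # index 7, giving bitdepth 8.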
try:
mi = supported.index(maxval)
except ValueError:
raise NotImplementedError(
'your maxval (%s) not in supported list %s' %
(maxval, str(supported)))
bitdepth = mi+1
writer = Writer(width, height,
greyscale=greyscale,
bitdepth=bitdepth,
interlace=options.interlace,
transparent=options.transparent,
background=options.background,
alpha=bool(pamalpha or options.alpha),
gamma=options.gamma,
compression=options.compression)
if options.alpha:
pgmfile = open(options.alpha, 'rb')
format, awidth, aheight, adepth, amaxval = \
read_pnm_header(pgmfile, 'P5')
            if amaxval != 255:
raise NotImplementedError(
'maxval %s not supported for alpha channel' % amaxval)
if (awidth, aheight) != (width, height):
raise ValueError("alpha channel image size mismatch"
" (%s has %sx%s but %s has %sx%s)"
% (infilename, width, height,
options.alpha, awidth, aheight))
writer.convert_ppm_and_pgm(infile, pgmfile, outfile)
else:
writer.convert_pnm(infile, outfile)
if __name__ == '__main__':
try:
_main(sys.argv)
except Error:
e = geterror()
sys.stderr.write("%s\n" % (e,))
| bsd-3-clause | -2,471,159,973,479,759,400 | 40.001319 | 117 | 0.640284 | false |
jbornschein/y2k | caltech/sbn-nade.py | 6 | 1843 |
import numpy as np
from learning.dataset import CalTechSilhouettes
from learning.termination import LogLikelihoodIncrease, EarlyStopping
from learning.monitor import MonitorLL, DLogModelParams, SampleFromP
from learning.training import Trainer
from learning.models.rws import LayerStack
from learning.models.sbn import SBN, SBNTop
from learning.models.darn import DARN, DARNTop
from learning.models.nade import NADE, NADETop
n_vis = 28*28
dataset = CalTechSilhouettes(which_set='train')
valiset = CalTechSilhouettes(which_set='valid')
testset = CalTechSilhouettes(which_set='test')
p_layers=[
SBN(
n_X=n_vis,
n_Y=300,
),
SBN(
n_X=300,
n_Y=100,
),
SBN(
n_X=100,
n_Y=50,
),
SBN(
n_X=50,
n_Y=10,
),
SBNTop(
n_X=10,
)
]
q_layers=[
NADE(
n_Y=n_vis,
n_X=300,
),
NADE(
n_Y=300,
n_X=100,
),
NADE(
n_Y=100,
n_X=50,
),
NADE(
n_Y=50,
n_X=10,
)
]
model = LayerStack(
p_layers=p_layers,
q_layers=q_layers,
)
trainer = Trainer(
n_samples=5,
learning_rate_p=1e-3,
learning_rate_q=1e-3,
learning_rate_s=1e-3,
layer_discount=1.0,
batch_size=25,
dataset=dataset,
model=model,
termination=EarlyStopping(),
#step_monitors=[MonitorLL(data=smallset, n_samples=[1, 5, 25, 100])],
epoch_monitors=[
DLogModelParams(),
MonitorLL(name="valiset", data=valiset, n_samples=[1, 5, 25, 100]),
SampleFromP(n_samples=100)
],
final_monitors=[
MonitorLL(name="final-valiset", data=valiset, n_samples=[1, 5, 25, 100, 500, 1000]),
MonitorLL(name="final-testset", data=testset, n_samples=[1, 5, 25, 100, 500, 1000]),
],
monitor_nth_step=100,
)
| agpl-3.0 | -6,987,051,185,225,650,000 | 20.183908 | 93 | 0.592512 | false |
imatge-upc/trecvid-2015 | scripts/python/display.py | 1 | 5149 | import numpy as np
import matplotlib.pyplot as plt
import cv2
import os
import pickle
import evaluate as eval
from get_params import get_params
""" Run this to save figures displaying top 12 results of the ranking for each query."""
params = get_params()
# Image paths
DB_IMAGES = os.path.join(params['root'],'1_images', params['database'])
QUERY_IMAGES = os.path.join(params['root'],'1_images/query' + params['year'])
RANKING_PATH = os.path.join(params['root'],'7_rankings',params['net'],params['database'] + params['year'],params['distance_type'])
QUERY_DATA = os.path.join(params['root'], '4_object_proposals', 'query' + params['year'] + '_gt/csv')
if params['year'] == '2014':
GROUND_TRUTH_FILE = os.path.join(params['root'],'8_groundtruth','src','ins.search.qrels.tv14')
else:
GROUND_TRUTH_FILE = os.path.join(params['root'],'8_groundtruth','src','ins.search.qrels.tv13')
FIGURES_PATH = os.path.join(params['root'], '9_other/figures', params['distance_type'])
if not os.path.isdir(FIGURES_PATH):
os.makedirs(FIGURES_PATH)
RED = [255,0,0]
GREEN = [0,255,0]
def display(params):
QUERY_IMAGES = os.path.join(params['root'],'1_images/query' + params['year'])
ranking,db_frames,db_regions, query_images,query_regions, labels = get_data(params)
if params['year'] == '2014':
QUERY_IMAGES = os.path.join(QUERY_IMAGES,params['query_name'])
print params['query_name']
fig = plt.figure(figsize=(20,10))
labels = labels[0]
for i in range(len(query_images)):
q = query_images[i].split('/')[-1]
query_path = os.path.join(QUERY_IMAGES,q)
img_query = cv2.cvtColor( cv2.imread(query_path), cv2.COLOR_BGR2RGB)
cv2.rectangle(img_query, (int(query_regions[i][0]), int(query_regions[i][1])), (int(query_regions[i][2]), int(query_regions[i][3])), 255,5)
ax = fig.add_subplot(4, 4, i+1)
ax.imshow(img_query)
for j in range(12):
img_db = cv2.cvtColor( cv2.imread(os.path.join(DB_IMAGES,ranking[j],db_frames[j] + '.jpg')), cv2.COLOR_BGR2RGB)
cv2.rectangle(img_db, (int(db_regions[j][0]), int(db_regions[j][1])), (int(db_regions[j][2]), int(db_regions[j][3])), 255,5)
            if labels[j] == 1:
                img_db = cv2.copyMakeBorder(img_db,10,10,10,10,cv2.BORDER_CONSTANT,value=GREEN)
            else:
                img_db = cv2.copyMakeBorder(img_db,10,10,10,10,cv2.BORDER_CONSTANT,value=RED)
ax = fig.add_subplot(4, 4, 5+j)
ax.imshow(img_db)
print "Displaying..."
plt.axis('off')
plt.savefig(os.path.join(FIGURES_PATH,params['query_name'] + '.png'))
plt.close()
#plt.show()
def rerank(ranking,baseline_ranking,frames,regions):
new_ranking = []
new_frames = []
new_regions = []
for i in range(len(ranking)):
shot = ranking[i]
if shot in baseline_ranking:
new_ranking.append(shot)
new_frames.append(frames[i])
new_regions.append(regions[i])
return new_ranking,new_frames,new_regions
def get_data(params):
# Ranking info
f = open(os.path.join(RANKING_PATH,params['query_name'] + '.rank'))
ranking = pickle.load(f)
frames = pickle.load(f)
regions = pickle.load(f)
f.close()
if params['database'] =='gt_imgs':
baseline_file = os.path.join(params['root'],'2_baseline', 'dcu_caffenet',params['query_name'] + '.rank')
baseline_ranking = pickle.load(open(baseline_file,'rb'))
baseline_ranking = baseline_ranking[0:1000]
ranking,frames,regions = rerank(ranking,baseline_ranking,frames,regions)
    with open(os.path.join(QUERY_DATA, params['query_name'] + '.csv'), 'r') as f:
image_list = f.readlines()
query_images = []
regions_query = []
for pos in range( len(image_list) - 1):
line_to_read = image_list[pos + 1]
line_parts = line_to_read.split(',')
ymin = int(float(line_parts[1]))
xmin = int(float(line_parts[2]))
ymax = int(float(line_parts[3]))
xmax = int(float(line_parts[4]))
boxes = np.array([xmin,ymin,xmax,ymax])
if len(regions_query) == 0:
regions_query = np.reshape(boxes,(1,4))
else:
regions_query = np.vstack((regions_query,np.reshape(boxes,(1,4))))
query_images.append(line_parts[0])
labels, num_relevant = eval.relnotrel(GROUND_TRUTH_FILE, params['query_name'], ranking)
return ranking,frames,regions, query_images,regions_query,labels
if __name__ == '__main__':
params = get_params()
if params['year'] == '2014':
queries = range(9099,9129)
    elif params['year'] == '2013':
queries = range(9069,9099)
else:
queries = range(9129,9159)
for query in queries:
if query not in [9100,9113,9117]:
params['query_name'] = str(query)
if os.path.isfile( os.path.join( RANKING_PATH,params['query_name'] + '.rank') ):
display(params)
| mit | -868,938,235,470,008,200 | 31.435065 | 147 | 0.594096 | false |
jinxiaoye1987/RyzomCore | nel/tools/build_gamedata/processes/anim/3_install.py | 3 | 1732 | #!/usr/bin/python
#
# \file 3_install.py
# \brief Install anim
# \date 2009-03-10 13:13GMT
# \author Jan Boon (Kaetemi)
# Python port of game data build pipeline.
# Install anim
#
# NeL - MMORPG Framework <http://dev.ryzom.com/projects/nel/>
# Copyright (C) 2010 Winch Gate Property Limited
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import time, sys, os, shutil, subprocess, distutils.dir_util
sys.path.append("../../configuration")
if os.path.isfile("log.log"):
os.remove("log.log")
log = open("log.log", "w")
from scripts import *
from buildsite import *
from process import *
from tools import *
from directories import *
printLog(log, "")
printLog(log, "-------")
printLog(log, "--- Install anim")
printLog(log, "-------")
printLog(log, time.strftime("%Y-%m-%d %H:%MGMT", time.gmtime(time.time())))
printLog(log, "")
printLog(log, ">>> Install anim <<<")
srcDir = ExportBuildDirectory + "/" + AnimBuildDirectory
mkPath(log, srcDir)
destDir = InstallDirectory + "/" + AnimInstallDirectory
mkPath(log, destDir)
copyFilesNoTreeIfNeeded(log, srcDir, destDir)
printLog(log, "")
log.close()
# end of file
| agpl-3.0 | 1,373,817,630,931,381,000 | 29.385965 | 75 | 0.717667 | false |
ASMlover/study | python/proto/google/protobuf/internal/python_message.py | 1 | 42917 | # Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# http://code.google.com/p/protobuf/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# This code is meant to work on Python 2.4 and above only.
#
# TODO(robinson): Helpers for verbose, common checks like seeing if a
# descriptor's cpp_type is CPPTYPE_MESSAGE.
"""Contains a metaclass and helper functions used to create
protocol message classes from Descriptor objects at runtime.
Recall that a metaclass is the "type" of a class.
(A class is to a metaclass what an instance is to a class.)
In this case, we use the GeneratedProtocolMessageType metaclass
to inject all the useful functionality into the classes
output by the protocol compiler at compile-time.
The upshot of all this is that the real implementation
details for ALL pure-Python protocol buffers are *here in
this file*.
"""
from __future__ import unicode_literals
__author__ = '[email protected] (Will Robinson)'
import sys
if sys.version > '3':
import copyreg
def copy_reg_pickle(type, function):
return copyreg.pickle(type,function)
else:
import copy_reg
def copy_reg_pickle(type, function):
return copy_reg.pickle(type,function)
import struct
import weakref
# We use "as" to avoid name collisions with variables.
from google.protobuf.internal import containers
from google.protobuf.internal import decoder
from google.protobuf.internal import encoder
from google.protobuf.internal import enum_type_wrapper
from google.protobuf.internal import message_listener as message_listener_mod
from google.protobuf.internal import type_checkers
from google.protobuf.internal import wire_format
from google.protobuf.internal import utils
from google.protobuf.internal.utils import SimIO, bytestr_to_string, \
iteritems, range
from google.protobuf import descriptor as descriptor_mod
from google.protobuf import message as message_mod
from google.protobuf import text_format
_FieldDescriptor = descriptor_mod.FieldDescriptor
def NewMessage(bases, descriptor, dictionary):
_AddClassAttributesForNestedExtensions(descriptor, dictionary)
_AddSlots(descriptor, dictionary)
return bases
def InitMessage(descriptor, cls):
cls._decoders_by_tag = {}
cls._extensions_by_name = {}
cls._extensions_by_number = {}
if (descriptor.has_options and
descriptor.GetOptions().message_set_wire_format):
cls._decoders_by_tag[decoder.MESSAGE_SET_ITEM_TAG] = (
decoder.MessageSetItemDecoder(cls._extensions_by_number))
# Attach stuff to each FieldDescriptor for quick lookup later on.
for field in descriptor.fields:
_AttachFieldHelpers(cls, field)
_AddEnumValues(descriptor, cls)
_AddInitMethod(descriptor, cls)
_AddPropertiesForFields(descriptor, cls)
_AddPropertiesForExtensions(descriptor, cls)
_AddStaticMethods(cls)
_AddMessageMethods(descriptor, cls)
_AddPrivateHelperMethods(cls)
copy_reg_pickle(cls, lambda obj: (cls, (), obj.__getstate__()))
# Stateless helpers for GeneratedProtocolMessageType below.
# Outside clients should not access these directly.
#
# I opted not to make any of these methods on the metaclass, to make it more
# clear that I'm not really using any state there and to keep clients from
# thinking that they have direct access to these construction helpers.
def _PropertyName(proto_field_name):
"""Returns the name of the public property attribute which
clients can use to get and (in some cases) set the value
of a protocol message field.
Args:
proto_field_name: The protocol message field name, exactly
as it appears (or would appear) in a .proto file.
"""
# TODO(robinson): Escape Python keywords (e.g., yield), and test this support.
# nnorwitz makes my day by writing:
# """
# FYI. See the keyword module in the stdlib. This could be as simple as:
#
# if keyword.iskeyword(proto_field_name):
# return proto_field_name + "_"
# return proto_field_name
# """
# Kenton says: The above is a BAD IDEA. People rely on being able to use
# getattr() and setattr() to reflectively manipulate field values. If we
# rename the properties, then every such user has to also make sure to apply
# the same transformation. Note that currently if you name a field "yield",
# you can still access it just fine using getattr/setattr -- it's not even
# that cumbersome to do so.
# TODO(kenton): Remove this method entirely if/when everyone agrees with my
# position.
return proto_field_name
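  # For example, _PropertyName('optional_int32') == 'optional_int32'; even
  # a keyword like 'yield' passes through unchanged, per the discussion
  # above.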
def _VerifyExtensionHandle(message, extension_handle):
"""Verify that the given extension handle is valid."""
if not isinstance(extension_handle, _FieldDescriptor):
raise KeyError('HasExtension() expects an extension handle, got: %s' %
extension_handle)
if not extension_handle.is_extension:
raise KeyError('"%s" is not an extension.' % extension_handle.full_name)
if not extension_handle.containing_type:
raise KeyError('"%s" is missing a containing_type.'
% extension_handle.full_name)
if extension_handle.containing_type is not message.DESCRIPTOR:
raise KeyError('Extension "%s" extends message type "%s", but this '
'message is of type "%s".' %
(extension_handle.full_name,
extension_handle.containing_type.full_name,
message.DESCRIPTOR.full_name))
def _AddSlots(message_descriptor, dictionary):
"""Adds a __slots__ entry to dictionary, containing the names of all valid
attributes for this message type.
Args:
message_descriptor: A Descriptor instance describing this message type.
dictionary: Class dictionary to which we'll add a '__slots__' entry.
"""
dictionary['__slots__'] = ['_cached_byte_size',
'_cached_byte_size_dirty',
'_fields',
'_unknown_fields',
'_is_present_in_parent',
'_listener',
'_listener_for_children',
'__weakref__']
def _IsMessageSetExtension(field):
return (field.is_extension and
field.containing_type.has_options and
field.containing_type.GetOptions().message_set_wire_format and
field.type == _FieldDescriptor.TYPE_MESSAGE and
field.message_type == field.extension_scope and
field.label == _FieldDescriptor.LABEL_OPTIONAL)
def _AttachFieldHelpers(cls, field_descriptor):
is_repeated = (field_descriptor.label == _FieldDescriptor.LABEL_REPEATED)
is_packed = (field_descriptor.has_options and
field_descriptor.GetOptions().packed)
if _IsMessageSetExtension(field_descriptor):
field_encoder = encoder.MessageSetItemEncoder(field_descriptor.number)
sizer = encoder.MessageSetItemSizer(field_descriptor.number)
else:
field_encoder = type_checkers.TYPE_TO_ENCODER[field_descriptor.type](
field_descriptor.number, is_repeated, is_packed)
sizer = type_checkers.TYPE_TO_SIZER[field_descriptor.type](
field_descriptor.number, is_repeated, is_packed)
field_descriptor._encoder = field_encoder
field_descriptor._sizer = sizer
field_descriptor._default_constructor = _DefaultValueConstructorForField(
field_descriptor)
def AddDecoder(wiretype, is_packed):
tag_bytes = encoder.TagBytes(field_descriptor.number, wiretype)
cls._decoders_by_tag[tag_bytes] = (
type_checkers.TYPE_TO_DECODER[field_descriptor.type](
field_descriptor.number, is_repeated, is_packed,
field_descriptor, field_descriptor._default_constructor))
AddDecoder(type_checkers.FIELD_TYPE_TO_WIRE_TYPE[field_descriptor.type],
False)
if is_repeated and wire_format.IsTypePackable(field_descriptor.type):
# To support wire compatibility of adding packed = true, add a decoder for
# packed values regardless of the field's options.
AddDecoder(wire_format.WIRETYPE_LENGTH_DELIMITED, True)
def _AddClassAttributesForNestedExtensions(descriptor, dictionary):
extension_dict = descriptor.extensions_by_name
for extension_name, extension_field in iteritems(extension_dict):
assert extension_name not in dictionary
dictionary[extension_name] = extension_field
def _AddEnumValues(descriptor, cls):
"""Sets class-level attributes for all enum fields defined in this message.
  It also exports a class-level object that can name enum values.
Args:
descriptor: Descriptor object for this message type.
cls: Class we're constructing for this message type.
"""
for enum_type in descriptor.enum_types:
setattr(cls, enum_type.name, enum_type_wrapper.EnumTypeWrapper(enum_type))
for enum_value in enum_type.values:
setattr(cls, enum_value.name, enum_value.number)
def _DefaultValueConstructorForField(field):
"""Returns a function which returns a default value for a field.
Args:
field: FieldDescriptor object for this field.
The returned function has one argument:
message: Message instance containing this field, or a weakref proxy
of same.
That function in turn returns a default value for this field. The default
value may refer back to |message| via a weak reference.
"""
if field.label == _FieldDescriptor.LABEL_REPEATED:
if field.has_default_value and field.default_value != []:
raise ValueError('Repeated field default value not empty list: %s' % (
field.default_value))
if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
# We can't look at _concrete_class yet since it might not have
# been set. (Depends on order in which we initialize the classes).
message_type = field.message_type
def MakeRepeatedMessageDefault(message):
return containers.RepeatedCompositeFieldContainer(
message._listener_for_children, field.message_type)
return MakeRepeatedMessageDefault
else:
type_checker = type_checkers.GetTypeChecker(field.cpp_type, field.type)
def MakeRepeatedScalarDefault(message):
return containers.RepeatedScalarFieldContainer(
message._listener_for_children, type_checker)
return MakeRepeatedScalarDefault
if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
# _concrete_class may not yet be initialized.
message_type = field.message_type
def MakeSubMessageDefault(message):
result = message_type._concrete_class()
result._SetListener(message._listener_for_children)
return result
return MakeSubMessageDefault
def MakeScalarDefault(message):
# TODO(protobuf-team): This may be broken since there may not be
# default_value. Combine with has_default_value somehow.
return field.default_value
return MakeScalarDefault
def _AddInitMethod(message_descriptor, cls):
"""Adds an __init__ method to cls."""
fields = message_descriptor.fields
def init(self, **kwargs):
self._cached_byte_size = 0
self._cached_byte_size_dirty = len(kwargs) > 0
self._fields = {}
# _unknown_fields is () when empty for efficiency, and will be turned into
# a list if fields are added.
self._unknown_fields = ()
self._is_present_in_parent = False
self._listener = message_listener_mod.NullMessageListener()
self._listener_for_children = _Listener(self)
for field_name, field_value in iteritems(kwargs):
field = _GetFieldByName(message_descriptor, field_name)
if field is None:
raise TypeError("%s() got an unexpected keyword argument '%s'" %
(message_descriptor.name, field_name))
if field.label == _FieldDescriptor.LABEL_REPEATED:
copy = field._default_constructor(self)
if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: # Composite
for val in field_value:
copy.add().MergeFrom(val)
else: # Scalar
copy.extend(field_value)
self._fields[field] = copy
elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
copy = field._default_constructor(self)
copy.MergeFrom(field_value)
self._fields[field] = copy
else:
setattr(self, field_name, field_value)
init.__module__ = None
init.__doc__ = None
cls.__init__ = init
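  # So for a (hypothetical) message Msg with repeated scalar field 'xs'
  # and submessage field 'm', Msg(xs=[1, 2], m=Sub(n=3)) copies the
  # keyword values into fresh containers instead of aliasing the
  # caller's objects.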
def _GetFieldByName(message_descriptor, field_name):
"""Returns a field descriptor by field name.
Args:
message_descriptor: A Descriptor describing all fields in message.
field_name: The name of the field to retrieve.
Returns:
The field descriptor associated with the field name.
"""
try:
return message_descriptor.fields_by_name[field_name]
except KeyError:
raise ValueError('Protocol message has no "%s" field.' % field_name)
def _AddPropertiesForFields(descriptor, cls):
"""Adds properties for all fields in this protocol message type."""
for field in descriptor.fields:
_AddPropertiesForField(field, cls)
if descriptor.is_extendable:
# _ExtensionDict is just an adaptor with no state so we allocate a new one
# every time it is accessed.
cls.Extensions = property(lambda self: _ExtensionDict(self))
def _AddPropertiesForField(field, cls):
"""Adds a public property for a protocol message field.
Clients can use this property to get and (in the case
of non-repeated scalar fields) directly set the value
of a protocol message field.
Args:
field: A FieldDescriptor for this field.
cls: The class we're constructing.
"""
# Catch it if we add other types that we should
# handle specially here.
assert _FieldDescriptor.MAX_CPPTYPE == 10
constant_name = field.name.upper() + "_FIELD_NUMBER"
setattr(cls, constant_name, field.number)
if field.label == _FieldDescriptor.LABEL_REPEATED:
_AddPropertiesForRepeatedField(field, cls)
elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
_AddPropertiesForNonRepeatedCompositeField(field, cls)
else:
_AddPropertiesForNonRepeatedScalarField(field, cls)
def _AddPropertiesForRepeatedField(field, cls):
"""Adds a public property for a "repeated" protocol message field. Clients
can use this property to get the value of the field, which will be either a
_RepeatedScalarFieldContainer or _RepeatedCompositeFieldContainer (see
below).
Note that when clients add values to these containers, we perform
type-checking in the case of repeated scalar fields, and we also set any
necessary "has" bits as a side-effect.
Args:
field: A FieldDescriptor for this field.
cls: The class we're constructing.
"""
proto_field_name = field.name
property_name = _PropertyName(proto_field_name)
def getter(self):
field_value = self._fields.get(field)
if field_value is None:
# Construct a new object to represent this field.
field_value = field._default_constructor(self)
# Atomically check if another thread has preempted us and, if not, swap
# in the new object we just created. If someone has preempted us, we
# take that object and discard ours.
# WARNING: We are relying on setdefault() being atomic. This is true
# in CPython but we haven't investigated others. This warning appears
# in several other locations in this file.
field_value = self._fields.setdefault(field, field_value)
return field_value
getter.__module__ = None
getter.__doc__ = 'Getter for %s.' % proto_field_name
# We define a setter just so we can throw an exception with a more
# helpful error message.
def setter(self, new_value):
raise AttributeError('Assignment not allowed to repeated field '
'"%s" in protocol message object.' % proto_field_name)
doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name
setattr(cls, property_name, property(getter, setter, doc=doc))
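  # Hence msg.some_repeated_field = [] raises AttributeError (field name
  # hypothetical); callers must mutate in place instead, e.g. with
  # append()/extend() for scalars or add() for composites.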
def _AddPropertiesForNonRepeatedScalarField(field, cls):
"""Adds a public property for a nonrepeated, scalar protocol message field.
Clients can use this property to get and directly set the value of the field.
Note that when the client sets the value of a field by using this property,
all necessary "has" bits are set as a side-effect, and we also perform
type-checking.
Args:
field: A FieldDescriptor for this field.
cls: The class we're constructing.
"""
proto_field_name = field.name
property_name = _PropertyName(proto_field_name)
type_checker = type_checkers.GetTypeChecker(field.cpp_type, field.type)
default_value = field.default_value
valid_values = set()
def getter(self):
# TODO(protobuf-team): This may be broken since there may not be
# default_value. Combine with has_default_value somehow.
return self._fields.get(field, default_value)
getter.__module__ = None
getter.__doc__ = 'Getter for %s.' % proto_field_name
def setter(self, new_value):
type_checker.CheckValue(new_value)
self._fields[field] = new_value
# Check _cached_byte_size_dirty inline to improve performance, since scalar
# setters are called frequently.
if not self._cached_byte_size_dirty:
self._Modified()
setter.__module__ = None
setter.__doc__ = 'Setter for %s.' % proto_field_name
# Add a property to encapsulate the getter/setter.
doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name
setattr(cls, property_name, property(getter, setter, doc=doc))
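  # e.g. assigning a str to an int32 field raises TypeError via
  # CheckValue, while reading a never-set field simply returns the
  # descriptor's default_value.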
def _AddPropertiesForNonRepeatedCompositeField(field, cls):
"""Adds a public property for a nonrepeated, composite protocol message field.
A composite field is a "group" or "message" field.
Clients can use this property to get the value of the field, but cannot
assign to the property directly.
Args:
field: A FieldDescriptor for this field.
cls: The class we're constructing.
"""
# TODO(robinson): Remove duplication with similar method
# for non-repeated scalars.
proto_field_name = field.name
property_name = _PropertyName(proto_field_name)
# TODO(komarek): Can anyone explain to me why we cache the message_type this
# way, instead of referring to field.message_type inside of getter(self)?
# What if someone sets message_type later on (which makes for simpler
  # dynamic proto descriptor and class creation code).
message_type = field.message_type
def getter(self):
field_value = self._fields.get(field)
if field_value is None:
# Construct a new object to represent this field.
field_value = message_type._concrete_class() # use field.message_type?
field_value._SetListener(self._listener_for_children)
# Atomically check if another thread has preempted us and, if not, swap
# in the new object we just created. If someone has preempted us, we
# take that object and discard ours.
# WARNING: We are relying on setdefault() being atomic. This is true
# in CPython but we haven't investigated others. This warning appears
# in several other locations in this file.
field_value = self._fields.setdefault(field, field_value)
return field_value
getter.__module__ = None
getter.__doc__ = 'Getter for %s.' % proto_field_name
# We define a setter just so we can throw an exception with a more
# helpful error message.
def setter(self, new_value):
raise AttributeError('Assignment not allowed to composite field '
'"%s" in protocol message object.' % proto_field_name)
# Add a property to encapsulate the getter.
doc = 'Magic attribute generated for "%s" proto field.' % proto_field_name
setattr(cls, property_name, property(getter, setter, doc=doc))
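# Hedged usage sketch (editor's addition; field names are hypothetical):
#
#   msg.child.value = 1     # getter creates `child` lazily; setting a scalar
#                           # on it marks `child` present in its parent
#   msg.child = OtherMsg()  # AttributeError: composite fields are read-only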
def _AddPropertiesForExtensions(descriptor, cls):
"""Adds properties for all fields in this protocol message type."""
extension_dict = descriptor.extensions_by_name
for extension_name, extension_field in iteritems(extension_dict):
constant_name = extension_name.upper() + "_FIELD_NUMBER"
setattr(cls, constant_name, extension_field.number)
def _AddStaticMethods(cls):
# TODO(robinson): This probably needs to be thread-safe(?)
def RegisterExtension(extension_handle):
extension_handle.containing_type = cls.DESCRIPTOR
_AttachFieldHelpers(cls, extension_handle)
# Try to insert our extension, failing if an extension with the same number
# already exists.
actual_handle = cls._extensions_by_number.setdefault(
extension_handle.number, extension_handle)
if actual_handle is not extension_handle:
raise AssertionError(
'Extensions "%s" and "%s" both try to extend message type "%s" with '
'field number %d.' %
(extension_handle.full_name, actual_handle.full_name,
cls.DESCRIPTOR.full_name, extension_handle.number))
cls._extensions_by_name[extension_handle.full_name] = extension_handle
handle = extension_handle # avoid line wrapping
if _IsMessageSetExtension(handle):
# MessageSet extension. Also register under type name.
cls._extensions_by_name[
extension_handle.message_type.full_name] = extension_handle
cls.RegisterExtension = staticmethod(RegisterExtension)
def FromString(s):
message = cls()
message.MergeFromString(s)
return message
cls.FromString = staticmethod(FromString)
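# Hedged usage sketch (editor's addition):
#
#   msg = MyMessage.FromString(data)
#   # equivalent to: msg = MyMessage(); msg.MergeFromString(data)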
def _IsPresent(item):
"""Given a (FieldDescriptor, value) tuple from _fields, return true if the
value should be included in the list returned by ListFields()."""
if item[0].label == _FieldDescriptor.LABEL_REPEATED:
return bool(item[1])
elif item[0].cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
return item[1]._is_present_in_parent
else:
return True
def _AddListFieldsMethod(message_descriptor, cls):
"""Helper for _AddMessageMethods()."""
def ListFields(self):
all_fields = [item for item in iteritems(self._fields) if _IsPresent(item)]
all_fields.sort(key = lambda item: item[0].number)
return all_fields
cls.ListFields = ListFields
def _AddHasFieldMethod(message_descriptor, cls):
"""Helper for _AddMessageMethods()."""
singular_fields = {}
for field in message_descriptor.fields:
if field.label != _FieldDescriptor.LABEL_REPEATED:
singular_fields[field.name] = field
def HasField(self, field_name):
try:
field = singular_fields[field_name]
except KeyError:
raise ValueError(
'Protocol message has no singular "%s" field.' % field_name)
if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
value = self._fields.get(field)
return value is not None and value._is_present_in_parent
else:
return field in self._fields
cls.HasField = HasField
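# Hedged usage sketch (editor's addition): ListFields() yields only fields
# that pass _IsPresent(), sorted by field number, and HasField() is defined
# for singular fields only:
#
#   for descriptor, value in msg.ListFields():
#       print(descriptor.name, value)
#   msg.HasField('a_repeated_field')  # ValueError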
def _AddClearFieldMethod(message_descriptor, cls):
"""Helper for _AddMessageMethods()."""
def ClearField(self, field_name):
try:
field = message_descriptor.fields_by_name[field_name]
except KeyError:
raise ValueError('Protocol message has no "%s" field.' % field_name)
if field in self._fields:
# Note: If the field is a sub-message, its listener will still point
# at us. That's fine, because the worst that can happen is that it
# will call _Modified() and invalidate our byte size. Big deal.
del self._fields[field]
# Always call _Modified() -- even if nothing was changed, this is
# a mutating method, and thus calling it should cause the field to become
# present in the parent message.
self._Modified()
cls.ClearField = ClearField
def _AddClearExtensionMethod(cls):
"""Helper for _AddMessageMethods()."""
def ClearExtension(self, extension_handle):
_VerifyExtensionHandle(self, extension_handle)
# Similar to ClearField(), above.
if extension_handle in self._fields:
del self._fields[extension_handle]
self._Modified()
cls.ClearExtension = ClearExtension
def _AddClearMethod(message_descriptor, cls):
"""Helper for _AddMessageMethods()."""
def Clear(self):
# Clear fields.
self._fields = {}
self._unknown_fields = ()
self._Modified()
cls.Clear = Clear
def _AddHasExtensionMethod(cls):
"""Helper for _AddMessageMethods()."""
def HasExtension(self, extension_handle):
_VerifyExtensionHandle(self, extension_handle)
if extension_handle.label == _FieldDescriptor.LABEL_REPEATED:
raise KeyError('"%s" is repeated.' % extension_handle.full_name)
if extension_handle.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
value = self._fields.get(extension_handle)
return value is not None and value._is_present_in_parent
else:
return extension_handle in self._fields
cls.HasExtension = HasExtension
def _AddEqualsMethod(message_descriptor, cls):
"""Helper for _AddMessageMethods()."""
def __eq__(self, other):
if (not isinstance(other, message_mod.Message) or
other.DESCRIPTOR != self.DESCRIPTOR):
return False
if self is other:
return True
if not self.ListFields() == other.ListFields():
return False
# Sort unknown fields because their order shouldn't affect equality test.
unknown_fields = list(self._unknown_fields)
unknown_fields.sort()
other_unknown_fields = list(other._unknown_fields)
other_unknown_fields.sort()
return unknown_fields == other_unknown_fields
cls.__eq__ = __eq__
def _AddStrMethod(message_descriptor, cls):
"""Helper for _AddMessageMethods()."""
def __str__(self):
return bytestr_to_string(text_format.MessageToString(self))
cls.__str__ = __str__
def _AddUnicodeMethod(unused_message_descriptor, cls):
"""Helper for _AddMessageMethods()."""
def __unicode__(self):
return text_format.MessageToString(self, as_utf8=True).decode('utf-8')
cls.__unicode__ = __unicode__
def _AddSetListenerMethod(cls):
"""Helper for _AddMessageMethods()."""
def SetListener(self, listener):
if listener is None:
self._listener = message_listener_mod.NullMessageListener()
else:
self._listener = listener
cls._SetListener = SetListener
def _BytesForNonRepeatedElement(value, field_number, field_type):
"""Returns the number of bytes needed to serialize a non-repeated element.
The returned byte count includes space for tag information and any
other additional space associated with serializing value.
Args:
value: Value we're serializing.
field_number: Field number of this value. (Since the field number
is stored as part of a varint-encoded tag, this has an impact
on the total bytes required to serialize the value).
field_type: The type of the field. One of the TYPE_* constants
within FieldDescriptor.
"""
try:
fn = type_checkers.TYPE_TO_BYTE_SIZE_FN[field_type]
return fn(field_number, value)
except KeyError:
raise message_mod.EncodeError('Unrecognized field type: %d' % field_type)
def _AddByteSizeMethod(message_descriptor, cls):
"""Helper for _AddMessageMethods()."""
def ByteSize(self):
if not self._cached_byte_size_dirty:
return self._cached_byte_size
size = 0
for field_descriptor, field_value in self.ListFields():
size += field_descriptor._sizer(field_value)
for tag_bytes, value_bytes in self._unknown_fields:
size += len(tag_bytes) + len(value_bytes)
self._cached_byte_size = size
self._cached_byte_size_dirty = False
self._listener_for_children.dirty = False
return size
cls.ByteSize = ByteSize
def _AddSerializeToStringMethod(message_descriptor, cls):
"""Helper for _AddMessageMethods()."""
def SerializeToString(self):
# Check if the message has all of its required fields set.
errors = []
if not self.IsInitialized():
raise message_mod.EncodeError(
'Message %s is missing required fields: %s' % (
self.DESCRIPTOR.full_name, ','.join(self.FindInitializationErrors())))
return self.SerializePartialToString()
cls.SerializeToString = SerializeToString
def _AddSerializePartialToStringMethod(message_descriptor, cls):
"""Helper for _AddMessageMethods()."""
def SerializePartialToString(self):
out = SimIO()
self._InternalSerialize(out.write)
return out.getvalue()
cls.SerializePartialToString = SerializePartialToString
def InternalSerialize(self, write_bytes):
for field_descriptor, field_value in self.ListFields():
field_descriptor._encoder(write_bytes, field_value)
for tag_bytes, value_bytes in self._unknown_fields:
write_bytes(tag_bytes)
write_bytes(value_bytes)
cls._InternalSerialize = InternalSerialize
def _AddMergeFromStringMethod(message_descriptor, cls):
"""Helper for _AddMessageMethods()."""
def MergeFromString(self, serialized):
length = len(serialized)
try:
if self._InternalParse(serialized, 0, length) != length:
# The only reason _InternalParse would return early is if it
# encountered an end-group tag.
raise message_mod.DecodeError('Unexpected end-group tag.')
except IndexError:
raise message_mod.DecodeError('Truncated message.')
except struct.error as e:
raise message_mod.DecodeError(e)
return length # Return this for legacy reasons.
cls.MergeFromString = MergeFromString
local_ReadTag = decoder.ReadTag
local_SkipField = decoder.SkipField
decoders_by_tag = cls._decoders_by_tag
def InternalParse(self, buffer, pos, end):
self._Modified()
field_dict = self._fields
unknown_field_list = self._unknown_fields
while pos != end:
(tag_bytes, new_pos) = local_ReadTag(buffer, pos)
field_decoder = decoders_by_tag.get(tag_bytes)
if field_decoder is None:
value_start_pos = new_pos
new_pos = local_SkipField(buffer, new_pos, end, tag_bytes)
if new_pos == -1:
return pos
if not unknown_field_list:
unknown_field_list = self._unknown_fields = []
unknown_field_list.append((tag_bytes, buffer[value_start_pos:new_pos]))
pos = new_pos
else:
pos = field_decoder(buffer, new_pos, end, self, field_dict)
return pos
cls._InternalParse = InternalParse
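# Hedged round-trip sketch (editor's addition; message type is hypothetical):
#
#   data = msg.SerializeToString()   # EncodeError if required fields unset
#   clone = MyMessage()
#   n = clone.MergeFromString(data)  # returns len(data) on success
#   assert n == len(data) and clone == msg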
def _AddIsInitializedMethod(message_descriptor, cls):
"""Adds the IsInitialized and FindInitializationError methods to the
protocol message class."""
required_fields = [field for field in message_descriptor.fields
if field.label == _FieldDescriptor.LABEL_REQUIRED]
def IsInitialized(self, errors=None):
"""Checks if all required fields of a message are set.
Args:
errors: A list which, if provided, will be populated with the field
paths of all missing required fields.
Returns:
True iff the specified message has all required fields set.
"""
# Performance is critical so we avoid HasField() and ListFields().
for field in required_fields:
if (field not in self._fields or
(field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE and
not self._fields[field]._is_present_in_parent)):
if errors is not None:
errors.extend(self.FindInitializationErrors())
return False
for field, value in iteritems(self._fields):
if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
if field.label == _FieldDescriptor.LABEL_REPEATED:
for element in value:
if not element.IsInitialized():
if errors is not None:
errors.extend(self.FindInitializationErrors())
return False
elif value._is_present_in_parent and not value.IsInitialized():
if errors is not None:
errors.extend(self.FindInitializationErrors())
return False
return True
cls.IsInitialized = IsInitialized
def FindInitializationErrors(self):
"""Finds required fields which are not initialized.
Returns:
A list of strings. Each string is a path to an uninitialized field from
the top-level message, e.g. "foo.bar[5].baz".
"""
errors = []  # accumulates paths of uninitialized required fields
for field in required_fields:
if not self.HasField(field.name):
errors.append(field.name)
for field, value in self.ListFields():
if field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
if field.is_extension:
name = "(%s)" % field.full_name
else:
name = field.name
if field.label == _FieldDescriptor.LABEL_REPEATED:
for i in range(len(value)):
element = value[i]
prefix = "%s[%d]." % (name, i)
sub_errors = element.FindInitializationErrors()
errors += [ prefix + error for error in sub_errors ]
else:
prefix = name + "."
sub_errors = value.FindInitializationErrors()
errors += [ prefix + error for error in sub_errors ]
return errors
cls.FindInitializationErrors = FindInitializationErrors
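# Hedged usage sketch (editor's addition):
#
#   errors = []
#   if not msg.IsInitialized(errors):
#       # errors now holds paths such as "foo.bar[5].baz"
#       raise ValueError('missing required fields: %s' % ','.join(errors))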
def _AddMergeFromMethod(cls):
LABEL_REPEATED = _FieldDescriptor.LABEL_REPEATED
CPPTYPE_MESSAGE = _FieldDescriptor.CPPTYPE_MESSAGE
def MergeFrom(self, msg):
if not isinstance(msg, cls):
raise TypeError(
"Parameter to MergeFrom() must be instance of same class: "
"expected %s got %s." % (cls.__name__, type(msg).__name__))
assert msg is not self
self._Modified()
fields = self._fields
for field, value in iteritems(msg._fields):
if field.label == LABEL_REPEATED:
field_value = fields.get(field)
if field_value is None:
# Construct a new object to represent this field.
field_value = field._default_constructor(self)
fields[field] = field_value
field_value.MergeFrom(value)
elif field.cpp_type == CPPTYPE_MESSAGE:
if value._is_present_in_parent:
field_value = fields.get(field)
if field_value is None:
# Construct a new object to represent this field.
field_value = field._default_constructor(self)
fields[field] = field_value
field_value.MergeFrom(value)
else:
self._fields[field] = value
if msg._unknown_fields:
if not self._unknown_fields:
self._unknown_fields = []
self._unknown_fields.extend(msg._unknown_fields)
cls.MergeFrom = MergeFrom
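# Hedged usage sketch (editor's addition): MergeFrom() composes as follows:
#
#   dst.MergeFrom(src)  # repeated fields: dst extends with src's items
#                       # message fields:  merged recursively if present
#                       # scalar fields:   src's value overwrites dst's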
def _AddMessageMethods(message_descriptor, cls):
"""Adds implementations of all Message methods to cls."""
_AddListFieldsMethod(message_descriptor, cls)
_AddHasFieldMethod(message_descriptor, cls)
_AddClearFieldMethod(message_descriptor, cls)
if message_descriptor.is_extendable:
_AddClearExtensionMethod(cls)
_AddHasExtensionMethod(cls)
_AddClearMethod(message_descriptor, cls)
_AddEqualsMethod(message_descriptor, cls)
_AddStrMethod(message_descriptor, cls)
_AddUnicodeMethod(message_descriptor, cls)
_AddSetListenerMethod(cls)
_AddByteSizeMethod(message_descriptor, cls)
_AddSerializeToStringMethod(message_descriptor, cls)
_AddSerializePartialToStringMethod(message_descriptor, cls)
_AddMergeFromStringMethod(message_descriptor, cls)
_AddIsInitializedMethod(message_descriptor, cls)
_AddMergeFromMethod(cls)
def _AddPrivateHelperMethods(cls):
"""Adds implementation of private helper methods to cls."""
def Modified(self):
"""Sets the _cached_byte_size_dirty bit to true,
and propagates this to our listener iff this was a state change.
"""
# Note: Some callers check _cached_byte_size_dirty before calling
# _Modified() as an extra optimization. So, if this method is ever
# changed such that it does stuff even when _cached_byte_size_dirty is
# already true, the callers need to be updated.
if not self._cached_byte_size_dirty:
self._cached_byte_size_dirty = True
self._listener_for_children.dirty = True
self._is_present_in_parent = True
self._listener.Modified()
cls._Modified = Modified
cls.SetInParent = Modified
class _Listener(object):
"""MessageListener implementation that a parent message registers with its
child message.
In order to support semantics like:
foo.bar.baz.qux = 23
assert foo.HasField('bar')
...child objects must have back references to their parents.
This helper class is at the heart of this support.
"""
def __init__(self, parent_message):
"""Args:
parent_message: The message whose _Modified() method we should call when
we receive Modified() messages.
"""
# This listener establishes a back reference from a child (contained) object
# to its parent (containing) object. We make this a weak reference to avoid
# creating cyclic garbage when the client finishes with the 'parent' object
# in the tree.
if isinstance(parent_message, weakref.ProxyType):
self._parent_message_weakref = parent_message
else:
self._parent_message_weakref = weakref.proxy(parent_message)
# As an optimization, we also indicate directly on the listener whether
# or not the parent message is dirty. This way we can avoid traversing
# up the tree in the common case.
self.dirty = False
def Modified(self):
if self.dirty:
return
try:
# Propagate the signal to our parents iff this is the first field set.
self._parent_message_weakref._Modified()
except ReferenceError:
# We can get here if a client has kept a reference to a child object,
# and is now setting a field on it, but the child's parent has been
# garbage-collected. This is not an error.
pass
# TODO(robinson): Move elsewhere? This file is getting pretty ridiculous...
# TODO(robinson): Unify error handling of "unknown extension" crap.
# TODO(robinson): Support iteritems()-style iteration over all
# extensions with the "has" bits turned on?
class _ExtensionDict(object):
"""Dict-like container for supporting an indexable "Extensions"
field on proto instances.
Note that in all cases we expect extension handles to be
FieldDescriptors.
"""
def __init__(self, extended_message):
"""extended_message: Message instance for which we are the Extensions dict.
"""
self._extended_message = extended_message
def __getitem__(self, extension_handle):
"""Returns the current value of the given extension handle."""
_VerifyExtensionHandle(self._extended_message, extension_handle)
result = self._extended_message._fields.get(extension_handle)
if result is not None:
return result
if extension_handle.label == _FieldDescriptor.LABEL_REPEATED:
result = extension_handle._default_constructor(self._extended_message)
elif extension_handle.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
result = extension_handle.message_type._concrete_class()
try:
result._SetListener(self._extended_message._listener_for_children)
except ReferenceError:
pass
else:
# Singular scalar -- just return the default without inserting into the
# dict.
return extension_handle.default_value
# Atomically check if another thread has preempted us and, if not, swap
# in the new object we just created. If someone has preempted us, we
# take that object and discard ours.
# WARNING: We are relying on setdefault() being atomic. This is true
# in CPython but we haven't investigated others. This warning appears
# in several other locations in this file.
result = self._extended_message._fields.setdefault(
extension_handle, result)
return result
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
my_fields = self._extended_message.ListFields()
other_fields = other._extended_message.ListFields()
# Get rid of non-extension fields. ListFields() returns
# (descriptor, value) pairs, so filter on the descriptor.
my_fields = [(f, v) for (f, v) in my_fields if f.is_extension]
other_fields = [(f, v) for (f, v) in other_fields if f.is_extension]
return my_fields == other_fields
def __ne__(self, other):
return not self == other
def __hash__(self):
raise TypeError('unhashable object')
# Note that this is only meaningful for non-repeated, scalar extension
# fields. Note also that we may have to call _Modified() when we do
# successfully set a field this way, to set any necessary "has" bits in the
# ancestors of the extended message.
def __setitem__(self, extension_handle, value):
"""If extension_handle specifies a non-repeated, scalar extension
field, sets the value of that field.
"""
_VerifyExtensionHandle(self._extended_message, extension_handle)
if (extension_handle.label == _FieldDescriptor.LABEL_REPEATED or
extension_handle.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE):
raise TypeError(
'Cannot assign to extension "%s" because it is a repeated or '
'composite type.' % extension_handle.full_name)
# It's slightly wasteful to lookup the type checker each time,
# but we expect this to be a vanishingly uncommon case anyway.
type_checker = type_checkers.GetTypeChecker(
extension_handle.cpp_type, extension_handle.type)
type_checker.CheckValue(value)
self._extended_message._fields[extension_handle] = value
self._extended_message._Modified()
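# Hedged usage sketch (editor's addition; `ext` is a FieldDescriptor for a
# registered extension):
#
#   msg.Extensions[ext]       # read (default returned for absent scalars)
#   msg.Extensions[ext] = 5   # write, singular scalar extensions only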
def _FindExtensionByName(self, name):
"""Tries to find a known extension with the specified name.
Args:
name: Extension full name.
Returns:
Extension field descriptor.
"""
return self._extended_message._extensions_by_name.get(name, None)
| bsd-2-clause | 4,198,976,967,410,636,300 | 35.965547 | 80 | 0.700282 | false |
danellecline/mbari-aesa | test/split_images.py | 1 | 3285 | #!/usr/bin/env python
__author__ = 'Danelle Cline'
__copyright__ = '2016'
__license__ = 'GPL v3'
__contact__ = 'dcline at mbari.org'
__doc__ = '''
Reads in AESA training images and splits them into training and test sets
@var __date__: Date of last svn commit
@undocumented: __doc__ parser
@status: production
@license: GPL
'''
import csv
import fnmatch
import glob
import logging
import os
import shutil
import subprocess
from collections import namedtuple
from datetime import datetime
from shutil import copyfile
import numpy as np
import pandas as pd
from sklearn.cross_validation import StratifiedShuffleSplit, ShuffleSplit, train_test_split
import conf
import process
import util
if __name__ == '__main__':
try:
print 'Parsing ' + conf.ANNOTATIONS_FILE
df = pd.read_csv(conf.ANNOTATIONS_FILE, sep=',')
# files are named according to the index in the dataframe
matches = []
for root, dirnames, filenames in os.walk(conf.CROPPED_DIR):
for filename in fnmatch.filter(filenames, '*.jpg'):
matches.append(os.path.join(root, filename))
ids = [int(os.path.basename(s).replace(".jpg", "")) for s in matches]
# get the dataframes at the indexes
df_subset = df.ix[ids]
y = df_subset.pop('group')
X = df_subset
x_train, x_test, y_train, y_test = train_test_split(X, y, test_size=0.3, stratify=y) # split 70% training/30% testing
#x_test, x_val, y_test, y_val = train_test_split(x_test, y_test, test_size=0.5) # 15% for test and validation
if os.path.exists(conf.TRAIN_DIR):
shutil.rmtree(conf.TRAIN_DIR)
util.ensure_dir(conf.TRAIN_DIR)
if os.path.exists(conf.TEST_DIR):
shutil.rmtree(conf.TEST_DIR)
util.ensure_dir(conf.TEST_DIR)
print 'Copying training images to ' + conf.TRAIN_DIR + ' ...'
for index, row in x_train.iterrows():
category = row.Category
dir = ('%s%s/' % (conf.CROPPED_DIR, category.upper()))
src = '%s/%06d.jpg' % (dir, index)
dst = '%s/%06d.jpg' % (conf.TRAIN_DIR, index)
if os.path.exists(src):
copyfile(src, dst)
filenames = glob.glob(conf.TRAIN_DIR + '/*.jpg')
ids = [int(os.path.basename(s).replace(".jpg", "")) for s in filenames]
ids.sort()
ids = np.array(ids)
print "Saving %s" % conf.TRAIN_IDS
np.save(conf.TRAIN_IDS, ids)
print 'Copying test images to ' + conf.TEST_DIR + ' ...'
for index, row in x_test.iterrows():
category = row.Category
dir = ('%s%s/' % (conf.CROPPED_DIR, category.upper()))
src = '%s/%06d.jpg' % (dir, index)
dst = '%s/%06d.jpg' % (conf.TEST_DIR, index)
if os.path.exists(src):
copyfile(src, dst)
filenames = glob.glob(conf.TEST_DIR + '/*.jpg')
ids = [int(os.path.basename(s).replace(".jpg", "")) for s in filenames]
ids.sort()
ids = np.array(ids)
print "Saving %s" % conf.TEST_IDS
np.save(conf.TEST_IDS, ids)
except Exception as ex:
print ex
print 'Done'
| gpl-3.0 | 6,930,534,936,888,665,000 | 30.285714 | 125 | 0.595129 | false |
CloudServer/nova | nova/consoleauth/rpcapi.py | 51 | 3682 | # Copyright 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Client side of the consoleauth RPC API.
"""
from oslo_config import cfg
import oslo_messaging as messaging
from nova import rpc
CONF = cfg.CONF
rpcapi_cap_opt = cfg.StrOpt('consoleauth',
help='Set a version cap for messages sent to consoleauth services')
CONF.register_opt(rpcapi_cap_opt, 'upgrade_levels')
class ConsoleAuthAPI(object):
'''Client side of the consoleauth rpc API.
API version history:
* 1.0 - Initial version.
* 1.1 - Added get_backdoor_port()
* 1.2 - Added instance_uuid to authorize_console, and
delete_tokens_for_instance
... Grizzly and Havana support message version 1.2. So, any changes
to existing methods in 2.x after that point should be done such that
they can handle the version_cap being set to 1.2.
* 2.0 - Major API rev for Icehouse
... Icehouse and Juno support message version 2.0. So, any changes to
existing methods in 2.x after that point should be done such that they
can handle the version_cap being set to 2.0.
* 2.1 - Added access_url to authorize_console
... Kilo supports message version 2.1. So, any changes to existing
methods in 2.x after that point should be done such that they can
handle the version_cap being set to 2.1.
'''
VERSION_ALIASES = {
'grizzly': '1.2',
'havana': '1.2',
'icehouse': '2.0',
'juno': '2.0',
'kilo': '2.1',
}
def __init__(self):
super(ConsoleAuthAPI, self).__init__()
target = messaging.Target(topic=CONF.consoleauth_topic, version='2.1')
version_cap = self.VERSION_ALIASES.get(CONF.upgrade_levels.consoleauth,
CONF.upgrade_levels.consoleauth)
self.client = rpc.get_client(target, version_cap=version_cap)
def authorize_console(self, ctxt, token, console_type, host, port,
internal_access_path, instance_uuid,
access_url):
# The remote side doesn't return anything, but we want to block
# until it completes.
msg_args = dict(token=token, console_type=console_type,
host=host, port=port,
internal_access_path=internal_access_path,
instance_uuid=instance_uuid,
access_url=access_url)
version = '2.1'
if not self.client.can_send_version('2.1'):
version = '2.0'
del msg_args['access_url']
cctxt = self.client.prepare(version=version)
return cctxt.call(ctxt, 'authorize_console', **msg_args)
def check_token(self, ctxt, token):
cctxt = self.client.prepare()
return cctxt.call(ctxt, 'check_token', token=token)
def delete_tokens_for_instance(self, ctxt, instance_uuid):
cctxt = self.client.prepare()
return cctxt.cast(ctxt,
'delete_tokens_for_instance',
instance_uuid=instance_uuid)
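# Hedged usage sketch (editor's addition; `ctxt` is a nova RequestContext
# obtained elsewhere, and the remaining arguments are illustrative):
#
#   api = ConsoleAuthAPI()
#   api.authorize_console(ctxt, token, 'novnc', host, port,
#                         internal_access_path, instance_uuid, access_url)
#   valid = api.check_token(ctxt, token)        # blocking RPC call
#   api.delete_tokens_for_instance(ctxt, uuid)  # fire-and-forget cast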
| apache-2.0 | 6,924,779,524,255,266,000 | 36.191919 | 79 | 0.616241 | false |
h3llrais3r/SickRage | lib/tornado/test/twisted_test.py | 18 | 27826 | # Author: Ovidiu Predescu
# Date: July 2011
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Unittest for the twisted-style reactor.
"""
from __future__ import absolute_import, division, print_function
import logging
import os
import shutil
import signal
import sys
import tempfile
import threading
import warnings
from tornado.escape import utf8
from tornado import gen
from tornado.httpclient import AsyncHTTPClient
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
from tornado.platform.auto import set_close_exec
from tornado.platform.select import SelectIOLoop
from tornado.testing import bind_unused_port
from tornado.test.util import unittest
from tornado.util import import_object, PY3
from tornado.web import RequestHandler, Application
try:
import fcntl
from twisted.internet.defer import Deferred, inlineCallbacks, returnValue # type: ignore
from twisted.internet.interfaces import IReadDescriptor, IWriteDescriptor # type: ignore
from twisted.internet.protocol import Protocol # type: ignore
from twisted.python import log # type: ignore
from tornado.platform.twisted import TornadoReactor, TwistedIOLoop
from zope.interface import implementer # type: ignore
have_twisted = True
except ImportError:
have_twisted = False
# The core of Twisted 12.3.0 is available on python 3, but twisted.web is not
# so test for it separately.
try:
from twisted.web.client import Agent, readBody # type: ignore
from twisted.web.resource import Resource # type: ignore
from twisted.web.server import Site # type: ignore
# As of Twisted 15.0.0, twisted.web is present but fails our
# tests due to internal str/bytes errors.
have_twisted_web = sys.version_info < (3,)
except ImportError:
have_twisted_web = False
if PY3:
import _thread as thread
else:
import thread
skipIfNoTwisted = unittest.skipUnless(have_twisted,
"twisted module not present")
skipIfPy26 = unittest.skipIf(sys.version_info < (2, 7),
"twisted incompatible with singledispatch in py26")
def save_signal_handlers():
saved = {}
for sig in [signal.SIGINT, signal.SIGTERM, signal.SIGCHLD]:
saved[sig] = signal.getsignal(sig)
if "twisted" in repr(saved):
if not issubclass(IOLoop.configured_class(), TwistedIOLoop):
# when the global ioloop is twisted, we expect the signal
# handlers to be installed. Otherwise, it means we're not
# cleaning up after twisted properly.
raise Exception("twisted signal handlers already installed")
return saved
def restore_signal_handlers(saved):
for sig, handler in saved.items():
signal.signal(sig, handler)
class ReactorTestCase(unittest.TestCase):
def setUp(self):
self._saved_signals = save_signal_handlers()
self._io_loop = IOLoop()
self._reactor = TornadoReactor(self._io_loop)
def tearDown(self):
self._io_loop.close(all_fds=True)
restore_signal_handlers(self._saved_signals)
@skipIfNoTwisted
class ReactorWhenRunningTest(ReactorTestCase):
def test_whenRunning(self):
self._whenRunningCalled = False
self._anotherWhenRunningCalled = False
self._reactor.callWhenRunning(self.whenRunningCallback)
self._reactor.run()
self.assertTrue(self._whenRunningCalled)
self.assertTrue(self._anotherWhenRunningCalled)
def whenRunningCallback(self):
self._whenRunningCalled = True
self._reactor.callWhenRunning(self.anotherWhenRunningCallback)
self._reactor.stop()
def anotherWhenRunningCallback(self):
self._anotherWhenRunningCalled = True
@skipIfNoTwisted
class ReactorCallLaterTest(ReactorTestCase):
def test_callLater(self):
self._laterCalled = False
self._now = self._reactor.seconds()
self._timeout = 0.001
dc = self._reactor.callLater(self._timeout, self.callLaterCallback)
self.assertEqual(self._reactor.getDelayedCalls(), [dc])
self._reactor.run()
self.assertTrue(self._laterCalled)
self.assertTrue(self._called - self._now > self._timeout)
self.assertEqual(self._reactor.getDelayedCalls(), [])
def callLaterCallback(self):
self._laterCalled = True
self._called = self._reactor.seconds()
self._reactor.stop()
@skipIfNoTwisted
class ReactorTwoCallLaterTest(ReactorTestCase):
def test_callLater(self):
self._later1Called = False
self._later2Called = False
self._now = self._reactor.seconds()
self._timeout1 = 0.0005
dc1 = self._reactor.callLater(self._timeout1, self.callLaterCallback1)
self._timeout2 = 0.001
dc2 = self._reactor.callLater(self._timeout2, self.callLaterCallback2)
self.assertTrue(self._reactor.getDelayedCalls() == [dc1, dc2] or
self._reactor.getDelayedCalls() == [dc2, dc1])
self._reactor.run()
self.assertTrue(self._later1Called)
self.assertTrue(self._later2Called)
self.assertTrue(self._called1 - self._now > self._timeout1)
self.assertTrue(self._called2 - self._now > self._timeout2)
self.assertEqual(self._reactor.getDelayedCalls(), [])
def callLaterCallback1(self):
self._later1Called = True
self._called1 = self._reactor.seconds()
def callLaterCallback2(self):
self._later2Called = True
self._called2 = self._reactor.seconds()
self._reactor.stop()
@skipIfNoTwisted
class ReactorCallFromThreadTest(ReactorTestCase):
def setUp(self):
super(ReactorCallFromThreadTest, self).setUp()
self._mainThread = thread.get_ident()
def tearDown(self):
self._thread.join()
super(ReactorCallFromThreadTest, self).tearDown()
def _newThreadRun(self):
self.assertNotEqual(self._mainThread, thread.get_ident())
if hasattr(self._thread, 'ident'): # new in python 2.6
self.assertEqual(self._thread.ident, thread.get_ident())
self._reactor.callFromThread(self._fnCalledFromThread)
def _fnCalledFromThread(self):
self.assertEqual(self._mainThread, thread.get_ident())
self._reactor.stop()
def _whenRunningCallback(self):
self._thread = threading.Thread(target=self._newThreadRun)
self._thread.start()
def testCallFromThread(self):
self._reactor.callWhenRunning(self._whenRunningCallback)
self._reactor.run()
@skipIfNoTwisted
class ReactorCallInThread(ReactorTestCase):
def setUp(self):
super(ReactorCallInThread, self).setUp()
self._mainThread = thread.get_ident()
def _fnCalledInThread(self, *args, **kwargs):
self.assertNotEqual(thread.get_ident(), self._mainThread)
self._reactor.callFromThread(lambda: self._reactor.stop())
def _whenRunningCallback(self):
self._reactor.callInThread(self._fnCalledInThread)
def testCallInThread(self):
self._reactor.callWhenRunning(self._whenRunningCallback)
self._reactor.run()
if have_twisted:
@implementer(IReadDescriptor)
class Reader(object):
def __init__(self, fd, callback):
self._fd = fd
self._callback = callback
def logPrefix(self):
return "Reader"
def close(self):
self._fd.close()
def fileno(self):
return self._fd.fileno()
def readConnectionLost(self, reason):
self.close()
def connectionLost(self, reason):
self.close()
def doRead(self):
self._callback(self._fd)
@implementer(IWriteDescriptor)
class Writer(object):
def __init__(self, fd, callback):
self._fd = fd
self._callback = callback
def logPrefix(self):
return "Writer"
def close(self):
self._fd.close()
def fileno(self):
return self._fd.fileno()
def connectionLost(self, reason):
self.close()
def doWrite(self):
self._callback(self._fd)
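# Hedged sketch (editor's addition): the helpers above adapt raw pipe fds to
# Twisted's descriptor interfaces, so a test can do, e.g.:
#
#   reader = Reader(pipe_read_end, on_readable)  # on_readable(fd) is yours
#   reactor.addReader(reader)                    # reactor: TornadoReactor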
@skipIfNoTwisted
class ReactorReaderWriterTest(ReactorTestCase):
def _set_nonblocking(self, fd):
flags = fcntl.fcntl(fd, fcntl.F_GETFL)
fcntl.fcntl(fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
def setUp(self):
super(ReactorReaderWriterTest, self).setUp()
r, w = os.pipe()
self._set_nonblocking(r)
self._set_nonblocking(w)
set_close_exec(r)
set_close_exec(w)
self._p1 = os.fdopen(r, "rb", 0)
self._p2 = os.fdopen(w, "wb", 0)
def tearDown(self):
super(ReactorReaderWriterTest, self).tearDown()
self._p1.close()
self._p2.close()
def _testReadWrite(self):
"""
In this test the writer writes an 'x' to its fd. The reader
reads it, checks the value and ends the test.
"""
self.shouldWrite = True
def checkReadInput(fd):
self.assertEquals(fd.read(1), b'x')
self._reactor.stop()
def writeOnce(fd):
if self.shouldWrite:
self.shouldWrite = False
fd.write(b'x')
self._reader = Reader(self._p1, checkReadInput)
self._writer = Writer(self._p2, writeOnce)
self._reactor.addWriter(self._writer)
# Test that adding the reader twice adds it only once to
# IOLoop.
self._reactor.addReader(self._reader)
self._reactor.addReader(self._reader)
def testReadWrite(self):
self._reactor.callWhenRunning(self._testReadWrite)
self._reactor.run()
def _testNoWriter(self):
"""
In this test we have no writer. Make sure the reader doesn't
read anything.
"""
def checkReadInput(fd):
self.fail("Must not be called.")
def stopTest():
# Close the writer here since the IOLoop doesn't know
# about it.
self._writer.close()
self._reactor.stop()
self._reader = Reader(self._p1, checkReadInput)
# We create a writer, but it should never be invoked.
self._writer = Writer(self._p2, lambda fd: fd.write('x'))
# Test that adding and removing the writer leaves us with no writer.
self._reactor.addWriter(self._writer)
self._reactor.removeWriter(self._writer)
# Test that adding and removing the reader doesn't cause
# unintended effects.
self._reactor.addReader(self._reader)
# Wake up after a moment and stop the test
self._reactor.callLater(0.001, stopTest)
def testNoWriter(self):
self._reactor.callWhenRunning(self._testNoWriter)
self._reactor.run()
# Test various combinations of twisted and tornado http servers,
# http clients, and event loop interfaces.
@skipIfNoTwisted
@unittest.skipIf(not have_twisted_web, 'twisted web not present')
class CompatibilityTests(unittest.TestCase):
def setUp(self):
self.saved_signals = save_signal_handlers()
self.io_loop = IOLoop()
self.io_loop.make_current()
self.reactor = TornadoReactor(self.io_loop)
def tearDown(self):
self.reactor.disconnectAll()
self.io_loop.clear_current()
self.io_loop.close(all_fds=True)
restore_signal_handlers(self.saved_signals)
def start_twisted_server(self):
class HelloResource(Resource):
isLeaf = True
def render_GET(self, request):
return "Hello from twisted!"
site = Site(HelloResource())
port = self.reactor.listenTCP(0, site, interface='127.0.0.1')
self.twisted_port = port.getHost().port
def start_tornado_server(self):
class HelloHandler(RequestHandler):
def get(self):
self.write("Hello from tornado!")
app = Application([('/', HelloHandler)],
log_function=lambda x: None)
server = HTTPServer(app, io_loop=self.io_loop)
sock, self.tornado_port = bind_unused_port()
server.add_sockets([sock])
def run_ioloop(self):
self.stop_loop = self.io_loop.stop
self.io_loop.start()
self.reactor.fireSystemEvent('shutdown')
def run_reactor(self):
self.stop_loop = self.reactor.stop
self.stop = self.reactor.stop
self.reactor.run()
def tornado_fetch(self, url, runner):
responses = []
client = AsyncHTTPClient(self.io_loop)
def callback(response):
responses.append(response)
self.stop_loop()
client.fetch(url, callback=callback)
runner()
self.assertEqual(len(responses), 1)
responses[0].rethrow()
return responses[0]
def twisted_fetch(self, url, runner):
# http://twistedmatrix.com/documents/current/web/howto/client.html
chunks = []
client = Agent(self.reactor)
d = client.request(b'GET', utf8(url))
class Accumulator(Protocol):
def __init__(self, finished):
self.finished = finished
def dataReceived(self, data):
chunks.append(data)
def connectionLost(self, reason):
self.finished.callback(None)
def callback(response):
finished = Deferred()
response.deliverBody(Accumulator(finished))
return finished
d.addCallback(callback)
def shutdown(failure):
if hasattr(self, 'stop_loop'):
self.stop_loop()
elif failure is not None:
# loop hasn't been initialized yet; try our best to
# get an error message out. (the runner() interaction
# should probably be refactored).
try:
failure.raiseException()
except:
logging.error('exception before starting loop', exc_info=True)
d.addBoth(shutdown)
runner()
self.assertTrue(chunks)
return ''.join(chunks)
def twisted_coroutine_fetch(self, url, runner):
body = [None]
@gen.coroutine
def f():
# This is simpler than the non-coroutine version, but it cheats
# by reading the body in one blob instead of streaming it with
# a Protocol.
client = Agent(self.reactor)
response = yield client.request(b'GET', utf8(url))
with warnings.catch_warnings():
# readBody has a buggy DeprecationWarning in Twisted 15.0:
# https://twistedmatrix.com/trac/changeset/43379
warnings.simplefilter('ignore', category=DeprecationWarning)
body[0] = yield readBody(response)
self.stop_loop()
self.io_loop.add_callback(f)
runner()
return body[0]
def testTwistedServerTornadoClientIOLoop(self):
self.start_twisted_server()
response = self.tornado_fetch(
'http://127.0.0.1:%d' % self.twisted_port, self.run_ioloop)
self.assertEqual(response.body, 'Hello from twisted!')
def testTwistedServerTornadoClientReactor(self):
self.start_twisted_server()
response = self.tornado_fetch(
'http://127.0.0.1:%d' % self.twisted_port, self.run_reactor)
self.assertEqual(response.body, 'Hello from twisted!')
def testTornadoServerTwistedClientIOLoop(self):
self.start_tornado_server()
response = self.twisted_fetch(
'http://127.0.0.1:%d' % self.tornado_port, self.run_ioloop)
self.assertEqual(response, 'Hello from tornado!')
def testTornadoServerTwistedClientReactor(self):
self.start_tornado_server()
response = self.twisted_fetch(
'http://127.0.0.1:%d' % self.tornado_port, self.run_reactor)
self.assertEqual(response, 'Hello from tornado!')
@skipIfPy26
def testTornadoServerTwistedCoroutineClientIOLoop(self):
self.start_tornado_server()
response = self.twisted_coroutine_fetch(
'http://127.0.0.1:%d' % self.tornado_port, self.run_ioloop)
self.assertEqual(response, 'Hello from tornado!')
@skipIfNoTwisted
@skipIfPy26
class ConvertDeferredTest(unittest.TestCase):
def test_success(self):
@inlineCallbacks
def fn():
if False:
# inlineCallbacks doesn't work with regular functions;
# must have a yield even if it's unreachable.
yield
returnValue(42)
f = gen.convert_yielded(fn())
self.assertEqual(f.result(), 42)
def test_failure(self):
@inlineCallbacks
def fn():
if False:
yield
1 / 0
f = gen.convert_yielded(fn())
with self.assertRaises(ZeroDivisionError):
f.result()
if have_twisted:
# Import and run as much of twisted's test suite as possible.
# This is unfortunately rather dependent on implementation details,
# but there doesn't appear to be a clean all-in-one conformance test
# suite for reactors.
#
# This is a list of all test suites using the ReactorBuilder
# available in Twisted 11.0.0 and 11.1.0 (and a blacklist of
# specific test methods to be disabled).
twisted_tests = {
'twisted.internet.test.test_core.ObjectModelIntegrationTest': [],
'twisted.internet.test.test_core.SystemEventTestsBuilder': [
'test_iterate', # deliberately not supported
# Fails on TwistedIOLoop and AsyncIOLoop.
'test_runAfterCrash',
],
'twisted.internet.test.test_fdset.ReactorFDSetTestsBuilder': [
"test_lostFileDescriptor", # incompatible with epoll and kqueue
],
'twisted.internet.test.test_process.ProcessTestsBuilder': [
# Only work as root. Twisted's "skip" functionality works
# with py27+, but not unittest2 on py26.
'test_changeGID',
'test_changeUID',
# This test sometimes fails with EPIPE on a call to
# kqueue.control. Happens consistently for me with
# trollius but not asyncio or other IOLoops.
'test_childConnectionLost',
],
# Process tests appear to work on OSX 10.7, but not 10.6
# 'twisted.internet.test.test_process.PTYProcessTestsBuilder': [
# 'test_systemCallUninterruptedByChildExit',
# ],
'twisted.internet.test.test_tcp.TCPClientTestsBuilder': [
'test_badContext', # ssl-related; see also SSLClientTestsMixin
],
'twisted.internet.test.test_tcp.TCPPortTestsBuilder': [
# These use link-local addresses and cause firewall prompts on mac
'test_buildProtocolIPv6AddressScopeID',
'test_portGetHostOnIPv6ScopeID',
'test_serverGetHostOnIPv6ScopeID',
'test_serverGetPeerOnIPv6ScopeID',
],
'twisted.internet.test.test_tcp.TCPConnectionTestsBuilder': [],
'twisted.internet.test.test_tcp.WriteSequenceTests': [],
'twisted.internet.test.test_tcp.AbortConnectionTestCase': [],
'twisted.internet.test.test_threads.ThreadTestsBuilder': [],
'twisted.internet.test.test_time.TimeTestsBuilder': [],
# Extra third-party dependencies (pyOpenSSL)
# 'twisted.internet.test.test_tls.SSLClientTestsMixin': [],
'twisted.internet.test.test_udp.UDPServerTestsBuilder': [],
'twisted.internet.test.test_unix.UNIXTestsBuilder': [
# Platform-specific. These tests would be skipped automatically
# if we were running twisted's own test runner.
'test_connectToLinuxAbstractNamespace',
'test_listenOnLinuxAbstractNamespace',
# These tests use twisted's sendmsg.c extension and sometimes
# fail with what looks like uninitialized memory errors
# (more common on pypy than cpython, but I've seen it on both)
'test_sendFileDescriptor',
'test_sendFileDescriptorTriggersPauseProducing',
'test_descriptorDeliveredBeforeBytes',
'test_avoidLeakingFileDescriptors',
],
'twisted.internet.test.test_unix.UNIXDatagramTestsBuilder': [
'test_listenOnLinuxAbstractNamespace',
],
'twisted.internet.test.test_unix.UNIXPortTestsBuilder': [],
}
if sys.version_info >= (3,):
# In Twisted 15.2.0 on Python 3.4, the process tests will try to run
# but fail, due in part to interactions between Tornado's strict
# warnings-as-errors policy and Twisted's own warning handling
# (it was not obvious how to configure the warnings module to
# reconcile the two), and partly due to what looks like a packaging
# error (process_cli.py missing). For now, just skip it.
del twisted_tests['twisted.internet.test.test_process.ProcessTestsBuilder']
for test_name, blacklist in twisted_tests.items():
try:
test_class = import_object(test_name)
except (ImportError, AttributeError):
continue
for test_func in blacklist: # type: ignore
if hasattr(test_class, test_func):
# The test_func may be defined in a mixin, so clobber
# it instead of delattr()
setattr(test_class, test_func, lambda self: None)
def make_test_subclass(test_class):
class TornadoTest(test_class): # type: ignore
_reactors = ["tornado.platform.twisted._TestReactor"]
def setUp(self):
# Twisted's tests expect to be run from a temporary
# directory; they create files in their working directory
# and don't always clean up after themselves.
self.__curdir = os.getcwd()
self.__tempdir = tempfile.mkdtemp()
os.chdir(self.__tempdir)
super(TornadoTest, self).setUp() # type: ignore
def tearDown(self):
super(TornadoTest, self).tearDown() # type: ignore
os.chdir(self.__curdir)
shutil.rmtree(self.__tempdir)
def flushWarnings(self, *args, **kwargs):
# This is a hack because Twisted and Tornado have
# differing approaches to warnings in tests.
# Tornado sets up a global set of warnings filters
# in runtests.py, while Twisted patches the filter
# list in each test. The net effect is that
# Twisted's tests run with Tornado's increased
# strictness (BytesWarning and ResourceWarning are
# enabled) but without our filter rules to ignore those
# warnings from Twisted code.
filtered = []
for w in super(TornadoTest, self).flushWarnings( # type: ignore
*args, **kwargs):
if w['category'] in (BytesWarning, ResourceWarning):
continue
filtered.append(w)
return filtered
def buildReactor(self):
self.__saved_signals = save_signal_handlers()
return test_class.buildReactor(self)
def unbuildReactor(self, reactor):
test_class.unbuildReactor(self, reactor)
# Clean up file descriptors (especially epoll/kqueue
# objects) eagerly instead of leaving them for the
# GC. Unfortunately we can't do this in reactor.stop
# since twisted expects to be able to unregister
# connections in a post-shutdown hook.
reactor._io_loop.close(all_fds=True)
restore_signal_handlers(self.__saved_signals)
TornadoTest.__name__ = test_class.__name__
return TornadoTest
test_subclass = make_test_subclass(test_class)
globals().update(test_subclass.makeTestCaseClasses())
# Since we're not using twisted's test runner, it's tricky to get
# logging set up well. Most of the time it's easiest to just
# leave it turned off, but while working on these tests you may want
# to uncomment one of the other lines instead.
log.defaultObserver.stop()
# import sys; log.startLogging(sys.stderr, setStdout=0)
# log.startLoggingWithObserver(log.PythonLoggingObserver().emit, setStdout=0)
# import logging; logging.getLogger('twisted').setLevel(logging.WARNING)
# Twisted recently introduced a new logger; disable that one too.
try:
from twisted.logger import globalLogBeginner # type: ignore
except ImportError:
pass
else:
globalLogBeginner.beginLoggingTo([], redirectStandardIO=False)
if have_twisted:
class LayeredTwistedIOLoop(TwistedIOLoop):
"""Layers a TwistedIOLoop on top of a TornadoReactor on a SelectIOLoop.
This is of course silly, but is useful for testing purposes to make
sure we're implementing both sides of the various interfaces
correctly. In some tests another TornadoReactor is layered on top
of the whole stack.
"""
def initialize(self, **kwargs):
# When configured to use LayeredTwistedIOLoop we can't easily
# get the next-best IOLoop implementation, so use the lowest common
# denominator.
self.real_io_loop = SelectIOLoop(make_current=False) # type: ignore
reactor = TornadoReactor(io_loop=self.real_io_loop)
super(LayeredTwistedIOLoop, self).initialize(reactor=reactor, **kwargs)
self.add_callback(self.make_current)
def close(self, all_fds=False):
super(LayeredTwistedIOLoop, self).close(all_fds=all_fds)
# HACK: This is the same thing that test_class.unbuildReactor does.
for reader in self.reactor._internalReaders:
self.reactor.removeReader(reader)
reader.connectionLost(None)
self.real_io_loop.close(all_fds=all_fds)
def stop(self):
# One of twisted's tests fails if I don't delay crash()
# until the reactor has started, but if I move this to
# TwistedIOLoop then the tests fail when I'm *not* running
# tornado-on-twisted-on-tornado. I'm clearly missing something
# about the startup/crash semantics, but since stop and crash
# are really only used in tests it doesn't really matter.
def f():
self.reactor.crash()
# Become current again on restart. This is needed to
# override real_io_loop's claim to being the current loop.
self.add_callback(self.make_current)
self.reactor.callWhenRunning(f)
if __name__ == "__main__":
unittest.main()
| gpl-3.0 | 3,854,676,761,329,528,300 | 37.065663 | 93 | 0.624991 | false |
wbg-optronix-lab/emergence-lab | core/urls/project.py | 2 | 1961 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django.conf.urls import url
import core.views
urlpatterns = [
# url(r'^$',
# core.views.ProjectListView.as_view(), name='project_list'),
url(r'^create/$',
core.views.ProjectCreateView.as_view(), name='project_create'),
url(r'^track/$',
core.views.TrackProjectView.as_view(), name='track_project'),
# url(r'^(?P<slug>[\w-]+)/$',
# core.views.ProjectDetailView.as_view(), name='project_detail_all'),
# url(r'^(?P<slug>[\w-]+)/edit/$',
# core.views.ProjectUpdateView.as_view(), name='project_update'),
url(r'^(?P<slug>[\w-]+)/track/$',
core.views.TrackProjectRedirectView.as_view(), name='project_track'),
url(r'^(?P<slug>[\w-]+)/untrack/$',
core.views.UntrackProjectRedirectView.as_view(),
name='project_untrack'),
url(r'^(?P<slug>[\w-]+)/activate/$',
core.views.ActivateProjectRedirectView.as_view(),
name='project_activate'),
url(r'^(?P<slug>[\w-]+)/deactivate/$',
core.views.DeactivateProjectRedirectView.as_view(),
name='project_deactivate'),
# url(r'^(?P<slug>[\w-]+)/add-investigation/$',
# core.views.InvestigationCreateView.as_view(),
# name='investigation_create'),
# url(r'^(?P<project>[\w-]+)/(?P<slug>[\w-]+)/$',
# core.views.InvestigationDetailView.as_view(),
# name='investigation_detail_all'),
# url(r'^(?P<project>[\w-]+)/(?P<slug>[\w-]+)/edit/$',
# core.views.InvestigationUpdateView.as_view(),
# name='investigation_update'),
url(r'^(?P<project>[\w-]+)/(?P<slug>[\w-]+)/activate/$',
core.views.ActivateInvestigationRedirectView.as_view(),
name='investigation_activate'),
url(r'^(?P<project>[\w-]+)/(?P<slug>[\w-]+)/deactivate/$',
core.views.DeactivateInvestigationRedirectView.as_view(),
name='investigation_deactivate'),
]
| mit | -5,325,597,419,552,490,000 | 42.577778 | 77 | 0.595105 | false |
isezen/pytkm | compression.py | 1 | 3010 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# pylint: disable=C0103, C0321, W0212
"""Compression utility for 7z and zip files. Optimized for csv files."""
import os
import zipfile as zipf
import pylzma
def compress(f, rename_to=None, big_data=False, f_type='7z'):
"""
Compresses a file. Optimized for csv files.
:param f: Full file path
:param rename_to: Optional base name for the output file; defaults to f.
:param big_data: If True, reads the data from the file in chunks,
                 compresses them and writes them to the new file as it
                 goes. Uses less memory; suitable for big data.
:param f_type: File type: 7z | zip
:type f: str
:type rename_to: str
:type big_data: bool
:type f_type: str
:return: Full path of the created archive.
:rtype: str
"""
f_types = ['7z', 'zip']
if f_type not in f_types:
raise ValueError("f_type must be one of %s" % f_types)
fn = f if rename_to is None else rename_to
fn = fn + '.' + f_type
if f_type == f_types[0]:
import struct
with open(f, "rb") as f1:
# pylint: disable=E1101
c = pylzma.compressfile(f1, literalContextBits=4, eos=0,
dictionary=24, fastBytes=255)
result = c.read(5) + struct.pack('<Q', os.path.getsize(f))
with open(fn, 'wb') as f2: f2.write(result)
with open(fn, 'ab') as f2:
if big_data:
while True:
tmp = c.read(1024)
if not tmp: break
f2.write(tmp)
else:
f2.write(c.read())
elif f_type == f_types[1]:
# http://stackoverflow.com/questions/14568647/create-zip-in-python?rq=1
with zipf.ZipFile(fn, 'w', zipf.ZIP_DEFLATED) as z:
f2 = os.path.splitext(fn)[0] + os.path.splitext(f)[1]
z.write(f, os.path.basename(f2))
return fn
def decompress(f, rename_to=None):
"""
Decompresses a compressed file based on its extension.
Only supports .7z and .zip files.
:param f: Full path to file
:param rename_to: Optional output path; defaults to f without its extension.
:type f: str
:type rename_to: str
:return: Full path(s) of the extracted file(s).
"""
f_types = ['.7z', '.zip']
fn, ext = os.path.splitext(f)
if ext not in f_types:
raise ValueError("f extension must be one of %s" % f_types)
fn = fn if rename_to is None else rename_to
if ext == f_types[0]:
with open(f, "rb") as fl: cdata = fl.read()
with open(fn, 'wb') as fl:
# pylint: disable=E1101
fl.write(pylzma.decompress_compat(cdata[0:5] + cdata[13:]))
elif ext == f_types[1]:
with zipf.ZipFile(f) as z:
p = os.path.dirname(f)
z.extractall(p)
fn = z.namelist()
fn = [os.path.join(p, i) for i in fn]
if len(fn) == 1: fn = fn[0]
return fn
def read_from_zip(f):
"""
Reads the content of the first file in a zip archive.
:param f: Full path to file
:return: Decompressed data
:rtype: str
"""
byts = ''
with zipf.ZipFile(f) as z:
il = z.infolist()
if len(il) > 0:
byts = z.read(il[0].filename)
return byts.decode()
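# Hedged round-trip sketch (editor's addition; file names are hypothetical):
#
#   archive = compress('data.csv')    # -> 'data.csv.7z'
#   restored = decompress(archive)    # -> 'data.csv'
#   text = read_from_zip('data.zip')  # first member's contents as str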
| mit | -2,574,618,834,693,519,000 | 29.714286 | 79 | 0.53289 | false |
jammerful/buildbot | master/buildbot/worker_transition.py | 11 | 11459 | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
"""
Utility functions to support transition from "slave"-named API to
"worker"-named.
Use of the old API generates a Python warning which may be logged, ignored or
treated as an error using the Python builtin warnings API.
"""
from __future__ import absolute_import
from __future__ import print_function
from future.utils import iteritems
import functools
import sys
import warnings
from twisted.python.deprecate import deprecatedModuleAttribute as _deprecatedModuleAttribute
from twisted.python.deprecate import getWarningMethod
from twisted.python.deprecate import setWarningMethod
from twisted.python.versions import Version
__all__ = (
"DeprecatedWorkerNameWarning",
"deprecatedWorkerClassMethod",
"WorkerAPICompatMixin",
"setupWorkerTransition",
"deprecatedWorkerModuleAttribute",
"reportDeprecatedWorkerNameUsage",
"reportDeprecatedWorkerModuleUsage",
)
_WORKER_WARNING_MARK = "[WORKER]"
def _compat_name(new_name, compat_name=None):
"""Returns old API ("slave") name for new name ("worker").
>>> assert _compat_name("Worker") == "Slave"
>>> assert _compat_name("SomeWorkerStuff") == "SomeSlaveStuff"
>>> assert _compat_name("SomeWorker", compat_name="SomeBuildSlave") == \
"SomeBuildSlave"
If `compat_name` is not specified, the old name is constructed by replacing
in `new_name`:
"worker" -> "slave",
"Worker" -> "Slave".
For the sake of simplicity of usage, if the `compat_name` argument is
specified it will be returned as the result.
"""
if compat_name is not None:
assert "slave" in compat_name.lower()
assert new_name == "" or "worker" in new_name.lower(), new_name
return compat_name
compat_replacements = {
"worker": "slave",
"Worker": "Slave",
}
compat_name = new_name
assert "slave" not in compat_name.lower()
assert "worker" in compat_name.lower()
for new_word, old_word in iteritems(compat_replacements):
compat_name = compat_name.replace(new_word, old_word)
assert compat_name != new_name
assert "slave" in compat_name.lower()
assert "worker" not in compat_name.lower()
return compat_name
# DeprecationWarning or PendingDeprecationWarning may be used as
# the base class, but by default deprecation warnings are disabled in Python,
# so by default old-API usage warnings will be ignored - this is not what
# we want.
class DeprecatedWorkerAPIWarning(Warning):
"""Base class for deprecated API warnings."""
class DeprecatedWorkerNameWarning(DeprecatedWorkerAPIWarning):
"""Warning class for use of deprecated classes, functions, methods
and attributes.
"""
# Separate warnings about deprecated modules from other deprecated
# identifiers. Deprecated modules are loaded only once and it's hard to
# predict in tests the exact places where the warning should be issued (in
# contrast, warnings about other identifiers will be issued on every usage).
class DeprecatedWorkerModuleWarning(DeprecatedWorkerAPIWarning):
"""Warning class for use of deprecated modules."""
def reportDeprecatedWorkerNameUsage(message, stacklevel=None, filename=None,
lineno=None):
"""Hook that is ran when old API name is used.
:param stacklevel: stack level relative to the caller's frame.
Defaults to caller of the caller of this function.
"""
if filename is None:
if stacklevel is None:
# Warning will refer to the caller of the caller of this function.
stacklevel = 3
else:
stacklevel += 2
warnings.warn(DeprecatedWorkerNameWarning(message), None, stacklevel)
else:
assert stacklevel is None
if lineno is None:
lineno = 0
warnings.warn_explicit(
DeprecatedWorkerNameWarning(message),
DeprecatedWorkerNameWarning,
filename, lineno)
def reportDeprecatedWorkerModuleUsage(message, stacklevel=None):
"""Hook that is ran when old API module is used.
:param stacklevel: stack level relative to the caller's frame.
Defaults to caller of the caller of this function.
"""
if stacklevel is None:
# Warning will refer to the caller of the caller of this function.
stacklevel = 3
else:
stacklevel += 2
warnings.warn(DeprecatedWorkerModuleWarning(message), None, stacklevel)
def setupWorkerTransition():
"""Hook Twisted deprecation machinery to use custom warning class
for Worker API deprecation warnings."""
default_warn_method = getWarningMethod()
def custom_warn_method(message, category, stacklevel):
if stacklevel is not None:
stacklevel += 1
if _WORKER_WARNING_MARK in message:
# Message contains our mark - it's Worker API Renaming warning,
# issue it appropriately.
message = message.replace(_WORKER_WARNING_MARK, "")
            warnings.warn(
                DeprecatedWorkerNameWarning(message), None, stacklevel)
else:
# Other's warning message
default_warn_method(message, category, stacklevel)
setWarningMethod(custom_warn_method)
def deprecatedWorkerModuleAttribute(scope, attribute, compat_name=None,
new_name=None):
"""This is similar to Twisted's deprecatedModuleAttribute, but for
Worker API Rename warnings.
Can be used to create compatibility attributes for module-level classes,
functions and global variables.
:param scope: module scope (locals() in the context of a module)
:param attribute: module object (class, function, global variable)
:param compat_name: optional compatibility name (will be generated if not
specified)
    :param new_name: optional new name (the name of the attribute object in
    the module will be used if not specified). If an empty string is
    specified, then no new name is assumed for this attribute.
"""
module_name = scope["__name__"]
assert module_name in sys.modules, "scope must be module, i.e. locals()"
assert sys.modules[module_name].__dict__ is scope, \
"scope must be module, i.e. locals()"
if new_name is None:
scope_keys = list(scope.keys())
scope_values = list(scope.values())
attribute_name = scope_keys[scope_values.index(attribute)]
else:
attribute_name = new_name
compat_name = _compat_name(attribute_name, compat_name=compat_name)
scope[compat_name] = attribute
if attribute_name:
msg = "Use {0} instead.".format(attribute_name)
else:
msg = "Don't use it."
_deprecatedModuleAttribute(
Version("Buildbot", 0, 9, 0),
_WORKER_WARNING_MARK + msg,
module_name, compat_name)
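# Illustrative use of deprecatedWorkerModuleAttribute (hypothetical module
# defining a ``Worker`` class; must run at module level so that ``locals()``
# is the module namespace):
#     class Worker(object):
#         ...
#     deprecatedWorkerModuleAttribute(locals(), Worker)
# which exposes a deprecated ``Slave`` alias for ``Worker``.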
def deprecatedWorkerClassProperty(scope, prop, compat_name=None,
new_name=None):
"""Define compatibility class property.
Can be used to create compatibility attribute for class property.
    :param scope: class scope (locals() in the context of a class)
:param prop: property object for which compatibility name should be
created.
:param compat_name: optional compatibility name (will be generated if not
specified)
    :param new_name: optional new name (the name of the attribute object in
    the module will be used if not specified). If an empty string is
    specified, then no new name is assumed for this attribute.
"""
if new_name is None:
scope_keys = list(scope.keys())
scope_values = list(scope.values())
attribute_name = scope_keys[scope_values.index(prop)]
else:
attribute_name = new_name
compat_name = _compat_name(attribute_name, compat_name=compat_name)
if attribute_name:
advice_msg = "use '{0}' instead".format(attribute_name)
else:
advice_msg = "don't use it"
def get(self):
reportDeprecatedWorkerNameUsage(
"'{old}' property is deprecated, "
"{advice}.".format(
old=compat_name, advice=advice_msg))
return getattr(self, attribute_name)
assert compat_name not in scope
scope[compat_name] = property(get)
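# Illustrative use of deprecatedWorkerClassProperty (hypothetical class body,
# where ``workername`` is the property object defined just above the call):
#     class Builder(object):
#         @property
#         def workername(self):
#             return self._name
#         deprecatedWorkerClassProperty(locals(), workername)
# which exposes a deprecated ``slavename`` property.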
def deprecatedWorkerClassMethod(scope, method, compat_name=None):
"""Define old-named method inside class."""
method_name = method.__name__
compat_name = _compat_name(method_name, compat_name=compat_name)
assert compat_name not in scope
def old_method(self, *args, **kwargs):
reportDeprecatedWorkerNameUsage(
"'{old}' method is deprecated, use '{new}' instead.".format(
new=method_name, old=compat_name))
return getattr(self, method_name)(*args, **kwargs)
functools.update_wrapper(old_method, method)
scope[compat_name] = old_method
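# Illustrative use of deprecatedWorkerClassMethod (hypothetical class body):
#     class Connection(object):
#         def remote_getWorkerInfo(self):
#             ...
#         deprecatedWorkerClassMethod(locals(), remote_getWorkerInfo)
# which exposes a deprecated ``remote_getSlaveInfo`` method.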
class WorkerAPICompatMixin(object):
"""Mixin class for classes that have old-named worker attributes."""
def __getattr__(self, name):
if name not in self.__compat_attrs:
raise AttributeError(
"'{class_name}' object has no attribute '{attr_name}'".format(
class_name=self.__class__.__name__,
attr_name=name))
new_name = self.__compat_attrs[name]
# TODO: Log class name, operation type etc.
reportDeprecatedWorkerNameUsage(
"'{old}' attribute is deprecated, use '{new}' instead.".format(
new=new_name, old=name))
return getattr(self, new_name)
def __setattr__(self, name, value):
if name in self.__compat_attrs:
new_name = self.__compat_attrs[name]
# TODO: Log class name, operation type etc.
reportDeprecatedWorkerNameUsage(
"'{old}' attribute is deprecated, use '{new}' instead.".format(
new=new_name, old=name))
return setattr(self, new_name, value)
else:
object.__setattr__(self, name, value)
@property
def __compat_attrs(self):
# It's unreliable to initialize attributes in __init__() since
# old-style classes are used and parent initializers are mostly
# not called.
if "_compat_attrs_mapping" not in self.__dict__:
self.__dict__["_compat_attrs_mapping"] = {}
return self._compat_attrs_mapping
def _registerOldWorkerAttr(self, attr_name, name=None):
"""Define old-named attribute inside class instance."""
compat_name = _compat_name(attr_name, compat_name=name)
assert compat_name not in self.__dict__
assert compat_name not in self.__compat_attrs
self.__compat_attrs[compat_name] = attr_name
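# Illustrative use of the mixin (hypothetical __init__):
#     class Worker(WorkerAPICompatMixin, object):
#         def __init__(self, name):
#             self.workername = name
#             self._registerOldWorkerAttr("workername")
# after which reading or writing ``obj.slavename`` emits a deprecation
# warning and proxies to ``obj.workername``.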
# Enable worker transition hooks
setupWorkerTransition()
| gpl-2.0 | -2,315,338,362,696,529,000 | 33.20597 | 92 | 0.666987 | false |
geotagx/pybossa | test/test_uploader/test_local_uploader.py | 1 | 7471 | # -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2015 SciFabric LTD.
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
"""This module tests the Uploader class."""
import os
import tempfile
from default import Test
from pybossa.uploader.local import LocalUploader
from mock import patch
from werkzeug.datastructures import FileStorage
from nose.tools import assert_raises
class TestLocalUploader(Test):
"""Test PyBossa Uploader module."""
def test_local_uploader_relative_directory_init(self):
"""Test LOCAL UPLOADER init works with relative path."""
new_upload_folder = 'uploads'
new_config_uf = {'UPLOAD_FOLDER': new_upload_folder}
with patch.dict(self.flask_app.config, new_config_uf):
new_uploader = LocalUploader()
new_uploader.init_app(self.flask_app)
err_msg = "Upload folder should be absolute not relative"
assert os.path.isabs(new_uploader.upload_folder) is True, err_msg
err_msg = "Upload folder uploads should be existing"
assert os.path.isdir(new_uploader.upload_folder) is True, err_msg
def test_wrong_local_uploader_relative_directory_init(self):
"""Test LOCAL UPLOADER init with wrong relative path."""
new_upload_folder = 'iamnotexisting'
err_msg = "Uploadfolder ./iamnotexisting should not exist"
assert os.path.isdir(new_upload_folder) is False, err_msg
new_config_uf = {'UPLOAD_FOLDER': new_upload_folder}
with patch.dict(self.flask_app.config, new_config_uf):
new_uploader = LocalUploader()
assert_raises(IOError, new_uploader.init_app, self.flask_app) # Should raise IOError
err_msg = "wrong upload folder ./iamnotexisting should not exist"
assert os.path.isdir(new_upload_folder) is False, err_msg
def test_local_uploader_standard_directory_existing(self):
"""Test if local uploads directory existing"""
uploads_path = os.path.join(os.path.dirname(self.flask_app.root_path), 'uploads') # ../uploads
err_msg = "./uploads folder is not existing"
assert os.path.isdir(uploads_path) is True, err_msg
context_uploads_path = os.path.join(self.flask_app.root_path, 'uploads') # pybossa/uploads
err_msg = "pybossa/uploads should not exist"
assert os.path.isdir(context_uploads_path) is False, err_msg
def test_local_uploader_init(self):
"""Test LOCAL UPLOADER init works."""
u = LocalUploader()
u.init_app(self.flask_app)
new_extensions = ['pdf', 'doe']
new_upload_folder = '/tmp/'
new_config_ext = {'ALLOWED_EXTENSIONS': new_extensions}
new_config_uf = {'UPLOAD_FOLDER': new_upload_folder}
with patch.dict(self.flask_app.config, new_config_ext):
with patch.dict(self.flask_app.config, new_config_uf):
new_uploader = LocalUploader()
new_uploader.init_app(self.flask_app)
expected_extensions = set.union(u.allowed_extensions,
new_extensions)
err_msg = "The new uploader should support two extra extensions"
assert expected_extensions == new_uploader.allowed_extensions, err_msg
err_msg = "Upload folder /tmp should be existing"
assert os.path.isdir(new_uploader.upload_folder) is True, err_msg
err_msg = "Upload folder by default is /tmp/"
assert new_uploader.upload_folder == '/tmp/', err_msg
@patch('werkzeug.datastructures.FileStorage.save', side_effect=IOError)
def test_local_uploader_upload_fails(self, mock):
"""Test LOCAL UPLOADER upload fails."""
u = LocalUploader()
file = FileStorage(filename='test.jpg')
res = u.upload_file(file, container='user_3')
err_msg = ("Upload file should return False, \
as there is an exception")
assert res is False, err_msg
@patch('werkzeug.datastructures.FileStorage.save', return_value=None)
def test_local_uploader_upload_correct_file(self, mock):
"""Test LOCAL UPLOADER upload works."""
mock.save.return_value = None
u = LocalUploader()
file = FileStorage(filename='test.jpg')
res = u.upload_file(file, container='user_3')
err_msg = ("Upload file should return True, \
as this extension is allowed")
assert res is True, err_msg
@patch('werkzeug.datastructures.FileStorage.save', return_value=None)
def test_local_uploader_upload_wrong_file(self, mock):
"""Test LOCAL UPLOADER upload works with wrong extension."""
mock.save.return_value = None
u = LocalUploader()
file = FileStorage(filename='test.txt')
res = u.upload_file(file, container='user_3')
err_msg = ("Upload file should return False, \
as this extension is not allowed")
assert res is False, err_msg
@patch('werkzeug.datastructures.FileStorage.save', return_value=None)
def test_local_folder_is_created(self, mock):
"""Test LOCAL UPLOADER folder creation works."""
mock.save.return_value = True
u = LocalUploader()
u.upload_folder = tempfile.mkdtemp()
file = FileStorage(filename='test.jpg')
container = 'mycontainer'
res = u.upload_file(file, container=container)
path = os.path.join(u.upload_folder, container)
err_msg = "This local path should exist: %s" % path
assert os.path.isdir(path) is True, err_msg
@patch('os.remove', return_value=None)
def test_local_folder_delete(self, mock):
"""Test LOCAL UPLOADER delete works."""
u = LocalUploader()
err_msg = "Delete should return true"
assert u.delete_file('file', 'container') is True, err_msg
@patch('os.remove', side_effect=OSError)
def test_local_folder_delete_fails(self, mock):
"""Test LOCAL UPLOADER delete fail works."""
u = LocalUploader()
err_msg = "Delete should return False"
assert u.delete_file('file', 'container') is False, err_msg
def test_file_exists_for_missing_file(self):
"""Test LOCAL UPLOADER file_exists returns False if the file does not exist"""
u = LocalUploader()
container = 'mycontainer'
assert u.file_exists('noexist.txt', container) is False
def test_file_exists_for_real_file(self):
"""Test LOCAL UPLOADER file_exists returns True if the file exists"""
u = LocalUploader()
u.upload_folder = tempfile.mkdtemp()
file = FileStorage(filename='test.jpg')
container = 'mycontainer'
u.upload_file(file, container=container)
assert u.file_exists('test.jpg', container) is True
| agpl-3.0 | 8,128,730,163,763,250,000 | 45.403727 | 109 | 0.647704 | false |
valmynd/MediaFetcher | src/plugins/youtube_dl/youtube_dl/extractor/crackle.py | 1 | 5817 | # coding: utf-8
from __future__ import unicode_literals, division
import hashlib
import hmac
import re
import time
from .common import InfoExtractor
from ..compat import compat_HTTPError
from ..utils import (
determine_ext,
float_or_none,
int_or_none,
parse_age_limit,
parse_duration,
url_or_none,
ExtractorError
)
class CrackleIE(InfoExtractor):
_VALID_URL = r'(?:crackle:|https?://(?:(?:www|m)\.)?(?:sony)?crackle\.com/(?:playlist/\d+/|(?:[^/]+/)+))(?P<id>\d+)'
_TESTS = [{
# geo restricted to CA
'url': 'https://www.crackle.com/andromeda/2502343',
'info_dict': {
'id': '2502343',
'ext': 'mp4',
'title': 'Under The Night',
'description': 'md5:d2b8ca816579ae8a7bf28bfff8cefc8a',
'duration': 2583,
'view_count': int,
'average_rating': 0,
'age_limit': 14,
'genre': 'Action, Sci-Fi',
'creator': 'Allan Kroeker',
'artist': 'Keith Hamilton Cobb, Kevin Sorbo, Lisa Ryder, Lexa Doig, Robert Hewitt Wolfe',
'release_year': 2000,
'series': 'Andromeda',
'episode': 'Under The Night',
'season_number': 1,
'episode_number': 1,
},
'params': {
# m3u8 download
'skip_download': True,
}
}, {
'url': 'https://www.sonycrackle.com/andromeda/2502343',
'only_matching': True,
}]
_MEDIA_FILE_SLOTS = {
'360p.mp4': {
'width': 640,
'height': 360,
},
'480p.mp4': {
'width': 768,
'height': 432,
},
'480p_1mbps.mp4': {
'width': 852,
'height': 480,
},
}
def _real_extract(self, url):
video_id = self._match_id(url)
country_code = self._downloader.params.get('geo_bypass_country', None)
countries = [country_code] if country_code else (
'US', 'AU', 'CA', 'AS', 'FM', 'GU', 'MP', 'PR', 'PW', 'MH', 'VI')
last_e = None
for country in countries:
try:
# Authorization generation algorithm is reverse engineered from:
# https://www.sonycrackle.com/static/js/main.ea93451f.chunk.js
media_detail_url = 'https://web-api-us.crackle.com/Service.svc/details/media/%s/%s?disableProtocols=true' % (
video_id, country)
timestamp = time.strftime('%Y%m%d%H%M', time.gmtime())
h = hmac.new(b'IGSLUQCBDFHEOIFM', '|'.join([media_detail_url, timestamp]).encode(),
hashlib.sha1).hexdigest().upper()
media = self._download_json(
media_detail_url, video_id, 'Downloading media JSON as %s' % country,
'Unable to download media JSON', headers={
'Accept': 'application/json',
'Authorization': '|'.join([h, timestamp, '117', '1']),
})
except ExtractorError as e:
# 401 means geo restriction, trying next country
if isinstance(e.cause, compat_HTTPError) and e.cause.code == 401:
last_e = e
continue
raise
media_urls = media.get('MediaURLs')
if not media_urls or not isinstance(media_urls, list):
continue
title = media['Title']
formats = []
for e in media['MediaURLs']:
if e.get('UseDRM') is True:
continue
format_url = url_or_none(e.get('Path'))
if not format_url:
continue
ext = determine_ext(format_url)
if ext == 'm3u8':
formats.extend(self._extract_m3u8_formats(
format_url, video_id, 'mp4', entry_protocol='m3u8_native',
m3u8_id='hls', fatal=False))
elif ext == 'mpd':
formats.extend(self._extract_mpd_formats(
format_url, video_id, mpd_id='dash', fatal=False))
elif format_url.endswith('.ism/Manifest'):
formats.extend(self._extract_ism_formats(
format_url, video_id, ism_id='mss', fatal=False))
else:
mfs_path = e.get('Type')
mfs_info = self._MEDIA_FILE_SLOTS.get(mfs_path)
if not mfs_info:
continue
formats.append({
'url': format_url,
'format_id': 'http-' + mfs_path.split('.')[0],
'width': mfs_info['width'],
'height': mfs_info['height'],
})
self._sort_formats(formats)
description = media.get('Description')
duration = int_or_none(media.get(
'DurationInSeconds')) or parse_duration(media.get('Duration'))
view_count = int_or_none(media.get('CountViews'))
average_rating = float_or_none(media.get('UserRating'))
age_limit = parse_age_limit(media.get('Rating'))
genre = media.get('Genre')
release_year = int_or_none(media.get('ReleaseYear'))
creator = media.get('Directors')
artist = media.get('Cast')
if media.get('MediaTypeDisplayValue') == 'Full Episode':
series = media.get('ShowName')
episode = title
season_number = int_or_none(media.get('Season'))
episode_number = int_or_none(media.get('Episode'))
else:
series = episode = season_number = episode_number = None
subtitles = {}
cc_files = media.get('ClosedCaptionFiles')
if isinstance(cc_files, list):
for cc_file in cc_files:
if not isinstance(cc_file, dict):
continue
cc_url = url_or_none(cc_file.get('Path'))
if not cc_url:
continue
lang = cc_file.get('Locale') or 'en'
subtitles.setdefault(lang, []).append({'url': cc_url})
thumbnails = []
images = media.get('Images')
if isinstance(images, list):
for image_key, image_url in images.items():
mobj = re.search(r'Img_(\d+)[xX](\d+)', image_key)
if not mobj:
continue
thumbnails.append({
'url': image_url,
'width': int(mobj.group(1)),
'height': int(mobj.group(2)),
})
return {
'id': video_id,
'title': title,
'description': description,
'duration': duration,
'view_count': view_count,
'average_rating': average_rating,
'age_limit': age_limit,
'genre': genre,
'creator': creator,
'artist': artist,
'release_year': release_year,
'series': series,
'episode': episode,
'season_number': season_number,
'episode_number': episode_number,
'thumbnails': thumbnails,
'subtitles': subtitles,
'formats': formats,
}
raise last_e
| gpl-3.0 | -6,836,645,360,470,241,000 | 27.79703 | 117 | 0.618188 | false |
Debian/debile | debile/master/arches.py | 2 | 3766 | # Copyright (c) 2012-2013 Paul Tagliamonte <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
from debile.utils.commands import run_command
def arch_matches(arch, alias):
"""
Check if given arch `arch` matches the other arch `alias`. This is most
useful for the complex any-* rules.
"""
if arch == alias:
return True
if arch == 'all' or arch == 'source':
# These pseudo-arches does not match any wildcards or aliases
return False
if alias == 'any':
# The 'any' wildcard matches all *real* architectures
return True
if alias == 'linux-any':
# GNU/Linux arches are named <cpuabi>
# Other Linux arches are named <libc>-linux-<cpuabi>
        return '-' not in arch or 'linux' in arch.split('-')
if alias.endswith('-any'):
# Non-Linux GNU/<os> arches are named <os>-<cpuabi>
# Other non-Linux arches are named <libc>-<os>-<cpuabi>
osname, _ = alias.split('-', 1)
return osname in arch.split('-')
if not "-" in arch and not "-" in alias:
return False
# This is a fucking disaster for perf. Do what we can to not get here.
out, err, ret = run_command([
"/usr/bin/dpkg-architecture",
"-a%s" % (arch),
"-i%s" % (alias)
])
return ret == 0
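# A few illustrative results, following the rules above:
#     arch_matches("amd64", "any")                   -> True
#     arch_matches("amd64", "linux-any")             -> True
#     arch_matches("kfreebsd-amd64", "kfreebsd-any") -> True
#     arch_matches("all", "any")                     -> False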
def get_preferred_affinity(
affinity_preference, valid_affinities, valid_arches
):
"""
    Given a list of strings representing the preferred affinities in the
config, a list of string with valid affinities of the source, and a list
of valid architectures in the suite, return the arch object to use as
affinity for arch "all" jobs.
"""
for affinity in affinity_preference:
arch = None
for x in valid_arches:
if x.name == affinity:
arch = x
break
if arch is None:
continue
for alias in valid_affinities:
if arch_matches(affinity, alias):
return arch
raise ValueError(
"No valid affinity - preferences: '%s'; valid: '%s'; arches %s" % (
", ".join(affinity_preference),
", ".join(valid_affinities),
", ".join([x.name for x in valid_arches]),))
def get_source_arches(dsc_arches, valid_arches):
"""
Given a list of strings with the Architectures data from the dsc,
and a list of valid Arch objects from the suite, return the Arch
objects to add to the Source object.
"""
ret = []
for arch in valid_arches:
for alias in dsc_arches:
if arch_matches(arch.name, alias):
ret.append(arch)
break # Break inner loop, continue outer loop
return ret
| mit | -7,822,004,619,686,526,000 | 33.87037 | 76 | 0.642857 | false |
rajarammallya/melange | melange/common/config.py | 2 | 2102 | #!/usr/bin/env python
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Routines for configuring Melange."""
from melange.openstack.common import config as openstack_config
parse_options = openstack_config.parse_options
add_log_options = openstack_config.add_log_options
add_common_options = openstack_config.add_common_options
setup_logging = openstack_config.setup_logging
get_option = openstack_config.get_option
class Config(object):
instance = {}
@classmethod
def load_paste_app(cls, *args, **kwargs):
conf, app = openstack_config.load_paste_app(*args, **kwargs)
cls.instance = conf
return conf, app
@classmethod
def load_paste_config(cls, *args, **kwargs):
conf_file, conf = openstack_config.load_paste_config(*args, **kwargs)
cls.instance = conf
return conf
@classmethod
def get(cls, key, default=None):
return cls.instance.get(key, default)
@classmethod
def get_params_group(cls, group_key):
group_key = group_key + "_"
return dict((key.replace(group_key, "", 1), cls.instance.get(key))
for key in cls.instance
if key.startswith(group_key))
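# Illustrative behaviour of Config.get_params_group (hypothetical values):
# with Config.instance == {'db_host': 'x', 'db_port': '5432', 'debug': 'y'},
# Config.get_params_group('db') returns {'host': 'x', 'port': '5432'}.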
def load_app_environment(oparser):
add_common_options(oparser)
add_log_options(oparser)
(options, args) = parse_options(oparser)
conf = Config.load_paste_config('melange', options, args)
setup_logging(options=options, conf=conf)
return conf
| apache-2.0 | 3,419,125,939,191,101,400 | 31.338462 | 78 | 0.685062 | false |
mathias-nyman/mittn | mittn/headlessscanner/test_dbtools.py | 2 | 8099 | import unittest
import tempfile
import uuid
import os
import mittn.headlessscanner.dbtools as dbtools
import datetime
import socket
import json
import sqlalchemy
from sqlalchemy import create_engine, Table, Column, MetaData, exc, types
class dbtools_test_case(unittest.TestCase):
def setUp(self):
# Create an empty mock inline "context" object
# See https://docs.python.org/2/library/functions.html#type
self.context = type('context', (object,), dict())
# Whip up a sqlite database URI for testing
self.db_file = os.path.join(tempfile.gettempdir(),
'mittn_unittest.' + str(uuid.uuid4()))
self.context.dburl = 'sqlite:///' + self.db_file
def test_dburl_not_defined(self):
# Try to open connection without a defined database URI
empty_context = type('context', (object,), dict())
dbconn = dbtools.open_database(empty_context)
self.assertEqual(dbconn,
None,
"No dburl provided should return None as connection")
def test_create_db_connection(self):
# Try whether an actual database connection can be opened
dbconn = dbtools.open_database(self.context)
self.assertEqual(type(dbconn),
sqlalchemy.engine.base.Connection,
"An SQLAlchemy connection object was not returned")
def test_add_false_positive(self):
# Add a false positive to database and check that all fields
        # get populated and can be compared back to the originals
issue = {'scenario_id': '1',
'url': 'testurl',
'severity': 'testseverity',
'issuetype': 'testissuetype',
'issuename': 'testissuename',
'issuedetail': 'testissuedetail',
'confidence': 'testconfidence',
'host': 'testhost',
'port': 'testport',
'protocol': 'testprotocol',
'messages': '{foo=bar}'}
dbtools.add_false_positive(self.context, issue)
# Connect directly to the database and check the data is there
db_engine = sqlalchemy.create_engine(self.context.dburl)
dbconn = db_engine.connect()
db_metadata = sqlalchemy.MetaData()
headlessscanner_issues = Table('headlessscanner_issues',
db_metadata,
Column('new_issue', types.Boolean),
Column('issue_no', types.Integer, primary_key=True, nullable=False), # Implicit autoincrement
Column('timestamp', types.DateTime(timezone=True)),
Column('test_runner_host', types.Text),
Column('scenario_id', types.Text),
Column('url', types.Text),
Column('severity', types.Text),
Column('issuetype', types.Text),
Column('issuename', types.Text),
Column('issuedetail', types.Text),
Column('confidence', types.Text),
Column('host', types.Text),
Column('port', types.Text),
Column('protocol', types.Text),
Column('messages', types.LargeBinary))
db_select = sqlalchemy.sql.select([headlessscanner_issues])
db_result = dbconn.execute(db_select)
result = db_result.fetchone()
for key, value in issue.iteritems():
if key == 'messages':
self.assertEqual(result[key], json.dumps(value))
else:
self.assertEqual(result[key], value,
'%s not found in database after add' % key)
self.assertEqual(result['test_runner_host'], socket.gethostbyname(socket.getfqdn()),
'Test runner host name not correct in database')
self.assertLessEqual(result['timestamp'], datetime.datetime.utcnow(),
'Timestamp not correctly stored in database')
dbconn.close()
def test_number_of_new_false_positives(self):
# Add a couple of false positives to database as new issues,
        # and check that they're counted properly
issue = {'scenario_id': '1',
'timestamp': datetime.datetime.utcnow(),
'test_runner_host': 'localhost',
'url': 'url',
'severity': 'severity',
'issuetype': 'issuetype',
'issuename': 'issuename',
'issuedetail': 'issuedetail',
'confidence': 'confidence',
'host': 'host',
'port': 'port',
'protocol': 'protocol',
'messages': 'messagejson'}
# Add one, expect count to be 1
dbtools.add_false_positive(self.context, issue)
self.assertEqual(dbtools.number_of_new_in_database(self.context),
1, "After adding one, expect one finding in database")
# Add a second one, expect count to be 2
dbtools.add_false_positive(self.context, issue)
self.assertEqual(dbtools.number_of_new_in_database(self.context),
2, "After adding two, expect two findings in db")
def test_false_positive_detection(self):
# Test whether false positives in database are identified properly
issue = {'scenario_id': '1',
'timestamp': datetime.datetime.utcnow(),
'test_runner_host': 'localhost',
'url': 'url',
'severity': 'severity',
'issuetype': 'issuetype',
'issuename': 'issuename',
'issuedetail': 'issuedetail',
'confidence': 'confidence',
'host': 'host',
'port': 'port',
'protocol': 'protocol',
'messages': 'messagejson'}
# First add one false positive and try checking against it
dbtools.add_false_positive(self.context, issue)
self.assertEqual(dbtools.known_false_positive(self.context,
issue),
True, "Duplicate false positive not detected")
# Change one of the differentiating fields, and test, and
# add the tested one to the database.
issue['scenario_id'] = '2' # Non-duplicate
self.assertEqual(dbtools.known_false_positive(self.context,
issue),
False, "Not a duplicate: scenario_id different")
dbtools.add_false_positive(self.context, issue)
# Repeat for all the differentiating fields
issue['url'] = 'another url'
self.assertEqual(dbtools.known_false_positive(self.context,
issue),
False, "Not a duplicate: url different")
dbtools.add_false_positive(self.context, issue)
issue['issuetype'] = 'foo'
self.assertEqual(dbtools.known_false_positive(self.context,
issue),
False, "Not a duplicate: issuetype different")
dbtools.add_false_positive(self.context, issue)
# Finally, test the last one again twice, now it ought to be
# reported back as a duplicate
self.assertEqual(dbtools.known_false_positive(self.context,
issue),
True, "A duplicate case not detected")
def tearDown(self):
try:
os.unlink(self.db_file)
        except OSError:
pass
| apache-2.0 | -3,506,958,445,289,969,000 | 45.545977 | 133 | 0.523398 | false |
boracho/reddit-librarian | librarianbot.py | 1 | 1346 | import praw
import time
import json
sub_philo = 'philosophy'
sub_science = 'science'
library = ''
key_flair = 'psychology'
keyword_philo = 'consciousness'
keyword_test = 'snow'
keyword_test_two = 'intersection'
keyword_sci = 'quantum'
already_posted_id = []
already_posted_url = []
r = praw.Reddit(user_agent = 'Librarian Bot')
r.login('', '')
subreddit = r.get_subreddit(sub_science)
def submission_parser(subreddit, keyword):
with open('log.json', 'r') as data:
dbase = json.load(data)
for submission in subreddit.get_hot(limit=10):
lowercase_title = str(submission).lower()
if submission.id not in dbase['already posted id'] and submission.url not in dbase['already posted url']:
            if submission.link_flair_text is not None and submission.link_flair_text.lower() == keyword:
with open('log.json', 'r+') as f:
dbase = json.load(f)
dbase['already posted id'].append(submission.id)
dbase['already posted url'].append(submission.url)
f.seek(0)
json.dump(dbase, f, indent=4)
r.submit(library, submission.title, url=submission.url)
print 'I just added another link to your library!'
while True:
submission_parser(subreddit, key_flair)
time.sleep(3600)
| mit | 8,054,340,248,041,795,000 | 34.421053 | 113 | 0.625557 | false |
eugenejen/AutobahnPython | examples/twisted/wamp/work/pubsub/server.py | 14 | 3020 | ###############################################################################
##
## Copyright (C) 2013 Tavendo GmbH
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
from autobahn.wamp2.broker import Broker
from autobahn.wamp2.websocket import WampWebSocketServerProtocol, \
WampWebSocketServerFactory
class PubSubServerProtocol(WampWebSocketServerProtocol):
"""
"""
def onSessionOpen(self):
self.setBroker(self.factory.broker)
class PubSubServerFactory(WampWebSocketServerFactory):
"""
"""
protocol = PubSubServerProtocol
def __init__(self, url, debug = False):
WampWebSocketServerFactory.__init__(self, url, debug)
self.broker = Broker()
if __name__ == '__main__':
import sys, argparse
from twisted.python import log
from twisted.internet.endpoints import serverFromString
## parse command line arguments
##
parser = argparse.ArgumentParser()
parser.add_argument("-d", "--debug", action = "store_true",
help = "Enable debug output.")
parser.add_argument("--websocket", default = "tcp:9000",
help = 'WebSocket server Twisted endpoint descriptor, e.g. "tcp:9000" or "unix:/tmp/mywebsocket".')
parser.add_argument("--wsurl", default = "ws://localhost:9000",
help = 'WebSocket URL (must suit the endpoint), e.g. "ws://localhost:9000".')
parser.add_argument("--web", default = "tcp:8080",
help = 'Web server endpoint descriptor, e.g. "tcp:8080".')
args = parser.parse_args()
## start Twisted logging to stdout
##
log.startLogging(sys.stdout)
## we use an Autobahn utility to install the "best" available Twisted reactor
##
from autobahn.choosereactor import install_reactor
reactor = install_reactor()
print "Running on reactor", reactor
## start a WebSocket server
##
wampfactory = PubSubServerFactory(args.wsurl, debug = args.debug)
wampserver = serverFromString(reactor, args.websocket)
wampserver.listen(wampfactory)
## start a Web server
##
if args.web != "":
from twisted.web.server import Site
from twisted.web.static import File
webfactory = Site(File("."))
webserver = serverFromString(reactor, args.web)
webserver.listen(webfactory)
## now enter the Twisted reactor loop
##
reactor.run()
| apache-2.0 | 2,489,533,038,190,035,500 | 27.490566 | 122 | 0.628477 | false |
cartertech/odoo-hr-ng | hr_policy_absence/__init__.py | 1 | 1070 | #-*- coding:utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 One Click Software (http://oneclick.solutions)
# and Copyright (C) 2013 Michael Telahun Makonnen <[email protected]>.
# All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import hr_policy_absence
| agpl-3.0 | -6,444,817,049,636,742,000 | 45.521739 | 80 | 0.615888 | false |
Daniel-CA/odoomrp-wip-public | partner_risk_insurance/__openerp__.py | 28 | 1474 | # -*- encoding: utf-8 -*-
##############################################################################
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
#
##############################################################################
{
"name": "Partner Risk Insurance",
"version": "1.0",
"description": """
This module adds a new tab in the partner form to introduce risk insurance
information.
""",
"author": "OdooMRP team,"
"AvanzOSC,"
"Serv. Tecnol. Avanzados - Pedro M. Baeza,"
"Factor Libre S.L,"
"NaN·tic",
'contributors': ["Daniel Campos <[email protected]>"],
"website": "http://www.factorlibre.com",
"depends": [],
"category": "Custom Modules",
"data": ['views/res_partner_view.xml'],
"active": False,
"installable": True
}
| agpl-3.0 | -3,977,420,785,175,846,400 | 37.763158 | 78 | 0.575696 | false |
SPARLab/BikeMaps | spirit/utils/forms.py | 1 | 1737 | #-*- coding: utf-8 -*-
from django import forms
from django.utils.html import conditional_escape, mark_safe
from six import smart_text
class NestedModelChoiceField(forms.ModelChoiceField):
"""A ModelChoiceField that groups parents and childrens"""
# TODO: subclass ModelChoiceIterator, remove _populate_choices()
def __init__(self, related_name, parent_field, label_field, *args, **kwargs):
"""
@related_name: related_name or "FOO_set"
@parent_field: ForeignKey('self') field, use 'name_id' to save some queries
@label_field: field for obj representation
"""
super(NestedModelChoiceField, self).__init__(*args, **kwargs)
self.related_name = related_name
self.parent_field = parent_field
self.label_field = label_field
self._populate_choices()
def _populate_choices(self):
# This is *hackish* but simpler than subclassing ModelChoiceIterator
choices = [("", self.empty_label), ]
kwargs = {self.parent_field: None, }
queryset = self.queryset.filter(**kwargs)\
.prefetch_related(self.related_name)
for parent in queryset:
choices.append((self.prepare_value(parent), self.label_from_instance(parent)))
choices.extend([(self.prepare_value(children), self.label_from_instance(children))
for children in getattr(parent, self.related_name).all()])
self.choices = choices
def label_from_instance(self, obj):
level_indicator = ""
if getattr(obj, self.parent_field):
level_indicator = "--- "
return mark_safe(level_indicator + conditional_escape(smart_text(getattr(obj, self.label_field))))
| mit | 5,185,146,625,280,223,000 | 39.395349 | 106 | 0.644214 | false |
bootc/owfs-cvsimport | module/swig/python/examples/xmlrpc_server.py | 1 | 2373 | #! /usr/bin/env python
"""
::BOH
$Id$
Copyright (c) 2005 Peter Kropf. All rights reserved.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or (at
your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
::EOH
Create an XML-RPC server for a 1-wire network.
Run xmlrpc_client.py to see the server in action.
Or point a browser at http://localhost:8765 to see some documentation.
"""
import sys
import ow
from DocXMLRPCServer import DocXMLRPCServer, DocXMLRPCRequestHandler
from SocketServer import ThreadingMixIn
class owr:
"""
A wrapper class is needed around the ow.Sensor class since the
XML-RPC protocol doesn't know anything about generators, Python
objects and such. XML-RPC is a pretty simple protocol that deals
pretty well with basic types. So that's what it'll get.
"""
def entries( self, path ):
"""List a sensor's attributes."""
return [entry for entry in ow.Sensor( path ).entries( )]
def sensors( self, path ):
"""List all the sensors that exist in a particular path."""
return [sensor._path for sensor in ow.Sensor( path ).sensors( )]
def attr( self, path, attr ):
"""Lookup a specific sensor attribute."""
sensor = ow.Sensor( path )
        # Python 2 exec statement: resolve the requested attribute path
        # (e.g. 'temperature') against the sensor object.
        exec 'val = sensor.' + attr
return val
class ThreadingServer( ThreadingMixIn, DocXMLRPCServer ):
pass
# Initialize ow for a USB controller or for a serial port.
ow.init( 'u' )
#ow.init( '/dev/ttyS0' )
# Allow connections for the localhost on port 8765.
serveraddr = ( '', 8765 )
srvr = ThreadingServer( serveraddr, DocXMLRPCRequestHandler )
srvr.set_server_title( '1-wire network' )
srvr.set_server_documentation( 'Welcome to the world of 1-wire networks.' )
srvr.register_instance( owr( ) )
srvr.register_introspection_functions( )
srvr.serve_forever( )
| gpl-2.0 | -1,502,286,951,648,837,600 | 29.818182 | 75 | 0.720607 | false |
betterlife/psi | tests/thirdparty/local_image_store_test.py | 2 | 1772 | from __future__ import print_function
import os
import unittest
import uuid
import io
from werkzeug.datastructures import FileStorage
from psi.app.thirdparty.local_image_store import LocalImageStore
from tests.base_test_case import BaseTestCase
class TestLocalImageStore(BaseTestCase):
def setUp(self):
super(TestLocalImageStore, self).setUp()
test_image_file_path = os.path.join(os.path.dirname(__file__), '../resources/image.png')
self.image_file = open(test_image_file_path, 'rb')
def tearDown(self):
super(TestLocalImageStore, self).tearDown()
self.image_file.close()
def testLocalImageSaveAndRemove(self):
public_id = str(uuid.uuid4())
data = self.image_file.read()
stream = io.BytesIO(data)
image = FileStorage(content_type='image/png', filename=u'/etc/init.d/functions.png', name='image_placeholder',
content_length=0, stream=stream)
result = LocalImageStore.save(image, public_id)
self.assertIsNotNone(result)
filename = public_id + ".png"
self.assertEqual(result['url'], "/static/uploaded/" + filename)
self.assertEqual(result['filename'], filename)
file_absolute_path = os.path.join(self.app.config['UPLOAD_FOLDER'], result['filename'])
uploaded_file = open(file_absolute_path, 'rb')
uploaded_data = uploaded_file.read()
self.assertEqual(data, uploaded_data)
uploaded_file.close()
LocalImageStore.remove(file_absolute_path, public_id)
try:
uploaded_file = open(file_absolute_path)
uploaded_file.close()
except IOError as e:
pass
else:
self.fail("The file should be deleted!")
| mit | -3,454,109,550,331,404,000 | 31.814815 | 118 | 0.64842 | false |
crepererum/invenio | invenio/modules/bulletin/format_elements/bfe_webjournal_sub_navigation.py | 13 | 6484 | # -*- coding: utf-8 -*-
# $Id: bfe_webjournal_CERNBulletinSubNavigation.py,v 1.13 2009/02/12 10:00:57 jerome Exp $
#
# This file is part of Invenio.
# Copyright (C) 2009, 2010, 2011 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
WebJournal element - Displays vertical subnavigation menu in detailed
article pages.
"""
from invenio.modules.formatter.engine import BibFormatObject
from invenio.base.i18n import gettext_set_language
from invenio.legacy.webjournal.utils import \
parse_url_string, \
make_journal_url, \
get_journal_articles,\
get_journal_categories
def format_element(bfo, new_articles_first='yes',
subject_to_css_class_kb="WebJournalSubject2CSSClass",
display_all_category_articles='no'):
"""
Creates a navigation for articles in the same issue and category.
@param new_articles_first: if 'yes', display new articles before other articles
@param subject_to_css_class_kb: knowledge base that maps 595__a to a CSS class
@param display_all_category_articles: if yes, display all articles, whatever category is selected
"""
# get variables
args = parse_url_string(bfo.user_info['uri'])
this_recid = bfo.control_field('001')
this_issue_number = args["issue"]
category_name = args["category"]
journal_name = args["journal_name"]
ln = bfo.lang
_ = gettext_set_language(ln)
this_title = ""
if ln == "fr":
if bfo.fields('246_1a'):
this_title = bfo.fields('246_1a')[0]
elif bfo.fields('245__a'):
this_title = bfo.fields('245__a')[0]
else:
if bfo.fields('245__a'):
this_title = bfo.fields('245__a')[0]
elif bfo.fields('246_1a'):
this_title = bfo.fields('246_1a')[0]
journal_categories = [category_name]
if display_all_category_articles.lower() == 'yes':
# Let's retrieve all categories. Ok, we are not supposed to do
# that with that element, but if journal editor wants...
journal_categories = get_journal_categories(journal_name,
this_issue_number)
menu_out = ''
for category in journal_categories:
ordered_articles = get_journal_articles(journal_name,
this_issue_number,
category,
newest_first=new_articles_first.lower() == 'yes')
new_articles_only = False
if ordered_articles.keys() and max(ordered_articles.keys()) < 0:
# If there are only new articles, don't bother marking them as
# new
new_articles_only = True
menu_out += '<div class="subNavigationMenu">'
order_numbers = ordered_articles.keys()
order_numbers.sort()
for order_number in order_numbers:
for article_id in ordered_articles[order_number]:
# A record is considered as new if its position is
# negative and there are some non-new articles
article_is_new = (order_number < 0 and not new_articles_only)
if str(article_id) == this_recid:
# Mark as active
# Get CSS class (if relevant)
notes = bfo.fields('595__a')
css_classes = [bfo.kb(subject_to_css_class_kb, note, None) \
for note in notes]
css_classes = [css_class for css_class in css_classes \
if css_class is not None]
if article_is_new:
css_classes.append('new')
separator = bfo.field('594__a')
if separator == "YES":
menu_out += '''<hr/>'''
menu_out += '''<div class="active">
<div class="subNavigationMenuItem %s">%s</div></div>''' % \
(' '.join(css_classes),
this_title)
else:
temp_rec = BibFormatObject(article_id)
title = ''
if ln == "fr":
title = temp_rec.field('246_1a')
if title == '':
title = temp_rec.field('245__a')
else:
title = temp_rec.field('245__a')
if title == '':
title = temp_rec.field('246_1a')
# Get CSS class (if relevant)
notes = temp_rec.fields('595__a')
css_classes = [temp_rec.kb(subject_to_css_class_kb, note, None) \
for note in notes]
css_classes = [css_class for css_class in css_classes \
if css_class is not None]
if article_is_new:
css_classes.append('new')
separator = temp_rec.field('594__a')
if separator == "YES":
menu_out += '''<hr/>'''
menu_out += '''<div class="subNavigationMenuItem %s">
<a href="%s">%s</a></div>
''' % (' '.join(css_classes),
make_journal_url(bfo.user_info['uri'],
{'recid': article_id,
'ln': bfo.lang,
'category': category}),
title)
menu_out += '</div>'
return menu_out
def escape_values(bfo):
"""
Called by BibFormat in order to check if output of this element
should be escaped.
"""
return 0
| gpl-2.0 | -3,365,103,016,537,588,700 | 39.779874 | 101 | 0.524368 | false |
jck/myhdl | myhdl/_bin.py | 4 | 1623 | # This file is part of the myhdl library, a Python package for using
# Python as a Hardware Description Language.
#
# Copyright (C) 2003-2008 Jan Decaluwe
#
# The myhdl library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 2.1 of the
# License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
""" module with the bin function.
"""
def _int2bitstring(num):
if num == 0:
return '0'
if abs(num) == 1:
return '1'
bits = []
p, q = divmod(num, 2)
bits.append(str(q))
while not (abs(p) == 1):
p, q = divmod(p, 2)
bits.append(str(q))
bits.append('1')
bits.reverse()
return ''.join(bits)
def bin(num, width=0):
"""Return a binary string representation.
num -- number to convert
Optional parameter:
width -- specifies the desired string (sign bit padding)
"""
num = int(num)
s = _int2bitstring(num)
if width:
pad = '0'
if num < 0:
pad = '1'
return (width - len(s)) * pad + s
return s
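# Illustrative values, derived from the logic above:
#     bin(10)      -> '1010'
#     bin(10, 8)   -> '00001010'
#     bin(-10)     -> '10110'      (minimal two's-complement form)
#     bin(-10, 8)  -> '11110110'   (padded with the sign bit)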
| lgpl-2.1 | 1,026,983,746,684,243,300 | 29.622642 | 74 | 0.649415 | false |
iut-ibk/DynaMind-UrbanSim | 3rdparty/opus/src/urbansim/gridcell/is_vacant_land.py | 2 | 1727 | # Opus/UrbanSim urban simulation software.
# Copyright (C) 2005-2009 University of Washington
# See opus_core/LICENSE
from opus_core.variables.variable import Variable
from urbansim.functions import attribute_label
from numpy import logical_not
class is_vacant_land(Variable):
"""Returns 1 if vacant land (contains no buildings), otherwise 0."""
_return_type="bool8"
number_of_buildings = "gridcell.number_of_agents(building)"
def dependencies(self):
return [self.number_of_buildings]
def compute(self, dataset_pool):
return logical_not(self.get_dataset().get_attribute(self.number_of_buildings))
from opus_core.tests import opus_unittest
from opus_core.tests.utils.variable_tester import VariableTester
from numpy import array
class Tests(opus_unittest.OpusTestCase):
def test_my_inputs(self):
tester = VariableTester(
__file__,
package_order=['urbansim'],
test_data={
'gridcell':{
'grid_id':array([1,2,3,4]),
},
'building': {
'building_id': array([1,2,3,4,5,6]),
'building_type_id': array([1,2,1,2,1,1]),
'grid_id': array([2,3,1,1,2,1])
},
'building_type': {
'building_type_id':array([1,2]),
'name': array(['foo', 'commercial'])
}
}
)
should_be = array([0, 0, 0, 1])
tester.test_is_equal_for_variable_defined_by_this_module(self, should_be)
if __name__=='__main__':
opus_unittest.main() | gpl-2.0 | -4,363,778,423,606,772,000 | 31.901961 | 86 | 0.543717 | false |
Panagiotis-Kon/empower-runtime | empower/core/vap.py | 1 | 2577 | #!/usr/bin/env python3
#
# Copyright (c) 2016 Roberto Riggio
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""EmPOWER Virtual Access Point (VAP) class."""
import empower.logger
LOG = empower.logger.get_logger()
class VAP(object):
""" The EmPOWER Virtual Access Point
Attributes:
        net_bssid: The VAP's BSSID as an EtherAddress instance. This
            address is dynamically generated by the Access Controller and
            is supposed to be unique in the entire network.
        ssid: The currently associated SSID.
        block: the resource block to which this VAP is assigned.
        wtp: the WTP hosting this VAP.
        tenant: the tenant to which this VAP belongs.
"""
def __init__(self, net_bssid, block, wtp, tenant):
# read only params
self.net_bssid = net_bssid
self.block = block
self.wtp = wtp
self._tenant = tenant
@property
def ssid(self):
""" Get the SSID assigned to this LVAP. """
if not self._tenant:
return None
return self._tenant.tenant_name
@property
def tenant(self):
""" Get the tenant assigned to this LVAP. """
return self._tenant
def to_dict(self):
""" Return a JSON-serializable dictionary representing the LVAP """
return {'net_bssid': self.net_bssid,
'ssid': self.ssid,
'block': self.block,
'wtp': self.wtp}
def __str__(self):
accum = []
accum.append("net_bssid ")
accum.append(str(self.net_bssid))
accum.append(" ssid ")
accum.append(str(self.ssid))
accum.append(" block ")
accum.append(str(self.block))
accum.append(" wtp ")
accum.append(str(self.wtp.addr))
return ''.join(accum)
def __hash__(self):
return hash(self.net_bssid)
def __eq__(self, other):
if isinstance(other, VAP):
return self.net_bssid == other.net_bssid
return False
def __ne__(self, other):
return not self.__eq__(other)
| apache-2.0 | -1,850,319,026,677,174,800 | 27.318681 | 75 | 0.619325 | false |
shadowmint/nwidget | lib/pyglet-1.4.4/pyglet/image/atlas.py | 42 | 9104 | # ----------------------------------------------------------------------------
# pyglet
# Copyright (c) 2006-2008 Alex Holkner
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of pyglet nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
'''Group multiple small images into larger textures.
This module is used by `pyglet.resource` to efficiently pack small images into
larger textures. `TextureAtlas` maintains one texture; `TextureBin` manages a
collection of atlases of a given size.
Example usage::
# Load images from disk
car_image = pyglet.image.load('car.png')
boat_image = pyglet.image.load('boat.png')
# Pack these images into one or more textures
bin = TextureBin()
car_texture = bin.add(car_image)
boat_texture = bin.add(boat_image)
The result of `TextureBin.add` is a `TextureRegion` containing the image.
Once added, an image cannot be removed from a bin (or an atlas); nor can a
list of images be obtained from a given bin or atlas -- it is the
application's responsibility to keep track of the regions returned by the
``add`` methods.
:since: pyglet 1.1
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id: $'
import pyglet
class AllocatorException(Exception):
'''The allocator does not have sufficient free space for the requested
image size.'''
pass
class _Strip(object):
def __init__(self, y, max_height):
self.x = 0
self.y = y
self.max_height = max_height
self.y2 = y
def add(self, width, height):
assert width > 0 and height > 0
assert height <= self.max_height
x, y = self.x, self.y
self.x += width
self.y2 = max(self.y + height, self.y2)
return x, y
def compact(self):
self.max_height = self.y2 - self.y
class Allocator(object):
'''Rectangular area allocation algorithm.
Initialise with a given ``width`` and ``height``, then repeatedly
call `alloc` to retrieve free regions of the area and protect that
area from future allocations.
`Allocator` uses a fairly simple strips-based algorithm. It performs best
when rectangles are allocated in decreasing height order.
'''
def __init__(self, width, height):
'''Create an `Allocator` of the given size.
:Parameters:
`width` : int
Width of the allocation region.
`height` : int
Height of the allocation region.
'''
assert width > 0 and height > 0
self.width = width
self.height = height
self.strips = [_Strip(0, height)]
self.used_area = 0
def alloc(self, width, height):
'''Get a free area in the allocator of the given size.
After calling `alloc`, the requested area will no longer be used.
If there is not enough room to fit the given area `AllocatorException`
is raised.
:Parameters:
`width` : int
Width of the area to allocate.
`height` : int
Height of the area to allocate.
:rtype: int, int
:return: The X and Y coordinates of the bottom-left corner of the
allocated region.
'''
for strip in self.strips:
if self.width - strip.x >= width and strip.max_height >= height:
self.used_area += width * height
return strip.add(width, height)
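        # No existing strip could hold the rectangle.  `strip` is now the
        # last strip from the loop above; if there is vertical room below
        # it, compact it and open a new strip there.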
if self.width >= width and self.height - strip.y2 >= height:
self.used_area += width * height
strip.compact()
newstrip = _Strip(strip.y2, self.height - strip.y2)
self.strips.append(newstrip)
return newstrip.add(width, height)
raise AllocatorException('No more space in %r for box %dx%d' % (
self, width, height))
def get_usage(self):
'''Get the fraction of area already allocated.
This method is useful for debugging and profiling only.
:rtype: float
'''
return self.used_area / float(self.width * self.height)
def get_fragmentation(self):
'''Get the fraction of area that's unlikely to ever be used, based on
current allocation behaviour.
This method is useful for debugging and profiling only.
:rtype: float
'''
# The total unused area in each compacted strip is summed.
if not self.strips:
return 0.
possible_area = self.strips[-1].y2 * self.width
return 1.0 - self.used_area / float(possible_area)
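def _demo_allocator():
    # Illustrative sketch, not part of the original pyglet module: the name
    # `_demo_allocator` and all sizes below are hypothetical, added only to
    # show how the strip allocator behaves.
    allocator = Allocator(256, 256)
    # Allocating in decreasing height order keeps each strip tightly packed.
    a = allocator.alloc(128, 64)   # starts the first strip, at (0, 0)
    b = allocator.alloc(64, 64)    # same strip, at (128, 0)
    c = allocator.alloc(64, 32)    # still fits the first strip, at (192, 0)
    used = allocator.get_usage()          # 14336 / 65536 = 0.21875
    frag = allocator.get_fragmentation()  # 1 - 14336 / (64 * 256) = 0.125
    return a, b, c, used, frag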
class TextureAtlas(object):
'''Collection of images within a texture.
'''
def __init__(self, width=256, height=256):
'''Create a texture atlas of the given size.
:Parameters:
`width` : int
Width of the underlying texture.
`height` : int
Height of the underlying texture.
'''
self.texture = pyglet.image.Texture.create(
width, height, pyglet.gl.GL_RGBA, rectangle=True)
self.allocator = Allocator(width, height)
def add(self, img):
'''Add an image to the atlas.
This method will fail if the given image cannot be transferred
directly to a texture (for example, if it is another texture).
`ImageData` is the usual image type for this method.
`AllocatorException` will be raised if there is no room in the atlas
for the image.
:Parameters:
`img` : `AbstractImage`
The image to add.
:rtype: `TextureRegion`
:return: The region of the atlas containing the newly added image.
'''
x, y = self.allocator.alloc(img.width, img.height)
self.texture.blit_into(img, x, y, 0)
region = self.texture.get_region(x, y, img.width, img.height)
return region
class TextureBin(object):
'''Collection of texture atlases.
`TextureBin` maintains a collection of texture atlases, and creates new
ones as necessary to accommodate images added to the bin.
'''
def __init__(self, texture_width=256, texture_height=256):
'''Create a texture bin for holding atlases of the given size.
:Parameters:
`texture_width` : int
Width of texture atlases to create.
`texture_height` : int
Height of texture atlases to create.
'''
self.atlases = []
self.texture_width = texture_width
self.texture_height = texture_height
def add(self, img):
'''Add an image into this texture bin.
This method calls `TextureAtlas.add` for the first atlas that has room
for the image.
`AllocatorException` is raised if the image exceeds the dimensions of
``texture_width`` and ``texture_height``.
:Parameters:
`img` : `AbstractImage`
The image to add.
:rtype: `TextureRegion`
:return: The region of an atlas containing the newly added image.
'''
for atlas in list(self.atlases):
try:
return atlas.add(img)
except AllocatorException:
                # If even a small (< 64x64) image cannot fit, this atlas is
                # essentially full; drop our reference so its texture can be
                # freed once the images inside it are garbage collected.
if img.width < 64 and img.height < 64:
self.atlases.remove(atlas)
atlas = TextureAtlas(self.texture_width, self.texture_height)
self.atlases.append(atlas)
return atlas.add(img)
| apache-2.0 | 8,840,669,249,644,559,000 | 34.150579 | 78 | 0.621046 | false |
chipaca/snapcraft | tests/unit/meta/test_application.py | 2 | 9746 | # -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2019-2021 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import pathlib
from testtools.matchers import Contains, Equals, FileExists, Not
from snapcraft import yaml_utils
from snapcraft.internal.meta import application, desktop, errors
from tests import unit
class AppCommandTest(unit.TestCase):
def setUp(self):
super().setUp()
for exe in (
"test-command",
"test-stop-command",
"test-command-chain",
"prepend-command-chain",
):
open(exe, "w").close()
os.chmod(exe, 0o755)
def test_app_no_change(self):
app = application.Application.from_dict(
app_name="foo",
app_dict={
"command": "test-command",
"stop-command": "test-stop-command",
"daemon": "simple",
"install-mode": "disable",
"command-chain": ["test-command-chain"],
},
)
app.prime_commands(base="core18", prime_dir=self.path)
self.expectThat(
app.to_dict(),
Equals(
{
"command": "test-command",
"stop-command": "test-stop-command",
"daemon": "simple",
"install-mode": "disable",
"command-chain": ["test-command-chain"],
}
),
)
app.write_command_wrappers(prime_dir=self.path)
self.expectThat("command-foo.wrapper", Not(FileExists()))
self.expectThat("stop-command-foo.wrapper", Not(FileExists()))
def test_app_with_wrapper(self):
app = application.Application.from_dict(
app_name="foo",
app_dict={
"command": "/test-command",
"stop-command": "/test-stop-command",
"daemon": "simple",
},
)
app.prime_commands(base="core18", prime_dir=self.path)
self.assertThat(
app.to_dict(),
Equals(
{
"command": "command-foo.wrapper",
"stop-command": "stop-command-foo.wrapper",
"daemon": "simple",
}
),
)
app.write_command_wrappers(prime_dir=self.path)
self.expectThat("command-foo.wrapper", FileExists())
self.expectThat("stop-command-foo.wrapper", FileExists())
def test_massaged_core(self):
app = application.Application.from_dict(
app_name="foo", app_dict={"command": "$SNAP/test-command"}
)
app.prime_commands(base="core", prime_dir=self.path)
self.assertThat(app.to_dict(), Equals({"command": "test-command"}))
def test_massaged_core18(self):
app = application.Application.from_dict(
app_name="foo", app_dict={"command": "$SNAP/test-command"}
)
app.prime_commands(base="core18", prime_dir=self.path)
self.assertThat(app.to_dict(), Equals({"command": "test-command"}))
def test_not_massaged_core20(self):
app = application.Application.from_dict(
app_name="foo", app_dict={"command": "$SNAP/test-command"}
)
self.assertRaises(
errors.InvalidAppCommandNotFound,
app.prime_commands,
base="core20",
prime_dir=self.path,
)
def test_socket_mode_change_to_octal(self):
app = application.Application.from_dict(
app_name="foo",
app_dict={
"command": "test-command",
"daemon": "simple",
"sockets": {
"sock1": {"listen-stream": 8080},
"sock2": {
"listen-stream": "$SNAP_COMMON/sock2",
"socket-mode": 1000,
},
},
},
)
self.expectThat(
type(app.to_dict()["sockets"]["sock2"]["socket-mode"]),
Equals(yaml_utils.OctInt),
)
def test_no_command_chain(self):
app = application.Application.from_dict(
app_name="foo", app_dict={"command": "test-command"}
)
app.prime_commands(base="core18", prime_dir=self.path)
self.assertThat(app.to_dict(), Equals({"command": "test-command"}))
class TestWrapperUse:
scenarios = (
(
"wrapper allowed for plain command on core18",
dict(extra_app_properties={}, base="core18", expect_wrappers=True),
),
(
"wrapper allowed for plain command on core",
dict(extra_app_properties={}, base="core", expect_wrappers=True),
),
(
"wrapper not allowed for not core or core18 base",
dict(extra_app_properties={}, base="core20", expect_wrappers=False),
),
(
"wrapper not allowed with command-chain",
dict(
extra_app_properties={"command-chain": ["command-chain"]},
base="core18",
expect_wrappers=False,
),
),
(
"wrapper not allowed with none adapter",
dict(
extra_app_properties={"adapter": "none"},
base="core18",
expect_wrappers=False,
),
),
)
def test_wrapper(self, tmp_work_path, extra_app_properties, base, expect_wrappers):
app_properties = dict(command="foo")
app_properties.update(extra_app_properties)
for exe in ["foo"] + app_properties.get("command-chain", list()):
exe_path = pathlib.Path(exe)
exe_path.touch()
exe_path.chmod(0o755)
app = application.Application.from_dict(app_name="foo", app_dict=app_properties)
assert app.can_use_wrapper(base=base) == expect_wrappers
class InvalidCommandChainTest(unit.TestCase):
def test_command_chain_path_not_found(self):
app = application.Application.from_dict(
app_name="foo", app_dict={"command-chain": "file-not-found"}
)
self.assertRaises(
errors.InvalidCommandChainError,
app.validate_command_chain_executables,
prime_dir=self.path,
)
def test_command_chain_path_not_executable(self):
open("file-not-executable", "w").close()
app = application.Application.from_dict(
app_name="foo", app_dict={"command-chain": "file-not-executable"}
)
self.assertRaises(
errors.InvalidCommandChainError,
app.validate_command_chain_executables,
prime_dir=self.path,
)
class DesktopFileTest(unit.TestCase):
def test_desktop_file(self):
desktop_file_path = "foo.desktop"
with open(desktop_file_path, "w") as desktop_file:
print("[Desktop Entry]", file=desktop_file)
print("Exec=in-snap-exe", file=desktop_file)
open("command-chain", "w").close()
os.chmod("command-chain", 0o755)
app = application.Application.from_dict(
app_name="foo", app_dict=dict(command="/foo", desktop=desktop_file_path)
)
desktop_file = desktop.DesktopFile(
snap_name="foo",
app_name=app.app_name,
filename=app.desktop,
prime_dir=self.path,
)
desktop_file.write(gui_dir="gui")
expected_desktop_file_path = os.path.join("gui", "foo.desktop")
self.expectThat(app.to_dict(), Not(Contains("desktop")))
self.expectThat(expected_desktop_file_path, FileExists())
class AppPassthroughTests(unit.TestCase):
def test_no_passthrough(self):
app = application.Application(
app_name="foo",
adapter=application.ApplicationAdapter.NONE,
command_chain=["test-command-chain"],
passthrough=None,
)
app_dict = app.to_dict()
self.assertThat(app_dict, Equals({"command-chain": ["test-command-chain"]}))
def test_passthrough_to_dict(self):
app = application.Application(
app_name="foo",
adapter=application.ApplicationAdapter.NONE,
command_chain=["test-command-chain"],
passthrough={"test-property": "test-value"},
)
app_dict = app.to_dict()
self.assertThat(
app_dict,
Equals(
{"command-chain": ["test-command-chain"], "test-property": "test-value"}
),
)
def test_passthrough_to_dict_from_dict(self):
app = application.Application.from_dict(
app_name="foo",
app_dict={
"adapter": "none",
"command-chain": ["test-command-chain"],
"passthrough": {"test-property": "test-value"},
},
)
app_dict = app.to_dict()
self.assertThat(
app_dict,
Equals(
{"command-chain": ["test-command-chain"], "test-property": "test-value"}
),
)
| gpl-3.0 | -3,080,333,936,170,977,300 | 31.378738 | 88 | 0.546275 | false |
apehua/pilas | pilasengine/interprete/lanas_ui.py | 2 | 2136 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'pilasengine/interprete/lanas.ui'
#
# by: PyQt4 UI code generator 4.11.3
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_Lanas(object):
def setupUi(self, Lanas):
Lanas.setObjectName(_fromUtf8("Lanas"))
Lanas.resize(656, 349)
self.verticalLayout = QtGui.QVBoxLayout(Lanas)
self.verticalLayout.setSizeConstraint(QtGui.QLayout.SetDefaultConstraint)
self.verticalLayout.setMargin(0)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.widget_interprete = QtGui.QStackedWidget(Lanas)
self.widget_interprete.setObjectName(_fromUtf8("widget_interprete"))
self.verticalLayout.addWidget(self.widget_interprete)
self.consejo = QtGui.QLabel(Lanas)
self.consejo.setFrameShape(QtGui.QFrame.NoFrame)
self.consejo.setFrameShadow(QtGui.QFrame.Plain)
self.consejo.setLineWidth(0)
self.consejo.setMidLineWidth(0)
self.consejo.setText(_fromUtf8(""))
self.consejo.setObjectName(_fromUtf8("consejo"))
self.verticalLayout.addWidget(self.consejo)
self.retranslateUi(Lanas)
QtCore.QMetaObject.connectSlotsByName(Lanas)
def retranslateUi(self, Lanas):
Lanas.setWindowTitle(_translate("Lanas", "Lanas - Interprete de Python", None))
Lanas.setToolTip(_translate("Lanas", "Guardar contenido del interprete", None))
if __name__ == "__main__":
import sys
app = QtGui.QApplication(sys.argv)
Lanas = QtGui.QWidget()
ui = Ui_Lanas()
ui.setupUi(Lanas)
Lanas.show()
sys.exit(app.exec_())
| lgpl-3.0 | -6,399,663,227,094,866,000 | 34.016393 | 87 | 0.692416 | false |
schmidtc/pysal | pysal/contrib/handler/tests/test_error_spet_sparse.py | 1 | 20565 | import unittest
import pysal
import numpy as np
from scipy import sparse
#from pysal.spreg import error_sp_het as HET
from functools import partial
from pysal.contrib.handler import Model
GM_Error_Het = partial(Model, mtype='GM_Error_Het')
GM_Endog_Error_Het = partial(Model, mtype='GM_Endog_Error_Het')
GM_Combo_Het = partial(Model, mtype='GM_Combo_Het')
BaseGM_Error_Het = partial(Model, mtype='BaseGM_Error_Het')
BaseGM_Endog_Error_Het = partial(Model, mtype='BaseGM_Endog_Error_Het')
BaseGM_Combo_Het = partial(Model, mtype='BaseGM_Combo_Het')
class TestBaseGMErrorHet(unittest.TestCase):
def setUp(self):
db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
y = np.array(db.by_col("HOVAL"))
self.y = np.reshape(y, (49,1))
X = []
X.append(db.by_col("INC"))
X.append(db.by_col("CRIME"))
self.X = np.array(X).T
self.X = np.hstack((np.ones(self.y.shape),self.X))
self.X = sparse.csr_matrix(self.X)
self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
self.w.transform = 'r'
def test_model(self):
reg = BaseGM_Error_Het(self.y, self.X, self.w.sparse, step1c=True)
betas = np.array([[ 47.99626638], [ 0.71048989], [ -0.55876126], [ 0.41178776]])
np.testing.assert_array_almost_equal(reg.betas,betas,7)
u = np.array([ 27.38122697])
np.testing.assert_array_almost_equal(reg.u[0],u,7)
ef = np.array([ 32.29765975])
np.testing.assert_array_almost_equal(reg.e_filtered[0],ef,7)
predy = np.array([ 53.08577603])
np.testing.assert_array_almost_equal(reg.predy[0],predy,7)
n = 49
self.assertAlmostEqual(reg.n,n)
k = 3
self.assertAlmostEqual(reg.k,k)
y = np.array([ 80.467003])
np.testing.assert_array_almost_equal(reg.y[0],y,7)
x = np.array([ 1. , 19.531 , 15.72598])
np.testing.assert_array_almost_equal(reg.x[0].toarray()[0],x,7)
i_s = 'Maximum number of iterations reached.'
np.testing.assert_string_equal(reg.iter_stop,i_s)
its = 1
self.assertAlmostEqual(reg.iteration,its,7)
my = 38.436224469387746
self.assertAlmostEqual(reg.mean_y,my)
stdy = 18.466069465206047
self.assertAlmostEqual(reg.std_y,stdy)
vm = np.array([[ 1.31767529e+02, -3.58368748e+00, -1.65090647e+00,
0.00000000e+00],
[ -3.58368748e+00, 1.35513711e-01, 3.77539055e-02,
0.00000000e+00],
[ -1.65090647e+00, 3.77539055e-02, 2.61042702e-02,
0.00000000e+00],
[ 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
2.82398517e-02]])
np.testing.assert_array_almost_equal(reg.vm,vm,6)
xtx = np.array([[ 4.90000000e+01, 7.04371999e+02, 1.72131237e+03],
[ 7.04371999e+02, 1.16866734e+04, 2.15575320e+04],
[ 1.72131237e+03, 2.15575320e+04, 7.39058986e+04]])
np.testing.assert_array_almost_equal(reg.xtx,xtx,4)
class TestGMErrorHet(unittest.TestCase):
def setUp(self):
db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
y = np.array(db.by_col("HOVAL"))
self.y = np.reshape(y, (49,1))
X = []
X.append(db.by_col("INC"))
X.append(db.by_col("CRIME"))
self.X = np.array(X).T
self.X = sparse.csr_matrix(self.X)
self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
self.w.transform = 'r'
def test_model(self):
reg = GM_Error_Het(self.y, self.X, self.w, step1c=True)
betas = np.array([[ 47.99626638], [ 0.71048989], [ -0.55876126], [ 0.41178776]])
np.testing.assert_array_almost_equal(reg.betas,betas,7)
u = np.array([ 27.38122697])
np.testing.assert_array_almost_equal(reg.u[0],u,7)
ef = np.array([ 32.29765975])
np.testing.assert_array_almost_equal(reg.e_filtered[0],ef,7)
predy = np.array([ 53.08577603])
np.testing.assert_array_almost_equal(reg.predy[0],predy,7)
n = 49
self.assertAlmostEqual(reg.n,n)
k = 3
self.assertAlmostEqual(reg.k,k)
y = np.array([ 80.467003])
np.testing.assert_array_almost_equal(reg.y[0],y,7)
x = np.array([ 1. , 19.531 , 15.72598])
np.testing.assert_array_almost_equal(reg.x[0].toarray()[0],x,7)
i_s = 'Maximum number of iterations reached.'
np.testing.assert_string_equal(reg.iter_stop,i_s)
its = 1
self.assertAlmostEqual(reg.iteration,its,7)
my = 38.436224469387746
self.assertAlmostEqual(reg.mean_y,my)
stdy = 18.466069465206047
self.assertAlmostEqual(reg.std_y,stdy)
vm = np.array([[ 1.31767529e+02, -3.58368748e+00, -1.65090647e+00,
0.00000000e+00],
[ -3.58368748e+00, 1.35513711e-01, 3.77539055e-02,
0.00000000e+00],
[ -1.65090647e+00, 3.77539055e-02, 2.61042702e-02,
0.00000000e+00],
[ 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,
2.82398517e-02]])
np.testing.assert_array_almost_equal(reg.vm,vm,6)
pr2 = 0.34951013222581306
self.assertAlmostEqual(reg.pr2,pr2)
stde = np.array([ 11.47900385, 0.36812187, 0.16156816, 0.16804717])
np.testing.assert_array_almost_equal(reg.std_err,stde,4)
z_stat = np.array([[ 4.18122226e+00, 2.89946274e-05],
[ 1.93003988e+00, 5.36018970e-02],
[ -3.45836247e+00, 5.43469673e-04],
[ 2.45042960e+00, 1.42685863e-02]])
np.testing.assert_array_almost_equal(reg.z_stat,z_stat,4)
xtx = np.array([[ 4.90000000e+01, 7.04371999e+02, 1.72131237e+03],
[ 7.04371999e+02, 1.16866734e+04, 2.15575320e+04],
[ 1.72131237e+03, 2.15575320e+04, 7.39058986e+04]])
np.testing.assert_array_almost_equal(reg.xtx,xtx,4)
class TestBaseGMEndogErrorHet(unittest.TestCase):
def setUp(self):
db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
y = np.array(db.by_col("HOVAL"))
self.y = np.reshape(y, (49,1))
X = []
X.append(db.by_col("INC"))
self.X = np.array(X).T
self.X = np.hstack((np.ones(self.y.shape),self.X))
self.X = sparse.csr_matrix(self.X)
yd = []
yd.append(db.by_col("CRIME"))
self.yd = np.array(yd).T
q = []
q.append(db.by_col("DISCBD"))
self.q = np.array(q).T
self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
self.w.transform = 'r'
def test_model(self):
reg = BaseGM_Endog_Error_Het(self.y, self.X, self.yd, self.q, self.w.sparse, step1c=True)
betas = np.array([[ 55.39707924], [ 0.46563046], [ -0.67038326], [ 0.41135023]])
np.testing.assert_array_almost_equal(reg.betas,betas,7)
u = np.array([ 26.51812895])
np.testing.assert_array_almost_equal(reg.u[0],u,7)
ef = np.array([ 31.46604707])
np.testing.assert_array_almost_equal(reg.e_filtered[0],ef,7)
predy = np.array([ 53.94887405])
np.testing.assert_array_almost_equal(reg.predy[0],predy,7)
n = 49
self.assertAlmostEqual(reg.n,n)
k = 3
self.assertAlmostEqual(reg.k,k)
y = np.array([ 80.467003])
np.testing.assert_array_almost_equal(reg.y[0],y,7)
x = np.array([ 1. , 19.531])
np.testing.assert_array_almost_equal(reg.x[0].toarray()[0],x,7)
yend = np.array([ 15.72598])
np.testing.assert_array_almost_equal(reg.yend[0],yend,7)
q = np.array([ 5.03])
np.testing.assert_array_almost_equal(reg.q[0],q,7)
z = np.array([ 1. , 19.531 , 15.72598])
np.testing.assert_array_almost_equal(reg.z[0].toarray()[0],z,7)
h = np.array([ 1. , 19.531, 5.03 ])
np.testing.assert_array_almost_equal(reg.h[0].toarray()[0],h,7)
i_s = 'Maximum number of iterations reached.'
np.testing.assert_string_equal(reg.iter_stop,i_s)
its = 1
self.assertAlmostEqual(reg.iteration,its,7)
my = 38.436224469387746
self.assertAlmostEqual(reg.mean_y,my)
stdy = 18.466069465206047
self.assertAlmostEqual(reg.std_y,stdy)
vm = np.array([[ 8.34637805e+02, -2.16932259e+01, -1.33327894e+01,
1.65840848e+00],
[ -2.16932259e+01, 5.97683070e-01, 3.39503523e-01,
-3.90111107e-02],
[ -1.33327894e+01, 3.39503523e-01, 2.19008080e-01,
-2.81929695e-02],
[ 1.65840848e+00, -3.90111107e-02, -2.81929695e-02,
3.15686105e-02]])
np.testing.assert_array_almost_equal(reg.vm,vm,6)
hth = np.array([[ 49. , 704.371999 , 139.75 ],
[ 704.371999 , 11686.67338121, 2246.12800625],
[ 139.75 , 2246.12800625, 498.5851 ]])
np.testing.assert_array_almost_equal(reg.hth,hth,6)
class TestGMEndogErrorHet(unittest.TestCase):
def setUp(self):
db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
y = np.array(db.by_col("HOVAL"))
self.y = np.reshape(y, (49,1))
X = []
X.append(db.by_col("INC"))
self.X = np.array(X).T
self.X = sparse.csr_matrix(self.X)
yd = []
yd.append(db.by_col("CRIME"))
self.yd = np.array(yd).T
q = []
q.append(db.by_col("DISCBD"))
self.q = np.array(q).T
self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
self.w.transform = 'r'
def test_model(self):
reg = GM_Endog_Error_Het(self.y, self.X, self.yd, self.q, self.w, step1c=True)
betas = np.array([[ 55.39707924], [ 0.46563046], [ -0.67038326], [ 0.41135023]])
np.testing.assert_array_almost_equal(reg.betas,betas,7)
u = np.array([ 26.51812895])
np.testing.assert_array_almost_equal(reg.u[0],u,7)
predy = np.array([ 53.94887405])
np.testing.assert_array_almost_equal(reg.predy[0],predy,7)
n = 49
self.assertAlmostEqual(reg.n,n)
k = 3
self.assertAlmostEqual(reg.k,k)
y = np.array([ 80.467003])
np.testing.assert_array_almost_equal(reg.y[0],y,7)
x = np.array([ 1. , 19.531])
np.testing.assert_array_almost_equal(reg.x[0].toarray()[0],x,7)
yend = np.array([ 15.72598])
np.testing.assert_array_almost_equal(reg.yend[0],yend,7)
q = np.array([ 5.03])
np.testing.assert_array_almost_equal(reg.q[0],q,7)
z = np.array([ 1. , 19.531 , 15.72598])
np.testing.assert_array_almost_equal(reg.z[0].toarray()[0],z,7)
h = np.array([ 1. , 19.531, 5.03 ])
np.testing.assert_array_almost_equal(reg.h[0].toarray()[0],h,7)
i_s = 'Maximum number of iterations reached.'
np.testing.assert_string_equal(reg.iter_stop,i_s)
its = 1
self.assertAlmostEqual(reg.iteration,its,7)
my = 38.436224469387746
self.assertAlmostEqual(reg.mean_y,my)
stdy = 18.466069465206047
self.assertAlmostEqual(reg.std_y,stdy)
vm = np.array([[ 8.34637805e+02, -2.16932259e+01, -1.33327894e+01,
1.65840848e+00],
[ -2.16932259e+01, 5.97683070e-01, 3.39503523e-01,
-3.90111107e-02],
[ -1.33327894e+01, 3.39503523e-01, 2.19008080e-01,
-2.81929695e-02],
[ 1.65840848e+00, -3.90111107e-02, -2.81929695e-02,
3.15686105e-02]])
np.testing.assert_array_almost_equal(reg.vm,vm,6)
pr2 = 0.34648011338954804
self.assertAlmostEqual(reg.pr2,pr2,7)
std_err = np.array([ 28.89009873, 0.77309965, 0.46798299,
0.17767558])
np.testing.assert_array_almost_equal(reg.std_err,std_err,6)
z_stat = np.array([(1.9175109006819244, 0.055173057472126787), (0.60229035155742305, 0.54698088217644414), (-1.4324949211864271, 0.15200223057569454), (2.3151759776869496, 0.020603303355572443)])
np.testing.assert_array_almost_equal(reg.z_stat,z_stat,6)
hth = np.array([[ 49. , 704.371999 , 139.75 ],
[ 704.371999 , 11686.67338121, 2246.12800625],
[ 139.75 , 2246.12800625, 498.5851 ]])
np.testing.assert_array_almost_equal(reg.hth,hth,6)
class TestBaseGMComboHet(unittest.TestCase):
def setUp(self):
db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
y = np.array(db.by_col("HOVAL"))
self.y = np.reshape(y, (49,1))
X = []
X.append(db.by_col("INC"))
X.append(db.by_col("CRIME"))
self.X = np.array(X).T
self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
self.w.transform = 'r'
def test_model(self):
# Only spatial lag
yd2, q2 = pysal.spreg.utils.set_endog(self.y, self.X, self.w, None, None, 1, True)
self.X = np.hstack((np.ones(self.y.shape),self.X))
self.X = sparse.csr_matrix(self.X)
reg = BaseGM_Combo_Het(self.y, self.X, yend=yd2, q=q2, w=self.w.sparse, step1c=True)
betas = np.array([[ 57.7778574 ], [ 0.73034922], [ -0.59257362], [ -0.2230231 ], [ 0.56636724]])
np.testing.assert_array_almost_equal(reg.betas,betas,7)
u = np.array([ 25.65156033])
np.testing.assert_array_almost_equal(reg.u[0],u,7)
ef = np.array([ 31.87664403])
np.testing.assert_array_almost_equal(reg.e_filtered[0],ef,7)
predy = np.array([ 54.81544267])
np.testing.assert_array_almost_equal(reg.predy[0],predy,7)
n = 49
self.assertAlmostEqual(reg.n,n)
k = 4
self.assertAlmostEqual(reg.k,k)
y = np.array([ 80.467003])
np.testing.assert_array_almost_equal(reg.y[0],y,7)
x = np.array([ 1. , 19.531 , 15.72598])
np.testing.assert_array_almost_equal(reg.x[0].toarray()[0],x,7)
yend = np.array([ 35.4585005])
np.testing.assert_array_almost_equal(reg.yend[0],yend,7)
q = np.array([ 18.594 , 24.7142675])
np.testing.assert_array_almost_equal(reg.q[0],q,7)
z = np.array([ 1. , 19.531 , 15.72598 , 35.4585005])
np.testing.assert_array_almost_equal(reg.z[0].toarray()[0],z,7)
i_s = 'Maximum number of iterations reached.'
np.testing.assert_string_equal(reg.iter_stop,i_s)
its = 1
self.assertAlmostEqual(reg.iteration,its,7)
my = 38.436224469387746
self.assertAlmostEqual(reg.mean_y,my)
stdy = 18.466069465206047
self.assertAlmostEqual(reg.std_y,stdy,7)
vm = np.array([[ 4.86218274e+02, -2.77268729e+00, -1.59987770e+00,
-1.01969471e+01, 2.74302006e+00],
[ -2.77268729e+00, 1.04680972e-01, 2.51172238e-02,
1.95136385e-03, 3.70052723e-03],
[ -1.59987770e+00, 2.51172238e-02, 2.15655720e-02,
7.65868344e-03, -7.30173070e-03],
[ -1.01969471e+01, 1.95136385e-03, 7.65868344e-03,
2.78273684e-01, -6.89402590e-02],
[ 2.74302006e+00, 3.70052723e-03, -7.30173070e-03,
-6.89402590e-02, 7.12034037e-02]])
np.testing.assert_array_almost_equal(reg.vm,vm,6)
hth = np.array([[ 4.90000000e+01, 7.04371999e+02, 1.72131237e+03,
7.24743592e+02, 1.70735413e+03],
[ 7.04371999e+02, 1.16866734e+04, 2.15575320e+04,
1.10925200e+04, 2.23848036e+04],
[ 1.72131237e+03, 2.15575320e+04, 7.39058986e+04,
2.34796298e+04, 6.70145378e+04],
[ 7.24743592e+02, 1.10925200e+04, 2.34796298e+04,
1.16146226e+04, 2.30304624e+04],
[ 1.70735413e+03, 2.23848036e+04, 6.70145378e+04,
2.30304624e+04, 6.69879858e+04]])
np.testing.assert_array_almost_equal(reg.hth,hth,4)
class TestGMComboHet(unittest.TestCase):
def setUp(self):
db=pysal.open(pysal.examples.get_path("columbus.dbf"),"r")
y = np.array(db.by_col("HOVAL"))
self.y = np.reshape(y, (49,1))
X = []
X.append(db.by_col("INC"))
X.append(db.by_col("CRIME"))
self.X = np.array(X).T
self.X = sparse.csr_matrix(self.X)
self.w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
self.w.transform = 'r'
def test_model(self):
# Only spatial lag
reg = GM_Combo_Het(self.y, self.X, w=self.w, step1c=True)
betas = np.array([[ 57.7778574 ], [ 0.73034922], [ -0.59257362], [ -0.2230231 ], [ 0.56636724]])
np.testing.assert_array_almost_equal(reg.betas,betas,7)
u = np.array([ 25.65156033])
np.testing.assert_array_almost_equal(reg.u[0],u,7)
ef = np.array([ 31.87664403])
np.testing.assert_array_almost_equal(reg.e_filtered[0],ef,7)
ep = np.array([ 28.30648145])
np.testing.assert_array_almost_equal(reg.e_pred[0],ep,7)
pe = np.array([ 52.16052155])
np.testing.assert_array_almost_equal(reg.predy_e[0],pe,7)
predy = np.array([ 54.81544267])
np.testing.assert_array_almost_equal(reg.predy[0],predy,7)
n = 49
self.assertAlmostEqual(reg.n,n)
k = 4
self.assertAlmostEqual(reg.k,k)
y = np.array([ 80.467003])
np.testing.assert_array_almost_equal(reg.y[0],y,7)
x = np.array([ 1. , 19.531 , 15.72598])
np.testing.assert_array_almost_equal(reg.x[0].toarray()[0],x,7)
yend = np.array([ 35.4585005])
np.testing.assert_array_almost_equal(reg.yend[0],yend,7)
q = np.array([ 18.594 , 24.7142675])
np.testing.assert_array_almost_equal(reg.q[0].toarray()[0],q,7)
z = np.array([ 1. , 19.531 , 15.72598 , 35.4585005])
np.testing.assert_array_almost_equal(reg.z[0].toarray()[0],z,7)
i_s = 'Maximum number of iterations reached.'
np.testing.assert_string_equal(reg.iter_stop,i_s)
its = 1
self.assertAlmostEqual(reg.iteration,its,7)
my = 38.436224469387746
self.assertAlmostEqual(reg.mean_y,my)
stdy = 18.466069465206047
self.assertAlmostEqual(reg.std_y,stdy)
vm = np.array([[ 4.86218274e+02, -2.77268729e+00, -1.59987770e+00,
-1.01969471e+01, 2.74302006e+00],
[ -2.77268729e+00, 1.04680972e-01, 2.51172238e-02,
1.95136385e-03, 3.70052723e-03],
[ -1.59987770e+00, 2.51172238e-02, 2.15655720e-02,
7.65868344e-03, -7.30173070e-03],
[ -1.01969471e+01, 1.95136385e-03, 7.65868344e-03,
2.78273684e-01, -6.89402590e-02],
[ 2.74302006e+00, 3.70052723e-03, -7.30173070e-03,
-6.89402590e-02, 7.12034037e-02]])
np.testing.assert_array_almost_equal(reg.vm,vm,6)
pr2 = 0.3001582877472412
self.assertAlmostEqual(reg.pr2,pr2,7)
pr2_e = 0.35613102283621967
self.assertAlmostEqual(reg.pr2_e,pr2_e,7)
std_err = np.array([ 22.05035768, 0.32354439, 0.14685221, 0.52751653, 0.26683966])
np.testing.assert_array_almost_equal(reg.std_err,std_err,6)
z_stat = np.array([(2.6202684885795335, 0.00878605635338265), (2.2573385444145524, 0.023986928627746887), (-4.0351698589183433, 5.456281036278686e-05), (-0.42277935292121521, 0.67245625315942159), (2.1225002455741895, 0.033795752094112265)])
np.testing.assert_array_almost_equal(reg.z_stat,z_stat,6)
hth = np.array([[ 4.90000000e+01, 7.04371999e+02, 1.72131237e+03,
7.24743592e+02, 1.70735413e+03],
[ 7.04371999e+02, 1.16866734e+04, 2.15575320e+04,
1.10925200e+04, 2.23848036e+04],
[ 1.72131237e+03, 2.15575320e+04, 7.39058986e+04,
2.34796298e+04, 6.70145378e+04],
[ 7.24743592e+02, 1.10925200e+04, 2.34796298e+04,
1.16146226e+04, 2.30304624e+04],
[ 1.70735413e+03, 2.23848036e+04, 6.70145378e+04,
2.30304624e+04, 6.69879858e+04]])
np.testing.assert_array_almost_equal(reg.hth,hth,4)
if __name__ == '__main__':
unittest.main()
| bsd-3-clause | 3,940,776,996,582,257,000 | 47.502358 | 249 | 0.576903 | false |
idan/oauthlib | tests/oauth2/rfc6749/endpoints/test_extra_credentials.py | 1 | 2611 | """Ensure extra credentials can be supplied for inclusion in tokens.
"""
from unittest import mock
from oauthlib.oauth2 import (
BackendApplicationServer, LegacyApplicationServer, MobileApplicationServer,
RequestValidator, WebApplicationServer,
)
from tests.unittest import TestCase
class ExtraCredentialsTest(TestCase):
def set_client(self, request):
request.client = mock.MagicMock()
request.client.client_id = 'mocked'
return True
def setUp(self):
self.validator = mock.MagicMock(spec=RequestValidator)
self.validator.get_default_redirect_uri.return_value = 'https://i.b/cb'
self.web = WebApplicationServer(self.validator)
self.mobile = MobileApplicationServer(self.validator)
self.legacy = LegacyApplicationServer(self.validator)
self.backend = BackendApplicationServer(self.validator)
def test_post_authorization_request(self):
def save_code(client_id, token, request):
self.assertEqual('creds', request.extra)
def save_token(token, request):
self.assertEqual('creds', request.extra)
# Authorization code grant
self.validator.save_authorization_code.side_effect = save_code
self.web.create_authorization_response(
'https://i.b/auth?client_id=foo&response_type=code',
scopes=['foo'],
credentials={'extra': 'creds'})
# Implicit grant
self.validator.save_bearer_token.side_effect = save_token
self.mobile.create_authorization_response(
'https://i.b/auth?client_id=foo&response_type=token',
scopes=['foo'],
credentials={'extra': 'creds'})
def test_token_request(self):
def save_token(token, request):
self.assertIn('extra', token)
self.validator.save_bearer_token.side_effect = save_token
self.validator.authenticate_client.side_effect = self.set_client
# Authorization code grant
self.web.create_token_response('https://i.b/token',
body='grant_type=authorization_code&code=foo',
credentials={'extra': 'creds'})
# Password credentials grant
self.legacy.create_token_response('https://i.b/token',
body='grant_type=password&username=foo&password=bar',
credentials={'extra': 'creds'})
# Client credentials grant
self.backend.create_token_response('https://i.b/token',
body='grant_type=client_credentials',
credentials={'extra': 'creds'})
| bsd-3-clause | 3,686,196,729,399,241,700 | 36.84058 | 79 | 0.642283 | false |
comiconomenclaturist/Airtime | python_apps/pypo/pypo/testpypoliqqueue.py | 12 | 2336 | from pypoliqqueue import PypoLiqQueue
from telnetliquidsoap import DummyTelnetLiquidsoap, TelnetLiquidsoap
from Queue import Queue
from threading import Lock
import sys
import signal
import logging
from datetime import datetime
from datetime import timedelta
def keyboardInterruptHandler(signum, frame):
logger = logging.getLogger()
logger.info('\nKeyboard Interrupt\n')
sys.exit(0)
signal.signal(signal.SIGINT, keyboardInterruptHandler)
# configure logging
format = '%(levelname)s - %(pathname)s - %(lineno)s - %(asctime)s - %(message)s'
logging.basicConfig(level=logging.DEBUG, format=format)
logging.captureWarnings(True)
telnet_lock = Lock()
pypoPush_q = Queue()
pypoLiq_q = Queue()
liq_queue_tracker = {
"s0": None,
"s1": None,
"s2": None,
"s3": None,
}
#dummy_telnet_liquidsoap = DummyTelnetLiquidsoap(telnet_lock, logging)
dummy_telnet_liquidsoap = TelnetLiquidsoap(telnet_lock, logging, \
"localhost", \
1234)
plq = PypoLiqQueue(pypoLiq_q, telnet_lock, logging, liq_queue_tracker, \
dummy_telnet_liquidsoap)
plq.daemon = True
plq.start()
print "Time now: %s" % datetime.utcnow()
media_schedule = {}
start_dt = datetime.utcnow() + timedelta(seconds=1)
end_dt = datetime.utcnow() + timedelta(seconds=6)
media_schedule[start_dt] = {"id": 5, \
"type":"file", \
"row_id":9, \
"uri":"", \
"dst":"/home/martin/Music/ipod/Hot Chocolate - You Sexy Thing.mp3", \
"fade_in":0, \
"fade_out":0, \
"cue_in":0, \
"cue_out":300, \
"start": start_dt, \
"end": end_dt, \
"show_name":"Untitled", \
"replay_gain": 0, \
"independent_event": True \
}
start_dt = datetime.utcnow() + timedelta(seconds=2)
end_dt = datetime.utcnow() + timedelta(seconds=6)
media_schedule[start_dt] = {"id": 5, \
"type":"file", \
"row_id":9, \
"uri":"", \
"dst":"/home/martin/Music/ipod/Good Charlotte - bloody valentine.mp3", \
"fade_in":0, \
"fade_out":0, \
"cue_in":0, \
"cue_out":300, \
"start": start_dt, \
"end": end_dt, \
"show_name":"Untitled", \
"replay_gain": 0, \
"independent_event": True \
}
pypoLiq_q.put(media_schedule)
plq.join()
| gpl-3.0 | 8,531,366,200,700,141,000 | 22.836735 | 80 | 0.602312 | false |
jml/flocker | flocker/ca/test/test_ca.py | 3 | 25789 | # Copyright ClusterHQ Inc. See LICENSE file for details.
"""
Tests for certification logic in ``flocker.ca._ca``
"""
import datetime
import os
from uuid import uuid4, UUID
from Crypto.Util import asn1
from OpenSSL import crypto
from twisted.trial.unittest import SynchronousTestCase
from twisted.python.filepath import FilePath
from .. import (RootCredential, ControlCredential, NodeCredential,
UserCredential, PathError, EXPIRY_20_YEARS,
AUTHORITY_CERTIFICATE_FILENAME, AUTHORITY_KEY_FILENAME)
from ..testtools import assert_has_extension
from ...testtools import not_root, skip_on_broken_permissions
NODE_UUID = str(uuid4())
def make_credential_tests(cls, expected_file_name, **kwargs):
class CredentialTests(SynchronousTestCase):
"""
Base test case for credential tests.
"""
def setUp(self):
self.start_date = datetime.datetime.utcnow()
self.expiry_date = self.start_date + datetime.timedelta(
seconds=EXPIRY_20_YEARS)
self.cert_file_name = expected_file_name + b".crt"
self.key_file_name = expected_file_name + b".key"
self.path = FilePath(self.mktemp())
self.path.makedirs()
self.ca = RootCredential.initialize(
self.path, b"mycluster", begin=self.start_date
)
self.credential = cls.initialize(
self.path, self.ca, begin=self.start_date, **kwargs
)
for k, v in kwargs.iteritems():
setattr(self, k, v)
def test_certificate_matches_public_key(self):
"""
A certificate's public key matches the public key it is
meant to be paired with.
"""
self.assertTrue(
self.credential.credential.keypair.keypair.matches(
self.credential.credential.certificate.getPublicKey())
)
def test_certificate_matches_private_key(self):
"""
A certificate matches the private key it is meant to
be paired with.
"""
priv = self.credential.credential.keypair.keypair.original
pub = self.credential.credential.certificate
pub = pub.getPublicKey().original
pub_asn1 = crypto.dump_privatekey(crypto.FILETYPE_ASN1, pub)
priv_asn1 = crypto.dump_privatekey(crypto.FILETYPE_ASN1, priv)
pub_der = asn1.DerSequence()
pub_der.decode(pub_asn1)
priv_der = asn1.DerSequence()
priv_der.decode(priv_asn1)
pub_modulus = pub_der[1]
priv_modulus = priv_der[1]
self.assertEqual(pub_modulus, priv_modulus)
def test_written_keypair_reloads(self):
"""
A keypair written by ``UserCredential.initialize`` can be
successfully reloaded in to an identical ``ControlCertificate``
instance.
"""
self.assertEqual(
self.credential,
cls.from_path(self.path, **kwargs)
)
def test_create_error_on_non_existent_path(self):
"""
A ``PathError`` is raised if the path given to
``UserCredential.initialize`` does not exist.
"""
path = FilePath(self.mktemp())
e = self.assertRaises(
PathError, cls.initialize,
path, self.ca, **kwargs
)
expected = (b"Unable to write certificate file. "
b"No such file or directory {path}").format(
path=path.child(self.cert_file_name).path)
self.assertEqual(str(e), expected)
def test_load_error_on_non_existent_path(self):
"""
A ``PathError`` is raised if the path given to
``UserCredential.from_path`` does not exist.
"""
path = FilePath(self.mktemp())
e = self.assertRaises(
PathError, cls.from_path,
path, **kwargs
)
expected = (b"Certificate file could not be opened. "
b"No such file or directory {path}").format(
path=path.child(self.cert_file_name).path)
self.assertEqual(str(e), expected)
def test_load_error_on_non_existent_certificate_file(self):
"""
A ``PathError`` is raised if the certificate file path given to
``UserCredential.from_path`` does not exist.
"""
path = FilePath(self.mktemp())
path.makedirs()
e = self.assertRaises(
PathError, cls.from_path,
path, **kwargs
)
expected = ("Certificate file could not be opened. "
"No such file or directory "
"{path}").format(
path=path.child(self.cert_file_name).path)
self.assertEqual(str(e), expected)
def test_load_error_on_non_existent_key_file(self):
"""
A ``PathError`` is raised if the key file path given to
``UserCredential.from_path`` does not exist.
"""
path = FilePath(self.mktemp())
path.makedirs()
crt_path = path.child(self.cert_file_name)
crt_file = crt_path.open(b'w')
crt_file.write(b"dummy")
crt_file.close()
e = self.assertRaises(
PathError, cls.from_path,
path, **kwargs
)
expected = ("Private key file could not be opened. "
"No such file or directory "
"{path}").format(
path=path.child(self.key_file_name).path)
self.assertEqual(str(e), expected)
@not_root
@skip_on_broken_permissions
def test_load_error_on_unreadable_certificate_file(self):
"""
A ``PathError`` is raised if the certificate file path given to
``UserCredential.from_path`` cannot be opened for reading.
"""
path = FilePath(self.mktemp())
path.makedirs()
crt_path = path.child(self.cert_file_name)
crt_file = crt_path.open(b'w')
crt_file.write(b"dummy")
crt_file.close()
# make file unreadable
crt_path.chmod(0o100)
key_path = path.child(self.key_file_name)
key_file = key_path.open(b'w')
key_file.write(b"dummy")
key_file.close()
# make file unreadable
key_path.chmod(0o100)
e = self.assertRaises(
PathError, cls.from_path,
path, **kwargs
)
expected = (
"Certificate file could not be opened. "
"Permission denied {path}"
).format(path=crt_path.path)
self.assertEqual(str(e), expected)
@not_root
@skip_on_broken_permissions
def test_load_error_on_unreadable_key_file(self):
"""
A ``PathError`` is raised if the key file path given to
``UserCredential.from_path`` cannot be opened for reading.
"""
path = FilePath(self.mktemp())
path.makedirs()
crt_path = path.child(self.cert_file_name)
crt_file = crt_path.open(b'w')
crt_file.write(b"dummy")
crt_file.close()
key_path = path.child(self.key_file_name)
key_file = key_path.open(b'w')
key_file.write(b"dummy")
key_file.close()
# make file unreadable
key_path.chmod(0o100)
e = self.assertRaises(
PathError, cls.from_path,
path, **kwargs
)
expected = (
"Private key file could not be opened. "
"Permission denied {path}"
).format(path=key_path.path)
self.assertEqual(str(e), expected)
def test_certificate_ou_matches_ca(self):
"""
A certificate written by ``UserCredential.initialize`` has the
issuing authority's organizational unit as its organizational
unit name.
"""
cert = self.credential.credential.certificate.original
issuer = cert.get_issuer()
subject = cert.get_subject()
self.assertEqual(
issuer.OU,
subject.OU
)
def test_certificate_is_signed_by_ca(self):
"""
A certificate written by ``UserCredential.initialize`` is signed by
the certificate authority.
"""
cert = self.credential.credential.certificate.original
issuer = cert.get_issuer()
self.assertEqual(
issuer.CN,
self.ca.credential.certificate.getSubject().CN
)
def test_certificate_expiration(self):
"""
A certificate written by ``UserCredential.initialize`` has an
expiry date 20 years from the date of signing.
"""
cert = self.credential.credential.certificate.original
date_str = cert.get_notAfter()
expected_expiry = self.expiry_date.strftime("%Y%m%d%H%M%SZ")
self.assertEqual(date_str, expected_expiry)
def test_certificate_is_rsa_4096_sha_256(self):
"""
A certificate written by ``UserCredential.initialize`` is an RSA
4096 bit, SHA-256 format.
"""
cert = self.credential.credential.certificate.original
key = self.credential.credential.certificate
key = key.getPublicKey().original
self.assertEqual(
(crypto.TYPE_RSA, 4096, b'sha256WithRSAEncryption'),
(key.type(), key.bits(), cert.get_signature_algorithm())
)
def test_keypair_correct_umask(self):
"""
A keypair file written by ``NodeCredential.initialize`` has
the correct permissions (0600).
"""
key_path = self.path.child(self.key_file_name)
st = os.stat(key_path.path)
self.assertEqual(b'0600', oct(st.st_mode & 0777))
def test_certificate_correct_permission(self):
"""
A certificate file written by ``NodeCredential.initialize`` has
the correct access mode set (0600).
"""
cert_path = self.path.child(self.cert_file_name)
st = os.stat(cert_path.path)
self.assertEqual(b'0600', oct(st.st_mode & 0777))
def test_written_keypair_exists(self):
"""
``NodeCredential.initialize`` writes a PEM file to the
specified path.
"""
self.assertEqual(
(True, True),
(self.path.child(self.cert_file_name).exists(),
self.path.child(self.key_file_name).exists())
)
return CredentialTests
class UserCredentialTests(
make_credential_tests(UserCredential, b"alice", username=u"alice")):
"""
Tests for ``flocker.ca._ca.UserCredential``.
"""
def test_certificate_subject_username(self):
"""
A certificate written by ``UserCredential.initialize`` has the
subject common name "user-{user}" where {user} is the username
supplied during the certificate's creation.
"""
cert = self.credential.credential.certificate.original
subject = cert.get_subject()
self.assertEqual(subject.CN, u"user-{user}".format(
user=self.credential.username))
def test_extendedKeyUsage(self):
"""
The generated certificate has extendedKeyUsage set to "clientAuth".
"""
assert_has_extension(self, self.credential.credential,
b"extendedKeyUsage", b"clientAuth")
class NodeCredentialTests(
make_credential_tests(NodeCredential, NODE_UUID, uuid=NODE_UUID)):
"""
Tests for ``flocker.ca._ca.NodeCredential``.
"""
def test_certificate_common_name_node_uuid(self):
"""
A certificate written by ``NodeCredential.initialize`` has the
subject common name "node-{uuid}" where {uuid} is the UUID
generated during the certificate's creation.
"""
cert = self.credential.credential.certificate.original
subject = cert.get_subject()
self.assertEqual(subject.CN, b"node-{uuid}".format(
uuid=self.credential.uuid))
def test_certificate_ou_cluster_uuid(self):
"""
A certificate written by ``NodeCredential.initialize`` has the
organizational unit name exposed as the ``cluster_uuid``
attribute.
"""
cert = self.credential.credential.certificate.original
subject = cert.get_subject()
self.assertEqual(UUID(hex=subject.OU), self.credential.cluster_uuid)
class ControlCredentialTests(
make_credential_tests(ControlCredential,
b"control-control.example.com",
hostname=b"control.example.com")):
"""
Tests for ``flocker.ca._ca.ControlCredential``.
"""
def test_certificate_subject_control_service(self):
"""
A certificate written by ``ControlCredential.initialize`` has the
subject common name "control-service"
"""
cert = self.credential.credential.certificate.original
subject = cert.get_subject()
self.assertEqual(
subject.CN, b"control-service")
def test_subjectAltName_dns(self):
"""
If given a domain name as hostname, the generated certificate has a
subjectAltName containing the given hostname as a DNS record.
"""
assert_has_extension(self, self.credential.credential,
b"subjectAltName",
b"DNS:control-service,DNS:control.example.com")
def test_subjectAltName_ipv4(self):
"""
If given a IPv4 address as the hostname, the generated certificate has
a subjectAltName containing with a IP record.
"""
credential = ControlCredential.initialize(
self.path, self.ca, begin=self.start_date, hostname=b"127.0.0.1")
assert_has_extension(self, credential.credential,
b"subjectAltName",
b"DNS:control-service,IP:127.0.0.1")
class RootCredentialTests(SynchronousTestCase):
"""
Tests for ``flocker.ca._ca.RootCredential``.
"""
def test_written_keypair_exists(self):
"""
``RootCredential.initialize`` writes a PEM file to the
specified path.
"""
path = FilePath(self.mktemp())
path.makedirs()
RootCredential.initialize(path, b"mycluster")
self.assertEqual(
(True, True),
(path.child(AUTHORITY_CERTIFICATE_FILENAME).exists(),
path.child(AUTHORITY_KEY_FILENAME).exists())
)
def test_certificate_matches_public_key(self):
"""
A certificate's public key matches the public key it is
meant to be paired with.
"""
path = FilePath(self.mktemp())
path.makedirs()
ca = RootCredential.initialize(path, b"mycluster")
self.assertTrue(
ca.credential.keypair.keypair.matches(
ca.credential.certificate.getPublicKey())
)
def test_certificate_matches_private_key(self):
"""
A certificate matches the private key it is meant to
be paired with.
"""
path = FilePath(self.mktemp())
path.makedirs()
ca = RootCredential.initialize(path, b"mycluster")
priv = ca.credential.keypair.keypair.original
pub = ca.credential.certificate.getPublicKey().original
pub_asn1 = crypto.dump_privatekey(crypto.FILETYPE_ASN1, pub)
priv_asn1 = crypto.dump_privatekey(crypto.FILETYPE_ASN1, priv)
pub_der = asn1.DerSequence()
pub_der.decode(pub_asn1)
priv_der = asn1.DerSequence()
priv_der.decode(priv_asn1)
pub_modulus = pub_der[1]
priv_modulus = priv_der[1]
self.assertEqual(pub_modulus, priv_modulus)
def test_written_keypair_reloads(self):
"""
A keypair written by ``RootCredential.initialize`` can be
successfully reloaded in to an identical ``RootCredential``
instance.
"""
path = FilePath(self.mktemp())
path.makedirs()
ca1 = RootCredential.initialize(path, b"mycluster")
ca2 = RootCredential.from_path(path)
self.assertEqual(ca1, ca2)
def test_keypair_correct_umask(self):
"""
A keypair file written by ``RootCredential.initialize`` has
the correct permissions (0600).
"""
path = FilePath(self.mktemp())
path.makedirs()
RootCredential.initialize(path, b"mycluster")
keyPath = path.child(AUTHORITY_KEY_FILENAME)
st = os.stat(keyPath.path)
self.assertEqual(b'0600', oct(st.st_mode & 0777))
def test_certificate_correct_permission(self):
"""
A certificate file written by ``RootCredential.initialize`` has
the correct access mode set (0600).
"""
path = FilePath(self.mktemp())
path.makedirs()
RootCredential.initialize(path, b"mycluster")
keyPath = path.child(AUTHORITY_CERTIFICATE_FILENAME)
st = os.stat(keyPath.path)
self.assertEqual(b'0600', oct(st.st_mode & 0777))
def test_create_error_on_non_existent_path(self):
"""
A ``PathError`` is raised if the path given to
``RootCredential.initialize`` does not exist.
"""
path = FilePath(self.mktemp())
e = self.assertRaises(
PathError, RootCredential.initialize, path, b"mycluster"
)
expected = ("Unable to write certificate file. "
"No such file or directory "
"{path}").format(path=path.child(
AUTHORITY_CERTIFICATE_FILENAME).path)
self.assertEqual(str(e), expected)
def test_load_error_on_non_existent_path(self):
"""
A ``PathError`` is raised if the path given to
``RootCredential.from_path`` does not exist.
"""
path = FilePath(self.mktemp())
e = self.assertRaises(
PathError, RootCredential.from_path, path
)
expected = (
"Unable to load certificate authority file. Please run "
"`flocker-ca initialize` to generate a new certificate "
"authority. No such file or directory {path}"
).format(path=path.child(AUTHORITY_CERTIFICATE_FILENAME).path)
self.assertEqual(str(e), expected)
def test_load_error_on_non_existent_certificate_file(self):
"""
A ``PathError`` is raised if the certificate file path given to
``RootCredential.from_path`` does not exist.
"""
path = FilePath(self.mktemp())
path.makedirs()
e = self.assertRaises(
PathError, RootCredential.from_path, path
)
expected = (
"Unable to load certificate authority file. Please run "
"`flocker-ca initialize` to generate a new certificate "
"authority. No such file or directory {path}"
).format(path=path.child(AUTHORITY_CERTIFICATE_FILENAME).path)
self.assertEqual(str(e), expected)
def test_load_error_on_non_existent_key_file(self):
"""
A ``PathError`` is raised if the key file path given to
``RootCredential.from_path`` does not exist.
"""
path = FilePath(self.mktemp())
path.makedirs()
crt_path = path.child(AUTHORITY_CERTIFICATE_FILENAME)
crt_file = crt_path.open(b'w')
crt_file.write(b"dummy")
crt_file.close()
e = self.assertRaises(
PathError, RootCredential.from_path, path
)
expected = (
"Unable to load certificate authority file. Please run "
"`flocker-ca initialize` to generate a new certificate "
"authority. No such file or directory {path}"
).format(path=path.child(AUTHORITY_KEY_FILENAME).path)
self.assertEqual(str(e), expected)
@not_root
@skip_on_broken_permissions
def test_load_error_on_unreadable_certificate_file(self):
"""
A ``PathError`` is raised if the certificate file path given to
``RootCredential.from_path`` cannot be opened for reading.
"""
path = FilePath(self.mktemp())
path.makedirs()
crt_path = path.child(AUTHORITY_CERTIFICATE_FILENAME)
crt_file = crt_path.open(b'w')
crt_file.write(b"dummy")
crt_file.close()
# make file unreadable
crt_path.chmod(0o100)
key_path = path.child(AUTHORITY_KEY_FILENAME)
key_file = key_path.open(b'w')
key_file.write(b"dummy")
key_file.close()
# make file unreadable
key_path.chmod(0o100)
e = self.assertRaises(
PathError, RootCredential.from_path, path
)
expected = (
"Unable to load certificate authority file. "
"Permission denied {path}"
).format(path=crt_path.path)
self.assertEqual(str(e), expected)
@not_root
@skip_on_broken_permissions
def test_load_error_on_unreadable_key_file(self):
"""
A ``PathError`` is raised if the key file path given to
``RootCredential.from_path`` cannot be opened for reading.
"""
path = FilePath(self.mktemp())
path.makedirs()
crt_path = path.child(AUTHORITY_CERTIFICATE_FILENAME)
crt_file = crt_path.open(b'w')
crt_file.write(b"dummy")
crt_file.close()
key_path = path.child(AUTHORITY_KEY_FILENAME)
key_file = key_path.open(b'w')
key_file.write(b"dummy")
key_file.close()
# make file unreadable
key_path.chmod(0o100)
e = self.assertRaises(
PathError, RootCredential.from_path, path
)
expected = (
"Unable to load certificate authority file. "
"Permission denied {path}"
).format(path=key_path.path)
self.assertEqual(str(e), expected)
def test_certificate_is_self_signed(self):
"""
A certificate written by ``RootCredential.initialize`` is a
self-signed certificate.
"""
path = FilePath(self.mktemp())
path.makedirs()
ca = RootCredential.initialize(path, b"mycluster")
cert = ca.credential.certificate.original
issuer = cert.get_issuer().get_components()
subject = cert.get_subject().get_components()
self.assertEqual(issuer, subject)
def test_certificate_expiration(self):
"""
A certificate written by ``RootCredential.initialize`` has an expiry
date 20 years from the date of signing.
XXX: This test is prone to intermittent failure depending on the time
of day it is run. Fixed in
https://github.com/ClusterHQ/flocker/pull/1339
"""
path = FilePath(self.mktemp())
path.makedirs()
start_date = datetime.datetime.utcnow()
expected_expiry = start_date + datetime.timedelta(
seconds=EXPIRY_20_YEARS)
expected_expiry = expected_expiry.strftime("%Y%m%d%H%M%SZ")
ca = RootCredential.initialize(path, b"mycluster", begin=start_date)
cert = ca.credential.certificate.original
date_str = cert.get_notAfter()
self.assertEqual(date_str, expected_expiry)
def test_certificate_is_rsa_4096_sha_256(self):
"""
A certificate written by ``RootCredential.initialize`` is an RSA
4096 bit, SHA-256 format.
"""
path = FilePath(self.mktemp())
path.makedirs()
ca = RootCredential.initialize(path, b"mycluster")
cert = ca.credential.certificate.original
key = ca.credential.certificate.getPublicKey().original
self.assertEqual(
(crypto.TYPE_RSA, 4096, b'sha256WithRSAEncryption'),
(key.type(), key.bits(), cert.get_signature_algorithm())
)
def test_cluster_uuid(self):
"""
Each certificate created by ``RootCredential.initialize`` has a unique
cluster UUID, stored in the distinguished name organizational unit
name.
"""
path = FilePath(self.mktemp())
path.makedirs()
ca = RootCredential.initialize(path, b"mycluster")
cert = ca.credential.certificate
path2 = FilePath(self.mktemp())
path2.makedirs()
ca2 = RootCredential.initialize(path2, b"mycluster2")
cert2 = ca2.credential.certificate
self.assertNotEqual(UUID(hex=cert.getSubject().OU),
UUID(hex=cert2.getSubject().OU))
def test_organizational_unit(self):
"""
``RootCredential.organizational_unit`` is its organizational unit.
"""
path = FilePath(self.mktemp())
path.makedirs()
RootCredential.initialize(path, b"mycluster")
ca = RootCredential.from_path(path)
self.assertEqual(ca.organizational_unit,
ca.credential.certificate.getSubject().OU)
| apache-2.0 | 4,754,824,312,859,952,000 | 37.149408 | 79 | 0.575788 | false |
chrisfilo/Neurosynth | neurosynth/analysis/meta.py | 1 | 10482 | """ Meta-analysis tools """
import logging
import numpy as np
from scipy.stats import norm
from neurosynth.base import imageutils
from neurosynth.analysis import stats
from os.path import join, exists
from os import makedirs
logger = logging.getLogger('neurosynth.meta')
def analyze_features(dataset, features=None, image_type='pFgA_z',
threshold=0.001, q=0.01, output_dir=None,
prefix=None):
""" Generate meta-analysis images for a set of features.
Args:
dataset: A Dataset instance containing feature and activation data.
features: A list of named features to generate meta-analysis maps for.
If None, analyzes all features in the current dataset.
image_type: The type of image to return. Specify one of the extensions
generated by the MetaAnalysis procedure--e.g., pFgA_z, pAgF, etc.
By default, will use pFgA_z (i.e., z-scores reflecting the
probability that a Mappable has a feature given that activation is
present).
threshold: The threshold for determining whether or not a Mappable has
a feature. By default, this is 0.001, which is only sensible in the
case of term-based features (so be sure to specify it for other
kinds).
        q: The FDR rate to use for multiple comparisons correction (default =
            0.01).
        output_dir: Directory to save all meta-analysis images to. If None,
            returns all the data as a matrix.
prefix: All output images will be prepended with this string (if None,
defaults to the name of the feature).
Returns:
If output_dir is None, an n_voxels x n_features 2D numpy array.
"""
if features is None:
features = dataset.get_feature_names()
if output_dir is None:
result = np.zeros((dataset.masker.n_vox_in_mask, len(features)))
for i, f in enumerate(features):
ids = dataset.get_studies(features=f, frequency_threshold=threshold)
ma = MetaAnalysis(dataset, ids, q=q)
if output_dir is None:
result[:, i] = ma.images[image_type]
else:
pfx = f if prefix is None else prefix + '_' + f
ma.save_results(output_dir=output_dir, prefix=pfx)
if output_dir is None:
return result
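def _example_feature_analysis(dataset):
    # Illustrative sketch only, not part of the original module: the function
    # name and the feature names 'emotion' and 'language' are hypothetical.
    # `dataset` is assumed to be a neurosynth Dataset with term-based
    # features already loaded.
    maps = analyze_features(dataset, features=['emotion', 'language'],
                            image_type='pFgA_z', threshold=0.001, q=0.01)
    # With output_dir=None, `maps` is an n_voxels x 2 array, one column per
    # requested feature, holding the p(F|A) z-score map for that feature.
    return maps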
class MetaAnalysis(object):
""" Meta-analysis of a Dataset. Currently contrasts two subsets of
studies within a Dataset and saves a bunch of statistical images.
Only one list of study IDs (ids) needs to be passed; the Universe will
be bisected into studies that are and are not included in the
list, and the contrast is then performed across these two groups.
If a second optional second study list is provided (ids2), the Dataset
is first constrained to the union of ids1 and ids2, and the standard
contrast is then performed."""
def __init__(self, dataset, ids, ids2=None, q=0.01, prior=0.5,
min_studies=1):
""" Initialize a new MetaAnalysis instance and run an analysis.
Args:
dataset: A Dataset instance.
ids: A list of Mappable IDs to include in the meta-analysis.
ids2: Optional second list of Mappable IDs. If passed, the set of
studies will be restricted to the union of ids and ids2 before
performing the meta-analysis. This is useful for meta-analytic
contrasts, as the resulting images will in effect identify
regions that are reported/activated more frequently in one
list than in the other.
q: The FDR threshold to use when correcting for multiple
comparisons. Set to .01 by default.
prior: The prior to use when calculating conditional probabilities.
This is the prior probability of a feature being used in a
study (i.e., p(F)). For example, if set to 0.25, the analysis
will assume that 1/4 of studies load on the target feature, as
opposed to the empirically estimated p(F), which is len(ids) /
total number of studies in the dataset. If prior is not passed,
defaults to 0.5, reflecting an effort to put all terms on level
footing and avoid undue influence of base rates (because some
terms are much more common than others). Note that modifying
the prior will only affect the effect size/probability maps,
and not the statistical inference (z-score) maps.
min_studies: Integer or float indicating which voxels to mask out
from results due to lack of stability. If an integer is passed,
all voxels that activate in fewer than this number of studies
will be ignored (i.e., a value of 0 will be assigned in all
output images). If a float in the range of 0 - 1 is passed,
this will be interpreted as a proportion to use as the cut-off
(e.g., passing 0.03 will exclude all voxels active in fewer
than 3% of the entire dataset). Defaults to 1, meaning all
voxels that activate at least one study will be kept.
"""
self.dataset = dataset
mt = dataset.image_table
self.selected_ids = list(set(mt.ids) & set(ids))
self.selected_id_indices = np.in1d(mt.ids, ids)
# If ids2 is provided, we only use mappables explicitly in either ids or ids2.
# Otherwise, all mappables not in the ids list are used as the control
# condition.
        unselected_id_indices = ~self.selected_id_indices if ids2 is None \
else np.in1d(mt.ids, ids2)
# Calculate different count variables
logger.debug("Calculating counts...")
n_selected = len(self.selected_ids)
n_unselected = np.sum(unselected_id_indices)
n_mappables = n_selected + n_unselected
n_selected_active_voxels = mt.data.dot(self.selected_id_indices)
n_unselected_active_voxels = mt.data.dot(unselected_id_indices)
# Nomenclature for variables below: p = probability, F = feature present, g = given,
# U = unselected, A = activation. So, e.g., pAgF = p(A|F) = probability of activation
# in a voxel if we know that the feature is present in a study.
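        # pF is the base rate of the feature across studies; pA is the
        # per-voxel base rate of activation across all studies.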
pF = (n_selected * 1.0) / n_mappables
pA = np.array((mt.data.sum(axis=1) * 1.0) / n_mappables).squeeze()
# Conditional probabilities
logger.debug("Calculating conditional probabilities...")
pAgF = n_selected_active_voxels * 1.0 / n_selected
pAgU = n_unselected_active_voxels * 1.0 / n_unselected
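        # Bayes' rule: p(F|A) = p(A|F) * p(F) / p(A)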
pFgA = pAgF * pF / pA
# Recompute conditionals with uniform prior
logger.debug("Recomputing with uniform priors...")
pAgF_prior = prior * pAgF + (1 - prior) * pAgU
pFgA_prior = pAgF * prior / pAgF_prior
def p_to_z(p, sign):
            p = p / 2  # convert two-tailed p-values to one-tailed
# prevent underflow
p[p < 1e-240] = 1e-240
# Convert to z and assign tail
z = np.abs(norm.ppf(p)) * sign
            # Clip any infinite z-scores to max precision, preserving sign
            z[np.isinf(z)] = np.sign(z[np.isinf(z)]) * -norm.ppf(1e-240)
return z
# One-way chi-square test for consistency of activation
p_vals = stats.one_way(
np.squeeze(n_selected_active_voxels), n_selected)
p_vals[p_vals < 1e-240] = 1e-240
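        # Sign the z-scores by whether each voxel's activation count falls
        # above or below the mean count across voxels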
z_sign = np.sign(
n_selected_active_voxels - np.mean(
n_selected_active_voxels)).ravel()
pAgF_z = p_to_z(p_vals, z_sign)
fdr_thresh = stats.fdr(p_vals, q)
pAgF_z_FDR = imageutils.threshold_img(
pAgF_z, fdr_thresh, p_vals, mask_out='above')
# Two-way chi-square for specificity of activation
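        # Per-voxel 2 x 2 contingency table: active/inactive study counts
        # for the selected and unselected sets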
cells = np.squeeze(
np.array([[n_selected_active_voxels, n_unselected_active_voxels],
[n_selected - n_selected_active_voxels, n_unselected -
n_unselected_active_voxels]]).T)
p_vals = stats.two_way(cells)
p_vals[p_vals < 1e-240] = 1e-240
z_sign = np.sign(pAgF - pAgU).ravel()
pFgA_z = p_to_z(p_vals, z_sign)
fdr_thresh = stats.fdr(p_vals, q)
pFgA_z_FDR = imageutils.threshold_img(
pFgA_z, fdr_thresh, p_vals, mask_out='above')
# Retain any images we may want to save or access later
self.images = {
'pA': pA,
'pAgF': pAgF,
'pFgA': pFgA,
('pAgF_given_pF=%0.2f' % prior): pAgF_prior,
('pFgA_given_pF=%0.2f' % prior): pFgA_prior,
'pAgF_z': pAgF_z,
'pFgA_z': pFgA_z,
('pAgF_z_FDR_%s' % q): pAgF_z_FDR,
('pFgA_z_FDR_%s' % q): pFgA_z_FDR
}
# Mask out all voxels below num_studies threshold
if min_studies > 0:
if isinstance(min_studies, int):
min_studies = float(
min_studies) / n_mappables # Recalculate as proportion
vox_to_exclude = np.where(pA < min_studies)[0] # Create mask
# Mask each image
for k in self.images:
self.images[k][vox_to_exclude] = 0

    def save_results(self, output_dir='.', prefix='', prefix_sep='_',
image_list=None):
""" Write out any images generated by the meta-analysis.
Args:
output_dir (str): folder to write images to
prefix (str): all image files will be prepended with this string
prefix_sep (str): glue between the prefix and rest of filename
image_list (list): optional list of images to save--e.g.,
['pFgA_z', 'pAgF']. If image_list is None (default), will save
all images.
"""
if prefix == '':
prefix_sep = ''
if not exists(output_dir):
makedirs(output_dir)
logger.debug("Saving results...")
if image_list is None:
image_list = self.images.keys()
for suffix, img in self.images.items():
if suffix in image_list:
filename = prefix + prefix_sep + suffix + '.nii.gz'
outpath = join(output_dir, filename)
imageutils.save_img(img, outpath, self.dataset.masker)
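
# Example: run a meta-analysis for one term and save selected maps
# (a minimal sketch; the pickle path, feature name, and output directory
# below are hypothetical):
#
#     from neurosynth.base.dataset import Dataset
#     from neurosynth.analysis.meta import MetaAnalysis
#
#     dataset = Dataset.load('dataset.pkl')
#     ids = dataset.get_studies(features='emotion',
#                               frequency_threshold=0.001)
#     ma = MetaAnalysis(dataset, ids, q=0.01)
#     ma.save_results(output_dir='emotion_meta', prefix='emotion',
#                     image_list=['pFgA_z', 'pFgA_z_FDR_0.01'])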
| mit | -9,200,291,890,323,377,000 | 45.586667 | 93 | 0.59979 | false |