repo_name | path | copies | size | content | license
---|---|---|---|---|---|
KFire-Android/kernel_omap_otter-common | tools/perf/python/twatch.py | 3213 | 1338 | #! /usr/bin/python
# -*- python -*-
# -*- coding: utf-8 -*-
# twatch - Experimental use of the perf python interface
# Copyright (C) 2011 Arnaldo Carvalho de Melo <[email protected]>
#
# This application is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 2.
#
# This application is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
import perf
def main():
cpus = perf.cpu_map()
threads = perf.thread_map()
evsel = perf.evsel(task = 1, comm = 1, mmap = 0,
wakeup_events = 1, sample_period = 1,
sample_id_all = 1,
sample_type = perf.SAMPLE_PERIOD | perf.SAMPLE_TID | perf.SAMPLE_CPU)
evsel.open(cpus = cpus, threads = threads)
evlist = perf.evlist(cpus, threads)
evlist.add(evsel)
evlist.mmap()
while True:
evlist.poll(timeout = -1)
for cpu in cpus:
event = evlist.read_on_cpu(cpu)
if not event:
continue
print "cpu: %2d, pid: %4d, tid: %4d" % (event.sample_cpu,
event.sample_pid,
event.sample_tid),
print event
if __name__ == '__main__':
main()
| gpl-2.0 |
robwarm/gpaw-symm | gpaw/hooks.py | 1 | 1120 | import os
import random
class NotConverged:
def __init__(self, dir='.'):
self.dir = dir
def __call__(self, calc):
if calc.wfs.world.rank > 0:
return
from ase.io import write
name = os.path.join(self.dir, ''.join(random.sample('gpaw' * 3, 12)))
write(name + '.traj', calc.atoms.copy())
fd = open(name + '.gkw', 'w')
fd.write('%r\n' % dict(calc.input_parameters))
fd.close()
fd = open(name + '.txt', 'w')
txt = calc.txt
calc.txt = fd
calc.print_logo()
calc.print_cell_and_parameters()
calc.print_positions()
fd.close()
calc.txt = txt
os.chmod(name + '.traj', 0666)
os.chmod(name + '.gkw', 0666)
os.chmod(name + '.txt', 0666)
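# Illustrative (hypothetical) usage: defining a callable named
# 'not_converged' in this module before the collection loop below
# would register it as a hook, e.g.:
#     not_converged = NotConverged('/tmp/failed_gpaw_runs')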
hooks = {} # dictionary for callback functions
# Fill in allowed hooks:
locs = locals()
for name in ['converged', 'not_converged']:
if name in locs:
hooks[name] = locs[name]
# Backwards compatibility:
if 'crashed' in locs and 'not_converged' not in hooks:
hooks['not_converged'] = locs['crashed']
| gpl-3.0 |
mmazanec22/too-windy | env/lib/python3.5/site-packages/requests/packages/chardet/latin1prober.py | 1778 | 5232 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .charsetprober import CharSetProber
from .constants import eNotMe
from .compat import wrap_ord
FREQ_CAT_NUM = 4
UDF = 0 # undefined
OTH = 1 # other
ASC = 2 # ascii capital letter
ASS = 3 # ascii small letter
ACV = 4 # accent capital vowel
ACO = 5 # accent capital other
ASV = 6 # accent small vowel
ASO = 7 # accent small other
CLASS_NUM = 8 # total classes
Latin1_CharToClass = (
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 00 - 07
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 08 - 0F
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 10 - 17
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 18 - 1F
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 20 - 27
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 28 - 2F
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 30 - 37
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 38 - 3F
OTH, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 40 - 47
ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 48 - 4F
ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC, # 50 - 57
ASC, ASC, ASC, OTH, OTH, OTH, OTH, OTH, # 58 - 5F
OTH, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 60 - 67
ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 68 - 6F
ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS, # 70 - 77
ASS, ASS, ASS, OTH, OTH, OTH, OTH, OTH, # 78 - 7F
OTH, UDF, OTH, ASO, OTH, OTH, OTH, OTH, # 80 - 87
OTH, OTH, ACO, OTH, ACO, UDF, ACO, UDF, # 88 - 8F
UDF, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # 90 - 97
OTH, OTH, ASO, OTH, ASO, UDF, ASO, ACO, # 98 - 9F
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A0 - A7
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # A8 - AF
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B0 - B7
OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH, # B8 - BF
ACV, ACV, ACV, ACV, ACV, ACV, ACO, ACO, # C0 - C7
ACV, ACV, ACV, ACV, ACV, ACV, ACV, ACV, # C8 - CF
ACO, ACO, ACV, ACV, ACV, ACV, ACV, OTH, # D0 - D7
ACV, ACV, ACV, ACV, ACV, ACO, ACO, ACO, # D8 - DF
ASV, ASV, ASV, ASV, ASV, ASV, ASO, ASO, # E0 - E7
ASV, ASV, ASV, ASV, ASV, ASV, ASV, ASV, # E8 - EF
ASO, ASO, ASV, ASV, ASV, ASV, ASV, OTH, # F0 - F7
ASV, ASV, ASV, ASV, ASV, ASO, ASO, ASO, # F8 - FF
)
# 0 : illegal
# 1 : very unlikely
# 2 : normal
# 3 : very likely
Latin1ClassModel = (
# UDF OTH ASC ASS ACV ACO ASV ASO
0, 0, 0, 0, 0, 0, 0, 0, # UDF
0, 3, 3, 3, 3, 3, 3, 3, # OTH
0, 3, 3, 3, 3, 3, 3, 3, # ASC
0, 3, 3, 3, 1, 1, 3, 3, # ASS
0, 3, 3, 3, 1, 2, 1, 2, # ACV
0, 3, 3, 3, 3, 3, 3, 3, # ACO
0, 3, 1, 3, 1, 1, 1, 3, # ASV
0, 3, 1, 3, 1, 1, 3, 3, # ASO
)
class Latin1Prober(CharSetProber):
def __init__(self):
CharSetProber.__init__(self)
self.reset()
def reset(self):
self._mLastCharClass = OTH
self._mFreqCounter = [0] * FREQ_CAT_NUM
CharSetProber.reset(self)
def get_charset_name(self):
return "windows-1252"
def feed(self, aBuf):
aBuf = self.filter_with_english_letters(aBuf)
for c in aBuf:
charClass = Latin1_CharToClass[wrap_ord(c)]
freq = Latin1ClassModel[(self._mLastCharClass * CLASS_NUM)
+ charClass]
if freq == 0:
self._mState = eNotMe
break
self._mFreqCounter[freq] += 1
self._mLastCharClass = charClass
return self.get_state()
def get_confidence(self):
if self.get_state() == eNotMe:
return 0.01
total = sum(self._mFreqCounter)
if total < 0.01:
confidence = 0.0
else:
confidence = ((self._mFreqCounter[3] - self._mFreqCounter[1] * 20.0)
/ total)
if confidence < 0.0:
confidence = 0.0
# lower the confidence of latin1 so that other more accurate
# detectors can take priority.
confidence = confidence * 0.73
return confidence
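# Worked example (illustrative numbers): with _mFreqCounter == [0, 1, 0, 39],
# total == 40, so confidence == (39 - 1 * 20.0) / 40 * 0.73 ≈ 0.35.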
| gpl-3.0 |
Erotemic/guitool | guitool_ibeis/api_table_view.py | 1 | 6221 | from __future__ import absolute_import, division, print_function
from guitool_ibeis.__PYQT__ import QtCore, QtGui
from guitool_ibeis.__PYQT__ import QtWidgets
from guitool_ibeis import api_item_view
from guitool_ibeis.guitool_decorators import signal_, slot_
import utool
(print, rrr, profile) = utool.inject2(__name__, '[APITableView]', DEBUG=False)
# If you need to set the selected index try:
# AbstractItemView::setCurrentIndex
# AbstractItemView::scrollTo
# AbstractItemView::keyboardSearch
API_VIEW_BASE = QtWidgets.QTableView
#API_VIEW_BASE = QtWidgets.QAbstractItemView
class APITableView(API_VIEW_BASE):
"""
Table view of API data.
Implicitly inherits from APIItemView
"""
rows_updated = signal_(str, int)
contextMenuClicked = signal_(QtCore.QModelIndex, QtCore.QPoint)
API_VIEW_BASE = API_VIEW_BASE
def __init__(view, parent=None):
# Qt Inheritance
API_VIEW_BASE.__init__(view, parent)
# Implicitly inject common APIItemView functions
api_item_view.injectviewinstance(view)
view._init_itemview_behavior()
view._init_table_behavior()
view._init_header_behavior()
view.col_hidden_list = []
# Context menu
view.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
view.customContextMenuRequested.connect(view.on_customMenuRequested)
view._init_api_item_view()
#---------------
# Initialization
#---------------
def _init_table_behavior(view):
""" Table behavior
SeeAlso:
api_item_view._init_itemview_behavior
"""
# Allow sorting by column
view.setCornerButtonEnabled(False)
view.setShowGrid(True)
view.setIconSize(QtCore.QSize(64, 64))
def _init_header_behavior(view):
""" Header behavior
CommandLine:
python -m guitool_ibeis.api_item_widget --test-simple_api_item_widget --show
python -m guitool_ibeis.api_table_view --test-_init_header_behavior --show
Example:
>>> # ENABLE_DOCTEST
>>> from guitool_ibeis.api_table_view import * # NOQA
>>> import guitool_ibeis
>>> guitool_ibeis.ensure_qapp()
>>> view = APITableView()
>>> view._init_header_behavior()
"""
# Row Headers
verticalHeader = view.verticalHeader()
verticalHeader.setVisible(False)
#verticalHeader.setSortIndicatorShown(True)
verticalHeader.setHighlightSections(True)
try:
verticalHeader.setResizeMode(QtWidgets.QHeaderView.Interactive)
verticalHeader.setMovable(False)
except AttributeError:
verticalHeader.setSectionResizeMode(QtWidgets.QHeaderView.Interactive)
verticalHeader.setSectionsMovable(False)
# TODO: get good estimate if there are thumbnails
#verticalHeader.setDefaultSectionSize(256)
# Column headers
horizontalHeader = view.horizontalHeader()
horizontalHeader.setVisible(True)
horizontalHeader.setStretchLastSection(True)
horizontalHeader.setSortIndicatorShown(True)
horizontalHeader.setHighlightSections(True)
# Column Sizes
# DO NOT USE ResizeToContents. IT MAKES THINGS VERY SLOW
#horizontalHeader.setResizeMode(QtWidgets.QHeaderView.ResizeToContents)
#horizontalHeader.setResizeMode(QtWidgets.QHeaderView.Stretch)
try:
horizontalHeader.setResizeMode(QtWidgets.QHeaderView.Interactive)
horizontalHeader.setMovable(True)
except AttributeError:
horizontalHeader.setSectionResizeMode(QtWidgets.QHeaderView.Interactive)
horizontalHeader.setSectionsMovable(True)
#horizontalHeader.setCascadingSectionResizes(True)
# Columns moveable
#---------------
# Qt Overrides
#---------------
def setModel(view, model):
""" QtOverride: Returns item delegate for this index """
api_item_view.setModel(view, model)
def keyPressEvent(view, event):
"""
CommandLine:
python -m guitool_ibeis.api_item_widget --test-simple_api_item_widget --show
python -m guitool_ibeis.api_table_view --test-keyPressEvent --show
Example:
>>> # ENABLE_DOCTEST
>>> from guitool_ibeis.api_table_view import * # NOQA
>>> import guitool_ibeis
>>> guitool_ibeis.ensure_qapp()
>>> view = APITableView()
>>> view._init_header_behavior()
"""
return api_item_view.keyPressEvent(view, event)
# # TODO: can this be in api_item_view?
# assert isinstance(event, QtGui.QKeyEvent)
# view.API_VIEW_BASE.keyPressEvent(view, event)
# if event.matches(QtGui.QKeySequence.Copy):
# #print('Received Ctrl+C in View')
# view.copy_selection_to_clipboard()
# #print ('[view] keyPressEvent: %s' % event.key())
# for func in view.registered_keypress_funcs:
# func(view, event)
# for key, func in view.registered_single_keys:
# #print(key)
# if event.key() == key:
# func(view, event)
def mouseMoveEvent(view, event):
assert isinstance(event, QtGui.QMouseEvent)
API_VIEW_BASE.mouseMoveEvent(view, event)
def mousePressEvent(view, event):
assert isinstance(event, QtGui.QMouseEvent)
API_VIEW_BASE.mousePressEvent(view, event)
#print('no editing')
view.setEditTriggers(QtWidgets.QAbstractItemView.NoEditTriggers)
def mouseReleaseEvent(view, event):
assert isinstance(event, QtGui.QMouseEvent)
#print('editing ok')
view.setEditTriggers(view._defaultEditTriggers)
API_VIEW_BASE.mouseReleaseEvent(view, event)
#---------------
# Slots
#---------------
@slot_(str, int)
def on_rows_updated(view, tblname, num):
# re-emit the model signal
view.rows_updated.emit(tblname, num)
@slot_(QtCore.QPoint)
def on_customMenuRequested(view, pos):
index = view.indexAt(pos)
view.contextMenuClicked.emit(index, pos)
| apache-2.0 |
mvidalgarcia/indico | indico/modules/events/abstracts/notifications.py | 2 | 6735 | # This file is part of Indico.
# Copyright (C) 2002 - 2019 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from __future__ import unicode_literals
import itertools
from collections import OrderedDict
from flask import session
from indico.core.notifications import make_email, send_email
from indico.modules.events.abstracts.models.abstracts import AbstractState
from indico.modules.events.abstracts.models.email_logs import AbstractEmailLogEntry
from indico.util.i18n import _
from indico.util.placeholders import replace_placeholders
from indico.util.rules import Condition, check_rule
from indico.web.flask.templating import get_template_module
class EmailNotificationCondition(Condition):
#: Override if you want to customize the text
#: that shows up for "Any"
any_caption = _("any")
#: same for text that shows up for "none"
none_caption = _("none")
#: Text that will show inline in rule descriptions
label_text = None
@classmethod
def get_available_values(cls, event=None, **kwargs):
choices = cls._iter_available_values(event=event, **kwargs)
if not cls.required:
return OrderedDict(itertools.chain([('*', cls.any_caption), ('', cls.none_caption)], choices))
else:
return OrderedDict(choices)
@classmethod
def _iter_available_values(cls, event, **kwargs):
raise NotImplementedError
class TrackCondition(EmailNotificationCondition):
"""A condition that matches a particular track."""
name = 'track'
description = _("Destination Track")
any_caption = _("any track")
none_caption = _("no track")
label_text = _("in")
@classmethod
def _iter_available_values(cls, event, **kwargs):
return ((t.id, t.title) for t in event.tracks)
@classmethod
def get_test_track_set(cls, abstract):
if abstract.state == AbstractState.accepted:
return {abstract.accepted_track_id} if abstract.accepted_track_id else set()
else:
return {track.id for track in abstract.submitted_for_tracks}
@classmethod
def check(cls, values, abstract, **kwargs):
return bool(set(values) & cls.get_test_track_set(abstract))
@classmethod
def is_none(cls, abstract, **kwargs):
return not bool(cls.get_test_track_set(abstract))
class StateCondition(EmailNotificationCondition):
"""A condition that matches a particular abstract final state."""
name = 'state'
description = _("Final State")
required = True
compatible_with = {
AbstractState.invited.value: (),
AbstractState.submitted.value: (),
AbstractState.rejected.value: (),
AbstractState.accepted.value: ('contribution_type', 'track'),
AbstractState.merged.value: ('contribution_type', 'track'),
AbstractState.duplicate.value: ('contribution_type', 'track'),
AbstractState.withdrawn.value: ()
}
@classmethod
def _iter_available_values(cls, **kwargs):
return ((s.value, s.title) for s in AbstractState)
@classmethod
def check(cls, values, abstract, **kwargs):
return abstract.state in values
@classmethod
def is_none(cls, abstract, **kwargs):
return False
class ContributionTypeCondition(EmailNotificationCondition):
"""A condition that matches a particular contribution type."""
name = 'contribution_type'
description = _("Contribution Type")
any_caption = _("any type")
none_caption = _("no type")
label_text = _("as")
@classmethod
def _iter_available_values(cls, event, **kwargs):
return ((ct.id, ct.name) for ct in event.contribution_types)
@classmethod
def get_test_contrib_type_id(cls, abstract):
if abstract.state == AbstractState.accepted:
return abstract.accepted_contrib_type_id
else:
return abstract.submitted_contrib_type_id
@classmethod
def check(cls, values, abstract, **kwargs):
return cls.get_test_contrib_type_id(abstract) in values
@classmethod
def is_none(cls, abstract, **kwargs):
return cls.get_test_contrib_type_id(abstract) is None
def get_abstract_notification_tpl_module(email_tpl, abstract):
"""Get the Jinja template module for a notification email
:param email_tpl: the abstract email template used to populate the
email subject/body
:param abstract: the abstract the notification email is for
"""
subject = replace_placeholders('abstract-notification-email', email_tpl.subject,
abstract=abstract, escape_html=False)
body = replace_placeholders('abstract-notification-email', email_tpl.body,
abstract=abstract, escape_html=False)
return get_template_module('events/abstracts/emails/abstract_notification.txt',
event=email_tpl.event, subject=subject, body=body)
def send_abstract_notifications(abstract):
"""Send abstract notification e-mails.
:param abstract: the abstract that is going to be checked
against the event's notification rules
:return: whether an email has been sent
"""
sent = False
for email_tpl in abstract.event.abstract_email_templates:
matched = False
for rule in email_tpl.rules:
if check_rule('abstract-notifications', rule, abstract=abstract, event=abstract.event):
matched = True
to_recipients = []
if email_tpl.include_submitter:
to_recipients.append(abstract.submitter.email)
if email_tpl.include_authors:
to_recipients += [author.email for author in abstract.primary_authors]
cc_recipients = list(email_tpl.extra_cc_emails)
if email_tpl.include_coauthors:
cc_recipients += [author.email for author in abstract.secondary_authors]
tpl = get_abstract_notification_tpl_module(email_tpl, abstract)
email = make_email(to_list=to_recipients, cc_list=cc_recipients,
reply_address=email_tpl.reply_to_address, template=tpl)
send_email(email, abstract.event, 'Abstracts', session.user)
abstract.email_logs.append(AbstractEmailLogEntry.create_from_email(email, email_tpl=email_tpl,
user=session.user))
sent = True
if email_tpl.stop_on_match and matched:
break
return sent
| mit |
jkliff/project_walker | walker.py | 1 | 4914 | #!/usr/bin/python
# -*- coding: utf-8 -*-
import argparse
import os.path
import sys
import ProjectWalker
import Checkers
from yaml import load
from string import ljust, rjust
CHECKER_STATUS_PADDING = 40
COUNTS_PADDING = 8
DEFAULT_CONFIG_NAME = 'walker.conf'
IS_TERMINAL = sys.stdout.isatty()
def red(string):
if IS_TERMINAL:
return '\033[91m' + string + '\033[0m'
else:
return string
def green(string):
if IS_TERMINAL:
return '\033[92m' + string + '\033[0m'
else:
return string
def load_project(project):
tree_builder = ProjectWalker.ProjectStructureTreeBuilder()
return tree_builder.build(os.path.abspath(project))
def print_status(name, status, full_status=None):
failed_check_count = 0
check_count = 0
short_status = ''
long_status = ''
messages = []
for s in status:
if not s.isSuccessful():
failed_check_count = failed_check_count + 1
check_count = check_count + 1
if full_status:
for (c, m) in s.check_result:
messages.append(' * ' + m)
long_status = '\n'.join(messages)
if failed_check_count == 0:
ok_or_fail = green('OK')
else:
ok_or_fail = red('FAILED')
if check_count == 1:
counts = ''
else:
counts = '{}/{}'.format(failed_check_count, check_count)
short_status = '{} {} [{}]'.format(ljust(str(name), CHECKER_STATUS_PADDING), rjust(counts, COUNTS_PADDING),
ok_or_fail)
print short_status
if full_status:
print long_status
def is_successful(status):
return all(s.isSuccessful() for s in status)
def create_checkers(config):
vars = config['vars']
rules = config['rules']
checkers = []
for (rule_name, rule_config) in rules.iteritems():
if type(rule_config) == dict:
rc = [rule_config]
elif type(rule_config) == list:
rc = rule_config
elif rule_config is None:
rc = [{}]
else:
sys.exit('Invalid config [{}]'.format(rule_name))
for ct in rc:
if 'excludeFiles' in config:
add_global_exclude(ct, config['excludeFiles'])
c = getattr(Checkers, rule_name)
checkers.append(c(vars, ct))
return checkers
def add_global_exclude(config, exclude = None):
if exclude:
if type(exclude) != list:
exclude = [exclude]
if 'excludeFiles' in config:
if type(config['excludeFiles']) == list:
ex = config['excludeFiles']
else:
ex = [config['excludeFiles']]
ex.extend(exclude)
else:
config['excludeFiles'] = exclude
def list_checkers():
for c, t in Checkers.__dict__.iteritems():
if c.endswith('Checker') and issubclass(t, ProjectWalker.Checker):
print c
def group_status(status):
grouped = {}
for s in status:
name = s.checker_name
if name in grouped:
grouped[name].append(s)
else:
grouped[name] = [s]
return grouped
def loadConfig(projectPath, configName):
config = None
if configName:
path = configName
else:
path = os.path.join(os.path.abspath(projectPath), DEFAULT_CONFIG_NAME)
with open(path, 'r') as f:
config = load(f)
return config
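# A minimal walker.conf sketch (the checker name below is hypothetical; use
# --list-checkers for the real ones). create_checkers() expects top-level
# 'vars' and 'rules' keys, plus an optional global 'excludeFiles':
#
#     vars:
#         owner: jdoe
#     excludeFiles: ['*.pyc']
#     rules:
#         FileExistsChecker:
#             excludeFiles: ['build/*']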
def main():
parser = argparse.ArgumentParser(description='Checks a project with a set of rules.')
parser.add_argument('-p', '--project', default='.', help='project directory')
parser.add_argument('-c', '--config', help='configuration file')
parser.add_argument('-q', '--quiet', action='store_true', help='do not print anything')
parser.add_argument('-f', '--full-report', action='store_true', help='prints full report')
parser.add_argument('-l', '--list-checkers', action='store_true', help='lists all checkers')
args = parser.parse_args()
if (args.list_checkers):
list_checkers()
sys.exit('')
config = loadConfig(args.project, args.config)
if not config:
sys.exit('Could not find config [{}] in project directory [{}]!'.format(args.config, args.project))
if 'vars' not in config:
config['vars'] = {}
config['vars']['project_path'] = os.path.abspath(args.project)
config['vars']['project'] = os.path.basename(os.path.abspath(args.project))
checker = ProjectWalker.ProjectCheckEvaluator(load_project(args.project))
checkers = create_checkers(config)
status = checker.walk(checkers)
if not args.quiet:
grouped = group_status(status)
for n in sorted(grouped.iterkeys(), reverse=True):
s = grouped[n]
print_status(n, s, args.full_report)
if not is_successful(status):
sys.exit('')
if __name__ == '__main__':
main()
| bsd-3-clause |
shubhamdhama/zulip | zerver/tests/test_unread.py | 1 | 22793 | from typing import Any, List, Mapping
from unittest import mock
import ujson
from django.db import connection
from zerver.lib.fix_unreads import fix, fix_pre_pointer, fix_unsubscribed
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.test_helpers import get_subscription, tornado_redirected_to_list
from zerver.lib.topic_mutes import add_topic_mute
from zerver.models import Subscription, UserMessage, UserProfile, get_realm, get_stream, get_user
class PointerTest(ZulipTestCase):
def test_update_pointer(self) -> None:
"""
Posting a pointer to /update (in the form {"pointer": pointer}) changes
the pointer we store for your UserProfile.
"""
self.login('hamlet')
self.assertEqual(self.example_user('hamlet').pointer, -1)
msg_id = self.send_stream_message(self.example_user("othello"), "Verona")
result = self.client_post("/json/users/me/pointer", {"pointer": msg_id})
self.assert_json_success(result)
self.assertEqual(self.example_user('hamlet').pointer, msg_id)
def test_api_update_pointer(self) -> None:
"""
Same as above, but for the API view
"""
user = self.example_user('hamlet')
email = user.email
self.assertEqual(user.pointer, -1)
msg_id = self.send_stream_message(self.example_user("othello"), "Verona")
result = self.api_post(user, "/api/v1/users/me/pointer", {"pointer": msg_id})
self.assert_json_success(result)
self.assertEqual(get_user(email, user.realm).pointer, msg_id)
def test_missing_pointer(self) -> None:
"""
Posting json to /json/users/me/pointer which does not contain a pointer key/value pair
returns a 400 and error message.
"""
self.login('hamlet')
self.assertEqual(self.example_user('hamlet').pointer, -1)
result = self.client_post("/json/users/me/pointer", {"foo": 1})
self.assert_json_error(result, "Missing 'pointer' argument")
self.assertEqual(self.example_user('hamlet').pointer, -1)
def test_invalid_pointer(self) -> None:
"""
Posting json to /json/users/me/pointer with an invalid pointer returns a 400 and error
message.
"""
self.login('hamlet')
self.assertEqual(self.example_user('hamlet').pointer, -1)
result = self.client_post("/json/users/me/pointer", {"pointer": "foo"})
self.assert_json_error(result, "Bad value for 'pointer': foo")
self.assertEqual(self.example_user('hamlet').pointer, -1)
def test_pointer_out_of_range(self) -> None:
"""
Posting json to /json/users/me/pointer with an out of range (< 0) pointer returns a 400
and error message.
"""
self.login('hamlet')
self.assertEqual(self.example_user('hamlet').pointer, -1)
result = self.client_post("/json/users/me/pointer", {"pointer": -2})
self.assert_json_error(result, "Bad value for 'pointer': -2")
self.assertEqual(self.example_user('hamlet').pointer, -1)
def test_use_first_unread_anchor_interaction_with_pointer(self) -> None:
"""
Getting old messages (a get request to /json/messages) should never
return an unread message older than the current pointer, when there's
no narrow set.
"""
self.login('hamlet')
# Ensure the pointer is not set (-1)
self.assertEqual(self.example_user('hamlet').pointer, -1)
# Mark all existing messages as read
result = self.client_post("/json/mark_all_as_read")
self.assert_json_success(result)
# Send a new message (this will be unread)
new_message_id = self.send_stream_message(self.example_user("othello"), "Verona",
"test")
# If we call get_messages with use_first_unread_anchor=True, we
# should get the message we just sent
messages_response = self.get_messages_response(
anchor="first_unread", num_before=0, num_after=1)
self.assertEqual(messages_response['messages'][0]['id'], new_message_id)
self.assertEqual(messages_response['anchor'], new_message_id)
# Test with the old way of expressing use_first_unread_anchor=True
messages_response = self.get_messages_response(
anchor=0, num_before=0, num_after=1, use_first_unread_anchor=True)
self.assertEqual(messages_response['messages'][0]['id'], new_message_id)
self.assertEqual(messages_response['anchor'], new_message_id)
# We want to get the message_id of an arbitrary old message. We can
# call get_messages with use_first_unread_anchor=False and simply
# save the first message we're returned.
messages = self.get_messages(
anchor=0, num_before=0, num_after=2, use_first_unread_anchor=False)
old_message_id = messages[0]['id']
next_old_message_id = messages[1]['id']
# Verify the message is marked as read
user_message = UserMessage.objects.get(
message_id=old_message_id,
user_profile=self.example_user('hamlet'))
self.assertTrue(user_message.flags.read)
# Let's set this old message to be unread
result = self.client_post("/json/messages/flags",
{"messages": ujson.dumps([old_message_id]),
"op": "remove",
"flag": "read"})
# Verify it's now marked as unread
user_message = UserMessage.objects.get(
message_id=old_message_id,
user_profile=self.example_user('hamlet'))
self.assert_json_success(result)
self.assertFalse(user_message.flags.read)
# Now if we call get_messages with use_first_unread_anchor=True,
# we should get the old message we just set to unread
messages_response = self.get_messages_response(
anchor="first_unread", num_before=0, num_after=1)
self.assertEqual(messages_response['messages'][0]['id'], old_message_id)
self.assertEqual(messages_response['anchor'], old_message_id)
# Let's update the pointer to be *after* this old unread message (but
# still on or before the new unread message we just sent)
result = self.client_post("/json/users/me/pointer",
{"pointer": next_old_message_id})
self.assert_json_success(result)
self.assertEqual(self.example_user('hamlet').pointer,
next_old_message_id)
# Verify that moving the pointer didn't mark our message as read.
user_message = UserMessage.objects.get(
message_id=old_message_id,
user_profile=self.example_user('hamlet'))
self.assertFalse(user_message.flags.read)
def test_visible_messages_use_first_unread_anchor(self) -> None:
self.login('hamlet')
self.assertEqual(self.example_user('hamlet').pointer, -1)
result = self.client_post("/json/mark_all_as_read")
self.assert_json_success(result)
new_message_id = self.send_stream_message(self.example_user("othello"), "Verona",
"test")
messages_response = self.get_messages_response(
anchor="first_unread", num_before=0, num_after=1)
self.assertEqual(messages_response['messages'][0]['id'], new_message_id)
self.assertEqual(messages_response['anchor'], new_message_id)
with mock.patch('zerver.views.messages.get_first_visible_message_id', return_value=new_message_id):
messages_response = self.get_messages_response(
anchor="first_unread", num_before=0, num_after=1)
self.assertEqual(messages_response['messages'][0]['id'], new_message_id)
self.assertEqual(messages_response['anchor'], new_message_id)
with mock.patch('zerver.views.messages.get_first_visible_message_id', return_value=new_message_id + 1):
messages_response = self.get_messages_response(
anchor="first_unread", num_before=0, num_after=1)
self.assert_length(messages_response['messages'], 0)
self.assertIn('anchor', messages_response)
with mock.patch('zerver.views.messages.get_first_visible_message_id', return_value=new_message_id - 1):
messages = self.get_messages(
anchor="first_unread", num_before=0, num_after=1)
self.assert_length(messages, 1)
class UnreadCountTests(ZulipTestCase):
def setUp(self) -> None:
super().setUp()
with mock.patch('zerver.lib.push_notifications.push_notifications_enabled',
return_value = True) as mock_push_notifications_enabled:
self.unread_msg_ids = [
self.send_personal_message(
self.example_user("iago"), self.example_user("hamlet"), "hello"),
self.send_personal_message(
self.example_user("iago"), self.example_user("hamlet"), "hello2")]
mock_push_notifications_enabled.assert_called()
# Sending a new message results in unread UserMessages being created
def test_new_message(self) -> None:
self.login('hamlet')
content = "Test message for unset read bit"
last_msg = self.send_stream_message(self.example_user("hamlet"), "Verona", content)
user_messages = list(UserMessage.objects.filter(message=last_msg))
self.assertEqual(len(user_messages) > 0, True)
for um in user_messages:
self.assertEqual(um.message.content, content)
if um.user_profile.email != self.example_email("hamlet"):
self.assertFalse(um.flags.read)
def test_update_flags(self) -> None:
self.login('hamlet')
result = self.client_post("/json/messages/flags",
{"messages": ujson.dumps(self.unread_msg_ids),
"op": "add",
"flag": "read"})
self.assert_json_success(result)
# Ensure we properly set the flags
found = 0
for msg in self.get_messages():
if msg['id'] in self.unread_msg_ids:
self.assertEqual(msg['flags'], ['read'])
found += 1
self.assertEqual(found, 2)
result = self.client_post("/json/messages/flags",
{"messages": ujson.dumps([self.unread_msg_ids[1]]),
"op": "remove", "flag": "read"})
self.assert_json_success(result)
# Ensure we properly remove just one flag
for msg in self.get_messages():
if msg['id'] == self.unread_msg_ids[0]:
self.assertEqual(msg['flags'], ['read'])
elif msg['id'] == self.unread_msg_ids[1]:
self.assertEqual(msg['flags'], [])
def test_mark_all_in_stream_read(self) -> None:
self.login('hamlet')
user_profile = self.example_user('hamlet')
stream = self.subscribe(user_profile, "test_stream")
self.subscribe(self.example_user("cordelia"), "test_stream")
message_id = self.send_stream_message(self.example_user("hamlet"), "test_stream", "hello")
unrelated_message_id = self.send_stream_message(self.example_user("hamlet"), "Denmark", "hello")
events: List[Mapping[str, Any]] = []
with tornado_redirected_to_list(events):
result = self.client_post("/json/mark_stream_as_read", {
"stream_id": stream.id,
})
self.assert_json_success(result)
self.assertTrue(len(events) == 1)
event = events[0]['event']
expected = dict(operation='add',
messages=[message_id],
flag='read',
type='update_message_flags',
all=False)
differences = [key for key in expected if expected[key] != event[key]]
self.assertTrue(len(differences) == 0)
hamlet = self.example_user('hamlet')
um = list(UserMessage.objects.filter(message=message_id))
for msg in um:
if msg.user_profile.email == hamlet.email:
self.assertTrue(msg.flags.read)
else:
self.assertFalse(msg.flags.read)
unrelated_messages = list(UserMessage.objects.filter(message=unrelated_message_id))
for msg in unrelated_messages:
if msg.user_profile.email == hamlet.email:
self.assertFalse(msg.flags.read)
def test_mark_all_in_invalid_stream_read(self) -> None:
self.login('hamlet')
invalid_stream_id = "12345678"
result = self.client_post("/json/mark_stream_as_read", {
"stream_id": invalid_stream_id,
})
self.assert_json_error(result, 'Invalid stream id')
def test_mark_all_topics_unread_with_invalid_stream_name(self) -> None:
self.login('hamlet')
invalid_stream_id = "12345678"
result = self.client_post("/json/mark_topic_as_read", {
"stream_id": invalid_stream_id,
'topic_name': 'whatever',
})
self.assert_json_error(result, "Invalid stream id")
def test_mark_all_in_stream_topic_read(self) -> None:
self.login('hamlet')
user_profile = self.example_user('hamlet')
self.subscribe(user_profile, "test_stream")
message_id = self.send_stream_message(self.example_user("hamlet"), "test_stream", "hello", "test_topic")
unrelated_message_id = self.send_stream_message(self.example_user("hamlet"), "Denmark", "hello", "Denmark2")
events: List[Mapping[str, Any]] = []
with tornado_redirected_to_list(events):
result = self.client_post("/json/mark_topic_as_read", {
"stream_id": get_stream("test_stream", user_profile.realm).id,
"topic_name": "test_topic",
})
self.assert_json_success(result)
self.assertTrue(len(events) == 1)
event = events[0]['event']
expected = dict(operation='add',
messages=[message_id],
flag='read',
type='update_message_flags',
all=False)
differences = [key for key in expected if expected[key] != event[key]]
self.assertTrue(len(differences) == 0)
um = list(UserMessage.objects.filter(message=message_id))
for msg in um:
if msg.user_profile_id == user_profile.id:
self.assertTrue(msg.flags.read)
unrelated_messages = list(UserMessage.objects.filter(message=unrelated_message_id))
for msg in unrelated_messages:
if msg.user_profile_id == user_profile.id:
self.assertFalse(msg.flags.read)
def test_mark_all_in_invalid_topic_read(self) -> None:
self.login('hamlet')
invalid_topic_name = "abc"
result = self.client_post("/json/mark_topic_as_read", {
"stream_id": get_stream("Denmark", get_realm("zulip")).id,
"topic_name": invalid_topic_name,
})
self.assert_json_error(result, 'No such topic \'abc\'')
class FixUnreadTests(ZulipTestCase):
def test_fix_unreads(self) -> None:
user = self.example_user('hamlet')
realm = get_realm('zulip')
def send_message(stream_name: str, topic_name: str) -> int:
msg_id = self.send_stream_message(
self.example_user("othello"),
stream_name,
topic_name=topic_name)
um = UserMessage.objects.get(
user_profile=user,
message_id=msg_id)
return um.id
def assert_read(user_message_id: int) -> None:
um = UserMessage.objects.get(id=user_message_id)
self.assertTrue(um.flags.read)
def assert_unread(user_message_id: int) -> None:
um = UserMessage.objects.get(id=user_message_id)
self.assertFalse(um.flags.read)
def mute_stream(stream_name: str) -> None:
stream = get_stream(stream_name, realm)
recipient = stream.recipient
subscription = Subscription.objects.get(
user_profile=user,
recipient=recipient,
)
subscription.is_muted = True
subscription.save()
def mute_topic(stream_name: str, topic_name: str) -> None:
stream = get_stream(stream_name, realm)
recipient = stream.recipient
add_topic_mute(
user_profile=user,
stream_id=stream.id,
recipient_id=recipient.id,
topic_name=topic_name,
)
def force_unsubscribe(stream_name: str) -> None:
'''
We don't want side effects here, since the eventual
unsubscribe path may mark messages as read, defeating
the test setup here.
'''
sub = get_subscription(stream_name, user)
sub.active = False
sub.save()
# The data setup here is kind of funny, because some of these
# conditions should not actually happen in practice going forward,
# but we may have had bad data from the past.
mute_stream('Denmark')
mute_topic('Verona', 'muted_topic')
um_normal_id = send_message('Verona', 'normal')
um_muted_topic_id = send_message('Verona', 'muted_topic')
um_muted_stream_id = send_message('Denmark', 'whatever')
user.pointer = self.get_last_message().id
user.save()
um_post_pointer_id = send_message('Verona', 'muted_topic')
self.subscribe(user, 'temporary')
um_unsubscribed_id = send_message('temporary', 'whatever')
force_unsubscribe('temporary')
# verify data setup
assert_unread(um_normal_id)
assert_unread(um_muted_topic_id)
assert_unread(um_muted_stream_id)
assert_unread(um_post_pointer_id)
assert_unread(um_unsubscribed_id)
with connection.cursor() as cursor:
fix_pre_pointer(cursor, user)
# The only message that should have been fixed is the "normal"
# unumuted message before the pointer.
assert_read(um_normal_id)
# We don't "fix" any messages that are either muted or after the
# pointer, because they can be legitimately unread.
assert_unread(um_muted_topic_id)
assert_unread(um_muted_stream_id)
assert_unread(um_post_pointer_id)
assert_unread(um_unsubscribed_id)
# fix unsubscribed
with connection.cursor() as cursor:
fix_unsubscribed(cursor, user)
# Most messages don't change.
assert_unread(um_muted_topic_id)
assert_unread(um_muted_stream_id)
assert_unread(um_post_pointer_id)
# The unsubscribed entry should change.
assert_read(um_unsubscribed_id)
# test idempotency
fix(user)
assert_read(um_normal_id)
assert_unread(um_muted_topic_id)
assert_unread(um_muted_stream_id)
assert_unread(um_post_pointer_id)
assert_read(um_unsubscribed_id)
class PushNotificationMarkReadFlowsTest(ZulipTestCase):
def get_mobile_push_notification_ids(self, user_profile: UserProfile) -> List[int]:
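# Collects the ids of this user's messages whose "active mobile push
# notification" flag is still set, via the raw SQL predicate that
# UserMessage exposes for it.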
return list(UserMessage.objects.filter(
user_profile=user_profile,
).extra(
where=[UserMessage.where_active_push_notification()],
).order_by("message_id").values_list("message_id", flat=True))
@mock.patch('zerver.lib.push_notifications.push_notifications_enabled', return_value=True)
def test_track_active_mobile_push_notifications(self, mock_push_notifications: mock.MagicMock) -> None:
mock_push_notifications.return_value = True
self.login('hamlet')
user_profile = self.example_user('hamlet')
stream = self.subscribe(user_profile, "test_stream")
second_stream = self.subscribe(user_profile, "second_stream")
property_name = "push_notifications"
result = self.api_post(user_profile, "/api/v1/users/me/subscriptions/properties",
{"subscription_data": ujson.dumps([{"property": property_name,
"value": True,
"stream_id": stream.id}])})
result = self.api_post(user_profile, "/api/v1/users/me/subscriptions/properties",
{"subscription_data": ujson.dumps([{"property": property_name,
"value": True,
"stream_id": second_stream.id}])})
self.assert_json_success(result)
self.assertEqual(self.get_mobile_push_notification_ids(user_profile), [])
message_id = self.send_stream_message(self.example_user("cordelia"), "test_stream", "hello", "test_topic")
second_message_id = self.send_stream_message(self.example_user("cordelia"), "test_stream", "hello", "other_topic")
third_message_id = self.send_stream_message(self.example_user("cordelia"), "second_stream", "hello", "test_topic")
self.assertEqual(self.get_mobile_push_notification_ids(user_profile),
[message_id, second_message_id, third_message_id])
result = self.client_post("/json/mark_topic_as_read", {
"stream_id": str(stream.id),
"topic_name": "test_topic",
})
self.assert_json_success(result)
self.assertEqual(self.get_mobile_push_notification_ids(user_profile),
[second_message_id, third_message_id])
result = self.client_post("/json/mark_stream_as_read", {
"stream_id": str(stream.id),
"topic_name": "test_topic",
})
self.assertEqual(self.get_mobile_push_notification_ids(user_profile),
[third_message_id])
fourth_message_id = self.send_stream_message(self.example_user("cordelia"), "test_stream", "hello", "test_topic")
self.assertEqual(self.get_mobile_push_notification_ids(user_profile),
[third_message_id, fourth_message_id])
result = self.client_post("/json/mark_all_as_read", {})
self.assertEqual(self.get_mobile_push_notification_ids(user_profile),
[])
mock_push_notifications.assert_called()
| apache-2.0 |
dentaku65/pelisalacarta | python/main-classic/servers/nosvideo.py | 9 | 4368 | # -*- coding: utf-8 -*-
#------------------------------------------------------------
# pelisalacarta - XBMC Plugin
# Connector for nosvideo
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
#------------------------------------------------------------
import urlparse,urllib2,urllib,re
import os
from core import scrapertools
from core import logger
from core import config
def test_video_exists( page_url ):
logger.info("[nosvideo.py] test_video_exists(page_url='%s')" % page_url)
data = scrapertools.cache_page(page_url)
if "The file is being converted" in data:
return False,"El fichero está en proceso"
return True,""
def get_video_url( page_url , premium = False , user="" , password="", video_password="" ):
logger.info("[nosvideo.py] get_video_url(page_url='%s')" % page_url)
video_urls = []
# Read the URL
data = scrapertools.cache_page( page_url )
bloque = scrapertools.get_match(data,'<Form method="POST"(.*)</.orm>')
#logger.info("bloque="+bloque)
op = scrapertools.get_match(bloque,'<input type="hidden" name="op" value="([^"]+)"')
id = scrapertools.get_match(bloque,'<input type="hidden" name="id" value="([^"]+)"')
rand = scrapertools.get_match(bloque,'<input type="hidden" name="rand" value="([^"]*)"')
referer = scrapertools.get_match(bloque,'<input type="hidden" name="referer" value="([^"]*)"')
usr_login = scrapertools.get_match(bloque,'<input type="hidden" name="usr_login" value="([^"]*)"')
fname = scrapertools.get_match(bloque,'<input type="hidden" name="fname" value="([^"]+)"')
method_free = scrapertools.get_match(bloque,'<input type="[^"]+" name="method_free" value="([^"]*)"')
method_premium = scrapertools.get_match(bloque,'<input type="[^"]+" name="method_premium" value="([^"]*)"')
# Simulate the button
#op=download1&id=iij5rw25kh4c&rand=&referer=&usr_login=&fname=TED-TS-Screener.Castellano.Ro_dri.avi&method_free=&method_premium=&down_script=1&method_free=Continue+to+Video
post = "op="+op+"&id="+id+"&rand="+rand+"&referer="+referer+"&usr_login="+usr_login+"&fname="+fname+"&method_free=&method_premium="+method_premium+"&down_script=1&method_free="+method_free
data = scrapertools.cache_page( page_url , post=post )
#logger.info("data="+data)
# Extract the packed block and unpack it
packed = scrapertools.get_match(data,"(<script type='text/javascript'>eval\(function\(p,a,c,k,e,d\).*?</script>)")
from core import jsunpack
unpacked = jsunpack.unpack(packed)
logger.info("unpacked="+unpacked)
# Extract the descriptor
playlist = scrapertools.get_match(unpacked,"playlist\=(.*?\.xml)")
data = scrapertools.cache_page( playlist )
location = scrapertools.get_match(data,"<file>([^<]+)</file>")
video_urls.append( [ scrapertools.get_filename_from_url(location)[-4:] + " [nosvideo]",location ] )
for video_url in video_urls:
logger.info("[nosvideo.py] %s - %s" % (video_url[0],video_url[1]))
return video_urls
# Find this server's videos in the given text
def find_videos(data):
encontrados = set()
devuelve = []
#http://nosvideo.com/?v=iij5rw25kh4c
patronvideos = '(nosvideo.com/\?v\=[a-z0-9]+)'
logger.info("[nosvideo.py] find_videos #"+patronvideos+"#")
matches = re.compile(patronvideos,re.DOTALL).findall(data)
for match in matches:
titulo = "[nosvideo]"
url = "http://"+match
if url not in encontrados:
logger.info(" url="+url)
devuelve.append( [ titulo , url , 'nosvideo' ] )
encontrados.add(url)
else:
logger.info(" url duplicada="+url)
#http://nosupload.com/?v=iij5rw25kh4c
patronvideos = 'nosupload.com(/\?v\=[a-z0-9]+)'
logger.info("[nosvideo.py] find_videos #"+patronvideos+"#")
matches = re.compile(patronvideos,re.DOTALL).findall(data)
for match in matches:
titulo = "[nosvideo]"
url = "http://nosvideo.com"+match
if url not in encontrados:
logger.info(" url="+url)
devuelve.append( [ titulo , url , 'nosvideo' ] )
encontrados.add(url)
else:
logger.info(" url duplicada="+url)
return devuelve
def test():
video_urls = get_video_url("http://nosvideo.com/?v=zuxl97lozqmp")
return len(video_urls)>0 | gpl-3.0 |
jmztaylor/android_kernel_htc_zara | tools/perf/scripts/python/sched-migration.py | 11215 | 11670 | #!/usr/bin/python
#
# Cpu task migration overview toy
#
# Copyright (C) 2010 Frederic Weisbecker <[email protected]>
#
# perf script event handlers have been generated by perf script -g python
#
# This software is distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
import os
import sys
from collections import defaultdict
from UserList import UserList
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
sys.path.append('scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from SchedGui import *
threads = { 0 : "idle"}
def thread_name(pid):
return "%s:%d" % (threads[pid], pid)
class RunqueueEventUnknown:
@staticmethod
def color():
return None
def __repr__(self):
return "unknown"
class RunqueueEventSleep:
@staticmethod
def color():
return (0, 0, 0xff)
def __init__(self, sleeper):
self.sleeper = sleeper
def __repr__(self):
return "%s gone to sleep" % thread_name(self.sleeper)
class RunqueueEventWakeup:
@staticmethod
def color():
return (0xff, 0xff, 0)
def __init__(self, wakee):
self.wakee = wakee
def __repr__(self):
return "%s woke up" % thread_name(self.wakee)
class RunqueueEventFork:
@staticmethod
def color():
return (0, 0xff, 0)
def __init__(self, child):
self.child = child
def __repr__(self):
return "new forked task %s" % thread_name(self.child)
class RunqueueMigrateIn:
@staticmethod
def color():
return (0, 0xf0, 0xff)
def __init__(self, new):
self.new = new
def __repr__(self):
return "task migrated in %s" % thread_name(self.new)
class RunqueueMigrateOut:
@staticmethod
def color():
return (0xff, 0, 0xff)
def __init__(self, old):
self.old = old
def __repr__(self):
return "task migrated out %s" % thread_name(self.old)
class RunqueueSnapshot:
def __init__(self, tasks = [0], event = RunqueueEventUnknown()):
self.tasks = tuple(tasks)
self.event = event
def sched_switch(self, prev, prev_state, next):
event = RunqueueEventUnknown()
if taskState(prev_state) == "R" and next in self.tasks \
and prev in self.tasks:
return self
if taskState(prev_state) != "R":
event = RunqueueEventSleep(prev)
next_tasks = list(self.tasks[:])
if prev in self.tasks:
if taskState(prev_state) != "R":
next_tasks.remove(prev)
elif taskState(prev_state) == "R":
next_tasks.append(prev)
if next not in next_tasks:
next_tasks.append(next)
return RunqueueSnapshot(next_tasks, event)
def migrate_out(self, old):
if old not in self.tasks:
return self
next_tasks = [task for task in self.tasks if task != old]
return RunqueueSnapshot(next_tasks, RunqueueMigrateOut(old))
def __migrate_in(self, new, event):
if new in self.tasks:
self.event = event
return self
next_tasks = self.tasks[:] + tuple([new])
return RunqueueSnapshot(next_tasks, event)
def migrate_in(self, new):
return self.__migrate_in(new, RunqueueMigrateIn(new))
def wake_up(self, new):
return self.__migrate_in(new, RunqueueEventWakeup(new))
def wake_up_new(self, new):
return self.__migrate_in(new, RunqueueEventFork(new))
def load(self):
""" Provide the number of tasks on the runqueue.
Don't count idle"""
return len(self.tasks) - 1
def __repr__(self):
ret = self.tasks.__repr__()
ret += self.origin_tostring()
return ret
class TimeSlice:
def __init__(self, start, prev):
self.start = start
self.prev = prev
self.end = start
# cpus that triggered the event
self.event_cpus = []
if prev is not None:
self.total_load = prev.total_load
self.rqs = prev.rqs.copy()
else:
self.rqs = defaultdict(RunqueueSnapshot)
self.total_load = 0
def __update_total_load(self, old_rq, new_rq):
diff = new_rq.load() - old_rq.load()
self.total_load += diff
def sched_switch(self, ts_list, prev, prev_state, next, cpu):
old_rq = self.prev.rqs[cpu]
new_rq = old_rq.sched_switch(prev, prev_state, next)
if old_rq is new_rq:
return
self.rqs[cpu] = new_rq
self.__update_total_load(old_rq, new_rq)
ts_list.append(self)
self.event_cpus = [cpu]
def migrate(self, ts_list, new, old_cpu, new_cpu):
if old_cpu == new_cpu:
return
old_rq = self.prev.rqs[old_cpu]
out_rq = old_rq.migrate_out(new)
self.rqs[old_cpu] = out_rq
self.__update_total_load(old_rq, out_rq)
new_rq = self.prev.rqs[new_cpu]
in_rq = new_rq.migrate_in(new)
self.rqs[new_cpu] = in_rq
self.__update_total_load(new_rq, in_rq)
ts_list.append(self)
if old_rq is not out_rq:
self.event_cpus.append(old_cpu)
self.event_cpus.append(new_cpu)
def wake_up(self, ts_list, pid, cpu, fork):
old_rq = self.prev.rqs[cpu]
if fork:
new_rq = old_rq.wake_up_new(pid)
else:
new_rq = old_rq.wake_up(pid)
if new_rq is old_rq:
return
self.rqs[cpu] = new_rq
self.__update_total_load(old_rq, new_rq)
ts_list.append(self)
self.event_cpus = [cpu]
def next(self, t):
self.end = t
return TimeSlice(t, self)
class TimeSliceList(UserList):
def __init__(self, arg = []):
self.data = arg
def get_time_slice(self, ts):
if len(self.data) == 0:
slice = TimeSlice(ts, TimeSlice(-1, None))
else:
slice = self.data[-1].next(ts)
return slice
def find_time_slice(self, ts):
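# Binary search: slices are appended in time order, so narrow the
# [start, end) window until we hit the slice whose interval contains
# ts; returns its index, or -1 if ts falls outside every slice.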
start = 0
end = len(self.data)
found = -1
searching = True
while searching:
if start == end or start == end - 1:
searching = False
i = (end + start) / 2
if self.data[i].start <= ts and self.data[i].end >= ts:
found = i
end = i
continue
if self.data[i].end < ts:
start = i
elif self.data[i].start > ts:
end = i
return found
def set_root_win(self, win):
self.root_win = win
def mouse_down(self, cpu, t):
idx = self.find_time_slice(t)
if idx == -1:
return
ts = self[idx]
rq = ts.rqs[cpu]
raw = "CPU: %d\n" % cpu
raw += "Last event : %s\n" % rq.event.__repr__()
raw += "Timestamp : %d.%06d\n" % (ts.start / (10 ** 9), (ts.start % (10 ** 9)) / 1000)
raw += "Duration : %6d us\n" % ((ts.end - ts.start) / (10 ** 6))
raw += "Load = %d\n" % rq.load()
for t in rq.tasks:
raw += "%s \n" % thread_name(t)
self.root_win.update_summary(raw)
def update_rectangle_cpu(self, slice, cpu):
rq = slice.rqs[cpu]
if slice.total_load != 0:
load_rate = rq.load() / float(slice.total_load)
else:
load_rate = 0
red_power = int(0xff - (0xff * load_rate))
color = (0xff, red_power, red_power)
top_color = None
if cpu in slice.event_cpus:
top_color = rq.event.color()
self.root_win.paint_rectangle_zone(cpu, color, top_color, slice.start, slice.end)
def fill_zone(self, start, end):
i = self.find_time_slice(start)
if i == -1:
return
for i in xrange(i, len(self.data)):
timeslice = self.data[i]
if timeslice.start > end:
return
for cpu in timeslice.rqs:
self.update_rectangle_cpu(timeslice, cpu)
def interval(self):
if len(self.data) == 0:
return (0, 0)
return (self.data[0].start, self.data[-1].end)
def nr_rectangles(self):
last_ts = self.data[-1]
max_cpu = 0
for cpu in last_ts.rqs:
if cpu > max_cpu:
max_cpu = cpu
return max_cpu
class SchedEventProxy:
def __init__(self):
self.current_tsk = defaultdict(lambda : -1)
self.timeslices = TimeSliceList()
def sched_switch(self, headers, prev_comm, prev_pid, prev_prio, prev_state,
next_comm, next_pid, next_prio):
""" Ensure the task we sched out this cpu is really the one
we logged. Otherwise we may have missed traces """
on_cpu_task = self.current_tsk[headers.cpu]
if on_cpu_task != -1 and on_cpu_task != prev_pid:
print "Sched switch event rejected ts: %s cpu: %d prev: %s(%d) next: %s(%d)" % \
(headers.ts_format(), headers.cpu, prev_comm, prev_pid, next_comm, next_pid)
threads[prev_pid] = prev_comm
threads[next_pid] = next_comm
self.current_tsk[headers.cpu] = next_pid
ts = self.timeslices.get_time_slice(headers.ts())
ts.sched_switch(self.timeslices, prev_pid, prev_state, next_pid, headers.cpu)
def migrate(self, headers, pid, prio, orig_cpu, dest_cpu):
ts = self.timeslices.get_time_slice(headers.ts())
ts.migrate(self.timeslices, pid, orig_cpu, dest_cpu)
def wake_up(self, headers, comm, pid, success, target_cpu, fork):
if success == 0:
return
ts = self.timeslices.get_time_slice(headers.ts())
ts.wake_up(self.timeslices, pid, target_cpu, fork)
def trace_begin():
global parser
parser = SchedEventProxy()
def trace_end():
app = wx.App(False)
timeslices = parser.timeslices
frame = RootFrame(timeslices, "Migration")
app.MainLoop()
def sched__sched_stat_runtime(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, runtime, vruntime):
pass
def sched__sched_stat_iowait(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, delay):
pass
def sched__sched_stat_sleep(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, delay):
pass
def sched__sched_stat_wait(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, delay):
pass
def sched__sched_process_fork(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
parent_comm, parent_pid, child_comm, child_pid):
pass
def sched__sched_process_wait(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio):
pass
def sched__sched_process_exit(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio):
pass
def sched__sched_process_free(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio):
pass
def sched__sched_migrate_task(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio, orig_cpu,
dest_cpu):
headers = EventHeaders(common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
parser.migrate(headers, pid, prio, orig_cpu, dest_cpu)
def sched__sched_switch(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
prev_comm, prev_pid, prev_prio, prev_state,
next_comm, next_pid, next_prio):
headers = EventHeaders(common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
parser.sched_switch(headers, prev_comm, prev_pid, prev_prio, prev_state,
next_comm, next_pid, next_prio)
def sched__sched_wakeup_new(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio, success,
target_cpu):
headers = EventHeaders(common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
parser.wake_up(headers, comm, pid, success, target_cpu, 1)
def sched__sched_wakeup(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio, success,
target_cpu):
headers = EventHeaders(common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
parser.wake_up(headers, comm, pid, success, target_cpu, 0)
def sched__sched_wait_task(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid, prio):
pass
def sched__sched_kthread_stop_ret(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
ret):
pass
def sched__sched_kthread_stop(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
comm, pid):
pass
def trace_unhandled(event_name, context, common_cpu, common_secs, common_nsecs,
common_pid, common_comm):
pass
| gpl-2.0 |
ric2b/Vivaldi-browser | chromium/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/third_party/h2/h2/frame_buffer.py | 46 | 6771 | # -*- coding: utf-8 -*-
"""
h2/frame_buffer
~~~~~~~~~~~~~~~
A data structure that provides a way to iterate over a byte buffer in terms of
frames.
"""
from hyperframe.exceptions import InvalidFrameError
from hyperframe.frame import (
Frame, HeadersFrame, ContinuationFrame, PushPromiseFrame
)
from .exceptions import (
ProtocolError, FrameTooLargeError, FrameDataMissingError
)
# To avoid a DOS attack based on sending loads of continuation frames, we limit
# the maximum number we're prepared to receive. In this case, we'll set the
# limit to 64, which means the largest encoded header block we can receive by
# default is 262144 bytes long, and the largest possible *at all* is 1073741760
# bytes long.
#
# This value seems reasonable for now, but in future we may want to evaluate
# making it configurable.
CONTINUATION_BACKLOG = 64
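# Arithmetic sketch for the worst case quoted above: 64 frames times the
# largest legal HTTP/2 frame payload (2 ** 24 - 1 bytes) gives
# 64 * 16777215 == 1073741760 bytes.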
class FrameBuffer(object):
"""
This is a data structure that expects to act as a buffer for HTTP/2 data
that allows iteration in terms of H2 frames.
"""
def __init__(self, server=False):
self.data = b''
self.max_frame_size = 0
self._preamble = b'PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n' if server else b''
self._preamble_len = len(self._preamble)
self._headers_buffer = []
def add_data(self, data):
"""
Add more data to the frame buffer.
:param data: A bytestring containing the byte buffer.
"""
if self._preamble_len:
data_len = len(data)
of_which_preamble = min(self._preamble_len, data_len)
if self._preamble[:of_which_preamble] != data[:of_which_preamble]:
raise ProtocolError("Invalid HTTP/2 preamble.")
data = data[of_which_preamble:]
self._preamble_len -= of_which_preamble
self._preamble = self._preamble[of_which_preamble:]
self.data += data
def _parse_frame_header(self, data):
"""
Parses the frame header from the data. Either returns a tuple of
(frame, length), or throws an exception. The returned frame may be None
if the frame is of unknown type.
"""
try:
frame, length = Frame.parse_frame_header(data[:9])
except ValueError as e:
# The frame header is invalid. This is a ProtocolError
raise ProtocolError("Invalid frame header received: %s" % str(e))
return frame, length
def _validate_frame_length(self, length):
"""
Confirm that the frame is an appropriate length.
"""
if length > self.max_frame_size:
raise FrameTooLargeError(
"Received overlong frame: length %d, max %d" %
(length, self.max_frame_size)
)
def _update_header_buffer(self, f):
"""
Updates the internal header buffer. Returns a frame that should replace
the current one. May throw exceptions if this frame is invalid.
"""
# Check if we're in the middle of a headers block. If we are, this
# frame *must* be a CONTINUATION frame with the same stream ID as the
# leading HEADERS or PUSH_PROMISE frame. Anything else is a
# ProtocolError. If the frame *is* valid, append it to the header
# buffer.
if self._headers_buffer:
stream_id = self._headers_buffer[0].stream_id
valid_frame = (
f is not None and
isinstance(f, ContinuationFrame) and
f.stream_id == stream_id
)
if not valid_frame:
raise ProtocolError("Invalid frame during header block.")
# Append the frame to the buffer.
self._headers_buffer.append(f)
if len(self._headers_buffer) > CONTINUATION_BACKLOG:
raise ProtocolError("Too many continuation frames received.")
# If this is the end of the header block, then we want to build a
# mutant HEADERS frame that's massive. Use the original one we got,
            # then set END_HEADERS and set its data appropriately. If it's not
# the end of the block, lose the current frame: we can't yield it.
if 'END_HEADERS' in f.flags:
f = self._headers_buffer[0]
f.flags.add('END_HEADERS')
f.data = b''.join(x.data for x in self._headers_buffer)
self._headers_buffer = []
else:
f = None
elif (isinstance(f, (HeadersFrame, PushPromiseFrame)) and
'END_HEADERS' not in f.flags):
# This is the start of a headers block! Save the frame off and then
# act like we didn't receive one.
self._headers_buffer.append(f)
f = None
return f
# The methods below support the iterator protocol.
def __iter__(self):
return self
def next(self): # Python 2
# First, check that we have enough data to successfully parse the
# next frame header. If not, bail. Otherwise, parse it.
if len(self.data) < 9:
raise StopIteration()
try:
f, length = self._parse_frame_header(self.data)
except InvalidFrameError: # pragma: no cover
raise ProtocolError("Received frame with invalid frame header.")
# Next, check that we have enough length to parse the frame body. If
# not, bail, leaving the frame header data in the buffer for next time.
if len(self.data) < length + 9:
raise StopIteration()
# Confirm the frame has an appropriate length.
self._validate_frame_length(length)
# Don't try to parse the body if we didn't get a frame we know about:
# there's nothing we can do with it anyway.
if f is not None:
try:
f.parse_body(memoryview(self.data[9:9+length]))
except InvalidFrameError:
raise FrameDataMissingError("Frame data missing or invalid")
# At this point, as we know we'll use or discard the entire frame, we
# can update the data.
self.data = self.data[9+length:]
# Pass the frame through the header buffer.
f = self._update_header_buffer(f)
# If we got a frame we didn't understand or shouldn't yield, rather
# than return None it'd be better if we just tried to get the next
# frame in the sequence instead. Recurse back into ourselves to do
# that. This is safe because the amount of work we have to do here is
# strictly bounded by the length of the buffer.
return f if f is not None else self.next()
def __next__(self): # Python 3
return self.next()
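# A minimal usage sketch (not part of the library): feed raw bytes in, then
# iterate complete frames out. The 16384-byte limit mirrors the protocol's
# default SETTINGS_MAX_FRAME_SIZE; in real use it comes from the peer's
# settings. The sample bytes are a hand-built empty SETTINGS frame
# (length 0, type 0x4, no flags, stream 0).
if __name__ == '__main__':  # pragma: no cover
    buf = FrameBuffer(server=False)
    buf.max_frame_size = 16384
    buf.add_data(b'\x00\x00\x00\x04\x00\x00\x00\x00\x00')
    for frame in buf:
        print(frame)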
| bsd-3-clause |
ApuliaSoftware/l10n-italy | __unported__/l10n_it_corrispettivi/installer.py | 12 | 2596 | # -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2011 Associazione OpenERP Italia
# (<http://www.openerp-italia.org>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import fields, osv
class corrispettivi_config_data(osv.osv_memory):
_name = 'corrispettivi.config.data'
_inherit = 'res.config'
_columns = {
'default_credit_account_id': fields.many2one('account.account', 'Default credit account',
domain=[('type','!=','view')], required=True, help='If doubtful, use income account'),
'default_debit_account_id': fields.many2one('account.account', 'Default debit account',
domain=[('type','!=','view')], required=True, help='If doubtful, use income account'),
}
def execute(self, cr, uid, ids, context=None):
for o in self.browse(cr, uid, ids, context=context):
seq_id = self.pool.get('ir.sequence').create(cr, uid, {
'name': 'Sezionale Corrispettivi',
'padding': 3,
'prefix': 'COJ/%(year)s/',
})
journal_id = self.pool.get('account.journal').create(cr, uid, {
'code': 'COJ',
'name': 'Sezionale Corrispettivi',
'type': 'sale',
'corrispettivi': True,
'sequence_id': seq_id,
'default_credit_account_id': o.default_credit_account_id.id,
'default_debit_account_id': o.default_debit_account_id.id,
})
partner_id = self.pool.get('res.partner').create(cr, uid, {
'name': 'Corrispettivi',
'ref': 'COJ',
'customer': False,
'supplier': False,
'corrispettivi': True,
})
corrispettivi_config_data()
| agpl-3.0 |
pahans/nototools | nototools/fix_khmer_and_lao_coverage.py | 6 | 2903 | #!/usr/bin/python
#
# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Fix Khmer and Lao fonts for better coverage."""
__author__ = "[email protected] (Roozbeh Pournader)"
import os
import sys
from fontTools import ttLib
from nototools import coverage
from nototools import font_data
from nototools import opentype_data
def merge_chars_from_bank(orig_font, bank_font, target_font, chars):
"""Merge glyphs from a bank font to another font.
Only the glyphs themselves, the horizontal metrics, and the cmaps will be
copied.
"""
bank_font = ttLib.TTFont(bank_font)
orig_font = ttLib.TTFont(orig_font)
bank_cmap = font_data.get_cmap(bank_font)
extra_cmap = {}
for char in sorted(chars):
assert char in bank_cmap
bank_glyph_name = bank_cmap[char]
assert bank_glyph_name not in orig_font['glyf'].glyphs
orig_font['glyf'][bank_glyph_name] = bank_font['glyf'][bank_glyph_name]
orig_font['hmtx'][bank_glyph_name] = bank_font['hmtx'][bank_glyph_name]
extra_cmap[char] = bank_glyph_name
font_data.add_to_cmap(orig_font, extra_cmap)
orig_font.save(target_font)
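# A hypothetical invocation, for illustration only (the file names are
# placeholders, and 0x200B / 0x25CC are ZERO WIDTH SPACE and DOTTED CIRCLE):
#
#   merge_chars_from_bank('NotoSansKhmer-Regular.ttf', 'NotoSans-Regular.ttf',
#                         'new/NotoSansKhmer-Regular.ttf',
#                         {0x200B, 0x25CC})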
_UNHINTED_FONTS_DIR = os.path.abspath(
os.path.join(
os.path.dirname(__file__),
os.pardir,
'fonts',
'individual',
'unhinted'))
def main(argv):
"""Fix all the fonts given in the command line.
If they are Lao fonts, make sure they have ZWSP and dotted circle. If they
are Khmer fonts, make sure they have ZWSP, joiners, and dotted circle."""
for font_name in argv[1:]:
        if 'Khmer' in font_name:
            script = 'Khmr'
        elif 'Lao' in font_name:
            script = 'Laoo'
        else:
            # Neither Khmer nor Lao: nothing to fix for this font, skip it.
            # (Without this, `script` would be stale or undefined here.)
            continue
needed_chars = set(opentype_data.SPECIAL_CHARACTERS_NEEDED[script])
lgc_font_name = (
os.path.basename(font_name).replace('Khmer', '').replace('Lao', ''))
lgc_font_name = os.path.join(_UNHINTED_FONTS_DIR, lgc_font_name)
font_charset = coverage.character_set(font_name)
missing_chars = needed_chars - font_charset
if missing_chars:
merge_chars_from_bank(
font_name,
lgc_font_name,
os.path.dirname(font_name)+'/new/'+os.path.basename(font_name),
missing_chars)
if __name__ == '__main__':
main(sys.argv)
| apache-2.0 |
RecursiveForest/whipper | whipper/test/test_common_renamer.py | 3 | 4950 | # -*- Mode: Python; test-case-name: whipper.test.test_image_cue -*-
# vi:si:et:sw=4:sts=4:ts=4
import os
import shutil
import tempfile
import unittest
from whipper.common import renamer
class RenameInFileTestcase(unittest.TestCase):
def setUp(self):
(fd, self._path) = tempfile.mkstemp(suffix='.whipper.renamer.infile')
os.write(fd, 'This is a test\nThis is another\n')
os.close(fd)
def testVerify(self):
o = renamer.RenameInFile(self._path, 'is is a', 'at was some')
self.assertEquals(o.verify(), None)
os.unlink(self._path)
self.assertRaises(AssertionError, o.verify)
def testDo(self):
o = renamer.RenameInFile(self._path, 'is is a', 'at was some')
o.do()
output = open(self._path).read()
self.assertEquals(output, 'That was some test\nThat was somenother\n')
os.unlink(self._path)
def testSerialize(self):
o = renamer.RenameInFile(self._path, 'is is a', 'at was some')
data = o.serialize()
o2 = renamer.RenameInFile.deserialize(data)
o2.do()
output = open(self._path).read()
self.assertEquals(output, 'That was some test\nThat was somenother\n')
os.unlink(self._path)
class RenameFileTestcase(unittest.TestCase):
def setUp(self):
(fd, self._source) = tempfile.mkstemp(suffix='.whipper.renamer.file')
os.write(fd, 'This is a test\nThis is another\n')
os.close(fd)
(fd, self._destination) = tempfile.mkstemp(
suffix='.whipper.renamer.file')
os.close(fd)
os.unlink(self._destination)
self._operation = renamer.RenameFile(self._source, self._destination)
def testVerify(self):
self.assertEquals(self._operation.verify(), None)
handle = open(self._destination, 'w')
handle.close()
self.assertRaises(AssertionError, self._operation.verify)
os.unlink(self._destination)
self.assertEquals(self._operation.verify(), None)
os.unlink(self._source)
self.assertRaises(AssertionError, self._operation.verify)
def testDo(self):
self._operation.do()
output = open(self._destination).read()
self.assertEquals(output, 'This is a test\nThis is another\n')
os.unlink(self._destination)
def testSerialize(self):
data = self._operation.serialize()
o = renamer.RenameFile.deserialize(data)
o.do()
output = open(self._destination).read()
self.assertEquals(output, 'This is a test\nThis is another\n')
os.unlink(self._destination)
class OperatorTestCase(unittest.TestCase):
def setUp(self):
self._statePath = tempfile.mkdtemp(suffix='.whipper.renamer.operator')
self._operator = renamer.Operator(self._statePath, 'test')
(fd, self._source) = tempfile.mkstemp(
suffix='.whipper.renamer.operator')
os.write(fd, 'This is a test\nThis is another\n')
os.close(fd)
(fd, self._destination) = tempfile.mkstemp(
suffix='.whipper.renamer.operator')
os.close(fd)
os.unlink(self._destination)
self._operator.addOperation(
renamer.RenameInFile(self._source, 'is is a', 'at was some'))
self._operator.addOperation(
renamer.RenameFile(self._source, self._destination))
def tearDown(self):
        shutil.rmtree(self._statePath)
def testLoadNoneDone(self):
self._operator.save()
o = renamer.Operator(self._statePath, 'test')
o.load()
self.assertEquals(o._todo, self._operator._todo)
self.assertEquals(o._done, [])
os.unlink(self._source)
def testLoadOneDone(self):
self.assertEquals(len(self._operator._done), 0)
self._operator.save()
self._operator.next()
self.assertEquals(len(self._operator._done), 1)
o = renamer.Operator(self._statePath, 'test')
o.load()
self.assertEquals(len(o._done), 1)
self.assertEquals(o._todo, self._operator._todo)
self.assertEquals(o._done, self._operator._done)
# now continue
o.next()
self.assertEquals(len(o._done), 2)
os.unlink(self._destination)
def testLoadOneInterrupted(self):
self.assertEquals(len(self._operator._done), 0)
self._operator.save()
# cheat by doing a task without saving
self._operator._todo[0].do()
self.assertEquals(len(self._operator._done), 0)
o = renamer.Operator(self._statePath, 'test')
o.load()
self.assertEquals(len(o._done), 0)
self.assertEquals(o._todo, self._operator._todo)
self.assertEquals(o._done, self._operator._done)
# now continue, resuming
o.next()
self.assertEquals(len(o._done), 1)
o.next()
self.assertEquals(len(o._done), 2)
os.unlink(self._destination)
| gpl-3.0 |
davidwtbuxton/pycodebase | src/codebase/client.py | 1 | 24245 | import ConfigParser
import logging
import os
import notrequests
from . import utils
CODEBASE_API_URL = 'https://api3.codebasehq.com'
logger = logging.getLogger(__file__)
class BaseClient(object):
"""Codebase API client class."""
def __init__(self, (username, key)):
self.auth = (username, key)
self.base_url = CODEBASE_API_URL
if not self.base_url.endswith('/'):
self.base_url += '/'
def _api_method(self, method, path, params=None, json=None, files=None):
url = self.base_url + path
headers = {'Accept': 'application/json'}
logger.debug('%r %r params:%r', method, url, params)
response = notrequests.request(
method,
url,
auth=self.auth,
params=params,
headers=headers,
json=json,
files=files,
timeout=30,
)
try:
response.raise_for_status()
except notrequests.HTTPError:
msg = 'Response %r for %r. Content: %r'
logger.info(msg, response.status_code, url, response.content)
raise
return response
def _api_put(self, path, params=None, json=None, files=None):
return self._api_method('PUT', path, params=params, json=json, files=files)
def _api_post(self, path, params=None, json=None, files=None):
return self._api_method('POST', path, params=params, json=json, files=files)
def _api_get(self, path, params=None):
return self._api_method('GET', path, params=params)
def _api_get_generator(self, path, params=None):
"""Yields pages of results, until a request gets a 404 response."""
params = dict(params) if params else {}
params['page'] = 1
while True:
try:
response = self._api_get(path, params=params)
except notrequests.HTTPError:
break
else:
yield response
params['page'] += 1
def _get_activity(self, path, raw=True, since=None):
# This is used for both /activity and /foo/activity APIs.
params = {}
if raw:
params['raw'] = 'true'
if since:
params['since'] = utils.format_since_dt(since)
for response in self._api_get_generator(path, params=params):
data = response.json()
# /:project/activity returns an empty list, status 200 when there
# are no more events.
if not data:
break
for obj in data:
yield obj['event']
def get_users(self):
"""Get all users for this account.
:rtype: generator
"""
path = 'users'
data = self._api_get(path).json()
for obj in data:
yield obj['user']
def get_activity(self, raw=True, since=None):
"""Get all events on the account.
:param raw: show all details
:param since: exclude activity before this date
:type raw: bool
:type since: datetime.datetime
:rtype: generator
"""
path = 'activity'
return self._get_activity(path, raw=raw, since=since)
def get_projects(self):
"""Get all the projects on an account.
:rtype: generator
"""
# The API for projects is not paginated, all projects in one request.
path = 'projects'
data = self._api_get(path).json()
for obj in data:
yield obj['project']
def get_project_users(self, project):
"""Get the users assigned to a project.
:param project: permalink for a project
:type project: str
:rtype: generator
"""
path = '%s/assignments' % project
data = self._api_get(path).json()
for obj in data:
yield obj['user']
def get_project_activity(self, project, raw=True, since=None):
"""Get events for a project.
:param project: permalink for a project
:param raw: show all details
:param since: exclude activity before this date
:type project: str
:type raw: bool
:type since: datetime.datetime
:rtype: generator
"""
path = '%s/activity' % project
return self._get_activity(path, raw=raw, since=since)
def get_repositories(self, project):
"""Get the code repositories for a project.
:param project: permalink for a project
:type project: str
:rtype: generator
"""
path = '%s/repositories' % (project,)
response = self._api_get(path)
data = response.json()
for obj in data:
yield obj['repository']
def get_commits(self, project, repo, ref):
"""Get commits in a project's repository for the given reference.
:param project: permalink for a project
:param repo: permalink for a repository in the project
:param ref: branch, tag or commit reference
:type project: str
:type repo: str
:type ref: str
:rtype: generator
"""
path = '%s/%s/commits/%s' % (project, repo, ref)
for response in self._api_get_generator(path):
data = response.json()
# The API is supposed to 404 when there are no more pages, but
# /:project/:repo/commits/:ref returns an empty list, status 200.
if not data:
break
for obj in data:
yield obj['commit']
def get_deployments(self, project, repo):
"""Get the deployments recorded for a project.
:param project: permalink for a project
:param repo: permalink for a repository in the project
:type project: str
:type repo: str
:rtype: generator
"""
path = '%s/%s/deployments' % (project, repo)
for response in self._api_get_generator(path):
data = response.json()
# The API is supposed to 404 when there are no more pages, but
# /:project/:repo/deployments returns an empty list, status 200.
if not data:
break
for obj in data:
yield obj['deployment']
def create_deployment(self, project, repo, branch, revision, environment, servers):
"""Creates a new deployment.
You can create a deployment even if the named repo does not exist, but
then the deployment will not appear when listing deployments.
:param project: permalink for a project
:param repo: permalink for a repository in the project
:param branch: git branch name
:param revision: git revision ID
:param environment: a name (e.g. "live" or "staging")
:param servers: comma-separated list of server names
:type project: str
:type repo: str
:type branch: str
:type revision: str
        :type environment: str
:type servers: str
"""
path = '%s/%s/deployments' % (project, repo)
payload = {
'deployment': {
'branch': branch,
'revision': revision,
'environment': environment,
'servers': servers,
},
}
response = self._api_post(path, json=payload)
data = response.json()
return data
def get_milestones(self, project):
"""Get the milestones for a project.
:param project: permalink for a project
:type project: str
:rtype: generator
"""
path = '%s/milestones' % (project,)
# Seems to be unpaginated.
response = self._api_get(path)
data = response.json()
for obj in data:
yield obj['ticketing_milestone']
def create_milestone(self, project, name, deadline=None, description=None,
estimated_time=None, parent_id=None,
responsible_user_id=None, start_at=None, status=None):
"""Create a new milestone.
See the API documentation on `milestones`_ for details.
.. note:: The Codebase API allows multiple milestones to have the same
name, and this method does not check for duplicates.
.. _milestones: https://support.codebasehq.com/kb/tickets-and-milestones/milestones
:param project: permalink for a project
:param name: new milestone's name
:param deadline: the date of this milestone's deadline
:param description: a long description for the new milestone
:param estimated_time: the estimated time for the milestone
:param parent_id: the ID of this milestone's parent milestone
:param responsible_user_id: the id of the user responsible for the milestone
:param start_at: the date this milestone begins
:param status: the milestone status. One of "active", "completed" or "cancelled"
:type project: str
:type name: str
:type deadline: str|datetime.date
:type description: str
:type estimated_time: int
:type parent_id: str
:type responsible_user_id: str
:type start_at: str|datetime.date
:type status: str
:rtype: dict
"""
path = '%s/milestones' % (project,)
milestone_data = utils.build_milestone_payload(deadline=deadline,
description=description, estimated_time=estimated_time, name=name,
parent_id=parent_id, responsible_user_id=responsible_user_id,
start_at=start_at, status=status)
payload = {'ticketing_milestone': milestone_data}
response = self._api_post(path, json=payload)
data = response.json()
return data
def update_milestone(self, project, milestone_id, deadline=None,
description=None, estimated_time=None, name=None, parent_id=None,
responsible_user_id=None, start_at=None, status=None):
"""Update an existing milestone."""
path = '%s/milestones/%s' % (project, milestone_id)
milestone_data = utils.build_milestone_payload(deadline=deadline,
description=description, estimated_time=estimated_time, name=name,
parent_id=parent_id, responsible_user_id=responsible_user_id,
start_at=start_at, status=status)
payload = {'ticketing_milestone': milestone_data}
response = self._api_put(path, json=payload)
data = response.json()
return data
def get_tickets(self, project, assignee=None, status=None, category=None,
type=None, priority=None, milestone=None):
"""Get all tickets on a project, or search for tickets.
Search terms can be a string, or a list of strings.
:param project: permalink for a project
:param assignee: search for tickets assigned to a user
:param status: ticket status, e.g. "open"
:param category: ticket category, e.g. "General"
:param type: ticket type, e.g. "Bug"
:param priority: ticket priority, e.g. "High"
:param milestone: milestone, e.g. "Sprint 3"
:type project: str
:type assignee: str|list
:type status: str|list
:type category: str|list
:type type: str|list
:type priority: str|list
:type milestone: str|list
:rtype: generator
"""
path = '%s/tickets' % project
query = utils.build_ticket_search_query(
assignee=assignee,
status=status,
            category=category,
type=type,
priority=priority,
milestone=milestone,
)
params = {'query': query} if query else {}
for response in self._api_get_generator(path, params=params):
data = response.json()
for obj in data:
yield obj['ticket']
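    # For example (values are illustrative): search terms may be strings or
    # lists of strings, so both of these are valid:
    #
    #   client.get_tickets('my-project', status='open')
    #   client.get_tickets('my-project', status=['open', 'in-progress'],
    #                      assignee='alice')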
def create_ticket(self, project, assignee_id=None, category_id=None,
description=None, milestone_id=None, priority_id=None,
reporter_id=None, status_id=None, summary=None, type=None,
upload_tokens=None):
"""Create a new ticket.
See the API documentation on `tickets and milestones`_ for details.
.. _tickets and milestones: https://support.codebasehq.com/kb/tickets-and-milestones
"""
path = '%s/tickets' % project
payload = {
'ticket': {
'summary': summary,
'ticket_type': type,
'reporter_id': reporter_id,
'assignee_id': assignee_id,
'category_id': category_id,
'priority_id': priority_id,
'status_id': status_id,
'milestone_id': milestone_id,
'upload_tokens': upload_tokens,
'description': description,
},
}
response = self._api_post(path, json=payload)
data = response.json()
return data
def get_ticket_notes(self, project, ticket_id):
"""Get all notes for a ticket in a project.
:param project: permalink for a project
:param ticket_id: a ticket number
:type project: str
:type ticket_id: int
:rtype: generator
"""
# The API returns all notes in a single response. Not paginated.
path = '%s/tickets/%s/notes' % (project, ticket_id)
data = self._api_get(path).json()
for obj in data:
yield obj['ticket_note']
def create_ticket_note(self, project, ticket_id, assignee_id=None,
category_id=None, content=None, milestone_id=None, priority_id=None,
private=None, status_id=None, summary=None, time_added=None,
upload_tokens=None):
"""Create a new note on a ticket in a project.
See the API documentation on `updating tickets`_ for details.
.. _updating tickets: https://support.codebasehq.com/kb/tickets-and-milestones/updating-tickets
"""
# You can change a ticket's properties by creating a note.
path = '%s/tickets/%s/notes' % (project, ticket_id)
note_data = utils.build_create_note_payload(
assignee_id=assignee_id,
category_id=category_id,
content=content,
milestone_id=milestone_id,
priority_id=priority_id,
private=private,
status_id=status_id,
summary=summary,
time_added=time_added,
upload_tokens=upload_tokens,
)
payload = {'ticket_note': note_data}
data = self._api_post(path, json=payload).json()
return data
def upload_files(self, files):
"""Upload files.
Each file in the list can be one of:
* A file-like object open for reading.
* A pair of (filename, file-like object).
* A pair of (filename, byte-string).
Returns a generator of upload info dictionaries. The 'identifier' key
for an uploaded file can be used in the `upload_tokens` argument when
creating a ticket or note.
:param files: list of files to upload
:type files: list
:rtype: generator
"""
# https://support.codebasehq.com/kb/uploading-files
path = 'uploads'
field_name = 'files[]'
files_data = [(field_name, obj) for obj in files]
response = self._api_post(path, files=files_data)
data = response.json()
for obj in data:
yield obj['upload']
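    # A hypothetical flow (names are illustrative): upload a file, then attach
    # it to a new ticket via its 'identifier' token:
    #
    #   uploads = list(client.upload_files([('notes.txt', 'hello world')]))
    #   tokens = [u['identifier'] for u in uploads]
    #   client.create_ticket('my-project', summary='...', upload_tokens=tokens)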
def get_ticket_statuses(self, project):
"""Get all status choices in a project.
:param project: permalink for a project
:type project: str
:rtype: generator
"""
path = '%s/tickets/statuses' % project
data = self._api_get(path).json()
for obj in data:
yield obj['ticketing_status']
def get_ticket_categories(self, project):
"""Get all ticket category choices in a project.
:param project: permalink for a project
:type project: str
:rtype: generator
"""
path = '%s/tickets/categories' % project
data = self._api_get(path).json()
for obj in data:
yield obj['ticketing_category']
def get_ticket_types(self, project):
"""Get all ticket types in a project.
:param project: permalink for a project
:type project: str
:rtype: generator
"""
path = '%s/tickets/types' % project
data = self._api_get(path).json()
for obj in data:
yield obj['ticketing_type']
def get_ticket_priorities(self, project):
"""Get all ticket priorities in a project.
:param project: permalink for a project
:type project: str
:rtype: generator
"""
path = '%s/tickets/priorities' % project
data = self._api_get(path).json()
for obj in data:
yield obj['ticketing_priority']
def get_file_contents(self, project, repo, ref, file_path):
"""Get a file's content.
:param project: permalink for a project
:param repo: permalink for a repository in the project
:param ref: branch, tag or commit reference
:param file_path: path of the file
:type project: str
:type repo: str
:type ref: str
:type file_path: str
:rtype: string
"""
path = '%s/%s/blob/%s/%s' % (project, repo, ref, file_path)
response = self._api_get(path)
return response.content
def _my_username(self):
username, _ = self.auth
# Convert 'example/alice' to 'alice'.
_, _, username = username.rpartition('/')
return username
def get_user_keys(self, username):
"""Get public SSH keys for a user.
        The username should be the short version of a Codebase username. If your
API username is "example/alice" then the username is "alice".
:param username: the user's Codebase username
:type username: str
"""
path = 'users/%s/public_keys' % username
data = self._api_get(path).json()
for obj in data:
yield obj['public_key_join']
def get_my_keys(self):
"""Get the public SSH keys for the current authenticated user."""
username = self._my_username()
return self.get_user_keys(username)
def add_user_key(self, username, description, key):
"""Add a new SSH key for a user.
See the documentation for `public keys`_ for details of the key format.
See :py:meth:`~Client.get_user_keys` for the username format.
.. _public keys: https://support.codebasehq.com/kb/public-keys
:param username: the user's Codebase username
:param description: a short description for the key
:param key: the text of the public SSH key
:type username: str
:type description: str
:type key: str
"""
path = 'users/%s/public_keys' % username
payload = {
'public_key': {
'description': description,
'key': key,
},
}
data = self._api_post(path, json=payload).json()
return data
def add_my_key(self, description, key):
"""Add a new SSH key for the current authenticated user."""
username = self._my_username()
return self.add_user_key(username, description, key)
@classmethod
def with_secrets(cls, filename):
"""Create a new instance of Client.
        The API username / key are read from a file. A filename like
        '~/.secrets' is expanded to the current user's home directory.
The file must be in INI format, with a section named "api" and
properties for "username" and "key" within the section.
::
[api]
username = example/alice
key = topsecret
:param filename: path to INI file
:type filename: str
"""
return new_client_with_secrets_from_filename(cls, filename)
class Client(BaseClient):
"""Codebase API client class that allows some ticket properties to be
referenced by name instead of the object ID.
"""
def __init__(self, *args, **kwargs):
super(Client, self).__init__(*args, **kwargs)
self.reset_cache()
def reset_cache(self):
self._cache = {}
def use_cache(self, project, kwargs):
"""For some ticket classifiers (what Codebase calls organisational
objects, such as status, category, etc.), return a new dict with the
classifier's name replaced by the classifier's ID.
This works by making an API call to get ticket statuses, etc. and
caching the results for quick lookup. The cache can be cleared with
Client.reset_cache().
"""
        # We take an argument like 'category', look up a value and reassign it
# to 'category_id'.
classifiers = {
'assignee': self.cache_assignee,
'category': self.cache_category,
'milestone': self.cache_milestone,
'priority': self.cache_priority,
'status': self.cache_status,
}
# Make a copy of the original, don't mutate the dict we were given.
kwargs = dict(kwargs)
        # Could use a defaultdict or something. Anyway, set up the project's cache.
if project not in self._cache:
self._cache[project] = {}
for name in classifiers:
if name in kwargs:
# Check if we have cached the results for this classifier. If
# not, then call a method like 'Client.cache_category()'
# and assign the result to the cache. N.B. The cache is
# per-project.
if name not in self._cache[project]:
self._cache[project][name] = classifiers[name](project)
# OK. We've cached this classifier's stuff.
value = kwargs[name]
value = self._cache[project][name].get(value, value)
# Now set the '_id' version and unset the original argument.
kwargs[name + '_id'] = value
del kwargs[name]
return kwargs
def cache_assignee(self, project):
result = {}
users = self.get_project_users(project)
# We allow an assignee to be specified by the username or any of their
# email addresses.
for user in users:
user_id = user['id']
result[user['username']] = user_id
for email in user['email_addresses']:
result[email] = user_id
return result
def cache_category(self, project):
return {obj['name']: obj['id'] for obj in self.get_ticket_categories(project)}
def cache_milestone(self, project):
return {obj['name']: obj['id'] for obj in self.get_milestones(project)}
def cache_priority(self, project):
return {obj['name']: obj['id'] for obj in self.get_ticket_priorities(project)}
def cache_status(self, project):
return {obj['name']: obj['id'] for obj in self.get_ticket_statuses(project)}
def create_ticket(self, project, **kwargs):
kwargs = self.use_cache(project, kwargs)
return super(Client, self).create_ticket(project, **kwargs)
def create_ticket_note(self, project, ticket_id, **kwargs):
kwargs = self.use_cache(project, kwargs)
return super(Client, self).create_ticket_note(project, ticket_id, **kwargs)
def new_client_with_secrets_from_filename(cls, filename):
"""Returns a new instance of codebase.Client. The username / key are read
from the filename which must be in INI format. A filename like '~/.secrets'
is expanded to the current user's home directory.
"""
config = ConfigParser.SafeConfigParser()
filename = os.path.expanduser(filename)
with open(filename) as fh:
config.readfp(fh)
username = config.get('api', 'username')
key = config.get('api', 'key')
return cls((username, key))
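# A minimal usage sketch (not part of the library; the secrets path, project
# permalink and search value below are illustrative):
if __name__ == '__main__':
    client = Client.with_secrets('~/.codebase-secrets')
    for ticket in client.get_tickets('my-project', status='open'):
        print(ticket)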
| mit |
pgmillon/ansible | lib/ansible/modules/net_tools/nios/nios_naptr_record.py | 68 | 5884 | #!/usr/bin/python
# Copyright (c) 2018 Red Hat, Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = '''
---
module: nios_naptr_record
version_added: "2.7"
author: "Blair Rampling (@brampling)"
short_description: Configure Infoblox NIOS NAPTR records
description:
- Adds and/or removes instances of NAPTR record objects from
Infoblox NIOS servers. This module manages NIOS C(record:naptr) objects
using the Infoblox WAPI interface over REST.
requirements:
- infoblox_client
extends_documentation_fragment: nios
options:
name:
description:
- Specifies the fully qualified hostname to add or remove from
the system
required: true
view:
description:
- Sets the DNS view to associate this a record with. The DNS
view must already be configured on the system
required: true
default: default
aliases:
- dns_view
order:
description:
- Configures the order (0-65535) for this NAPTR record. This parameter
specifies the order in which the NAPTR rules are applied when
multiple rules are present.
required: true
preference:
description:
- Configures the preference (0-65535) for this NAPTR record. The
            preference field determines the order in which NAPTR records are
            processed when multiple records with the same order parameter are
            present.
required: true
replacement:
description:
- Configures the replacement field for this NAPTR record.
For nonterminal NAPTR records, this field specifies the
next domain name to look up.
required: true
services:
description:
- Configures the services field (128 characters maximum) for this
NAPTR record. The services field contains protocol and service
identifiers, such as "http+E2U" or "SIPS+D2T".
required: false
flags:
description:
- Configures the flags field for this NAPTR record. These control the
interpretation of the fields for an NAPTR record object. Supported
values for the flags field are "U", "S", "P" and "A".
required: false
regexp:
description:
- Configures the regexp field for this NAPTR record. This is the
regular expression-based rewriting rule of the NAPTR record. This
should be a POSIX compliant regular expression, including the
substitution rule and flags. Refer to RFC 2915 for the field syntax
details.
required: false
ttl:
description:
- Configures the TTL to be associated with this NAPTR record
extattrs:
description:
- Allows for the configuration of Extensible Attributes on the
instance of the object. This argument accepts a set of key / value
pairs for configuration.
comment:
description:
- Configures a text string comment to be associated with the instance
of this object. The provided text string will be configured on the
object instance.
state:
description:
- Configures the intended state of the instance of the object on
the NIOS server. When this value is set to C(present), the object
is configured on the device and when this value is set to C(absent)
the value is removed (if necessary) from the device.
default: present
choices:
- present
- absent
'''
EXAMPLES = '''
- name: configure a NAPTR record
nios_naptr_record:
name: '*.subscriber-100.ansiblezone.com'
order: 1000
preference: 10
replacement: replacement1.network.ansiblezone.com
state: present
provider:
host: "{{ inventory_hostname_short }}"
username: admin
password: admin
connection: local
- name: add a comment to an existing NAPTR record
nios_naptr_record:
name: '*.subscriber-100.ansiblezone.com'
order: 1000
preference: 10
replacement: replacement1.network.ansiblezone.com
comment: this is a test comment
state: present
provider:
host: "{{ inventory_hostname_short }}"
username: admin
password: admin
connection: local
- name: remove a NAPTR record from the system
nios_naptr_record:
name: '*.subscriber-100.ansiblezone.com'
order: 1000
preference: 10
replacement: replacement1.network.ansiblezone.com
state: absent
provider:
host: "{{ inventory_hostname_short }}"
username: admin
password: admin
connection: local
'''
RETURN = ''' # '''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six import iteritems
from ansible.module_utils.net_tools.nios.api import WapiModule
def main():
''' Main entry point for module execution
'''
ib_spec = dict(
name=dict(required=True, ib_req=True),
view=dict(default='default', aliases=['dns_view'], ib_req=True),
order=dict(type='int', ib_req=True),
preference=dict(type='int', ib_req=True),
replacement=dict(ib_req=True),
services=dict(),
flags=dict(),
regexp=dict(),
ttl=dict(type='int'),
extattrs=dict(type='dict'),
comment=dict(),
)
argument_spec = dict(
provider=dict(required=True),
state=dict(default='present', choices=['present', 'absent'])
)
argument_spec.update(ib_spec)
argument_spec.update(WapiModule.provider_spec)
module = AnsibleModule(argument_spec=argument_spec,
supports_check_mode=True)
wapi = WapiModule(module)
result = wapi.run('record:naptr', ib_spec)
module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 |
an7oine/WinVHS | Cygwin/lib/python2.7/encodings/mac_arabic.py | 593 | 36723 | """ Python Character Mapping Codec generated from 'VENDORS/APPLE/ARABIC.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_map)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_map)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='mac-arabic',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
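# A minimal round-trip check (byte values taken from the tables below; the
# codec must be registered, e.g. as Python 2's built-in 'mac-arabic' codec):
#
#   u'\u0627\u0644'.encode('mac-arabic') == '\xc7\xe4'   # ALEF, LAM
#   '\xc7\xe4'.decode('mac-arabic') == u'\u0627\u0644'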
### Decoding Map
decoding_map = codecs.make_identity_dict(range(256))
decoding_map.update({
0x0080: 0x00c4, # LATIN CAPITAL LETTER A WITH DIAERESIS
0x0081: 0x00a0, # NO-BREAK SPACE, right-left
0x0082: 0x00c7, # LATIN CAPITAL LETTER C WITH CEDILLA
0x0083: 0x00c9, # LATIN CAPITAL LETTER E WITH ACUTE
0x0084: 0x00d1, # LATIN CAPITAL LETTER N WITH TILDE
0x0085: 0x00d6, # LATIN CAPITAL LETTER O WITH DIAERESIS
0x0086: 0x00dc, # LATIN CAPITAL LETTER U WITH DIAERESIS
0x0087: 0x00e1, # LATIN SMALL LETTER A WITH ACUTE
0x0088: 0x00e0, # LATIN SMALL LETTER A WITH GRAVE
0x0089: 0x00e2, # LATIN SMALL LETTER A WITH CIRCUMFLEX
0x008a: 0x00e4, # LATIN SMALL LETTER A WITH DIAERESIS
0x008b: 0x06ba, # ARABIC LETTER NOON GHUNNA
0x008c: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK, right-left
0x008d: 0x00e7, # LATIN SMALL LETTER C WITH CEDILLA
0x008e: 0x00e9, # LATIN SMALL LETTER E WITH ACUTE
0x008f: 0x00e8, # LATIN SMALL LETTER E WITH GRAVE
0x0090: 0x00ea, # LATIN SMALL LETTER E WITH CIRCUMFLEX
0x0091: 0x00eb, # LATIN SMALL LETTER E WITH DIAERESIS
0x0092: 0x00ed, # LATIN SMALL LETTER I WITH ACUTE
0x0093: 0x2026, # HORIZONTAL ELLIPSIS, right-left
0x0094: 0x00ee, # LATIN SMALL LETTER I WITH CIRCUMFLEX
0x0095: 0x00ef, # LATIN SMALL LETTER I WITH DIAERESIS
0x0096: 0x00f1, # LATIN SMALL LETTER N WITH TILDE
0x0097: 0x00f3, # LATIN SMALL LETTER O WITH ACUTE
0x0098: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK, right-left
0x0099: 0x00f4, # LATIN SMALL LETTER O WITH CIRCUMFLEX
0x009a: 0x00f6, # LATIN SMALL LETTER O WITH DIAERESIS
0x009b: 0x00f7, # DIVISION SIGN, right-left
0x009c: 0x00fa, # LATIN SMALL LETTER U WITH ACUTE
0x009d: 0x00f9, # LATIN SMALL LETTER U WITH GRAVE
0x009e: 0x00fb, # LATIN SMALL LETTER U WITH CIRCUMFLEX
0x009f: 0x00fc, # LATIN SMALL LETTER U WITH DIAERESIS
0x00a0: 0x0020, # SPACE, right-left
0x00a1: 0x0021, # EXCLAMATION MARK, right-left
0x00a2: 0x0022, # QUOTATION MARK, right-left
0x00a3: 0x0023, # NUMBER SIGN, right-left
0x00a4: 0x0024, # DOLLAR SIGN, right-left
0x00a5: 0x066a, # ARABIC PERCENT SIGN
0x00a6: 0x0026, # AMPERSAND, right-left
0x00a7: 0x0027, # APOSTROPHE, right-left
0x00a8: 0x0028, # LEFT PARENTHESIS, right-left
0x00a9: 0x0029, # RIGHT PARENTHESIS, right-left
0x00aa: 0x002a, # ASTERISK, right-left
0x00ab: 0x002b, # PLUS SIGN, right-left
0x00ac: 0x060c, # ARABIC COMMA
0x00ad: 0x002d, # HYPHEN-MINUS, right-left
0x00ae: 0x002e, # FULL STOP, right-left
0x00af: 0x002f, # SOLIDUS, right-left
0x00b0: 0x0660, # ARABIC-INDIC DIGIT ZERO, right-left (need override)
0x00b1: 0x0661, # ARABIC-INDIC DIGIT ONE, right-left (need override)
0x00b2: 0x0662, # ARABIC-INDIC DIGIT TWO, right-left (need override)
0x00b3: 0x0663, # ARABIC-INDIC DIGIT THREE, right-left (need override)
0x00b4: 0x0664, # ARABIC-INDIC DIGIT FOUR, right-left (need override)
0x00b5: 0x0665, # ARABIC-INDIC DIGIT FIVE, right-left (need override)
0x00b6: 0x0666, # ARABIC-INDIC DIGIT SIX, right-left (need override)
0x00b7: 0x0667, # ARABIC-INDIC DIGIT SEVEN, right-left (need override)
0x00b8: 0x0668, # ARABIC-INDIC DIGIT EIGHT, right-left (need override)
0x00b9: 0x0669, # ARABIC-INDIC DIGIT NINE, right-left (need override)
0x00ba: 0x003a, # COLON, right-left
0x00bb: 0x061b, # ARABIC SEMICOLON
0x00bc: 0x003c, # LESS-THAN SIGN, right-left
0x00bd: 0x003d, # EQUALS SIGN, right-left
0x00be: 0x003e, # GREATER-THAN SIGN, right-left
0x00bf: 0x061f, # ARABIC QUESTION MARK
0x00c0: 0x274a, # EIGHT TEARDROP-SPOKED PROPELLER ASTERISK, right-left
0x00c1: 0x0621, # ARABIC LETTER HAMZA
0x00c2: 0x0622, # ARABIC LETTER ALEF WITH MADDA ABOVE
0x00c3: 0x0623, # ARABIC LETTER ALEF WITH HAMZA ABOVE
0x00c4: 0x0624, # ARABIC LETTER WAW WITH HAMZA ABOVE
0x00c5: 0x0625, # ARABIC LETTER ALEF WITH HAMZA BELOW
0x00c6: 0x0626, # ARABIC LETTER YEH WITH HAMZA ABOVE
0x00c7: 0x0627, # ARABIC LETTER ALEF
0x00c8: 0x0628, # ARABIC LETTER BEH
0x00c9: 0x0629, # ARABIC LETTER TEH MARBUTA
0x00ca: 0x062a, # ARABIC LETTER TEH
0x00cb: 0x062b, # ARABIC LETTER THEH
0x00cc: 0x062c, # ARABIC LETTER JEEM
0x00cd: 0x062d, # ARABIC LETTER HAH
0x00ce: 0x062e, # ARABIC LETTER KHAH
0x00cf: 0x062f, # ARABIC LETTER DAL
0x00d0: 0x0630, # ARABIC LETTER THAL
0x00d1: 0x0631, # ARABIC LETTER REH
0x00d2: 0x0632, # ARABIC LETTER ZAIN
0x00d3: 0x0633, # ARABIC LETTER SEEN
0x00d4: 0x0634, # ARABIC LETTER SHEEN
0x00d5: 0x0635, # ARABIC LETTER SAD
0x00d6: 0x0636, # ARABIC LETTER DAD
0x00d7: 0x0637, # ARABIC LETTER TAH
0x00d8: 0x0638, # ARABIC LETTER ZAH
0x00d9: 0x0639, # ARABIC LETTER AIN
0x00da: 0x063a, # ARABIC LETTER GHAIN
0x00db: 0x005b, # LEFT SQUARE BRACKET, right-left
0x00dc: 0x005c, # REVERSE SOLIDUS, right-left
0x00dd: 0x005d, # RIGHT SQUARE BRACKET, right-left
0x00de: 0x005e, # CIRCUMFLEX ACCENT, right-left
0x00df: 0x005f, # LOW LINE, right-left
0x00e0: 0x0640, # ARABIC TATWEEL
0x00e1: 0x0641, # ARABIC LETTER FEH
0x00e2: 0x0642, # ARABIC LETTER QAF
0x00e3: 0x0643, # ARABIC LETTER KAF
0x00e4: 0x0644, # ARABIC LETTER LAM
0x00e5: 0x0645, # ARABIC LETTER MEEM
0x00e6: 0x0646, # ARABIC LETTER NOON
0x00e7: 0x0647, # ARABIC LETTER HEH
0x00e8: 0x0648, # ARABIC LETTER WAW
0x00e9: 0x0649, # ARABIC LETTER ALEF MAKSURA
0x00ea: 0x064a, # ARABIC LETTER YEH
0x00eb: 0x064b, # ARABIC FATHATAN
0x00ec: 0x064c, # ARABIC DAMMATAN
0x00ed: 0x064d, # ARABIC KASRATAN
0x00ee: 0x064e, # ARABIC FATHA
0x00ef: 0x064f, # ARABIC DAMMA
0x00f0: 0x0650, # ARABIC KASRA
0x00f1: 0x0651, # ARABIC SHADDA
0x00f2: 0x0652, # ARABIC SUKUN
0x00f3: 0x067e, # ARABIC LETTER PEH
0x00f4: 0x0679, # ARABIC LETTER TTEH
0x00f5: 0x0686, # ARABIC LETTER TCHEH
0x00f6: 0x06d5, # ARABIC LETTER AE
0x00f7: 0x06a4, # ARABIC LETTER VEH
0x00f8: 0x06af, # ARABIC LETTER GAF
0x00f9: 0x0688, # ARABIC LETTER DDAL
0x00fa: 0x0691, # ARABIC LETTER RREH
0x00fb: 0x007b, # LEFT CURLY BRACKET, right-left
0x00fc: 0x007c, # VERTICAL LINE, right-left
0x00fd: 0x007d, # RIGHT CURLY BRACKET, right-left
0x00fe: 0x0698, # ARABIC LETTER JEH
0x00ff: 0x06d2, # ARABIC LETTER YEH BARREE
})
### Decoding Table
decoding_table = (
u'\x00' # 0x0000 -> CONTROL CHARACTER
u'\x01' # 0x0001 -> CONTROL CHARACTER
u'\x02' # 0x0002 -> CONTROL CHARACTER
u'\x03' # 0x0003 -> CONTROL CHARACTER
u'\x04' # 0x0004 -> CONTROL CHARACTER
u'\x05' # 0x0005 -> CONTROL CHARACTER
u'\x06' # 0x0006 -> CONTROL CHARACTER
u'\x07' # 0x0007 -> CONTROL CHARACTER
u'\x08' # 0x0008 -> CONTROL CHARACTER
u'\t' # 0x0009 -> CONTROL CHARACTER
u'\n' # 0x000a -> CONTROL CHARACTER
u'\x0b' # 0x000b -> CONTROL CHARACTER
u'\x0c' # 0x000c -> CONTROL CHARACTER
u'\r' # 0x000d -> CONTROL CHARACTER
u'\x0e' # 0x000e -> CONTROL CHARACTER
u'\x0f' # 0x000f -> CONTROL CHARACTER
u'\x10' # 0x0010 -> CONTROL CHARACTER
u'\x11' # 0x0011 -> CONTROL CHARACTER
u'\x12' # 0x0012 -> CONTROL CHARACTER
u'\x13' # 0x0013 -> CONTROL CHARACTER
u'\x14' # 0x0014 -> CONTROL CHARACTER
u'\x15' # 0x0015 -> CONTROL CHARACTER
u'\x16' # 0x0016 -> CONTROL CHARACTER
u'\x17' # 0x0017 -> CONTROL CHARACTER
u'\x18' # 0x0018 -> CONTROL CHARACTER
u'\x19' # 0x0019 -> CONTROL CHARACTER
u'\x1a' # 0x001a -> CONTROL CHARACTER
u'\x1b' # 0x001b -> CONTROL CHARACTER
u'\x1c' # 0x001c -> CONTROL CHARACTER
u'\x1d' # 0x001d -> CONTROL CHARACTER
u'\x1e' # 0x001e -> CONTROL CHARACTER
u'\x1f' # 0x001f -> CONTROL CHARACTER
u' ' # 0x0020 -> SPACE, left-right
u'!' # 0x0021 -> EXCLAMATION MARK, left-right
u'"' # 0x0022 -> QUOTATION MARK, left-right
u'#' # 0x0023 -> NUMBER SIGN, left-right
u'$' # 0x0024 -> DOLLAR SIGN, left-right
u'%' # 0x0025 -> PERCENT SIGN, left-right
u'&' # 0x0026 -> AMPERSAND, left-right
u"'" # 0x0027 -> APOSTROPHE, left-right
u'(' # 0x0028 -> LEFT PARENTHESIS, left-right
u')' # 0x0029 -> RIGHT PARENTHESIS, left-right
u'*' # 0x002a -> ASTERISK, left-right
u'+' # 0x002b -> PLUS SIGN, left-right
u',' # 0x002c -> COMMA, left-right; in Arabic-script context, displayed as 0x066C ARABIC THOUSANDS SEPARATOR
u'-' # 0x002d -> HYPHEN-MINUS, left-right
u'.' # 0x002e -> FULL STOP, left-right; in Arabic-script context, displayed as 0x066B ARABIC DECIMAL SEPARATOR
u'/' # 0x002f -> SOLIDUS, left-right
u'0' # 0x0030 -> DIGIT ZERO; in Arabic-script context, displayed as 0x0660 ARABIC-INDIC DIGIT ZERO
u'1' # 0x0031 -> DIGIT ONE; in Arabic-script context, displayed as 0x0661 ARABIC-INDIC DIGIT ONE
u'2' # 0x0032 -> DIGIT TWO; in Arabic-script context, displayed as 0x0662 ARABIC-INDIC DIGIT TWO
u'3' # 0x0033 -> DIGIT THREE; in Arabic-script context, displayed as 0x0663 ARABIC-INDIC DIGIT THREE
u'4' # 0x0034 -> DIGIT FOUR; in Arabic-script context, displayed as 0x0664 ARABIC-INDIC DIGIT FOUR
u'5' # 0x0035 -> DIGIT FIVE; in Arabic-script context, displayed as 0x0665 ARABIC-INDIC DIGIT FIVE
u'6' # 0x0036 -> DIGIT SIX; in Arabic-script context, displayed as 0x0666 ARABIC-INDIC DIGIT SIX
u'7' # 0x0037 -> DIGIT SEVEN; in Arabic-script context, displayed as 0x0667 ARABIC-INDIC DIGIT SEVEN
u'8' # 0x0038 -> DIGIT EIGHT; in Arabic-script context, displayed as 0x0668 ARABIC-INDIC DIGIT EIGHT
u'9' # 0x0039 -> DIGIT NINE; in Arabic-script context, displayed as 0x0669 ARABIC-INDIC DIGIT NINE
u':' # 0x003a -> COLON, left-right
u';' # 0x003b -> SEMICOLON, left-right
u'<' # 0x003c -> LESS-THAN SIGN, left-right
u'=' # 0x003d -> EQUALS SIGN, left-right
u'>' # 0x003e -> GREATER-THAN SIGN, left-right
u'?' # 0x003f -> QUESTION MARK, left-right
u'@' # 0x0040 -> COMMERCIAL AT
u'A' # 0x0041 -> LATIN CAPITAL LETTER A
u'B' # 0x0042 -> LATIN CAPITAL LETTER B
u'C' # 0x0043 -> LATIN CAPITAL LETTER C
u'D' # 0x0044 -> LATIN CAPITAL LETTER D
u'E' # 0x0045 -> LATIN CAPITAL LETTER E
u'F' # 0x0046 -> LATIN CAPITAL LETTER F
u'G' # 0x0047 -> LATIN CAPITAL LETTER G
u'H' # 0x0048 -> LATIN CAPITAL LETTER H
u'I' # 0x0049 -> LATIN CAPITAL LETTER I
u'J' # 0x004a -> LATIN CAPITAL LETTER J
u'K' # 0x004b -> LATIN CAPITAL LETTER K
u'L' # 0x004c -> LATIN CAPITAL LETTER L
u'M' # 0x004d -> LATIN CAPITAL LETTER M
u'N' # 0x004e -> LATIN CAPITAL LETTER N
u'O' # 0x004f -> LATIN CAPITAL LETTER O
u'P' # 0x0050 -> LATIN CAPITAL LETTER P
u'Q' # 0x0051 -> LATIN CAPITAL LETTER Q
u'R' # 0x0052 -> LATIN CAPITAL LETTER R
u'S' # 0x0053 -> LATIN CAPITAL LETTER S
u'T' # 0x0054 -> LATIN CAPITAL LETTER T
u'U' # 0x0055 -> LATIN CAPITAL LETTER U
u'V' # 0x0056 -> LATIN CAPITAL LETTER V
u'W' # 0x0057 -> LATIN CAPITAL LETTER W
u'X' # 0x0058 -> LATIN CAPITAL LETTER X
u'Y' # 0x0059 -> LATIN CAPITAL LETTER Y
u'Z' # 0x005a -> LATIN CAPITAL LETTER Z
u'[' # 0x005b -> LEFT SQUARE BRACKET, left-right
u'\\' # 0x005c -> REVERSE SOLIDUS, left-right
u']' # 0x005d -> RIGHT SQUARE BRACKET, left-right
u'^' # 0x005e -> CIRCUMFLEX ACCENT, left-right
u'_' # 0x005f -> LOW LINE, left-right
u'`' # 0x0060 -> GRAVE ACCENT
u'a' # 0x0061 -> LATIN SMALL LETTER A
u'b' # 0x0062 -> LATIN SMALL LETTER B
u'c' # 0x0063 -> LATIN SMALL LETTER C
u'd' # 0x0064 -> LATIN SMALL LETTER D
u'e' # 0x0065 -> LATIN SMALL LETTER E
u'f' # 0x0066 -> LATIN SMALL LETTER F
u'g' # 0x0067 -> LATIN SMALL LETTER G
u'h' # 0x0068 -> LATIN SMALL LETTER H
u'i' # 0x0069 -> LATIN SMALL LETTER I
u'j' # 0x006a -> LATIN SMALL LETTER J
u'k' # 0x006b -> LATIN SMALL LETTER K
u'l' # 0x006c -> LATIN SMALL LETTER L
u'm' # 0x006d -> LATIN SMALL LETTER M
u'n' # 0x006e -> LATIN SMALL LETTER N
u'o' # 0x006f -> LATIN SMALL LETTER O
u'p' # 0x0070 -> LATIN SMALL LETTER P
u'q' # 0x0071 -> LATIN SMALL LETTER Q
u'r' # 0x0072 -> LATIN SMALL LETTER R
u's' # 0x0073 -> LATIN SMALL LETTER S
u't' # 0x0074 -> LATIN SMALL LETTER T
u'u' # 0x0075 -> LATIN SMALL LETTER U
u'v' # 0x0076 -> LATIN SMALL LETTER V
u'w' # 0x0077 -> LATIN SMALL LETTER W
u'x' # 0x0078 -> LATIN SMALL LETTER X
u'y' # 0x0079 -> LATIN SMALL LETTER Y
u'z' # 0x007a -> LATIN SMALL LETTER Z
u'{' # 0x007b -> LEFT CURLY BRACKET, left-right
u'|' # 0x007c -> VERTICAL LINE, left-right
u'}' # 0x007d -> RIGHT CURLY BRACKET, left-right
u'~' # 0x007e -> TILDE
u'\x7f' # 0x007f -> CONTROL CHARACTER
u'\xc4' # 0x0080 -> LATIN CAPITAL LETTER A WITH DIAERESIS
u'\xa0' # 0x0081 -> NO-BREAK SPACE, right-left
u'\xc7' # 0x0082 -> LATIN CAPITAL LETTER C WITH CEDILLA
u'\xc9' # 0x0083 -> LATIN CAPITAL LETTER E WITH ACUTE
u'\xd1' # 0x0084 -> LATIN CAPITAL LETTER N WITH TILDE
u'\xd6' # 0x0085 -> LATIN CAPITAL LETTER O WITH DIAERESIS
u'\xdc' # 0x0086 -> LATIN CAPITAL LETTER U WITH DIAERESIS
u'\xe1' # 0x0087 -> LATIN SMALL LETTER A WITH ACUTE
u'\xe0' # 0x0088 -> LATIN SMALL LETTER A WITH GRAVE
u'\xe2' # 0x0089 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
u'\xe4' # 0x008a -> LATIN SMALL LETTER A WITH DIAERESIS
u'\u06ba' # 0x008b -> ARABIC LETTER NOON GHUNNA
u'\xab' # 0x008c -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK, right-left
u'\xe7' # 0x008d -> LATIN SMALL LETTER C WITH CEDILLA
u'\xe9' # 0x008e -> LATIN SMALL LETTER E WITH ACUTE
u'\xe8' # 0x008f -> LATIN SMALL LETTER E WITH GRAVE
u'\xea' # 0x0090 -> LATIN SMALL LETTER E WITH CIRCUMFLEX
u'\xeb' # 0x0091 -> LATIN SMALL LETTER E WITH DIAERESIS
u'\xed' # 0x0092 -> LATIN SMALL LETTER I WITH ACUTE
u'\u2026' # 0x0093 -> HORIZONTAL ELLIPSIS, right-left
u'\xee' # 0x0094 -> LATIN SMALL LETTER I WITH CIRCUMFLEX
u'\xef' # 0x0095 -> LATIN SMALL LETTER I WITH DIAERESIS
u'\xf1' # 0x0096 -> LATIN SMALL LETTER N WITH TILDE
u'\xf3' # 0x0097 -> LATIN SMALL LETTER O WITH ACUTE
u'\xbb' # 0x0098 -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK, right-left
u'\xf4' # 0x0099 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
u'\xf6' # 0x009a -> LATIN SMALL LETTER O WITH DIAERESIS
u'\xf7' # 0x009b -> DIVISION SIGN, right-left
u'\xfa' # 0x009c -> LATIN SMALL LETTER U WITH ACUTE
u'\xf9' # 0x009d -> LATIN SMALL LETTER U WITH GRAVE
u'\xfb' # 0x009e -> LATIN SMALL LETTER U WITH CIRCUMFLEX
u'\xfc' # 0x009f -> LATIN SMALL LETTER U WITH DIAERESIS
u' ' # 0x00a0 -> SPACE, right-left
u'!' # 0x00a1 -> EXCLAMATION MARK, right-left
u'"' # 0x00a2 -> QUOTATION MARK, right-left
u'#' # 0x00a3 -> NUMBER SIGN, right-left
u'$' # 0x00a4 -> DOLLAR SIGN, right-left
u'\u066a' # 0x00a5 -> ARABIC PERCENT SIGN
u'&' # 0x00a6 -> AMPERSAND, right-left
u"'" # 0x00a7 -> APOSTROPHE, right-left
u'(' # 0x00a8 -> LEFT PARENTHESIS, right-left
u')' # 0x00a9 -> RIGHT PARENTHESIS, right-left
u'*' # 0x00aa -> ASTERISK, right-left
u'+' # 0x00ab -> PLUS SIGN, right-left
u'\u060c' # 0x00ac -> ARABIC COMMA
u'-' # 0x00ad -> HYPHEN-MINUS, right-left
u'.' # 0x00ae -> FULL STOP, right-left
u'/' # 0x00af -> SOLIDUS, right-left
u'\u0660' # 0x00b0 -> ARABIC-INDIC DIGIT ZERO, right-left (need override)
u'\u0661' # 0x00b1 -> ARABIC-INDIC DIGIT ONE, right-left (need override)
u'\u0662' # 0x00b2 -> ARABIC-INDIC DIGIT TWO, right-left (need override)
u'\u0663' # 0x00b3 -> ARABIC-INDIC DIGIT THREE, right-left (need override)
u'\u0664' # 0x00b4 -> ARABIC-INDIC DIGIT FOUR, right-left (need override)
u'\u0665' # 0x00b5 -> ARABIC-INDIC DIGIT FIVE, right-left (need override)
u'\u0666' # 0x00b6 -> ARABIC-INDIC DIGIT SIX, right-left (need override)
u'\u0667' # 0x00b7 -> ARABIC-INDIC DIGIT SEVEN, right-left (need override)
u'\u0668' # 0x00b8 -> ARABIC-INDIC DIGIT EIGHT, right-left (need override)
u'\u0669' # 0x00b9 -> ARABIC-INDIC DIGIT NINE, right-left (need override)
u':' # 0x00ba -> COLON, right-left
u'\u061b' # 0x00bb -> ARABIC SEMICOLON
u'<' # 0x00bc -> LESS-THAN SIGN, right-left
u'=' # 0x00bd -> EQUALS SIGN, right-left
u'>' # 0x00be -> GREATER-THAN SIGN, right-left
u'\u061f' # 0x00bf -> ARABIC QUESTION MARK
u'\u274a' # 0x00c0 -> EIGHT TEARDROP-SPOKED PROPELLER ASTERISK, right-left
u'\u0621' # 0x00c1 -> ARABIC LETTER HAMZA
u'\u0622' # 0x00c2 -> ARABIC LETTER ALEF WITH MADDA ABOVE
u'\u0623' # 0x00c3 -> ARABIC LETTER ALEF WITH HAMZA ABOVE
u'\u0624' # 0x00c4 -> ARABIC LETTER WAW WITH HAMZA ABOVE
u'\u0625' # 0x00c5 -> ARABIC LETTER ALEF WITH HAMZA BELOW
u'\u0626' # 0x00c6 -> ARABIC LETTER YEH WITH HAMZA ABOVE
u'\u0627' # 0x00c7 -> ARABIC LETTER ALEF
u'\u0628' # 0x00c8 -> ARABIC LETTER BEH
u'\u0629' # 0x00c9 -> ARABIC LETTER TEH MARBUTA
u'\u062a' # 0x00ca -> ARABIC LETTER TEH
u'\u062b' # 0x00cb -> ARABIC LETTER THEH
u'\u062c' # 0x00cc -> ARABIC LETTER JEEM
u'\u062d' # 0x00cd -> ARABIC LETTER HAH
u'\u062e' # 0x00ce -> ARABIC LETTER KHAH
u'\u062f' # 0x00cf -> ARABIC LETTER DAL
u'\u0630' # 0x00d0 -> ARABIC LETTER THAL
u'\u0631' # 0x00d1 -> ARABIC LETTER REH
u'\u0632' # 0x00d2 -> ARABIC LETTER ZAIN
u'\u0633' # 0x00d3 -> ARABIC LETTER SEEN
u'\u0634' # 0x00d4 -> ARABIC LETTER SHEEN
u'\u0635' # 0x00d5 -> ARABIC LETTER SAD
u'\u0636' # 0x00d6 -> ARABIC LETTER DAD
u'\u0637' # 0x00d7 -> ARABIC LETTER TAH
u'\u0638' # 0x00d8 -> ARABIC LETTER ZAH
u'\u0639' # 0x00d9 -> ARABIC LETTER AIN
u'\u063a' # 0x00da -> ARABIC LETTER GHAIN
u'[' # 0x00db -> LEFT SQUARE BRACKET, right-left
u'\\' # 0x00dc -> REVERSE SOLIDUS, right-left
u']' # 0x00dd -> RIGHT SQUARE BRACKET, right-left
u'^' # 0x00de -> CIRCUMFLEX ACCENT, right-left
u'_' # 0x00df -> LOW LINE, right-left
u'\u0640' # 0x00e0 -> ARABIC TATWEEL
u'\u0641' # 0x00e1 -> ARABIC LETTER FEH
u'\u0642' # 0x00e2 -> ARABIC LETTER QAF
u'\u0643' # 0x00e3 -> ARABIC LETTER KAF
u'\u0644' # 0x00e4 -> ARABIC LETTER LAM
u'\u0645' # 0x00e5 -> ARABIC LETTER MEEM
u'\u0646' # 0x00e6 -> ARABIC LETTER NOON
u'\u0647' # 0x00e7 -> ARABIC LETTER HEH
u'\u0648' # 0x00e8 -> ARABIC LETTER WAW
u'\u0649' # 0x00e9 -> ARABIC LETTER ALEF MAKSURA
u'\u064a' # 0x00ea -> ARABIC LETTER YEH
u'\u064b' # 0x00eb -> ARABIC FATHATAN
u'\u064c' # 0x00ec -> ARABIC DAMMATAN
u'\u064d' # 0x00ed -> ARABIC KASRATAN
u'\u064e' # 0x00ee -> ARABIC FATHA
u'\u064f' # 0x00ef -> ARABIC DAMMA
u'\u0650' # 0x00f0 -> ARABIC KASRA
u'\u0651' # 0x00f1 -> ARABIC SHADDA
u'\u0652' # 0x00f2 -> ARABIC SUKUN
u'\u067e' # 0x00f3 -> ARABIC LETTER PEH
u'\u0679' # 0x00f4 -> ARABIC LETTER TTEH
u'\u0686' # 0x00f5 -> ARABIC LETTER TCHEH
u'\u06d5' # 0x00f6 -> ARABIC LETTER AE
u'\u06a4' # 0x00f7 -> ARABIC LETTER VEH
u'\u06af' # 0x00f8 -> ARABIC LETTER GAF
u'\u0688' # 0x00f9 -> ARABIC LETTER DDAL
u'\u0691' # 0x00fa -> ARABIC LETTER RREH
u'{' # 0x00fb -> LEFT CURLY BRACKET, right-left
u'|' # 0x00fc -> VERTICAL LINE, right-left
u'}' # 0x00fd -> RIGHT CURLY BRACKET, right-left
u'\u0698' # 0x00fe -> ARABIC LETTER JEH
u'\u06d2' # 0x00ff -> ARABIC LETTER YEH BARREE
)
### Encoding Map
encoding_map = {
0x0000: 0x0000, # CONTROL CHARACTER
0x0001: 0x0001, # CONTROL CHARACTER
0x0002: 0x0002, # CONTROL CHARACTER
0x0003: 0x0003, # CONTROL CHARACTER
0x0004: 0x0004, # CONTROL CHARACTER
0x0005: 0x0005, # CONTROL CHARACTER
0x0006: 0x0006, # CONTROL CHARACTER
0x0007: 0x0007, # CONTROL CHARACTER
0x0008: 0x0008, # CONTROL CHARACTER
0x0009: 0x0009, # CONTROL CHARACTER
0x000a: 0x000a, # CONTROL CHARACTER
0x000b: 0x000b, # CONTROL CHARACTER
0x000c: 0x000c, # CONTROL CHARACTER
0x000d: 0x000d, # CONTROL CHARACTER
0x000e: 0x000e, # CONTROL CHARACTER
0x000f: 0x000f, # CONTROL CHARACTER
0x0010: 0x0010, # CONTROL CHARACTER
0x0011: 0x0011, # CONTROL CHARACTER
0x0012: 0x0012, # CONTROL CHARACTER
0x0013: 0x0013, # CONTROL CHARACTER
0x0014: 0x0014, # CONTROL CHARACTER
0x0015: 0x0015, # CONTROL CHARACTER
0x0016: 0x0016, # CONTROL CHARACTER
0x0017: 0x0017, # CONTROL CHARACTER
0x0018: 0x0018, # CONTROL CHARACTER
0x0019: 0x0019, # CONTROL CHARACTER
0x001a: 0x001a, # CONTROL CHARACTER
0x001b: 0x001b, # CONTROL CHARACTER
0x001c: 0x001c, # CONTROL CHARACTER
0x001d: 0x001d, # CONTROL CHARACTER
0x001e: 0x001e, # CONTROL CHARACTER
0x001f: 0x001f, # CONTROL CHARACTER
0x0020: 0x0020, # SPACE, left-right
0x0020: 0x00a0, # SPACE, right-left
0x0021: 0x0021, # EXCLAMATION MARK, left-right
0x0021: 0x00a1, # EXCLAMATION MARK, right-left
0x0022: 0x0022, # QUOTATION MARK, left-right
0x0022: 0x00a2, # QUOTATION MARK, right-left
0x0023: 0x0023, # NUMBER SIGN, left-right
0x0023: 0x00a3, # NUMBER SIGN, right-left
0x0024: 0x0024, # DOLLAR SIGN, left-right
0x0024: 0x00a4, # DOLLAR SIGN, right-left
0x0025: 0x0025, # PERCENT SIGN, left-right
0x0026: 0x0026, # AMPERSAND, left-right
0x0026: 0x00a6, # AMPERSAND, right-left
0x0027: 0x0027, # APOSTROPHE, left-right
0x0027: 0x00a7, # APOSTROPHE, right-left
0x0028: 0x0028, # LEFT PARENTHESIS, left-right
0x0028: 0x00a8, # LEFT PARENTHESIS, right-left
0x0029: 0x0029, # RIGHT PARENTHESIS, left-right
0x0029: 0x00a9, # RIGHT PARENTHESIS, right-left
0x002a: 0x002a, # ASTERISK, left-right
0x002a: 0x00aa, # ASTERISK, right-left
0x002b: 0x002b, # PLUS SIGN, left-right
0x002b: 0x00ab, # PLUS SIGN, right-left
0x002c: 0x002c, # COMMA, left-right; in Arabic-script context, displayed as 0x066C ARABIC THOUSANDS SEPARATOR
0x002d: 0x002d, # HYPHEN-MINUS, left-right
0x002d: 0x00ad, # HYPHEN-MINUS, right-left
0x002e: 0x002e, # FULL STOP, left-right; in Arabic-script context, displayed as 0x066B ARABIC DECIMAL SEPARATOR
0x002e: 0x00ae, # FULL STOP, right-left
0x002f: 0x002f, # SOLIDUS, left-right
0x002f: 0x00af, # SOLIDUS, right-left
0x0030: 0x0030, # DIGIT ZERO; in Arabic-script context, displayed as 0x0660 ARABIC-INDIC DIGIT ZERO
0x0031: 0x0031, # DIGIT ONE; in Arabic-script context, displayed as 0x0661 ARABIC-INDIC DIGIT ONE
0x0032: 0x0032, # DIGIT TWO; in Arabic-script context, displayed as 0x0662 ARABIC-INDIC DIGIT TWO
0x0033: 0x0033, # DIGIT THREE; in Arabic-script context, displayed as 0x0663 ARABIC-INDIC DIGIT THREE
0x0034: 0x0034, # DIGIT FOUR; in Arabic-script context, displayed as 0x0664 ARABIC-INDIC DIGIT FOUR
0x0035: 0x0035, # DIGIT FIVE; in Arabic-script context, displayed as 0x0665 ARABIC-INDIC DIGIT FIVE
0x0036: 0x0036, # DIGIT SIX; in Arabic-script context, displayed as 0x0666 ARABIC-INDIC DIGIT SIX
0x0037: 0x0037, # DIGIT SEVEN; in Arabic-script context, displayed as 0x0667 ARABIC-INDIC DIGIT SEVEN
0x0038: 0x0038, # DIGIT EIGHT; in Arabic-script context, displayed as 0x0668 ARABIC-INDIC DIGIT EIGHT
0x0039: 0x0039, # DIGIT NINE; in Arabic-script context, displayed as 0x0669 ARABIC-INDIC DIGIT NINE
0x003a: 0x003a, # COLON, left-right
0x003a: 0x00ba, # COLON, right-left
0x003b: 0x003b, # SEMICOLON, left-right
0x003c: 0x003c, # LESS-THAN SIGN, left-right
0x003c: 0x00bc, # LESS-THAN SIGN, right-left
0x003d: 0x003d, # EQUALS SIGN, left-right
0x003d: 0x00bd, # EQUALS SIGN, right-left
0x003e: 0x003e, # GREATER-THAN SIGN, left-right
0x003e: 0x00be, # GREATER-THAN SIGN, right-left
0x003f: 0x003f, # QUESTION MARK, left-right
0x0040: 0x0040, # COMMERCIAL AT
0x0041: 0x0041, # LATIN CAPITAL LETTER A
0x0042: 0x0042, # LATIN CAPITAL LETTER B
0x0043: 0x0043, # LATIN CAPITAL LETTER C
0x0044: 0x0044, # LATIN CAPITAL LETTER D
0x0045: 0x0045, # LATIN CAPITAL LETTER E
0x0046: 0x0046, # LATIN CAPITAL LETTER F
0x0047: 0x0047, # LATIN CAPITAL LETTER G
0x0048: 0x0048, # LATIN CAPITAL LETTER H
0x0049: 0x0049, # LATIN CAPITAL LETTER I
0x004a: 0x004a, # LATIN CAPITAL LETTER J
0x004b: 0x004b, # LATIN CAPITAL LETTER K
0x004c: 0x004c, # LATIN CAPITAL LETTER L
0x004d: 0x004d, # LATIN CAPITAL LETTER M
0x004e: 0x004e, # LATIN CAPITAL LETTER N
0x004f: 0x004f, # LATIN CAPITAL LETTER O
0x0050: 0x0050, # LATIN CAPITAL LETTER P
0x0051: 0x0051, # LATIN CAPITAL LETTER Q
0x0052: 0x0052, # LATIN CAPITAL LETTER R
0x0053: 0x0053, # LATIN CAPITAL LETTER S
0x0054: 0x0054, # LATIN CAPITAL LETTER T
0x0055: 0x0055, # LATIN CAPITAL LETTER U
0x0056: 0x0056, # LATIN CAPITAL LETTER V
0x0057: 0x0057, # LATIN CAPITAL LETTER W
0x0058: 0x0058, # LATIN CAPITAL LETTER X
0x0059: 0x0059, # LATIN CAPITAL LETTER Y
0x005a: 0x005a, # LATIN CAPITAL LETTER Z
0x005b: 0x005b, # LEFT SQUARE BRACKET, left-right
0x005b: 0x00db, # LEFT SQUARE BRACKET, right-left
0x005c: 0x005c, # REVERSE SOLIDUS, left-right
0x005c: 0x00dc, # REVERSE SOLIDUS, right-left
0x005d: 0x005d, # RIGHT SQUARE BRACKET, left-right
0x005d: 0x00dd, # RIGHT SQUARE BRACKET, right-left
0x005e: 0x005e, # CIRCUMFLEX ACCENT, left-right
0x005e: 0x00de, # CIRCUMFLEX ACCENT, right-left
0x005f: 0x005f, # LOW LINE, left-right
0x005f: 0x00df, # LOW LINE, right-left
0x0060: 0x0060, # GRAVE ACCENT
0x0061: 0x0061, # LATIN SMALL LETTER A
0x0062: 0x0062, # LATIN SMALL LETTER B
0x0063: 0x0063, # LATIN SMALL LETTER C
0x0064: 0x0064, # LATIN SMALL LETTER D
0x0065: 0x0065, # LATIN SMALL LETTER E
0x0066: 0x0066, # LATIN SMALL LETTER F
0x0067: 0x0067, # LATIN SMALL LETTER G
0x0068: 0x0068, # LATIN SMALL LETTER H
0x0069: 0x0069, # LATIN SMALL LETTER I
0x006a: 0x006a, # LATIN SMALL LETTER J
0x006b: 0x006b, # LATIN SMALL LETTER K
0x006c: 0x006c, # LATIN SMALL LETTER L
0x006d: 0x006d, # LATIN SMALL LETTER M
0x006e: 0x006e, # LATIN SMALL LETTER N
0x006f: 0x006f, # LATIN SMALL LETTER O
0x0070: 0x0070, # LATIN SMALL LETTER P
0x0071: 0x0071, # LATIN SMALL LETTER Q
0x0072: 0x0072, # LATIN SMALL LETTER R
0x0073: 0x0073, # LATIN SMALL LETTER S
0x0074: 0x0074, # LATIN SMALL LETTER T
0x0075: 0x0075, # LATIN SMALL LETTER U
0x0076: 0x0076, # LATIN SMALL LETTER V
0x0077: 0x0077, # LATIN SMALL LETTER W
0x0078: 0x0078, # LATIN SMALL LETTER X
0x0079: 0x0079, # LATIN SMALL LETTER Y
0x007a: 0x007a, # LATIN SMALL LETTER Z
0x007b: 0x007b, # LEFT CURLY BRACKET, left-right
0x007b: 0x00fb, # LEFT CURLY BRACKET, right-left
0x007c: 0x007c, # VERTICAL LINE, left-right
0x007c: 0x00fc, # VERTICAL LINE, right-left
0x007d: 0x007d, # RIGHT CURLY BRACKET, left-right
0x007d: 0x00fd, # RIGHT CURLY BRACKET, right-left
0x007e: 0x007e, # TILDE
0x007f: 0x007f, # CONTROL CHARACTER
0x00a0: 0x0081, # NO-BREAK SPACE, right-left
0x00ab: 0x008c, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK, right-left
0x00bb: 0x0098, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK, right-left
0x00c4: 0x0080, # LATIN CAPITAL LETTER A WITH DIAERESIS
0x00c7: 0x0082, # LATIN CAPITAL LETTER C WITH CEDILLA
0x00c9: 0x0083, # LATIN CAPITAL LETTER E WITH ACUTE
0x00d1: 0x0084, # LATIN CAPITAL LETTER N WITH TILDE
0x00d6: 0x0085, # LATIN CAPITAL LETTER O WITH DIAERESIS
0x00dc: 0x0086, # LATIN CAPITAL LETTER U WITH DIAERESIS
0x00e0: 0x0088, # LATIN SMALL LETTER A WITH GRAVE
0x00e1: 0x0087, # LATIN SMALL LETTER A WITH ACUTE
0x00e2: 0x0089, # LATIN SMALL LETTER A WITH CIRCUMFLEX
0x00e4: 0x008a, # LATIN SMALL LETTER A WITH DIAERESIS
0x00e7: 0x008d, # LATIN SMALL LETTER C WITH CEDILLA
0x00e8: 0x008f, # LATIN SMALL LETTER E WITH GRAVE
0x00e9: 0x008e, # LATIN SMALL LETTER E WITH ACUTE
0x00ea: 0x0090, # LATIN SMALL LETTER E WITH CIRCUMFLEX
0x00eb: 0x0091, # LATIN SMALL LETTER E WITH DIAERESIS
0x00ed: 0x0092, # LATIN SMALL LETTER I WITH ACUTE
0x00ee: 0x0094, # LATIN SMALL LETTER I WITH CIRCUMFLEX
0x00ef: 0x0095, # LATIN SMALL LETTER I WITH DIAERESIS
0x00f1: 0x0096, # LATIN SMALL LETTER N WITH TILDE
0x00f3: 0x0097, # LATIN SMALL LETTER O WITH ACUTE
0x00f4: 0x0099, # LATIN SMALL LETTER O WITH CIRCUMFLEX
0x00f6: 0x009a, # LATIN SMALL LETTER O WITH DIAERESIS
0x00f7: 0x009b, # DIVISION SIGN, right-left
0x00f9: 0x009d, # LATIN SMALL LETTER U WITH GRAVE
0x00fa: 0x009c, # LATIN SMALL LETTER U WITH ACUTE
0x00fb: 0x009e, # LATIN SMALL LETTER U WITH CIRCUMFLEX
0x00fc: 0x009f, # LATIN SMALL LETTER U WITH DIAERESIS
0x060c: 0x00ac, # ARABIC COMMA
0x061b: 0x00bb, # ARABIC SEMICOLON
0x061f: 0x00bf, # ARABIC QUESTION MARK
0x0621: 0x00c1, # ARABIC LETTER HAMZA
0x0622: 0x00c2, # ARABIC LETTER ALEF WITH MADDA ABOVE
0x0623: 0x00c3, # ARABIC LETTER ALEF WITH HAMZA ABOVE
0x0624: 0x00c4, # ARABIC LETTER WAW WITH HAMZA ABOVE
0x0625: 0x00c5, # ARABIC LETTER ALEF WITH HAMZA BELOW
0x0626: 0x00c6, # ARABIC LETTER YEH WITH HAMZA ABOVE
0x0627: 0x00c7, # ARABIC LETTER ALEF
0x0628: 0x00c8, # ARABIC LETTER BEH
0x0629: 0x00c9, # ARABIC LETTER TEH MARBUTA
0x062a: 0x00ca, # ARABIC LETTER TEH
0x062b: 0x00cb, # ARABIC LETTER THEH
0x062c: 0x00cc, # ARABIC LETTER JEEM
0x062d: 0x00cd, # ARABIC LETTER HAH
0x062e: 0x00ce, # ARABIC LETTER KHAH
0x062f: 0x00cf, # ARABIC LETTER DAL
0x0630: 0x00d0, # ARABIC LETTER THAL
0x0631: 0x00d1, # ARABIC LETTER REH
0x0632: 0x00d2, # ARABIC LETTER ZAIN
0x0633: 0x00d3, # ARABIC LETTER SEEN
0x0634: 0x00d4, # ARABIC LETTER SHEEN
0x0635: 0x00d5, # ARABIC LETTER SAD
0x0636: 0x00d6, # ARABIC LETTER DAD
0x0637: 0x00d7, # ARABIC LETTER TAH
0x0638: 0x00d8, # ARABIC LETTER ZAH
0x0639: 0x00d9, # ARABIC LETTER AIN
0x063a: 0x00da, # ARABIC LETTER GHAIN
0x0640: 0x00e0, # ARABIC TATWEEL
0x0641: 0x00e1, # ARABIC LETTER FEH
0x0642: 0x00e2, # ARABIC LETTER QAF
0x0643: 0x00e3, # ARABIC LETTER KAF
0x0644: 0x00e4, # ARABIC LETTER LAM
0x0645: 0x00e5, # ARABIC LETTER MEEM
0x0646: 0x00e6, # ARABIC LETTER NOON
0x0647: 0x00e7, # ARABIC LETTER HEH
0x0648: 0x00e8, # ARABIC LETTER WAW
0x0649: 0x00e9, # ARABIC LETTER ALEF MAKSURA
0x064a: 0x00ea, # ARABIC LETTER YEH
0x064b: 0x00eb, # ARABIC FATHATAN
0x064c: 0x00ec, # ARABIC DAMMATAN
0x064d: 0x00ed, # ARABIC KASRATAN
0x064e: 0x00ee, # ARABIC FATHA
0x064f: 0x00ef, # ARABIC DAMMA
0x0650: 0x00f0, # ARABIC KASRA
0x0651: 0x00f1, # ARABIC SHADDA
0x0652: 0x00f2, # ARABIC SUKUN
0x0660: 0x00b0, # ARABIC-INDIC DIGIT ZERO, right-left (need override)
0x0661: 0x00b1, # ARABIC-INDIC DIGIT ONE, right-left (need override)
0x0662: 0x00b2, # ARABIC-INDIC DIGIT TWO, right-left (need override)
0x0663: 0x00b3, # ARABIC-INDIC DIGIT THREE, right-left (need override)
0x0664: 0x00b4, # ARABIC-INDIC DIGIT FOUR, right-left (need override)
0x0665: 0x00b5, # ARABIC-INDIC DIGIT FIVE, right-left (need override)
0x0666: 0x00b6, # ARABIC-INDIC DIGIT SIX, right-left (need override)
0x0667: 0x00b7, # ARABIC-INDIC DIGIT SEVEN, right-left (need override)
0x0668: 0x00b8, # ARABIC-INDIC DIGIT EIGHT, right-left (need override)
0x0669: 0x00b9, # ARABIC-INDIC DIGIT NINE, right-left (need override)
0x066a: 0x00a5, # ARABIC PERCENT SIGN
0x0679: 0x00f4, # ARABIC LETTER TTEH
0x067e: 0x00f3, # ARABIC LETTER PEH
0x0686: 0x00f5, # ARABIC LETTER TCHEH
0x0688: 0x00f9, # ARABIC LETTER DDAL
0x0691: 0x00fa, # ARABIC LETTER RREH
0x0698: 0x00fe, # ARABIC LETTER JEH
0x06a4: 0x00f7, # ARABIC LETTER VEH
0x06af: 0x00f8, # ARABIC LETTER GAF
0x06ba: 0x008b, # ARABIC LETTER NOON GHUNNA
0x06d2: 0x00ff, # ARABIC LETTER YEH BARREE
0x06d5: 0x00f6, # ARABIC LETTER AE
0x2026: 0x0093, # HORIZONTAL ELLIPSIS, right-left
0x274a: 0x00c0, # EIGHT TEARDROP-SPOKED PROPELLER ASTERISK, right-left
}
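# --- Illustrative usage sketch (added for exposition; not part of the
# generated codec data). Tables like these come from Python's
# Tools/unicode/gencodec.py and are consumed by the codecs module. The name
# ``decoding_table`` below is the conventional gencodec binding for the
# decode table above, and is an assumption here.
import codecs as _codecs

def _demo_charmap_roundtrip(byte_value=0xc7):
    # The decode table maps a byte value to its unicode character
    # (0xc7 -> u'\u0627', ARABIC LETTER ALEF, per the table above).
    char = decoding_table[byte_value]
    # charmap_encode consults encoding_map to map the character back to
    # its single-byte form, returning (encoded_bytes, length_consumed).
    encoded, _length = _codecs.charmap_encode(char, 'strict', encoding_map)
    return char, encoded

# Caveat: the ASCII range of encoding_map repeats keys (e.g. 0x0020 appears
# with both a left-right and a right-left target); in a Python dict literal
# the later entry wins, so 0x0020 is actually stored as 0x00a0 here.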
| gpl-3.0 |
indevgr/django | tests/template_tests/filter_tests/test_striptags.py | 197 | 1632 | from django.template.defaultfilters import striptags
from django.test import SimpleTestCase
from django.utils.functional import lazystr
from django.utils.safestring import mark_safe
from ..utils import setup
class StriptagsTests(SimpleTestCase):
@setup({'striptags01': '{{ a|striptags }} {{ b|striptags }}'})
def test_striptags01(self):
output = self.engine.render_to_string(
'striptags01',
{
'a': '<a>x</a> <p><b>y</b></p>',
'b': mark_safe('<a>x</a> <p><b>y</b></p>'),
},
)
self.assertEqual(output, 'x y x y')
@setup({'striptags02': '{% autoescape off %}{{ a|striptags }} {{ b|striptags }}{% endautoescape %}'})
def test_striptags02(self):
output = self.engine.render_to_string(
'striptags02',
{
'a': '<a>x</a> <p><b>y</b></p>',
'b': mark_safe('<a>x</a> <p><b>y</b></p>'),
},
)
self.assertEqual(output, 'x y x y')
class FunctionTests(SimpleTestCase):
def test_strip(self):
self.assertEqual(
striptags('some <b>html</b> with <script>alert("You smell")</script> disallowed <img /> tags'),
'some html with alert("You smell") disallowed tags',
)
def test_non_string_input(self):
self.assertEqual(striptags(123), '123')
def test_strip_lazy_string(self):
self.assertEqual(
striptags(lazystr('some <b>html</b> with <script>alert("Hello")</script> disallowed <img /> tags')),
'some html with alert("Hello") disallowed tags',
)
| bsd-3-clause |
MeigaraJuma/XQS-Website-Angular | node_modules/node-gyp/gyp/pylib/gyp/win_tool.py | 1417 | 12751 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Utility functions for Windows builds.
These functions are executed via gyp-win-tool when using the ninja generator.
"""
import os
import re
import shutil
import subprocess
import stat
import string
import sys
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
# A regex matching an argument corresponding to the output filename passed to
# link.exe.
_LINK_EXE_OUT_ARG = re.compile('/OUT:(?P<out>.+)$', re.IGNORECASE)
def main(args):
executor = WinTool()
exit_code = executor.Dispatch(args)
if exit_code is not None:
sys.exit(exit_code)
class WinTool(object):
"""This class performs all the Windows tooling steps. The methods can either
be executed directly, or dispatched from an argument list."""
def _UseSeparateMspdbsrv(self, env, args):
"""Allows to use a unique instance of mspdbsrv.exe per linker instead of a
shared one."""
if len(args) < 1:
raise Exception("Not enough arguments")
if args[0] != 'link.exe':
return
# Use the output filename passed to the linker to generate an endpoint name
# for mspdbsrv.exe.
endpoint_name = None
for arg in args:
m = _LINK_EXE_OUT_ARG.match(arg)
if m:
endpoint_name = re.sub(r'\W+', '',
'%s_%d' % (m.group('out'), os.getpid()))
break
if endpoint_name is None:
return
# Adds the appropriate environment variable. This will be read by link.exe
# to know which instance of mspdbsrv.exe it should connect to (if it's
# not set then the default endpoint is used).
env['_MSPDBSRV_ENDPOINT_'] = endpoint_name
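  # Illustrative sketch (not part of the original tool): how the endpoint
  # name above is derived, on hypothetical inputs.
  #   _LINK_EXE_OUT_ARG.match('/OUT:chrome.dll').group('out') -> 'chrome.dll'
  #   re.sub(r'\W+', '', 'chrome.dll_4242') -> 'chromedll_4242'
  # Each link.exe invocation therefore talks to its own mspdbsrv.exe
  # instance instead of the shared default endpoint.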
def Dispatch(self, args):
"""Dispatches a string command to a method."""
if len(args) < 1:
raise Exception("Not enough arguments")
method = "Exec%s" % self._CommandifyName(args[0])
return getattr(self, method)(*args[1:])
def _CommandifyName(self, name_string):
"""Transforms a tool name like recursive-mirror to RecursiveMirror."""
return name_string.title().replace('-', '')
def _GetEnv(self, arch):
"""Gets the saved environment from a file for a given architecture."""
# The environment is saved as an "environment block" (see CreateProcess
# and msvs_emulation for details). We convert to a dict here.
# Drop last 2 NULs, one for list terminator, one for trailing vs. separator.
pairs = open(arch).read()[:-2].split('\0')
kvs = [item.split('=', 1) for item in pairs]
return dict(kvs)
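  # Illustrative sketch (not part of the original tool): the saved block is
  # NUL-separated KEY=VALUE pairs ending in a double NUL, e.g. a hypothetical
  # file containing
  #     'PATH=C:\\bin\x00TMP=C:\\tmp\x00\x00'
  # is parsed by _GetEnv above into {'PATH': 'C:\\bin', 'TMP': 'C:\\tmp'}.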
def ExecStamp(self, path):
"""Simple stamp command."""
open(path, 'w').close()
def ExecRecursiveMirror(self, source, dest):
"""Emulation of rm -rf out && cp -af in out."""
if os.path.exists(dest):
if os.path.isdir(dest):
def _on_error(fn, path, excinfo):
# The operation failed, possibly because the file is set to
# read-only. If that's why, make it writable and try the op again.
if not os.access(path, os.W_OK):
os.chmod(path, stat.S_IWRITE)
fn(path)
shutil.rmtree(dest, onerror=_on_error)
else:
if not os.access(dest, os.W_OK):
# Attempt to make the file writable before deleting it.
os.chmod(dest, stat.S_IWRITE)
os.unlink(dest)
if os.path.isdir(source):
shutil.copytree(source, dest)
else:
shutil.copy2(source, dest)
def ExecLinkWrapper(self, arch, use_separate_mspdbsrv, *args):
"""Filter diagnostic output from link that looks like:
' Creating library ui.dll.lib and object ui.dll.exp'
This happens when there are exports from the dll or exe.
"""
env = self._GetEnv(arch)
if use_separate_mspdbsrv == 'True':
self._UseSeparateMspdbsrv(env, args)
link = subprocess.Popen([args[0].replace('/', '\\')] + list(args[1:]),
shell=True,
env=env,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
out, _ = link.communicate()
for line in out.splitlines():
if (not line.startswith(' Creating library ') and
not line.startswith('Generating code') and
not line.startswith('Finished generating code')):
print line
return link.returncode
def ExecLinkWithManifests(self, arch, embed_manifest, out, ldcmd, resname,
mt, rc, intermediate_manifest, *manifests):
"""A wrapper for handling creating a manifest resource and then executing
a link command."""
# The 'normal' way to do manifests is to have link generate a manifest
# based on gathering dependencies from the object files, then merge that
# manifest with other manifests supplied as sources, convert the merged
# manifest to a resource, and then *relink*, including the compiled
# version of the manifest resource. This breaks incremental linking, and
# is generally overly complicated. Instead, we merge all the manifests
# provided (along with one that includes what would normally be in the
# linker-generated one, see msvs_emulation.py), and include that into the
# first and only link. We still tell link to generate a manifest, but we
# only use that to assert that our simpler process did not miss anything.
variables = {
'python': sys.executable,
'arch': arch,
'out': out,
'ldcmd': ldcmd,
'resname': resname,
'mt': mt,
'rc': rc,
'intermediate_manifest': intermediate_manifest,
'manifests': ' '.join(manifests),
}
add_to_ld = ''
if manifests:
subprocess.check_call(
'%(python)s gyp-win-tool manifest-wrapper %(arch)s %(mt)s -nologo '
'-manifest %(manifests)s -out:%(out)s.manifest' % variables)
if embed_manifest == 'True':
subprocess.check_call(
'%(python)s gyp-win-tool manifest-to-rc %(arch)s %(out)s.manifest'
' %(out)s.manifest.rc %(resname)s' % variables)
subprocess.check_call(
'%(python)s gyp-win-tool rc-wrapper %(arch)s %(rc)s '
'%(out)s.manifest.rc' % variables)
add_to_ld = ' %(out)s.manifest.res' % variables
subprocess.check_call(ldcmd + add_to_ld)
# Run mt.exe on the theoretically complete manifest we generated, merging
# it with the one the linker generated to confirm that the linker
# generated one does not add anything. This is strictly unnecessary for
# correctness, it's only to verify that e.g. /MANIFESTDEPENDENCY was not
# used in a #pragma comment.
if manifests:
# Merge the intermediate one with ours to .assert.manifest, then check
# that .assert.manifest is identical to ours.
subprocess.check_call(
'%(python)s gyp-win-tool manifest-wrapper %(arch)s %(mt)s -nologo '
'-manifest %(out)s.manifest %(intermediate_manifest)s '
'-out:%(out)s.assert.manifest' % variables)
assert_manifest = '%(out)s.assert.manifest' % variables
our_manifest = '%(out)s.manifest' % variables
# Load and normalize the manifests. mt.exe sometimes removes whitespace,
      # and sometimes doesn't, unfortunately.
with open(our_manifest, 'rb') as our_f:
with open(assert_manifest, 'rb') as assert_f:
our_data = our_f.read().translate(None, string.whitespace)
assert_data = assert_f.read().translate(None, string.whitespace)
if our_data != assert_data:
os.unlink(out)
def dump(filename):
sys.stderr.write('%s\n-----\n' % filename)
with open(filename, 'rb') as f:
sys.stderr.write(f.read() + '\n-----\n')
dump(intermediate_manifest)
dump(our_manifest)
dump(assert_manifest)
sys.stderr.write(
'Linker generated manifest "%s" added to final manifest "%s" '
'(result in "%s"). '
'Were /MANIFEST switches used in #pragma statements? ' % (
intermediate_manifest, our_manifest, assert_manifest))
return 1
def ExecManifestWrapper(self, arch, *args):
"""Run manifest tool with environment set. Strip out undesirable warning
(some XML blocks are recognized by the OS loader, but not the manifest
tool)."""
env = self._GetEnv(arch)
popen = subprocess.Popen(args, shell=True, env=env,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out, _ = popen.communicate()
for line in out.splitlines():
if line and 'manifest authoring warning 81010002' not in line:
print line
return popen.returncode
def ExecManifestToRc(self, arch, *args):
"""Creates a resource file pointing a SxS assembly manifest.
|args| is tuple containing path to resource file, path to manifest file
and resource name which can be "1" (for executables) or "2" (for DLLs)."""
manifest_path, resource_path, resource_name = args
with open(resource_path, 'wb') as output:
output.write('#include <windows.h>\n%s RT_MANIFEST "%s"' % (
resource_name,
os.path.abspath(manifest_path).replace('\\', '/')))
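  # Illustrative sketch (not part of the original tool): for hypothetical
  # arguments ('C:/build/app.manifest', 'app.manifest.rc', '1'), the resource
  # file written above would contain:
  #     #include <windows.h>
  #     1 RT_MANIFEST "C:/build/app.manifest"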
def ExecMidlWrapper(self, arch, outdir, tlb, h, dlldata, iid, proxy, idl,
*flags):
"""Filter noisy filenames output from MIDL compile step that isn't
quietable via command line flags.
"""
args = ['midl', '/nologo'] + list(flags) + [
'/out', outdir,
'/tlb', tlb,
'/h', h,
'/dlldata', dlldata,
'/iid', iid,
'/proxy', proxy,
idl]
env = self._GetEnv(arch)
popen = subprocess.Popen(args, shell=True, env=env,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out, _ = popen.communicate()
# Filter junk out of stdout, and write filtered versions. Output we want
# to filter is pairs of lines that look like this:
# Processing C:\Program Files (x86)\Microsoft SDKs\...\include\objidl.idl
# objidl.idl
lines = out.splitlines()
prefixes = ('Processing ', '64 bit Processing ')
processing = set(os.path.basename(x)
for x in lines if x.startswith(prefixes))
for line in lines:
if not line.startswith(prefixes) and line not in processing:
print line
return popen.returncode
def ExecAsmWrapper(self, arch, *args):
"""Filter logo banner from invocations of asm.exe."""
env = self._GetEnv(arch)
popen = subprocess.Popen(args, shell=True, env=env,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out, _ = popen.communicate()
for line in out.splitlines():
if (not line.startswith('Copyright (C) Microsoft Corporation') and
not line.startswith('Microsoft (R) Macro Assembler') and
not line.startswith(' Assembling: ') and
line):
print line
return popen.returncode
def ExecRcWrapper(self, arch, *args):
"""Filter logo banner from invocations of rc.exe. Older versions of RC
don't support the /nologo flag."""
env = self._GetEnv(arch)
popen = subprocess.Popen(args, shell=True, env=env,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out, _ = popen.communicate()
for line in out.splitlines():
if (not line.startswith('Microsoft (R) Windows (R) Resource Compiler') and
not line.startswith('Copyright (C) Microsoft Corporation') and
line):
print line
return popen.returncode
def ExecActionWrapper(self, arch, rspfile, *dir):
"""Runs an action command line from a response file using the environment
for |arch|. If |dir| is supplied, use that as the working directory."""
env = self._GetEnv(arch)
# TODO(scottmg): This is a temporary hack to get some specific variables
# through to actions that are set after gyp-time. http://crbug.com/333738.
for k, v in os.environ.iteritems():
if k not in env:
env[k] = v
args = open(rspfile).read()
dir = dir[0] if dir else None
return subprocess.call(args, shell=True, env=env, cwd=dir)
def ExecClCompile(self, project_dir, selected_files):
"""Executed by msvs-ninja projects when the 'ClCompile' target is used to
build selected C/C++ files."""
project_dir = os.path.relpath(project_dir, BASE_DIR)
selected_files = selected_files.split(';')
ninja_targets = [os.path.join(project_dir, filename) + '^^'
for filename in selected_files]
cmd = ['ninja.exe']
cmd.extend(ninja_targets)
return subprocess.call(cmd, shell=True, cwd=BASE_DIR)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| mit |
zengluyang/ns3-d2d | examples/wireless/wifi-ap.py | 108 | 5883 | # -*- Mode: Python; -*-
# /*
# * Copyright (c) 2005,2006,2007 INRIA
# * Copyright (c) 2009 INESC Porto
# *
# * This program is free software; you can redistribute it and/or modify
# * it under the terms of the GNU General Public License version 2 as
# * published by the Free Software Foundation;
# *
# * This program is distributed in the hope that it will be useful,
# * but WITHOUT ANY WARRANTY; without even the implied warranty of
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# * GNU General Public License for more details.
# *
# * You should have received a copy of the GNU General Public License
# * along with this program; if not, write to the Free Software
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# *
# * Authors: Mathieu Lacage <[email protected]>
# * Gustavo Carneiro <[email protected]>
# */
import sys
import ns.applications
import ns.core
import ns.internet
import ns.mobility
import ns.network
import ns.point_to_point
import ns.wifi
# void
# DevTxTrace (std::string context, Ptr<const Packet> p, Mac48Address address)
# {
# std::cout << " TX to=" << address << " p: " << *p << std::endl;
# }
# void
# DevRxTrace(std::string context, Ptr<const Packet> p, Mac48Address address)
# {
# std::cout << " RX from=" << address << " p: " << *p << std::endl;
# }
# void
# PhyRxOkTrace(std::string context, Ptr<const Packet> packet, double snr, WifiMode mode, enum WifiPreamble preamble)
# {
# std::cout << "PHYRXOK mode=" << mode << " snr=" << snr << " " << *packet << std::endl;
# }
# void
# PhyRxErrorTrace(std::string context, Ptr<const Packet> packet, double snr)
# {
# std::cout << "PHYRXERROR snr=" << snr << " " << *packet << std::endl;
# }
# void
# PhyTxTrace(std::string context, Ptr<const Packet> packet, WifiMode mode, WifiPreamble preamble, uint8_t txPower)
# {
# std::cout << "PHYTX mode=" << mode << " " << *packet << std::endl;
# }
# void
# PhyStateTrace(std::string context, Time start, Time duration, enum WifiPhy::State state)
# {
# std::cout << " state=";
# switch(state) {
# case WifiPhy::TX:
# std::cout << "tx ";
# break;
# case WifiPhy::SYNC:
# std::cout << "sync ";
# break;
# case WifiPhy::CCA_BUSY:
# std::cout << "cca-busy";
# break;
# case WifiPhy::IDLE:
# std::cout << "idle ";
# break;
# }
# std::cout << " start="<<start<<" duration="<<duration<<std::endl;
# }
def SetPosition(node, position):
mobility = node.GetObject(ns.mobility.MobilityModel.GetTypeId())
mobility.SetPosition(position)
def GetPosition(node):
mobility = node.GetObject(ns.mobility.MobilityModel.GetTypeId())
return mobility.GetPosition()
def AdvancePosition(node):
    pos = GetPosition(node)
pos.x += 5.0
if pos.x >= 210.0:
return
SetPosition(node, pos)
ns.core.Simulator.Schedule(ns.core.Seconds(1.0), AdvancePosition, node)
def main(argv):
ns.core.CommandLine().Parse(argv)
    ns.network.Packet.EnablePrinting()
# enable rts cts all the time.
ns.core.Config.SetDefault("ns3::WifiRemoteStationManager::RtsCtsThreshold", ns.core.StringValue("0"))
# disable fragmentation
ns.core.Config.SetDefault("ns3::WifiRemoteStationManager::FragmentationThreshold", ns.core.StringValue("2200"))
wifi = ns.wifi.WifiHelper.Default()
mobility = ns.mobility.MobilityHelper()
stas = ns.network.NodeContainer()
ap = ns.network.NodeContainer()
#NetDeviceContainer staDevs;
packetSocket = ns.network.PacketSocketHelper()
stas.Create(2)
ap.Create(1)
# give packet socket powers to nodes.
packetSocket.Install(stas)
packetSocket.Install(ap)
wifiPhy = ns.wifi.YansWifiPhyHelper.Default()
wifiChannel = ns.wifi.YansWifiChannelHelper.Default()
wifiPhy.SetChannel(wifiChannel.Create())
ssid = ns.wifi.Ssid("wifi-default")
wifi.SetRemoteStationManager("ns3::ArfWifiManager")
wifiMac = ns.wifi.NqosWifiMacHelper.Default()
# setup stas.
wifiMac.SetType("ns3::StaWifiMac",
"Ssid", ns.wifi.SsidValue(ssid),
"ActiveProbing", ns.core.BooleanValue(False))
staDevs = wifi.Install(wifiPhy, wifiMac, stas)
# setup ap.
wifiMac.SetType("ns3::ApWifiMac",
"Ssid", ns.wifi.SsidValue(ssid),
"BeaconGeneration", ns.core.BooleanValue(True),
"BeaconInterval", ns.core.TimeValue(ns.core.Seconds(2.5)))
wifi.Install(wifiPhy, wifiMac, ap)
# mobility.
mobility.Install(stas)
mobility.Install(ap)
ns.core.Simulator.Schedule(ns.core.Seconds(1.0), AdvancePosition, ap.Get(0))
socket = ns.network.PacketSocketAddress()
socket.SetSingleDevice(staDevs.Get(0).GetIfIndex())
socket.SetPhysicalAddress(staDevs.Get(1).GetAddress())
socket.SetProtocol(1)
onoff = ns.applications.OnOffHelper("ns3::PacketSocketFactory", ns.network.Address(socket))
onoff.SetConstantRate (ns.network.DataRate ("500kb/s"))
apps = onoff.Install(ns.network.NodeContainer(stas.Get(0)))
apps.Start(ns.core.Seconds(0.5))
apps.Stop(ns.core.Seconds(43.0))
ns.core.Simulator.Stop(ns.core.Seconds(44.0))
# Config::Connect("/NodeList/*/DeviceList/*/Tx", MakeCallback(&DevTxTrace));
# Config::Connect("/NodeList/*/DeviceList/*/Rx", MakeCallback(&DevRxTrace));
# Config::Connect("/NodeList/*/DeviceList/*/Phy/RxOk", MakeCallback(&PhyRxOkTrace));
# Config::Connect("/NodeList/*/DeviceList/*/Phy/RxError", MakeCallback(&PhyRxErrorTrace));
# Config::Connect("/NodeList/*/DeviceList/*/Phy/Tx", MakeCallback(&PhyTxTrace));
# Config::Connect("/NodeList/*/DeviceList/*/Phy/State", MakeCallback(&PhyStateTrace));
ns.core.Simulator.Run()
ns.core.Simulator.Destroy()
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
| gpl-2.0 |
btabibian/scikit-learn | sklearn/calibration.py | 37 | 20332 | """Calibration of predicted probabilities."""
# Author: Alexandre Gramfort <[email protected]>
# Balazs Kegl <[email protected]>
# Jan Hendrik Metzen <[email protected]>
# Mathieu Blondel <[email protected]>
#
# License: BSD 3 clause
from __future__ import division
import warnings
from math import log
import numpy as np
from scipy.optimize import fmin_bfgs
from sklearn.preprocessing import LabelEncoder
from .base import BaseEstimator, ClassifierMixin, RegressorMixin, clone
from .preprocessing import label_binarize, LabelBinarizer
from .utils import check_X_y, check_array, indexable, column_or_1d
from .utils.validation import check_is_fitted, check_consistent_length
from .utils.fixes import signature
from .isotonic import IsotonicRegression
from .svm import LinearSVC
from .model_selection import check_cv
from .metrics.classification import _check_binary_probabilistic_predictions
class CalibratedClassifierCV(BaseEstimator, ClassifierMixin):
"""Probability calibration with isotonic regression or sigmoid.
With this class, the base_estimator is fit on the train set of the
cross-validation generator and the test set is used for calibration.
The probabilities for each of the folds are then averaged
for prediction. In case that cv="prefit" is passed to __init__,
it is assumed that base_estimator has been fitted already and all
data is used for calibration. Note that data for fitting the
classifier and for calibrating it must be disjoint.
Read more in the :ref:`User Guide <calibration>`.
Parameters
----------
base_estimator : instance BaseEstimator
The classifier whose output decision function needs to be calibrated
to offer more accurate predict_proba outputs. If cv=prefit, the
classifier must have been fit already on data.
method : 'sigmoid' or 'isotonic'
        The method to use for calibration. Can be 'sigmoid', which
        corresponds to Platt's method, or 'isotonic', which is a
non-parametric approach. It is not advised to use isotonic calibration
with too few calibration samples ``(<<1000)`` since it tends to
overfit.
Use sigmoids (Platt's calibration) in this case.
cv : integer, cross-validation generator, iterable or "prefit", optional
Determines the cross-validation splitting strategy.
Possible inputs for cv are:
- None, to use the default 3-fold cross-validation,
- integer, to specify the number of folds.
- An object to be used as a cross-validation generator.
- An iterable yielding train/test splits.
For integer/None inputs, if ``y`` is binary or multiclass,
:class:`sklearn.model_selection.StratifiedKFold` is used. If ``y`` is
neither binary nor multiclass, :class:`sklearn.model_selection.KFold`
is used.
Refer :ref:`User Guide <cross_validation>` for the various
cross-validation strategies that can be used here.
If "prefit" is passed, it is assumed that base_estimator has been
fitted already and all data is used for calibration.
Attributes
----------
classes_ : array, shape (n_classes)
The class labels.
calibrated_classifiers_ : list (len() equal to cv or 1 if cv == "prefit")
        The list of calibrated classifiers, one for each cross-validation fold,
which has been fitted on all but the validation fold and calibrated
on the validation fold.
References
----------
.. [1] Obtaining calibrated probability estimates from decision trees
and naive Bayesian classifiers, B. Zadrozny & C. Elkan, ICML 2001
.. [2] Transforming Classifier Scores into Accurate Multiclass
Probability Estimates, B. Zadrozny & C. Elkan, (KDD 2002)
.. [3] Probabilistic Outputs for Support Vector Machines and Comparisons to
Regularized Likelihood Methods, J. Platt, (1999)
.. [4] Predicting Good Probabilities with Supervised Learning,
A. Niculescu-Mizil & R. Caruana, ICML 2005
"""
def __init__(self, base_estimator=None, method='sigmoid', cv=3):
self.base_estimator = base_estimator
self.method = method
self.cv = cv
def fit(self, X, y, sample_weight=None):
"""Fit the calibrated model
Parameters
----------
X : array-like, shape (n_samples, n_features)
Training data.
y : array-like, shape (n_samples,)
Target values.
sample_weight : array-like, shape = [n_samples] or None
Sample weights. If None, then samples are equally weighted.
Returns
-------
self : object
Returns an instance of self.
"""
X, y = check_X_y(X, y, accept_sparse=['csc', 'csr', 'coo'],
force_all_finite=False)
X, y = indexable(X, y)
le = LabelBinarizer().fit(y)
self.classes_ = le.classes_
# Check that each cross-validation fold can have at least one
# example per class
n_folds = self.cv if isinstance(self.cv, int) \
else self.cv.n_folds if hasattr(self.cv, "n_folds") else None
if n_folds and \
np.any([np.sum(y == class_) < n_folds for class_ in
self.classes_]):
raise ValueError("Requesting %d-fold cross-validation but provided"
" less than %d examples for at least one class."
% (n_folds, n_folds))
self.calibrated_classifiers_ = []
if self.base_estimator is None:
# we want all classifiers that don't expose a random_state
# to be deterministic (and we don't want to expose this one).
base_estimator = LinearSVC(random_state=0)
else:
base_estimator = self.base_estimator
if self.cv == "prefit":
calibrated_classifier = _CalibratedClassifier(
base_estimator, method=self.method)
if sample_weight is not None:
calibrated_classifier.fit(X, y, sample_weight)
else:
calibrated_classifier.fit(X, y)
self.calibrated_classifiers_.append(calibrated_classifier)
else:
cv = check_cv(self.cv, y, classifier=True)
fit_parameters = signature(base_estimator.fit).parameters
estimator_name = type(base_estimator).__name__
if (sample_weight is not None
and "sample_weight" not in fit_parameters):
warnings.warn("%s does not support sample_weight. Samples"
" weights are only used for the calibration"
" itself." % estimator_name)
base_estimator_sample_weight = None
else:
if sample_weight is not None:
sample_weight = check_array(sample_weight, ensure_2d=False)
check_consistent_length(y, sample_weight)
base_estimator_sample_weight = sample_weight
for train, test in cv.split(X, y):
this_estimator = clone(base_estimator)
if base_estimator_sample_weight is not None:
this_estimator.fit(
X[train], y[train],
sample_weight=base_estimator_sample_weight[train])
else:
this_estimator.fit(X[train], y[train])
calibrated_classifier = _CalibratedClassifier(
this_estimator, method=self.method,
classes=self.classes_)
if sample_weight is not None:
calibrated_classifier.fit(X[test], y[test],
sample_weight[test])
else:
calibrated_classifier.fit(X[test], y[test])
self.calibrated_classifiers_.append(calibrated_classifier)
return self
def predict_proba(self, X):
"""Posterior probabilities of classification
This function returns posterior probabilities of classification
according to each class on an array of test vectors X.
Parameters
----------
X : array-like, shape (n_samples, n_features)
The samples.
Returns
-------
C : array, shape (n_samples, n_classes)
The predicted probas.
"""
check_is_fitted(self, ["classes_", "calibrated_classifiers_"])
X = check_array(X, accept_sparse=['csc', 'csr', 'coo'],
force_all_finite=False)
# Compute the arithmetic mean of the predictions of the calibrated
# classifiers
mean_proba = np.zeros((X.shape[0], len(self.classes_)))
for calibrated_classifier in self.calibrated_classifiers_:
proba = calibrated_classifier.predict_proba(X)
mean_proba += proba
mean_proba /= len(self.calibrated_classifiers_)
return mean_proba
def predict(self, X):
"""Predict the target of new samples. Can be different from the
prediction of the uncalibrated classifier.
Parameters
----------
X : array-like, shape (n_samples, n_features)
The samples.
Returns
-------
C : array, shape (n_samples,)
The predicted class.
"""
check_is_fitted(self, ["classes_", "calibrated_classifiers_"])
return self.classes_[np.argmax(self.predict_proba(X), axis=1)]
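# A minimal usage sketch of the wrapper above (illustrative only; the
# synthetic dataset and settings are assumptions, not part of the library).
# The function is defined but never called at import time.
def _example_calibrated_classifier_cv():
    from sklearn.datasets import make_classification
    X, y = make_classification(n_samples=200, random_state=0)
    # Calibrate a LinearSVC's decision scores with Platt's sigmoid method;
    # cv=3 keeps each fold's training data disjoint from its calibration data.
    clf = CalibratedClassifierCV(LinearSVC(random_state=0),
                                 method='sigmoid', cv=3)
    clf.fit(X, y)
    return clf.predict_proba(X[:5])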
class _CalibratedClassifier(object):
"""Probability calibration with isotonic regression or sigmoid.
It assumes that base_estimator has already been fit, and trains the
calibration on the input set of the fit function. Note that this class
should not be used as an estimator directly. Use CalibratedClassifierCV
with cv="prefit" instead.
Parameters
----------
base_estimator : instance BaseEstimator
The classifier whose output decision function needs to be calibrated
to offer more accurate predict_proba outputs. No default value since
it has to be an already fitted estimator.
method : 'sigmoid' | 'isotonic'
        The method to use for calibration. Can be 'sigmoid', which
        corresponds to Platt's method, or 'isotonic', which is a
non-parametric approach based on isotonic regression.
classes : array-like, shape (n_classes,), optional
Contains unique classes used to fit the base estimator.
if None, then classes is extracted from the given target values
in fit().
References
----------
.. [1] Obtaining calibrated probability estimates from decision trees
and naive Bayesian classifiers, B. Zadrozny & C. Elkan, ICML 2001
.. [2] Transforming Classifier Scores into Accurate Multiclass
Probability Estimates, B. Zadrozny & C. Elkan, (KDD 2002)
.. [3] Probabilistic Outputs for Support Vector Machines and Comparisons to
Regularized Likelihood Methods, J. Platt, (1999)
.. [4] Predicting Good Probabilities with Supervised Learning,
A. Niculescu-Mizil & R. Caruana, ICML 2005
"""
def __init__(self, base_estimator, method='sigmoid', classes=None):
self.base_estimator = base_estimator
self.method = method
self.classes = classes
def _preproc(self, X):
n_classes = len(self.classes_)
if hasattr(self.base_estimator, "decision_function"):
df = self.base_estimator.decision_function(X)
if df.ndim == 1:
df = df[:, np.newaxis]
elif hasattr(self.base_estimator, "predict_proba"):
df = self.base_estimator.predict_proba(X)
if n_classes == 2:
df = df[:, 1:]
else:
raise RuntimeError('classifier has no decision_function or '
'predict_proba method.')
idx_pos_class = self.label_encoder_.\
transform(self.base_estimator.classes_)
return df, idx_pos_class
def fit(self, X, y, sample_weight=None):
"""Calibrate the fitted model
Parameters
----------
X : array-like, shape (n_samples, n_features)
Training data.
y : array-like, shape (n_samples,)
Target values.
sample_weight : array-like, shape = [n_samples] or None
Sample weights. If None, then samples are equally weighted.
Returns
-------
self : object
Returns an instance of self.
"""
self.label_encoder_ = LabelEncoder()
if self.classes is None:
self.label_encoder_.fit(y)
else:
self.label_encoder_.fit(self.classes)
self.classes_ = self.label_encoder_.classes_
Y = label_binarize(y, self.classes_)
df, idx_pos_class = self._preproc(X)
self.calibrators_ = []
for k, this_df in zip(idx_pos_class, df.T):
if self.method == 'isotonic':
calibrator = IsotonicRegression(out_of_bounds='clip')
elif self.method == 'sigmoid':
calibrator = _SigmoidCalibration()
else:
raise ValueError('method should be "sigmoid" or '
'"isotonic". Got %s.' % self.method)
calibrator.fit(this_df, Y[:, k], sample_weight)
self.calibrators_.append(calibrator)
return self
def predict_proba(self, X):
"""Posterior probabilities of classification
This function returns posterior probabilities of classification
according to each class on an array of test vectors X.
Parameters
----------
X : array-like, shape (n_samples, n_features)
The samples.
Returns
-------
C : array, shape (n_samples, n_classes)
The predicted probas. Can be exact zeros.
"""
n_classes = len(self.classes_)
proba = np.zeros((X.shape[0], n_classes))
df, idx_pos_class = self._preproc(X)
for k, this_df, calibrator in \
zip(idx_pos_class, df.T, self.calibrators_):
if n_classes == 2:
k += 1
proba[:, k] = calibrator.predict(this_df)
# Normalize the probabilities
if n_classes == 2:
proba[:, 0] = 1. - proba[:, 1]
else:
proba /= np.sum(proba, axis=1)[:, np.newaxis]
# XXX : for some reason all probas can be 0
proba[np.isnan(proba)] = 1. / n_classes
# Deal with cases where the predicted probability minimally exceeds 1.0
proba[(1.0 < proba) & (proba <= 1.0 + 1e-5)] = 1.0
return proba
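# A minimal sketch of the cv="prefit" path through the public wrapper
# (illustrative only; the split below is an assumption): the base estimator
# is fitted on one half of the data and calibrated on the disjoint half.
def _example_prefit_calibration():
    from sklearn.datasets import make_classification
    X, y = make_classification(n_samples=200, random_state=0)
    base = LinearSVC(random_state=0).fit(X[:100], y[:100])
    calibrated = CalibratedClassifierCV(base, method='sigmoid', cv='prefit')
    calibrated.fit(X[100:], y[100:])  # calibration data only
    return calibrated.predict_proba(X[:5])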
def _sigmoid_calibration(df, y, sample_weight=None):
"""Probability Calibration with sigmoid method (Platt 2000)
Parameters
----------
df : ndarray, shape (n_samples,)
The decision function or predict proba for the samples.
y : ndarray, shape (n_samples,)
The targets.
sample_weight : array-like, shape = [n_samples] or None
Sample weights. If None, then samples are equally weighted.
Returns
-------
a : float
The slope.
b : float
The intercept.
References
----------
Platt, "Probabilistic Outputs for Support Vector Machines"
"""
df = column_or_1d(df)
y = column_or_1d(y)
F = df # F follows Platt's notations
tiny = np.finfo(np.float).tiny # to avoid division by 0 warning
# Bayesian priors (see Platt end of section 2.2)
prior0 = float(np.sum(y <= 0))
prior1 = y.shape[0] - prior0
T = np.zeros(y.shape)
T[y > 0] = (prior1 + 1.) / (prior1 + 2.)
T[y <= 0] = 1. / (prior0 + 2.)
T1 = 1. - T
def objective(AB):
# From Platt (beginning of Section 2.2)
E = np.exp(AB[0] * F + AB[1])
P = 1. / (1. + E)
l = -(T * np.log(P + tiny) + T1 * np.log(1. - P + tiny))
if sample_weight is not None:
return (sample_weight * l).sum()
else:
return l.sum()
def grad(AB):
# gradient of the objective function
E = np.exp(AB[0] * F + AB[1])
P = 1. / (1. + E)
TEP_minus_T1P = P * (T * E - T1)
if sample_weight is not None:
TEP_minus_T1P *= sample_weight
dA = np.dot(TEP_minus_T1P, F)
dB = np.sum(TEP_minus_T1P)
return np.array([dA, dB])
AB0 = np.array([0., log((prior0 + 1.) / (prior1 + 1.))])
AB_ = fmin_bfgs(objective, AB0, fprime=grad, disp=False)
return AB_[0], AB_[1]
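# A toy sketch of the helper above (illustrative only; the decision scores
# and labels are made up): fit Platt's sigmoid on five scores, then compute
# the calibrated probability for a new score.
def _example_sigmoid_calibration(new_score=0.5):
    df = np.array([-2.0, -1.0, 0.0, 1.0, 2.0])
    y = np.array([0, 0, 1, 1, 1])
    a, b = _sigmoid_calibration(df, y)
    # Calibrated probability of the positive class for the new score.
    return 1. / (1. + np.exp(a * new_score + b))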
class _SigmoidCalibration(BaseEstimator, RegressorMixin):
"""Sigmoid regression model.
Attributes
----------
a_ : float
The slope.
b_ : float
The intercept.
"""
def fit(self, X, y, sample_weight=None):
"""Fit the model using X, y as training data.
Parameters
----------
X : array-like, shape (n_samples,)
Training data.
y : array-like, shape (n_samples,)
Training target.
sample_weight : array-like, shape = [n_samples] or None
Sample weights. If None, then samples are equally weighted.
Returns
-------
self : object
Returns an instance of self.
"""
X = column_or_1d(X)
y = column_or_1d(y)
X, y = indexable(X, y)
self.a_, self.b_ = _sigmoid_calibration(X, y, sample_weight)
return self
def predict(self, T):
"""Predict new data by linear interpolation.
Parameters
----------
T : array-like, shape (n_samples,)
Data to predict from.
Returns
-------
T_ : array, shape (n_samples,)
The predicted data.
"""
T = column_or_1d(T)
return 1. / (1. + np.exp(self.a_ * T + self.b_))
def calibration_curve(y_true, y_prob, normalize=False, n_bins=5):
"""Compute true and predicted probabilities for a calibration curve.
Calibration curves may also be referred to as reliability diagrams.
Read more in the :ref:`User Guide <calibration>`.
Parameters
----------
y_true : array, shape (n_samples,)
True targets.
y_prob : array, shape (n_samples,)
Probabilities of the positive class.
normalize : bool, optional, default=False
Whether y_prob needs to be normalized into the bin [0, 1], i.e. is not
a proper probability. If True, the smallest value in y_prob is mapped
onto 0 and the largest one onto 1.
n_bins : int
Number of bins. A bigger number requires more data.
Returns
-------
prob_true : array, shape (n_bins,)
The true probability in each bin (fraction of positives).
prob_pred : array, shape (n_bins,)
The mean predicted probability in each bin.
References
----------
Alexandru Niculescu-Mizil and Rich Caruana (2005) Predicting Good
Probabilities With Supervised Learning, in Proceedings of the 22nd
International Conference on Machine Learning (ICML).
See section 4 (Qualitative Analysis of Predictions).
"""
y_true = column_or_1d(y_true)
y_prob = column_or_1d(y_prob)
if normalize: # Normalize predicted values into interval [0, 1]
y_prob = (y_prob - y_prob.min()) / (y_prob.max() - y_prob.min())
elif y_prob.min() < 0 or y_prob.max() > 1:
raise ValueError("y_prob has values outside [0, 1] and normalize is "
"set to False.")
y_true = _check_binary_probabilistic_predictions(y_true, y_prob)
bins = np.linspace(0., 1. + 1e-8, n_bins + 1)
binids = np.digitize(y_prob, bins) - 1
bin_sums = np.bincount(binids, weights=y_prob, minlength=len(bins))
bin_true = np.bincount(binids, weights=y_true, minlength=len(bins))
bin_total = np.bincount(binids, minlength=len(bins))
nonzero = bin_total != 0
prob_true = (bin_true[nonzero] / bin_total[nonzero])
prob_pred = (bin_sums[nonzero] / bin_total[nonzero])
return prob_true, prob_pred
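# A hand-checkable sketch of calibration_curve (illustrative only; the
# probabilities are made up). With n_bins=2 the bin edges are [0, 0.5, 1]:
# the first bin holds 0.1, 0.4 and 0.35 (one positive out of three), the
# second holds 0.8 (one positive out of one).
def _example_calibration_curve():
    y_true = np.array([0, 0, 1, 1])
    y_prob = np.array([0.1, 0.4, 0.35, 0.8])
    prob_true, prob_pred = calibration_curve(y_true, y_prob, n_bins=2)
    # A perfectly calibrated model would give prob_true == prob_pred.
    return prob_true, prob_pred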
| bsd-3-clause |
slightstone/SickRage | tests/issue_submitter_tests.py | 5 | 1664 | # coding=UTF-8
# Author: Dennis Lutter <[email protected]>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
from __future__ import with_statement
import unittest
import sys, os.path
from configobj import ConfigObj
sys.path.append(os.path.abspath('..'))
sys.path.append(os.path.abspath('../lib'))
import sickbeard
import test_lib as test
def error():
try:
raise Exception('FAKE EXCEPTION')
except Exception as e:
sickbeard.logger.log("FAKE ERROR: " + sickbeard.exceptions.ex(e), sickbeard.logger.ERROR)
sickbeard.logger.submit_errors()
raise
class IssueSubmitterBasicTests(unittest.TestCase):
def test_submitter(self):
self.assertRaises(Exception, error)
if __name__ == "__main__":
print "=================="
print "STARTING - ISSUE SUBMITTER TESTS"
print "=================="
print "######################################################################"
    suite = unittest.TestLoader().loadTestsFromTestCase(IssueSubmitterBasicTests)
    unittest.TextTestRunner(verbosity=2).run(suite) | gpl-3.0 |
leoliujie/odoo | addons/hr/res_config.py | 377 | 3452 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (C) 2004-2012 OpenERP S.A. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class hr_config_settings(osv.osv_memory):
_name = 'hr.config.settings'
_inherit = 'res.config.settings'
_columns = {
'module_hr_timesheet_sheet': fields.boolean('Allow timesheets validation by managers',
help ="""This installs the module hr_timesheet_sheet."""),
'module_hr_attendance': fields.boolean('Install attendances feature',
help ="""This installs the module hr_attendance."""),
'module_hr_timesheet': fields.boolean('Manage timesheets',
help ="""This installs the module hr_timesheet."""),
'module_hr_holidays': fields.boolean('Manage holidays, leaves and allocation requests',
help ="""This installs the module hr_holidays."""),
'module_hr_expense': fields.boolean('Manage employees expenses',
help ="""This installs the module hr_expense."""),
'module_hr_recruitment': fields.boolean('Manage the recruitment process',
help ="""This installs the module hr_recruitment."""),
'module_hr_contract': fields.boolean('Record contracts per employee',
help ="""This installs the module hr_contract."""),
'module_hr_evaluation': fields.boolean('Organize employees periodic evaluation',
help ="""This installs the module hr_evaluation."""),
'module_hr_gamification': fields.boolean('Drive engagement with challenges and badges',
help ="""This installs the module hr_gamification."""),
'module_account_analytic_analysis': fields.boolean('Allow invoicing based on timesheets (the sale application will be installed)',
help ="""This installs the module account_analytic_analysis, which will install sales management too."""),
'module_hr_payroll': fields.boolean('Manage payroll',
help ="""This installs the module hr_payroll."""),
}
def onchange_hr_timesheet(self, cr, uid, ids, timesheet, context=None):
""" module_hr_timesheet implies module_hr_attendance """
if timesheet:
return {'value': {'module_hr_attendance': True}}
return {}
def onchange_hr_attendance(self, cr, uid, ids, attendance, context=None):
""" module_hr_timesheet implies module_hr_attendance """
if not attendance:
return {'value': {'module_hr_timesheet': False}}
return {}
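    # Illustrative note (not part of the original module): the two onchange
    # handlers above encode "timesheets imply attendances". Hypothetical
    # client calls (ids and context omitted) would behave as follows:
    #   self.onchange_hr_timesheet(cr, uid, [], True)
    #     -> {'value': {'module_hr_attendance': True}}
    #   self.onchange_hr_attendance(cr, uid, [], False)
    #     -> {'value': {'module_hr_timesheet': False}}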
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
ruiting/opencog | opencog/python/pln_old/examples/attentionallocation/socrates_attention_agent.py | 5 | 2253 | __author__ = 'sebastian'
from opencog.cogserver import MindAgent
from opencog.atomspace import types
from pln.chainers import Chainer
from pln.rules import *
class SocratesAgent(MindAgent):
def __init__(self):
self.chainer = None
def create_chainer(self, atomspace):
self.chainer = Chainer(atomspace,
agent=self,
stimulateAtoms=True,
preferAttentionalFocus=True,
allow_output_with_variables=True,
delete_temporary_variables=True)
self.chainer.add_rule(
GeneralEvaluationToMemberRule(self.chainer, 0, 2))
self.chainer.add_rule(MemberToInheritanceRule(self.chainer))
self.chainer.add_rule(
DeductionRule(self.chainer, types.InheritanceLink))
self.chainer.add_rule(
InheritanceToMemberRule(self.chainer))
self.chainer.add_rule(
MemberToEvaluationRule(self.chainer))
self.chainer.add_rule(
AbductionRule(self.chainer, types.InheritanceLink))
def run(self, atomspace):
if self.chainer is None:
self.create_chainer(atomspace)
print("PLN Chainer created.")
return
print("PLN continuing.")
# there is no query here, so it doesn't give any stimulus
if not check_result(atomspace):
result = self.chainer.forward_step()
return result
def check_result(atomspace):
"""
Searches for an instance of
EvaluationLink
PredicateNode "breathe"
ListLink
ConceptNode "Socrates"
ConceptNode "air"
"""
result_found = False
eval_links = atomspace.get_atoms_by_type(types.EvaluationLink)
for eval_link in eval_links:
out = eval_link.out
if out[0].is_a(types.PredicateNode) and "breathe" in out[0].name\
and out[1].is_a(types.ListLink)\
and "Socrates" in out[1].out[0].name\
and "air" in out[1].out[1].name:
result_found = True
break
if result_found:
print("Result found? {0}.".format(result_found))
return result_found
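# A hedged driver sketch (illustrative; not part of the original module).
# It assumes an atomspace already loaded with the Socrates axioms and steps
# the agent until the target EvaluationLink appears or max_steps is reached.
def _example_drive_agent(atomspace, max_steps=100):
    agent = SocratesAgent()
    for _ in range(max_steps):
        if check_result(atomspace):
            break
        # The first call builds the chainer; later calls do one forward step.
        agent.run(atomspace)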
| agpl-3.0 |
rmboggs/django | tests/auth_tests/test_validators.py | 49 | 7726 | from __future__ import unicode_literals
import os
from django.contrib.auth.models import User
from django.contrib.auth.password_validation import (
CommonPasswordValidator, MinimumLengthValidator, NumericPasswordValidator,
UserAttributeSimilarityValidator, get_default_password_validators,
get_password_validators, password_changed,
password_validators_help_text_html, password_validators_help_texts,
validate_password,
)
from django.core.exceptions import ValidationError
from django.test import TestCase, override_settings
from django.utils._os import upath
@override_settings(AUTH_PASSWORD_VALIDATORS=[
{'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator'},
{'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', 'OPTIONS': {
'min_length': 12,
}},
])
class PasswordValidationTest(TestCase):
def test_get_default_password_validators(self):
validators = get_default_password_validators()
self.assertEqual(len(validators), 2)
self.assertEqual(validators[0].__class__.__name__, 'CommonPasswordValidator')
self.assertEqual(validators[1].__class__.__name__, 'MinimumLengthValidator')
self.assertEqual(validators[1].min_length, 12)
def test_get_password_validators_custom(self):
validator_config = [{'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator'}]
validators = get_password_validators(validator_config)
self.assertEqual(len(validators), 1)
self.assertEqual(validators[0].__class__.__name__, 'CommonPasswordValidator')
self.assertEqual(get_password_validators([]), [])
def test_validate_password(self):
self.assertIsNone(validate_password('sufficiently-long'))
msg_too_short = 'This password is too short. It must contain at least 12 characters.'
with self.assertRaises(ValidationError) as cm:
validate_password('django4242')
self.assertEqual(cm.exception.messages, [msg_too_short])
self.assertEqual(cm.exception.error_list[0].code, 'password_too_short')
with self.assertRaises(ValidationError) as cm:
validate_password('password')
self.assertEqual(cm.exception.messages, ['This password is too common.', msg_too_short])
self.assertEqual(cm.exception.error_list[0].code, 'password_too_common')
self.assertIsNone(validate_password('password', password_validators=[]))
def test_password_changed(self):
self.assertIsNone(password_changed('password'))
def test_password_validators_help_texts(self):
help_texts = password_validators_help_texts()
self.assertEqual(len(help_texts), 2)
self.assertIn('12 characters', help_texts[1])
self.assertEqual(password_validators_help_texts(password_validators=[]), [])
def test_password_validators_help_text_html(self):
help_text = password_validators_help_text_html()
self.assertEqual(help_text.count('<li>'), 2)
self.assertIn('12 characters', help_text)
@override_settings(AUTH_PASSWORD_VALIDATORS=[])
def test_empty_password_validator_help_text_html(self):
self.assertEqual(password_validators_help_text_html(), '')
class MinimumLengthValidatorTest(TestCase):
def test_validate(self):
expected_error = "This password is too short. It must contain at least %d characters."
self.assertIsNone(MinimumLengthValidator().validate('12345678'))
self.assertIsNone(MinimumLengthValidator(min_length=3).validate('123'))
with self.assertRaises(ValidationError) as cm:
MinimumLengthValidator().validate('1234567')
self.assertEqual(cm.exception.messages, [expected_error % 8])
self.assertEqual(cm.exception.error_list[0].code, 'password_too_short')
with self.assertRaises(ValidationError) as cm:
MinimumLengthValidator(min_length=3).validate('12')
self.assertEqual(cm.exception.messages, [expected_error % 3])
def test_help_text(self):
self.assertEqual(
MinimumLengthValidator().get_help_text(),
"Your password must contain at least 8 characters."
)
class UserAttributeSimilarityValidatorTest(TestCase):
def test_validate(self):
user = User.objects.create(
username='testclient', first_name='Test', last_name='Client', email='[email protected]',
password='sha1$6efc0$f93efe9fd7542f25a7be94871ea45aa95de57161',
)
expected_error = "The password is too similar to the %s."
self.assertIsNone(UserAttributeSimilarityValidator().validate('testclient'))
with self.assertRaises(ValidationError) as cm:
            UserAttributeSimilarityValidator().validate('testclient', user=user)
self.assertEqual(cm.exception.messages, [expected_error % "username"])
self.assertEqual(cm.exception.error_list[0].code, 'password_too_similar')
with self.assertRaises(ValidationError) as cm:
            UserAttributeSimilarityValidator().validate('example.com', user=user)
self.assertEqual(cm.exception.messages, [expected_error % "email address"])
with self.assertRaises(ValidationError) as cm:
UserAttributeSimilarityValidator(
user_attributes=['first_name'],
max_similarity=0.3,
).validate('testclient', user=user)
self.assertEqual(cm.exception.messages, [expected_error % "first name"])
self.assertIsNone(
UserAttributeSimilarityValidator(user_attributes=['first_name']).validate('testclient', user=user)
)
def test_help_text(self):
self.assertEqual(
UserAttributeSimilarityValidator().get_help_text(),
"Your password can't be too similar to your other personal information."
)
class CommonPasswordValidatorTest(TestCase):
def test_validate(self):
expected_error = "This password is too common."
self.assertIsNone(CommonPasswordValidator().validate('a-safe-password'))
with self.assertRaises(ValidationError) as cm:
CommonPasswordValidator().validate('godzilla')
self.assertEqual(cm.exception.messages, [expected_error])
def test_validate_custom_list(self):
path = os.path.join(os.path.dirname(os.path.realpath(upath(__file__))), 'common-passwords-custom.txt')
validator = CommonPasswordValidator(password_list_path=path)
expected_error = "This password is too common."
self.assertIsNone(validator.validate('a-safe-password'))
with self.assertRaises(ValidationError) as cm:
validator.validate('from-my-custom-list')
self.assertEqual(cm.exception.messages, [expected_error])
self.assertEqual(cm.exception.error_list[0].code, 'password_too_common')
def test_help_text(self):
self.assertEqual(
CommonPasswordValidator().get_help_text(),
"Your password can't be a commonly used password."
)
class NumericPasswordValidatorTest(TestCase):
def test_validate(self):
expected_error = "This password is entirely numeric."
self.assertIsNone(NumericPasswordValidator().validate('a-safe-password'))
with self.assertRaises(ValidationError) as cm:
NumericPasswordValidator().validate('42424242')
self.assertEqual(cm.exception.messages, [expected_error])
self.assertEqual(cm.exception.error_list[0].code, 'password_entirely_numeric')
def test_help_text(self):
self.assertEqual(
NumericPasswordValidator().get_help_text(),
"Your password can't be entirely numeric."
)
| bsd-3-clause |
cxxgtxy/tensorflow | tensorflow/python/kernel_tests/sparse_conditional_accumulator_test.py | 132 | 22955 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import time
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes as dtypes_lib
from tensorflow.python.framework import errors_impl
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.platform import test
def _indexedslice(x, noshape=False):
x = np.array(x)
dense_shape = x.shape
ndim = len(dense_shape)
indices = np.where(np.sum(x, tuple(range(1, ndim))))[0]
values = x[indices]
if noshape:
dense_shape = None
return ops.IndexedSlices(
indices=indices.tolist(), values=values, dense_shape=dense_shape)
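# Illustrative sketch (not part of the original test): rows whose entries
# sum to zero are dropped and the remaining rows become the slice values,
# e.g. the following would hold under this helper:
#   s = _indexedslice(np.array([[1, 0], [0, 0], [0, 2]]))
#   # s.indices == [0, 2]; s.values == [[1, 0], [0, 2]]; s.dense_shape == (3, 2)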
class IndexedSlicesConditionalAccumulatorTest(test.TestCase):
def _assertEqual_indexedslices(self, expected_tensor, result):
self.assertAllEqual(expected_tensor.indices, result.indices)
self.assertAllEqual(expected_tensor.values, result.values)
if (result.dense_shape is not None and
expected_tensor.dense_shape is not None):
self.assertAllEqual(expected_tensor.dense_shape, result.dense_shape)
def _assertEqual_nparray(self, expected_array, result, sess):
expected_tensor = _indexedslice(expected_array)
self._assertEqual_indexedslices(expected_tensor, result)
def testConstructor(self):
with ops.Graph().as_default():
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q")
self.assertTrue(isinstance(q.accumulator_ref, ops.Tensor))
self.assertProtoEquals("""
name:'Q' op:'SparseConditionalAccumulator'
attr { key: 'dtype' value { type: DT_FLOAT } }
attr { key: 'shape' value { shape { unknown_rank: true} } }
attr { key: 'container' value { s: '' } }
attr { key: 'shared_name' value { s: '' } }
""", q.accumulator_ref.op.node_def)
def testConstructorWithShape(self):
with ops.Graph().as_default():
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32,
name="Q",
shape=tensor_shape.TensorShape([1, 5, 2, 8]))
self.assertTrue(isinstance(q.accumulator_ref, ops.Tensor))
self.assertProtoEquals("""
name:'Q' op:'SparseConditionalAccumulator'
attr { key: 'dtype' value { type: DT_FLOAT } }
attr { key: 'shape' value { shape { dim {size: 1 }
dim {size: 5 }
dim {size: 2 }
dim {size: 8 }
} } }
attr { key: 'container' value { s: '' } }
attr { key: 'shared_name' value { s: '' } }
""", q.accumulator_ref.op.node_def)
def testAccumulatorSizeEmpty(self):
with self.test_session():
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q")
self.assertEqual(q.num_accumulated().eval(), 0)
def testAccumulatorSetGlobalStep(self):
with self.test_session():
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=tensor_shape.TensorShape([1]))
set_global_step_op = q.set_global_step(1)
set_global_step_op.run()
def testAccumulatorApplyGradFloat32(self):
with self.test_session():
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=tensor_shape.TensorShape([3, 3]))
accum_op = q.apply_indexed_slices_grad(
ops.IndexedSlices(
indices=[0, 2],
values=np.array([[0, 0, 1], [3, 0, 4]]).astype(np.float32)))
accum_op.run()
self.assertEqual(q.num_accumulated().eval(), 1)
def testDtypes(self):
with self.test_session() as sess:
dtypes = [dtypes_lib.float16, dtypes_lib.float32, dtypes_lib.float64]
for i in range(len(dtypes)):
dtype = dtypes[i]
q = data_flow_ops.SparseConditionalAccumulator(
dtype, shape=tensor_shape.TensorShape([3, 3, 3]))
elems = np.arange(2)
sum_elems = np.zeros([3, 3, 3]).astype(dtype.as_numpy_dtype)
for e in elems:
mat_to_add = np.zeros([3, 3, 3]).astype(dtype.as_numpy_dtype)
mat_to_add[i, i, i] = e + 1
sum_elems += mat_to_add
t = _indexedslice(mat_to_add)
q.apply_indexed_slices_grad(t).run()
result = sess.run(q.take_indexed_slices_grad(1))
self._assertEqual_nparray(sum_elems / len(elems), result, sess)
def testAccumulatorMultipleAccumulators(self):
with self.test_session() as sess:
q_f32_0 = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=tensor_shape.TensorShape([2, 2]))
q_f32_1 = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=tensor_shape.TensorShape([2, 2]))
q_f16_0 = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float16, name="Q", shape=tensor_shape.TensorShape([2, 2]))
q_f16_1 = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float16, name="Q", shape=tensor_shape.TensorShape([2, 2]))
accums = [q_f16_0, q_f16_1, q_f32_0, q_f32_1]
elems = [[[1, 0], [0, 0]], [[0, 1], [0, 0]], [[0, 0], [1, 0]], [[0, 0],
[0, 1]]]
expected_tensors = []
for i in range(len(accums)):
tensor_to_add = np.array(elems[i]).astype(accums[i]
.dtype.as_numpy_dtype)
expected_tensor = _indexedslice(tensor_to_add)
expected_tensors.append(expected_tensor)
st = _indexedslice(tensor_to_add)
accums[i].apply_indexed_slices_grad(st).run()
for i in range(len(accums)):
result = sess.run(accums[i].take_indexed_slices_grad(1))
self._assertEqual_indexedslices(expected_tensors[i], result)
def testAccumulatorTakeGrad(self):
with self.test_session() as sess:
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=())
grad_indexed_slices = ops.IndexedSlices(
indices=[0, 1], values=np.array([[1, 0], [0, 2]]).astype(np.float32))
accum_op = q.apply_indexed_slices_grad(grad_indexed_slices)
accum_op.run()
accum_op = q.apply_grad([0, 2],
np.array([[0, 1], [3, 0]]).astype(np.float32),
[3, 2])
accum_op.run()
takeg_t = q.take_indexed_slices_grad(1)
val = sess.run(takeg_t)
self.assertAllEqual(val.indices, [0, 1, 2])
self.assertAllEqual(val.values, [[0.5, 0.5], [0, 2], [3, 0]])
self.assertAllEqual(val.dense_shape, [-1, 2])
def testAccumulatorRepeatedTakeGrad(self):
with self.test_session() as sess:
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=())
grad_indexed_slices = ops.IndexedSlices(
indices=[0, 1], values=np.array([[1, 0], [0, 2]]).astype(np.float32))
accum_op = q.apply_indexed_slices_grad(grad_indexed_slices, local_step=0)
accum_op.run()
accum_op = q.apply_grad(
[0, 2],
np.array([[0, 1], [3, 0]]).astype(np.float32), [3, 2],
local_step=0)
accum_op.run()
takeg_t = q.take_indexed_slices_grad(1)
val = sess.run(takeg_t)
self.assertAllEqual(val.indices, [0, 1, 2])
self.assertAllEqual(val.values, [[0.5, 0.5], [0, 2], [3, 0]])
self.assertAllEqual(val.dense_shape, [-1, 2])
grad_indexed_slices = ops.IndexedSlices(
indices=[0, 1],
values=np.array([[10, 0], [0, 20]]).astype(np.float32))
accum_op = q.apply_indexed_slices_grad(grad_indexed_slices, local_step=1)
accum_op.run()
accum_op = q.apply_grad(
[0, 2],
np.array([[0, 10], [30, 0]]).astype(np.float32), [3, 2],
local_step=1)
accum_op.run()
takeg_t = q.take_indexed_slices_grad(1)
val = sess.run(takeg_t)
self.assertAllEqual(val.indices, [0, 1, 2])
self.assertAllEqual(val.values, [[5, 5], [0, 20], [30, 0]])
self.assertAllEqual(val.dense_shape, [-1, 2])
def testParallelApplyGrad(self):
with self.test_session() as sess:
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=tensor_shape.TensorShape([2, 2]))
elems = [10.0, 20.0, 30.0, 40.0, 50.0, 60.0, 70.0, 80.0, 90.0, 100.0]
accum_ops = []
for x in elems:
x = _indexedslice(np.array([[x, 0], [0, x]]).astype(np.float32))
accum_ops.append(q.apply_indexed_slices_grad(x, local_step=0))
takeg_t = q.take_indexed_slices_grad(1)
def apply_indexed_slices_grad(accum_op):
sess.run(accum_op)
threads = [
self.checkedThread(
target=apply_indexed_slices_grad, args=(o,)) for o in accum_ops
]
for thread in threads:
thread.start()
for thread in threads:
thread.join()
val = sess.run(takeg_t)
expected_val = sum(elems) / len(elems)
self._assertEqual_nparray(
np.array([[expected_val, 0], [0, expected_val]]).astype(np.float32),
val, sess)
def testParallelTakeGrad(self):
with self.test_session() as sess:
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=tensor_shape.TensorShape([2, 2]))
elems = [e + 1 for e in range(10)]
accum_ops = []
for e in elems:
v = _indexedslice(np.array([[0, 0], [e, 0]]).astype(np.float32))
accum_ops.append(q.apply_indexed_slices_grad(v, local_step=e - 1))
takeg_t = q.take_indexed_slices_grad(1)
results = []
def apply_indexed_slices_grad():
for accum_op in accum_ops:
time.sleep(1.0)
sess.run(accum_op)
apply_indexed_slices_grad_thread = self.checkedThread(
target=apply_indexed_slices_grad)
def take_grad():
t = sess.run(takeg_t)
results.append(t)
threads = [self.checkedThread(target=take_grad) for _ in range(10)]
for thread in threads:
thread.start()
apply_indexed_slices_grad_thread.start()
for thread in threads:
thread.join()
apply_indexed_slices_grad_thread.join()
for i in range(len(accum_ops)):
self._assertEqual_nparray(
np.array([[0, 0], [elems[i], 0]]), results[i], sess)
def testAccumulatorApplyAndBlockingTake(self):
with self.test_session() as sess:
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=tensor_shape.TensorShape([2, 2]))
elems = [10.0, 20.0, 30.0]
elems_ave = sum(elems) / len(elems)
accum_ops = []
for x in elems:
x = _indexedslice(np.array([[0, x], [0, 0]]).astype(np.float32))
accum_ops.append(q.apply_indexed_slices_grad(x, local_step=0))
takeg_t = q.take_indexed_slices_grad(3)
results = []
def apply_indexed_slices_grad():
for accum_op in accum_ops:
sess.run(accum_op)
def take_grad():
results.append(sess.run(takeg_t))
accum_thread = self.checkedThread(target=apply_indexed_slices_grad)
takeg_thread = self.checkedThread(target=take_grad)
accum_thread.start()
takeg_thread.start()
accum_thread.join()
takeg_thread.join()
self._assertEqual_nparray([[0, elems_ave], [0, 0]], results[0], sess)
def _blocking_takeg(self, sess, takeg_op):
with self.assertRaisesOpError("was cancelled"):
sess.run(takeg_op)
def testAccumulatorCancel(self):
with self.test_session() as sess:
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32,
name="Q",
shape=tensor_shape.TensorShape([1, 2, 3]))
takeg_t = q.take_indexed_slices_grad(1)
takeg_thread = self.checkedThread(
self._blocking_takeg, args=(sess, takeg_t))
takeg_thread.start()
time.sleep(1.0)
sess.close() # Will cancel blocked operation
takeg_thread.join()
def testNonVectorIndices(self):
with self.test_session():
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=tensor_shape.TensorShape([3, 3]))
with self.assertRaisesRegexp(
errors_impl.InvalidArgumentError,
"Input indices should be vector but received shape:"):
q.apply_grad(
grad_indices=[[0, 1], [1, 0]],
grad_values=np.array([1, 2]).astype(np.float32)).run()
def testZeroDimensionValues(self):
with self.test_session():
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=tensor_shape.TensorShape([3, 3]))
with self.assertRaisesRegexp(errors_impl.InvalidArgumentError,
"Values cannot be 0-dimensional."):
q.apply_grad(
grad_indices=[0], grad_values=np.array(1).astype(np.float32)).run()
def testWrongNonEmptyInputValues(self):
with self.test_session():
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=tensor_shape.TensorShape([3, 3]))
with self.assertRaisesRegexp(errors_impl.InvalidArgumentError,
" non-empty input values, got "):
q.apply_grad(
grad_indices=[0, 1],
grad_values=np.array([[0, 1, 1]]).astype(np.float32)).run()
def testDynamicNonVectorIndices(self):
with self.test_session() as sess:
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=tensor_shape.TensorShape([3, 3]))
x_indices = array_ops.placeholder(dtypes_lib.int64)
x_values = array_ops.placeholder(dtypes_lib.float32)
accum_op = q.apply_grad(grad_indices=x_indices, grad_values=x_values)
with self.assertRaisesRegexp(
errors_impl.InvalidArgumentError,
"Input indices should be vector but received shape:"):
sess.run(accum_op,
feed_dict={
x_indices: [[0, 1], [1, 0]],
x_values: np.array([1, 2]).astype(np.float32)
})
def testDynamicWrongNonEmptyInputValues(self):
with self.test_session() as sess:
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=tensor_shape.TensorShape([3, 3]))
x_indices = array_ops.placeholder(dtypes_lib.int64)
x_values = array_ops.placeholder(dtypes_lib.float32)
accum_op = q.apply_grad(grad_indices=x_indices, grad_values=x_values)
with self.assertRaisesRegexp(errors_impl.InvalidArgumentError,
" non-empty input values, got "):
sess.run(accum_op,
feed_dict={
x_indices: [0, 1],
x_values: np.array([[0, 1, 1]]).astype(np.float32)
})
def testEmptyShapeApply(self):
with self.test_session():
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=tensor_shape.TensorShape([]))
with self.assertRaisesRegexp(errors_impl.InvalidArgumentError,
"Input indices should be vector"):
q.apply_grad(grad_indices=0, grad_values=[1.0], grad_shape=[]).run()
with self.assertRaisesRegexp(errors_impl.InvalidArgumentError,
"Input indices should be vector"):
q.apply_grad(grad_indices=0, grad_values=[1.0]).run()
with self.assertRaisesRegexp(errors_impl.InvalidArgumentError,
"Values cannot be 0-dimensional."):
q.apply_grad(grad_indices=[0], grad_values=1.0, grad_shape=[]).run()
with self.assertRaisesRegexp(errors_impl.InvalidArgumentError,
"Values cannot be 0-dimensional."):
q.apply_grad(grad_indices=[0], grad_values=1.0).run()
# The right way to apply a scalar
q.apply_grad(grad_indices=[0], grad_values=[1.0], grad_shape=[]).run()
q.apply_grad(grad_indices=[0], grad_values=[1.0]).run()
def testValidateShape(self):
with self.test_session() as sess:
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=[2, 2, None])
# Provided shape has wrong rank
with self.assertRaisesRegexp(
errors_impl.InvalidArgumentError,
"Shape mismatch: expected shape rank at least 3, got 2"):
q.apply_grad(
grad_indices=[0],
grad_values=np.array([[1, 2]]).astype(np.float32),
grad_shape=[2, 2]).run()
# Provided shape has wrong dim
with self.assertRaisesRegexp(
errors_impl.InvalidArgumentError,
"Shape mismatch: expected shape dim 1 to be 2, got 3"):
q.apply_grad(
grad_indices=[0],
grad_values=np.array([[[1, 2], [3, 4], [5, 6]]]).astype(np.float32),
grad_shape=[2, 3, 2]).run()
# Indices exceeded accumulator's shape's limits
with self.assertRaisesRegexp(
errors_impl.InvalidArgumentError,
"Shape mismatch: index of slice 0 exceeded limits of shape;"
" index is 3 exceeded 2"):
q.apply_grad(
grad_indices=[3],
grad_values=np.array([[[1, 2], [3, 4]]]).astype(np.float32)).run()
# Values' rank does not match shape
with self.assertRaisesRegexp(
errors_impl.InvalidArgumentError,
"Shape mismatch: expected values rank at least 3, got 2"):
q.apply_grad(
grad_indices=[0, 1],
grad_values=np.array([[1, 2], [3, 4]]).astype(np.float32)).run()
# Values' dim does not match shape
with self.assertRaisesRegexp(
errors_impl.InvalidArgumentError,
"Shape mismatch: expected values dim 1 to be 2, got 3"):
q.apply_grad(
grad_indices=[0],
grad_values=np.array(
[[[1, 2], [3, 4], [5, 6]]]).astype(np.float32)).run()
# First successful gradient creates additional constraints
# Shape will be additionally be constrained to [None,2,2,2] hereafter.
q.apply_grad(
grad_indices=[0],
grad_values=np.array(
[[[[1, 2], [3, 4]], [[5, 6], [7, 8]]]]).astype(np.float32)).run()
# Values' rank does not match accumulated gradient
with self.assertRaisesRegexp(
errors_impl.InvalidArgumentError,
"Shape mismatch: expected values rank 4, got 3"):
q.apply_grad(
grad_indices=[0],
grad_values=np.array([[[1, 2], [3, 4]]]).astype(np.float32)).run()
# Values' dim does not match accumulated gradient
with self.assertRaisesRegexp(
errors_impl.InvalidArgumentError,
"Shape mismatch: expected values dim 3 to be 2, got 3"):
q.apply_grad(
grad_indices=[0],
grad_values=np.array(
[[[[1, 2, 3], [4, 5, 6]], [[7, 8, 9], [10, 11, 12]]]]).astype(
np.float32)).run()
# After take grad, constraints on accumulated gradient are removed
sess.run(q.take_grad(1))
# First successful gradient imposes new constraints.
# Hereafter, shape will additionally constrained to [None,2,2,3]
q.apply_grad(
grad_indices=[0],
grad_values=np.array(
[[[[1, 2, 3], [4, 5, 6]], [[7, 8, 9], [10, 11, 12]]]]).astype(
np.float32),
local_step=1).run()
with self.assertRaisesRegexp(
errors_impl.InvalidArgumentError,
"Shape mismatch: expected values dim 3 to be 3, got 2"):
q.apply_grad(
grad_indices=[0],
grad_values=np.array(
[[[[1, 2], [3, 4]], [[5, 6], [7, 8]]]]).astype(np.float32),
local_step=1).run()
def testReturnShape(self):
with self.test_session() as sess:
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=[2, None])
q.apply_grad(
grad_indices=[0],
grad_values=np.array(
[[[[1, 2], [3, 4]], [[5, 6], [7, 8]]]]).astype(np.float32)).run()
val = sess.run(q.take_indexed_slices_grad(1))
self.assertAllEqual(val.dense_shape, [2, 2, 2, 2])
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=[None, 2])
q.apply_grad(
grad_indices=[0],
grad_values=np.array(
[[[[1, 2, 3], [4, 5, 6]], [[7, 8, 9], [10, 11, 12]]]]).astype(
np.float32)).run()
val = sess.run(q.take_indexed_slices_grad(1))
self.assertAllEqual(val.dense_shape, [-1, 2, 2, 3])
  def testApplyGradInt32IndicesAndShape(self):
with self.test_session() as sess:
q = data_flow_ops.SparseConditionalAccumulator(
dtypes_lib.float32, name="Q", shape=tensor_shape.TensorShape([3, 3]))
accum_op = q.apply_grad(
grad_indices=constant_op.constant(
[0, 2], dtype=dtypes_lib.int32),
grad_values=constant_op.constant(
[[0, 0, 1], [3, 0, 4]], dtype=dtypes_lib.float32),
grad_shape=constant_op.constant(
[3, 3], dtype=dtypes_lib.int32))
accum_op.run()
accum_op = q.apply_indexed_slices_grad(
ops.IndexedSlices(
indices=constant_op.constant(
[0, 2], dtype=dtypes_lib.int32),
values=constant_op.constant(
[[0, 0, 1], [3, 0, 4]], dtype=dtypes_lib.float32),
dense_shape=constant_op.constant(
[3, 3], dtype=dtypes_lib.int32)))
accum_op.run()
self.assertEqual(q.num_accumulated().eval(), 2)
val = sess.run(q.take_indexed_slices_grad(1))
self.assertAllEqual(val.indices, [0, 2])
self.assertAllEqual(val.values, [[0, 0, 1], [3, 0, 4]])
self.assertAllEqual(val.dense_shape, [3, 3])
if __name__ == "__main__":
test.main()
| apache-2.0 |
albertomurillo/ansible | test/units/modules/network/netvisor/test_pn_vrouter_pim_config.py | 15 | 2490 | # Copyright: (c) 2018, Pluribus Networks
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
from units.compat.mock import patch
from ansible.modules.network.netvisor import pn_vrouter_pim_config
from units.modules.utils import set_module_args
from .nvos_module import TestNvosModule, load_fixture
class TestVrouterPimConfigModule(TestNvosModule):
module = pn_vrouter_pim_config
def setUp(self):
self.mock_run_nvos_commands = patch('ansible.modules.network.netvisor.pn_vrouter_pim_config.run_cli')
self.run_nvos_commands = self.mock_run_nvos_commands.start()
self.mock_run_check_cli = patch('ansible.modules.network.netvisor.pn_vrouter_pim_config.check_cli')
self.run_check_cli = self.mock_run_check_cli.start()
def tearDown(self):
self.mock_run_nvos_commands.stop()
self.mock_run_check_cli.stop()
def run_cli_patch(self, module, cli, state_map):
if state_map['update'] == 'vrouter-pim-config-modify':
results = dict(
changed=True,
cli_cmd=cli
)
module.exit_json(**results)
def load_fixtures(self, commands=None, state=None, transport='cli'):
self.run_nvos_commands.side_effect = self.run_cli_patch
if state == 'update':
self.run_check_cli.return_value = True
def test_vrouter_pim_config_t1(self):
set_module_args({'pn_cliswitch': 'sw01', 'pn_query_interval': '10',
'pn_querier_timeout': '30', 'pn_vrouter_name': 'foo-vrouter', 'state': 'update'})
result = self.execute_module(changed=True, state='update')
expected_cmd = ' switch sw01 vrouter-pim-config-modify vrouter-name foo-vrouter '
expected_cmd += 'querier-timeout 30 query-interval 10'
self.assertEqual(result['cli_cmd'], expected_cmd)
def test_vrouter_pim_config_t2(self):
set_module_args({'pn_cliswitch': 'sw01', 'pn_query_interval': '30',
'pn_hello_interval': '120', 'pn_vrouter_name': 'foo-vrouter', 'state': 'update'})
result = self.execute_module(changed=True, state='update')
expected_cmd = ' switch sw01 vrouter-pim-config-modify vrouter-name foo-vrouter '
expected_cmd += 'hello-interval 120 query-interval 30'
self.assertEqual(result['cli_cmd'], expected_cmd)
| gpl-3.0 |
SaschaMester/delicium | tools/perf/page_sets/tough_texture_upload_cases.py | 1 | 1659 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import page as page_module
from telemetry import story
class ToughTextureUploadCasesPage(page_module.Page):
def __init__(self, url, page_set):
super(
ToughTextureUploadCasesPage,
self).__init__(
url=url,
page_set=page_set)
def RunPageInteractions(self, action_runner):
with action_runner.CreateInteraction('Animation'):
action_runner.Wait(10)
class ToughTextureUploadCasesPageSet(story.StorySet):
"""
Description: A collection of texture upload performance tests
"""
def __init__(self):
super(ToughTextureUploadCasesPageSet, self).__init__()
urls_list = [
'file://tough_texture_upload_cases/background_color_animation.html',
# pylint: disable=C0301
'file://tough_texture_upload_cases/background_color_animation_and_transform_animation.html',
# pylint: disable=C0301
'file://tough_texture_upload_cases/background_color_animation_with_gradient.html',
# pylint: disable=C0301
'file://tough_texture_upload_cases/background_color_animation_with_gradient_and_transform_animation.html',
'file://tough_texture_upload_cases/small_texture_uploads.html',
'file://tough_texture_upload_cases/medium_texture_uploads.html',
'file://tough_texture_upload_cases/large_texture_uploads.html',
'file://tough_texture_upload_cases/extra_large_texture_uploads.html',
]
for url in urls_list:
self.AddUserStory(ToughTextureUploadCasesPage(url, self))
| bsd-3-clause |
kirca/odoo | addons/mail/ir_attachment.py | 378 | 5643 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2014-TODAY OpenERP SA (http://www.openerp.com)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
import os.path
class IrAttachment(osv.Model):
""" Update partner to add a field about notification preferences """
_name = "ir.attachment"
_inherit = 'ir.attachment'
_fileext_to_type = {
'7z': 'archive',
'aac': 'audio',
'ace': 'archive',
'ai': 'vector',
'aiff': 'audio',
'apk': 'archive',
'app': 'binary',
'as': 'script',
'asf': 'video',
'ass': 'text',
'avi': 'video',
'bat': 'script',
'bin': 'binary',
'bmp': 'image',
'bzip2': 'archive',
'c': 'script',
'cab': 'archive',
'cc': 'script',
'ccd': 'disk',
'cdi': 'disk',
'cdr': 'vector',
'cer': 'certificate',
'cgm': 'vector',
'cmd': 'script',
'coffee': 'script',
'com': 'binary',
'cpp': 'script',
'crl': 'certificate',
'crt': 'certificate',
'cs': 'script',
'csr': 'certificate',
'css': 'html',
'csv': 'spreadsheet',
'cue': 'disk',
'd': 'script',
'dds': 'image',
'deb': 'archive',
'der': 'certificate',
'djvu': 'image',
'dmg': 'archive',
'dng': 'image',
'doc': 'document',
'docx': 'document',
'dvi': 'print',
'eot': 'font',
'eps': 'vector',
'exe': 'binary',
'exr': 'image',
'flac': 'audio',
'flv': 'video',
'gif': 'webimage',
'gz': 'archive',
'gzip': 'archive',
'h': 'script',
'htm': 'html',
'html': 'html',
'ico': 'image',
'icon': 'image',
'img': 'disk',
'iso': 'disk',
'jar': 'archive',
'java': 'script',
'jp2': 'image',
'jpe': 'webimage',
'jpeg': 'webimage',
'jpg': 'webimage',
'jpx': 'image',
'js': 'script',
'key': 'presentation',
'keynote': 'presentation',
'lisp': 'script',
'lz': 'archive',
'lzip': 'archive',
'm': 'script',
'm4a': 'audio',
'm4v': 'video',
'mds': 'disk',
'mdx': 'disk',
'mid': 'audio',
'midi': 'audio',
'mkv': 'video',
'mng': 'image',
'mp2': 'audio',
'mp3': 'audio',
'mp4': 'video',
'mpe': 'video',
'mpeg': 'video',
'mpg': 'video',
'nrg': 'disk',
'numbers': 'spreadsheet',
'odg': 'vector',
'odm': 'document',
'odp': 'presentation',
'ods': 'spreadsheet',
'odt': 'document',
'ogg': 'audio',
'ogm': 'video',
'otf': 'font',
'p12': 'certificate',
'pak': 'archive',
'pbm': 'image',
'pdf': 'print',
'pem': 'certificate',
'pfx': 'certificate',
'pgf': 'image',
'pgm': 'image',
'pk3': 'archive',
'pk4': 'archive',
'pl': 'script',
'png': 'webimage',
'pnm': 'image',
'ppm': 'image',
'pps': 'presentation',
'ppt': 'presentation',
'ps': 'print',
'psd': 'image',
'psp': 'image',
'py': 'script',
'r': 'script',
'ra': 'audio',
'rar': 'archive',
'rb': 'script',
'rpm': 'archive',
'rtf': 'text',
'sh': 'script',
'sub': 'disk',
'svg': 'vector',
'sxc': 'spreadsheet',
'sxd': 'vector',
'tar': 'archive',
'tga': 'image',
'tif': 'image',
'tiff': 'image',
'ttf': 'font',
'txt': 'text',
'vbs': 'script',
'vc': 'spreadsheet',
'vml': 'vector',
'wav': 'audio',
'webp': 'image',
'wma': 'audio',
'wmv': 'video',
'woff': 'font',
'xar': 'vector',
'xbm': 'image',
'xcf': 'image',
'xhtml': 'html',
'xls': 'spreadsheet',
'xlsx': 'spreadsheet',
'xml': 'html',
'zip': 'archive'
}
def get_attachment_type(self, cr, uid, ids, name, args, context=None):
result = {}
for attachment in self.browse(cr, uid, ids, context=context):
fileext = os.path.splitext(attachment.datas_fname or '')[1].lower()[1:]
result[attachment.id] = self._fileext_to_type.get(fileext, 'unknown')
return result
_columns = {
'file_type_icon': fields.function(get_attachment_type, type='char', string='File Type Icon'),
'file_type': fields.related('file_type_icon', type='char'), # FIXME remove in trunk
}
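    # Illustrative sketch (not part of the original model): the extension
    # lookup above is a plain dict access with an 'unknown' fallback, e.g.
    #   IrAttachment._fileext_to_type.get('pdf', 'unknown')  # -> 'print'
    #   IrAttachment._fileext_to_type.get('foo', 'unknown')  # -> 'unknown'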
| agpl-3.0 |
aajanki/youtube-dl | youtube_dl/extractor/myspass.py | 40 | 2670 | from __future__ import unicode_literals
import os.path
from .common import InfoExtractor
from ..compat import (
compat_urllib_parse_urlparse,
)
from ..utils import (
ExtractorError,
)
class MySpassIE(InfoExtractor):
_VALID_URL = r'http://www\.myspass\.de/.*'
_TEST = {
'url': 'http://www.myspass.de/myspass/shows/tvshows/absolute-mehrheit/Absolute-Mehrheit-vom-17022013-Die-Highlights-Teil-2--/11741/',
'md5': '0b49f4844a068f8b33f4b7c88405862b',
'info_dict': {
'id': '11741',
'ext': 'mp4',
"description": "Wer kann in die Fu\u00dfstapfen von Wolfgang Kubicki treten und die Mehrheit der Zuschauer hinter sich versammeln? Wird vielleicht sogar die Absolute Mehrheit geknackt und der Jackpot von 200.000 Euro mit nach Hause genommen?",
"title": "Absolute Mehrheit vom 17.02.2013 - Die Highlights, Teil 2",
},
}
def _real_extract(self, url):
META_DATA_URL_TEMPLATE = 'http://www.myspass.de/myspass/includes/apps/video/getvideometadataxml.php?id=%s'
# video id is the last path element of the URL
# usually there is a trailing slash, so also try the second but last
url_path = compat_urllib_parse_urlparse(url).path
url_parent_path, video_id = os.path.split(url_path)
if not video_id:
_, video_id = os.path.split(url_parent_path)
# get metadata
metadata_url = META_DATA_URL_TEMPLATE % video_id
metadata = self._download_xml(metadata_url, video_id)
# extract values from metadata
url_flv_el = metadata.find('url_flv')
if url_flv_el is None:
raise ExtractorError('Unable to extract download url')
video_url = url_flv_el.text
title_el = metadata.find('title')
if title_el is None:
raise ExtractorError('Unable to extract title')
title = title_el.text
format_id_el = metadata.find('format_id')
if format_id_el is None:
format = 'mp4'
else:
format = format_id_el.text
description_el = metadata.find('description')
if description_el is not None:
description = description_el.text
else:
description = None
imagePreview_el = metadata.find('imagePreview')
if imagePreview_el is not None:
thumbnail = imagePreview_el.text
else:
thumbnail = None
return {
'id': video_id,
'url': video_url,
'title': title,
'format': format,
'thumbnail': thumbnail,
'description': description,
}
| unlicense |
RPGOne/scikit-learn | sklearn/ensemble/__init__.py | 153 | 1382 | """
The :mod:`sklearn.ensemble` module includes ensemble-based methods for
classification, regression and anomaly detection.
"""
from .base import BaseEnsemble
from .forest import RandomForestClassifier
from .forest import RandomForestRegressor
from .forest import RandomTreesEmbedding
from .forest import ExtraTreesClassifier
from .forest import ExtraTreesRegressor
from .bagging import BaggingClassifier
from .bagging import BaggingRegressor
from .iforest import IsolationForest
from .weight_boosting import AdaBoostClassifier
from .weight_boosting import AdaBoostRegressor
from .gradient_boosting import GradientBoostingClassifier
from .gradient_boosting import GradientBoostingRegressor
from .voting_classifier import VotingClassifier
from . import bagging
from . import forest
from . import weight_boosting
from . import gradient_boosting
from . import partial_dependence
__all__ = ["BaseEnsemble",
"RandomForestClassifier", "RandomForestRegressor",
"RandomTreesEmbedding", "ExtraTreesClassifier",
"ExtraTreesRegressor", "BaggingClassifier",
"BaggingRegressor", "IsolationForest", "GradientBoostingClassifier",
"GradientBoostingRegressor", "AdaBoostClassifier",
"AdaBoostRegressor", "VotingClassifier",
"bagging", "forest", "gradient_boosting",
"partial_dependence", "weight_boosting"]
| bsd-3-clause |
zainabg/NOX | src/nox/lib/graph_topology.py | 11 | 4483 | import pydot
import time
import re
import os.path
from pyapps import locator
from pyapps import discovery
from vigil.packet import *
def dot_output(graph, name):
    sfx = name.split('.')[-1]
base = os.path.basename(name)
if sfx == "svg":
svg = graph.create_svg(prog='dot')
# Strip the XML prologue and just return the guts.
return svg[svg.index('<svg'):]
else:
graph.write_jpg(name, prog='dot')
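# Illustrative usage sketch (assumption, not in the original module):
#   g = pydot.graph_from_edges([('s1', 's2')], directed=True)
#   svg_body = dot_output(g, 'pyapps/www/example.svg')  # returns the <svg> fragment
#   dot_output(g, 'pyapps/www/example.jpg')             # writes a JPEG, returns None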
def create_node_graph(output = "jpg"):
nodes = {}
switch_edge_ports = {}
edge_list = []
for link in discovery.adjacency_list:
node1_name = longlong_to_octstr(link[0])[6:].replace(':','')
node2_name = longlong_to_octstr(link[2])[6:].replace(':','')
nodes[node1_name] = True
nodes[node2_name] = True
edge_list.append((node1_name, node2_name))
switch_edge_ports[(node1_name, node2_name)] = (link[1], link[3])
g = pydot.graph_from_edges(edge_list, directed=True)
# for all edge inferred by discovery, set port labels
for linkt in switch_edge_ports:
edgel = g.get_edge(linkt[0], linkt[1])
if type(edgel) != type([]):
edgel.set('headlabel',str(switch_edge_ports[linkt][1]))
edgel.set('taillabel',str(switch_edge_ports[linkt][0]))
else:
for edge in edgel:
edge.set('headlabel',str(switch_edge_ports[linkt][1]))
edge.set('taillabel',str(switch_edge_ports[linkt][0]))
for node in g.get_node_list():
node.set('style', 'filled,setlinewidth(2)')
node.set('fillcolor', '#ffffcc')
node.set('color', '#99cc99')
node.set('tooltip', 'switch')
return dot_output(g, "pyapps/www/discovery."+output)
def create_topology_graph(output = "jpg",
locations = None,
adjacency_list = discovery.adjacency_list):
import pyapps.locator
if not locations:
locations = pyapps.locator.locations
hosts = {}
nodes = {}
locator_edges = {}
switch_edge_ports = {}
edge_list = []
for link in locations:
node1_name = longlong_to_octstr(link[0])[6:].replace(':','.')
node2_name = longlong_to_octstr(link[2])[6:].replace(':','.')
nodes[node1_name] = True
hosts[node2_name] = True
edge_list.append((node1_name, node2_name))
locator_edges[(node1_name, node2_name)] = (link[1], locations[link])
for link in adjacency_list:
node1_name = longlong_to_octstr(link[0])[6:].replace(':','.')
node2_name = longlong_to_octstr(link[2])[6:].replace(':','.')
nodes[node1_name] = True
nodes[node2_name] = True
edge_list.append((node1_name, node2_name))
switch_edge_ports[(node1_name, node2_name)] = (link[1], link[3])
g = pydot.graph_from_edges(edge_list, directed=True)
# for all edges inferred by locator, make them bidirection and
# set their color
for linkt in locator_edges:
edge = g.get_edge(linkt[0], linkt[1])
if type(edge) == type([]):
edge = edge[0]
edge.set('color','blue')
edge.set('dir','both')
edge.set('taillabel',str(locator_edges[linkt][0]))
#edge.set('label',"%2.3f" % (time.time() - locator_edges[linkt][1]))
    # for all edges inferred by discovery, set port labels
for linkt in switch_edge_ports:
edgel = g.get_edge(linkt[0], linkt[1])
if type(edgel) != type([]):
edgel.set('headlabel',str(switch_edge_ports[linkt][1]))
edgel.set('taillabel',str(switch_edge_ports[linkt][0]))
else:
for edge in edgel:
edge.set('headlabel',str(switch_edge_ports[linkt][1]))
edge.set('taillabel',str(switch_edge_ports[linkt][0]))
for node in g.get_node_list():
if not node.get_name() in nodes:
node.set('shape', 'box')
node.set('style', 'rounded,setlinewidth(2)')
node.set('color', 'blue')
node.set('tooltip', 'host')
node.set('URL', '/display_host.mhtml?name='+node.get_name().replace('.',':'))
else:
node.set('style', 'filled,setlinewidth(2)')
node.set('fillcolor', '#ffffcc')
node.set('color', '#99cc99')
node.set('tooltip', 'switch')
return dot_output(g, "pyapps/www/topology."+output)
| gpl-3.0 |
stratosphereips/Manati | manati/api_manager/common/abstracts.py | 1 | 1595 | #
# Copyright (c) 2017 Stratosphere Laboratory.
#
# This file is part of ManaTI Project
# (see <https://stratosphereips.org>). It was created by 'Raul B. Netto <[email protected]>'
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. See the file 'docs/LICENSE' or see <http://www.gnu.org/licenses/>
# for copying permission.
#
from abc import ABCMeta, abstractmethod
class Module(object):
__metaclass__ = ABCMeta
module_name = ''
description = ''
version = ''
authors = []
events = []
def __init__(self):
pass
@abstractmethod
def run(self, *args):
# try:
# self.args = self.parser.parse_args(self.command_line)
# except ArgumentErrorCallback as e:
# self.log(*e.get())
pass
def module_key(self):
return self.module_name + "_" + self.version
def __str__(self):
return "; ".join([self.module_name, ", ".join(self.authors), self.description])
def __getitem__(self, key):
return self.module_name
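# Illustrative sketch (not part of the original file): a minimal concrete
# module; the class name and metadata values below are hypothetical.
class ExampleModule(Module):
    module_name = 'example'
    description = 'Minimal demonstration module'
    version = '0.1'
    authors = ['anonymous']
    def run(self, *args):
        # A real module would perform its analysis here.
        return args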
| agpl-3.0 |
trendelkampschroer/PyEMMA | pyemma/util/tests/__init__.py | 3 | 1434 |
# Copyright (c) 2015, 2014 Computational Molecular Biology Group, Free University
# Berlin, 14195 Berlin, Germany.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation and/or
# other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ``AS IS''
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
__author__ = 'noe'
| bsd-2-clause |
SummerLW/Perf-Insight-Report | telemetry/telemetry/internal/util/global_hooks.py | 18 | 1350 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Hooks that apply globally to all scripts that import or use Telemetry."""
import signal
import sys
from telemetry.internal.util import exception_formatter
def InstallHooks():
InstallUnhandledExceptionFormatter()
InstallStackDumpOnSigusr1()
InstallTerminationHook()
def InstallUnhandledExceptionFormatter():
"""Print prettier exceptions that also contain the stack frame's locals."""
sys.excepthook = exception_formatter.PrintFormattedException
def InstallStackDumpOnSigusr1():
"""Catch SIGUSR1 and print a stack trace."""
# Windows doesn't define SIGUSR1.
if not hasattr(signal, 'SIGUSR1'):
return
def PrintDiagnostics(_, stack_frame):
exception_string = 'SIGUSR1 received, printed stack trace'
exception_formatter.PrintFormattedFrame(stack_frame, exception_string)
signal.signal(signal.SIGUSR1, PrintDiagnostics)
def InstallTerminationHook():
"""Catch SIGTERM, print a stack trace, and exit."""
def PrintStackAndExit(sig, stack_frame):
exception_string = 'Received signal %s, exiting' % sig
exception_formatter.PrintFormattedFrame(stack_frame, exception_string)
sys.exit(-1)
signal.signal(signal.SIGTERM, PrintStackAndExit)
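# Illustrative usage sketch (assumption, not part of the original module):
#   from telemetry.internal.util import global_hooks
#   global_hooks.InstallHooks()  # typically called once during startup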
| bsd-3-clause |
teemulehtinen/a-plus | selenium_test/test/my_ajax_exercise_grader_test.py | 3 | 1797 | import unittest
from page_objects import LoginPage, MyAjaxExerciseGrader
from test_initializer import TestInitializer
class MyAjaxExerciseGraderTest(unittest.TestCase):
def setUp(self):
testInitializer = TestInitializer()
self.driver = testInitializer.getDefaultDriver()
testInitializer.recreateDatabase()
LoginPage(self.driver).loginAsStudent()
def testShouldGiveZeroPointsOnEmptySubmit(self):
myAjaxExercisePage = MyAjaxExerciseGrader(self.driver)
myAjaxExercisePage.submit()
self.assertEqual(myAjaxExercisePage.getAllowedSubmissions(), '1 / 10')
self.assertEqual(myAjaxExercisePage.getExerciseScore(), '0 / 100')
self.assertEqual(myAjaxExercisePage.getNumberOfSubmitters(), '1')
def testShouldGiveGivenPoints(self):
myAjaxExercisePage = MyAjaxExerciseGrader(self.driver)
myAjaxExercisePage.setText("50")
myAjaxExercisePage.submit()
self.assertEqual(myAjaxExercisePage.getAllowedSubmissions(), '1 / 10')
self.assertEqual(myAjaxExercisePage.getExerciseScore(), '50 / 100')
self.assertEqual(myAjaxExercisePage.getNumberOfSubmitters(), '1')
def testShouldGiveZeroPointsOnOverTheLimitSubmit(self):
myAjaxExercisePage = MyAjaxExerciseGrader(self.driver)
myAjaxExercisePage.setText("101")
myAjaxExercisePage.submit()
# Over the limit leaves submission to error state and does not count.
self.assertEqual(myAjaxExercisePage.getAllowedSubmissions(), '0 / 10')
self.assertEqual(myAjaxExercisePage.getExerciseScore(), '0 / 100')
self.assertEqual(myAjaxExercisePage.getNumberOfSubmitters(), '1')
def tearDown(self):
self.driver.quit()
if __name__ == '__main__':
unittest.main(verbosity=2)
| gpl-3.0 |
instantinfrastructure/linux-yocto-3.10 | tools/perf/scripts/python/failed-syscalls-by-pid.py | 11180 | 2058 | # failed system call counts, by pid
# (c) 2010, Tom Zanussi <[email protected]>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide failed system call totals, broken down by pid.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
usage = "perf script -s syscall-counts-by-pid.py [comm|pid]\n";
for_comm = None
for_pid = None
if len(sys.argv) > 2:
sys.exit(usage)
if len(sys.argv) > 1:
try:
for_pid = int(sys.argv[1])
except:
for_comm = sys.argv[1]
syscalls = autodict()
def trace_begin():
print "Press control+C to stop and show the summary"
def trace_end():
print_error_totals()
def raw_syscalls__sys_exit(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
id, ret):
if (for_comm and common_comm != for_comm) or \
(for_pid and common_pid != for_pid ):
return
if ret < 0:
try:
syscalls[common_comm][common_pid][id][ret] += 1
except TypeError:
syscalls[common_comm][common_pid][id][ret] = 1
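# Note (illustrative, not part of the original script): autodict (pulled in
# by the wildcard import from Core above) auto-vivifies intermediate keys,
# so a fresh leaf starts out as an empty autodict; the += above then raises
# TypeError, which is caught to seed the counter at 1. A rough sketch:
#   counts = autodict()
#   try:
#       counts["comm"][42] += 1
#   except TypeError:
#       counts["comm"][42] = 1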
def print_error_totals():
if for_comm is not None:
print "\nsyscall errors for %s:\n\n" % (for_comm),
else:
print "\nsyscall errors:\n\n",
print "%-30s %10s\n" % ("comm [pid]", "count"),
print "%-30s %10s\n" % ("------------------------------", \
"----------"),
comm_keys = syscalls.keys()
for comm in comm_keys:
pid_keys = syscalls[comm].keys()
for pid in pid_keys:
print "\n%s [%d]\n" % (comm, pid),
id_keys = syscalls[comm][pid].keys()
for id in id_keys:
print " syscall: %-16s\n" % syscall_name(id),
ret_keys = syscalls[comm][pid][id].keys()
for ret, val in sorted(syscalls[comm][pid][id].iteritems(), key = lambda(k, v): (v, k), reverse = True):
print " err = %-20s %10d\n" % (strerror(ret), val),
| gpl-2.0 |
kostyll/micropython | tests/run-tests-exp.py | 43 | 2697 | #
# This is a minimal MicroPython variant of the run-tests script, which uses
# .exp files as generated by run-tests --write-exp. It is useful for running
# the testsuite on systems which have neither CPython3 nor a unix shell.
# This script is intended to be run by the same interpreter executable
# which is to be tested, so it should use minimal language functionality.
#
import sys
import _os as os
tests = [
"basics", "micropython", "float", "import", "io",
" misc", "unicode", "extmod", "unix"
]
if sys.platform == 'win32':
MICROPYTHON = "micropython.exe"
else:
MICROPYTHON = "micropython"
def should_skip(test):
    if test.startswith("native"):
        return True
    if test.startswith("viper"):
        return True
    return False
test_count = 0
passed_count = 0
skip_count = 0
for suite in tests:
#print("Running in: %s" % suite)
if sys.platform == 'win32':
# dir /b prints only contained filenames, one on a line
# http://www.microsoft.com/resources/documentation/windows/xp/all/proddocs/en-us/dir.mspx
r = os.system("dir /b %s/*.py >tests.lst" % suite)
else:
r = os.system("ls %s/*.py | xargs -n1 basename >tests.lst" % suite)
assert r == 0
with open("tests.lst") as f:
testcases = f.readlines()
testcases = [l[:-1] for l in testcases]
assert testcases, "No tests found in dir '%s', which is implausible" % suite
#print(testcases)
for t in testcases:
if t == "native_check.py":
continue
qtest = "%s/%s" % (suite, t)
if should_skip(t):
print("skip " + qtest)
skip_count += 1
continue
exp = None
try:
f = open(qtest + ".exp")
exp = f.read()
f.close()
except OSError:
pass
if exp is not None:
#print("run " + qtest)
r = os.system(MICROPYTHON + " %s >.tst.out" % qtest)
if r == 0:
f = open(".tst.out")
out = f.read()
f.close()
else:
out = "CRASH"
if out == "SKIP\n":
print("skip " + qtest)
skip_count += 1
else:
if out == exp:
print("pass " + qtest)
passed_count += 1
else:
print("FAIL " + qtest)
test_count += 1
else:
skip_count += 1
print("%s tests performed" % test_count)
print("%s tests passed" % passed_count)
if test_count != passed_count:
print("%s tests failed" % (test_count - passed_count))
if skip_count:
print("%s tests skipped" % skip_count)
| mit |
HuaweiSwitch/ansible | lib/ansible/modules/commands/shell.py | 17 | 5631 | # There is actually no actual shell module source, when you use 'shell' in ansible,
# it runs the 'command' module with special arguments and it behaves differently.
# See the command source and the comment "#USE_SHELL".
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['stableinterface'],
'supported_by': 'core'}
DOCUMENTATION = '''
---
module: shell
short_description: Execute commands in nodes.
description:
- The C(shell) module takes the command name followed by a list of space-delimited arguments.
It is almost exactly like the M(command) module but runs
the command through a shell (C(/bin/sh)) on the remote node.
version_added: "0.2"
options:
free_form:
description:
- The shell module takes a free form command to run, as a string. There's not an actual
option named "free form". See the examples!
required: true
default: null
creates:
description:
- a filename, when it already exists, this step will B(not) be run.
required: no
default: null
removes:
description:
- a filename, when it does not exist, this step will B(not) be run.
version_added: "0.8"
required: no
default: null
chdir:
description:
- cd into this directory before running the command
required: false
default: null
version_added: "0.6"
executable:
description:
- change the shell used to execute the command. Should be an absolute path to the executable.
required: false
default: null
version_added: "0.9"
warn:
description:
- if command warnings are on in ansible.cfg, do not warn about this particular line if set to no/false.
required: false
default: True
version_added: "1.8"
notes:
- If you want to execute a command securely and predictably, it may be
better to use the M(command) module instead. Best practices when writing
playbooks will follow the trend of using M(command) unless the C(shell)
module is explicitly required. When running ad-hoc commands, use your best
judgement.
- To sanitize any variables passed to the shell module, you should use
"{{ var | quote }}" instead of just "{{ var }}" to make sure they don't include evil things like semicolons.
requirements: [ ]
author:
- Ansible Core Team
- Michael DeHaan
'''
EXAMPLES = '''
- name: Execute the command in remote shell; stdout goes to the specified file on the remote.
shell: somescript.sh >> somelog.txt
- name: Change the working directory to somedir/ before executing the command.
shell: somescript.sh >> somelog.txt
args:
chdir: somedir/
# You can also use the 'args' form to provide the options.
- name: This command will change the working directory to somedir/ and will only run when somedir/somelog.txt doesn't exist.
shell: somescript.sh >> somelog.txt
args:
chdir: somedir/
creates: somelog.txt
- name: Run a command that uses non-posix shell-isms (in this example /bin/sh doesn't handle redirection and wildcards together but bash does)
shell: cat < /tmp/*txt
args:
executable: /bin/bash
- name: Run a command using a templated variable (always use quote filter to avoid injection)
shell: cat {{ myfile|quote }}
# You can use shell to run other executables to perform actions inline
- name: Run expect to wait for a successful PXE boot via out-of-band CIMC
shell: |
set timeout 300
spawn ssh admin@{{ cimc_host }}
expect "password:"
send "{{ cimc_password }}\\n"
expect "\\n{{ cimc_name }}"
send "connect host\\n"
expect "pxeboot.n12"
send "\\n"
exit 0
args:
executable: /usr/bin/expect
delegate_to: localhost
'''
RETURN = '''
msg:
description: changed
returned: always
type: boolean
sample: True
start:
description: The command execution start time
returned: always
type: string
sample: '2016-02-25 09:18:26.429568'
end:
description: The command execution end time
returned: always
type: string
sample: '2016-02-25 09:18:26.755339'
delta:
description: The command execution delta time
returned: always
type: string
sample: '0:00:00.325771'
stdout:
description: The command standard output
returned: always
type: string
sample: 'Clustering node rabbit@slave1 with rabbit@master ...'
stderr:
description: The command standard error
returned: always
type: string
sample: 'ls: cannot access foo: No such file or directory'
cmd:
description: The command executed by the task
returned: always
type: string
sample: 'rabbitmqctl join_cluster rabbit@master'
rc:
description: The command return code (0 means success)
returned: always
type: int
sample: 0
stdout_lines:
description: The command standard output split in lines
returned: always
type: list
sample: [u'Clustering node rabbit@slave1 with rabbit@master ...']
'''
| gpl-3.0 |
ThomasGsp/eyeprox | script/eyeprox_mongodb.py | 1 | 6777 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Author: Tlams
Language: Python
Minimum version required: 3.4
Software version: 2
Function: Generate a central information database for multiple Proxmox clusters.
"""
# Imports
import threading
import time
import web
import configparser
from pathlib import Path
import pymysql
import json
import os
from classe.api import *
from classe.tools import *
from classe.proxmox import *
from classe.eyeprox import *
from classe.pcmredis import *
from classe.update import *
def process(prxcluster_list, cluster_info, mongo, errorfile, timestamp, nongratalist):
json_cluster = {}
url = prxcluster_list[cluster_info]["domain"] + ":" + prxcluster_list[cluster_info]["port"]
cluster = GetInfoProxmox(cluster_info, errorfile)
cluster.get_ticket(url, prxcluster_list[cluster_info]["user"], prxcluster_list[cluster_info]["password"])
cluster.get_nodes(url)
json_cluster[cluster_info] = {}
walkprocess = Walk(cluster, timestamp, mongo)
groupdata = {'group': prxcluster_list[cluster_info]["group"], 'cluster': cluster_info, 'timestamp': timestamp}
mongo.add_group(groupdata)
try:
for node in cluster.nodes['data']:
""" ADD storage informations """
walkprocess.insert_storage(url, node['node'])
""" ADD HYP informations """
walkprocess.insert_hypervisor(url, node['node'], nongratalist)
""" ADD Qemu informations """
walkprocess.insert_qemu(url, node['node'])
except (TypeError, ValueError, AttributeError) as e:
write_error(cluster_info, "General error for %s (%s)" % (cluster_info, e),
errorfile)
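# Illustrative input sketch (values are hypothetical): prxcluster_list maps
# a cluster name to its connection settings, as built in ThreadMain.run():
#   {'cluster1': {'domain': 'https://pve.example.org', 'port': '8006',
#                 'user': 'api@pam', 'password': 'secret', 'group': 'Lab'}}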
class ThreadMain(threading.Thread):
def __init__(self, threadid, name):
threading.Thread.__init__(self)
self.signal = True
self.threadID = threadid
self.threadName = name
self.config = config
self.error_file = config['common']['error_file']
self.conf_proxmox = config['walker']['conf_proxmox']
self.update = config['walker']['update']
self.mongo_ip = config['mongodb']['ip']
self.mongo_port = config['mongodb']['port']
self.mongo_database = config['mongodb']['database']
self.mysql_ip = config['mysql']['ip']
self.mysql_port = config['mysql']['port']
self.mysql_database = config['mysql']['database']
self.mysql_user = config['mysql']['user']
self.mysql_password = config['mysql']['password']
def run(self):
print("Start main process...")
# Read configuration file
proxmox_conf = configparser.ConfigParser()
proxmox_conf.read(self.conf_proxmox)
# Generate dict
prxcluster_list = {}
for each_section in proxmox_conf.sections():
domain = proxmox_conf[each_section]['domain']
port = proxmox_conf[each_section]['port']
user = proxmox_conf[each_section]['user']
password = proxmox_conf[each_section]['password']
group = proxmox_conf[each_section]['group']
if group == "":
group = "Others"
# Convert in dict
prxcluster_list[each_section] = {
"domain": domain,
"port": port,
"user": user,
"password": password,
"group": group
}
# Mongo base connection
mdb = MongoDB(self.mongo_ip, self.mongo_port)
mdb.get_db()
# Open database connection
db = pymysql.connect(self.mysql_ip, self.mysql_user, self.mysql_password, self.mysql_database, int(self.mysql_port))
cursor = db.cursor()
cursor.execute("SELECT `name` FROM nodes WHERE `status` = 1")
nongrata = cursor.fetchall()
nongratalist = []
for srv in nongrata:
nongratalist.append(srv[0])
while self.signal:
# RUN (Multi Thread !)
timestamp = int(time.time())
newentry = {'date': timestamp, 'status': "inprogress"}
mdb.add_newentry(newentry)
list_threads = []
for cluster_info in prxcluster_list:
thc = threading.Thread(name=cluster_info, target=process,
args=(prxcluster_list, cluster_info, mdb, self.error_file, timestamp, nongratalist))
list_threads.append(thc)
thc.start()
# Wait all threads
for thc in list_threads:
thc.join()
dataid = {'date': timestamp}
newdata = {'$set': {'status': 'ok'}}
mdb.update(dataid, newdata)
# Force UPDATE REDIS DATA CACHE
updateredis = Update(config['api']['ip'], config['api']['port'], timestamp)
updateredis.insert()
# Wait next round
for x in range(0, int(self.update)):
if self.signal is True:
time.sleep(1)
else:
print("Stop main process...")
break
def stop(self):
self.signal = False
print("Winting the end of all process...")
class ThreadAPI(threading.Thread):
def __init__(self, threadid, name):
threading.Thread.__init__(self)
self.threadID = threadid
self.threadName = name
self.api_ip = config['api']['ip']
self.api_port = config['api']['port']
self.app = HttpApi(urls, globals())
def run(self):
print("Start API server...")
self.app.run(self.api_ip, self.api_port)
def stop(self):
print("Stop API server...")
self.app.stop()
class HttpApi(web.application):
def run(self, ip="127.0.0.1", port=8080, *middleware):
func = self.wsgifunc(*middleware)
return web.httpserver.runsimple(func, (ip, int(port)))
if __name__ == "__main__":
# URL with automatic routing
# Date / category / target
urls = \
(
'/([0-9]+)/([a-z0-9]+)/(.*)/(.*)', 'GetInfo',
'/([0-9]+)/([a-z0-9]+)/(.*)', 'GetInfo',
'/([0-9]+)/', 'Cagetory',
'/last', 'Last',
'/newvm', 'Newvm',
'/', 'Dates',
)
config = configparser.ConfigParser()
config.read(os.path.join(os.path.dirname(os.path.realpath(__file__)), "config_eyeprox.ini"))
api_th = ThreadAPI(1, "ThreadAPI")
api_th.start()
main_th = ThreadMain(2, "ThreadMain")
main_th.start()
pidfile = Path("/var/run/pcm.pid")
"""
pid = str(os.getpid())
if not os.path.isfile(pidfile):
file(pidfile, 'w').write(pid)
"""
while pidfile.is_file():
time.sleep(1)
main_th.stop()
api_th.stop()
| gpl-3.0 |
liberorbis/libernext | env/lib/python2.7/site-packages/pip/download.py | 61 | 30557 | from __future__ import absolute_import
import cgi
import email.utils
import hashlib
import getpass
import json
import logging
import mimetypes
import os
import platform
import re
import shutil
import sys
import tempfile
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._vendor.six.moves.urllib import request as urllib_request
import pip
from pip.exceptions import InstallationError, HashMismatch
from pip.models import PyPI
from pip.utils import (splitext, rmtree, format_size, display_path,
backup_dir, ask_path_exists, unpack_file)
from pip.utils.filesystem import check_path_owner
from pip.utils.ui import DownloadProgressBar, DownloadProgressSpinner
from pip.locations import write_delete_marker_file
from pip.vcs import vcs
from pip._vendor import requests, six
from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter
from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth
from pip._vendor.requests.models import Response
from pip._vendor.requests.structures import CaseInsensitiveDict
from pip._vendor.requests.packages import urllib3
from pip._vendor.cachecontrol import CacheControlAdapter
from pip._vendor.cachecontrol.caches import FileCache
from pip._vendor.lockfile import LockError
from pip._vendor.six.moves import xmlrpc_client
__all__ = ['get_file_content',
'is_url', 'url_to_path', 'path_to_url',
'is_archive_file', 'unpack_vcs_link',
'unpack_file_url', 'is_vcs_url', 'is_file_url',
'unpack_http_url', 'unpack_url']
logger = logging.getLogger(__name__)
def user_agent():
"""
Return a string representing the user agent.
"""
data = {
"installer": {"name": "pip", "version": pip.__version__},
"python": platform.python_version(),
"implementation": {
"name": platform.python_implementation(),
},
}
if data["implementation"]["name"] == 'CPython':
data["implementation"]["version"] = platform.python_version()
elif data["implementation"]["name"] == 'PyPy':
if sys.pypy_version_info.releaselevel == 'final':
pypy_version_info = sys.pypy_version_info[:3]
else:
pypy_version_info = sys.pypy_version_info
data["implementation"]["version"] = ".".join(
[str(x) for x in pypy_version_info]
)
elif data["implementation"]["name"] == 'Jython':
# Complete Guess
data["implementation"]["version"] = platform.python_version()
elif data["implementation"]["name"] == 'IronPython':
# Complete Guess
data["implementation"]["version"] = platform.python_version()
if sys.platform.startswith("linux"):
distro = dict(filter(
lambda x: x[1],
zip(["name", "version", "id"], platform.linux_distribution()),
))
libc = dict(filter(
lambda x: x[1],
zip(["lib", "version"], platform.libc_ver()),
))
if libc:
distro["libc"] = libc
if distro:
data["distro"] = distro
if sys.platform.startswith("darwin") and platform.mac_ver()[0]:
data["distro"] = {"name": "OS X", "version": platform.mac_ver()[0]}
if platform.system():
data.setdefault("system", {})["name"] = platform.system()
if platform.release():
data.setdefault("system", {})["release"] = platform.release()
if platform.machine():
data["cpu"] = platform.machine()
return "{data[installer][name]}/{data[installer][version]} {json}".format(
data=data,
json=json.dumps(data, separators=(",", ":"), sort_keys=True),
)
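# Illustrative example (not part of the original module): on a Linux CPython
# the string built above comes out roughly as follows -- the exact JSON body
# depends on interpreter, platform and pip version:
#
#     pip/6.0.8 {"cpu":"x86_64","distro":{"name":"Ubuntu","version":"14.04"},
#                "implementation":{"name":"CPython","version":"2.7.6"},
#                "installer":{"name":"pip","version":"6.0.8"},
#                "python":"2.7.6","system":{"name":"Linux","release":"3.13.0"}}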
class MultiDomainBasicAuth(AuthBase):
def __init__(self, prompting=True):
self.prompting = prompting
self.passwords = {}
def __call__(self, req):
parsed = urllib_parse.urlparse(req.url)
# Get the netloc without any embedded credentials
netloc = parsed.netloc.rsplit("@", 1)[-1]
# Set the url of the request to the url without any credentials
req.url = urllib_parse.urlunparse(parsed[:1] + (netloc,) + parsed[2:])
# Use any stored credentials that we have for this netloc
username, password = self.passwords.get(netloc, (None, None))
# Extract credentials embedded in the url if we have none stored
if username is None:
username, password = self.parse_credentials(parsed.netloc)
if username or password:
# Store the username and password
self.passwords[netloc] = (username, password)
# Send the basic auth with this request
req = HTTPBasicAuth(username or "", password or "")(req)
# Attach a hook to handle 401 responses
req.register_hook("response", self.handle_401)
return req
def handle_401(self, resp, **kwargs):
# We only care about 401 responses, anything else we want to just
# pass through the actual response
if resp.status_code != 401:
return resp
        # We are not able to prompt the user so simply return the response
if not self.prompting:
return resp
parsed = urllib_parse.urlparse(resp.url)
# Prompt the user for a new username and password
username = six.moves.input("User for %s: " % parsed.netloc)
password = getpass.getpass("Password: ")
# Store the new username and password to use for future requests
if username or password:
self.passwords[parsed.netloc] = (username, password)
# Consume content and release the original connection to allow our new
# request to reuse the same one.
resp.content
resp.raw.release_conn()
# Add our new username and password to the request
req = HTTPBasicAuth(username or "", password or "")(resp.request)
# Send our new request
new_resp = resp.connection.send(req, **kwargs)
new_resp.history.append(resp)
return new_resp
def parse_credentials(self, netloc):
if "@" in netloc:
userinfo = netloc.rsplit("@", 1)[0]
if ":" in userinfo:
return userinfo.split(":", 1)
return userinfo, None
return None, None
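# Illustrative sketch (not part of the original module): attaching the
# multi-domain handler to a plain requests session; URL and credentials are
# hypothetical placeholders:
#
#     session = requests.Session()
#     session.auth = MultiDomainBasicAuth(prompting=False)
#     session.get("https://user:[email protected]/simple/")
#
# The embedded credentials are stripped from the URL, remembered per netloc,
# and re-sent as a normal Basic auth header on later requests.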
class LocalFSAdapter(BaseAdapter):
def send(self, request, stream=None, timeout=None, verify=None, cert=None,
proxies=None):
pathname = url_to_path(request.url)
resp = Response()
resp.status_code = 200
resp.url = request.url
try:
stats = os.stat(pathname)
except OSError as exc:
resp.status_code = 404
resp.raw = exc
else:
modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
content_type = mimetypes.guess_type(pathname)[0] or "text/plain"
resp.headers = CaseInsensitiveDict({
"Content-Type": content_type,
"Content-Length": stats.st_size,
"Last-Modified": modified,
})
resp.raw = open(pathname, "rb")
resp.close = resp.raw.close
return resp
def close(self):
pass
class SafeFileCache(FileCache):
"""
A file based cache which is safe to use even when the target directory may
not be accessible or writable.
"""
def __init__(self, *args, **kwargs):
super(SafeFileCache, self).__init__(*args, **kwargs)
# Check to ensure that the directory containing our cache directory
        # is owned by the user currently executing pip. If it does not exist
# we will check the parent directory until we find one that does exist.
# If it is not owned by the user executing pip then we will disable
# the cache and log a warning.
if not check_path_owner(self.directory):
logger.warning(
"The directory '%s' or its parent directory is not owned by "
"the current user and the cache has been disabled. Please "
"check the permissions and owner of that directory. If "
"executing pip with sudo, you may want the -H flag.",
self.directory,
)
# Set our directory to None to disable the Cache
self.directory = None
def get(self, *args, **kwargs):
# If we don't have a directory, then the cache should be a no-op.
if self.directory is None:
return
try:
return super(SafeFileCache, self).get(*args, **kwargs)
except (LockError, OSError, IOError):
# We intentionally silence this error, if we can't access the cache
# then we can just skip caching and process the request as if
# caching wasn't enabled.
pass
def set(self, *args, **kwargs):
# If we don't have a directory, then the cache should be a no-op.
if self.directory is None:
return
try:
return super(SafeFileCache, self).set(*args, **kwargs)
except (LockError, OSError, IOError):
# We intentionally silence this error, if we can't access the cache
# then we can just skip caching and process the request as if
# caching wasn't enabled.
pass
def delete(self, *args, **kwargs):
# If we don't have a directory, then the cache should be a no-op.
if self.directory is None:
return
try:
return super(SafeFileCache, self).delete(*args, **kwargs)
except (LockError, OSError, IOError):
# We intentionally silence this error, if we can't access the cache
# then we can just skip caching and process the request as if
# caching wasn't enabled.
pass
class InsecureHTTPAdapter(HTTPAdapter):
def cert_verify(self, conn, url, verify, cert):
conn.cert_reqs = 'CERT_NONE'
conn.ca_certs = None
class PipSession(requests.Session):
timeout = None
def __init__(self, *args, **kwargs):
retries = kwargs.pop("retries", 0)
cache = kwargs.pop("cache", None)
insecure_hosts = kwargs.pop("insecure_hosts", [])
super(PipSession, self).__init__(*args, **kwargs)
# Attach our User Agent to the request
self.headers["User-Agent"] = user_agent()
# Attach our Authentication handler to the session
self.auth = MultiDomainBasicAuth()
# Create our urllib3.Retry instance which will allow us to customize
# how we handle retries.
retries = urllib3.Retry(
# Set the total number of retries that a particular request can
# have.
total=retries,
# A 503 error from PyPI typically means that the Fastly -> Origin
            # connection got interrupted in some way. A 503 error in general
# is typically considered a transient error so we'll go ahead and
# retry it.
status_forcelist=[503],
# Add a small amount of back off between failed requests in
# order to prevent hammering the service.
backoff_factor=0.25,
)
# We want to _only_ cache responses on securely fetched origins. We do
# this because we can't validate the response of an insecurely fetched
# origin, and we don't want someone to be able to poison the cache and
        # require manual eviction from the cache to fix it.
if cache:
secure_adapter = CacheControlAdapter(
cache=SafeFileCache(cache),
max_retries=retries,
)
else:
secure_adapter = HTTPAdapter(max_retries=retries)
# Our Insecure HTTPAdapter disables HTTPS validation. It does not
# support caching (see above) so we'll use it for all http:// URLs as
# well as any https:// host that we've marked as ignoring TLS errors
# for.
insecure_adapter = InsecureHTTPAdapter(max_retries=retries)
self.mount("https://", secure_adapter)
self.mount("http://", insecure_adapter)
# Enable file:// urls
self.mount("file://", LocalFSAdapter())
# We want to use a non-validating adapter for any requests which are
# deemed insecure.
for host in insecure_hosts:
self.mount("https://{0}/".format(host), insecure_adapter)
def request(self, method, url, *args, **kwargs):
# Allow setting a default timeout on a session
kwargs.setdefault("timeout", self.timeout)
# Dispatch the actual request
return super(PipSession, self).request(method, url, *args, **kwargs)
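# Illustrative sketch (not part of the original module): wiring up a session
# with retries, a local HTTP cache and a host exempted from TLS validation;
# the cache path and internal hostname are hypothetical placeholders:
#
#     session = PipSession(retries=3, cache="/tmp/pip-http-cache",
#                          insecure_hosts=["pypi.internal.example"])
#     session.timeout = 15
#     resp = session.get("https://pypi.python.org/simple/")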
def get_file_content(url, comes_from=None, session=None):
"""Gets the content of a file; it may be a filename, file: URL, or
http: URL. Returns (location, content). Content is unicode."""
if session is None:
raise TypeError(
"get_file_content() missing 1 required keyword argument: 'session'"
)
match = _scheme_re.search(url)
if match:
scheme = match.group(1).lower()
if (scheme == 'file' and comes_from
and comes_from.startswith('http')):
raise InstallationError(
'Requirements file %s references URL %s, which is local'
% (comes_from, url))
if scheme == 'file':
path = url.split(':', 1)[1]
path = path.replace('\\', '/')
match = _url_slash_drive_re.match(path)
if match:
path = match.group(1) + ':' + path.split('|', 1)[1]
path = urllib_parse.unquote(path)
if path.startswith('/'):
path = '/' + path.lstrip('/')
url = path
else:
# FIXME: catch some errors
resp = session.get(url)
resp.raise_for_status()
if six.PY3:
return resp.url, resp.text
else:
return resp.url, resp.content
try:
with open(url) as f:
content = f.read()
except IOError as exc:
raise InstallationError(
'Could not open requirements file: %s' % str(exc)
)
return url, content
_scheme_re = re.compile(r'^(http|https|file):', re.I)
_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I)
def is_url(name):
"""Returns true if the name looks like a URL"""
if ':' not in name:
return False
scheme = name.split(':', 1)[0].lower()
return scheme in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes
def url_to_path(url):
"""
Convert a file: URL to a path.
"""
assert url.startswith('file:'), (
"You can only turn file: urls into filenames (not %r)" % url)
_, netloc, path, _, _ = urllib_parse.urlsplit(url)
# if we have a UNC path, prepend UNC share notation
if netloc:
netloc = '\\\\' + netloc
path = urllib_request.url2pathname(netloc + path)
return path
def path_to_url(path):
"""
Convert a path to a file: URL. The path will be made absolute and have
quoted path parts.
"""
path = os.path.normpath(os.path.abspath(path))
url = urllib_parse.urljoin('file:', urllib_request.pathname2url(path))
return url
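# Illustrative round trip (not part of the original module), POSIX paths:
#
#     path_to_url('/tmp/pkg')         # -> 'file:///tmp/pkg'
#     url_to_path('file:///tmp/pkg')  # -> '/tmp/pkg'
#
# On Windows, url_to_path() additionally restores UNC share notation.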
def is_archive_file(name):
"""Return True if `name` is a considered as an archive file."""
archives = (
'.zip', '.tar.gz', '.tar.bz2', '.tgz', '.tar', '.whl'
)
ext = splitext(name)[1].lower()
if ext in archives:
return True
return False
def unpack_vcs_link(link, location, only_download=False):
vcs_backend = _get_used_vcs_backend(link)
if only_download:
vcs_backend.export(location)
else:
vcs_backend.unpack(location)
def _get_used_vcs_backend(link):
for backend in vcs.backends:
if link.scheme in backend.schemes:
vcs_backend = backend(link.url)
return vcs_backend
def is_vcs_url(link):
return bool(_get_used_vcs_backend(link))
def is_file_url(link):
return link.url.lower().startswith('file:')
def _check_hash(download_hash, link):
if download_hash.digest_size != hashlib.new(link.hash_name).digest_size:
logger.critical(
"Hash digest size of the package %d (%s) doesn't match the "
"expected hash name %s!",
download_hash.digest_size, link, link.hash_name,
)
raise HashMismatch('Hash name mismatch for package %s' % link)
if download_hash.hexdigest() != link.hash:
logger.critical(
"Hash of the package %s (%s) doesn't match the expected hash %s!",
link, download_hash.hexdigest(), link.hash,
)
raise HashMismatch(
'Bad %s hash for package %s' % (link.hash_name, link)
)
def _get_hash_from_file(target_file, link):
try:
download_hash = hashlib.new(link.hash_name)
except (ValueError, TypeError):
logger.warning(
"Unsupported hash name %s for package %s", link.hash_name, link,
)
return None
with open(target_file, 'rb') as fp:
while True:
chunk = fp.read(4096)
if not chunk:
break
download_hash.update(chunk)
return download_hash
def _download_url(resp, link, content_file):
download_hash = None
if link.hash and link.hash_name:
try:
download_hash = hashlib.new(link.hash_name)
except ValueError:
logger.warning(
"Unsupported hash name %s for package %s",
link.hash_name, link,
)
try:
total_length = int(resp.headers['content-length'])
except (ValueError, KeyError, TypeError):
total_length = 0
cached_resp = getattr(resp, "from_cache", False)
if logger.getEffectiveLevel() > logging.INFO:
show_progress = False
elif cached_resp:
show_progress = False
elif total_length > (40 * 1000):
show_progress = True
elif not total_length:
show_progress = True
else:
show_progress = False
show_url = link.show_url
def resp_read(chunk_size):
try:
# Special case for urllib3.
for chunk in resp.raw.stream(
chunk_size,
# We use decode_content=False here because we do
# want urllib3 to mess with the raw bytes we get
# from the server. If we decompress inside of
# urllib3 then we cannot verify the checksum
# because the checksum will be of the compressed
# file. This breakage will only occur if the
# server adds a Content-Encoding header, which
# depends on how the server was configured:
# - Some servers will notice that the file isn't a
# compressible file and will leave the file alone
# and with an empty Content-Encoding
# - Some servers will notice that the file is
# already compressed and will leave the file
# alone and will add a Content-Encoding: gzip
# header
# - Some servers won't notice anything at all and
# will take a file that's already been compressed
# and compress it again and set the
# Content-Encoding: gzip header
#
# By setting this not to decode automatically we
# hope to eliminate problems with the second case.
decode_content=False):
yield chunk
except AttributeError:
# Standard file-like object.
while True:
chunk = resp.raw.read(chunk_size)
if not chunk:
break
yield chunk
progress_indicator = lambda x, *a, **k: x
if link.netloc == PyPI.netloc:
url = show_url
else:
url = link.url_without_fragment
if show_progress: # We don't show progress on cached responses
if total_length:
logger.info(
"Downloading %s (%s)", url, format_size(total_length),
)
progress_indicator = DownloadProgressBar(
max=total_length,
).iter
else:
logger.info("Downloading %s", url)
progress_indicator = DownloadProgressSpinner().iter
elif cached_resp:
logger.info("Using cached %s", url)
else:
logger.info("Downloading %s", url)
logger.debug('Downloading from URL %s', link)
for chunk in progress_indicator(resp_read(4096), 4096):
if download_hash is not None:
download_hash.update(chunk)
content_file.write(chunk)
if link.hash and link.hash_name:
_check_hash(download_hash, link)
return download_hash
def _copy_file(filename, location, content_type, link):
copy = True
download_location = os.path.join(location, link.filename)
if os.path.exists(download_location):
response = ask_path_exists(
'The file %s exists. (i)gnore, (w)ipe, (b)ackup ' %
display_path(download_location), ('i', 'w', 'b'))
if response == 'i':
copy = False
elif response == 'w':
logger.warning('Deleting %s', display_path(download_location))
os.remove(download_location)
elif response == 'b':
dest_file = backup_dir(download_location)
logger.warning(
'Backing up %s to %s',
display_path(download_location),
display_path(dest_file),
)
shutil.move(download_location, dest_file)
if copy:
shutil.copy(filename, download_location)
logger.info('Saved %s', display_path(download_location))
def unpack_http_url(link, location, download_dir=None, session=None):
if session is None:
raise TypeError(
"unpack_http_url() missing 1 required keyword argument: 'session'"
)
temp_dir = tempfile.mkdtemp('-unpack', 'pip-')
# If a download dir is specified, is the file already downloaded there?
already_downloaded_path = None
if download_dir:
already_downloaded_path = _check_download_dir(link, download_dir)
if already_downloaded_path:
from_path = already_downloaded_path
content_type = mimetypes.guess_type(from_path)[0]
else:
# let's download to a tmp dir
from_path, content_type = _download_http_url(link, session, temp_dir)
# unpack the archive to the build dir location. even when only downloading
# archives, they have to be unpacked to parse dependencies
unpack_file(from_path, location, content_type, link)
# a download dir is specified; let's copy the archive there
if download_dir and not already_downloaded_path:
_copy_file(from_path, download_dir, content_type, link)
if not already_downloaded_path:
os.unlink(from_path)
rmtree(temp_dir)
def unpack_file_url(link, location, download_dir=None):
"""Unpack link into location.
If download_dir is provided and link points to a file, make a copy
of the link file inside download_dir."""
link_path = url_to_path(link.url_without_fragment)
# If it's a url to a local directory
if os.path.isdir(link_path):
if os.path.isdir(location):
rmtree(location)
shutil.copytree(link_path, location, symlinks=True)
if download_dir:
logger.info('Link is a directory, ignoring download_dir')
return
# if link has a hash, let's confirm it matches
if link.hash:
link_path_hash = _get_hash_from_file(link_path, link)
_check_hash(link_path_hash, link)
# If a download dir is specified, is the file already there and valid?
already_downloaded_path = None
if download_dir:
already_downloaded_path = _check_download_dir(link, download_dir)
if already_downloaded_path:
from_path = already_downloaded_path
else:
from_path = link_path
content_type = mimetypes.guess_type(from_path)[0]
# unpack the archive to the build dir location. even when only downloading
# archives, they have to be unpacked to parse dependencies
unpack_file(from_path, location, content_type, link)
# a download dir is specified and not already downloaded
if download_dir and not already_downloaded_path:
_copy_file(from_path, download_dir, content_type, link)
class PipXmlrpcTransport(xmlrpc_client.Transport):
"""Provide a `xmlrpclib.Transport` implementation via a `PipSession`
object.
"""
def __init__(self, index_url, session, use_datetime=False):
xmlrpc_client.Transport.__init__(self, use_datetime)
index_parts = urllib_parse.urlparse(index_url)
self._scheme = index_parts.scheme
self._session = session
def request(self, host, handler, request_body, verbose=False):
parts = (self._scheme, host, handler, None, None, None)
url = urllib_parse.urlunparse(parts)
try:
headers = {'Content-Type': 'text/xml'}
response = self._session.post(url, data=request_body,
headers=headers, stream=True)
response.raise_for_status()
self.verbose = verbose
return self.parse_response(response.raw)
except requests.HTTPError as exc:
logger.critical(
"HTTP error %s while getting %s",
exc.response.status_code, url,
)
raise
def unpack_url(link, location, download_dir=None,
only_download=False, session=None):
"""Unpack link.
If link is a VCS link:
if only_download, export into download_dir and ignore location
else unpack into location
for other types of link:
- unpack into location
- if download_dir, copy the file into download_dir
- if only_download, mark location for deletion
"""
# non-editable vcs urls
if is_vcs_url(link):
unpack_vcs_link(link, location, only_download)
# file urls
elif is_file_url(link):
unpack_file_url(link, location, download_dir)
if only_download:
write_delete_marker_file(location)
# http urls
else:
if session is None:
session = PipSession()
unpack_http_url(
link,
location,
download_dir,
session,
)
if only_download:
write_delete_marker_file(location)
def _download_http_url(link, session, temp_dir):
"""Download link url into temp_dir using provided session"""
target_url = link.url.split('#', 1)[0]
try:
resp = session.get(
target_url,
# We use Accept-Encoding: identity here because requests
# defaults to accepting compressed responses. This breaks in
# a variety of ways depending on how the server is configured.
# - Some servers will notice that the file isn't a compressible
# file and will leave the file alone and with an empty
# Content-Encoding
# - Some servers will notice that the file is already
# compressed and will leave the file alone and will add a
# Content-Encoding: gzip header
# - Some servers won't notice anything at all and will take
# a file that's already been compressed and compress it again
# and set the Content-Encoding: gzip header
            # By setting this to request only the identity encoding we're
# hoping to eliminate the third case. Hopefully there does not
# exist a server which when given a file will notice it is
# already compressed and that you're not asking for a
# compressed file and will then decompress it before sending
# because if that's the case I don't think it'll ever be
# possible to make this work.
headers={"Accept-Encoding": "identity"},
stream=True,
)
resp.raise_for_status()
except requests.HTTPError as exc:
logger.critical(
"HTTP error %s while getting %s", exc.response.status_code, link,
)
raise
content_type = resp.headers.get('content-type', '')
filename = link.filename # fallback
# Have a look at the Content-Disposition header for a better guess
content_disposition = resp.headers.get('content-disposition')
if content_disposition:
type, params = cgi.parse_header(content_disposition)
# We use ``or`` here because we don't want to use an "empty" value
# from the filename param.
filename = params.get('filename') or filename
ext = splitext(filename)[1]
if not ext:
ext = mimetypes.guess_extension(content_type)
if ext:
filename += ext
if not ext and link.url != resp.url:
ext = os.path.splitext(resp.url)[1]
if ext:
filename += ext
file_path = os.path.join(temp_dir, filename)
with open(file_path, 'wb') as content_file:
_download_url(resp, link, content_file)
return file_path, content_type
def _check_download_dir(link, download_dir):
""" Check download_dir for previously downloaded file with correct hash
If a correct file is found return its path else None
"""
download_path = os.path.join(download_dir, link.filename)
if os.path.exists(download_path):
# If already downloaded, does its hash match?
logger.info('File was already downloaded %s', download_path)
if link.hash:
download_hash = _get_hash_from_file(download_path, link)
try:
_check_hash(download_hash, link)
except HashMismatch:
logger.warning(
'Previously-downloaded file %s has bad hash, '
're-downloading.',
download_path
)
os.unlink(download_path)
return None
return download_path
return None
| gpl-2.0 |
ShwoognationHQ/bitcoin | qa/rpc-tests/test_framework/blockstore.py | 98 | 4096 | # BlockStore: a helper class that keeps a map of blocks and implements
# helper functions for responding to getheaders and getdata,
# and for constructing a getheaders message
#
from mininode import *
import dbm
class BlockStore(object):
def __init__(self, datadir):
self.blockDB = dbm.open(datadir + "/blocks", 'c')
self.currentBlock = 0L
def close(self):
self.blockDB.close()
def get(self, blockhash):
serialized_block = None
try:
serialized_block = self.blockDB[repr(blockhash)]
except KeyError:
return None
f = cStringIO.StringIO(serialized_block)
ret = CBlock()
ret.deserialize(f)
ret.calc_sha256()
return ret
# Note: this pulls full blocks out of the database just to retrieve
# the headers -- perhaps we could keep a separate data structure
# to avoid this overhead.
def headers_for(self, locator, hash_stop, current_tip=None):
if current_tip is None:
current_tip = self.currentBlock
current_block = self.get(current_tip)
if current_block is None:
return None
response = msg_headers()
headersList = [ CBlockHeader(current_block) ]
maxheaders = 2000
while (headersList[0].sha256 not in locator.vHave):
prevBlockHash = headersList[0].hashPrevBlock
prevBlock = self.get(prevBlockHash)
if prevBlock is not None:
headersList.insert(0, CBlockHeader(prevBlock))
else:
break
headersList = headersList[:maxheaders] # truncate if we have too many
hashList = [x.sha256 for x in headersList]
index = len(headersList)
if (hash_stop in hashList):
index = hashList.index(hash_stop)+1
response.headers = headersList[:index]
return response
def add_block(self, block):
block.calc_sha256()
try:
self.blockDB[repr(block.sha256)] = bytes(block.serialize())
except TypeError as e:
print "Unexpected error: ", sys.exc_info()[0], e.args
self.currentBlock = block.sha256
def get_blocks(self, inv):
responses = []
for i in inv:
if (i.type == 2): # MSG_BLOCK
block = self.get(i.hash)
if block is not None:
responses.append(msg_block(block))
return responses
def get_locator(self, current_tip=None):
if current_tip is None:
current_tip = self.currentBlock
r = []
counter = 0
step = 1
lastBlock = self.get(current_tip)
while lastBlock is not None:
r.append(lastBlock.hashPrevBlock)
for i in range(step):
lastBlock = self.get(lastBlock.hashPrevBlock)
if lastBlock is None:
break
counter += 1
if counter > 10:
step *= 2
locator = CBlockLocator()
locator.vHave = r
return locator
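# Illustrative sketch (not part of the original helper): a test would persist
# mined blocks and answer getheaders from the store; the datadir and block
# object are hypothetical:
#
#     store = BlockStore("/tmp/testdata")
#     store.add_block(block)                    # block becomes the new tip
#     locator = store.get_locator()             # CBlockLocator for getheaders
#     headers = store.headers_for(locator, 0L)  # msg_headers reply (or None)
#     store.close()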
class TxStore(object):
def __init__(self, datadir):
self.txDB = dbm.open(datadir + "/transactions", 'c')
def close(self):
self.txDB.close()
def get(self, txhash):
serialized_tx = None
try:
serialized_tx = self.txDB[repr(txhash)]
except KeyError:
return None
f = cStringIO.StringIO(serialized_tx)
ret = CTransaction()
ret.deserialize(f)
ret.calc_sha256()
return ret
def add_transaction(self, tx):
tx.calc_sha256()
try:
self.txDB[repr(tx.sha256)] = bytes(tx.serialize())
except TypeError as e:
print "Unexpected error: ", sys.exc_info()[0], e.args
def get_transactions(self, inv):
responses = []
for i in inv:
if (i.type == 1): # MSG_TX
tx = self.get(i.hash)
if tx is not None:
responses.append(msg_tx(tx))
return responses
| mit |
amishb/youtube-dl | youtube_dl/extractor/telemb.py | 177 | 2964 | # coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import remove_start
class TeleMBIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?telemb\.be/(?P<display_id>.+?)_d_(?P<id>\d+)\.html'
_TESTS = [
{
'url': 'http://www.telemb.be/mons-cook-with-danielle-des-cours-de-cuisine-en-anglais-_d_13466.html',
'md5': 'f45ea69878516ba039835794e0f8f783',
'info_dict': {
'id': '13466',
'display_id': 'mons-cook-with-danielle-des-cours-de-cuisine-en-anglais-',
'ext': 'mp4',
'title': 'Mons - Cook with Danielle : des cours de cuisine en anglais ! - Les reportages',
'description': 'md5:bc5225f47b17c309761c856ad4776265',
'thumbnail': 're:^http://.*\.(?:jpg|png)$',
}
},
{
# non-ASCII characters in download URL
'url': 'http://telemb.be/les-reportages-havre-incendie-mortel_d_13514.html',
'md5': '6e9682736e5ccd4eab7f21e855350733',
'info_dict': {
'id': '13514',
'display_id': 'les-reportages-havre-incendie-mortel',
'ext': 'mp4',
'title': 'Havré - Incendie mortel - Les reportages',
'description': 'md5:5e54cb449acb029c2b7734e2d946bd4a',
'thumbnail': 're:^http://.*\.(?:jpg|png)$',
}
},
]
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
display_id = mobj.group('display_id')
webpage = self._download_webpage(url, display_id)
formats = []
for video_url in re.findall(r'file\s*:\s*"([^"]+)"', webpage):
fmt = {
'url': video_url,
'format_id': video_url.split(':')[0]
}
rtmp = re.search(r'^(?P<url>rtmp://[^/]+/(?P<app>.+))/(?P<playpath>mp4:.+)$', video_url)
if rtmp:
fmt.update({
'play_path': rtmp.group('playpath'),
'app': rtmp.group('app'),
'player_url': 'http://p.jwpcdn.com/6/10/jwplayer.flash.swf',
'page_url': 'http://www.telemb.be',
'preference': -1,
})
formats.append(fmt)
self._sort_formats(formats)
title = remove_start(self._og_search_title(webpage), 'TéléMB : ')
description = self._html_search_regex(
r'<meta property="og:description" content="(.+?)" />',
webpage, 'description', fatal=False)
thumbnail = self._og_search_thumbnail(webpage)
return {
'id': video_id,
'display_id': display_id,
'title': title,
'description': description,
'thumbnail': thumbnail,
'formats': formats,
}
| unlicense |
OBA-code/tgstation | tools/HitboxExpander/hitbox_expander.py | 206 | 2697 | import os
import sys
import inspect
import shutil
def AddToPath(path):
if path not in sys.path:
sys.path.insert(0, path)
delimeter = ':' if os.name == "posix" else ";"
os.environ['PATH'] = path + delimeter + os.environ['PATH']
current_dir = os.path.split(inspect.getfile(inspect.currentframe()))[0]
AddToPath(os.path.abspath(os.path.join(current_dir, "third_party/Imaging-1.1.7/PIL")))
AddToPath(os.path.abspath(os.path.join(current_dir, "third_party/zlib")))
import Image
import _imaging
def PngSave(im, file):
# From http://blog.client9.com/2007/08/28/python-pil-and-png-metadata-take-2.html
# these can be automatically added to Image.info dict
# they are not user-added metadata
reserved = ('interlace', 'gamma', 'dpi', 'transparency', 'aspect')
# undocumented class
import PngImagePlugin
meta = PngImagePlugin.PngInfo()
# copy metadata into new object
for k,v in im.info.iteritems():
if k in reserved: continue
meta.add_text(k, v, 0)
# and save
im.save(file, "PNG", pnginfo=meta)
def ProcessFile(path):
name, ext = os.path.splitext(path)
ext = ext.lower()
if (ext != ".dmi" and ext != ".png") or os.path.splitext(name)[1] == ".new":
return
try:
im = Image.open(path)
print name + ": " + im.format, im.size, im.mode
if im.mode != "RGBA":
return
width, height = im.size
pix = im.load()
n_transparent = 0
make_opaque = []
def add(x, y):
if pix[x, y][3] == 0:
make_opaque.append((x, y))
for x in range(0, width):
for y in range(0, height):
if pix[x, y][3] > 0:
if x > 0:
add(x - 1, y)
if x < width - 1:
add(x + 1, y)
if y > 0:
add(x, y - 1)
if y < height - 1:
add(x, y + 1)
else:
n_transparent += 1
for coords in make_opaque:
pix[coords] = (0, 0, 0, 1)
PngSave(im, path)
    except Exception as e:
        # Avoid a bare except (it would also swallow KeyboardInterrupt).
        print "Could not process " + name + ": " + str(e)
root_dir = os.path.abspath(os.path.join(current_dir, "../../"))
icons_dir = os.path.join(root_dir, "icons")
def Main():
if len(sys.argv) != 2:
print "Usage: hitbox_expander.py filename.dmi"
return 0
try:
with open(sys.argv[1]):
ProcessFile(os.path.abspath(sys.argv[1]))
return 0
except IOError:
pass
for root, subdirs, files in os.walk(icons_dir):
for file in files:
if file == sys.argv[1]:
path = os.path.join(root, file)
ProcessFile(path)
return 0
print "File not found: " + sys.argv[1]
if __name__ == "__main__":
Main()
| agpl-3.0 |
SANBI-SA/tools-iuc | data_managers/data_manager_fetch_ncbi_taxonomy/data_manager/data_manager.py | 8 | 2307 | import argparse
import datetime
import json
import os
import shutil
import tarfile
import zipfile
try:
# For Python 3.0 and later
from urllib.request import Request, urlopen
except ImportError:
# Fall back to Python 2 imports
from urllib2 import Request, urlopen
def url_download(url, workdir):
file_path = os.path.join(workdir, 'download.dat')
if not os.path.exists(workdir):
os.makedirs(workdir)
src = None
dst = None
try:
req = Request(url)
src = urlopen(req)
with open(file_path, 'wb') as dst:
while True:
chunk = src.read(2**10)
if chunk:
dst.write(chunk)
else:
break
finally:
if src:
src.close()
if tarfile.is_tarfile(file_path):
fh = tarfile.open(file_path, 'r:*')
elif zipfile.is_zipfile(file_path):
fh = zipfile.ZipFile(file_path, 'r')
else:
return
fh.extractall(workdir)
os.remove(file_path)
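# Illustrative sketch (not part of the original data manager): fetching the
# default NCBI dump into a scratch directory; the path is a hypothetical
# placeholder:
#
#     url_download('ftp://ftp.ncbi.nih.gov/pub/taxonomy/taxdump.tar.gz',
#                  '/tmp/taxonomy')
#     # /tmp/taxonomy now holds the extracted dump (names.dmp, nodes.dmp, ...)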
def main(args):
workdir = os.path.join(os.getcwd(), 'taxonomy')
url_download(args.url, workdir)
data_manager_entry = {}
data_manager_entry['value'] = args.name.lower()
data_manager_entry['name'] = args.name
data_manager_entry['path'] = '.'
data_manager_json = dict(data_tables=dict(ncbi_taxonomy=data_manager_entry))
params = json.loads(open(args.output).read())
target_directory = params['output_data'][0]['extra_files_path']
os.mkdir(target_directory)
output_path = os.path.abspath(os.path.join(os.getcwd(), 'taxonomy'))
for filename in os.listdir(workdir):
shutil.move(os.path.join(output_path, filename), target_directory)
    with open(args.output, 'w') as fh:
        fh.write(json.dumps(data_manager_json))
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Create data manager json.')
parser.add_argument('--out', dest='output', action='store', help='JSON filename')
parser.add_argument('--name', dest='name', action='store', default=str(datetime.date.today()), help='Data table entry unique ID')
parser.add_argument('--url', dest='url', action='store', default='ftp://ftp.ncbi.nih.gov/pub/taxonomy/taxdump.tar.gz', help='Download URL')
args = parser.parse_args()
main(args)
| mit |
johnwlockwood/appengine-mapreduce | python/test/mapreduce/operation/db_test.py | 4 | 1781 | #!/usr/bin/env python
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from testlib import mox
import unittest
from mapreduce import context
from mapreduce import operation as op
class TestEntity(object):
"""Test entity class."""
class PutTest(unittest.TestCase):
"""Test Put operation."""
def testPut(self):
"""Test applying Put operation."""
m = mox.Mox()
ctx = context.Context(None, None)
ctx.mutation_pool = m.CreateMock(context.MutationPool)
entity = TestEntity()
operation = op.db.Put(entity)
# Record calls
ctx.mutation_pool.put(entity)
m.ReplayAll()
try: # test, verify
operation(ctx)
m.VerifyAll()
finally:
m.UnsetStubs()
class DeleteTest(unittest.TestCase):
"""Test Delete operation."""
def testDelete(self):
"""Test applying Delete operation."""
m = mox.Mox()
ctx = context.Context(None, None)
ctx.mutation_pool = m.CreateMock(context.MutationPool)
entity = TestEntity()
operation = op.db.Delete(entity)
# Record calls
ctx.mutation_pool.delete(entity)
m.ReplayAll()
try: # test, verify
operation(ctx)
m.VerifyAll()
finally:
m.UnsetStubs()
if __name__ == '__main__':
unittest.main()
| apache-2.0 |
shuggiefisher/potato | django/contrib/formtools/preview.py | 229 | 6829 | """
Formtools Preview application.
"""
import cPickle as pickle
from django.conf import settings
from django.http import Http404
from django.shortcuts import render_to_response
from django.template.context import RequestContext
from django.utils.hashcompat import md5_constructor
from django.utils.crypto import constant_time_compare
from django.contrib.formtools.utils import security_hash
AUTO_ID = 'formtools_%s' # Each form here uses this as its auto_id parameter.
class FormPreview(object):
preview_template = 'formtools/preview.html'
form_template = 'formtools/form.html'
# METHODS SUBCLASSES SHOULDN'T OVERRIDE ###################################
def __init__(self, form):
# form should be a Form class, not an instance.
self.form, self.state = form, {}
def __call__(self, request, *args, **kwargs):
stage = {'1': 'preview', '2': 'post'}.get(request.POST.get(self.unused_name('stage')), 'preview')
self.parse_params(*args, **kwargs)
try:
method = getattr(self, stage + '_' + request.method.lower())
except AttributeError:
raise Http404
return method(request)
def unused_name(self, name):
"""
Given a first-choice name, adds an underscore to the name until it
reaches a name that isn't claimed by any field in the form.
This is calculated rather than being hard-coded so that no field names
are off-limits for use in the form.
"""
while 1:
try:
f = self.form.base_fields[name]
except KeyError:
break # This field name isn't being used by the form.
name += '_'
return name
def preview_get(self, request):
"Displays the form"
f = self.form(auto_id=self.get_auto_id(), initial=self.get_initial(request))
return render_to_response(self.form_template,
self.get_context(request, f),
context_instance=RequestContext(request))
def preview_post(self, request):
"Validates the POST data. If valid, displays the preview page. Else, redisplays form."
f = self.form(request.POST, auto_id=self.get_auto_id())
context = self.get_context(request, f)
if f.is_valid():
self.process_preview(request, f, context)
context['hash_field'] = self.unused_name('hash')
context['hash_value'] = self.security_hash(request, f)
return render_to_response(self.preview_template, context, context_instance=RequestContext(request))
else:
return render_to_response(self.form_template, context, context_instance=RequestContext(request))
def _check_security_hash(self, token, request, form):
expected = self.security_hash(request, form)
if constant_time_compare(token, expected):
return True
else:
# Fall back to Django 1.2 method, for compatibility with forms that
# are in the middle of being used when the upgrade occurs. However,
# we don't want to do this fallback if a subclass has provided their
# own security_hash method - because they might have implemented a
# more secure method, and this would punch a hole in that.
# PendingDeprecationWarning <- left here to remind us that this
# compatibility fallback should be removed in Django 1.5
FormPreview_expected = FormPreview.security_hash(self, request, form)
if expected == FormPreview_expected:
# They didn't override security_hash, do the fallback:
old_expected = security_hash(request, form)
return constant_time_compare(token, old_expected)
else:
return False
def post_post(self, request):
"Validates the POST data. If valid, calls done(). Else, redisplays form."
f = self.form(request.POST, auto_id=self.get_auto_id())
if f.is_valid():
if not self._check_security_hash(request.POST.get(self.unused_name('hash'), ''),
request, f):
return self.failed_hash(request) # Security hash failed.
return self.done(request, f.cleaned_data)
else:
return render_to_response(self.form_template,
self.get_context(request, f),
context_instance=RequestContext(request))
# METHODS SUBCLASSES MIGHT OVERRIDE IF APPROPRIATE ########################
def get_auto_id(self):
"""
Hook to override the ``auto_id`` kwarg for the form. Needed when
rendering two form previews in the same template.
"""
return AUTO_ID
def get_initial(self, request):
"""
Takes a request argument and returns a dictionary to pass to the form's
``initial`` kwarg when the form is being created from an HTTP get.
"""
return {}
def get_context(self, request, form):
"Context for template rendering."
return {'form': form, 'stage_field': self.unused_name('stage'), 'state': self.state}
def parse_params(self, *args, **kwargs):
"""
Given captured args and kwargs from the URLconf, saves something in
self.state and/or raises Http404 if necessary.
For example, this URLconf captures a user_id variable:
(r'^contact/(?P<user_id>\d{1,6})/$', MyFormPreview(MyForm)),
In this case, the kwargs variable in parse_params would be
{'user_id': 32} for a request to '/contact/32/'. You can use that
user_id to make sure it's a valid user and/or save it for later, for
use in done().
"""
pass
def process_preview(self, request, form, context):
"""
Given a validated form, performs any extra processing before displaying
the preview page, and saves any extra data in context.
"""
pass
def security_hash(self, request, form):
"""
Calculates the security hash for the given HttpRequest and Form instances.
Subclasses may want to take into account request-specific information,
such as the IP address.
"""
return security_hash(request, form)
def failed_hash(self, request):
"Returns an HttpResponse in the case of an invalid security hash."
return self.preview_post(request)
# METHODS SUBCLASSES MUST OVERRIDE ########################################
def done(self, request, cleaned_data):
"""
Does something with the cleaned_data and returns an
HttpResponseRedirect.
"""
raise NotImplementedError('You must define a done() method on your %s subclass.' % self.__class__.__name__)
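# Illustrative sketch (not part of the original module): a minimal concrete
# subclass only needs done(); the Contact model and URLs are hypothetical:
#
#     class ContactPreview(FormPreview):
#         def done(self, request, cleaned_data):
#             Contact.objects.create(**cleaned_data)
#             return HttpResponseRedirect('/contact/thanks/')
#
# wired into a URLconf as (r'^contact/$', ContactPreview(ContactForm)).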
| bsd-3-clause |
bartoldeman/easybuild-framework | test/framework/modulestool.py | 1 | 9067 | # #
# Copyright 2014-2018 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
# #
"""
Unit tests for ModulesTool class.
@author: Stijn De Weirdt (Ghent University)
"""
import os
import re
import stat
import sys
import tempfile
from vsc.utils import fancylogger
from test.framework.utilities import EnhancedTestCase, TestLoaderFiltered
from unittest import TextTestRunner
from distutils.version import StrictVersion
import easybuild.tools.options as eboptions
from easybuild.tools import config, modules
from easybuild.tools.build_log import EasyBuildError
from easybuild.tools.config import build_option
from easybuild.tools.filetools import which, write_file
from easybuild.tools.modules import modules_tool, Lmod
from test.framework.utilities import init_config
class MockModulesTool(modules.ModulesTool):
""" MockModule class"""
COMMAND = 'echo'
VERSION_OPTION = '1.0'
VERSION_REGEXP = r'(?P<version>\d\S*)'
# redirect to stderr, ignore 'echo python' ($0 and $1)
COMMAND_SHELL = ["bash", "-c", "echo $2 $3 $4 1>&2"]
class BrokenMockModulesTool(MockModulesTool):
"""MockModulesTool class that is broken unless environment command is set"""
COMMAND = '/does/not/exist'
COMMAND_ENVIRONMENT = 'BMMT_CMD'
class ModulesToolTest(EnhancedTestCase):
""" Testcase for ModulesTool """
def setUp(self):
"""Testcase setup."""
super(ModulesToolTest, self).setUp()
# keep track of original 'module' function definition so we can restore it
self.orig_module = os.environ.get('module', None)
def test_mock(self):
"""Test the mock module"""
os.environ['module'] = "() { eval `/bin/echo $*`\n}"
        # use an empty mod_paths list, otherwise the install_path is called
mmt = MockModulesTool(mod_paths=[], testing=True)
# the version of the MMT is the commandline option
self.assertEqual(mmt.version, StrictVersion(MockModulesTool.VERSION_OPTION))
cmd_abspath = which(MockModulesTool.COMMAND)
# make sure absolute path of module command is being used
self.assertEqual(mmt.cmd, cmd_abspath)
def test_environment_command(self):
"""Test setting cmd via enviroment"""
os.environ['module'] = "() { %s $*\n}" % BrokenMockModulesTool.COMMAND
try:
bmmt = BrokenMockModulesTool(mod_paths=[], testing=True)
# should never get here
self.assertTrue(False, 'BrokenMockModulesTool should fail')
except EasyBuildError, err:
err_msg = "command is not available"
self.assertTrue(err_msg in str(err), "'%s' found in: %s" % (err_msg, err))
os.environ[BrokenMockModulesTool.COMMAND_ENVIRONMENT] = MockModulesTool.COMMAND
os.environ['module'] = "() { /bin/echo $*\n}"
bmmt = BrokenMockModulesTool(mod_paths=[], testing=True)
cmd_abspath = which(MockModulesTool.COMMAND)
self.assertEqual(bmmt.version, StrictVersion(MockModulesTool.VERSION_OPTION))
self.assertEqual(bmmt.cmd, cmd_abspath)
# clean it up
del os.environ[BrokenMockModulesTool.COMMAND_ENVIRONMENT]
def test_module_mismatch(self):
"""Test whether mismatch detection between modules tool and 'module' function works."""
# redefine 'module' function (deliberate mismatch with used module command in MockModulesTool)
os.environ['module'] = "() { eval `/tmp/Modules/$MODULE_VERSION/bin/modulecmd bash $*`\n}"
error_regex = ".*pattern .* not found in defined 'module' function"
self.assertErrorRegex(EasyBuildError, error_regex, MockModulesTool, testing=True)
# check whether escaping error by allowing mismatch via build options works
build_options = {
'allow_modules_tool_mismatch': True,
}
init_config(build_options=build_options)
fancylogger.logToFile(self.logfile)
mt = MockModulesTool(testing=True)
f = open(self.logfile, 'r')
logtxt = f.read()
f.close()
warn_regex = re.compile("WARNING .*pattern .* not found in defined 'module' function")
self.assertTrue(warn_regex.search(logtxt), "Found pattern '%s' in: %s" % (warn_regex.pattern, logtxt))
# redefine 'module' function with correct module command
os.environ['module'] = "() { eval `/bin/echo $*`\n}"
mt = MockModulesTool(testing=True)
self.assertTrue(isinstance(mt.loaded_modules(), list)) # dummy usage
# a warning should be logged if the 'module' function is undefined
del os.environ['module']
mt = MockModulesTool(testing=True)
f = open(self.logfile, 'r')
logtxt = f.read()
f.close()
warn_regex = re.compile("WARNING No 'module' function defined, can't check if it matches .*")
self.assertTrue(warn_regex.search(logtxt), "Pattern %s found in %s" % (warn_regex.pattern, logtxt))
fancylogger.logToFile(self.logfile, enable=False)
def test_lmod_specific(self):
"""Lmod-specific test (skipped unless Lmod is used as modules tool)."""
lmod_abspath = which(Lmod.COMMAND)
# only run this test if 'lmod' is available in $PATH
if lmod_abspath is not None:
build_options = {
'allow_modules_tool_mismatch': True,
'update_modules_tool_cache': True,
}
init_config(build_options=build_options)
lmod = Lmod(testing=True)
self.assertTrue(os.path.samefile(lmod.cmd, lmod_abspath))
# drop any location where 'lmod' or 'spider' can be found from $PATH
paths = os.environ.get('PATH', '').split(os.pathsep)
new_paths = []
for path in paths:
lmod_cand_path = os.path.join(path, Lmod.COMMAND)
spider_cand_path = os.path.join(path, 'spider')
if not os.path.isfile(lmod_cand_path) and not os.path.isfile(spider_cand_path):
new_paths.append(path)
os.environ['PATH'] = os.pathsep.join(new_paths)
# make sure $MODULEPATH contains path that provides some modules
os.environ['MODULEPATH'] = os.path.abspath(os.path.join(os.path.dirname(__file__), 'modules'))
# initialize Lmod modules tool, pass (fake) full path to 'lmod' via $LMOD_CMD
fake_path = os.path.join(self.test_installpath, 'lmod')
fake_lmod_txt = '\n'.join([
'#!/bin/bash',
'echo "Modules based on Lua: Version %s " >&2' % Lmod.REQ_VERSION,
'echo "os.environ[\'FOO\'] = \'foo\'"',
])
write_file(fake_path, fake_lmod_txt)
os.chmod(fake_path, stat.S_IRUSR|stat.S_IXUSR)
os.environ['LMOD_CMD'] = fake_path
init_config(build_options=build_options)
lmod = Lmod(testing=True)
self.assertTrue(os.path.samefile(lmod.cmd, fake_path))
# use correct full path for 'lmod' via $LMOD_CMD
os.environ['LMOD_CMD'] = lmod_abspath
init_config(build_options=build_options)
lmod = Lmod(testing=True)
        # obtain list of available modules, should be non-empty
self.assertTrue(lmod.available(), "List of available modules obtained using Lmod is non-empty")
# test updating local spider cache (but don't actually update the local cache file!)
self.assertTrue(lmod.update(), "Updated local Lmod spider cache is non-empty")
def tearDown(self):
"""Testcase cleanup."""
super(ModulesToolTest, self).tearDown()
# restore 'module' function
if self.orig_module is not None:
os.environ['module'] = self.orig_module
else:
if 'module' in os.environ:
del os.environ['module']
def suite():
""" returns all the testcases in this module """
return TestLoaderFiltered().loadTestsFromTestCase(ModulesToolTest, sys.argv[1:])
if __name__ == '__main__':
TextTestRunner(verbosity=1).run(suite())
| gpl-2.0 |
JeyZeta/Dangerous | Dangerous/sqlmap/thirdparty/colorama/winterm.py | 18 | 4133 |
from . import win32
# from wincon.h
class WinColor(object):
BLACK = 0
BLUE = 1
GREEN = 2
CYAN = 3
RED = 4
MAGENTA = 5
YELLOW = 6
GREY = 7
# from wincon.h
class WinStyle(object):
NORMAL = 0x00 # dim text, dim background
BRIGHT = 0x08 # bright text, dim background
class WinTerm(object):
def __init__(self):
self._default = win32.GetConsoleScreenBufferInfo(win32.STDOUT).wAttributes
self.set_attrs(self._default)
self._default_fore = self._fore
self._default_back = self._back
self._default_style = self._style
def get_attrs(self):
return self._fore + self._back * 16 + self._style
def set_attrs(self, value):
self._fore = value & 7
self._back = (value >> 4) & 7
self._style = value & WinStyle.BRIGHT
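    # Illustrative note (not part of the original module): console attributes
    # pack into one byte -- foreground colour in bits 0-2, BRIGHT intensity in
    # bit 3, background colour in bits 4-6 -- which is why get_attrs() computes
    # fore + back * 16 + style. For example, bright red text on a grey
    # background:
    #
    #     WinColor.RED + WinColor.GREY * 16 + WinStyle.BRIGHT   # == 0x7C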
def reset_all(self, on_stderr=None):
self.set_attrs(self._default)
self.set_console(attrs=self._default)
def fore(self, fore=None, on_stderr=False):
if fore is None:
fore = self._default_fore
self._fore = fore
self.set_console(on_stderr=on_stderr)
def back(self, back=None, on_stderr=False):
if back is None:
back = self._default_back
self._back = back
self.set_console(on_stderr=on_stderr)
def style(self, style=None, on_stderr=False):
if style is None:
style = self._default_style
self._style = style
self.set_console(on_stderr=on_stderr)
def set_console(self, attrs=None, on_stderr=False):
if attrs is None:
attrs = self.get_attrs()
handle = win32.STDOUT
if on_stderr:
handle = win32.STDERR
win32.SetConsoleTextAttribute(handle, attrs)
def get_position(self, handle):
position = win32.GetConsoleScreenBufferInfo(handle).dwCursorPosition
# Because Windows coordinates are 0-based,
# and win32.SetConsoleCursorPosition expects 1-based.
position.X += 1
position.Y += 1
return position
def set_cursor_position(self, position=None, on_stderr=False):
if position is None:
#I'm not currently tracking the position, so there is no default.
#position = self.get_position()
return
handle = win32.STDOUT
if on_stderr:
handle = win32.STDERR
win32.SetConsoleCursorPosition(handle, position)
def cursor_up(self, num_rows=0, on_stderr=False):
if num_rows == 0:
return
handle = win32.STDOUT
if on_stderr:
handle = win32.STDERR
position = self.get_position(handle)
adjusted_position = (position.Y - num_rows, position.X)
self.set_cursor_position(adjusted_position, on_stderr)
def erase_data(self, mode=0, on_stderr=False):
# 0 (or None) should clear from the cursor to the end of the screen.
# 1 should clear from the cursor to the beginning of the screen.
# 2 should clear the entire screen. (And maybe move cursor to (1,1)?)
#
# At the moment, I only support mode 2. From looking at the API, it
# should be possible to calculate a different number of bytes to clear,
# and to do so relative to the cursor position.
        # ``mode`` arrives from the ANSI parser as a one-element sequence
        # (e.g. [2]); also accept a bare int so the default value is usable.
        if isinstance(mode, (list, tuple)):
            mode = mode[0] if mode else 0
        if mode != 2:
            return
handle = win32.STDOUT
if on_stderr:
handle = win32.STDERR
# here's where we'll home the cursor
coord_screen = win32.COORD(0,0)
csbi = win32.GetConsoleScreenBufferInfo(handle)
# get the number of character cells in the current buffer
dw_con_size = csbi.dwSize.X * csbi.dwSize.Y
# fill the entire screen with blanks
win32.FillConsoleOutputCharacter(handle, ord(' '), dw_con_size, coord_screen)
# now set the buffer's attributes accordingly
        win32.FillConsoleOutputAttribute(handle, self.get_attrs(), dw_con_size, coord_screen)
# put the cursor at (0, 0)
win32.SetConsoleCursorPosition(handle, (coord_screen.X, coord_screen.Y))
| mit |
wrouesnel/ansible | test/units/modules/network/vyos/test_vyos_command.py | 43 | 4225 | # (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
from ansible.compat.tests.mock import patch
from ansible.modules.network.vyos import vyos_command
from units.modules.utils import set_module_args
from .vyos_module import TestVyosModule, load_fixture
class TestVyosCommandModule(TestVyosModule):
module = vyos_command
def setUp(self):
super(TestVyosCommandModule, self).setUp()
self.mock_run_commands = patch('ansible.modules.network.vyos.vyos_command.run_commands')
self.run_commands = self.mock_run_commands.start()
def tearDown(self):
super(TestVyosCommandModule, self).tearDown()
self.mock_run_commands.stop()
def load_fixtures(self, commands=None):
def load_from_file(*args, **kwargs):
module, commands = args
output = list()
for item in commands:
try:
obj = json.loads(item)
command = obj['command']
except ValueError:
command = item
filename = str(command).replace(' ', '_')
output.append(load_fixture(filename))
return output
self.run_commands.side_effect = load_from_file
def test_vyos_command_simple(self):
set_module_args(dict(commands=['show version']))
result = self.execute_module()
self.assertEqual(len(result['stdout']), 1)
self.assertTrue(result['stdout'][0].startswith('Version: VyOS'))
def test_vyos_command_multiple(self):
set_module_args(dict(commands=['show version', 'show version']))
result = self.execute_module()
self.assertEqual(len(result['stdout']), 2)
self.assertTrue(result['stdout'][0].startswith('Version: VyOS'))
def test_vyos_command_wait_for(self):
wait_for = 'result[0] contains "VyOS maintainers"'
set_module_args(dict(commands=['show version'], wait_for=wait_for))
self.execute_module()
def test_vyos_command_wait_for_fails(self):
wait_for = 'result[0] contains "test string"'
set_module_args(dict(commands=['show version'], wait_for=wait_for))
self.execute_module(failed=True)
self.assertEqual(self.run_commands.call_count, 10)
def test_vyos_command_retries(self):
wait_for = 'result[0] contains "test string"'
set_module_args(dict(commands=['show version'], wait_for=wait_for, retries=2))
self.execute_module(failed=True)
self.assertEqual(self.run_commands.call_count, 2)
def test_vyos_command_match_any(self):
wait_for = ['result[0] contains "VyOS maintainers"',
'result[0] contains "test string"']
set_module_args(dict(commands=['show version'], wait_for=wait_for, match='any'))
self.execute_module()
def test_vyos_command_match_all(self):
wait_for = ['result[0] contains "VyOS maintainers"',
'result[0] contains "[email protected]"']
set_module_args(dict(commands=['show version'], wait_for=wait_for, match='all'))
self.execute_module()
def test_vyos_command_match_all_failure(self):
wait_for = ['result[0] contains "VyOS maintainers"',
'result[0] contains "test string"']
commands = ['show version', 'show version']
set_module_args(dict(commands=commands, wait_for=wait_for, match='all'))
self.execute_module(failed=True)
| gpl-3.0 |
yuzhangcmu/Python-Study | Leetcode/Sort_Colors.py | 2 | 1167 | """
Given an array with n objects colored red, white or blue, sort them so that objects of the same color are adjacent, with the colors in the order red, white and blue.
Here, we will use the integers 0, 1, and 2 to represent the color red, white, and blue respectively.
Note:
You are not supposed to use the library's sort function for this problem.
Follow up:
A rather straightforward solution is a two-pass algorithm using counting sort.
First, iterate the array counting the number of 0's, 1's, and 2's, then overwrite the array with that many 0's, followed by the 1's and then the 2's.
Could you come up with an one-pass algorithm using only constant space?
"""
class Solution:
# @param A a list of integers
# @return nothing, sort in place
def sortColors(self, A):
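        # Dutch national flag partition: everything left of `start` is 0,
        # everything right of `end` is 2, and `cur` scans the unsorted middle.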
start = 0
end = len(A) - 1
cur = 0
while cur <= end:
if A[cur] == 0:
A[start], A[cur] = A[cur], A[start]
cur += 1
start += 1
elif A[cur] == 1:
cur += 1
else:
A[cur], A[end] = A[end], A[cur]
end -= 1
| mit |
TimBizeps/BachelorAP | FP 2018/V51 Operationsverstärker/auswertung/auswertung5.py | 1 | 1788 | import matplotlib as mpl
mpl.use('pgf')
import numpy as np
import scipy.constants as const
import matplotlib.pyplot as plt
from scipy.optimize import curve_fit
from uncertainties import ufloat
import uncertainties.unumpy as unp
from uncertainties.unumpy import (nominal_values as noms, std_devs as stds)
mpl.rcParams.update({
'font.family': 'serif',
'text.usetex': True,
'pgf.rcfonts': False,
'pgf.texsystem': 'lualatex',
'pgf.preamble': r'\usepackage{unicode-math}\usepackage{siunitx}'
})
x, y = np.genfromtxt('h.txt', unpack=True)
x2, y2 = np.genfromtxt('h2.txt', unpack=True)
def funktion(x,a,b,c,d):
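    # Damped oscillation model: amplitude a, time offset b; c sets the
    # oscillation period, with decay time tau = 20*c (as evaluated below),
    # and d is the baseline offset.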
return a*np.exp((-1*(x-b))/(20*c))*np.sin((x-b)/c)+d
params, cov = curve_fit(funktion, x2, y2, p0=(0.2, 0, 0.000224, -0.02))
errors = np.sqrt(np.diag(cov))
a = params[0]
a_err = errors[0]
b = params[1]
b_err = errors[1]
c = params[2]
c_err = errors[2]
d = params[3]
d_err = errors[3]
print('a = ', a ,'+-', a_err)
print('b = ', b ,'+-', b_err)
print('c = ', c ,'+-', c_err)
print('d = ', d ,'+-', d_err)
l = np.linspace(0, 0.045, 5000)
plt.plot(x, y, 'rx', label='Measured values not used for the fit')
plt.plot(x2, y2, 'kx', label='Measured values used for the fit')
plt.plot(l, funktion(l,a,b,c,d), 'b', label='Fit')
plt.xlabel(r'Time $t$ in $\si{\second}$')
plt.ylabel(r'Output voltage $U_A$ in $\si{\volt}$')
#plt.xlim(,)
#plt.ylim(100, 500)
plt.legend(loc='best')
plt.tight_layout()
plt.savefig("plot5.pdf")
cmit = ufloat(c, c_err)
ctau = 20*cmit
print('ctau = ', ctau)
k = np.array([215, 236])
meank = np.mean(k)
fk = np.std(k, ddof=1)/np.sqrt(len(k))
kmit = ufloat(meank, fk)
print('kmit = ', kmit)
kmit = kmit * 10**(-10)
konstantec = kmit * 9960
tau = 20*konstantec
print('kmit = ', kmit)
print('konstantec = ', konstantec)
print('tau = ', tau)
| gpl-3.0 |
deltreey/ansible | lib/ansible/parsing/__init__.py | 22 | 8948 | # (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
import os
from yaml import load, YAMLError
from ansible.errors import AnsibleParserError
from ansible.errors.yaml_strings import YAML_SYNTAX_ERROR
from ansible.parsing.vault import VaultLib
from ansible.parsing.splitter import unquote
from ansible.parsing.yaml.loader import AnsibleLoader
from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject, AnsibleUnicode
from ansible.utils.path import unfrackpath
from ansible.utils.unicode import to_unicode
class DataLoader():
'''
The DataLoader class is used to load and parse YAML or JSON content,
either from a given file name or from a string that was previously
read in through other means. A Vault password can be specified, and
any vault-encrypted files will be decrypted.
Data read from files will also be cached, so the file will never be
read from disk more than once.
Usage:
dl = DataLoader()
(or)
dl = DataLoader(vault_password='foo')
ds = dl.load('...')
ds = dl.load_from_file('/path/to/file')
'''
def __init__(self, vault_password=None):
self._basedir = '.'
self._vault_password = vault_password
self._FILE_CACHE = dict()
self._vault = VaultLib(password=vault_password)
def load(self, data, file_name='<string>', show_content=True):
'''
Creates a python datastructure from the given data, which can be either
a JSON or YAML string.
'''
try:
# we first try to load this data as JSON
return json.loads(data)
except:
# if loading JSON failed for any reason, we go ahead
# and try to parse it as YAML instead
if isinstance(data, AnsibleUnicode):
# The PyYAML's libyaml bindings use PyUnicode_CheckExact so
# they are unable to cope with our subclass.
# Unwrap and re-wrap the unicode so we can keep track of line
# numbers
new_data = unicode(data)
else:
new_data = data
try:
new_data = self._safe_load(new_data, file_name=file_name)
except YAMLError as yaml_exc:
self._handle_error(yaml_exc, file_name, show_content)
if isinstance(data, AnsibleUnicode):
new_data = AnsibleUnicode(new_data)
new_data.ansible_pos = data.ansible_pos
return new_data
def load_from_file(self, file_name):
''' Loads data from a file, which can contain either JSON or YAML. '''
file_name = self.path_dwim(file_name)
# if the file has already been read in and cached, we'll
# return those results to avoid more file/vault operations
if file_name in self._FILE_CACHE:
return self._FILE_CACHE[file_name]
# read the file contents and load the data structure from them
(file_data, show_content) = self._get_file_contents(file_name)
parsed_data = self.load(data=file_data, file_name=file_name, show_content=show_content)
# cache the file contents for next time
self._FILE_CACHE[file_name] = parsed_data
return parsed_data
def path_exists(self, path):
return os.path.exists(path)
def is_file(self, path):
return os.path.isfile(path)
def is_directory(self, path):
return os.path.isdir(path)
def list_directory(self, path):
return os.listdir(path)
def _safe_load(self, stream, file_name=None):
''' Implements yaml.safe_load(), except using our custom loader class. '''
loader = AnsibleLoader(stream, file_name)
try:
return loader.get_single_data()
finally:
loader.dispose()
def _get_file_contents(self, file_name):
'''
Reads the file contents from the given file name, and will decrypt them
if they are found to be vault-encrypted.
'''
if not file_name or not isinstance(file_name, basestring):
raise AnsibleParserError("Invalid filename: '%s'" % str(file_name))
if not self.path_exists(file_name) or not self.is_file(file_name):
raise AnsibleParserError("the file_name '%s' does not exist, or is not readable" % file_name)
show_content = True
try:
with open(file_name, 'r') as f:
data = f.read()
if self._vault.is_encrypted(data):
data = self._vault.decrypt(data)
show_content = False
return (data, show_content)
except (IOError, OSError) as e:
raise AnsibleParserError("an error occurred while trying to read the file '%s': %s" % (file_name, str(e)))
def _handle_error(self, yaml_exc, file_name, show_content):
'''
Optionally constructs an object (AnsibleBaseYAMLObject) to encapsulate the
file name/position where a YAML exception occurred, and raises an AnsibleParserError
to display the syntax exception information.
'''
# if the YAML exception contains a problem mark, use it to construct
# an object the error class can use to display the faulty line
err_obj = None
if hasattr(yaml_exc, 'problem_mark'):
err_obj = AnsibleBaseYAMLObject()
err_obj.ansible_pos = (file_name, yaml_exc.problem_mark.line + 1, yaml_exc.problem_mark.column + 1)
raise AnsibleParserError(YAML_SYNTAX_ERROR, obj=err_obj, show_content=show_content)
def get_basedir(self):
''' returns the current basedir '''
return self._basedir
def set_basedir(self, basedir):
''' sets the base directory, used to find files when a relative path is given '''
if basedir is not None:
self._basedir = to_unicode(basedir)
def path_dwim(self, given):
'''
make relative paths work like folks expect.
'''
given = unquote(given)
if given.startswith("/"):
return os.path.abspath(given)
elif given.startswith("~"):
return os.path.abspath(os.path.expanduser(given))
else:
return os.path.abspath(os.path.join(self._basedir, given))
def path_dwim_relative(self, path, dirname, source):
''' find one file in a role/playbook dirs with/without dirname subdir '''
search = []
isrole = False
# I have full path, nothing else needs to be looked at
if source.startswith('~') or source.startswith('/'):
search.append(self.path_dwim(source))
else:
# base role/play path + templates/files/vars + relative filename
search.append(os.path.join(path, dirname, source))
basedir = unfrackpath(path)
# is it a role and if so make sure you get correct base path
if path.endswith('tasks') and os.path.exists(os.path.join(path,'main.yml')) \
or os.path.exists(os.path.join(path,'tasks/main.yml')):
isrole = True
if path.endswith('tasks'):
basedir = unfrackpath(os.path.dirname(path))
cur_basedir = self._basedir
self.set_basedir(basedir)
# resolved base role/play path + templates/files/vars + relative filename
search.append(self.path_dwim(os.path.join(basedir, dirname, source)))
self.set_basedir(cur_basedir)
if isrole and not source.endswith(dirname):
# look in role's tasks dir w/o dirname
search.append(self.path_dwim(os.path.join(basedir, 'tasks', source)))
# try to create absolute path for loader basedir + templates/files/vars + filename
search.append(self.path_dwim(os.path.join(dirname,source)))
# try to create absolute path for loader basedir + filename
search.append(self.path_dwim(source))
for candidate in search:
if os.path.exists(candidate):
break
return candidate
| gpl-3.0 |
t3wz/mtasa-blue | vendor/google-breakpad/src/third_party/protobuf/protobuf/python/google/protobuf/internal/service_reflection_test.py | 560 | 5127 | #! /usr/bin/python
#
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# http://code.google.com/p/protobuf/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests for google.protobuf.internal.service_reflection."""
__author__ = '[email protected] (Petar Petrov)'
import unittest
from google.protobuf import unittest_pb2
from google.protobuf import service_reflection
from google.protobuf import service
class FooUnitTest(unittest.TestCase):
def testService(self):
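    # Unimplemented service methods must report failure through the RPC
    # controller; subclass overrides are reachable both directly and via
    # CallMethod() with the method descriptor.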
class MockRpcChannel(service.RpcChannel):
def CallMethod(self, method, controller, request, response, callback):
self.method = method
self.controller = controller
self.request = request
callback(response)
class MockRpcController(service.RpcController):
def SetFailed(self, msg):
self.failure_message = msg
self.callback_response = None
class MyService(unittest_pb2.TestService):
pass
self.callback_response = None
def MyCallback(response):
self.callback_response = response
rpc_controller = MockRpcController()
channel = MockRpcChannel()
srvc = MyService()
srvc.Foo(rpc_controller, unittest_pb2.FooRequest(), MyCallback)
self.assertEqual('Method Foo not implemented.',
rpc_controller.failure_message)
self.assertEqual(None, self.callback_response)
rpc_controller.failure_message = None
service_descriptor = unittest_pb2.TestService.GetDescriptor()
srvc.CallMethod(service_descriptor.methods[1], rpc_controller,
unittest_pb2.BarRequest(), MyCallback)
self.assertEqual('Method Bar not implemented.',
rpc_controller.failure_message)
self.assertEqual(None, self.callback_response)
class MyServiceImpl(unittest_pb2.TestService):
def Foo(self, rpc_controller, request, done):
self.foo_called = True
def Bar(self, rpc_controller, request, done):
self.bar_called = True
srvc = MyServiceImpl()
rpc_controller.failure_message = None
srvc.Foo(rpc_controller, unittest_pb2.FooRequest(), MyCallback)
self.assertEqual(None, rpc_controller.failure_message)
self.assertEqual(True, srvc.foo_called)
rpc_controller.failure_message = None
srvc.CallMethod(service_descriptor.methods[1], rpc_controller,
unittest_pb2.BarRequest(), MyCallback)
self.assertEqual(None, rpc_controller.failure_message)
self.assertEqual(True, srvc.bar_called)
def testServiceStub(self):
class MockRpcChannel(service.RpcChannel):
def CallMethod(self, method, controller, request,
response_class, callback):
self.method = method
self.controller = controller
self.request = request
callback(response_class())
self.callback_response = None
def MyCallback(response):
self.callback_response = response
channel = MockRpcChannel()
stub = unittest_pb2.TestService_Stub(channel)
rpc_controller = 'controller'
request = 'request'
    # GetDescriptor is now static, but it still works as an instance method for compatibility
self.assertEqual(unittest_pb2.TestService_Stub.GetDescriptor(),
stub.GetDescriptor())
# Invoke method.
stub.Foo(rpc_controller, request, MyCallback)
self.assertTrue(isinstance(self.callback_response,
unittest_pb2.FooResponse))
self.assertEqual(request, channel.request)
self.assertEqual(rpc_controller, channel.controller)
self.assertEqual(stub.GetDescriptor().methods[0], channel.method)
if __name__ == '__main__':
unittest.main()
| gpl-3.0 |
naparuba/opsbro | data/global-configuration/packs/system/collectors/collector_interfaces.py | 2 | 1852 | import os
import re
from opsbro.collector import Collector
def extract(input):
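    # Parse one paragraph of `ifconfig -a` output into a dict of interface
    # attributes: name, link encapsulation, MAC, IPv4 address/broadcast/mask
    # and the UP/RUNNING/BROADCAST/MULTICAST state flags.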
mo = re.search(r'^(?P<interface>eth\d+|eth\d+:\d+)\s+' +
r'Link encap:(?P<link_encap>\S+)\s+' +
r'(HWaddr\s+(?P<hardware_address>\S+))?' +
r'(\s+inet addr:(?P<ip_address>\S+))?' +
r'(\s+Bcast:(?P<broadcast_address>\S+)\s+)?' +
r'(Mask:(?P<net_mask>\S+)\s+)?',
input, re.MULTILINE)
if mo:
info = mo.groupdict('')
info['running'] = False
info['up'] = False
info['multicast'] = False
info['broadcast'] = False
if 'RUNNING' in input:
info['running'] = True
if 'UP' in input:
info['up'] = True
if 'BROADCAST' in input:
info['broadcast'] = True
if 'MULTICAST' in input:
info['multicast'] = True
return info
return {}
# interfaces = [ extract(interface) for interface in ifconfig.split('\n\n') if interface.strip() ]
# print json.dumps(interfaces, indent=4)
class Interfaces(Collector):
def launch(self):
logger = self.logger
logger.debug('getInterfaces: start')
res = {}
for pth in ["/bin/ifconfig", "/sbin/ifconfig", "/usr/sbin/ifconfig"]:
if os.path.exists(pth):
status, output = self.execute_shell_and_state('%s -a' % pth)
if status != 0:
return res
paragraphs = output.split('\n\n')
for p in paragraphs:
r = extract(p.strip())
if 'interface' in r:
res[r['interface']] = r
return res
self.set_not_eligible('Your system is not managed, missing the ifconfig command.')
return res
| mit |
kch8qx/osf.io | framework/celery_tasks/utils.py | 54 | 1936 | from __future__ import unicode_literals
import logging
import inspect
from functools import wraps
from raven import Client
from website import settings
logger = logging.getLogger(__name__)
sentry = Client(dsn=settings.SENTRY_DSN, release=settings.VERSION, tags={'App': 'celery'})
# statuses
FAILED = 'failed'
CREATED = 'created'
STARTED = 'started'
COMPLETED = 'completed'
def log_to_sentry(message, **kwargs):
if not settings.SENTRY_DSN:
        return logger.warn('log_to_sentry called with no SENTRY_DSN')
return sentry.captureMessage(message, extra=kwargs)
# Use _index here so as not to clutter the kwargs namespace
def dispatch(_event, status, _index=None, **kwargs):
if _index:
_event = '{}.{}'.format(_event, _index)
logger.debug('[{}][{}]{!r}'.format(_event, status, kwargs))
def logged(event, index=None):
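    # Decorator factory: dispatches STARTED/COMPLETED/FAILED log events around
    # the wrapped callable and forwards exceptions to Sentry when configured.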
def _logged(func):
@wraps(func)
def wrapped(*args, **kwargs):
context = extract_context(func, *args, **kwargs)
dispatch(event, STARTED, _index=index, **context)
try:
res = func(*args, **kwargs)
except Exception as e:
if settings.SENTRY_DSN:
sentry.captureException()
dispatch(event, FAILED, _index=index, exception=e, **context)
raise
else:
dispatch(event, COMPLETED, _index=index, **context)
return res
return wrapped
return _logged
def extract_context(func, *args, **kwargs):
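    # Rebuild a name -> value mapping of the wrapped function's arguments so
    # the dispatched log events can carry the full call context.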
arginfo = inspect.getargspec(func)
arg_names = arginfo.args
defaults = {
arg_names.pop(-1): kwarg
for kwarg in (arginfo.defaults or [])
}
computed_args = zip(arg_names, args)
if arginfo.varargs:
computed_args.append(('args', list(args[len(arg_names):])))
if kwargs:
defaults['kwargs'] = kwargs
return dict(computed_args, **defaults)
| apache-2.0 |
kingvuplus/New | lib/python/Components/HdmiCec.py | 3 | 7495 | import struct
from config import config, ConfigSelection, ConfigYesNo, ConfigSubsection, ConfigText
from enigma import eHdmiCEC, eTimer
from Screens.Standby import inStandby
import Screens.Standby
from Tools import Notifications
import time
from os import system
from Tools.Directories import fileExists
class HdmiCec:
def __init__(self):
config.hdmicec = ConfigSubsection()
config.hdmicec.enabled = ConfigYesNo(default = False)
config.hdmicec.logenabledserial = ConfigYesNo(default = False)
config.hdmicec.logenabledfile = ConfigYesNo(default = False)
config.hdmicec.tvstandby = ConfigYesNo(default = False)
config.hdmicec.tvwakeup = ConfigYesNo(default = False)
config.hdmicec.boxstandby = ConfigYesNo(default = False)
config.hdmicec.enabletvrc = ConfigYesNo(default = True)
config.hdmicec.active_source_reply = ConfigYesNo(default = True)
config.hdmicec.standby_message = ConfigSelection(
choices = {
"standby,inactive": _("TV standby"),
"standby,avpwroff,inactive,": _("TV + A/V standby"),
"inactive": _("Source inactive"),
"nothing": _("Nothing"),
},
default = "standby,inactive")
config.hdmicec.deepstandby_message = ConfigSelection(
choices = {
"standby,inactive": _("TV standby"),
"standby,avdeeppwroff,inactive": _("TV + A/V standby"),
"inactive": _("Source inactive"),
"nothing": _("Nothing"),
},
default = "standby,inactive")
config.hdmicec.wakeup_message = ConfigSelection(
choices = {
"wakeup,active,activevu": _("TV wakeup"),
"wakeup,avpwron,active,activevu": _("TV + A/V wakeup"),
"active": _("Source active"),
"nothing": _("Nothing"),
},
default = "wakeup,active,activevu")
config.hdmicec.vustandby_message = ConfigSelection(
choices = {
"vustandby": _("VU standby"),
"vudeepstandby": _("VU DeepStandby"),
"vunothing": _("Nothing"),
},
default = "vustandby")
config.hdmicec.vuwakeup_message = ConfigSelection(
choices = {
"vuwakeup": _("VU wakeup"),
"vunothing": _("Nothing"),
},
default = "vuwakeup")
config.hdmicec.tvinput = ConfigSelection(default = "1",
choices = [
("1", _("HDMI 1")),
("2", _("HDMI 2")),
("3", _("HDMI 3")),
("4", _("HDMI 4")),
("5", _("HDMI 5"))])
config.hdmicec.avinput = ConfigSelection(default ="0",
choices = [
("0", _("no A/V Receiver")),
("1", _("HDMI 1")),
("2", _("HDMI 2")),
("3", _("HDMI 3")),
("4", _("HDMI 4")),
("5", _("HDMI 5"))])
config.hdmicec.devicename = ConfigText(default = self.getDeviceName(), visible_width = 50, fixed_size = False)
config.misc.standbyCounter.addNotifier(self.enterStandby, initial_call = False)
config.misc.DeepStandbyOn.addNotifier(self.enterDeepStandby, initial_call = False)
self.leaveDeepStandby()
def getDeviceName(self):
deviceList = {
"duo": "VU+ Duo",
"solo": "VU+ Solo",
"uno": "VU+ Uno",
"ultimo": "VU+ Ultimo",
"solo2": "VU+ Solo2",
"duo2": "VU+ Duo2",
"solose": "VU+ SoloSE",
"zero": "VU+ Zero",
}
if fileExists("/proc/stb/info/vumodel"):
vumodel = open("/proc/stb/info/vumodel")
            info = vumodel.read().strip()
vumodel.close()
return deviceList.setdefault(info, "VU+")
else:
return "VU+"
def sendMessages(self, messages):
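        # Translate each comma-separated keyword into a raw CEC opcode (plus
        # payload) and send it to the TV (address 0), the A/V receiver
        # (address 5) or the whole bus (0x0F).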
for message in messages.split(','):
cmd = None
logcmd = None
addressvaluebroadcast = int("0F",16)
addressvalue = int("0",16)
addressvalueav = int("5",16)
wakeupmessage = int("04",16)
standbymessage=int("36",16)
activesourcemessage=int("82",16)
inactivesourcemessage=int("9D",16)
sendkeymessage = int("44",16)
sendkeypwronmessage = int("6D",16)
sendkeypwroffmessage = int("6C",16)
activevumessage=int("85",16)
physaddress1 = int("0x" + str(config.hdmicec.tvinput.value) + str(config.hdmicec.avinput.value),16)
physaddress2 = int("0x00",16)
if message == "wakeup":
cmd = struct.pack('B', wakeupmessage)
logcmd = "[HDMI-CEC] ** WakeUpMessage ** send message: %x to address %x" % (wakeupmessage, addressvalue)
elif message == "active":
addressvalue = addressvaluebroadcast
cmd = struct.pack('BBB', activesourcemessage,physaddress1,physaddress2)
logcmd = "[HDMI-CEC] ** ActiveSourceMessage ** send message: %x:%x:%x to address %x" % (activesourcemessage,physaddress1,physaddress2,addressvalue)
elif message == "standby":
cmd = struct.pack('B', standbymessage)
logcmd = "[HDMI-CEC] ** StandByMessage ** send message: %x to address %x" % (standbymessage, addressvalue)
elif message == "inactive":
addressvalue = addressvaluebroadcast
cmd = struct.pack('BBB', inactivesourcemessage,physaddress1,physaddress2)
logcmd = "[HDMI-CEC] ** InActiveSourceMessage ** send message: %x:%x:%x to address %x" % (inactivesourcemessage,physaddress1,physaddress2,addressvalue)
elif message == "avpwron":
cmd = struct.pack('BB', sendkeymessage,sendkeypwronmessage)
addressvalue = addressvalueav
logcmd = "[HDMI-CEC] ** Power on A/V ** send message: %x:%x to address %x" % (sendkeymessage, sendkeypwronmessage, addressvalue)
elif message == "avdeeppwroff":
cmd = struct.pack('BB',sendkeymessage,sendkeypwroffmessage)
addressvalue = addressvalueav
logcmd = "[HDMI-CEC] ** Standby A/V (Deepstandby)** send message: %x:%x to address %x" % (sendkeymessage,sendkeypwroffmessage, addressvalue)
elif message == "avpwroff":
addressvalue = addressvalueav
cmd = struct.pack('BB',sendkeymessage,sendkeypwroffmessage)
logcmd = "[HDMI-CEC] ** Standby A/V ** send message: %x:%x to address %x" % (sendkeymessage,sendkeypwroffmessage, addressvalue)
elif message == "activevu":
addressvalue = addressvaluebroadcast
cmd = struct.pack('B', activevumessage)
logcmd = "[HDMI-CEC] ** Active VU Message ** send message: %x to address %x" % (activevumessage,addressvalue)
if cmd:
eHdmiCEC.getInstance().sendMessage(addressvalue, len(cmd), str(cmd))
time.sleep(1)
if logcmd:
if config.hdmicec.logenabledserial.value:
print logcmd
if config.hdmicec.logenabledfile.value:
filelog = "echo %s >> /tmp/hdmicec.log" % (logcmd)
system(filelog)
def leaveStandby(self):
if config.hdmicec.enabled.value is True:
self.sendMessages(config.hdmicec.wakeup_message.value)
def enterStandby(self, configElement):
from Screens.Standby import inStandby
inStandby.onClose.append(self.leaveStandby)
if config.hdmicec.enabled.value is True:
self.sendMessages(config.hdmicec.standby_message.value)
def enterDeepStandby(self,configElement):
if config.hdmicec.enabled.value is True:
self.sendMessages(config.hdmicec.deepstandby_message.value)
def leaveDeepStandby(self):
if config.hdmicec.enabled.value is True:
self.sendMessages(config.hdmicec.wakeup_message.value)
## not used
def activeSource(self):
if config.hdmicec.enabled.value is True:
physadress1 = "0x" + str(config.hdmicec.tvinput.value) + str(config.hdmicec.avinput.value)
physadress2 = "0x00"
cecmessage = int('0x82',16)
address = int('0x0F',16)
valuethree = int(physadress1,16)
valuefour = int(physadress2,16)
cmd = struct.pack('BBB',cecmessage,valuethree,valuefour)
eHdmiCEC.getInstance().sendMessage(address, len(cmd), str(cmd))
if config.hdmicec.enabletvrc.value:
cecmessage = int('0x8E',16)
address = int('0',16)
valuethree = int('0',16)
cmd = struct.pack('BB',cecmessage,valuethree)
eHdmiCEC.getInstance().sendMessage(address, len(cmd), str(cmd))
hdmi_cec = HdmiCec()
| gpl-2.0 |
ammarkhann/FinalSeniorCode | lib/python2.7/site-packages/mpmath/functions/elliptic.py | 2 | 38824 | r"""
Elliptic functions historically comprise the elliptic integrals
and their inverses, and originate from the problem of computing the
arc length of an ellipse. From a more modern point of view,
an elliptic function is defined as a doubly periodic function, i.e.
a function which satisfies
.. math ::
f(z + 2 \omega_1) = f(z + 2 \omega_2) = f(z)
for some half-periods `\omega_1, \omega_2` with
`\mathrm{Im}[\omega_1 / \omega_2] > 0`. The canonical elliptic
functions are the Jacobi elliptic functions. More broadly, this section
includes quasi-doubly periodic functions (such as the Jacobi theta
functions) and other functions useful in the study of elliptic functions.
Many different conventions for the arguments of
elliptic functions are in use. It is even standard to use
different parameterizations for different functions in the same
text or software (and mpmath is no exception).
The usual parameters are the elliptic nome `q`, which usually
must satisfy `|q| < 1`; the elliptic parameter `m` (an arbitrary
complex number); the elliptic modulus `k` (an arbitrary complex
number); and the half-period ratio `\tau`, which usually must
satisfy `\mathrm{Im}[\tau] > 0`.
These quantities can be expressed in terms of each other
using the following relations:
.. math ::
m = k^2
.. math ::
\tau = -i \frac{K(1-m)}{K(m)}
.. math ::
q = e^{i \pi \tau}
.. math ::
k = \frac{\vartheta_2^4(q)}{\vartheta_3^4(q)}
In addition, an alternative definition is used for the nome in
number theory, which we here denote by q-bar:
.. math ::
\bar{q} = q^2 = e^{2 i \pi \tau}
For convenience, mpmath provides functions to convert
between the various parameters (:func:`~mpmath.qfrom`, :func:`~mpmath.mfrom`,
:func:`~mpmath.kfrom`, :func:`~mpmath.taufrom`, :func:`~mpmath.qbarfrom`).
**References**
1. [AbramowitzStegun]_
2. [WhittakerWatson]_
"""
from .functions import defun, defun_wrapped
def nome(ctx, m):
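    # Elliptic nome q = exp(-pi*K(1-m)/K(m)); the degenerate inputs
    # m = 0, 1, nan and +-inf are handled explicitly first.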
m = ctx.convert(m)
if not m:
return m
if m == ctx.one:
return m
if ctx.isnan(m):
return m
if ctx.isinf(m):
if m == ctx.ninf:
return type(m)(-1)
else:
return ctx.mpc(-1)
a = ctx.ellipk(ctx.one-m)
b = ctx.ellipk(m)
v = ctx.exp(-ctx.pi*a/b)
if not ctx._im(m) and ctx._re(m) < 1:
if ctx._is_real_type(m):
return v.real
else:
return v.real + 0j
elif m == 2:
v = ctx.mpc(0, v.imag)
return v
@defun_wrapped
def qfrom(ctx, q=None, m=None, k=None, tau=None, qbar=None):
r"""
Returns the elliptic nome `q`, given any of `q, m, k, \tau, \bar{q}`::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> qfrom(q=0.25)
0.25
>>> qfrom(m=mfrom(q=0.25))
0.25
>>> qfrom(k=kfrom(q=0.25))
0.25
>>> qfrom(tau=taufrom(q=0.25))
(0.25 + 0.0j)
>>> qfrom(qbar=qbarfrom(q=0.25))
0.25
"""
if q is not None:
return ctx.convert(q)
if m is not None:
return nome(ctx, m)
if k is not None:
return nome(ctx, ctx.convert(k)**2)
if tau is not None:
return ctx.expjpi(tau)
if qbar is not None:
return ctx.sqrt(qbar)
@defun_wrapped
def qbarfrom(ctx, q=None, m=None, k=None, tau=None, qbar=None):
r"""
Returns the number-theoretic nome `\bar q`, given any of
`q, m, k, \tau, \bar{q}`::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> qbarfrom(qbar=0.25)
0.25
>>> qbarfrom(q=qfrom(qbar=0.25))
0.25
>>> qbarfrom(m=extraprec(20)(mfrom)(qbar=0.25)) # ill-conditioned
0.25
>>> qbarfrom(k=extraprec(20)(kfrom)(qbar=0.25)) # ill-conditioned
0.25
>>> qbarfrom(tau=taufrom(qbar=0.25))
(0.25 + 0.0j)
"""
if qbar is not None:
return ctx.convert(qbar)
if q is not None:
return ctx.convert(q) ** 2
if m is not None:
return nome(ctx, m) ** 2
if k is not None:
return nome(ctx, ctx.convert(k)**2) ** 2
if tau is not None:
return ctx.expjpi(2*tau)
@defun_wrapped
def taufrom(ctx, q=None, m=None, k=None, tau=None, qbar=None):
r"""
Returns the elliptic half-period ratio `\tau`, given any of
`q, m, k, \tau, \bar{q}`::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> taufrom(tau=0.5j)
(0.0 + 0.5j)
>>> taufrom(q=qfrom(tau=0.5j))
(0.0 + 0.5j)
>>> taufrom(m=mfrom(tau=0.5j))
(0.0 + 0.5j)
>>> taufrom(k=kfrom(tau=0.5j))
(0.0 + 0.5j)
>>> taufrom(qbar=qbarfrom(tau=0.5j))
(0.0 + 0.5j)
"""
if tau is not None:
return ctx.convert(tau)
if m is not None:
m = ctx.convert(m)
return ctx.j*ctx.ellipk(1-m)/ctx.ellipk(m)
if k is not None:
k = ctx.convert(k)
return ctx.j*ctx.ellipk(1-k**2)/ctx.ellipk(k**2)
if q is not None:
return ctx.log(q) / (ctx.pi*ctx.j)
if qbar is not None:
qbar = ctx.convert(qbar)
return ctx.log(qbar) / (2*ctx.pi*ctx.j)
@defun_wrapped
def kfrom(ctx, q=None, m=None, k=None, tau=None, qbar=None):
r"""
Returns the elliptic modulus `k`, given any of
`q, m, k, \tau, \bar{q}`::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> kfrom(k=0.25)
0.25
>>> kfrom(m=mfrom(k=0.25))
0.25
>>> kfrom(q=qfrom(k=0.25))
0.25
>>> kfrom(tau=taufrom(k=0.25))
(0.25 + 0.0j)
>>> kfrom(qbar=qbarfrom(k=0.25))
0.25
As `q \to 1` and `q \to -1`, `k` rapidly approaches
`1` and `i \infty` respectively::
>>> kfrom(q=0.75)
0.9999999999999899166471767
>>> kfrom(q=-0.75)
(0.0 + 7041781.096692038332790615j)
>>> kfrom(q=1)
1
>>> kfrom(q=-1)
(0.0 + +infj)
"""
if k is not None:
return ctx.convert(k)
if m is not None:
return ctx.sqrt(m)
if tau is not None:
q = ctx.expjpi(tau)
if qbar is not None:
q = ctx.sqrt(qbar)
if q == 1:
return q
if q == -1:
return ctx.mpc(0,'inf')
return (ctx.jtheta(2,0,q)/ctx.jtheta(3,0,q))**2
@defun_wrapped
def mfrom(ctx, q=None, m=None, k=None, tau=None, qbar=None):
r"""
Returns the elliptic parameter `m`, given any of
`q, m, k, \tau, \bar{q}`::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> mfrom(m=0.25)
0.25
>>> mfrom(q=qfrom(m=0.25))
0.25
>>> mfrom(k=kfrom(m=0.25))
0.25
>>> mfrom(tau=taufrom(m=0.25))
(0.25 + 0.0j)
>>> mfrom(qbar=qbarfrom(m=0.25))
0.25
As `q \to 1` and `q \to -1`, `m` rapidly approaches
`1` and `-\infty` respectively::
>>> mfrom(q=0.75)
0.9999999999999798332943533
>>> mfrom(q=-0.75)
-49586681013729.32611558353
>>> mfrom(q=1)
1.0
>>> mfrom(q=-1)
-inf
The inverse nome as a function of `q` has an integer
Taylor series expansion::
>>> taylor(lambda q: mfrom(q), 0, 7)
[0.0, 16.0, -128.0, 704.0, -3072.0, 11488.0, -38400.0, 117632.0]
"""
if m is not None:
return m
if k is not None:
return k**2
if tau is not None:
q = ctx.expjpi(tau)
if qbar is not None:
q = ctx.sqrt(qbar)
if q == 1:
return ctx.convert(q)
if q == -1:
return q*ctx.inf
v = (ctx.jtheta(2,0,q)/ctx.jtheta(3,0,q))**4
if ctx._is_real_type(q) and q < 0:
v = v.real
return v
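# For each Jacobi function: theta indices of the constant prefactor
# (numerator, denominator), theta indices applied to the reduced argument t
# (numerator, denominator), and the degenerate limits at q=0 (circular
# functions) and q=1 (hyperbolic functions).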
jacobi_spec = {
'sn' : ([3],[2],[1],[4], 'sin', 'tanh'),
'cn' : ([4],[2],[2],[4], 'cos', 'sech'),
'dn' : ([4],[3],[3],[4], '1', 'sech'),
'ns' : ([2],[3],[4],[1], 'csc', 'coth'),
'nc' : ([2],[4],[4],[2], 'sec', 'cosh'),
'nd' : ([3],[4],[4],[3], '1', 'cosh'),
'sc' : ([3],[4],[1],[2], 'tan', 'sinh'),
'sd' : ([3,3],[2,4],[1],[3], 'sin', 'sinh'),
'cd' : ([3],[2],[2],[3], 'cos', '1'),
'cs' : ([4],[3],[2],[1], 'cot', 'csch'),
'dc' : ([2],[3],[3],[2], 'sec', '1'),
'ds' : ([2,4],[3,3],[3],[1], 'csc', 'csch'),
'cc' : None,
'ss' : None,
'nn' : None,
'dd' : None
}
@defun
def ellipfun(ctx, kind, u=None, m=None, q=None, k=None, tau=None):
try:
S = jacobi_spec[kind]
except KeyError:
raise ValueError("First argument must be a two-character string "
"containing 's', 'c', 'd' or 'n', e.g.: 'sn'")
if u is None:
def f(*args, **kwargs):
return ctx.ellipfun(kind, *args, **kwargs)
f.__name__ = kind
return f
prec = ctx.prec
try:
ctx.prec += 10
u = ctx.convert(u)
q = ctx.qfrom(m=m, q=q, k=k, tau=tau)
if S is None:
v = ctx.one + 0*q*u
elif q == ctx.zero:
if S[4] == '1': v = ctx.one
else: v = getattr(ctx, S[4])(u)
v += 0*q*u
elif q == ctx.one:
if S[5] == '1': v = ctx.one
else: v = getattr(ctx, S[5])(u)
v += 0*q*u
else:
t = u / ctx.jtheta(3, 0, q)**2
v = ctx.one
for a in S[0]: v *= ctx.jtheta(a, 0, q)
for b in S[1]: v /= ctx.jtheta(b, 0, q)
for c in S[2]: v *= ctx.jtheta(c, t, q)
for d in S[3]: v /= ctx.jtheta(d, t, q)
finally:
ctx.prec = prec
return +v
@defun_wrapped
def kleinj(ctx, tau=None, **kwargs):
r"""
Evaluates the Klein j-invariant, which is a modular function defined for
`\tau` in the upper half-plane as
.. math ::
J(\tau) = \frac{g_2^3(\tau)}{g_2^3(\tau) - 27 g_3^2(\tau)}
where `g_2` and `g_3` are the modular invariants of the Weierstrass
elliptic function,
.. math ::
g_2(\tau) = 60 \sum_{(m,n) \in \mathbb{Z}^2 \setminus (0,0)} (m \tau+n)^{-4}
g_3(\tau) = 140 \sum_{(m,n) \in \mathbb{Z}^2 \setminus (0,0)} (m \tau+n)^{-6}.
An alternative, common notation is that of the j-function
`j(\tau) = 1728 J(\tau)`.
**Plots**
.. literalinclude :: /plots/kleinj.py
.. image :: /plots/kleinj.png
.. literalinclude :: /plots/kleinj2.py
.. image :: /plots/kleinj2.png
**Examples**
Verifying the functional equation `J(\tau) = J(\tau+1) = J(-\tau^{-1})`::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> tau = 0.625+0.75*j
>>> tau = 0.625+0.75*j
>>> kleinj(tau)
(-0.1507492166511182267125242 + 0.07595948379084571927228948j)
>>> kleinj(tau+1)
(-0.1507492166511182267125242 + 0.07595948379084571927228948j)
>>> kleinj(-1/tau)
(-0.1507492166511182267125242 + 0.07595948379084571927228946j)
The j-function has a famous Laurent series expansion in terms of the nome
`\bar{q}`, `j(\tau) = \bar{q}^{-1} + 744 + 196884\bar{q} + \ldots`::
>>> mp.dps = 15
>>> taylor(lambda q: 1728*q*kleinj(qbar=q), 0, 5, singular=True)
[1.0, 744.0, 196884.0, 21493760.0, 864299970.0, 20245856256.0]
The j-function admits exact evaluation at special algebraic points
related to the Heegner numbers 1, 2, 3, 7, 11, 19, 43, 67, 163::
>>> @extraprec(10)
... def h(n):
... v = (1+sqrt(n)*j)
... if n > 2:
... v *= 0.5
... return v
...
>>> mp.dps = 25
>>> for n in [1,2,3,7,11,19,43,67,163]:
... n, chop(1728*kleinj(h(n)))
...
(1, 1728.0)
(2, 8000.0)
(3, 0.0)
(7, -3375.0)
(11, -32768.0)
(19, -884736.0)
(43, -884736000.0)
(67, -147197952000.0)
(163, -262537412640768000.0)
Also at other special points, the j-function assumes explicit
algebraic values, e.g.::
>>> chop(1728*kleinj(j*sqrt(5)))
1264538.909475140509320227
>>> identify(cbrt(_)) # note: not simplified
'((100+sqrt(13520))/2)'
>>> (50+26*sqrt(5))**3
1264538.909475140509320227
"""
q = ctx.qfrom(tau=tau, **kwargs)
t2 = ctx.jtheta(2,0,q)
t3 = ctx.jtheta(3,0,q)
t4 = ctx.jtheta(4,0,q)
P = (t2**8 + t3**8 + t4**8)**3
Q = 54*(t2*t3*t4)**8
return P/Q
def RF_calc(ctx, x, y, z, r):
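    # Carlson's duplication algorithm: repeatedly replace the arguments by
    # (arg + lam)/4 with lam = sqrt(x*y) + sqrt(x*z) + sqrt(y*z) until they
    # converge towards their mean, then finish with a fifth-order Taylor
    # expansion about that mean.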
if y == z: return RC_calc(ctx, x, y, r)
if x == z: return RC_calc(ctx, y, x, r)
if x == y: return RC_calc(ctx, z, x, r)
if not (ctx.isnormal(x) and ctx.isnormal(y) and ctx.isnormal(z)):
if ctx.isnan(x) or ctx.isnan(y) or ctx.isnan(z):
return x*y*z
if ctx.isinf(x) or ctx.isinf(y) or ctx.isinf(z):
return ctx.zero
xm,ym,zm = x,y,z
A0 = Am = (x+y+z)/3
Q = ctx.root(3*r, -6) * max(abs(A0-x),abs(A0-y),abs(A0-z))
g = ctx.mpf(0.25)
pow4 = ctx.one
m = 0
while 1:
xs = ctx.sqrt(xm)
ys = ctx.sqrt(ym)
zs = ctx.sqrt(zm)
lm = xs*ys + xs*zs + ys*zs
Am1 = (Am+lm)*g
xm, ym, zm = (xm+lm)*g, (ym+lm)*g, (zm+lm)*g
if pow4 * Q < abs(Am):
break
Am = Am1
m += 1
pow4 *= g
t = pow4/Am
X = (A0-x)*t
Y = (A0-y)*t
Z = -X-Y
E2 = X*Y-Z**2
E3 = X*Y*Z
return ctx.power(Am,-0.5) * (9240-924*E2+385*E2**2+660*E3-630*E2*E3)/9240
def RC_calc(ctx, x, y, r, pv=True):
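    # Degenerate case R_C(x,y) = R_F(x,y,y), evaluated in closed form via
    # acos/acosh; for negative real y the Cauchy principal value is taken
    # when pv=True.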
if not (ctx.isnormal(x) and ctx.isnormal(y)):
if ctx.isinf(x) or ctx.isinf(y):
return 1/(x*y)
if y == 0:
return ctx.inf
if x == 0:
return ctx.pi / ctx.sqrt(y) / 2
raise ValueError
# Cauchy principal value
if pv and ctx._im(y) == 0 and ctx._re(y) < 0:
return ctx.sqrt(x/(x-y)) * RC_calc(ctx, x-y, -y, r)
if x == y:
return 1/ctx.sqrt(x)
extraprec = 2*max(0,-ctx.mag(x-y)+ctx.mag(x))
ctx.prec += extraprec
if ctx._is_real_type(x) and ctx._is_real_type(y):
x = ctx._re(x)
y = ctx._re(y)
a = ctx.sqrt(x/y)
if x < y:
b = ctx.sqrt(y-x)
v = ctx.acos(a)/b
else:
b = ctx.sqrt(x-y)
v = ctx.acosh(a)/b
else:
sx = ctx.sqrt(x)
sy = ctx.sqrt(y)
v = ctx.acos(sx/sy)/(ctx.sqrt((1-x/y))*sy)
ctx.prec -= extraprec
return v
def RJ_calc(ctx, x, y, z, p, r):
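    # Same duplication scheme as RF_calc, with an R_C correction term for the
    # fourth argument p accumulated in S at every iteration.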
if not (ctx.isnormal(x) and ctx.isnormal(y) and \
ctx.isnormal(z) and ctx.isnormal(p)):
if ctx.isnan(x) or ctx.isnan(y) or ctx.isnan(z) or ctx.isnan(p):
return x*y*z
if ctx.isinf(x) or ctx.isinf(y) or ctx.isinf(z) or ctx.isinf(p):
return ctx.zero
if not p:
return ctx.inf
xm,ym,zm,pm = x,y,z,p
A0 = Am = (x + y + z + 2*p)/5
delta = (p-x)*(p-y)*(p-z)
Q = ctx.root(0.25*r, -6) * max(abs(A0-x),abs(A0-y),abs(A0-z),abs(A0-p))
m = 0
g = ctx.mpf(0.25)
pow4 = ctx.one
S = 0
while 1:
sx = ctx.sqrt(xm)
sy = ctx.sqrt(ym)
sz = ctx.sqrt(zm)
sp = ctx.sqrt(pm)
lm = sx*sy + sx*sz + sy*sz
Am1 = (Am+lm)*g
xm = (xm+lm)*g; ym = (ym+lm)*g; zm = (zm+lm)*g; pm = (pm+lm)*g
dm = (sp+sx) * (sp+sy) * (sp+sz)
em = delta * ctx.power(4, -3*m) / dm**2
if pow4 * Q < abs(Am):
break
T = RC_calc(ctx, ctx.one, ctx.one+em, r) * pow4 / dm
S += T
pow4 *= g
m += 1
Am = Am1
t = ctx.ldexp(1,-2*m) / Am
X = (A0-x)*t
Y = (A0-y)*t
Z = (A0-z)*t
P = (-X-Y-Z)/2
E2 = X*Y + X*Z + Y*Z - 3*P**2
E3 = X*Y*Z + 2*E2*P + 4*P**3
E4 = (2*X*Y*Z + E2*P + 3*P**3)*P
E5 = X*Y*Z*P**2
P = 24024 - 5148*E2 + 2457*E2**2 + 4004*E3 - 4158*E2*E3 - 3276*E4 + 2772*E5
Q = 24024
v1 = g**m * ctx.power(Am, -1.5) * P/Q
v2 = 6*S
return v1 + v2
@defun
def elliprf(ctx, x, y, z):
r"""
Evaluates the Carlson symmetric elliptic integral of the first kind
.. math ::
R_F(x,y,z) = \frac{1}{2}
\int_0^{\infty} \frac{dt}{\sqrt{(t+x)(t+y)(t+z)}}
which is defined for `x,y,z \notin (-\infty,0)`, and with
at most one of `x,y,z` being zero.
For real `x,y,z \ge 0`, the principal square root is taken in the integrand.
For complex `x,y,z`, the principal square root is taken as `t \to \infty`
and as `t \to 0` non-principal branches are chosen as necessary so as to
make the integrand continuous.
**Examples**
Some basic values and limits::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> elliprf(0,1,1); pi/2
1.570796326794896619231322
1.570796326794896619231322
>>> elliprf(0,1,inf)
0.0
>>> elliprf(1,1,1)
1.0
>>> elliprf(2,2,2)**2
0.5
>>> elliprf(1,0,0); elliprf(0,0,1); elliprf(0,1,0); elliprf(0,0,0)
+inf
+inf
+inf
+inf
Representing complete elliptic integrals in terms of `R_F`::
>>> m = mpf(0.75)
>>> ellipk(m); elliprf(0,1-m,1)
2.156515647499643235438675
2.156515647499643235438675
>>> ellipe(m); elliprf(0,1-m,1)-m*elliprd(0,1-m,1)/3
1.211056027568459524803563
1.211056027568459524803563
Some symmetries and argument transformations::
>>> x,y,z = 2,3,4
>>> elliprf(x,y,z); elliprf(y,x,z); elliprf(z,y,x)
0.5840828416771517066928492
0.5840828416771517066928492
0.5840828416771517066928492
>>> k = mpf(100000)
>>> elliprf(k*x,k*y,k*z); k**(-0.5) * elliprf(x,y,z)
0.001847032121923321253219284
0.001847032121923321253219284
>>> l = sqrt(x*y) + sqrt(y*z) + sqrt(z*x)
>>> elliprf(x,y,z); 2*elliprf(x+l,y+l,z+l)
0.5840828416771517066928492
0.5840828416771517066928492
>>> elliprf((x+l)/4,(y+l)/4,(z+l)/4)
0.5840828416771517066928492
Comparing with numerical integration::
>>> x,y,z = 2,3,4
>>> elliprf(x,y,z)
0.5840828416771517066928492
>>> f = lambda t: 0.5*((t+x)*(t+y)*(t+z))**(-0.5)
>>> q = extradps(25)(quad)
>>> q(f, [0,inf])
0.5840828416771517066928492
With the following arguments, the square root in the integrand becomes
discontinuous at `t = 1/2` if the principal branch is used. To obtain
the right value, `-\sqrt{r}` must be taken instead of `\sqrt{r}`
on `t \in (0, 1/2)`::
>>> x,y,z = j-1,j,0
>>> elliprf(x,y,z)
(0.7961258658423391329305694 - 1.213856669836495986430094j)
>>> -q(f, [0,0.5]) + q(f, [0.5,inf])
(0.7961258658423391329305694 - 1.213856669836495986430094j)
The so-called *first lemniscate constant*, a transcendental number::
>>> elliprf(0,1,2)
1.31102877714605990523242
>>> extradps(25)(quad)(lambda t: 1/sqrt(1-t**4), [0,1])
1.31102877714605990523242
>>> gamma('1/4')**2/(4*sqrt(2*pi))
1.31102877714605990523242
**References**
1. [Carlson]_
2. [DLMF]_ Chapter 19. Elliptic Integrals
"""
x = ctx.convert(x)
y = ctx.convert(y)
z = ctx.convert(z)
prec = ctx.prec
try:
ctx.prec += 20
tol = ctx.eps * 2**10
v = RF_calc(ctx, x, y, z, tol)
finally:
ctx.prec = prec
return +v
@defun
def elliprc(ctx, x, y, pv=True):
r"""
Evaluates the degenerate Carlson symmetric elliptic integral
of the first kind
.. math ::
R_C(x,y) = R_F(x,y,y) =
\frac{1}{2} \int_0^{\infty} \frac{dt}{(t+y) \sqrt{(t+x)}}.
If `y \in (-\infty,0)`, either a value defined by continuity,
or with *pv=True* the Cauchy principal value, can be computed.
If `x \ge 0, y > 0`, the value can be expressed in terms of
elementary functions as
.. math ::
R_C(x,y) =
\begin{cases}
\dfrac{1}{\sqrt{y-x}}
\cos^{-1}\left(\sqrt{\dfrac{x}{y}}\right), & x < y \\
\dfrac{1}{\sqrt{y}}, & x = y \\
\dfrac{1}{\sqrt{x-y}}
\cosh^{-1}\left(\sqrt{\dfrac{x}{y}}\right), & x > y \\
\end{cases}.
**Examples**
Some special values and limits::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> elliprc(1,2)*4; elliprc(0,1)*2; +pi
3.141592653589793238462643
3.141592653589793238462643
3.141592653589793238462643
>>> elliprc(1,0)
+inf
>>> elliprc(5,5)**2
0.2
>>> elliprc(1,inf); elliprc(inf,1); elliprc(inf,inf)
0.0
0.0
0.0
Comparing with the elementary closed-form solution::
>>> elliprc('1/3', '1/5'); sqrt(7.5)*acosh(sqrt('5/3'))
2.041630778983498390751238
2.041630778983498390751238
>>> elliprc('1/5', '1/3'); sqrt(7.5)*acos(sqrt('3/5'))
1.875180765206547065111085
1.875180765206547065111085
Comparing with numerical integration::
>>> q = extradps(25)(quad)
>>> elliprc(2, -3, pv=True)
0.3333969101113672670749334
>>> elliprc(2, -3, pv=False)
(0.3333969101113672670749334 + 0.7024814731040726393156375j)
>>> 0.5*q(lambda t: 1/(sqrt(t+2)*(t-3)), [0,3-j,6,inf])
(0.3333969101113672670749334 + 0.7024814731040726393156375j)
"""
x = ctx.convert(x)
y = ctx.convert(y)
prec = ctx.prec
try:
ctx.prec += 20
tol = ctx.eps * 2**10
v = RC_calc(ctx, x, y, tol, pv)
finally:
ctx.prec = prec
return +v
@defun
def elliprj(ctx, x, y, z, p):
r"""
Evaluates the Carlson symmetric elliptic integral of the third kind
.. math ::
R_J(x,y,z,p) = \frac{3}{2}
\int_0^{\infty} \frac{dt}{(t+p)\sqrt{(t+x)(t+y)(t+z)}}.
Like :func:`~mpmath.elliprf`, the branch of the square root in the integrand
is defined so as to be continuous along the path of integration for
complex values of the arguments.
**Examples**
Some values and limits::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> elliprj(1,1,1,1)
1.0
>>> elliprj(2,2,2,2); 1/(2*sqrt(2))
0.3535533905932737622004222
0.3535533905932737622004222
>>> elliprj(0,1,2,2)
1.067937989667395702268688
>>> 3*(2*gamma('5/4')**2-pi**2/gamma('1/4')**2)/(sqrt(2*pi))
1.067937989667395702268688
>>> elliprj(0,1,1,2); 3*pi*(2-sqrt(2))/4
1.380226776765915172432054
1.380226776765915172432054
>>> elliprj(1,3,2,0); elliprj(0,1,1,0); elliprj(0,0,0,0)
+inf
+inf
+inf
>>> elliprj(1,inf,1,0); elliprj(1,1,1,inf)
0.0
0.0
>>> chop(elliprj(1+j, 1-j, 1, 1))
0.8505007163686739432927844
Scale transformation::
>>> x,y,z,p = 2,3,4,5
>>> k = mpf(100000)
>>> elliprj(k*x,k*y,k*z,k*p); k**(-1.5)*elliprj(x,y,z,p)
4.521291677592745527851168e-9
4.521291677592745527851168e-9
Comparing with numerical integration::
>>> elliprj(1,2,3,4)
0.2398480997495677621758617
>>> f = lambda t: 1/((t+4)*sqrt((t+1)*(t+2)*(t+3)))
>>> 1.5*quad(f, [0,inf])
0.2398480997495677621758617
>>> elliprj(1,2+1j,3,4-2j)
(0.216888906014633498739952 + 0.04081912627366673332369512j)
>>> f = lambda t: 1/((t+4-2j)*sqrt((t+1)*(t+2+1j)*(t+3)))
>>> 1.5*quad(f, [0,inf])
(0.216888906014633498739952 + 0.04081912627366673332369511j)
"""
x = ctx.convert(x)
y = ctx.convert(y)
z = ctx.convert(z)
p = ctx.convert(p)
prec = ctx.prec
try:
ctx.prec += 20
tol = ctx.eps * 2**10
v = RJ_calc(ctx, x, y, z, p, tol)
finally:
ctx.prec = prec
return +v
@defun
def elliprd(ctx, x, y, z):
r"""
Evaluates the degenerate Carlson symmetric elliptic integral
of the third kind or Carlson elliptic integral of the
second kind `R_D(x,y,z) = R_J(x,y,z,z)`.
See :func:`~mpmath.elliprj` for additional information.
**Examples**
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> elliprd(1,2,3)
0.2904602810289906442326534
>>> elliprj(1,2,3,3)
0.2904602810289906442326534
The so-called *second lemniscate constant*, a transcendental number::
>>> elliprd(0,2,1)/3
0.5990701173677961037199612
>>> extradps(25)(quad)(lambda t: t**2/sqrt(1-t**4), [0,1])
0.5990701173677961037199612
>>> gamma('3/4')**2/sqrt(2*pi)
0.5990701173677961037199612
"""
return ctx.elliprj(x,y,z,z)
@defun
def elliprg(ctx, x, y, z):
r"""
Evaluates the Carlson completely symmetric elliptic integral
of the second kind
.. math ::
R_G(x,y,z) = \frac{1}{4} \int_0^{\infty}
\frac{t}{\sqrt{(t+x)(t+y)(t+z)}}
\left( \frac{x}{t+x} + \frac{y}{t+y} + \frac{z}{t+z}\right) dt.
**Examples**
Evaluation for real and complex arguments::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> elliprg(0,1,1)*4; +pi
3.141592653589793238462643
3.141592653589793238462643
>>> elliprg(0,0.5,1)
0.6753219405238377512600874
>>> chop(elliprg(1+j, 1-j, 2))
1.172431327676416604532822
A double integral that can be evaluated in terms of `R_G`::
>>> x,y,z = 2,3,4
>>> def f(t,u):
... st = fp.sin(t); ct = fp.cos(t)
... su = fp.sin(u); cu = fp.cos(u)
... return (x*(st*cu)**2 + y*(st*su)**2 + z*ct**2)**0.5 * st
...
>>> nprint(mpf(fp.quad(f, [0,fp.pi], [0,2*fp.pi])/(4*fp.pi)), 13)
1.725503028069
>>> nprint(elliprg(x,y,z), 13)
1.725503028069
"""
x = ctx.convert(x)
y = ctx.convert(y)
z = ctx.convert(z)
if not z: x, z = z, x
if not z: y, z = x, y
if not z: return ctx.inf
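    # Uses 2*R_G(x,y,z) = z*R_F(x,y,z) - (x-z)*(y-z)*R_D(x,y,z)/3 + sqrt(x*y/z);
    # the arguments were rotated above so that z is nonzero.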
def terms():
T1 = 0.5*z*ctx.elliprf(x,y,z)
T2 = -0.5*(x-z)*(y-z)*ctx.elliprd(x,y,z)/3
T3 = 0.5*ctx.sqrt(x*y/z)
return T1,T2,T3
return ctx.sum_accurately(terms)
@defun_wrapped
def ellipf(ctx, phi, m):
r"""
Evaluates the Legendre incomplete elliptic integral of the first kind
.. math ::
F(\phi,m) = \int_0^{\phi} \frac{dt}{\sqrt{1-m \sin^2 t}}
or equivalently
.. math ::
F(\phi,m) = \int_0^{\sin \phi}
\frac{dt}{\left(\sqrt{1-t^2}\right)\left(\sqrt{1-mt^2}\right)}.
The function reduces to a complete elliptic integral of the first kind
(see :func:`~mpmath.ellipk`) when `\phi = \frac{\pi}{2}`; that is,
.. math ::
F\left(\frac{\pi}{2}, m\right) = K(m).
In the defining integral, it is assumed that the principal branch
of the square root is taken and that the path of integration avoids
crossing any branch cuts. Outside `-\pi/2 \le \Re(\phi) \le \pi/2`,
the function extends quasi-periodically as
.. math ::
F(\phi + n \pi, m) = 2 n K(m) + F(\phi,m), n \in \mathbb{Z}.
**Plots**
.. literalinclude :: /plots/ellipf.py
.. image :: /plots/ellipf.png
**Examples**
Basic values and limits::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> ellipf(0,1)
0.0
>>> ellipf(0,0)
0.0
>>> ellipf(1,0); ellipf(2+3j,0)
1.0
(2.0 + 3.0j)
>>> ellipf(1,1); log(sec(1)+tan(1))
1.226191170883517070813061
1.226191170883517070813061
>>> ellipf(pi/2, -0.5); ellipk(-0.5)
1.415737208425956198892166
1.415737208425956198892166
>>> ellipf(pi/2+eps, 1); ellipf(-pi/2-eps, 1)
+inf
+inf
>>> ellipf(1.5, 1)
3.340677542798311003320813
Comparing with numerical integration::
>>> z,m = 0.5, 1.25
>>> ellipf(z,m)
0.5287219202206327872978255
>>> quad(lambda t: (1-m*sin(t)**2)**(-0.5), [0,z])
0.5287219202206327872978255
The arguments may be complex numbers::
>>> ellipf(3j, 0.5)
(0.0 + 1.713602407841590234804143j)
>>> ellipf(3+4j, 5-6j)
(1.269131241950351323305741 - 0.3561052815014558335412538j)
>>> z,m = 2+3j, 1.25
>>> k = 1011
>>> ellipf(z+pi*k,m); ellipf(z,m) + 2*k*ellipk(m)
(4086.184383622179764082821 - 3003.003538923749396546871j)
(4086.184383622179764082821 - 3003.003538923749396546871j)
For `|\Re(z)| < \pi/2`, the function can be expressed as a
hypergeometric series of two variables
(see :func:`~mpmath.appellf1`)::
>>> z,m = 0.5, 0.25
>>> ellipf(z,m)
0.5050887275786480788831083
>>> sin(z)*appellf1(0.5,0.5,0.5,1.5,sin(z)**2,m*sin(z)**2)
0.5050887275786480788831083
"""
z = phi
if not (ctx.isnormal(z) and ctx.isnormal(m)):
if m == 0:
return z + m
if z == 0:
return z * m
if m == ctx.inf or m == ctx.ninf: return z/m
raise ValueError
x = z.real
ctx.prec += max(0, ctx.mag(x))
pi = +ctx.pi
away = abs(x) > pi/2
if m == 1:
if away:
return ctx.inf
if away:
d = ctx.nint(x/pi)
z = z-pi*d
P = 2*d*ctx.ellipk(m)
else:
P = 0
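    # Legendre reduction: F(phi,m) = sin(phi)*R_F(cos(phi)^2, 1-m*sin(phi)^2, 1),
    # plus 2*d*K(m) for the d multiples of pi removed from phi above.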
c, s = ctx.cos_sin(z)
return s * ctx.elliprf(c**2, 1-m*s**2, 1) + P
@defun_wrapped
def ellipe(ctx, *args):
r"""
Called with a single argument `m`, evaluates the Legendre complete
elliptic integral of the second kind, `E(m)`, defined by
.. math :: E(m) = \int_0^{\pi/2} \sqrt{1-m \sin^2 t} \, dt \,=\,
\frac{\pi}{2}
\,_2F_1\left(\frac{1}{2}, -\frac{1}{2}, 1, m\right).
Called with two arguments `\phi, m`, evaluates the incomplete elliptic
integral of the second kind
.. math ::
E(\phi,m) = \int_0^{\phi} \sqrt{1-m \sin^2 t} \, dt =
\int_0^{\sin z}
\frac{\sqrt{1-mt^2}}{\sqrt{1-t^2}} \, dt.
The incomplete integral reduces to a complete integral when
`\phi = \frac{\pi}{2}`; that is,
.. math ::
E\left(\frac{\pi}{2}, m\right) = E(m).
In the defining integral, it is assumed that the principal branch
of the square root is taken and that the path of integration avoids
crossing any branch cuts. Outside `-\pi/2 \le \Re(z) \le \pi/2`,
the function extends quasi-periodically as
.. math ::
E(\phi + n \pi, m) = 2 n E(m) + F(\phi,m), n \in \mathbb{Z}.
**Plots**
.. literalinclude :: /plots/ellipe.py
.. image :: /plots/ellipe.png
**Examples for the complete integral**
Basic values and limits::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> ellipe(0)
1.570796326794896619231322
>>> ellipe(1)
1.0
>>> ellipe(-1)
1.910098894513856008952381
>>> ellipe(2)
(0.5990701173677961037199612 + 0.5990701173677961037199612j)
>>> ellipe(inf)
(0.0 + +infj)
>>> ellipe(-inf)
+inf
Verifying the defining integral and hypergeometric
representation::
>>> ellipe(0.5)
1.350643881047675502520175
>>> quad(lambda t: sqrt(1-0.5*sin(t)**2), [0, pi/2])
1.350643881047675502520175
>>> pi/2*hyp2f1(0.5,-0.5,1,0.5)
1.350643881047675502520175
Evaluation is supported for arbitrary complex `m`::
>>> ellipe(0.5+0.25j)
(1.360868682163129682716687 - 0.1238733442561786843557315j)
>>> ellipe(3+4j)
(1.499553520933346954333612 - 1.577879007912758274533309j)
A definite integral::
>>> quad(ellipe, [0,1])
1.333333333333333333333333
**Examples for the incomplete integral**
Basic values and limits::
>>> ellipe(0,1)
0.0
>>> ellipe(0,0)
0.0
>>> ellipe(1,0)
1.0
>>> ellipe(2+3j,0)
(2.0 + 3.0j)
>>> ellipe(1,1); sin(1)
0.8414709848078965066525023
0.8414709848078965066525023
>>> ellipe(pi/2, -0.5); ellipe(-0.5)
1.751771275694817862026502
1.751771275694817862026502
>>> ellipe(pi/2, 1); ellipe(-pi/2, 1)
1.0
-1.0
>>> ellipe(1.5, 1)
0.9974949866040544309417234
Comparing with numerical integration::
>>> z,m = 0.5, 1.25
>>> ellipe(z,m)
0.4740152182652628394264449
>>> quad(lambda t: sqrt(1-m*sin(t)**2), [0,z])
0.4740152182652628394264449
The arguments may be complex numbers::
>>> ellipe(3j, 0.5)
(0.0 + 7.551991234890371873502105j)
>>> ellipe(3+4j, 5-6j)
(24.15299022574220502424466 + 75.2503670480325997418156j)
>>> k = 35
>>> z,m = 2+3j, 1.25
>>> ellipe(z+pi*k,m); ellipe(z,m) + 2*k*ellipe(m)
(48.30138799412005235090766 + 17.47255216721987688224357j)
(48.30138799412005235090766 + 17.47255216721987688224357j)
For `|\Re(z)| < \pi/2`, the function can be expressed as a
hypergeometric series of two variables
(see :func:`~mpmath.appellf1`)::
>>> z,m = 0.5, 0.25
>>> ellipe(z,m)
0.4950017030164151928870375
>>> sin(z)*appellf1(0.5,0.5,-0.5,1.5,sin(z)**2,m*sin(z)**2)
0.4950017030164151928870376
"""
if len(args) == 1:
return ctx._ellipe(args[0])
else:
phi, m = args
z = phi
if not (ctx.isnormal(z) and ctx.isnormal(m)):
if m == 0:
return z + m
if z == 0:
return z * m
if m == ctx.inf or m == ctx.ninf:
return ctx.inf
raise ValueError
x = z.real
ctx.prec += max(0, ctx.mag(x))
pi = +ctx.pi
away = abs(x) > pi/2
if away:
d = ctx.nint(x/pi)
z = z-pi*d
P = 2*d*ctx.ellipe(m)
else:
P = 0
def terms():
c, s = ctx.cos_sin(z)
x = c**2
y = 1-m*s**2
RF = ctx.elliprf(x, y, 1)
RD = ctx.elliprd(x, y, 1)
return s*RF, -m*s**3*RD/3
return ctx.sum_accurately(terms) + P
@defun_wrapped
def ellippi(ctx, *args):
r"""
Called with three arguments `n, \phi, m`, evaluates the Legendre
incomplete elliptic integral of the third kind
.. math ::
\Pi(n; \phi, m) = \int_0^{\phi}
\frac{dt}{(1-n \sin^2 t) \sqrt{1-m \sin^2 t}} =
\int_0^{\sin \phi}
\frac{dt}{(1-nt^2) \sqrt{1-t^2} \sqrt{1-mt^2}}.
Called with two arguments `n, m`, evaluates the complete
elliptic integral of the third kind
`\Pi(n,m) = \Pi(n; \frac{\pi}{2},m)`.
In the defining integral, it is assumed that the principal branch
of the square root is taken and that the path of integration avoids
crossing any branch cuts. Outside `-\pi/2 \le \Re(\phi) \le \pi/2`,
the function extends quasi-periodically as
.. math ::
\Pi(n,\phi+k\pi,m) = 2k\Pi(n,m) + \Pi(n,\phi,m), k \in \mathbb{Z}.
**Plots**
.. literalinclude :: /plots/ellippi.py
.. image :: /plots/ellippi.png
**Examples for the complete integral**
Some basic values and limits::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> ellippi(0,-5); ellipk(-5)
0.9555039270640439337379334
0.9555039270640439337379334
>>> ellippi(inf,2)
0.0
>>> ellippi(2,inf)
0.0
>>> abs(ellippi(1,5))
+inf
>>> abs(ellippi(0.25,1))
+inf
Evaluation in terms of simpler functions::
>>> ellippi(0.25,0.25); ellipe(0.25)/(1-0.25)
1.956616279119236207279727
1.956616279119236207279727
>>> ellippi(3,0); pi/(2*sqrt(-2))
(0.0 - 1.11072073453959156175397j)
(0.0 - 1.11072073453959156175397j)
>>> ellippi(-3,0); pi/(2*sqrt(4))
0.7853981633974483096156609
0.7853981633974483096156609
**Examples for the incomplete integral**
Basic values and limits::
>>> ellippi(0.25,-0.5); ellippi(0.25,pi/2,-0.5)
1.622944760954741603710555
1.622944760954741603710555
>>> ellippi(1,0,1)
0.0
>>> ellippi(inf,0,1)
0.0
>>> ellippi(0,0.25,0.5); ellipf(0.25,0.5)
0.2513040086544925794134591
0.2513040086544925794134591
>>> ellippi(1,1,1); (log(sec(1)+tan(1))+sec(1)*tan(1))/2
2.054332933256248668692452
2.054332933256248668692452
>>> ellippi(0.25, 53*pi/2, 0.75); 53*ellippi(0.25,0.75)
135.240868757890840755058
135.240868757890840755058
>>> ellippi(0.5,pi/4,0.5); 2*ellipe(pi/4,0.5)-1/sqrt(3)
0.9190227391656969903987269
0.9190227391656969903987269
Complex arguments are supported::
>>> ellippi(0.5, 5+6j-2*pi, -7-8j)
(-0.3612856620076747660410167 + 0.5217735339984807829755815j)
Some degenerate cases::
>>> ellippi(1,1)
+inf
>>> ellippi(1,0)
+inf
>>> ellippi(1,2,0)
+inf
>>> ellippi(1,2,1)
+inf
>>> ellippi(1,0,1)
0.0
"""
if len(args) == 2:
n, m = args
complete = True
z = phi = ctx.pi/2
else:
n, phi, m = args
complete = False
z = phi
if not (ctx.isnormal(n) and ctx.isnormal(z) and ctx.isnormal(m)):
if ctx.isnan(n) or ctx.isnan(z) or ctx.isnan(m):
raise ValueError
if complete:
if m == 0:
if n == 1:
return ctx.inf
return ctx.pi/(2*ctx.sqrt(1-n))
if n == 0: return ctx.ellipk(m)
if ctx.isinf(n) or ctx.isinf(m): return ctx.zero
else:
if z == 0: return z
if ctx.isinf(n): return ctx.zero
if ctx.isinf(m): return ctx.zero
if ctx.isinf(n) or ctx.isinf(z) or ctx.isinf(m):
raise ValueError
if complete:
if m == 1:
if n == 1:
return ctx.inf
return -ctx.inf/ctx.sign(n-1)
away = False
else:
x = z.real
ctx.prec += max(0, ctx.mag(x))
pi = +ctx.pi
away = abs(x) > pi/2
if away:
d = ctx.nint(x/pi)
z = z-pi*d
P = 2*d*ctx.ellippi(n,m)
if ctx.isinf(P):
return ctx.inf
else:
P = 0
def terms():
if complete:
c, s = ctx.zero, ctx.one
else:
c, s = ctx.cos_sin(z)
x = c**2
y = 1-m*s**2
RF = ctx.elliprf(x, y, 1)
RJ = ctx.elliprj(x, y, 1, 1-n*s**2)
return s*RF, n*s**3*RJ/3
return ctx.sum_accurately(terms) + P
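# Illustrative sketch (not part of the library): checking the quasi-periodic
# extension Pi(n, phi + k*pi, m) = 2*k*Pi(n, m) + Pi(n, phi, m) stated in the
# docstring above, via mpmath's public interface.
def _ellippi_quasiperiod_check(n=0.25, phi=0.3, m=0.5, k=3):
    import mpmath
    lhs = mpmath.ellippi(n, phi + k*mpmath.pi, m)
    rhs = 2*k*mpmath.ellippi(n, m) + mpmath.ellippi(n, phi, m)
    return mpmath.almosteq(lhs, rhs)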
| mit |
VitalPet/odoo | addons/l10n_fr/report/compute_resultant_report.py | 50 | 1763 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2008 JAILLET Simon - CrysaLEAD - www.crysalead.fr
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs.
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company.
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
##############################################################################
import base_report
from openerp.report import report_sxw
class cdr(base_report.base_report):
def __init__(self, cr, uid, name, context):
super(cdr, self).__init__(cr, uid, name, context)
report_sxw.report_sxw('report.l10n.fr.compute_resultant', 'account.move.line','addons/l10n_fr/report/compute_resultant_report.rml', parser=cdr, header=False)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
abhisg/scikit-learn | sklearn/manifold/mds.py | 257 | 15138 | """
Multi-dimensional Scaling (MDS)
"""
# author: Nelle Varoquaux <[email protected]>
# Licence: BSD
import numpy as np
import warnings
from ..base import BaseEstimator
from ..metrics import euclidean_distances
from ..utils import check_random_state, check_array, check_symmetric
from ..externals.joblib import Parallel
from ..externals.joblib import delayed
from ..isotonic import IsotonicRegression
def _smacof_single(similarities, metric=True, n_components=2, init=None,
max_iter=300, verbose=0, eps=1e-3, random_state=None):
"""
Computes multidimensional scaling using SMACOF algorithm
Parameters
----------
similarities: symmetric ndarray, shape [n * n]
similarities between the points
metric: boolean, optional, default: True
compute metric or nonmetric SMACOF algorithm
n_components: int, optional, default: 2
        number of dimensions in which to immerse the similarities,
        overridden if an initial array is provided.
init: {None or ndarray}, optional
if None, randomly chooses the initial configuration
if ndarray, initialize the SMACOF algorithm with this array
max_iter: int, optional, default: 300
Maximum number of iterations of the SMACOF algorithm for a single run
verbose: int, optional, default: 0
level of verbosity
    eps: float, optional, default: 1e-3
        relative tolerance w.r.t. stress to declare convergence
random_state: integer or numpy.RandomState, optional
The generator used to initialize the centers. If an integer is
given, it fixes the seed. Defaults to the global numpy random
number generator.
Returns
-------
X: ndarray (n_samples, n_components), float
coordinates of the n_samples points in a n_components-space
stress_: float
The final value of the stress (sum of squared distance of the
disparities and the distances for all constrained points)
n_iter : int
Number of iterations run.
"""
similarities = check_symmetric(similarities, raise_exception=True)
n_samples = similarities.shape[0]
random_state = check_random_state(random_state)
sim_flat = ((1 - np.tri(n_samples)) * similarities).ravel()
sim_flat_w = sim_flat[sim_flat != 0]
if init is None:
# Randomly choose initial configuration
X = random_state.rand(n_samples * n_components)
X = X.reshape((n_samples, n_components))
else:
# overrides the parameter p
n_components = init.shape[1]
if n_samples != init.shape[0]:
raise ValueError("init matrix should be of shape (%d, %d)" %
(n_samples, n_components))
X = init
old_stress = None
ir = IsotonicRegression()
for it in range(max_iter):
# Compute distance and monotonic regression
dis = euclidean_distances(X)
if metric:
disparities = similarities
else:
dis_flat = dis.ravel()
# similarities with 0 are considered as missing values
dis_flat_w = dis_flat[sim_flat != 0]
# Compute the disparities using a monotonic regression
disparities_flat = ir.fit_transform(sim_flat_w, dis_flat_w)
disparities = dis_flat.copy()
disparities[sim_flat != 0] = disparities_flat
disparities = disparities.reshape((n_samples, n_samples))
disparities *= np.sqrt((n_samples * (n_samples - 1) / 2) /
(disparities ** 2).sum())
# Compute stress
stress = ((dis.ravel() - disparities.ravel()) ** 2).sum() / 2
# Update X using the Guttman transform
dis[dis == 0] = 1e-5
ratio = disparities / dis
B = - ratio
B[np.arange(len(B)), np.arange(len(B))] += ratio.sum(axis=1)
X = 1. / n_samples * np.dot(B, X)
dis = np.sqrt((X ** 2).sum(axis=1)).sum()
if verbose >= 2:
print('it: %d, stress %s' % (it, stress))
if old_stress is not None:
            if (old_stress - stress / dis) < eps:
if verbose:
print('breaking at iteration %d with stress %s' % (it,
stress))
break
old_stress = stress / dis
return X, stress, it + 1
def smacof(similarities, metric=True, n_components=2, init=None, n_init=8,
n_jobs=1, max_iter=300, verbose=0, eps=1e-3, random_state=None,
return_n_iter=False):
"""
Computes multidimensional scaling using SMACOF (Scaling by Majorizing a
Complicated Function) algorithm
The SMACOF algorithm is a multidimensional scaling algorithm: it minimizes
    an objective function, the *stress*, using a majorization technique. The
Stress Majorization, also known as the Guttman Transform, guarantees a
monotone convergence of Stress, and is more powerful than traditional
techniques such as gradient descent.
    The SMACOF algorithm for metric MDS can be summarized by the following steps:
1. Set an initial start configuration, randomly or not.
2. Compute the stress
3. Compute the Guttman Transform
4. Iterate 2 and 3 until convergence.
    The nonmetric algorithm adds a monotonic regression step before computing
the stress.
Parameters
----------
similarities : symmetric ndarray, shape (n_samples, n_samples)
similarities between the points
metric : boolean, optional, default: True
compute metric or nonmetric SMACOF algorithm
n_components : int, optional, default: 2
        number of dimensions in which to immerse the similarities,
        overridden if an initial array is provided.
init : {None or ndarray of shape (n_samples, n_components)}, optional
if None, randomly chooses the initial configuration
if ndarray, initialize the SMACOF algorithm with this array
n_init : int, optional, default: 8
Number of time the smacof algorithm will be run with different
initialisation. The final results will be the best output of the
n_init consecutive runs in terms of stress.
n_jobs : int, optional, default: 1
The number of jobs to use for the computation. This works by breaking
down the pairwise matrix into n_jobs even slices and computing them in
parallel.
If -1 all CPUs are used. If 1 is given, no parallel computing code is
used at all, which is useful for debugging. For n_jobs below -1,
(n_cpus + 1 + n_jobs) are used. Thus for n_jobs = -2, all CPUs but one
are used.
max_iter : int, optional, default: 300
Maximum number of iterations of the SMACOF algorithm for a single run
verbose : int, optional, default: 0
level of verbosity
    eps : float, optional, default: 1e-3
        relative tolerance w.r.t. stress to declare convergence
random_state : integer or numpy.RandomState, optional
The generator used to initialize the centers. If an integer is
given, it fixes the seed. Defaults to the global numpy random
number generator.
return_n_iter : bool
Whether or not to return the number of iterations.
Returns
-------
X : ndarray (n_samples,n_components)
Coordinates of the n_samples points in a n_components-space
stress : float
The final value of the stress (sum of squared distance of the
disparities and the distances for all constrained points)
n_iter : int
The number of iterations corresponding to the best stress.
Returned only if `return_n_iter` is set to True.
Notes
-----
"Modern Multidimensional Scaling - Theory and Applications" Borg, I.;
Groenen P. Springer Series in Statistics (1997)
"Nonmetric multidimensional scaling: a numerical method" Kruskal, J.
Psychometrika, 29 (1964)
"Multidimensional scaling by optimizing goodness of fit to a nonmetric
hypothesis" Kruskal, J. Psychometrika, 29, (1964)
"""
similarities = check_array(similarities)
random_state = check_random_state(random_state)
if hasattr(init, '__array__'):
init = np.asarray(init).copy()
if not n_init == 1:
warnings.warn(
'Explicit initial positions passed: '
'performing only one init of the MDS instead of %d'
% n_init)
n_init = 1
best_pos, best_stress = None, None
if n_jobs == 1:
for it in range(n_init):
pos, stress, n_iter_ = _smacof_single(
similarities, metric=metric,
n_components=n_components, init=init,
max_iter=max_iter, verbose=verbose,
eps=eps, random_state=random_state)
if best_stress is None or stress < best_stress:
best_stress = stress
best_pos = pos.copy()
best_iter = n_iter_
else:
seeds = random_state.randint(np.iinfo(np.int32).max, size=n_init)
results = Parallel(n_jobs=n_jobs, verbose=max(verbose - 1, 0))(
delayed(_smacof_single)(
similarities, metric=metric, n_components=n_components,
init=init, max_iter=max_iter, verbose=verbose, eps=eps,
random_state=seed)
for seed in seeds)
positions, stress, n_iters = zip(*results)
best = np.argmin(stress)
best_stress = stress[best]
best_pos = positions[best]
best_iter = n_iters[best]
if return_n_iter:
return best_pos, best_stress, best_iter
else:
return best_pos, best_stress
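# Minimal usage sketch (illustrative only, not part of the public module):
# embedding four points in the plane from their pairwise Euclidean distances
# with the module-level smacof() defined above.
def _smacof_usage_example():
    X = np.array([[0., 0.], [0., 1.], [1., 0.], [1., 1.]])
    dissimilarities = euclidean_distances(X)
    pos, stress = smacof(dissimilarities, n_components=2, random_state=0)
    return pos.shape, stress   # ((4, 2), final stress value)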
class MDS(BaseEstimator):
"""Multidimensional scaling
Read more in the :ref:`User Guide <multidimensional_scaling>`.
Parameters
----------
metric : boolean, optional, default: True
compute metric or nonmetric SMACOF (Scaling by Majorizing a
Complicated Function) algorithm
n_components : int, optional, default: 2
        number of dimensions in which to immerse the similarities,
        overridden if an initial array is provided.
n_init : int, optional, default: 4
Number of time the smacof algorithm will be run with different
initialisation. The final results will be the best output of the
n_init consecutive runs in terms of stress.
max_iter : int, optional, default: 300
Maximum number of iterations of the SMACOF algorithm for a single run
verbose : int, optional, default: 0
level of verbosity
    eps : float, optional, default: 1e-3
        relative tolerance w.r.t. stress to declare convergence
n_jobs : int, optional, default: 1
The number of jobs to use for the computation. This works by breaking
down the pairwise matrix into n_jobs even slices and computing them in
parallel.
If -1 all CPUs are used. If 1 is given, no parallel computing code is
used at all, which is useful for debugging. For n_jobs below -1,
(n_cpus + 1 + n_jobs) are used. Thus for n_jobs = -2, all CPUs but one
are used.
random_state : integer or numpy.RandomState, optional
The generator used to initialize the centers. If an integer is
given, it fixes the seed. Defaults to the global numpy random
number generator.
dissimilarity : string
Which dissimilarity measure to use.
Supported are 'euclidean' and 'precomputed'.
Attributes
----------
embedding_ : array-like, shape [n_components, n_samples]
Stores the position of the dataset in the embedding space
stress_ : float
The final value of the stress (sum of squared distance of the
disparities and the distances for all constrained points)
References
----------
"Modern Multidimensional Scaling - Theory and Applications" Borg, I.;
Groenen P. Springer Series in Statistics (1997)
"Nonmetric multidimensional scaling: a numerical method" Kruskal, J.
Psychometrika, 29 (1964)
"Multidimensional scaling by optimizing goodness of fit to a nonmetric
hypothesis" Kruskal, J. Psychometrika, 29, (1964)
"""
def __init__(self, n_components=2, metric=True, n_init=4,
max_iter=300, verbose=0, eps=1e-3, n_jobs=1,
random_state=None, dissimilarity="euclidean"):
self.n_components = n_components
self.dissimilarity = dissimilarity
self.metric = metric
self.n_init = n_init
self.max_iter = max_iter
self.eps = eps
self.verbose = verbose
self.n_jobs = n_jobs
self.random_state = random_state
@property
def _pairwise(self):
        return self.dissimilarity == "precomputed"
def fit(self, X, y=None, init=None):
"""
Computes the position of the points in the embedding space
Parameters
----------
X : array, shape=[n_samples, n_features], or [n_samples, n_samples] \
if dissimilarity='precomputed'
Input data.
init : {None or ndarray, shape (n_samples,)}, optional
If None, randomly chooses the initial configuration
if ndarray, initialize the SMACOF algorithm with this array.
"""
self.fit_transform(X, init=init)
return self
def fit_transform(self, X, y=None, init=None):
"""
Fit the data from X, and returns the embedded coordinates
Parameters
----------
X : array, shape=[n_samples, n_features], or [n_samples, n_samples] \
if dissimilarity='precomputed'
Input data.
init : {None or ndarray, shape (n_samples,)}, optional
If None, randomly chooses the initial configuration
if ndarray, initialize the SMACOF algorithm with this array.
"""
X = check_array(X)
if X.shape[0] == X.shape[1] and self.dissimilarity != "precomputed":
warnings.warn("The MDS API has changed. ``fit`` now constructs an"
" dissimilarity matrix from data. To use a custom "
"dissimilarity matrix, set "
"``dissimilarity='precomputed'``.")
if self.dissimilarity == "precomputed":
self.dissimilarity_matrix_ = X
elif self.dissimilarity == "euclidean":
self.dissimilarity_matrix_ = euclidean_distances(X)
else:
raise ValueError("Proximity must be 'precomputed' or 'euclidean'."
" Got %s instead" % str(self.dissimilarity))
self.embedding_, self.stress_, self.n_iter_ = smacof(
self.dissimilarity_matrix_, metric=self.metric,
n_components=self.n_components, init=init, n_init=self.n_init,
n_jobs=self.n_jobs, max_iter=self.max_iter, verbose=self.verbose,
eps=self.eps, random_state=self.random_state,
return_n_iter=True)
return self.embedding_
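# Minimal usage sketch (illustrative only) of the estimator defined above;
# with the default dissimilarity='euclidean' the distance matrix is computed
# from the raw coordinates inside fit_transform.
def _mds_usage_example():
    X = np.array([[0., 0.], [0., 1.], [1., 0.], [1., 1.]])
    embedding = MDS(n_components=2, random_state=0).fit_transform(X)
    return embedding.shape   # (4, 2)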
| bsd-3-clause |
jdahlin/pygobject | gi/overrides/Pango.py | 5 | 1970 | # -*- Mode: Python; py-indent-offset: 4 -*-
# vim: tabstop=4 shiftwidth=4 expandtab
#
# Copyright (C) 2010 Paolo Borelli <[email protected]>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301
# USA
from ..overrides import override
from ..importer import modules
Pango = modules['Pango']._introspection_module
__all__ = []
class Context(Pango.Context):
def get_metrics(self, desc, language=None):
return super(Context, self).get_metrics(desc, language)
Context = override(Context)
__all__.append('Context')
class FontDescription(Pango.FontDescription):
def __new__(cls, string=None):
if string is not None:
return Pango.font_description_from_string(string)
else:
return Pango.FontDescription.__new__(cls)
FontDescription = override(FontDescription)
__all__.append('FontDescription')
class Layout(Pango.Layout):
def __new__(cls, context):
return Pango.Layout.new(context)
def __init__(self, context, **kwds):
# simply discard 'context', since it was set by
# __new__ and it is not a PangoLayout property
super(Layout, self).__init__(**kwds)
def set_markup(self, text, length=-1):
super(Layout, self).set_markup(text, length)
Layout = override(Layout)
__all__.append('Layout')
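# Illustrative sketch (not part of the overrides): thanks to the
# FontDescription override above, constructing from a string is routed
# through Pango.font_description_from_string, so the two calls below are
# equivalent.
def _font_description_example():
    desc = FontDescription('Monospace 10')
    same = Pango.font_description_from_string('Monospace 10')
    return desc.to_string(), same.to_string()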
| lgpl-2.1 |
romulocollopy/rows | tests/tests_plugin_xlsx.py | 1 | 4320 | # coding: utf-8
# Copyright 2014-2015 Álvaro Justen <https://github.com/turicas/rows/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
import tempfile
import unittest
from io import BytesIO
import mock
import rows
import rows.plugins.xlsx
from . import utils
class PluginXlsxTestCase(utils.RowsTestMixIn, unittest.TestCase):
plugin_name = 'xlsx'
filename = 'tests/data/all-field-types.xlsx'
def test_imports(self):
self.assertIs(rows.import_from_xlsx, rows.plugins.xlsx.import_from_xlsx)
@mock.patch('rows.plugins.xlsx.create_table')
def test_import_from_xlsx_uses_create_table(self, mocked_create_table):
mocked_create_table.return_value = 42
kwargs = {'encoding': 'iso-8859-15', 'some_key': 123, 'other': 456, }
result = rows.import_from_xlsx(self.filename, **kwargs)
self.assertTrue(mocked_create_table.called)
self.assertEqual(mocked_create_table.call_count, 1)
self.assertEqual(result, 42)
call = mocked_create_table.call_args
kwargs['meta'] = {'imported_from': 'xlsx', 'filename': self.filename, }
self.assertEqual(call[1], kwargs)
@mock.patch('rows.plugins.xlsx.create_table')
def test_import_from_xlsx_retrieve_desired_data(self, mocked_create_table):
mocked_create_table.return_value = 42
# import using filename
table_1 = rows.import_from_xlsx(self.filename)
call_args = mocked_create_table.call_args_list[0]
self.assert_create_table_data(call_args)
# import using fobj
with open(self.filename, 'rb') as fobj:
table_2 = rows.import_from_xlsx(fobj)
call_args = mocked_create_table.call_args_list[1]
self.assert_create_table_data(call_args)
def test_export_to_xlsx_filename(self):
temp = tempfile.NamedTemporaryFile()
filename = temp.name + '.xlsx'
temp.close()
self.files_to_delete.append(filename)
rows.export_to_xlsx(utils.table, filename)
table = rows.import_from_xlsx(filename)
self.assert_table_equal(table, utils.table)
export_in_memory = rows.export_to_xlsx(utils.table, None)
result_fobj = BytesIO()
result_fobj.write(export_in_memory)
result_fobj.seek(0)
result_table = rows.import_from_xlsx(result_fobj)
self.assert_table_equal(result_table, utils.table)
def test_export_to_xlsx_fobj(self):
temp = tempfile.NamedTemporaryFile()
filename = temp.name + '.xlsx'
temp.close()
fobj = open(filename, 'wb')
self.files_to_delete.append(filename)
rows.export_to_xlsx(utils.table, fobj)
fobj.close()
table = rows.import_from_xlsx(filename)
self.assert_table_equal(table, utils.table)
@mock.patch('rows.plugins.xlsx.prepare_to_export')
def test_export_to_xlsx_uses_prepare_to_export(self,
mocked_prepare_to_export):
temp = tempfile.NamedTemporaryFile()
filename = temp.name + '.xlsx'
temp.file.close()
fobj = open(filename, 'wb')
self.files_to_delete.append(filename)
kwargs = {'test': 123, 'parameter': 3.14, }
mocked_prepare_to_export.return_value = \
iter([list(utils.table.fields.keys())])
rows.export_to_xlsx(utils.table, temp.name, **kwargs)
self.assertTrue(mocked_prepare_to_export.called)
self.assertEqual(mocked_prepare_to_export.call_count, 1)
call = mocked_prepare_to_export.call_args
self.assertEqual(call[0], (utils.table, ))
self.assertEqual(call[1], kwargs)
| gpl-3.0 |
HybridF5/tempest_debug | tempest/cmd/cleanup_service.py | 2 | 31506 | #!/usr/bin/env python
# Copyright 2015 Dell Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from tempest.common import credentials_factory as credentials
from tempest.common import identity
from tempest import config
from tempest import test
LOG = logging.getLogger(__name__)
CONF = config.CONF
CONF_FLAVORS = None
CONF_IMAGES = None
CONF_NETWORKS = []
CONF_PRIV_NETWORK_NAME = None
CONF_PUB_NETWORK = None
CONF_PUB_ROUTER = None
CONF_TENANTS = None
CONF_USERS = None
IS_AODH = None
IS_CINDER = None
IS_GLANCE = None
IS_HEAT = None
IS_NEUTRON = None
IS_NOVA = None
def init_conf():
global CONF_FLAVORS
global CONF_IMAGES
global CONF_NETWORKS
global CONF_PRIV_NETWORK
global CONF_PRIV_NETWORK_NAME
global CONF_PUB_NETWORK
global CONF_PUB_ROUTER
global CONF_TENANTS
global CONF_USERS
global IS_AODH
global IS_CINDER
global IS_GLANCE
global IS_HEAT
global IS_NEUTRON
global IS_NOVA
IS_AODH = CONF.service_available.aodh
IS_CINDER = CONF.service_available.cinder
IS_GLANCE = CONF.service_available.glance
IS_HEAT = CONF.service_available.heat
IS_NEUTRON = CONF.service_available.neutron
IS_NOVA = CONF.service_available.nova
CONF_FLAVORS = [CONF.compute.flavor_ref, CONF.compute.flavor_ref_alt]
CONF_IMAGES = [CONF.compute.image_ref, CONF.compute.image_ref_alt]
CONF_PRIV_NETWORK_NAME = CONF.compute.fixed_network_name
CONF_PUB_NETWORK = CONF.network.public_network_id
CONF_PUB_ROUTER = CONF.network.public_router_id
CONF_TENANTS = [CONF.auth.admin_project_name,
CONF.identity.project_name,
CONF.identity.alt_project_name]
CONF_USERS = [CONF.auth.admin_username, CONF.identity.username,
CONF.identity.alt_username]
if IS_NEUTRON:
CONF_PRIV_NETWORK = _get_network_id(CONF.compute.fixed_network_name,
CONF.auth.admin_project_name)
CONF_NETWORKS = [CONF_PUB_NETWORK, CONF_PRIV_NETWORK]
def _get_network_id(net_name, project_name):
am = credentials.AdminManager()
net_cl = am.networks_client
tn_cl = am.tenants_client
networks = net_cl.list_networks()
tenant = identity.get_tenant_by_name(tn_cl, project_name)
t_id = tenant['id']
n_id = None
for net in networks['networks']:
if (net['tenant_id'] == t_id and net['name'] == net_name):
n_id = net['id']
break
return n_id
class BaseService(object):
def __init__(self, kwargs):
self.client = None
for key, value in kwargs.items():
setattr(self, key, value)
self.tenant_filter = {}
if hasattr(self, 'tenant_id'):
self.tenant_filter['tenant_id'] = self.tenant_id
def _filter_by_tenant_id(self, item_list):
if (item_list is None
or len(item_list) == 0
or not hasattr(self, 'tenant_id')
or self.tenant_id is None
or 'tenant_id' not in item_list[0]):
return item_list
return [item for item in item_list
if item['tenant_id'] == self.tenant_id]
def list(self):
pass
def delete(self):
pass
def dry_run(self):
pass
def save_state(self):
pass
def run(self):
if self.is_dry_run:
self.dry_run()
elif self.is_save_state:
self.save_state()
else:
self.delete()
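# Illustrative sketch only: the minimal shape of a concrete service. The
# 'widgets' client and its list/delete methods are hypothetical -- the real
# services below (SnapshotService, ServerService, ...) follow this exact
# pattern with actual tempest clients.
class _ExampleWidgetService(BaseService):
    def __init__(self, manager, **kwargs):
        super(_ExampleWidgetService, self).__init__(kwargs)
        self.client = manager.widgets_client   # hypothetical client
    def list(self):
        return self.client.list_widgets()['widgets']   # hypothetical call
    def delete(self):
        for widget in self.list():
            try:
                self.client.delete_widget(widget['id'])   # hypothetical call
            except Exception:
                LOG.exception("Delete Widget exception.")
    def dry_run(self):
        self.data['widgets'] = self.list()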
class SnapshotService(BaseService):
def __init__(self, manager, **kwargs):
super(SnapshotService, self).__init__(kwargs)
self.client = manager.snapshots_client
def list(self):
client = self.client
snaps = client.list_snapshots()['snapshots']
LOG.debug("List count, %s Snapshots" % len(snaps))
return snaps
def delete(self):
snaps = self.list()
client = self.client
for snap in snaps:
try:
client.delete_snapshot(snap['id'])
except Exception:
LOG.exception("Delete Snapshot exception.")
def dry_run(self):
snaps = self.list()
self.data['snapshots'] = snaps
class ServerService(BaseService):
def __init__(self, manager, **kwargs):
super(ServerService, self).__init__(kwargs)
self.client = manager.servers_client
self.server_groups_client = manager.server_groups_client
def list(self):
client = self.client
servers_body = client.list_servers()
servers = servers_body['servers']
LOG.debug("List count, %s Servers" % len(servers))
return servers
def delete(self):
client = self.client
servers = self.list()
for server in servers:
try:
client.delete_server(server['id'])
except Exception:
LOG.exception("Delete Server exception.")
def dry_run(self):
servers = self.list()
self.data['servers'] = servers
class ServerGroupService(ServerService):
def list(self):
client = self.server_groups_client
sgs = client.list_server_groups()['server_groups']
LOG.debug("List count, %s Server Groups" % len(sgs))
return sgs
def delete(self):
        client = self.server_groups_client
sgs = self.list()
for sg in sgs:
try:
client.delete_server_group(sg['id'])
except Exception:
LOG.exception("Delete Server Group exception.")
def dry_run(self):
sgs = self.list()
self.data['server_groups'] = sgs
class StackService(BaseService):
def __init__(self, manager, **kwargs):
super(StackService, self).__init__(kwargs)
self.client = manager.orchestration_client
def list(self):
client = self.client
stacks = client.list_stacks()['stacks']
LOG.debug("List count, %s Stacks" % len(stacks))
return stacks
def delete(self):
client = self.client
stacks = self.list()
for stack in stacks:
try:
client.delete_stack(stack['id'])
except Exception:
LOG.exception("Delete Stack exception.")
def dry_run(self):
stacks = self.list()
self.data['stacks'] = stacks
class KeyPairService(BaseService):
def __init__(self, manager, **kwargs):
super(KeyPairService, self).__init__(kwargs)
self.client = manager.keypairs_client
def list(self):
client = self.client
keypairs = client.list_keypairs()['keypairs']
LOG.debug("List count, %s Keypairs" % len(keypairs))
return keypairs
def delete(self):
client = self.client
keypairs = self.list()
for k in keypairs:
try:
name = k['keypair']['name']
client.delete_keypair(name)
except Exception:
LOG.exception("Delete Keypairs exception.")
def dry_run(self):
keypairs = self.list()
self.data['keypairs'] = keypairs
class SecurityGroupService(BaseService):
def __init__(self, manager, **kwargs):
super(SecurityGroupService, self).__init__(kwargs)
self.client = manager.compute_security_groups_client
def list(self):
client = self.client
secgrps = client.list_security_groups()['security_groups']
secgrp_del = [grp for grp in secgrps if grp['name'] != 'default']
LOG.debug("List count, %s Security Groups" % len(secgrp_del))
return secgrp_del
def delete(self):
client = self.client
secgrp_del = self.list()
for g in secgrp_del:
try:
client.delete_security_group(g['id'])
except Exception:
LOG.exception("Delete Security Groups exception.")
def dry_run(self):
secgrp_del = self.list()
self.data['security_groups'] = secgrp_del
class FloatingIpService(BaseService):
def __init__(self, manager, **kwargs):
super(FloatingIpService, self).__init__(kwargs)
self.client = manager.compute_floating_ips_client
def list(self):
client = self.client
floating_ips = client.list_floating_ips()['floating_ips']
LOG.debug("List count, %s Floating IPs" % len(floating_ips))
return floating_ips
def delete(self):
client = self.client
floating_ips = self.list()
for f in floating_ips:
try:
client.delete_floating_ip(f['id'])
except Exception:
LOG.exception("Delete Floating IPs exception.")
def dry_run(self):
floating_ips = self.list()
self.data['floating_ips'] = floating_ips
class VolumeService(BaseService):
def __init__(self, manager, **kwargs):
super(VolumeService, self).__init__(kwargs)
self.client = manager.volumes_client
def list(self):
client = self.client
vols = client.list_volumes()['volumes']
LOG.debug("List count, %s Volumes" % len(vols))
return vols
def delete(self):
client = self.client
vols = self.list()
for v in vols:
try:
client.delete_volume(v['id'])
except Exception:
LOG.exception("Delete Volume exception.")
def dry_run(self):
vols = self.list()
self.data['volumes'] = vols
class VolumeQuotaService(BaseService):
def __init__(self, manager, **kwargs):
super(VolumeQuotaService, self).__init__(kwargs)
self.client = manager.volume_quotas_client
def delete(self):
client = self.client
try:
client.delete_quota_set(self.tenant_id)
except Exception:
LOG.exception("Delete Volume Quotas exception.")
def dry_run(self):
quotas = self.client.show_quota_usage(self.tenant_id)['quota_set']
self.data['volume_quotas'] = quotas
class NovaQuotaService(BaseService):
def __init__(self, manager, **kwargs):
super(NovaQuotaService, self).__init__(kwargs)
self.client = manager.quotas_client
self.limits_client = manager.limits_client
def delete(self):
client = self.client
try:
client.delete_quota_set(self.tenant_id)
except Exception:
LOG.exception("Delete Quotas exception.")
def dry_run(self):
client = self.limits_client
quotas = client.show_limits()['limits']
self.data['compute_quotas'] = quotas['absolute']
# Begin network service classes
class NetworkService(BaseService):
def __init__(self, manager, **kwargs):
super(NetworkService, self).__init__(kwargs)
self.networks_client = manager.networks_client
self.subnets_client = manager.subnets_client
self.ports_client = manager.ports_client
self.floating_ips_client = manager.floating_ips_client
self.metering_labels_client = manager.metering_labels_client
self.metering_label_rules_client = manager.metering_label_rules_client
self.security_groups_client = manager.security_groups_client
self.routers_client = manager.routers_client
def _filter_by_conf_networks(self, item_list):
if not item_list or not all(('network_id' in i for i in item_list)):
return item_list
return [item for item in item_list if item['network_id']
not in CONF_NETWORKS]
def list(self):
client = self.networks_client
networks = client.list_networks(**self.tenant_filter)
networks = networks['networks']
# filter out networks declared in tempest.conf
if self.is_preserve:
networks = [network for network in networks
if network['id'] not in CONF_NETWORKS]
LOG.debug("List count, %s Networks" % networks)
return networks
def delete(self):
client = self.networks_client
networks = self.list()
for n in networks:
try:
client.delete_network(n['id'])
except Exception:
LOG.exception("Delete Network exception.")
def dry_run(self):
networks = self.list()
self.data['networks'] = networks
class NetworkFloatingIpService(NetworkService):
def list(self):
client = self.floating_ips_client
flips = client.list_floatingips(**self.tenant_filter)
flips = flips['floatingips']
LOG.debug("List count, %s Network Floating IPs" % len(flips))
return flips
def delete(self):
        client = self.floating_ips_client
flips = self.list()
for flip in flips:
try:
client.delete_floatingip(flip['id'])
except Exception:
LOG.exception("Delete Network Floating IP exception.")
def dry_run(self):
flips = self.list()
self.data['floating_ips'] = flips
class NetworkRouterService(NetworkService):
def list(self):
client = self.routers_client
routers = client.list_routers(**self.tenant_filter)
routers = routers['routers']
if self.is_preserve:
routers = [router for router in routers
if router['id'] != CONF_PUB_ROUTER]
LOG.debug("List count, %s Routers" % len(routers))
return routers
def delete(self):
client = self.routers_client
ports_client = self.ports_client
routers = self.list()
for router in routers:
try:
rid = router['id']
ports = [port for port
in ports_client.list_ports(device_id=rid)['ports']
if port["device_owner"] == "network:router_interface"]
for port in ports:
client.remove_router_interface(rid, port_id=port['id'])
client.delete_router(rid)
except Exception:
LOG.exception("Delete Router exception.")
def dry_run(self):
routers = self.list()
self.data['routers'] = routers
class NetworkHealthMonitorService(NetworkService):
def list(self):
client = self.client
hms = client.list_health_monitors()
hms = hms['health_monitors']
hms = self._filter_by_tenant_id(hms)
LOG.debug("List count, %s Health Monitors" % len(hms))
return hms
def delete(self):
client = self.client
hms = self.list()
for hm in hms:
try:
client.delete_health_monitor(hm['id'])
except Exception:
LOG.exception("Delete Health Monitor exception.")
def dry_run(self):
hms = self.list()
self.data['health_monitors'] = hms
class NetworkMemberService(NetworkService):
def list(self):
client = self.client
members = client.list_members()
members = members['members']
members = self._filter_by_tenant_id(members)
LOG.debug("List count, %s Members" % len(members))
return members
def delete(self):
client = self.client
members = self.list()
for member in members:
try:
client.delete_member(member['id'])
except Exception:
LOG.exception("Delete Member exception.")
def dry_run(self):
members = self.list()
self.data['members'] = members
class NetworkVipService(NetworkService):
def list(self):
client = self.client
vips = client.list_vips()
vips = vips['vips']
vips = self._filter_by_tenant_id(vips)
LOG.debug("List count, %s VIPs" % len(vips))
return vips
def delete(self):
client = self.client
vips = self.list()
for vip in vips:
try:
client.delete_vip(vip['id'])
except Exception:
LOG.exception("Delete VIP exception.")
def dry_run(self):
vips = self.list()
self.data['vips'] = vips
class NetworkPoolService(NetworkService):
def list(self):
client = self.client
pools = client.list_pools()
pools = pools['pools']
pools = self._filter_by_tenant_id(pools)
LOG.debug("List count, %s Pools" % len(pools))
return pools
def delete(self):
client = self.client
pools = self.list()
for pool in pools:
try:
client.delete_pool(pool['id'])
except Exception:
LOG.exception("Delete Pool exception.")
def dry_run(self):
pools = self.list()
self.data['pools'] = pools
class NetworkMeteringLabelRuleService(NetworkService):
def list(self):
client = self.metering_label_rules_client
rules = client.list_metering_label_rules()
rules = rules['metering_label_rules']
rules = self._filter_by_tenant_id(rules)
LOG.debug("List count, %s Metering Label Rules" % len(rules))
return rules
def delete(self):
client = self.metering_label_rules_client
rules = self.list()
for rule in rules:
try:
client.delete_metering_label_rule(rule['id'])
except Exception:
LOG.exception("Delete Metering Label Rule exception.")
def dry_run(self):
rules = self.list()
self.data['rules'] = rules
class NetworkMeteringLabelService(NetworkService):
def list(self):
client = self.metering_labels_client
labels = client.list_metering_labels()
labels = labels['metering_labels']
labels = self._filter_by_tenant_id(labels)
LOG.debug("List count, %s Metering Labels" % len(labels))
return labels
def delete(self):
client = self.metering_labels_client
labels = self.list()
for label in labels:
try:
client.delete_metering_label(label['id'])
except Exception:
LOG.exception("Delete Metering Label exception.")
def dry_run(self):
labels = self.list()
self.data['labels'] = labels
class NetworkPortService(NetworkService):
def list(self):
client = self.ports_client
ports = [port for port in
client.list_ports(**self.tenant_filter)['ports']
if port["device_owner"] == "" or
port["device_owner"].startswith("compute:")]
if self.is_preserve:
ports = self._filter_by_conf_networks(ports)
LOG.debug("List count, %s Ports" % len(ports))
return ports
def delete(self):
client = self.ports_client
ports = self.list()
for port in ports:
try:
client.delete_port(port['id'])
except Exception:
LOG.exception("Delete Port exception.")
def dry_run(self):
ports = self.list()
self.data['ports'] = ports
class NetworkSecGroupService(NetworkService):
def list(self):
client = self.security_groups_client
filter = self.tenant_filter
# cannot delete default sec group so never show it.
secgroups = [secgroup for secgroup in
client.list_security_groups(**filter)['security_groups']
if secgroup['name'] != 'default']
if self.is_preserve:
secgroups = self._filter_by_conf_networks(secgroups)
LOG.debug("List count, %s securtiy_groups" % len(secgroups))
return secgroups
def delete(self):
        client = self.security_groups_client
        secgroups = self.list()
        for secgroup in secgroups:
            try:
                client.delete_security_group(secgroup['id'])
except Exception:
LOG.exception("Delete security_group exception.")
def dry_run(self):
secgroups = self.list()
self.data['secgroups'] = secgroups
class NetworkSubnetService(NetworkService):
def list(self):
client = self.subnets_client
subnets = client.list_subnets(**self.tenant_filter)
subnets = subnets['subnets']
if self.is_preserve:
subnets = self._filter_by_conf_networks(subnets)
LOG.debug("List count, %s Subnets" % len(subnets))
return subnets
def delete(self):
client = self.subnets_client
subnets = self.list()
for subnet in subnets:
try:
client.delete_subnet(subnet['id'])
except Exception:
LOG.exception("Delete Subnet exception.")
def dry_run(self):
subnets = self.list()
self.data['subnets'] = subnets
# Telemetry services
class TelemetryAlarmService(BaseService):
def __init__(self, manager, **kwargs):
super(TelemetryAlarmService, self).__init__(kwargs)
self.client = manager.alarming_client
def list(self):
client = self.client
alarms = client.list_alarms()
LOG.debug("List count, %s Alarms" % len(alarms))
return alarms
def delete(self):
client = self.client
alarms = self.list()
for alarm in alarms:
try:
client.delete_alarm(alarm['id'])
except Exception:
LOG.exception("Delete Alarms exception.")
def dry_run(self):
alarms = self.list()
self.data['alarms'] = alarms
# begin global services
class FlavorService(BaseService):
def __init__(self, manager, **kwargs):
super(FlavorService, self).__init__(kwargs)
self.client = manager.flavors_client
def list(self):
client = self.client
flavors = client.list_flavors({"is_public": None})['flavors']
if not self.is_save_state:
# recreate list removing saved flavors
flavors = [flavor for flavor in flavors if flavor['id']
not in self.saved_state_json['flavors'].keys()]
if self.is_preserve:
flavors = [flavor for flavor in flavors
if flavor['id'] not in CONF_FLAVORS]
LOG.debug("List count, %s Flavors after reconcile" % len(flavors))
return flavors
def delete(self):
client = self.client
flavors = self.list()
for flavor in flavors:
try:
client.delete_flavor(flavor['id'])
except Exception:
LOG.exception("Delete Flavor exception.")
def dry_run(self):
flavors = self.list()
self.data['flavors'] = flavors
def save_state(self):
flavors = self.list()
self.data['flavors'] = {}
for flavor in flavors:
self.data['flavors'][flavor['id']] = flavor['name']
class ImageService(BaseService):
def __init__(self, manager, **kwargs):
super(ImageService, self).__init__(kwargs)
self.client = manager.compute_images_client
def list(self):
client = self.client
images = client.list_images({"all_tenants": True})['images']
if not self.is_save_state:
images = [image for image in images if image['id']
not in self.saved_state_json['images'].keys()]
if self.is_preserve:
images = [image for image in images
if image['id'] not in CONF_IMAGES]
LOG.debug("List count, %s Images after reconcile" % len(images))
return images
def delete(self):
client = self.client
images = self.list()
for image in images:
try:
client.delete_image(image['id'])
except Exception:
LOG.exception("Delete Image exception.")
def dry_run(self):
images = self.list()
self.data['images'] = images
def save_state(self):
self.data['images'] = {}
images = self.list()
for image in images:
self.data['images'][image['id']] = image['name']
class IdentityService(BaseService):
def __init__(self, manager, **kwargs):
super(IdentityService, self).__init__(kwargs)
self.client = manager.identity_client
class UserService(BaseService):
def __init__(self, manager, **kwargs):
super(UserService, self).__init__(kwargs)
self.client = manager.users_client
def list(self):
users = self.client.list_users()['users']
if not self.is_save_state:
users = [user for user in users if user['id']
not in self.saved_state_json['users'].keys()]
if self.is_preserve:
users = [user for user in users if user['name']
not in CONF_USERS]
elif not self.is_save_state: # Never delete admin user
users = [user for user in users if user['name'] !=
CONF.auth.admin_username]
LOG.debug("List count, %s Users after reconcile" % len(users))
return users
def delete(self):
users = self.list()
for user in users:
try:
self.client.delete_user(user['id'])
except Exception:
LOG.exception("Delete User exception.")
def dry_run(self):
users = self.list()
self.data['users'] = users
def save_state(self):
users = self.list()
self.data['users'] = {}
for user in users:
self.data['users'][user['id']] = user['name']
class RoleService(BaseService):
def __init__(self, manager, **kwargs):
super(RoleService, self).__init__(kwargs)
self.client = manager.roles_client
def list(self):
try:
roles = self.client.list_roles()['roles']
# reconcile roles with saved state and never list admin role
if not self.is_save_state:
roles = [role for role in roles if
(role['id'] not in
self.saved_state_json['roles'].keys()
and role['name'] != CONF.identity.admin_role)]
LOG.debug("List count, %s Roles after reconcile" % len(roles))
return roles
except Exception:
LOG.exception("Cannot retrieve Roles.")
return []
def delete(self):
roles = self.list()
for role in roles:
try:
self.client.delete_role(role['id'])
except Exception:
LOG.exception("Delete Role exception.")
def dry_run(self):
roles = self.list()
self.data['roles'] = roles
def save_state(self):
roles = self.list()
self.data['roles'] = {}
for role in roles:
self.data['roles'][role['id']] = role['name']
class TenantService(BaseService):
def __init__(self, manager, **kwargs):
super(TenantService, self).__init__(kwargs)
self.client = manager.tenants_client
def list(self):
tenants = self.client.list_tenants()['tenants']
if not self.is_save_state:
tenants = [tenant for tenant in tenants if (tenant['id']
not in self.saved_state_json['tenants'].keys()
and tenant['name'] != CONF.auth.admin_project_name)]
if self.is_preserve:
tenants = [tenant for tenant in tenants if tenant['name']
not in CONF_TENANTS]
LOG.debug("List count, %s Tenants after reconcile" % len(tenants))
return tenants
def delete(self):
tenants = self.list()
for tenant in tenants:
try:
self.client.delete_tenant(tenant['id'])
except Exception:
LOG.exception("Delete Tenant exception.")
def dry_run(self):
tenants = self.list()
self.data['tenants'] = tenants
def save_state(self):
tenants = self.list()
self.data['tenants'] = {}
for tenant in tenants:
self.data['tenants'][tenant['id']] = tenant['name']
class DomainService(BaseService):
def __init__(self, manager, **kwargs):
super(DomainService, self).__init__(kwargs)
self.client = manager.domains_client
def list(self):
client = self.client
domains = client.list_domains()['domains']
if not self.is_save_state:
domains = [domain for domain in domains if domain['id']
not in self.saved_state_json['domains'].keys()]
LOG.debug("List count, %s Domains after reconcile" % len(domains))
return domains
def delete(self):
client = self.client
domains = self.list()
for domain in domains:
try:
client.update_domain(domain['id'], enabled=False)
client.delete_domain(domain['id'])
except Exception:
LOG.exception("Delete Domain exception.")
def dry_run(self):
domains = self.list()
self.data['domains'] = domains
def save_state(self):
domains = self.list()
self.data['domains'] = {}
for domain in domains:
self.data['domains'][domain['id']] = domain['name']
def get_tenant_cleanup_services():
tenant_services = []
if IS_AODH:
tenant_services.append(TelemetryAlarmService)
if IS_NOVA:
tenant_services.append(ServerService)
tenant_services.append(KeyPairService)
tenant_services.append(SecurityGroupService)
tenant_services.append(ServerGroupService)
if not IS_NEUTRON:
tenant_services.append(FloatingIpService)
tenant_services.append(NovaQuotaService)
if IS_HEAT:
tenant_services.append(StackService)
if IS_NEUTRON:
tenant_services.append(NetworkFloatingIpService)
if test.is_extension_enabled('metering', 'network'):
tenant_services.append(NetworkMeteringLabelRuleService)
tenant_services.append(NetworkMeteringLabelService)
tenant_services.append(NetworkRouterService)
tenant_services.append(NetworkPortService)
tenant_services.append(NetworkSubnetService)
tenant_services.append(NetworkService)
tenant_services.append(NetworkSecGroupService)
if IS_CINDER:
tenant_services.append(SnapshotService)
tenant_services.append(VolumeService)
tenant_services.append(VolumeQuotaService)
return tenant_services
def get_global_cleanup_services():
global_services = []
if IS_NOVA:
global_services.append(FlavorService)
if IS_GLANCE:
global_services.append(ImageService)
global_services.append(UserService)
global_services.append(TenantService)
global_services.append(DomainService)
global_services.append(RoleService)
return global_services
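# Illustrative sketch of how these factory lists are consumed (mirroring what
# tempest's cleanup command does): each service class is instantiated with a
# credentialed manager plus the option flags that BaseService.run() dispatches
# on (is_dry_run, is_save_state, is_preserve, data, saved_state_json, ...).
def _run_tenant_cleanup(manager, **kwargs):
    for service_cls in get_tenant_cleanup_services():
        service_cls(manager, **kwargs).run()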
| apache-2.0 |
frvcoin/frvcoin | share/qt/extract_strings_qt.py | 2945 | 1844 | #!/usr/bin/python
'''
Extract _("...") strings for translation and convert to Qt4 stringdefs so that
they can be picked up by Qt linguist.
'''
from subprocess import Popen, PIPE
import glob
import operator
OUT_CPP="src/qt/bitcoinstrings.cpp"
EMPTY=['""']
def parse_po(text):
"""
Parse 'po' format produced by xgettext.
Return a list of (msgid,msgstr) tuples.
"""
messages = []
msgid = []
msgstr = []
in_msgid = False
in_msgstr = False
for line in text.split('\n'):
line = line.rstrip('\r')
if line.startswith('msgid '):
if in_msgstr:
messages.append((msgid, msgstr))
in_msgstr = False
# message start
in_msgid = True
msgid = [line[6:]]
elif line.startswith('msgstr '):
in_msgid = False
in_msgstr = True
msgstr = [line[7:]]
elif line.startswith('"'):
if in_msgid:
msgid.append(line)
if in_msgstr:
msgstr.append(line)
if in_msgstr:
messages.append((msgid, msgstr))
return messages
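# Small illustrative check of the parser above on a two-message 'po' snippet;
# each entry comes back as a (msgid, msgstr) pair of raw quoted lines.
def _parse_po_example():
    sample = ('msgid "first message"\n'
              'msgstr ""\n'
              'msgid "second message"\n'
              'msgstr ""\n')
    return parse_po(sample)
    # -> [(['"first message"'], ['""']), (['"second message"'], ['""'])]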
files = glob.glob('src/*.cpp') + glob.glob('src/*.h')
# xgettext -n --keyword=_ $FILES
child = Popen(['xgettext','--output=-','-n','--keyword=_'] + files, stdout=PIPE)
(out, err) = child.communicate()
messages = parse_po(out)
f = open(OUT_CPP, 'w')
f.write("""#include <QtGlobal>
// Automatically generated by extract_strings.py
#ifdef __GNUC__
#define UNUSED __attribute__((unused))
#else
#define UNUSED
#endif
""")
f.write('static const char UNUSED *bitcoin_strings[] = {\n')
messages.sort(key=operator.itemgetter(0))
for (msgid, msgstr) in messages:
if msgid != EMPTY:
f.write('QT_TRANSLATE_NOOP("bitcoin-core", %s),\n' % ('\n'.join(msgid)))
f.write('};')
f.close()
| mit |
crcresearch/osf.io | reviews/workflow.py | 1 | 2410 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from enum import Enum
from enum import unique
class ChoiceEnum(Enum):
@classmethod
def choices(cls):
return tuple((v, unicode(v).title()) for v in cls.values())
@classmethod
def values(cls):
return tuple(c.value for c in cls)
@unique
class Workflows(ChoiceEnum):
NONE = None
PRE_MODERATION = 'pre-moderation'
POST_MODERATION = 'post-moderation'
@unique
class States(ChoiceEnum):
INITIAL = 'initial'
PENDING = 'pending'
ACCEPTED = 'accepted'
REJECTED = 'rejected'
@unique
class Triggers(ChoiceEnum):
SUBMIT = 'submit'
ACCEPT = 'accept'
REJECT = 'reject'
EDIT_COMMENT = 'edit_comment'
PUBLIC_STATES = {
Workflows.NONE.value: (
States.INITIAL.value,
States.PENDING.value,
States.ACCEPTED.value,
States.REJECTED.value,
),
Workflows.PRE_MODERATION.value: (
States.ACCEPTED.value,
),
Workflows.POST_MODERATION.value: (
States.PENDING.value,
States.ACCEPTED.value,
)
}
TRANSITIONS = [
{
'trigger': Triggers.SUBMIT.value,
'source': [States.INITIAL.value],
'dest': States.PENDING.value,
'after': ['save_action', 'update_last_transitioned', 'save_changes', 'notify_submit'],
},
{
'trigger': Triggers.SUBMIT.value,
'source': [States.PENDING.value, States.REJECTED.value],
'conditions': 'resubmission_allowed',
'dest': States.PENDING.value,
'after': ['save_action', 'update_last_transitioned', 'save_changes', 'notify_resubmit'],
},
{
'trigger': Triggers.ACCEPT.value,
'source': [States.PENDING.value, States.REJECTED.value],
'dest': States.ACCEPTED.value,
'after': ['save_action', 'update_last_transitioned', 'save_changes', 'notify_accept_reject'],
},
{
'trigger': Triggers.REJECT.value,
'source': [States.PENDING.value, States.ACCEPTED.value],
'dest': States.REJECTED.value,
'after': ['save_action', 'update_last_transitioned', 'save_changes', 'notify_accept_reject'],
},
{
'trigger': Triggers.EDIT_COMMENT.value,
'source': [States.PENDING.value, States.REJECTED.value, States.ACCEPTED.value],
'dest': '=',
'after': ['save_action', 'save_changes', 'notify_edit_comment'],
},
]
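# A minimal sketch (assumption: the ``transitions`` package this module is
# designed around) of wiring States/Triggers/TRANSITIONS into a state machine.
# The no-op callbacks below are purely illustrative stand-ins for the model
# methods named in the 'after'/'conditions' entries above.
def _example_state_machine():
    from transitions import Machine

    class _Reviewable(object):
        def save_action(self, *args, **kwargs): pass
        def update_last_transitioned(self, *args, **kwargs): pass
        def save_changes(self, *args, **kwargs): pass
        def notify_submit(self, *args, **kwargs): pass
        def notify_resubmit(self, *args, **kwargs): pass
        def notify_accept_reject(self, *args, **kwargs): pass
        def notify_edit_comment(self, *args, **kwargs): pass
        def resubmission_allowed(self, *args, **kwargs): return True

    model = _Reviewable()
    Machine(model=model, states=States.values(),
            transitions=TRANSITIONS, initial=States.INITIAL.value)
    model.submit()   # INITIAL -> PENDING
    return model.state   # 'pending'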
| apache-2.0 |
breuderink/golem | golem/helpers.py | 1 | 1501 | import csv, itertools
import numpy as np
def to_one_of_n(labels, class_rows=None):
'''
    Convert a list of integers to one-of-N coding for use in a DataSet.
Note that the rows correspond to the classes in *sorted* order.
>>> to_one_of_n([0, 0, 0, 2, 0, 1])
array([[ 1., 1., 1., 0., 1., 0.],
[ 0., 0., 0., 0., 0., 1.],
[ 0., 0., 0., 1., 0., 0.]])
'''
a = np.asarray(labels, int)
if a.ndim != 1:
raise ValueError('Labels should be 1D')
if not class_rows:
class_rows = np.unique(a) # is automatically sorted
Y = np.zeros((len(class_rows), a.size))
for i, n in enumerate(class_rows):
Y[i, a==n] = 1
return Y
def hard_max(X):
'''
Find the maximum of each column and return an array containing 1 on the
location of each maximum. If a column contains a NaN, the output column
consists of NaNs.
'''
X = np.atleast_2d(X)
assert X.shape[0] != 0
if X.shape[1] == 0:
return X.copy()
result = np.zeros(X.shape)
result[np.argmax(X, axis=0),range(X.shape[1])] = 1
result[:, np.any(np.isnan(X), axis=0)] *= np.nan
return result
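# Small illustrative check of hard_max: a one-hot marker of each column's
# maximum.
def _hard_max_example():
    X = np.array([[.2, .9],
                  [.8, .1]])
    return hard_max(X)   # -> [[0., 1.], [1., 0.]]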
def write_csv_table(rows, fname):
f = open(fname, 'w')
csv.writer(f).writerows(rows)
f.close()
def write_latex_table(rows, fname):
rows = list(rows)
ncols = max(len(r) for r in rows)
f = open(fname, 'w')
f.write('\\begin{tabular}{%s}\n' % ' '.join('c'*ncols))
for r in rows:
f.write(' & '.join(map(str, r)) + '\\\\\n')
f.write('\\end{tabular}\n')
f.close()
| bsd-3-clause |
styskin/pybrain | pybrain/rl/environments/renderer.py | 31 | 1454 | # obsolete - should be deleted if there are no objections.
__author__ = 'Thomas Rueckstiess, [email protected]'
from pybrain.utilities import abstractMethod
import threading
class Renderer(threading.Thread):
""" The general interface for a class displays what is happening in an environment.
The renderer is executed as concurrent thread. Start the renderer with the function
start() inherited from Thread, and check with isAlive(), if the thread is running.
"""
def __init__(self):
""" initializes some variables and parent init functions """
threading.Thread.__init__(self)
def updateData(self):
""" overwrite this class to update whatever data the renderer needs to display the current
state of the world. """
abstractMethod()
def _render(self):
""" Here, the render methods are called. This function has to be implemented by subclasses. """
abstractMethod()
def start(self):
""" wrapper for Thread.start(). only calls start if thread has not been started before. """
if not self.isAlive():
threading.Thread.start(self)
def run(self):
""" Don't call this function on its own. Use start() instead. """
self._render()
def stop(self):
""" stop signal requested. stop current thread.
@note: only if possible. OpenGL glutMainLoop is not stoppable.
"""
pass
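# Illustrative sketch only: the minimal shape of a concrete renderer. A real
# subclass would copy environment state in updateData() and draw it in
# _render(); the bodies below are placeholders.
class _ExampleRenderer(Renderer):
    def __init__(self):
        Renderer.__init__(self)
        self.data = None
    def updateData(self):
        self.data = None   # a real renderer would snapshot the environment here
    def _render(self):
        pass   # a real renderer would enter its drawing loop here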
| bsd-3-clause |
timofeymukha/eddylicious | eddylicious/generators/lund_rescaling.py | 1 | 15250 | # This file is part of eddylicious
# (c) Timofey Mukha
# The code is released under the GNU GPL Version 3 licence.
# See LICENCE.txt and the Legal section in the User Guide for more information
"""Functions for generating inflow velocity fields using
Lund et al's rescaling, see
Lund T.S., Wu X., Squires K.D. Generation of turbulent inflow
data for spatially-developing boundary layer simulations.
J. Comp. Phys. 1998; 140:233-58.
"""
from __future__ import print_function
from __future__ import division
import numpy as np
from mpi4py import MPI
from scipy.interpolate import interp1d
from scipy.interpolate import interp2d
from .helper_functions import chunks_and_offsets
from ..writers.ofnative_writers import write_velocity_to_ofnative
from ..writers.hdf5_writers import write_velocity_to_hdf5
__all__ = ["lund_rescale_mean_velocity", "lund_rescale_fluctuations",
"lund_generate"]
def lund_rescale_mean_velocity(etaPrec, yPlusPrec,
uMeanXPrec, uMeanYPrec,
nInfl, etaInfl, yPlusInfl, nPointsZInfl,
u0Infl, u0Prec,
gamma, blending):
"""Rescale the mean velocity profile using Lunds rescaling.
This function rescales the mean velocity profile taken from the
precursor simulation using Lund et al's rescaling.
Parameters
----------
etaPrec : ndarray
The values of eta for the corresponding values of the mean
velocity from the precursor.
yPlusPrec : ndarray
The values of y+ for the corresponding values of the mean
velocity from the precursor.
uMeanXPrec : ndarray
The values of the mean streamwise velocity from the precursor.
uMeanYPrec : ndarray
The values of the mean wall-normal velocity from the precursor.
nInfl : int
The amount of points in the wall-normal direction that contain
the boundary layer at the inflow boundary.
etaInfl : ndarray
The values of eta for the mesh points at the inflow boundary.
yPlusInfl : ndarray
The values of y+ for the mesh points at the inflow boundary.
nPointsZInfl : int
The amount of points in the spanwise direction for the inflow
boundary.
u0Infl : float
The freestream velocity at the inflow.
u0Prec : float
The freestream velocity for the precursor.
gamma : float
The ratio of the friction velocities in the inflow boundary
layer and the precursor.
blending : ndarray
The weights for blending the inner and outer profiles.
Returns
-------
uX, ndarray
The values of the mean streamwise velocity.
uY, ndarray
The values of the mean wall-normal velocity.
"""
assert nInfl > 0
assert nPointsZInfl > 0
assert u0Infl > 0
assert u0Prec > 0
assert gamma > 0
assert np.all(etaInfl >= 0)
assert np.all(etaPrec >= 0)
assert np.all(yPlusInfl >= 0)
assert np.all(yPlusPrec >= 0)
# Check if the wall is at the top, if so flip
flip = False
if etaInfl[0] > etaInfl[1]:
etaInfl = np.flipud(etaInfl)
yPlusInfl = np.flipud(yPlusInfl)
flip = True
# The streamwise component
uMeanXInterp = interp1d(etaPrec, uMeanXPrec)
uMeanXInterpPlus = interp1d(yPlusPrec, uMeanXPrec)
uMeanXInner = gamma*uMeanXInterpPlus(yPlusInfl[:nInfl])
uMeanXOuter = gamma*uMeanXInterp(etaInfl[:nInfl]) + u0Infl - gamma*u0Prec
uMeanXInfl = np.zeros(etaInfl.shape)
uMeanXInfl[:nInfl] = uMeanXInner*(1 - blending[:nInfl]) + \
uMeanXOuter*blending[:nInfl]
uMeanXInfl[nInfl:] = uMeanXInfl[nInfl-1]
uMeanXInfl = np.ones((etaInfl.size, nPointsZInfl))*uMeanXInfl[:,
np.newaxis]
# The wall-normal component
uMeanYInterp = interp1d(etaPrec, uMeanYPrec)
uMeanYInterpPlus = interp1d(yPlusPrec, uMeanYPrec)
uMeanYInner = uMeanYInterpPlus(yPlusInfl[:nInfl])
uMeanYOuter = uMeanYInterp(etaInfl[:nInfl])
uMeanYInfl = np.zeros(etaInfl.shape)
uMeanYInfl[:nInfl] = uMeanYInner*(1 - blending[:nInfl]) + \
uMeanYOuter*blending[:nInfl]
uMeanYInfl[nInfl:] = uMeanYInfl[nInfl-1]
uMeanYInfl = np.ones((etaInfl.size, nPointsZInfl))*uMeanYInfl[:,
np.newaxis]
assert np.all(uMeanXInfl >= 0)
if flip:
return np.flipud(uMeanXInfl), np.flipud(uMeanYInfl)
else:
return uMeanXInfl, uMeanYInfl
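# Minimal usage sketch (illustrative only, with synthetic profiles; every
# numeric value below is a made-up placeholder): rescaling a crude tanh
# boundary-layer profile onto an inflow mesh with identical wall-normal
# spacing and matched friction/freestream velocities (gamma = 1).
def _example_rescale_mean():
    eta = np.linspace(0., 1., 50)
    yPlus = 200.*eta
    uMeanX = np.tanh(5.*eta)
    uMeanY = np.zeros(eta.size)
    blending = eta   # weight going from inner (0) to outer (1) profile
    uX, uY = lund_rescale_mean_velocity(eta, yPlus, uMeanX, uMeanY,
                                        50, eta, yPlus, 10,
                                        1.0, 1.0, 1.0, blending)
    return uX.shape   # (50, 10)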
def lund_rescale_fluctuations(etaPrec, yPlusPrec, pointsZ,
uPrimeX, uPrimeY, uPrimeZ, gamma,
etaInfl, yPlusInfl, pointsZInfl,
nInfl, blending):
"""Rescale the fluctuations of velocity using Lund et al's
rescaling.
This function rescales the fluctuations of the three components of
the velocity field taken from the precursor simulation using Lund et
al's rescaling.
Parameters
----------
etaPrec : ndarray
The values of eta for the corresponding values of the mean
velocity from the precursor.
yPlusPrec : ndarray
The values of y+ for the corresponding values of the mean
velocity from the precursor.
pointsZ : ndarray
A 2d array containing the values of z for the points of the
precursor mesh.
uPrimeX : ndarray
A 2d array containing the values of the fluctuations of the x
component of velocity.
uPrimeY : ndarray
A 2d array containing the values of the fluctuations of the y
component of velocity.
uPrimeZ : ndarray
A 2d array containing the values of the fluctuations of the z
component of velocity.
gamma : float
The ratio of the friction velocities in the inflow boundary
layer and the precursor.
etaInfl : ndarray
The values of eta for the mesh points at the inflow boundary.
yPlusInfl : ndarray
The values of y+ for the mesh points at the inflow boundary.
pointsZInfl : ndarray
A 2d array containing the values of z for the points of the
inflow boundary.
    nInfl : int
        The number of points in the wall-normal direction that contain
        the boundary layer at the inflow boundary.
blending : ndarray
The weights for blending the inner and outer profiles.
Returns
-------
List of ndarrays
The list contains three items, each a 2d ndarray. The first
array contains the rescaled fluctuations of the x component of
velocity. The second -- of the y component of velocity. The
third -- of the z component of velocity.
"""
assert np.all(etaPrec >= 0)
assert np.all(yPlusPrec >= 0)
assert np.all(etaInfl >= 0)
assert np.all(yPlusInfl >= 0)
assert nInfl > 0
assert gamma > 0
# Check if the wall is at the top, if so flip
flip = False
if etaInfl[0] > etaInfl[1]:
etaInfl = np.flipud(etaInfl)
yPlusInfl = np.flipud(yPlusInfl)
flip = True
uPrimeXInfl = np.zeros(pointsZInfl.shape)
uPrimeYInfl = np.zeros(pointsZInfl.shape)
uPrimeZInfl = np.zeros(pointsZInfl.shape)
uPrimeXInterp = interp2d(pointsZ[0, :]/pointsZ[0, -1], etaPrec, uPrimeX)
uPrimeYInterp = interp2d(pointsZ[0, :]/pointsZ[0, -1], etaPrec, uPrimeY)
uPrimeZInterp = interp2d(pointsZ[0, :]/pointsZ[0, -1], etaPrec, uPrimeZ)
uPrimeXPlusInterp = interp2d(pointsZ[0, :]/pointsZ[0, -1], yPlusPrec,
uPrimeX)
uPrimeYPlusInterp = interp2d(pointsZ[0, :]/pointsZ[0, -1], yPlusPrec,
uPrimeY)
uPrimeZPlusInterp = interp2d(pointsZ[0, :]/pointsZ[0, -1], yPlusPrec,
uPrimeZ)
uPrimeXInner = \
gamma*uPrimeXPlusInterp(pointsZInfl[0, :]/pointsZInfl[0, -1],
yPlusInfl[:nInfl])
uPrimeYInner = \
gamma*uPrimeYPlusInterp(pointsZInfl[0, :]/pointsZInfl[0, -1],
yPlusInfl[:nInfl])
uPrimeZInner = \
gamma*uPrimeZPlusInterp(pointsZInfl[0, :]/pointsZInfl[0, -1],
yPlusInfl[:nInfl])
uPrimeXOuter = gamma*uPrimeXInterp(pointsZInfl[0, :]/pointsZInfl[0, -1],
etaInfl[:nInfl])
uPrimeYOuter = gamma*uPrimeYInterp(pointsZInfl[0, :]/pointsZInfl[0, -1],
etaInfl[:nInfl])
uPrimeZOuter = gamma*uPrimeZInterp(pointsZInfl[0, :]/pointsZInfl[0, -1],
etaInfl[:nInfl])
uPrimeXInfl[:nInfl] = \
uPrimeXInner*(1 - blending[0:nInfl])[:, np.newaxis] + \
uPrimeXOuter*blending[0:nInfl][:, np.newaxis]
uPrimeYInfl[:nInfl] = \
uPrimeYInner*(1 - blending[0:nInfl])[:, np.newaxis] + \
uPrimeYOuter*blending[0:nInfl][:, np.newaxis]
uPrimeZInfl[:nInfl] = \
uPrimeZInner*(1 - blending[0:nInfl])[:, np.newaxis] + \
uPrimeZOuter*blending[0:nInfl][:, np.newaxis]
if flip:
return map(np.flipud, [uPrimeXInfl, uPrimeYInfl, uPrimeZInfl])
else:
return [uPrimeXInfl, uPrimeYInfl, uPrimeZInfl]
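# Hedged usage note: the returned arrays match pointsZInfl in shape, i.e.
# (wall-normal points, spanwise points). A hypothetical call, reusing the
# quantities sketched above plus 2d fluctuation fields from one precursor
# snapshot:
#
#   uXp, uYp, uZp = lund_rescale_fluctuations(etaPrec, yPlusPrec, pointsZ,
#                                             uPrimeX, uPrimeY, uPrimeZ,
#                                             gamma, etaInfl, yPlusInfl,
#                                             pointsZInfl, nInfl, blending)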
def lund_generate(readerFunction,
writer, writePath,
dt, t0, tEnd, timePrecision,
uMeanXPrec, uMeanXInfl,
uMeanYPrec, uMeanYInfl,
etaPrec, yPlusPrec, pointsZ,
etaInfl, yPlusInfl, pointsZInfl,
nInfl, gamma,
times, blending):
"""Generate the the inflow velocity using Lund's
rescaling.
This function will use Lund et al's rescaling in order to generate
velocity fields for the inflow boundary. The rescaling for the mean
profile should be done beforehand and is one of the input parameters
for this function.
Parameters
----------
readerFunction : function
The function to use for reading in data, generated by the
reader. Should contain the reader's name in the attribute
"reader".
    writer : str
        The writer that will be used to save the values of the velocity
        field.
writePath : str
The path for the writer.
dt : float
The time-step to be used in the simulation. This will be used to
associate a time-value with the produced velocity fields.
t0 : float
The starting time to be used in the simulation. This will be
used to associate a time-value with the produced velocity.
    tEnd : float
        The ending time for the simulation.
    timePrecision : int
        Number of digits after the decimal to keep for the time value.
uMeanXPrec : ndarray
The values of the mean streamwise velocity from the precursor.
uMeanXInfl : ndarray
The values of the mean streamwise velocity for the inflow
boundary layer.
uMeanYPrec : ndarray
The values of the mean wall-normal velocity from the precursor.
uMeanYInfl : ndarray
The values of the mean wall-normal velocity for the inflow
boundary layer.
etaPrec : ndarray
The values of eta for the corresponding values of the mean
velocity from the precursor.
yPlusPrec : ndarray
The values of y+ for the corresponding values of the mean
velocity from the precursor.
pointsZ : ndarray
A 2d array containing the values of z for the points of the
precursor mesh.
etaInfl : ndarray
The values of eta for the mesh points at the inflow boundary.
yPlusInfl : ndarray
The values of y+ for the mesh points at the inflow boundary.
    pointsZInfl : ndarray
        A 2d array containing the values of z for the points of the
        inflow boundary.
    nInfl : int
        The number of points in the wall-normal direction that contain
        the boundary layer at the inflow boundary.
    gamma : float
        The ratio of the friction velocities in the inflow boundary
        layer and the precursor.
times : list of floats or strings
The times for which the velocity field was sampled in the
precursor simulation.
blending : ndarray
The weights for blending the inner and outer profiles.
"""
# Grab info regarding parallelization
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
nProcs = comm.Get_size()
# Get the total amount of rescalings to be done
size = int((tEnd-t0)/dt+1)
# Calculate the amount of rescalings each processor is responsible for
[chunks, offsets] = chunks_and_offsets(nProcs, size)
# Perform the rescaling
for i in range(chunks[rank]):
t = t0 + dt*i + dt*int(offsets[rank])
t = float(("{0:."+str(timePrecision)+"f}").format(t))
position = int(offsets[rank]) + i
        # max(..., 1) guards against a zero modulus when a rank owns
        # fewer than 10 chunks.
        if (rank == 0) and (np.mod(i, max(int(chunks[rank]/10), 1)) == 0):
print(" Rescaled about "+str(int(i/chunks[rank]*100))+"%")
# Read U data
if readerFunction.reader == "foamFile":
assert position < len(times)
[uPrimeX, uPrimeY, uPrimeZ] = readerFunction(times[position])
elif readerFunction.reader == "hdf5":
assert position < len(times)
[uPrimeX, uPrimeY, uPrimeZ] = readerFunction(position)
else:
raise ValueError("Unknown reader")
# Subtract mean
uPrimeX -= uMeanXPrec[:, np.newaxis]
uPrimeY -= uMeanYPrec[:, np.newaxis]
[uXInfl, uYInfl, uZInfl] = lund_rescale_fluctuations(etaPrec,
yPlusPrec,
pointsZ,
uPrimeX,
uPrimeY,
uPrimeZ,
gamma,
etaInfl,
yPlusInfl,
pointsZInfl,
nInfl,
blending)
# Add mean
uXInfl += uMeanXInfl
uYInfl += uMeanYInfl
# Write
if writer == "ofnative":
write_velocity_to_ofnative(writePath, t, uXInfl, uYInfl, uZInfl)
elif writer == "hdf5":
write_velocity_to_hdf5(writePath, t, uXInfl, uYInfl, uZInfl,
position)
else:
raise ValueError("Unknown writer")
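# Illustration of the parallel work split used above. chunks_and_offsets is
# imported elsewhere in this module; the numbers here are only an example of
# one valid split. For size = 10 rescalings on nProcs = 3 ranks:
#
#   chunks = [4, 3, 3]    # how many time steps each rank rescales
#   offsets = [0, 4, 7]   # where each rank's portion starts
#
# so rank r processes positions offsets[r] .. offsets[r] + chunks[r] - 1.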
| gpl-3.0 |
fzheng/codejam | lib/python2.7/site-packages/setuptools/command/install_lib.py | 104 | 3839 | import os
import imp
from itertools import product, starmap
import distutils.command.install_lib as orig
class install_lib(orig.install_lib):
"""Don't add compiled flags to filenames of non-Python files"""
def run(self):
self.build()
outfiles = self.install()
if outfiles is not None:
# always compile, in case we have any extension stubs to deal with
self.byte_compile(outfiles)
def get_exclusions(self):
"""
Return a collections.Sized collections.Container of paths to be
excluded for single_version_externally_managed installations.
"""
all_packages = (
pkg
for ns_pkg in self._get_SVEM_NSPs()
for pkg in self._all_packages(ns_pkg)
)
excl_specs = product(all_packages, self._gen_exclusion_paths())
return set(starmap(self._exclude_pkg_path, excl_specs))
def _exclude_pkg_path(self, pkg, exclusion_path):
"""
Given a package name and exclusion path within that package,
compute the full exclusion path.
"""
parts = pkg.split('.') + [exclusion_path]
return os.path.join(self.install_dir, *parts)
@staticmethod
def _all_packages(pkg_name):
"""
>>> list(install_lib._all_packages('foo.bar.baz'))
['foo.bar.baz', 'foo.bar', 'foo']
"""
while pkg_name:
yield pkg_name
pkg_name, sep, child = pkg_name.rpartition('.')
def _get_SVEM_NSPs(self):
"""
Get namespace packages (list) but only for
single_version_externally_managed installations and empty otherwise.
"""
# TODO: is it necessary to short-circuit here? i.e. what's the cost
# if get_finalized_command is called even when namespace_packages is
# False?
if not self.distribution.namespace_packages:
return []
install_cmd = self.get_finalized_command('install')
svem = install_cmd.single_version_externally_managed
return self.distribution.namespace_packages if svem else []
@staticmethod
def _gen_exclusion_paths():
"""
Generate file paths to be excluded for namespace packages (bytecode
cache files).
"""
# always exclude the package module itself
yield '__init__.py'
yield '__init__.pyc'
yield '__init__.pyo'
if not hasattr(imp, 'get_tag'):
return
base = os.path.join('__pycache__', '__init__.' + imp.get_tag())
yield base + '.pyc'
yield base + '.pyo'
yield base + '.opt-1.pyc'
yield base + '.opt-2.pyc'
def copy_tree(
self, infile, outfile,
preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1
):
assert preserve_mode and preserve_times and not preserve_symlinks
exclude = self.get_exclusions()
if not exclude:
return orig.install_lib.copy_tree(self, infile, outfile)
# Exclude namespace package __init__.py* files from the output
from setuptools.archive_util import unpack_directory
from distutils import log
outfiles = []
def pf(src, dst):
if dst in exclude:
log.warn("Skipping installation of %s (namespace package)",
dst)
return False
log.info("copying %s -> %s", src, os.path.dirname(dst))
outfiles.append(dst)
return dst
unpack_directory(infile, outfile, pf)
return outfiles
def get_outputs(self):
outputs = orig.install_lib.get_outputs(self)
exclude = self.get_exclusions()
if exclude:
return [f for f in outputs if f not in exclude]
return outputs
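# Worked example of the exclusion logic above (paths are illustrative): for
# a namespace package 'foo.bar' installed under /site-packages, the set
# returned by get_exclusions() contains, among others,
#   /site-packages/foo/__init__.py
#   /site-packages/foo/bar/__init__.pyc
# i.e. the __init__ stubs of 'foo.bar' and of its parent 'foo' are skipped.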
| mit |
tchellomello/home-assistant | homeassistant/components/knx/schema.py | 1 | 14928 | """Voluptuous schemas for the KNX integration."""
import voluptuous as vol
from xknx.devices.climate import SetpointShiftMode
from homeassistant.const import (
CONF_ADDRESS,
CONF_DEVICE_CLASS,
CONF_ENTITY_ID,
CONF_HOST,
CONF_NAME,
CONF_PORT,
CONF_TYPE,
)
import homeassistant.helpers.config_validation as cv
from .const import (
CONF_STATE_ADDRESS,
CONF_SYNC_STATE,
OPERATION_MODES,
PRESET_MODES,
ColorTempModes,
)
class ConnectionSchema:
"""Voluptuous schema for KNX connection."""
CONF_KNX_LOCAL_IP = "local_ip"
TUNNELING_SCHEMA = vol.Schema(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_KNX_LOCAL_IP): cv.string,
vol.Optional(CONF_PORT): cv.port,
}
)
ROUTING_SCHEMA = vol.Schema({vol.Optional(CONF_KNX_LOCAL_IP): cv.string})
class CoverSchema:
"""Voluptuous schema for KNX covers."""
CONF_MOVE_LONG_ADDRESS = "move_long_address"
CONF_MOVE_SHORT_ADDRESS = "move_short_address"
CONF_STOP_ADDRESS = "stop_address"
CONF_POSITION_ADDRESS = "position_address"
CONF_POSITION_STATE_ADDRESS = "position_state_address"
CONF_ANGLE_ADDRESS = "angle_address"
CONF_ANGLE_STATE_ADDRESS = "angle_state_address"
CONF_TRAVELLING_TIME_DOWN = "travelling_time_down"
CONF_TRAVELLING_TIME_UP = "travelling_time_up"
CONF_INVERT_POSITION = "invert_position"
CONF_INVERT_ANGLE = "invert_angle"
DEFAULT_TRAVEL_TIME = 25
DEFAULT_NAME = "KNX Cover"
SCHEMA = vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_MOVE_LONG_ADDRESS): cv.string,
vol.Optional(CONF_MOVE_SHORT_ADDRESS): cv.string,
vol.Optional(CONF_STOP_ADDRESS): cv.string,
vol.Optional(CONF_POSITION_ADDRESS): cv.string,
vol.Optional(CONF_POSITION_STATE_ADDRESS): cv.string,
vol.Optional(CONF_ANGLE_ADDRESS): cv.string,
vol.Optional(CONF_ANGLE_STATE_ADDRESS): cv.string,
vol.Optional(
CONF_TRAVELLING_TIME_DOWN, default=DEFAULT_TRAVEL_TIME
): cv.positive_int,
vol.Optional(
CONF_TRAVELLING_TIME_UP, default=DEFAULT_TRAVEL_TIME
): cv.positive_int,
vol.Optional(CONF_INVERT_POSITION, default=False): cv.boolean,
vol.Optional(CONF_INVERT_ANGLE, default=False): cv.boolean,
}
)
class BinarySensorSchema:
"""Voluptuous schema for KNX binary sensors."""
CONF_STATE_ADDRESS = CONF_STATE_ADDRESS
CONF_SYNC_STATE = CONF_SYNC_STATE
CONF_IGNORE_INTERNAL_STATE = "ignore_internal_state"
CONF_AUTOMATION = "automation"
CONF_HOOK = "hook"
CONF_DEFAULT_HOOK = "on"
CONF_COUNTER = "counter"
CONF_DEFAULT_COUNTER = 1
CONF_ACTION = "action"
CONF_RESET_AFTER = "reset_after"
DEFAULT_NAME = "KNX Binary Sensor"
AUTOMATION_SCHEMA = vol.Schema(
{
vol.Optional(CONF_HOOK, default=CONF_DEFAULT_HOOK): cv.string,
vol.Optional(CONF_COUNTER, default=CONF_DEFAULT_COUNTER): cv.port,
vol.Required(CONF_ACTION): cv.SCRIPT_SCHEMA,
}
)
AUTOMATIONS_SCHEMA = vol.All(cv.ensure_list, [AUTOMATION_SCHEMA])
SCHEMA = vol.All(
cv.deprecated("significant_bit"),
vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_SYNC_STATE, default=True): vol.Any(
vol.All(vol.Coerce(int), vol.Range(min=2, max=1440)),
cv.boolean,
cv.string,
),
vol.Optional(CONF_IGNORE_INTERNAL_STATE, default=False): cv.boolean,
vol.Required(CONF_STATE_ADDRESS): cv.string,
vol.Optional(CONF_DEVICE_CLASS): cv.string,
vol.Optional(CONF_RESET_AFTER): cv.positive_int,
vol.Optional(CONF_AUTOMATION): AUTOMATIONS_SCHEMA,
}
),
)
class LightSchema:
"""Voluptuous schema for KNX lights."""
CONF_STATE_ADDRESS = CONF_STATE_ADDRESS
CONF_BRIGHTNESS_ADDRESS = "brightness_address"
CONF_BRIGHTNESS_STATE_ADDRESS = "brightness_state_address"
CONF_COLOR_ADDRESS = "color_address"
CONF_COLOR_STATE_ADDRESS = "color_state_address"
CONF_COLOR_TEMP_ADDRESS = "color_temperature_address"
CONF_COLOR_TEMP_STATE_ADDRESS = "color_temperature_state_address"
CONF_COLOR_TEMP_MODE = "color_temperature_mode"
CONF_RGBW_ADDRESS = "rgbw_address"
CONF_RGBW_STATE_ADDRESS = "rgbw_state_address"
CONF_MIN_KELVIN = "min_kelvin"
CONF_MAX_KELVIN = "max_kelvin"
DEFAULT_NAME = "KNX Light"
DEFAULT_COLOR_TEMP_MODE = "absolute"
DEFAULT_MIN_KELVIN = 2700 # 370 mireds
DEFAULT_MAX_KELVIN = 6000 # 166 mireds
SCHEMA = vol.Schema(
{
vol.Required(CONF_ADDRESS): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_STATE_ADDRESS): cv.string,
vol.Optional(CONF_BRIGHTNESS_ADDRESS): cv.string,
vol.Optional(CONF_BRIGHTNESS_STATE_ADDRESS): cv.string,
vol.Optional(CONF_COLOR_ADDRESS): cv.string,
vol.Optional(CONF_COLOR_STATE_ADDRESS): cv.string,
vol.Optional(CONF_COLOR_TEMP_ADDRESS): cv.string,
vol.Optional(CONF_COLOR_TEMP_STATE_ADDRESS): cv.string,
vol.Optional(
CONF_COLOR_TEMP_MODE, default=DEFAULT_COLOR_TEMP_MODE
): cv.enum(ColorTempModes),
vol.Optional(CONF_RGBW_ADDRESS): cv.string,
vol.Optional(CONF_RGBW_STATE_ADDRESS): cv.string,
vol.Optional(CONF_MIN_KELVIN, default=DEFAULT_MIN_KELVIN): vol.All(
vol.Coerce(int), vol.Range(min=1)
),
vol.Optional(CONF_MAX_KELVIN, default=DEFAULT_MAX_KELVIN): vol.All(
vol.Coerce(int), vol.Range(min=1)
),
}
)
class ClimateSchema:
"""Voluptuous schema for KNX climate devices."""
CONF_SETPOINT_SHIFT_ADDRESS = "setpoint_shift_address"
CONF_SETPOINT_SHIFT_STATE_ADDRESS = "setpoint_shift_state_address"
CONF_SETPOINT_SHIFT_MODE = "setpoint_shift_mode"
CONF_SETPOINT_SHIFT_MAX = "setpoint_shift_max"
CONF_SETPOINT_SHIFT_MIN = "setpoint_shift_min"
CONF_TEMPERATURE_ADDRESS = "temperature_address"
CONF_TEMPERATURE_STEP = "temperature_step"
CONF_TARGET_TEMPERATURE_ADDRESS = "target_temperature_address"
CONF_TARGET_TEMPERATURE_STATE_ADDRESS = "target_temperature_state_address"
CONF_OPERATION_MODE_ADDRESS = "operation_mode_address"
CONF_OPERATION_MODE_STATE_ADDRESS = "operation_mode_state_address"
CONF_CONTROLLER_STATUS_ADDRESS = "controller_status_address"
CONF_CONTROLLER_STATUS_STATE_ADDRESS = "controller_status_state_address"
CONF_CONTROLLER_MODE_ADDRESS = "controller_mode_address"
CONF_CONTROLLER_MODE_STATE_ADDRESS = "controller_mode_state_address"
CONF_HEAT_COOL_ADDRESS = "heat_cool_address"
CONF_HEAT_COOL_STATE_ADDRESS = "heat_cool_state_address"
CONF_OPERATION_MODE_FROST_PROTECTION_ADDRESS = (
"operation_mode_frost_protection_address"
)
CONF_OPERATION_MODE_NIGHT_ADDRESS = "operation_mode_night_address"
CONF_OPERATION_MODE_COMFORT_ADDRESS = "operation_mode_comfort_address"
CONF_OPERATION_MODE_STANDBY_ADDRESS = "operation_mode_standby_address"
CONF_OPERATION_MODES = "operation_modes"
CONF_ON_OFF_ADDRESS = "on_off_address"
CONF_ON_OFF_STATE_ADDRESS = "on_off_state_address"
CONF_ON_OFF_INVERT = "on_off_invert"
CONF_MIN_TEMP = "min_temp"
CONF_MAX_TEMP = "max_temp"
DEFAULT_NAME = "KNX Climate"
DEFAULT_SETPOINT_SHIFT_MODE = "DPT6010"
DEFAULT_SETPOINT_SHIFT_MAX = 6
DEFAULT_SETPOINT_SHIFT_MIN = -6
DEFAULT_TEMPERATURE_STEP = 0.1
DEFAULT_ON_OFF_INVERT = False
SCHEMA = vol.All(
cv.deprecated("setpoint_shift_step", replacement_key=CONF_TEMPERATURE_STEP),
vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(
CONF_SETPOINT_SHIFT_MODE, default=DEFAULT_SETPOINT_SHIFT_MODE
): cv.enum(SetpointShiftMode),
vol.Optional(
CONF_SETPOINT_SHIFT_MAX, default=DEFAULT_SETPOINT_SHIFT_MAX
): vol.All(int, vol.Range(min=0, max=32)),
vol.Optional(
CONF_SETPOINT_SHIFT_MIN, default=DEFAULT_SETPOINT_SHIFT_MIN
): vol.All(int, vol.Range(min=-32, max=0)),
vol.Optional(
CONF_TEMPERATURE_STEP, default=DEFAULT_TEMPERATURE_STEP
): vol.All(float, vol.Range(min=0, max=2)),
vol.Required(CONF_TEMPERATURE_ADDRESS): cv.string,
vol.Required(CONF_TARGET_TEMPERATURE_STATE_ADDRESS): cv.string,
vol.Optional(CONF_TARGET_TEMPERATURE_ADDRESS): cv.string,
vol.Optional(CONF_SETPOINT_SHIFT_ADDRESS): cv.string,
vol.Optional(CONF_SETPOINT_SHIFT_STATE_ADDRESS): cv.string,
vol.Optional(CONF_OPERATION_MODE_ADDRESS): cv.string,
vol.Optional(CONF_OPERATION_MODE_STATE_ADDRESS): cv.string,
vol.Optional(CONF_CONTROLLER_STATUS_ADDRESS): cv.string,
vol.Optional(CONF_CONTROLLER_STATUS_STATE_ADDRESS): cv.string,
vol.Optional(CONF_CONTROLLER_MODE_ADDRESS): cv.string,
vol.Optional(CONF_CONTROLLER_MODE_STATE_ADDRESS): cv.string,
vol.Optional(CONF_HEAT_COOL_ADDRESS): cv.string,
vol.Optional(CONF_HEAT_COOL_STATE_ADDRESS): cv.string,
vol.Optional(CONF_OPERATION_MODE_FROST_PROTECTION_ADDRESS): cv.string,
vol.Optional(CONF_OPERATION_MODE_NIGHT_ADDRESS): cv.string,
vol.Optional(CONF_OPERATION_MODE_COMFORT_ADDRESS): cv.string,
vol.Optional(CONF_OPERATION_MODE_STANDBY_ADDRESS): cv.string,
vol.Optional(CONF_ON_OFF_ADDRESS): cv.string,
vol.Optional(CONF_ON_OFF_STATE_ADDRESS): cv.string,
vol.Optional(
CONF_ON_OFF_INVERT, default=DEFAULT_ON_OFF_INVERT
): cv.boolean,
vol.Optional(CONF_OPERATION_MODES): vol.All(
cv.ensure_list, [vol.In({**OPERATION_MODES, **PRESET_MODES})]
),
vol.Optional(CONF_MIN_TEMP): vol.Coerce(float),
vol.Optional(CONF_MAX_TEMP): vol.Coerce(float),
}
),
)
class SwitchSchema:
"""Voluptuous schema for KNX switches."""
CONF_STATE_ADDRESS = CONF_STATE_ADDRESS
DEFAULT_NAME = "KNX Switch"
SCHEMA = vol.Schema(
{
vol.Required(CONF_ADDRESS): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_STATE_ADDRESS): cv.string,
}
)
class ExposeSchema:
"""Voluptuous schema for KNX exposures."""
CONF_KNX_EXPOSE_TYPE = CONF_TYPE
CONF_KNX_EXPOSE_ATTRIBUTE = "attribute"
CONF_KNX_EXPOSE_DEFAULT = "default"
CONF_KNX_EXPOSE_ADDRESS = CONF_ADDRESS
SCHEMA = vol.Schema(
{
vol.Required(CONF_KNX_EXPOSE_TYPE): vol.Any(int, float, str),
vol.Optional(CONF_ENTITY_ID): cv.entity_id,
vol.Optional(CONF_KNX_EXPOSE_ATTRIBUTE): cv.string,
vol.Optional(CONF_KNX_EXPOSE_DEFAULT): cv.match_all,
vol.Required(CONF_KNX_EXPOSE_ADDRESS): cv.string,
}
)
class NotifySchema:
"""Voluptuous schema for KNX notifications."""
DEFAULT_NAME = "KNX Notify"
SCHEMA = vol.Schema(
{
vol.Required(CONF_ADDRESS): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
}
)
class SensorSchema:
"""Voluptuous schema for KNX sensors."""
CONF_STATE_ADDRESS = CONF_STATE_ADDRESS
CONF_SYNC_STATE = CONF_SYNC_STATE
DEFAULT_NAME = "KNX Sensor"
SCHEMA = vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_SYNC_STATE, default=True): vol.Any(
vol.All(vol.Coerce(int), vol.Range(min=2, max=1440)),
cv.boolean,
cv.string,
),
vol.Required(CONF_STATE_ADDRESS): cv.string,
vol.Required(CONF_TYPE): vol.Any(int, float, str),
}
)
class SceneSchema:
"""Voluptuous schema for KNX scenes."""
CONF_SCENE_NUMBER = "scene_number"
DEFAULT_NAME = "KNX SCENE"
SCHEMA = vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Required(CONF_ADDRESS): cv.string,
vol.Required(CONF_SCENE_NUMBER): cv.positive_int,
}
)
class WeatherSchema:
"""Voluptuous schema for KNX weather station."""
CONF_SYNC_STATE = CONF_SYNC_STATE
CONF_KNX_TEMPERATURE_ADDRESS = "address_temperature"
CONF_KNX_BRIGHTNESS_SOUTH_ADDRESS = "address_brightness_south"
CONF_KNX_BRIGHTNESS_EAST_ADDRESS = "address_brightness_east"
CONF_KNX_BRIGHTNESS_WEST_ADDRESS = "address_brightness_west"
CONF_KNX_WIND_SPEED_ADDRESS = "address_wind_speed"
CONF_KNX_RAIN_ALARM_ADDRESS = "address_rain_alarm"
CONF_KNX_FROST_ALARM_ADDRESS = "address_frost_alarm"
CONF_KNX_WIND_ALARM_ADDRESS = "address_wind_alarm"
CONF_KNX_DAY_NIGHT_ADDRESS = "address_day_night"
CONF_KNX_AIR_PRESSURE_ADDRESS = "address_air_pressure"
CONF_KNX_HUMIDITY_ADDRESS = "address_humidity"
CONF_KNX_EXPOSE_SENSORS = "expose_sensors"
DEFAULT_NAME = "KNX Weather Station"
SCHEMA = vol.Schema(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_SYNC_STATE, default=True): vol.Any(
vol.All(vol.Coerce(int), vol.Range(min=2, max=1440)),
cv.boolean,
cv.string,
),
vol.Optional(CONF_KNX_EXPOSE_SENSORS, default=False): cv.boolean,
vol.Required(CONF_KNX_TEMPERATURE_ADDRESS): cv.string,
vol.Optional(CONF_KNX_BRIGHTNESS_SOUTH_ADDRESS): cv.string,
vol.Optional(CONF_KNX_BRIGHTNESS_EAST_ADDRESS): cv.string,
vol.Optional(CONF_KNX_BRIGHTNESS_WEST_ADDRESS): cv.string,
vol.Optional(CONF_KNX_WIND_SPEED_ADDRESS): cv.string,
vol.Optional(CONF_KNX_RAIN_ALARM_ADDRESS): cv.string,
vol.Optional(CONF_KNX_FROST_ALARM_ADDRESS): cv.string,
vol.Optional(CONF_KNX_WIND_ALARM_ADDRESS): cv.string,
vol.Optional(CONF_KNX_DAY_NIGHT_ADDRESS): cv.string,
vol.Optional(CONF_KNX_AIR_PRESSURE_ADDRESS): cv.string,
vol.Optional(CONF_KNX_HUMIDITY_ADDRESS): cv.string,
}
)
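# A minimal validation sketch (hypothetical group addresses): every SCHEMA
# above is a voluptuous validator, so a platform config can be checked and
# defaulted directly.
#
#   conf = SwitchSchema.SCHEMA({'address': '1/0/1', 'state_address': '1/0/2'})
#   assert conf['name'] == 'KNX Switch'   # default filled in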
| apache-2.0 |
olymk2/maidstone-hackspace | website/data/sql/donate.py | 4 | 1034 | import os
from scaffold.core.data.select import select_data
from scaffold.core.data.insert import insert_data
#~ from scaffold.core.data.update import update_data
#~ from scaffold.core.data.delete import delete_data
from scaffold.core.data.sql import query_builder
query_builder.query_path = os.path.abspath('./data/sql/')
class get_pledge(select_data):
debug = True
table = 'pledges'
columns = {'id', 'name', 'total'}
required = {'name'}
class get_pledges(select_data):
debug = True
#~ table = 'pledges'
query_file = 'pledge_totals.sql'
required = {'environment'}
columns_where = {'expired', 'environment'}
grouping = {'name'}
class add_pledge(insert_data):
debug = True
table = 'pledges'
required = {'name'}
columns = {'name'}
class add_payment(insert_data):
debug = True
table = 'pledge_amounts'
required = {'provider_id', 'pledge_id', 'reference', 'amount', 'environment'}
columns = {'provider_id', 'pledge_id', 'reference', 'amount', 'environment'}
| gpl-3.0 |
GitHublong/hue | desktop/core/ext-py/requests-2.6.0/requests/auth.py | 197 | 6710 | # -*- coding: utf-8 -*-
"""
requests.auth
~~~~~~~~~~~~~
This module contains the authentication handlers for Requests.
"""
import os
import re
import time
import hashlib
from base64 import b64encode
from .compat import urlparse, str
from .cookies import extract_cookies_to_jar
from .utils import parse_dict_header, to_native_string
from .status_codes import codes
CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded'
CONTENT_TYPE_MULTI_PART = 'multipart/form-data'
def _basic_auth_str(username, password):
"""Returns a Basic Auth string."""
authstr = 'Basic ' + to_native_string(
b64encode(('%s:%s' % (username, password)).encode('latin1')).strip()
)
return authstr
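# For example (deterministic; just base64 of "user:pass"):
#
#   _basic_auth_str('user', 'pass') == 'Basic dXNlcjpwYXNz'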
class AuthBase(object):
"""Base class that all auth implementations derive from"""
def __call__(self, r):
raise NotImplementedError('Auth hooks must be callable.')
class HTTPBasicAuth(AuthBase):
"""Attaches HTTP Basic Authentication to the given Request object."""
def __init__(self, username, password):
self.username = username
self.password = password
def __call__(self, r):
r.headers['Authorization'] = _basic_auth_str(self.username, self.password)
return r
class HTTPProxyAuth(HTTPBasicAuth):
"""Attaches HTTP Proxy Authentication to a given Request object."""
def __call__(self, r):
r.headers['Proxy-Authorization'] = _basic_auth_str(self.username, self.password)
return r
class HTTPDigestAuth(AuthBase):
"""Attaches HTTP Digest Authentication to the given Request object."""
def __init__(self, username, password):
self.username = username
self.password = password
self.last_nonce = ''
self.nonce_count = 0
self.chal = {}
self.pos = None
self.num_401_calls = 1
def build_digest_header(self, method, url):
realm = self.chal['realm']
nonce = self.chal['nonce']
qop = self.chal.get('qop')
algorithm = self.chal.get('algorithm')
opaque = self.chal.get('opaque')
        # Initialise so unsupported algorithms fall through to the
        # 'hash_utf8 is None' check below instead of raising NameError.
        hash_utf8 = None
        if algorithm is None:
_algorithm = 'MD5'
else:
_algorithm = algorithm.upper()
# lambdas assume digest modules are imported at the top level
if _algorithm == 'MD5' or _algorithm == 'MD5-SESS':
def md5_utf8(x):
if isinstance(x, str):
x = x.encode('utf-8')
return hashlib.md5(x).hexdigest()
hash_utf8 = md5_utf8
elif _algorithm == 'SHA':
def sha_utf8(x):
if isinstance(x, str):
x = x.encode('utf-8')
return hashlib.sha1(x).hexdigest()
hash_utf8 = sha_utf8
KD = lambda s, d: hash_utf8("%s:%s" % (s, d))
if hash_utf8 is None:
return None
# XXX not implemented yet
entdig = None
p_parsed = urlparse(url)
path = p_parsed.path
if p_parsed.query:
path += '?' + p_parsed.query
A1 = '%s:%s:%s' % (self.username, realm, self.password)
A2 = '%s:%s' % (method, path)
HA1 = hash_utf8(A1)
HA2 = hash_utf8(A2)
if nonce == self.last_nonce:
self.nonce_count += 1
else:
self.nonce_count = 1
ncvalue = '%08x' % self.nonce_count
s = str(self.nonce_count).encode('utf-8')
s += nonce.encode('utf-8')
s += time.ctime().encode('utf-8')
s += os.urandom(8)
cnonce = (hashlib.sha1(s).hexdigest()[:16])
if _algorithm == 'MD5-SESS':
HA1 = hash_utf8('%s:%s:%s' % (HA1, nonce, cnonce))
if qop is None:
respdig = KD(HA1, "%s:%s" % (nonce, HA2))
elif qop == 'auth' or 'auth' in qop.split(','):
noncebit = "%s:%s:%s:%s:%s" % (
nonce, ncvalue, cnonce, 'auth', HA2
)
respdig = KD(HA1, noncebit)
else:
# XXX handle auth-int.
return None
self.last_nonce = nonce
# XXX should the partial digests be encoded too?
base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
'response="%s"' % (self.username, realm, nonce, path, respdig)
if opaque:
base += ', opaque="%s"' % opaque
if algorithm:
base += ', algorithm="%s"' % algorithm
if entdig:
base += ', digest="%s"' % entdig
if qop:
base += ', qop="auth", nc=%s, cnonce="%s"' % (ncvalue, cnonce)
return 'Digest %s' % (base)
def handle_redirect(self, r, **kwargs):
"""Reset num_401_calls counter on redirects."""
if r.is_redirect:
self.num_401_calls = 1
def handle_401(self, r, **kwargs):
"""Takes the given response and tries digest-auth, if needed."""
if self.pos is not None:
# Rewind the file position indicator of the body to where
# it was to resend the request.
r.request.body.seek(self.pos)
num_401_calls = getattr(self, 'num_401_calls', 1)
s_auth = r.headers.get('www-authenticate', '')
if 'digest' in s_auth.lower() and num_401_calls < 2:
self.num_401_calls += 1
pat = re.compile(r'digest ', flags=re.IGNORECASE)
self.chal = parse_dict_header(pat.sub('', s_auth, count=1))
# Consume content and release the original connection
# to allow our new request to reuse the same one.
r.content
r.raw.release_conn()
prep = r.request.copy()
extract_cookies_to_jar(prep._cookies, r.request, r.raw)
prep.prepare_cookies(prep._cookies)
prep.headers['Authorization'] = self.build_digest_header(
prep.method, prep.url)
_r = r.connection.send(prep, **kwargs)
_r.history.append(r)
_r.request = prep
return _r
self.num_401_calls = 1
return r
def __call__(self, r):
# If we have a saved nonce, skip the 401
if self.last_nonce:
r.headers['Authorization'] = self.build_digest_header(r.method, r.url)
try:
self.pos = r.body.tell()
except AttributeError:
# In the case of HTTPDigestAuth being reused and the body of
# the previous request was a file-like object, pos has the
# file position of the previous body. Ensure it's set to
# None.
self.pos = None
r.register_hook('response', self.handle_401)
r.register_hook('response', self.handle_redirect)
return r
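# Typical usage through the public requests API:
#
#   import requests
#   r = requests.get('https://example.com/protected',
#                    auth=HTTPDigestAuth('user', 'pass'))
#
# On a 401 carrying a Digest challenge, handle_401 above parses the
# challenge and re-sends the request with the computed Authorization header.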
| apache-2.0 |
SP7-Ritmare/starterkit | geosk/mdtools/views.py | 1 | 5159 | import os
import sys
import json
import logging
import pycsw
import subprocess
import re
from urlparse import urlparse
from django.conf import settings
from django.http import HttpResponse, JsonResponse
from django.forms import model_to_dict
from django.core.serializers.json import DjangoJSONEncoder
from django.forms.models import model_to_dict
from django.contrib.sites.models import Site
from geonode.layers.forms import LayerForm
from geonode.layers.models import Layer
from geonode.maps.forms import MapForm
from geonode.maps.models import Map
from geosk.mdtools.models import ServicesMetadata
from geosk import get_version as geosk_version
from geonode import get_version as geonode_version
from django import get_version as django_version
from django.db import connection
logger = logging.getLogger(__name__)
def get_data_api(request, format='json'):
if request.method not in ('GET','POST'):
return HttpResponse(status=405)
rtype = request.POST.get('rtype')
id = request.POST.get('id')
if rtype == 'layer':
r = Layer.objects.get(pk=id)
exclude = LayerForm._meta.exclude
    elif rtype == 'map':
        r = Map.objects.get(pk=id)
        exclude = MapForm._meta.exclude
    else:
        # Guard against unknown resource types instead of failing with a
        # NameError further down.
        return HttpResponse(status=400)
exclude = exclude + ('owner', 'title', 'distribution_url', 'distribution_description')
data = model_to_dict(r, exclude=exclude)
#f=LayerForm(model_to_dict(l), prefix='layer')
data['keywords'] = r.keyword_csv
data['regions'] = [reg.name for reg in r.regions.all()] if r.regions else []
# data_prefix = {"layer-%s" % k: v for k, v in data.items()}
results = {
'data': data
}
return HttpResponse(json.dumps(results, cls=DjangoJSONEncoder), mimetype="application/json")
def get_ubuntu_release():
try:
version = os.getenv('VERSION_UBUNTU', subprocess.check_output(['lsb_release', '-sr']))
#version = subprocess.check_output(['lsb_release', '-sr'])
except:
version = ''
return version.strip()
def get_postgres_version():
c = connection.cursor()
try:
c.execute('show server_version')
version = c.fetchone()[0]
except:
version = ''
finally:
c.close()
return version
def get_postgis_version():
c = connection.cursor()
try:
c.execute('SELECT PostGIS_version()')
version = c.fetchone()[0]
except:
version = ''
finally:
c.close()
return version
def get_java_version():
try:
version = os.getenv('VERSION_JAVA')
if not version:
version = subprocess.check_output(['java', '-version'], stderr=subprocess.STDOUT)
pattern = '\"(\d+\.\d+).*\"'
version = re.search(pattern, version).groups()[0]
except:
version = ''
return version
def get_tomcat_version():
try:
version = os.getenv('VERSION_TOMCAT')
if not version:
out = subprocess.check_output(['ps', '-efww']).split("\n")
reg_cat_home = re.compile('catalina.home=(\S*)')
for o in out:
if o.find('tomcat') >= 0:
_find = reg_cat_home.search(o)
if _find:
version = _get_tomcat_version(_find.groups()[0])
except:
version = ''
return version
def _get_tomcat_version(catalina_home):
cmd = os.path.join(catalina_home, 'bin', 'version.sh')
out = subprocess.check_output(cmd).split("\n")
for o in out:
if o.find('Server number') >= 0:
return o.split(':')[1]
def get_sos_version():
try:
        # Fall back to an empty string so .strip() below never sees None.
        version = os.getenv('VERSION_SOS') or ''
if not version:
try:
with open('/var/lib/tomcat7/webapps/observations/version-info.txt') as f:
out = f.readlines()
for o in out:
if o.find('version =') >= 0:
version = o.split('=')[1]
except BaseException:
version = ''
except TypeError:
version = ''
return version.strip()
def whoami(request, format='json'):
if ServicesMetadata.objects.count() == 1:
services_metadata = ServicesMetadata.objects.all()[0]
_md = model_to_dict(services_metadata)
domain = Site.objects.all()[0].domain
upurl = urlparse(os.environ['SITEURL'])
net_scheme = upurl.scheme
_md['uri'] = '%s://%s' % (net_scheme, domain)
_md['sk_domain_name'] = domain
# TODO sistemare
_md['endpoint_SOS_url'] = settings.SITEURL + 'observations/sos'
else:
_md = {
'message': 'Missing metadata'
}
# software
_md['software'] = {
'ubuntu': get_ubuntu_release(),
'java': get_java_version(),
'tomcat': get_tomcat_version(),
'postgresql': get_postgres_version(),
'postgis': get_postgis_version(),
'python': sys.version,
'getit': geosk_version(),
'geonode': geonode_version(),
'pycsw': pycsw.__version__,
'django': django_version(),
'sos': get_sos_version()
}
return JsonResponse(_md)
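# Sketch of the JSON returned by whoami (key names are taken from the code
# above; the values shown are placeholders, and the ServicesMetadata fields
# merged in via model_to_dict are omitted):
#
#   {
#     "uri": "http://getit.example.org",
#     "sk_domain_name": "getit.example.org",
#     "endpoint_SOS_url": "http://getit.example.org/observations/sos",
#     "software": {"ubuntu": "14.04", "java": "1.7", "tomcat": "7.0.52",
#                  "postgresql": "9.3", "postgis": "2.1", ...}
#   }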
| gpl-3.0 |
cmacmackin/isoft | plotting/eos.py | 1 | 1429 | #
# eos.py
# This file is part of ISOFT.
#
# Copyright 2017 Chris MacMackin <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
'''Contains classes for calculating density of water.
'''
import numpy as np
import calculus
class LinearEos(object):
'''A class representing a linearised equation of state. It uses the
equation:
density = ref_density*[1 - beta_T*(T-T_ref) + beta_S*(S-S_ref)]
'''
    def __init__(self, ref_density, beta_T, beta_S, T_ref, S_ref):
        self.rd = ref_density
        self.bT = beta_T
        self.bS = beta_S
        self.Tr = T_ref
        self.Sr = S_ref
    def __call__(self, T, S):
        return self.rd*(1 - self.bT*(T - self.Tr) + self.bS*(S - self.Sr))
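# Worked example with hypothetical seawater-like coefficients:
#
#   eos = LinearEos(ref_density=1000.0, beta_T=2.0e-4, beta_S=8.0e-4,
#                   T_ref=10.0, S_ref=35.0)
#   eos(12.0, 34.0)   # 1000*(1 - 2e-4*2 + 8e-4*(-1)) = 998.8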
| gpl-3.0 |
priyam0074/musicApp | node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py | 1407 | 47697 | # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
This module helps emulate Visual Studio 2008 behavior on top of other
build systems, primarily ninja.
"""
import os
import re
import subprocess
import sys
from gyp.common import OrderedSet
import gyp.MSVSUtil
import gyp.MSVSVersion
windows_quoter_regex = re.compile(r'(\\*)"')
def QuoteForRspFile(arg):
"""Quote a command line argument so that it appears as one argument when
processed via cmd.exe and parsed by CommandLineToArgvW (as is typical for
Windows programs)."""
# See http://goo.gl/cuFbX and http://goo.gl/dhPnp including the comment
# threads. This is actually the quoting rules for CommandLineToArgvW, not
# for the shell, because the shell doesn't do anything in Windows. This
# works more or less because most programs (including the compiler, etc.)
# use that function to handle command line arguments.
# For a literal quote, CommandLineToArgvW requires 2n+1 backslashes
# preceding it, and results in n backslashes + the quote. So we substitute
# in 2* what we match, +1 more, plus the quote.
arg = windows_quoter_regex.sub(lambda mo: 2 * mo.group(1) + '\\"', arg)
# %'s also need to be doubled otherwise they're interpreted as batch
# positional arguments. Also make sure to escape the % so that they're
# passed literally through escaping so they can be singled to just the
# original %. Otherwise, trying to pass the literal representation that
# looks like an environment variable to the shell (e.g. %PATH%) would fail.
arg = arg.replace('%', '%%')
# These commands are used in rsp files, so no escaping for the shell (via ^)
# is necessary.
# Finally, wrap the whole thing in quotes so that the above quote rule
# applies and whitespace isn't a word break.
return '"' + arg + '"'
def EncodeRspFileList(args):
"""Process a list of arguments using QuoteCmdExeArgument."""
# Note that the first argument is assumed to be the command. Don't add
# quotes around it because then built-ins like 'echo', etc. won't work.
# Take care to normpath only the path in the case of 'call ../x.bat' because
# otherwise the whole thing is incorrectly interpreted as a path and not
# normalized correctly.
if not args: return ''
if args[0].startswith('call '):
call, program = args[0].split(' ', 1)
program = call + ' ' + os.path.normpath(program)
else:
program = os.path.normpath(args[0])
return program + ' ' + ' '.join(QuoteForRspFile(arg) for arg in args[1:])
def _GenericRetrieve(root, default, path):
"""Given a list of dictionary keys |path| and a tree of dicts |root|, find
value at path, or return |default| if any of the path doesn't exist."""
if not root:
return default
if not path:
return root
return _GenericRetrieve(root.get(path[0]), default, path[1:])
def _AddPrefix(element, prefix):
"""Add |prefix| to |element| or each subelement if element is iterable."""
if element is None:
return element
# Note, not Iterable because we don't want to handle strings like that.
if isinstance(element, list) or isinstance(element, tuple):
return [prefix + e for e in element]
else:
return prefix + element
def _DoRemapping(element, map):
"""If |element| then remap it through |map|. If |element| is iterable then
each item will be remapped. Any elements not found will be removed."""
if map is not None and element is not None:
if not callable(map):
map = map.get # Assume it's a dict, otherwise a callable to do the remap.
if isinstance(element, list) or isinstance(element, tuple):
element = filter(None, [map(elem) for elem in element])
else:
element = map(element)
return element
def _AppendOrReturn(append, element):
"""If |append| is None, simply return |element|. If |append| is not None,
then add |element| to it, adding each item in |element| if it's a list or
tuple."""
if append is not None and element is not None:
if isinstance(element, list) or isinstance(element, tuple):
append.extend(element)
else:
append.append(element)
else:
return element
def _FindDirectXInstallation():
"""Try to find an installation location for the DirectX SDK. Check for the
standard environment variable, and if that doesn't exist, try to find
via the registry. May return None if not found in either location."""
# Return previously calculated value, if there is one
if hasattr(_FindDirectXInstallation, 'dxsdk_dir'):
return _FindDirectXInstallation.dxsdk_dir
dxsdk_dir = os.environ.get('DXSDK_DIR')
if not dxsdk_dir:
# Setup params to pass to and attempt to launch reg.exe.
cmd = ['reg.exe', 'query', r'HKLM\Software\Microsoft\DirectX', '/s']
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
for line in p.communicate()[0].splitlines():
if 'InstallPath' in line:
dxsdk_dir = line.split(' ')[3] + "\\"
# Cache return value
_FindDirectXInstallation.dxsdk_dir = dxsdk_dir
return dxsdk_dir
def GetGlobalVSMacroEnv(vs_version):
"""Get a dict of variables mapping internal VS macro names to their gyp
equivalents. Returns all variables that are independent of the target."""
env = {}
# '$(VSInstallDir)' and '$(VCInstallDir)' are available when and only when
# Visual Studio is actually installed.
if vs_version.Path():
env['$(VSInstallDir)'] = vs_version.Path()
env['$(VCInstallDir)'] = os.path.join(vs_version.Path(), 'VC') + '\\'
# Chromium uses DXSDK_DIR in include/lib paths, but it may or may not be
# set. This happens when the SDK is sync'd via src-internal, rather than
# by typical end-user installation of the SDK. If it's not set, we don't
# want to leave the unexpanded variable in the path, so simply strip it.
dxsdk_dir = _FindDirectXInstallation()
env['$(DXSDK_DIR)'] = dxsdk_dir if dxsdk_dir else ''
# Try to find an installation location for the Windows DDK by checking
# the WDK_DIR environment variable, may be None.
env['$(WDK_DIR)'] = os.environ.get('WDK_DIR', '')
return env
def ExtractSharedMSVSSystemIncludes(configs, generator_flags):
"""Finds msvs_system_include_dirs that are common to all targets, removes
them from all targets, and returns an OrderedSet containing them."""
all_system_includes = OrderedSet(
configs[0].get('msvs_system_include_dirs', []))
for config in configs[1:]:
system_includes = config.get('msvs_system_include_dirs', [])
all_system_includes = all_system_includes & OrderedSet(system_includes)
if not all_system_includes:
return None
# Expand macros in all_system_includes.
env = GetGlobalVSMacroEnv(GetVSVersion(generator_flags))
expanded_system_includes = OrderedSet([ExpandMacros(include, env)
for include in all_system_includes])
if any(['$' in include for include in expanded_system_includes]):
# Some path relies on target-specific variables, bail.
return None
# Remove system includes shared by all targets from the targets.
for config in configs:
includes = config.get('msvs_system_include_dirs', [])
if includes: # Don't insert a msvs_system_include_dirs key if not needed.
# This must check the unexpanded includes list:
new_includes = [i for i in includes if i not in all_system_includes]
config['msvs_system_include_dirs'] = new_includes
return expanded_system_includes
class MsvsSettings(object):
"""A class that understands the gyp 'msvs_...' values (especially the
  msvs_settings field). They largely correspond to the VS2008 IDE DOM. This
class helps map those settings to command line options."""
def __init__(self, spec, generator_flags):
self.spec = spec
self.vs_version = GetVSVersion(generator_flags)
supported_fields = [
('msvs_configuration_attributes', dict),
('msvs_settings', dict),
('msvs_system_include_dirs', list),
('msvs_disabled_warnings', list),
('msvs_precompiled_header', str),
('msvs_precompiled_source', str),
('msvs_configuration_platform', str),
('msvs_target_platform', str),
]
configs = spec['configurations']
for field, default in supported_fields:
setattr(self, field, {})
for configname, config in configs.iteritems():
getattr(self, field)[configname] = config.get(field, default())
self.msvs_cygwin_dirs = spec.get('msvs_cygwin_dirs', ['.'])
unsupported_fields = [
'msvs_prebuild',
'msvs_postbuild',
]
unsupported = []
for field in unsupported_fields:
for config in configs.values():
if field in config:
unsupported += ["%s not supported (target %s)." %
(field, spec['target_name'])]
if unsupported:
raise Exception('\n'.join(unsupported))
def GetExtension(self):
"""Returns the extension for the target, with no leading dot.
Uses 'product_extension' if specified, otherwise uses MSVS defaults based on
the target type.
"""
ext = self.spec.get('product_extension', None)
if ext:
return ext
return gyp.MSVSUtil.TARGET_TYPE_EXT.get(self.spec['type'], '')
def GetVSMacroEnv(self, base_to_build=None, config=None):
"""Get a dict of variables mapping internal VS macro names to their gyp
equivalents."""
target_platform = 'Win32' if self.GetArch(config) == 'x86' else 'x64'
target_name = self.spec.get('product_prefix', '') + \
self.spec.get('product_name', self.spec['target_name'])
target_dir = base_to_build + '\\' if base_to_build else ''
target_ext = '.' + self.GetExtension()
target_file_name = target_name + target_ext
replacements = {
'$(InputName)': '${root}',
'$(InputPath)': '${source}',
'$(IntDir)': '$!INTERMEDIATE_DIR',
'$(OutDir)\\': target_dir,
'$(PlatformName)': target_platform,
'$(ProjectDir)\\': '',
'$(ProjectName)': self.spec['target_name'],
'$(TargetDir)\\': target_dir,
'$(TargetExt)': target_ext,
'$(TargetFileName)': target_file_name,
'$(TargetName)': target_name,
'$(TargetPath)': os.path.join(target_dir, target_file_name),
}
replacements.update(GetGlobalVSMacroEnv(self.vs_version))
return replacements
def ConvertVSMacros(self, s, base_to_build=None, config=None):
"""Convert from VS macro names to something equivalent."""
env = self.GetVSMacroEnv(base_to_build, config=config)
return ExpandMacros(s, env)
def AdjustLibraries(self, libraries):
"""Strip -l from library if it's specified with that."""
libs = [lib[2:] if lib.startswith('-l') else lib for lib in libraries]
return [lib + '.lib' if not lib.endswith('.lib') else lib for lib in libs]
def _GetAndMunge(self, field, path, default, prefix, append, map):
"""Retrieve a value from |field| at |path| or return |default|. If
|append| is specified, and the item is found, it will be appended to that
object instead of returned. If |map| is specified, results will be
remapped through |map| before being returned or appended."""
result = _GenericRetrieve(field, default, path)
result = _DoRemapping(result, map)
result = _AddPrefix(result, prefix)
return _AppendOrReturn(append, result)
class _GetWrapper(object):
def __init__(self, parent, field, base_path, append=None):
self.parent = parent
self.field = field
self.base_path = [base_path]
self.append = append
def __call__(self, name, map=None, prefix='', default=None):
return self.parent._GetAndMunge(self.field, self.base_path + [name],
default=default, prefix=prefix, append=self.append, map=map)
def GetArch(self, config):
"""Get architecture based on msvs_configuration_platform and
msvs_target_platform. Returns either 'x86' or 'x64'."""
configuration_platform = self.msvs_configuration_platform.get(config, '')
platform = self.msvs_target_platform.get(config, '')
if not platform: # If no specific override, use the configuration's.
platform = configuration_platform
# Map from platform to architecture.
return {'Win32': 'x86', 'x64': 'x64'}.get(platform, 'x86')
def _TargetConfig(self, config):
"""Returns the target-specific configuration."""
# There's two levels of architecture/platform specification in VS. The
# first level is globally for the configuration (this is what we consider
# "the" config at the gyp level, which will be something like 'Debug' or
# 'Release_x64'), and a second target-specific configuration, which is an
# override for the global one. |config| is remapped here to take into
# account the local target-specific overrides to the global configuration.
arch = self.GetArch(config)
if arch == 'x64' and not config.endswith('_x64'):
config += '_x64'
if arch == 'x86' and config.endswith('_x64'):
config = config.rsplit('_', 1)[0]
return config
def _Setting(self, path, config,
default=None, prefix='', append=None, map=None):
"""_GetAndMunge for msvs_settings."""
return self._GetAndMunge(
self.msvs_settings[config], path, default, prefix, append, map)
def _ConfigAttrib(self, path, config,
default=None, prefix='', append=None, map=None):
"""_GetAndMunge for msvs_configuration_attributes."""
return self._GetAndMunge(
self.msvs_configuration_attributes[config],
path, default, prefix, append, map)
def AdjustIncludeDirs(self, include_dirs, config):
"""Updates include_dirs to expand VS specific paths, and adds the system
include dirs used for platform SDK and similar."""
config = self._TargetConfig(config)
includes = include_dirs + self.msvs_system_include_dirs[config]
includes.extend(self._Setting(
('VCCLCompilerTool', 'AdditionalIncludeDirectories'), config, default=[]))
return [self.ConvertVSMacros(p, config=config) for p in includes]
def AdjustMidlIncludeDirs(self, midl_include_dirs, config):
"""Updates midl_include_dirs to expand VS specific paths, and adds the
system include dirs used for platform SDK and similar."""
config = self._TargetConfig(config)
includes = midl_include_dirs + self.msvs_system_include_dirs[config]
includes.extend(self._Setting(
('VCMIDLTool', 'AdditionalIncludeDirectories'), config, default=[]))
return [self.ConvertVSMacros(p, config=config) for p in includes]
def GetComputedDefines(self, config):
"""Returns the set of defines that are injected to the defines list based
on other VS settings."""
config = self._TargetConfig(config)
defines = []
if self._ConfigAttrib(['CharacterSet'], config) == '1':
defines.extend(('_UNICODE', 'UNICODE'))
if self._ConfigAttrib(['CharacterSet'], config) == '2':
defines.append('_MBCS')
defines.extend(self._Setting(
('VCCLCompilerTool', 'PreprocessorDefinitions'), config, default=[]))
return defines
def GetCompilerPdbName(self, config, expand_special):
"""Get the pdb file name that should be used for compiler invocations, or
None if there's no explicit name specified."""
config = self._TargetConfig(config)
pdbname = self._Setting(
('VCCLCompilerTool', 'ProgramDataBaseFileName'), config)
if pdbname:
pdbname = expand_special(self.ConvertVSMacros(pdbname))
return pdbname
def GetMapFileName(self, config, expand_special):
"""Gets the explicitly overriden map file name for a target or returns None
if it's not set."""
config = self._TargetConfig(config)
map_file = self._Setting(('VCLinkerTool', 'MapFileName'), config)
if map_file:
map_file = expand_special(self.ConvertVSMacros(map_file, config=config))
return map_file
def GetOutputName(self, config, expand_special):
"""Gets the explicitly overridden output name for a target or returns None
if it's not overridden."""
config = self._TargetConfig(config)
type = self.spec['type']
root = 'VCLibrarianTool' if type == 'static_library' else 'VCLinkerTool'
# TODO(scottmg): Handle OutputDirectory without OutputFile.
output_file = self._Setting((root, 'OutputFile'), config)
if output_file:
output_file = expand_special(self.ConvertVSMacros(
output_file, config=config))
return output_file
def GetPDBName(self, config, expand_special, default):
"""Gets the explicitly overridden pdb name for a target or returns
default if it's not overridden, or if no pdb will be generated."""
config = self._TargetConfig(config)
output_file = self._Setting(('VCLinkerTool', 'ProgramDatabaseFile'), config)
generate_debug_info = self._Setting(
('VCLinkerTool', 'GenerateDebugInformation'), config)
if generate_debug_info == 'true':
if output_file:
return expand_special(self.ConvertVSMacros(output_file, config=config))
else:
return default
else:
return None
def GetNoImportLibrary(self, config):
"""If NoImportLibrary: true, ninja will not expect the output to include
an import library."""
config = self._TargetConfig(config)
noimplib = self._Setting(('NoImportLibrary',), config)
return noimplib == 'true'
def GetAsmflags(self, config):
"""Returns the flags that need to be added to ml invocations."""
config = self._TargetConfig(config)
asmflags = []
safeseh = self._Setting(('MASM', 'UseSafeExceptionHandlers'), config)
if safeseh == 'true':
asmflags.append('/safeseh')
return asmflags
def GetCflags(self, config):
"""Returns the flags that need to be added to .c and .cc compilations."""
config = self._TargetConfig(config)
cflags = []
cflags.extend(['/wd' + w for w in self.msvs_disabled_warnings[config]])
cl = self._GetWrapper(self, self.msvs_settings[config],
'VCCLCompilerTool', append=cflags)
cl('Optimization',
map={'0': 'd', '1': '1', '2': '2', '3': 'x'}, prefix='/O', default='2')
cl('InlineFunctionExpansion', prefix='/Ob')
cl('DisableSpecificWarnings', prefix='/wd')
cl('StringPooling', map={'true': '/GF'})
cl('EnableFiberSafeOptimizations', map={'true': '/GT'})
cl('OmitFramePointers', map={'false': '-', 'true': ''}, prefix='/Oy')
cl('EnableIntrinsicFunctions', map={'false': '-', 'true': ''}, prefix='/Oi')
cl('FavorSizeOrSpeed', map={'1': 't', '2': 's'}, prefix='/O')
cl('FloatingPointModel',
map={'0': 'precise', '1': 'strict', '2': 'fast'}, prefix='/fp:',
default='0')
cl('CompileAsManaged', map={'false': '', 'true': '/clr'})
cl('WholeProgramOptimization', map={'true': '/GL'})
cl('WarningLevel', prefix='/W')
cl('WarnAsError', map={'true': '/WX'})
cl('CallingConvention',
map={'0': 'd', '1': 'r', '2': 'z', '3': 'v'}, prefix='/G')
cl('DebugInformationFormat',
map={'1': '7', '3': 'i', '4': 'I'}, prefix='/Z')
cl('RuntimeTypeInfo', map={'true': '/GR', 'false': '/GR-'})
cl('EnableFunctionLevelLinking', map={'true': '/Gy', 'false': '/Gy-'})
cl('MinimalRebuild', map={'true': '/Gm'})
cl('BufferSecurityCheck', map={'true': '/GS', 'false': '/GS-'})
cl('BasicRuntimeChecks', map={'1': 's', '2': 'u', '3': '1'}, prefix='/RTC')
cl('RuntimeLibrary',
map={'0': 'T', '1': 'Td', '2': 'D', '3': 'Dd'}, prefix='/M')
cl('ExceptionHandling', map={'1': 'sc','2': 'a'}, prefix='/EH')
cl('DefaultCharIsUnsigned', map={'true': '/J'})
cl('TreatWChar_tAsBuiltInType',
map={'false': '-', 'true': ''}, prefix='/Zc:wchar_t')
cl('EnablePREfast', map={'true': '/analyze'})
cl('AdditionalOptions', prefix='')
cl('EnableEnhancedInstructionSet',
map={'1': 'SSE', '2': 'SSE2', '3': 'AVX', '4': 'IA32', '5': 'AVX2'},
prefix='/arch:')
cflags.extend(['/FI' + f for f in self._Setting(
('VCCLCompilerTool', 'ForcedIncludeFiles'), config, default=[])])
if self.vs_version.short_name in ('2013', '2013e', '2015'):
# New flag required in 2013 to maintain previous PDB behavior.
cflags.append('/FS')
# ninja handles parallelism by itself, don't have the compiler do it too.
cflags = filter(lambda x: not x.startswith('/MP'), cflags)
return cflags
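  # Illustration with a hypothetical msvs_settings fragment:
  #   {'VCCLCompilerTool': {'Optimization': '0', 'WarningLevel': '3',
  #                         'RuntimeLibrary': '3'}}
  # yields cflags including ['/Od', '/W3', '/MDd'], plus a /wdNNNN entry for
  # each warning listed in msvs_disabled_warnings.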
def _GetPchFlags(self, config, extension):
"""Get the flags to be added to the cflags for precompiled header support.
"""
config = self._TargetConfig(config)
# The PCH is only built once by a particular source file. Usage of PCH must
# only be for the same language (i.e. C vs. C++), so only include the pch
# flags when the language matches.
if self.msvs_precompiled_header[config]:
source_ext = os.path.splitext(self.msvs_precompiled_source[config])[1]
if _LanguageMatchesForPch(source_ext, extension):
pch = os.path.split(self.msvs_precompiled_header[config])[1]
return ['/Yu' + pch, '/FI' + pch, '/Fp${pchprefix}.' + pch + '.pch']
return []
def GetCflagsC(self, config):
"""Returns the flags that need to be added to .c compilations."""
config = self._TargetConfig(config)
return self._GetPchFlags(config, '.c')
def GetCflagsCC(self, config):
"""Returns the flags that need to be added to .cc compilations."""
config = self._TargetConfig(config)
return ['/TP'] + self._GetPchFlags(config, '.cc')
def _GetAdditionalLibraryDirectories(self, root, config, gyp_to_build_path):
"""Get and normalize the list of paths in AdditionalLibraryDirectories
setting."""
config = self._TargetConfig(config)
libpaths = self._Setting((root, 'AdditionalLibraryDirectories'),
config, default=[])
libpaths = [os.path.normpath(
gyp_to_build_path(self.ConvertVSMacros(p, config=config)))
for p in libpaths]
return ['/LIBPATH:"' + p + '"' for p in libpaths]
def GetLibFlags(self, config, gyp_to_build_path):
"""Returns the flags that need to be added to lib commands."""
config = self._TargetConfig(config)
libflags = []
lib = self._GetWrapper(self, self.msvs_settings[config],
'VCLibrarianTool', append=libflags)
libflags.extend(self._GetAdditionalLibraryDirectories(
'VCLibrarianTool', config, gyp_to_build_path))
lib('LinkTimeCodeGeneration', map={'true': '/LTCG'})
lib('TargetMachine', map={'1': 'X86', '17': 'X64', '3': 'ARM'},
prefix='/MACHINE:')
lib('AdditionalOptions')
return libflags
def GetDefFile(self, gyp_to_build_path):
"""Returns the .def file from sources, if any. Otherwise returns None."""
spec = self.spec
if spec['type'] in ('shared_library', 'loadable_module', 'executable'):
def_files = [s for s in spec.get('sources', []) if s.endswith('.def')]
if len(def_files) == 1:
return gyp_to_build_path(def_files[0])
elif len(def_files) > 1:
raise Exception("Multiple .def files")
return None
def _GetDefFileAsLdflags(self, ldflags, gyp_to_build_path):
""".def files get implicitly converted to a ModuleDefinitionFile for the
linker in the VS generator. Emulate that behaviour here."""
def_file = self.GetDefFile(gyp_to_build_path)
if def_file:
ldflags.append('/DEF:"%s"' % def_file)
def GetPGDName(self, config, expand_special):
"""Gets the explicitly overridden pgd name for a target or returns None
if it's not overridden."""
config = self._TargetConfig(config)
output_file = self._Setting(
('VCLinkerTool', 'ProfileGuidedDatabase'), config)
if output_file:
output_file = expand_special(self.ConvertVSMacros(
output_file, config=config))
return output_file
def GetLdflags(self, config, gyp_to_build_path, expand_special,
manifest_base_name, output_name, is_executable, build_dir):
"""Returns the flags that need to be added to link commands, and the
manifest files."""
config = self._TargetConfig(config)
ldflags = []
ld = self._GetWrapper(self, self.msvs_settings[config],
'VCLinkerTool', append=ldflags)
self._GetDefFileAsLdflags(ldflags, gyp_to_build_path)
ld('GenerateDebugInformation', map={'true': '/DEBUG'})
ld('TargetMachine', map={'1': 'X86', '17': 'X64', '3': 'ARM'},
prefix='/MACHINE:')
ldflags.extend(self._GetAdditionalLibraryDirectories(
'VCLinkerTool', config, gyp_to_build_path))
ld('DelayLoadDLLs', prefix='/DELAYLOAD:')
ld('TreatLinkerWarningAsErrors', prefix='/WX',
map={'true': '', 'false': ':NO'})
out = self.GetOutputName(config, expand_special)
if out:
ldflags.append('/OUT:' + out)
pdb = self.GetPDBName(config, expand_special, output_name + '.pdb')
if pdb:
ldflags.append('/PDB:' + pdb)
pgd = self.GetPGDName(config, expand_special)
if pgd:
ldflags.append('/PGD:' + pgd)
map_file = self.GetMapFileName(config, expand_special)
ld('GenerateMapFile', map={'true': '/MAP:' + map_file if map_file
else '/MAP'})
ld('MapExports', map={'true': '/MAPINFO:EXPORTS'})
ld('AdditionalOptions', prefix='')
minimum_required_version = self._Setting(
('VCLinkerTool', 'MinimumRequiredVersion'), config, default='')
if minimum_required_version:
minimum_required_version = ',' + minimum_required_version
ld('SubSystem',
map={'1': 'CONSOLE%s' % minimum_required_version,
'2': 'WINDOWS%s' % minimum_required_version},
prefix='/SUBSYSTEM:')
stack_reserve_size = self._Setting(
('VCLinkerTool', 'StackReserveSize'), config, default='')
if stack_reserve_size:
stack_commit_size = self._Setting(
('VCLinkerTool', 'StackCommitSize'), config, default='')
if stack_commit_size:
stack_commit_size = ',' + stack_commit_size
ldflags.append('/STACK:%s%s' % (stack_reserve_size, stack_commit_size))
ld('TerminalServerAware', map={'1': ':NO', '2': ''}, prefix='/TSAWARE')
ld('LinkIncremental', map={'1': ':NO', '2': ''}, prefix='/INCREMENTAL')
ld('BaseAddress', prefix='/BASE:')
ld('FixedBaseAddress', map={'1': ':NO', '2': ''}, prefix='/FIXED')
ld('RandomizedBaseAddress',
map={'1': ':NO', '2': ''}, prefix='/DYNAMICBASE')
ld('DataExecutionPrevention',
map={'1': ':NO', '2': ''}, prefix='/NXCOMPAT')
ld('OptimizeReferences', map={'1': 'NOREF', '2': 'REF'}, prefix='/OPT:')
ld('ForceSymbolReferences', prefix='/INCLUDE:')
ld('EnableCOMDATFolding', map={'1': 'NOICF', '2': 'ICF'}, prefix='/OPT:')
ld('LinkTimeCodeGeneration',
map={'1': '', '2': ':PGINSTRUMENT', '3': ':PGOPTIMIZE',
'4': ':PGUPDATE'},
prefix='/LTCG')
ld('IgnoreDefaultLibraryNames', prefix='/NODEFAULTLIB:')
ld('ResourceOnlyDLL', map={'true': '/NOENTRY'})
ld('EntryPointSymbol', prefix='/ENTRY:')
ld('Profile', map={'true': '/PROFILE'})
ld('LargeAddressAware',
map={'1': ':NO', '2': ''}, prefix='/LARGEADDRESSAWARE')
# TODO(scottmg): This should sort of be somewhere else (not really a flag).
ld('AdditionalDependencies', prefix='')
if self.GetArch(config) == 'x86':
safeseh_default = 'true'
else:
safeseh_default = None
ld('ImageHasSafeExceptionHandlers',
map={'false': ':NO', 'true': ''}, prefix='/SAFESEH',
default=safeseh_default)
# If the base address is not specifically controlled, DYNAMICBASE should
# be on by default.
base_flags = filter(lambda x: 'DYNAMICBASE' in x or x == '/FIXED',
ldflags)
if not base_flags:
ldflags.append('/DYNAMICBASE')
# If the NXCOMPAT flag has not been specified, default to on. Despite the
# documentation that says this only defaults to on when the subsystem is
# Vista or greater (which applies to the linker), the IDE defaults it on
# unless it's explicitly off.
if not filter(lambda x: 'NXCOMPAT' in x, ldflags):
ldflags.append('/NXCOMPAT')
have_def_file = filter(lambda x: x.startswith('/DEF:'), ldflags)
manifest_flags, intermediate_manifest, manifest_files = \
self._GetLdManifestFlags(config, manifest_base_name, gyp_to_build_path,
is_executable and not have_def_file, build_dir)
ldflags.extend(manifest_flags)
return ldflags, intermediate_manifest, manifest_files
def _GetLdManifestFlags(self, config, name, gyp_to_build_path,
allow_isolation, build_dir):
"""Returns a 3-tuple:
- the set of flags that need to be added to the link to generate
a default manifest
- the intermediate manifest that the linker will generate that should be
used to assert it doesn't add anything to the merged one.
- the list of all the manifest files to be merged by the manifest tool and
included into the link."""
generate_manifest = self._Setting(('VCLinkerTool', 'GenerateManifest'),
config,
default='true')
if generate_manifest != 'true':
# This means not only that the linker should not generate the intermediate
# manifest but also that the manifest tool should do nothing even when
# additional manifests are specified.
return ['/MANIFEST:NO'], [], []
output_name = name + '.intermediate.manifest'
flags = [
'/MANIFEST',
'/ManifestFile:' + output_name,
]
# Instead of using the MANIFESTUAC flags, we generate a .manifest to
# include into the list of manifests. This allows us to avoid the need to
# do two passes during linking. The /MANIFEST flag and /ManifestFile are
# still used, and the intermediate manifest is used to assert that the
# final manifest we get from merging all the additional manifest files
# (plus the one we generate here) isn't modified by merging the
# intermediate into it.
# Always NO, because we generate a manifest file that has what we want.
flags.append('/MANIFESTUAC:NO')
config = self._TargetConfig(config)
enable_uac = self._Setting(('VCLinkerTool', 'EnableUAC'), config,
default='true')
manifest_files = []
generated_manifest_outer = \
"<?xml version='1.0' encoding='UTF-8' standalone='yes'?>" \
"<assembly xmlns='urn:schemas-microsoft-com:asm.v1' manifestVersion='1.0'>%s" \
"</assembly>"
if enable_uac == 'true':
execution_level = self._Setting(('VCLinkerTool', 'UACExecutionLevel'),
config, default='0')
execution_level_map = {
'0': 'asInvoker',
'1': 'highestAvailable',
'2': 'requireAdministrator'
}
ui_access = self._Setting(('VCLinkerTool', 'UACUIAccess'), config,
default='false')
inner = '''
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
<security>
<requestedPrivileges>
<requestedExecutionLevel level='%s' uiAccess='%s' />
</requestedPrivileges>
</security>
</trustInfo>''' % (execution_level_map[execution_level], ui_access)
else:
inner = ''
generated_manifest_contents = generated_manifest_outer % inner
generated_name = name + '.generated.manifest'
# Need to join with the build_dir here as we're writing it during
# generation time, but we return the un-joined version because the build
# will occur in that directory. We only write the file if the contents
# have changed so that simply regenerating the project files doesn't
# cause a relink.
build_dir_generated_name = os.path.join(build_dir, generated_name)
gyp.common.EnsureDirExists(build_dir_generated_name)
f = gyp.common.WriteOnDiff(build_dir_generated_name)
f.write(generated_manifest_contents)
f.close()
manifest_files = [generated_name]
if allow_isolation:
flags.append('/ALLOWISOLATION')
manifest_files += self._GetAdditionalManifestFiles(config,
gyp_to_build_path)
return flags, output_name, manifest_files
def _GetAdditionalManifestFiles(self, config, gyp_to_build_path):
"""Gets additional manifest files that are added to the default one
generated by the linker."""
files = self._Setting(('VCManifestTool', 'AdditionalManifestFiles'), config,
default=[])
if isinstance(files, str):
files = files.split(';')
return [os.path.normpath(
gyp_to_build_path(self.ConvertVSMacros(f, config=config)))
for f in files]
def IsUseLibraryDependencyInputs(self, config):
"""Returns whether the target should be linked via Use Library Dependency
Inputs (using component .objs of a given .lib)."""
config = self._TargetConfig(config)
uldi = self._Setting(('VCLinkerTool', 'UseLibraryDependencyInputs'), config)
return uldi == 'true'
def IsEmbedManifest(self, config):
"""Returns whether manifest should be linked into binary."""
config = self._TargetConfig(config)
embed = self._Setting(('VCManifestTool', 'EmbedManifest'), config,
default='true')
return embed == 'true'
def IsLinkIncremental(self, config):
"""Returns whether the target should be linked incrementally."""
config = self._TargetConfig(config)
link_inc = self._Setting(('VCLinkerTool', 'LinkIncremental'), config)
return link_inc != '1'
def GetRcflags(self, config, gyp_to_ninja_path):
"""Returns the flags that need to be added to invocations of the resource
compiler."""
config = self._TargetConfig(config)
rcflags = []
rc = self._GetWrapper(self, self.msvs_settings[config],
'VCResourceCompilerTool', append=rcflags)
rc('AdditionalIncludeDirectories', map=gyp_to_ninja_path, prefix='/I')
rcflags.append('/I' + gyp_to_ninja_path('.'))
rc('PreprocessorDefinitions', prefix='/d')
# /l arg must be in hex without leading '0x'
rc('Culture', prefix='/l', map=lambda x: hex(int(x))[2:])
return rcflags
def BuildCygwinBashCommandLine(self, args, path_to_base):
"""Build a command line that runs args via cygwin bash. We assume that all
incoming paths are in Windows normpath'd form, so they need to be
converted to posix style for the part of the command line that's passed to
bash. We also have to do some Visual Studio macro emulation here because
various rules use magic VS names for things. Also note that rules that
contain ninja variables cannot be fixed here (for example ${source}), so
the outer generator needs to make sure that the paths that are written out
are in posix style, if the command line will be used here."""
cygwin_dir = os.path.normpath(
os.path.join(path_to_base, self.msvs_cygwin_dirs[0]))
cd = ('cd %s' % path_to_base).replace('\\', '/')
args = [a.replace('\\', '/').replace('"', '\\"') for a in args]
args = ["'%s'" % a.replace("'", "'\\''") for a in args]
bash_cmd = ' '.join(args)
cmd = (
'call "%s\\setup_env.bat" && set CYGWIN=nontsec && ' % cygwin_dir +
'bash -c "%s ; %s"' % (cd, bash_cmd))
return cmd
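  # Illustrative sketch (not from the original source) of what
  # BuildCygwinBashCommandLine above produces: for hypothetical inputs
  # args = ['cp', 'a b.txt', 'out'] and path_to_base = '..\\..', with
  # msvs_cygwin_dirs[0] = 'third_party/cygwin', the returned command is
  # roughly:
  #   call "..\..\third_party\cygwin\setup_env.bat" && set CYGWIN=nontsec
  #   && bash -c "cd ../.. ; 'cp' 'a b.txt' 'out'"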
def IsRuleRunUnderCygwin(self, rule):
"""Determine if an action should be run under cygwin. If the variable is
unset, or set to 1 we use cygwin."""
return int(rule.get('msvs_cygwin_shell',
self.spec.get('msvs_cygwin_shell', 1))) != 0
def _HasExplicitRuleForExtension(self, spec, extension):
"""Determine if there's an explicit rule for a particular extension."""
for rule in spec.get('rules', []):
if rule['extension'] == extension:
return True
return False
def _HasExplicitIdlActions(self, spec):
"""Determine if an action should not run midl for .idl files."""
return any([action.get('explicit_idl_action', 0)
for action in spec.get('actions', [])])
def HasExplicitIdlRulesOrActions(self, spec):
"""Determine if there's an explicit rule or action for idl files. When
there isn't we need to generate implicit rules to build MIDL .idl files."""
return (self._HasExplicitRuleForExtension(spec, 'idl') or
self._HasExplicitIdlActions(spec))
def HasExplicitAsmRules(self, spec):
"""Determine if there's an explicit rule for asm files. When there isn't we
need to generate implicit rules to assemble .asm files."""
return self._HasExplicitRuleForExtension(spec, 'asm')
def GetIdlBuildData(self, source, config):
"""Determine the implicit outputs for an idl file. Returns output
directory, outputs, and variables and flags that are required."""
config = self._TargetConfig(config)
midl_get = self._GetWrapper(self, self.msvs_settings[config], 'VCMIDLTool')
def midl(name, default=None):
return self.ConvertVSMacros(midl_get(name, default=default),
config=config)
tlb = midl('TypeLibraryName', default='${root}.tlb')
header = midl('HeaderFileName', default='${root}.h')
dlldata = midl('DLLDataFileName', default='dlldata.c')
iid = midl('InterfaceIdentifierFileName', default='${root}_i.c')
proxy = midl('ProxyFileName', default='${root}_p.c')
# Note that .tlb is not included in the outputs as it is not always
# generated depending on the content of the input idl file.
outdir = midl('OutputDirectory', default='')
output = [header, dlldata, iid, proxy]
variables = [('tlb', tlb),
('h', header),
('dlldata', dlldata),
('iid', iid),
('proxy', proxy)]
# TODO(scottmg): Are there configuration settings to set these flags?
target_platform = 'win32' if self.GetArch(config) == 'x86' else 'x64'
flags = ['/char', 'signed', '/env', target_platform, '/Oicf']
return outdir, output, variables, flags
def _LanguageMatchesForPch(source_ext, pch_source_ext):
c_exts = ('.c',)
cc_exts = ('.cc', '.cxx', '.cpp')
return ((source_ext in c_exts and pch_source_ext in c_exts) or
(source_ext in cc_exts and pch_source_ext in cc_exts))
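def _ExamplePchLanguageMatch():
  # Illustrative sketch, not from the original source: a C++ translation unit
  # may reuse a PCH built from another C++ source, but never one built from C.
  assert _LanguageMatchesForPch('.cpp', '.cc')
  assert not _LanguageMatchesForPch('.c', '.cc')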
class PrecompiledHeader(object):
"""Helper to generate dependencies and build rules to handle generation of
precompiled headers. Interface matches the GCH handler in xcode_emulation.py.
"""
def __init__(
self, settings, config, gyp_to_build_path, gyp_to_unique_output, obj_ext):
self.settings = settings
self.config = config
pch_source = self.settings.msvs_precompiled_source[self.config]
self.pch_source = gyp_to_build_path(pch_source)
filename, _ = os.path.splitext(pch_source)
self.output_obj = gyp_to_unique_output(filename + obj_ext).lower()
def _PchHeader(self):
"""Get the header that will appear in an #include line for all source
files."""
return os.path.split(self.settings.msvs_precompiled_header[self.config])[1]
def GetObjDependencies(self, sources, objs, arch):
"""Given a list of sources files and the corresponding object files,
returns a list of the pch files that should be depended upon. The
additional wrapping in the return value is for interface compatibility
with make.py on Mac, and xcode_emulation.py."""
assert arch is None
if not self._PchHeader():
return []
pch_ext = os.path.splitext(self.pch_source)[1]
for source in sources:
if _LanguageMatchesForPch(os.path.splitext(source)[1], pch_ext):
return [(None, None, self.output_obj)]
return []
def GetPchBuildCommands(self, arch):
"""Not used on Windows as there are no additional build steps required
(instead, existing steps are modified in GetFlagsModifications below)."""
return []
def GetFlagsModifications(self, input, output, implicit, command,
cflags_c, cflags_cc, expand_special):
"""Get the modified cflags and implicit dependencies that should be used
for the pch compilation step."""
if input == self.pch_source:
pch_output = ['/Yc' + self._PchHeader()]
if command == 'cxx':
return ([('cflags_cc', map(expand_special, cflags_cc + pch_output))],
self.output_obj, [])
elif command == 'cc':
return ([('cflags_c', map(expand_special, cflags_c + pch_output))],
self.output_obj, [])
return [], output, implicit
vs_version = None
def GetVSVersion(generator_flags):
global vs_version
if not vs_version:
vs_version = gyp.MSVSVersion.SelectVisualStudioVersion(
generator_flags.get('msvs_version', 'auto'),
allow_fallback=False)
return vs_version
def _GetVsvarsSetupArgs(generator_flags, arch):
vs = GetVSVersion(generator_flags)
return vs.SetupScript()
def ExpandMacros(string, expansions):
"""Expand $(Variable) per expansions dict. See MsvsSettings.GetVSMacroEnv
for the canonical way to retrieve a suitable dict."""
if '$' in string:
for old, new in expansions.iteritems():
assert '$(' not in new, new
string = string.replace(old, new)
return string
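def _ExampleExpandMacros():
  # Illustrative sketch, not from the original source: the expansions dict
  # normally comes from MsvsSettings.GetVSMacroEnv(), but any mapping works.
  expansions = {'$(OutDir)': 'out\\Release\\', '$(ProjectName)': 'base'}
  return ExpandMacros('$(OutDir)$(ProjectName).dll', expansions)
  # -> 'out\\Release\\base.dll'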
def _ExtractImportantEnvironment(output_of_set):
"""Extracts environment variables required for the toolchain to run from
a textual dump output by the cmd.exe 'set' command."""
envvars_to_save = (
'goma_.*', # TODO(scottmg): This is ugly, but needed for goma.
'include',
'lib',
'libpath',
'path',
'pathext',
'systemroot',
'temp',
'tmp',
)
env = {}
for line in output_of_set.splitlines():
for envvar in envvars_to_save:
if re.match(envvar + '=', line.lower()):
var, setting = line.split('=', 1)
if envvar == 'path':
# Our own rules (for running gyp-win-tool) and other actions in
# Chromium rely on python being in the path. Add the path to this
# python here so that if it's not in the path when ninja is run
# later, python will still be found.
setting = os.path.dirname(sys.executable) + os.pathsep + setting
env[var.upper()] = setting
break
for required in ('SYSTEMROOT', 'TEMP', 'TMP'):
if required not in env:
raise Exception('Environment variable "%s" '
'required to be set to valid path' % required)
return env
def _FormatAsEnvironmentBlock(envvar_dict):
"""Format as an 'environment block' directly suitable for CreateProcess.
Briefly this is a list of key=value\0, terminated by an additional \0. See
CreateProcess documentation for more details."""
block = ''
nul = '\0'
for key, value in envvar_dict.iteritems():
block += key + '=' + value + nul
block += nul
return block
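def _ExampleFormatAsEnvironmentBlock():
  # Illustrative sketch, not from the original source: each variable becomes
  # a NUL-terminated 'KEY=value' entry and the whole block gets one extra
  # trailing NUL, e.g. 'PATH=C:\\bin\x00TMP=C:\\t\x00\x00' (dict order may
  # vary).
  return _FormatAsEnvironmentBlock({'PATH': 'C:\\bin', 'TMP': 'C:\\t'})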
def _ExtractCLPath(output_of_where):
"""Gets the path to cl.exe based on the output of calling the environment
setup batch file, followed by the equivalent of `where`."""
# Take the first line, as that's the first found in the PATH.
for line in output_of_where.strip().splitlines():
if line.startswith('LOC:'):
return line[len('LOC:'):].strip()
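def _ExampleExtractCLPath():
  # Illustrative sketch, not from the original source: the environment setup
  # output is scanned for the first line tagged 'LOC:'.
  sample = 'Setting environment for VS...\nLOC:C:\\VC\\bin\\cl.exe\n'
  return _ExtractCLPath(sample)  # -> 'C:\\VC\\bin\\cl.exe'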
def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags,
system_includes, open_out):
"""It's not sufficient to have the absolute path to the compiler, linker,
etc. on Windows, as those tools rely on .dlls being in the PATH. We also
need to support both x86 and x64 compilers within the same build (to support
msvs_target_platform hackery). Different architectures require a different
compiler binary, and different supporting environment variables (INCLUDE,
LIB, LIBPATH). So, we extract the environment here, wrap all invocations
of compiler tools (cl, link, lib, rc, midl, etc.) via win_tool.py which
sets up the environment, and then we do not prefix the compiler with
an absolute path, instead preferring something like "cl.exe" in the rule
which will then run whichever the environment setup has put in the path.
  When this procedure for generating environment files does not meet your
  requirements (e.g. for custom toolchains), you can pass
  "-G ninja_use_custom_environment_files" to gyp to suppress file generation
  and supply custom environment files that you have prepared yourself."""
archs = ('x86', 'x64')
if generator_flags.get('ninja_use_custom_environment_files', 0):
cl_paths = {}
for arch in archs:
cl_paths[arch] = 'cl.exe'
return cl_paths
vs = GetVSVersion(generator_flags)
cl_paths = {}
for arch in archs:
# Extract environment variables for subprocesses.
args = vs.SetupScript(arch)
args.extend(('&&', 'set'))
popen = subprocess.Popen(
args, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
variables, _ = popen.communicate()
env = _ExtractImportantEnvironment(variables)
# Inject system includes from gyp files into INCLUDE.
if system_includes:
system_includes = system_includes | OrderedSet(
env.get('INCLUDE', '').split(';'))
env['INCLUDE'] = ';'.join(system_includes)
env_block = _FormatAsEnvironmentBlock(env)
f = open_out(os.path.join(toplevel_build_dir, 'environment.' + arch), 'wb')
f.write(env_block)
f.close()
# Find cl.exe location for this architecture.
args = vs.SetupScript(arch)
args.extend(('&&',
'for', '%i', 'in', '(cl.exe)', 'do', '@echo', 'LOC:%~$PATH:i'))
popen = subprocess.Popen(args, shell=True, stdout=subprocess.PIPE)
output, _ = popen.communicate()
cl_paths[arch] = _ExtractCLPath(output)
return cl_paths
def VerifyMissingSources(sources, build_dir, generator_flags, gyp_to_ninja):
"""Emulate behavior of msvs_error_on_missing_sources present in the msvs
generator: Check that all regular source files, i.e. not created at run time,
exist on disk. Missing files cause needless recompilation when building via
VS, and we want this check to match for people/bots that build using ninja,
so they're not surprised when the VS build fails."""
if int(generator_flags.get('msvs_error_on_missing_sources', 0)):
no_specials = filter(lambda x: '$' not in x, sources)
relative = [os.path.join(build_dir, gyp_to_ninja(s)) for s in no_specials]
missing = filter(lambda x: not os.path.exists(x), relative)
if missing:
# They'll look like out\Release\..\..\stuff\things.cc, so normalize the
# path for a slightly less crazy looking output.
cleaned_up = [os.path.normpath(x) for x in missing]
raise Exception('Missing input files:\n%s' % '\n'.join(cleaned_up))
# Sets some values in default_variables, which are required by many
# generators when run on Windows.
def CalculateCommonVariables(default_variables, params):
generator_flags = params.get('generator_flags', {})
# Set a variable so conditions can be based on msvs_version.
msvs_version = gyp.msvs_emulation.GetVSVersion(generator_flags)
default_variables['MSVS_VERSION'] = msvs_version.ShortName()
# To determine processor word size on Windows, in addition to checking
# PROCESSOR_ARCHITECTURE (which reflects the word size of the current
# process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which
# contains the actual word size of the system when running through WOW64).
if ('64' in os.environ.get('PROCESSOR_ARCHITECTURE', '') or
'64' in os.environ.get('PROCESSOR_ARCHITEW6432', '')):
default_variables['MSVS_OS_BITS'] = 64
else:
default_variables['MSVS_OS_BITS'] = 32
| mit |
alonsebastian/SocialID | personal_page/views.py | 1 | 5302 | from django.shortcuts import render_to_response, get_object_or_404, redirect
from django.contrib.auth.decorators import login_required
from django.template import RequestContext
from personal_page.models import PersonalPage
from personal_page.forms import PersonalPageForm
from accounts.models import UserProfile
def personal(request, id_):  # id_ is captured from the URL pattern in urls.py
    """ Return the requested personal page. If no page exists for the given ID,
    no 404 status is returned; instead the page shows a message that no such
    personal page has been created."""
id_ = id_.lower()
user = UserProfile.objects.filter(social_id = id_)
site = PersonalPage.objects.filter(user = user)
context_instance = RequestContext(request)
editable = no_more_data = False
if request.user.is_authenticated():
for dictionary in context_instance:
if 'id_' in dictionary:
if dictionary['id_'] == id_:
editable = True
break
if user and site:
site = site[0]
no_bio = edit_button = False
if site.bio == "":
no_bio = True
if request.user == user[0].user:
edit_button = True
if "\n No more data available." in site.bio:
site.bio = site.bio[:-25]
no_more_data = True
return render_to_response("personal_page/general.html", {'site' : site, 'no_bio' : no_bio, 'edit_button' : edit_button, 'editable' : editable, 'no_more_data' : no_more_data},
context_instance)
return render_to_response("personal_page/general.html", {'not_found' : True, 'editable' : editable}, context_instance)
@login_required
def manage(request):
    """For a GET request, show a form for modifying the personal page. If the
    logged-in user already has a personal page, the form is pre-filled with
    the values stored in the database.
    For a POST request, validate the form and update the database."""
if not request.POST:
form = PersonalPageForm()
profile = UserProfile.objects.get(user = request.user)
old = PersonalPage.objects.filter(user = profile)
if old:
old = old [0]
            form.initial = {'bio': old.bio, 'facebook': old.facebook,
                            'location': old.location, 'email': old.email,
                            'linkedin': old.linkedin, 'twitter': old.twitter,
                            'tumblr': old.tumblr,
                            'personal_site': old.personal_site}
return render_to_response("personal_page/modify.html", {'form':form}, context_instance=RequestContext(request))
else:
form = PersonalPageForm(request.POST)
profile = UserProfile.objects.get(user = request.user)
old = PersonalPage.objects.filter(user = profile)
if old and form.is_valid():
            old = old[0]  # update the existing page in place
old.bio = form.cleaned_data['bio']
old.location = form.cleaned_data['location']
old.email = form.cleaned_data['email']
old.linkedin = form.cleaned_data['linkedin']
old.facebook = linkMaker(form.cleaned_data['facebook'])
old.twitter = twitterizer(form.cleaned_data['twitter'])
old.tumblr = linkMaker(form.cleaned_data['tumblr'])
old.personal_site = linkMaker(form.cleaned_data['personal_site'])
address = "/" + profile.social_id + "/"
old.save()
            return redirect(address)  # the home page goes here
if form.is_valid():
page = PersonalPage(user = profile,
bio = form.cleaned_data['bio'],
location = form.cleaned_data['location'],
email = form.cleaned_data['email'],
linkedin = linkMaker(form.cleaned_data['linkedin']),
facebook = linkMaker(form.cleaned_data['facebook']),
twitter = twitterizer(form.cleaned_data['twitter']),
tumblr = linkMaker(form.cleaned_data['tumblr']),
personal_site = linkMaker(form.cleaned_data['personal_site']))
page.save()
address = "/" + profile.social_id + "/"
            return redirect(address)  # the home page goes here
        else:
            # Re-render the bound form so validation errors are shown instead
            # of silently discarding the user's input.
            return render_to_response("personal_page/modify.html", {'form': form}, context_instance=RequestContext(request))
def linkMaker(string):
""" Some users would give the full URL to their online profiles (facebook, linkedin, etc).
Other didn't. Because of this every URL goes through this simple filter to make sure it
is a link."""
if string == "": return ""
if "http://" not in string.lower():
string = "http://" + string
return string
else: return string
def twitterizer(string):
    """ Some users give the URL of their Twitter profile and some give their
    handle, e.g. '@myCoolAccount'. This function normalizes both forms to a
    link displayable on the personal page."""
if string and string[0] == "@":
return "https://twitter.com/#!/" + string [1:]
else:
return linkMaker(string)
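def _exampleNormalizers():
    """ Illustrative sketch only, not part of the original app: shows how the
    two normalizers above behave on typical user input."""
    assert linkMaker("example.com") == "http://example.com"
    assert linkMaker("http://example.com") == "http://example.com"
    assert twitterizer("@myCoolAccount") == "https://twitter.com/#!/myCoolAccount"
    assert twitterizer("twitter.com/me") == "http://twitter.com/me"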
| gpl-2.0 |
halberom/ansible-modules-core | cloud/google/gce_lb.py | 7 | 12559 | #!/usr/bin/python
# Copyright 2013 Google Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: gce_lb
version_added: "1.5"
short_description: create/destroy GCE load-balancer resources
description:
- This module can create and destroy Google Compute Engine C(loadbalancer)
and C(httphealthcheck) resources. The primary LB resource is the
C(load_balancer) resource and the health check parameters are all
prefixed with I(httphealthcheck).
The full documentation for Google Compute Engine load balancing is at
U(https://developers.google.com/compute/docs/load-balancing/). However,
the ansible module simplifies the configuration by following the
libcloud model.
Full install/configuration instructions for the gce* modules can
be found in the comments of ansible/test/gce_tests.py.
options:
httphealthcheck_name:
description:
- the name identifier for the HTTP health check
required: false
default: null
httphealthcheck_port:
description:
- the TCP port to use for HTTP health checking
required: false
default: 80
httphealthcheck_path:
description:
- the url path to use for HTTP health checking
required: false
default: "/"
httphealthcheck_interval:
description:
- the duration in seconds between each health check request
required: false
default: 5
httphealthcheck_timeout:
description:
- the timeout in seconds before a request is considered a failed check
required: false
default: 5
httphealthcheck_unhealthy_count:
description:
- number of consecutive failed checks before marking a node unhealthy
required: false
default: 2
httphealthcheck_healthy_count:
description:
- number of consecutive successful checks before marking a node healthy
required: false
default: 2
httphealthcheck_host:
description:
- host header to pass through on HTTP check requests
required: false
default: null
name:
description:
- name of the load-balancer resource
required: false
default: null
protocol:
description:
- the protocol used for the load-balancer packet forwarding, tcp or udp
required: false
default: "tcp"
choices: ['tcp', 'udp']
region:
description:
- the GCE region where the load-balancer is defined
required: false
external_ip:
description:
- the external static IPv4 (or auto-assigned) address for the LB
required: false
default: null
port_range:
description:
- the port (range) to forward, e.g. 80 or 8000-8888 defaults to all ports
required: false
default: null
members:
description:
- a list of zone/nodename pairs, e.g ['us-central1-a/www-a', ...]
required: false
aliases: ['nodes']
state:
description:
- desired state of the LB
default: "present"
choices: ["active", "present", "absent", "deleted"]
aliases: []
required: false
service_account_email:
version_added: "1.6"
description:
- service account email
required: false
default: null
aliases: []
pem_file:
version_added: "1.6"
description:
- path to the pem file associated with the service account email
This option is deprecated. Use 'credentials_file'.
required: false
default: null
aliases: []
credentials_file:
version_added: "2.1.0"
description:
- path to the JSON file associated with the service account email
default: null
required: false
project_id:
version_added: "1.6"
description:
- your GCE project ID
required: false
default: null
aliases: []
requirements:
- "python >= 2.6"
- "apache-libcloud >= 0.13.3, >= 0.17.0 if using JSON credentials"
author: "Eric Johnson (@erjohnso) <[email protected]>"
'''
EXAMPLES = '''
# Simple example of creating a new LB, adding members, and a health check
- local_action:
module: gce_lb
name: testlb
region: us-central1
members: ["us-central1-a/www-a", "us-central1-b/www-b"]
httphealthcheck_name: hc
httphealthcheck_port: 80
httphealthcheck_path: "/up"
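# Hypothetical follow-up (not from the original docs): remove the same
# load-balancer and its health check when they are no longer needed.
- local_action:
    module: gce_lb
    name: testlb
    region: us-central1
    httphealthcheck_name: hc
    state: deleted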
'''
try:
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
from libcloud.loadbalancer.types import Provider as Provider_lb
from libcloud.loadbalancer.providers import get_driver as get_driver_lb
from libcloud.common.google import GoogleBaseError, QuotaExceededError, \
ResourceExistsError, ResourceNotFoundError
_ = Provider.GCE
HAS_LIBCLOUD = True
except ImportError:
HAS_LIBCLOUD = False
def main():
module = AnsibleModule(
argument_spec = dict(
httphealthcheck_name = dict(),
httphealthcheck_port = dict(default=80),
httphealthcheck_path = dict(default='/'),
httphealthcheck_interval = dict(default=5),
httphealthcheck_timeout = dict(default=5),
httphealthcheck_unhealthy_count = dict(default=2),
httphealthcheck_healthy_count = dict(default=2),
httphealthcheck_host = dict(),
name = dict(),
protocol = dict(default='tcp'),
region = dict(),
external_ip = dict(),
port_range = dict(),
members = dict(type='list'),
state = dict(default='present'),
service_account_email = dict(),
pem_file = dict(),
credentials_file = dict(),
project_id = dict(),
)
)
if not HAS_LIBCLOUD:
module.fail_json(msg='libcloud with GCE support (0.13.3+) required for this module.')
gce = gce_connect(module)
httphealthcheck_name = module.params.get('httphealthcheck_name')
httphealthcheck_port = module.params.get('httphealthcheck_port')
httphealthcheck_path = module.params.get('httphealthcheck_path')
httphealthcheck_interval = module.params.get('httphealthcheck_interval')
httphealthcheck_timeout = module.params.get('httphealthcheck_timeout')
httphealthcheck_unhealthy_count = \
module.params.get('httphealthcheck_unhealthy_count')
httphealthcheck_healthy_count = \
module.params.get('httphealthcheck_healthy_count')
httphealthcheck_host = module.params.get('httphealthcheck_host')
name = module.params.get('name')
protocol = module.params.get('protocol')
region = module.params.get('region')
external_ip = module.params.get('external_ip')
port_range = module.params.get('port_range')
members = module.params.get('members')
state = module.params.get('state')
try:
gcelb = get_driver_lb(Provider_lb.GCE)(gce_driver=gce)
gcelb.connection.user_agent_append("%s/%s" % (
USER_AGENT_PRODUCT, USER_AGENT_VERSION))
except Exception, e:
module.fail_json(msg=unexpected_error_msg(e), changed=False)
changed = False
json_output = {'name': name, 'state': state}
if not name and not httphealthcheck_name:
module.fail_json(msg='Nothing to do, please specify a "name" ' + \
'or "httphealthcheck_name" parameter', changed=False)
if state in ['active', 'present']:
# first, create the httphealthcheck if requested
hc = None
if httphealthcheck_name:
json_output['httphealthcheck_name'] = httphealthcheck_name
try:
hc = gcelb.ex_create_healthcheck(httphealthcheck_name,
host=httphealthcheck_host, path=httphealthcheck_path,
port=httphealthcheck_port,
interval=httphealthcheck_interval,
timeout=httphealthcheck_timeout,
unhealthy_threshold=httphealthcheck_unhealthy_count,
healthy_threshold=httphealthcheck_healthy_count)
changed = True
except ResourceExistsError:
hc = gce.ex_get_healthcheck(httphealthcheck_name)
except Exception, e:
module.fail_json(msg=unexpected_error_msg(e), changed=False)
if hc is not None:
json_output['httphealthcheck_host'] = hc.extra['host']
json_output['httphealthcheck_path'] = hc.path
json_output['httphealthcheck_port'] = hc.port
json_output['httphealthcheck_interval'] = hc.interval
json_output['httphealthcheck_timeout'] = hc.timeout
json_output['httphealthcheck_unhealthy_count'] = \
hc.unhealthy_threshold
json_output['httphealthcheck_healthy_count'] = \
hc.healthy_threshold
# create the forwarding rule (and target pool under the hood)
lb = None
if name:
if not region:
module.fail_json(msg='Missing required region name',
changed=False)
nodes = []
output_nodes = []
json_output['name'] = name
# members is a python list of 'zone/inst' strings
if members:
for node in members:
try:
zone, node_name = node.split('/')
nodes.append(gce.ex_get_node(node_name, zone))
output_nodes.append(node)
except:
# skip nodes that are badly formatted or don't exist
pass
try:
if hc is not None:
lb = gcelb.create_balancer(name, port_range, protocol,
None, nodes, ex_region=region, ex_healthchecks=[hc],
ex_address=external_ip)
else:
lb = gcelb.create_balancer(name, port_range, protocol,
None, nodes, ex_region=region, ex_address=external_ip)
changed = True
except ResourceExistsError:
lb = gcelb.get_balancer(name)
except Exception, e:
module.fail_json(msg=unexpected_error_msg(e), changed=False)
if lb is not None:
json_output['members'] = output_nodes
json_output['protocol'] = protocol
json_output['region'] = region
json_output['external_ip'] = lb.ip
json_output['port_range'] = lb.port
hc_names = []
if 'healthchecks' in lb.extra:
for hc in lb.extra['healthchecks']:
hc_names.append(hc.name)
json_output['httphealthchecks'] = hc_names
if state in ['absent', 'deleted']:
# first, delete the load balancer (forwarding rule and target pool)
# if specified.
if name:
json_output['name'] = name
try:
lb = gcelb.get_balancer(name)
gcelb.destroy_balancer(lb)
changed = True
except ResourceNotFoundError:
pass
except Exception, e:
module.fail_json(msg=unexpected_error_msg(e), changed=False)
# destroy the health check if specified
if httphealthcheck_name:
json_output['httphealthcheck_name'] = httphealthcheck_name
try:
hc = gce.ex_get_healthcheck(httphealthcheck_name)
gce.ex_destroy_healthcheck(hc)
changed = True
except ResourceNotFoundError:
pass
except Exception, e:
module.fail_json(msg=unexpected_error_msg(e), changed=False)
json_output['changed'] = changed
module.exit_json(**json_output)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.gce import *
if __name__ == '__main__':
main()
| gpl-3.0 |
taknira/certificate-transparency | python/ct/client/log_client_test.py | 26 | 18164 | #!/usr/bin/env python
import unittest
import base64
import json
import mock
import requests
import sys
from ct.client import log_client
from ct.client import log_client_test_util as test_util
from ct.crypto import merkle
from ct.proto import client_pb2
import gflags
FLAGS = gflags.FLAGS
class LogClientTest(unittest.TestCase):
class FakeHandler(test_util.FakeHandlerBase):
# A class that mimics requests.models.Response
class FakeResponse(object):
def __init__(self, code, reason, json_content=None):
self.status_code = code
self.reason = reason
self.headers = ''
if json_content is not None:
self.content = json.dumps(json_content)
else:
self.content = ""
@classmethod
def make_response(cls, code, reason, json_content=None):
return cls.FakeResponse(code, reason, json_content=json_content)
@staticmethod
def one_shot_client(json_content):
"""Make a one-shot client and give it a mock response."""
mock_handler = mock.Mock()
mock_handler.get_response_body.return_value = json.dumps(json_content)
return log_client.LogClient("some address", handler=mock_handler)
def default_client(self):
# A client whose responder is configured to answer queries for the
# correct uri.
return log_client.LogClient(test_util.DEFAULT_URI, self.FakeHandler(
test_util.DEFAULT_URI))
def test_get_sth(self):
client = self.default_client()
sth_response = client.get_sth()
self.assertEqual(sth_response.timestamp,
test_util.DEFAULT_STH.timestamp)
self.assertEqual(sth_response.tree_size,
test_util.DEFAULT_STH.tree_size)
self.assertEqual(sth_response.sha256_root_hash,
test_util.DEFAULT_STH.sha256_root_hash)
self.assertEqual(sth_response.tree_head_signature,
test_util.DEFAULT_STH.tree_head_signature)
def test_get_sth_raises_on_invalid_response(self):
json_sth = test_util.sth_to_json(test_util.DEFAULT_STH)
json_sth.pop("timestamp")
client = self.one_shot_client(json_sth)
self.assertRaises(log_client.InvalidResponseError, client.get_sth)
def test_get_sth_raises_on_invalid_base64(self):
json_sth = test_util.sth_to_json(test_util.DEFAULT_STH)
json_sth["tree_head_signature"] = "garbagebase64^^^"
client = self.one_shot_client(json_sth)
self.assertRaises(log_client.InvalidResponseError, client.get_sth)
def test_get_entries(self):
client = self.default_client()
returned_entries = list(client.get_entries(0, 9))
self.assertTrue(test_util.verify_entries(returned_entries, 0, 9))
def test_get_entries_raises_on_invalid_response(self):
json_entries = test_util.entries_to_json(test_util.make_entries(4, 4))
json_entries["entries"][0].pop("leaf_input")
client = self.one_shot_client(json_entries)
entries = client.get_entries(4, 4)
self.assertRaises(log_client.InvalidResponseError,
entries.next)
def test_get_entries_raises_immediately_on_invalid_base64(self):
json_entries = test_util.entries_to_json(test_util.make_entries(3, 4))
json_entries["entries"][1]["leaf_input"] = "garbagebase64^^^"
client = self.one_shot_client(json_entries)
entries = client.get_entries(3, 4)
# We shouldn't see anything, even if the first entry appeared valid.
self.assertRaises(log_client.InvalidResponseError,
entries.next)
def test_get_entries_raises_on_empty_response(self):
empty_entries = test_util.entries_to_json([])
client = self.one_shot_client(empty_entries)
entries = client.get_entries(4, 4)
self.assertRaises(log_client.InvalidResponseError,
entries.next)
def test_get_entries_raises_on_too_large_response(self):
large_response = test_util.entries_to_json(
test_util.make_entries(4, 5))
client = self.one_shot_client(large_response)
entries = client.get_entries(4, 4)
self.assertRaises(log_client.InvalidResponseError,
entries.next)
def test_get_entries_returns_all_in_batches(self):
mock_handler = mock.Mock()
fake_responder = self.FakeHandler(test_util.DEFAULT_URI)
mock_handler.get_response_body.side_effect = (
fake_responder.get_response_body)
client = log_client.LogClient(test_util.DEFAULT_URI,
handler=mock_handler)
returned_entries = list(client.get_entries(0, 9, batch_size=4))
self.assertTrue(test_util.verify_entries(returned_entries, 0, 9))
self.assertEqual(3, len(mock_handler.get_response_body.call_args_list))
# Same as above, but using a flag to control the batch size.
mock_handler.reset_mock()
# TODO(ekasper): find a more elegant and robust way to save flags.
original = FLAGS.entry_fetch_batch_size
FLAGS.entry_fetch_batch_size = 4
returned_entries = list(client.get_entries(0, 9))
FLAGS.entry_fetch_batch_size = original
self.assertTrue(test_util.verify_entries(returned_entries, 0, 9))
self.assertEqual(3, len(mock_handler.get_response_body.call_args_list))
def test_get_entries_returns_all_for_limiting_server(self):
client = log_client.LogClient(test_util.DEFAULT_URI, self.FakeHandler(
test_util.DEFAULT_URI, entry_limit=3))
returned_entries = list(client.get_entries(0, 9))
self.assertTrue(test_util.verify_entries(returned_entries, 0, 9))
def test_get_entries_returns_partial_if_log_returns_partial(self):
client = log_client.LogClient(test_util.DEFAULT_URI, self.FakeHandler(
test_util.DEFAULT_URI, tree_size=3))
entries = client.get_entries(0, 9)
partial = []
for _ in range(3):
partial.append(entries.next())
self.assertTrue(test_util.verify_entries(partial, 0, 2))
self.assertRaises(log_client.HTTPClientError, entries.next)
def test_get_sth_consistency(self):
client = log_client.LogClient(test_util.DEFAULT_URI, self.FakeHandler(
test_util.DEFAULT_URI, tree_size=3))
proof = client.get_sth_consistency(1, 2)
self.assertEqual(proof, test_util.DEFAULT_FAKE_PROOF)
def test_get_sth_consistency_trivial(self):
client = log_client.LogClient(test_util.DEFAULT_URI, self.FakeHandler(
test_util.DEFAULT_URI, tree_size=3))
self.assertEqual(client.get_sth_consistency(0, 0), [])
self.assertEqual(client.get_sth_consistency(0, 2), [])
self.assertEqual(client.get_sth_consistency(2, 2), [])
def test_get_sth_consistency_raises_on_invalid_input(self):
client = log_client.LogClient(test_util.DEFAULT_URI, self.FakeHandler(
test_util.DEFAULT_URI, tree_size=3))
self.assertRaises(log_client.InvalidRequestError,
client.get_sth_consistency, -1, 1)
self.assertRaises(log_client.InvalidRequestError,
client.get_sth_consistency, -3, -1)
self.assertRaises(log_client.InvalidRequestError,
client.get_sth_consistency, 3, 1)
def test_get_sth_consistency_raises_on_client_error(self):
client = log_client.LogClient(test_util.DEFAULT_URI, self.FakeHandler(
test_util.DEFAULT_URI, tree_size=3))
self.assertRaises(log_client.HTTPClientError,
client.get_sth_consistency, 1, 5)
def test_get_sth_consistency_raises_on_invalid_response(self):
client = self.one_shot_client({})
self.assertRaises(log_client.InvalidResponseError,
client.get_sth_consistency, 1, 2)
def test_get_sth_consistency_raises_on_invalid_base64(self):
json_proof = {"consistency": ["garbagebase64^^^"]}
client = self.one_shot_client(json_proof)
self.assertRaises(log_client.InvalidResponseError,
client.get_sth_consistency, 1, 2)
def test_get_roots(self):
client = self.default_client()
roots = client.get_roots()
self.assertEqual(roots, test_util.DEFAULT_FAKE_ROOTS)
def test_get_roots_raises_on_invalid_response(self):
client = self.one_shot_client({})
self.assertRaises(log_client.InvalidResponseError, client.get_roots)
def test_get_roots_raises_on_invalid_base64(self):
json_roots = {"certificates": ["garbagebase64^^^"]}
client = self.one_shot_client(json_roots)
self.assertRaises(log_client.InvalidResponseError, client.get_roots)
def test_get_entry_and_proof(self):
client = self.default_client()
entry_and_proof = client.get_entry_and_proof(1, 2)
self.assertEqual(entry_and_proof.entry, test_util.make_entry(1))
self.assertEqual(entry_and_proof.audit_path,
test_util.DEFAULT_FAKE_PROOF)
def test_get_entry_and_proof_raises_on_invalid_input(self):
client = self.default_client()
self.assertRaises(log_client.InvalidRequestError,
client.get_entry_and_proof, -1, 1)
self.assertRaises(log_client.InvalidRequestError,
client.get_entry_and_proof, -3, -1)
self.assertRaises(log_client.InvalidRequestError,
client.get_entry_and_proof, 3, 1)
def test_get_entry_and_proof_raises_on_client_error(self):
client = log_client.LogClient(test_util.DEFAULT_URI, self.FakeHandler(
test_util.DEFAULT_URI, tree_size=3))
self.assertRaises(log_client.HTTPClientError,
client.get_entry_and_proof, 1, 5)
def test_get_entry_and_proof_raises_on_invalid_response(self):
json_response = test_util.entry_and_proof_to_json(
test_util.make_entry(1), test_util.DEFAULT_FAKE_PROOF)
json_response.pop("leaf_input")
client = self.one_shot_client(json_response)
self.assertRaises(log_client.InvalidResponseError,
client.get_entry_and_proof, 1, 2)
def test_get_entry_and_proof_raises_on_invalid_base64(self):
json_response = test_util.entry_and_proof_to_json(
test_util.make_entry(1), test_util.DEFAULT_FAKE_PROOF)
json_response["leaf_input"] = ["garbagebase64^^^"]
client = self.one_shot_client(json_response)
self.assertRaises(log_client.InvalidResponseError,
client.get_entry_and_proof, 1, 2)
def test_get_proof_by_hash(self):
client = self.default_client()
entry = test_util.make_entry(1)
hasher = merkle.TreeHasher()
leaf_hash = hasher.hash_leaf(entry.leaf_input)
proof_by_hash = client.get_proof_by_hash(leaf_hash, 2)
self.assertEqual(proof_by_hash.audit_path, test_util.DEFAULT_FAKE_PROOF)
self.assertEqual(proof_by_hash.leaf_index, 1)
def test_get_proof_by_hash_raises_on_invalid_input(self):
client = self.default_client()
leaf_hash = "hash"
self.assertRaises(log_client.InvalidRequestError,
client.get_proof_by_hash, leaf_hash, 0)
self.assertRaises(log_client.InvalidRequestError,
client.get_proof_by_hash, leaf_hash, -1)
def test_get_proof_by_hash_raises_on_unknown_hash(self):
client = log_client.LogClient(test_util.DEFAULT_URI, self.FakeHandler(
test_util.DEFAULT_URI, tree_size=3))
leaf_hash = "bogus"
self.assertRaises(log_client.HTTPClientError,
client.get_proof_by_hash, leaf_hash, 2)
def test_get_proof_by_hash_raises_on_invalid_response(self):
json_response = test_util.proof_and_index_to_json(
test_util.DEFAULT_FAKE_PROOF, 1)
json_response.pop("leaf_index")
client = self.one_shot_client(json_response)
self.assertRaises(log_client.InvalidResponseError,
client.get_proof_by_hash, "hash", 2)
def test_get_proof_by_hash_raises_on_invalid_base64(self):
json_response = test_util.proof_and_index_to_json(
test_util.DEFAULT_FAKE_PROOF, 1)
json_response["leaf_index"] = "garbagebase64^^^"
client = self.one_shot_client(json_response)
self.assertRaises(log_client.InvalidResponseError,
client.get_proof_by_hash, "hash", 2)
def _verify_sct_contents(self, sct):
LOG_ID = base64.b64decode(
'pLkJkLQYWBSHuxOizGdwCjw1mAT5G9+443fNDsgN3BA=')
self.assertEqual(client_pb2.V1, sct.version)
self.assertEqual(LOG_ID, sct.id.key_id)
self.assertEqual(1373015623951L, sct.timestamp)
self.assertEqual(client_pb2.DigitallySigned.SHA256,
sct.signature.hash_algorithm)
self.assertEqual(client_pb2.DigitallySigned.ECDSA,
sct.signature.sig_algorithm)
RAW_SIGNATURE = ('304402202080fb4a50c159e3398d9cf85cec0b3b551d4379db1d'
'820b3d6bca52107a32180220286b2f20f0d98039f14e3198f1f4'
'81c24975e5d0344d4a96e5ec761c253bc84f').decode('hex')
self.assertEqual(RAW_SIGNATURE, sct.signature.signature)
def test_add_valid_chain(self):
certs_chain = ["one", "two", "three"]
json_sct_response = (
'{"sct_version":0,"id":"pLkJkLQYWBSHuxOizGdwCjw1m'
'AT5G9+443fNDsgN3BA=","timestamp":1373015623951,\n'
'"extensions":"",\n'
'"signature":"BAMARjBEAiAggPtKUMFZ4zmNnPhc7As7VR1Dedsdggs9a8pSEHoy'
'GAIgKGsvIPDZgDnxTjGY8fSBwkl15dA0TUqW5ex2HCU7yE8="}')
mock_handler = mock.Mock()
mock_handler.post_response_body.return_value = json_sct_response
client = log_client.LogClient("http://ctlog", handler=mock_handler)
received_sct = client.add_chain(certs_chain)
mock_handler.post_response_body.assert_called_once_with(
"http://ctlog/ct/v1/add-chain",
post_data={'chain': [base64.b64encode(t) for t in certs_chain]})
self._verify_sct_contents(received_sct)
def test_fails_parsing_sct_invalid_version(self):
json_sct_response = (
'{"sct_version":2,"id":"pLkJkLQYWBSHuxOizGdwCjw1m'
'AT5G9+443fNDsgN3BA=","timestamp":1373015623951,\n'
'"extensions":"",\n'
'"signature":"BAMARjBEAiAggPtKUMFZ4zmNnPhc7As7VR1Dedsdggs9a8pSEHoy'
'GAIgKGsvIPDZgDnxTjGY8fSBwkl15dA0TUqW5ex2HCU7yE8="}')
client = self.default_client()
self.assertRaises(log_client.InvalidResponseError,
client._parse_sct, json_sct_response)
def test_fails_parsing_sct_missing_contents(self):
json_sct_response = (
'{"sct_version":0,"id":"pLkJkLQYWBSHuxOizGdwCjw1m'
'AT5G9+443fNDsgN3BA=",\n'
'"extensions":"",\n'
'"signature":"BAMARjBEAiAggPtKUMFZ4zmNnPhc7As7VR1Dedsdggs9a8pSEHoy'
'GAIgKGsvIPDZgDnxTjGY8fSBwkl15dA0TUqW5ex2HCU7yE8="}')
client = self.default_client()
self.assertRaises(log_client.InvalidResponseError,
client._parse_sct, json_sct_response)
class RequestHandlerTest(unittest.TestCase):
class RequestSideEffect:
def __init__(self, num_failures, canned_response=None):
self._num_failures = num_failures
self._canned_response = canned_response
def __call__(self, req_url, params, timeout, verify):
if self._num_failures <= 0:
return self._canned_response
self._num_failures = self._num_failures - 1
raise requests.exceptions.ConnectionError("incomplete read!")
def test_uri_with_params(self):
self.assertEqual(
'http://www.google.com',
log_client.RequestHandler._uri_with_params('http://www.google.com',
{}))
self.assertIn(
log_client.RequestHandler._uri_with_params('http://www.google.com',
{'a': 1, 'b': 2}),
['http://www.google.com?a=1&b=2', 'http://www.google.com?b=2&a=1'])
self.assertIn(
log_client.RequestHandler._uri_with_params('http://www.google.com/',
{'a': 1,
'b': 'foo bar'}),
['http://www.google.com/?a=1&b=foo+bar',
'http://www.google.com/?b=foo+bar&a=1'])
def test_get_response_one_retry(self):
expected_body = 'valid_body'
handler = log_client.RequestHandler(num_retries=1)
canned_response = requests.models.Response()
canned_response.status_code = 200
canned_response._content = expected_body
log_client.requests.get = mock.Mock(
side_effect=
self.RequestSideEffect(1, canned_response))
received_response = handler.get_response('http://www.example.com')
self.assertEqual(expected_body, received_response.content)
self.assertEqual(200, received_response.status_code)
def test_get_response_too_many_retries(self):
handler = log_client.RequestHandler(num_retries=3)
canned_response = requests.models.Response()
canned_response.status_code = 200
canned_response._content = 'body'
log_client.requests.get = mock.Mock(
side_effect=self.RequestSideEffect(
4, canned_response))
self.assertRaises(log_client.HTTPError,
handler.get_response, ('http://www.example.com'))
if __name__ == "__main__":
sys.argv = FLAGS(sys.argv)
unittest.main()
| apache-2.0 |
rouault/Quantum-GIS | python/plugins/processing/tools/raster.py | 20 | 2896 | # -*- coding: utf-8 -*-
"""
***************************************************************************
raster.py
---------------------
Date : February 2013
Copyright : (C) 2013 by Victor Olaya and Alexander Bruy
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya and Alexander Bruy'
__date__ = 'February 2013'
__copyright__ = '(C) 2013, Victor Olaya and Alexander Bruy'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
import struct
import numpy
from osgeo import gdal
from qgis.core import QgsProcessingException
def scanraster(layer, feedback, band_number=1):
filename = str(layer.source())
dataset = gdal.Open(filename, gdal.GA_ReadOnly)
band = dataset.GetRasterBand(band_number)
nodata = band.GetNoDataValue()
bandtype = gdal.GetDataTypeName(band.DataType)
for y in range(band.YSize):
feedback.setProgress(y / float(band.YSize) * 100)
scanline = band.ReadRaster(0, y, band.XSize, 1, band.XSize, 1,
band.DataType)
if bandtype == 'Byte':
values = struct.unpack('B' * band.XSize, scanline)
elif bandtype == 'Int16':
values = struct.unpack('h' * band.XSize, scanline)
elif bandtype == 'UInt16':
values = struct.unpack('H' * band.XSize, scanline)
elif bandtype == 'Int32':
values = struct.unpack('i' * band.XSize, scanline)
elif bandtype == 'UInt32':
values = struct.unpack('I' * band.XSize, scanline)
elif bandtype == 'Float32':
values = struct.unpack('f' * band.XSize, scanline)
elif bandtype == 'Float64':
values = struct.unpack('d' * band.XSize, scanline)
else:
raise QgsProcessingException('Raster format not supported')
for value in values:
if value == nodata:
value = None
yield value
def mapToPixel(mX, mY, geoTransform):
(pX, pY) = gdal.ApplyGeoTransform(
gdal.InvGeoTransform(geoTransform), mX, mY)
return (int(pX), int(pY))
def pixelToMap(pX, pY, geoTransform):
return gdal.ApplyGeoTransform(geoTransform, pX + 0.5, pY + 0.5)
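def exampleGeoTransformRoundTrip():
    # Illustrative sketch only, not part of the original module: a north-up
    # geotransform with 10 m pixels and its origin at (1000, 2000).
    geoTransform = (1000.0, 10.0, 0.0, 2000.0, 0.0, -10.0)
    pX, pY = mapToPixel(1055.0, 1945.0, geoTransform)  # -> (5, 5)
    return pixelToMap(pX, pY, geoTransform)  # -> pixel centre (1055.0, 1945.0)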
| gpl-2.0 |
lneisenman/meanet | meanet/graph_theory.py | 1 | 4877 | # -*- coding: utf-8 -*-
from __future__ import (print_function, division, absolute_import,
unicode_literals)
import networkx as nx
import numpy as np
from .meanet import corr_matrix_to_graph
def analyse_data(data, threshold=0.66):
""" perform graph theory analysis on data
Parameters
----------
data: dict
the keys are the names of the datasets
and the values are dicts that include 'corr' which represents
the corr matrix from which to derive the graph
Returns
-------
result: dict of graph theory results
the keys are the names of the datasets
the values are another dict containing
'L' - the average shortest path length
'CC' - the average clustering coefficient
'DD' - the degree histogram
'Nodes' - the number of nodes in the graph
'Edges' - the number of edges in the graph
"""
result = dict()
for label, dataset in data.items():
summary = dict()
corr = dataset['corr']
graph, _ = corr_matrix_to_graph(corr, threshold=threshold)
summary['L'] = nx.average_shortest_path_length(graph)
summary['CC'] = nx.average_clustering(graph)
summary['DD'] = nx.degree_histogram(graph)
summary['Nodes'] = graph.number_of_nodes()
summary['Edges'] = graph.number_of_edges()
result[label] = summary
return result
def _distance_matrix(G):
""" create a numpy 2-d array of distances between nodes
Parameters
----------
G : NetworkX undirected graph
Returns
-------
matrix: ndarray
numpy 2-d array of distances between nodes
"""
size = len(G)
matrix = np.zeros((size, size))
nodes = nx.nodes(G)
for i, node1 in enumerate(nodes):
for j, node2 in enumerate(nodes):
try:
matrix[i, j] = nx.shortest_path_length(G, node1, node2)
            except nx.NetworkXNoPath:
                # unconnected node pairs keep the default distance of zero
                pass
return matrix
def old_average_shortest_path_length(G):
""" Compute the average shortest path length (L) of the graph
assuming that the pathlength between unconnected nodes is equal to zero
Parameters
----------
G : NetworkX undirected graph
Returns
-------
L: float
the average shortest path length (L) of the graph
Notes
-----
This is based on the old NetworkX behavior. The current behavior is to
raise an exception if there are unconnected nodes
"""
# test for correct type of input
if not isinstance(G, nx.classes.graph.Graph):
raise TypeError('This function only works for undirected graphs')
# make sure the Graph isn't empty
if len(G) == 0:
raise ValueError('The graph is empty')
# create a numpy 2-d array of distances between nodes called matrix
matrix = _distance_matrix(G)
# calculate L
size = matrix.shape[0]
L = matrix.sum()/(size*(size - 1))
return L
def bullmore_average_shortest_path_length(G):
""" Compute the average shortest path length (L) of the graph
assuming that the pathlength between unconnected nodes is equal to
the longest path length between connected nodes in the network
Parameters
----------
G : NetworkX undirected graph
Returns
-------
L: float
the average shortest path length (L) of the graph
Notes
-----
This is based on Fornito et al Front Syst Neurosci 4:22 2010 and references
therein
"""
# test for correct type of input
if not isinstance(G, nx.classes.graph.Graph):
raise TypeError('This function only works for undirected graphs')
# make sure the Graph isn't empty
if len(G) == 0:
raise ValueError('The graph is empty')
# create a numpy 2-d array of distances between nodes called matrix
matrix = _distance_matrix(G)
# set all zero distances to the max distance in matrix
maxdist = np.nanmax(matrix)
indices = np.where(matrix == 0)
matrix[indices] = maxdist
# reset distances from each node to itself back to zero
np.fill_diagonal(matrix, 0)
# calculate L
size = matrix.shape[0]
L = matrix.sum()/(size*(size - 1))
return L
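def _example_disconnected_path_lengths():
    """ Illustrative sketch only, not part of the original module: on two
    disjoint edges, unconnected pairs count as distance zero in the old
    convention but as the longest finite distance (here 1) in the Bullmore
    convention, giving L = 4/12 versus L = 12/12. """
    G = nx.Graph([(0, 1), (2, 3)])
    return (old_average_shortest_path_length(G),
            bullmore_average_shortest_path_length(G))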
def small_world_random(G):
""" Compute the average clustering coefficient and average shortest path
length of a random network with the same number of nodes and edges as G
Parameters
----------
G: Network X undirected graph
Returns
-------
C: float
the random network clustering coefficient
L: float
the random network average shortest path length
Notes
-----
Formulas from Albert and Barabasi 2002
"""
N = len(G)
d = 2 * G.number_of_edges() / N
C = d / N
if d == 1:
L = 1
else:
L = np.log(N) / np.log(d)
return C, L
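def _example_small_world_random():
    """ Illustrative sketch only, not part of the original module: a ring of
    N = 10 nodes has mean degree d = 2, so the Albert and Barabasi estimates
    are C = d/N = 0.2 and L = log(N)/log(d) = log(10)/log(2) ~= 3.32 """
    G = nx.cycle_graph(10)
    return small_world_random(G)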
| bsd-3-clause |
40423219/2017springcd_hw | blog/theme/glow/primitive.py | 161 | 4838 | from javascript import JSConstructor, JSObject
from .vector import vec
class primitive:
def __init__(self, prim, **kwargs):
for _key in kwargs.keys():
if isinstance(kwargs[_key], vec):
kwargs[_key]=kwargs[_key]._vec
self._prim=prim(kwargs)
def rotate(self, **kwargs):
if 'axis' in kwargs:
#for now lets assume axis is a vector
kwargs['axis']=kwargs['axis']._vec
self._prim.rotate(kwargs)
@property
def pos(self):
_v=vec()
_v._set_vec(self._prim.pos)
return _v
@pos.setter
def pos(self, value):
if isinstance(value, vec):
self._prim.pos=value._vec
else:
print("Error! pos must be a vector")
@property
def color(self):
_v=vec()
_v._set_vec(self._prim.color)
return _v
@color.setter
def color(self, value):
if isinstance(value, vec):
self._prim.color=value._vec
else:
print("Error! color must be a vec")
@property
def axis(self):
_v=vec()
_v._set_vec(self._prim.axis)
return _v
@axis.setter
def axis(self, value):
if isinstance(value, vec):
self._prim.axis=value._vec
else:
print("Error! axis must be a vec")
@property
def size(self):
return self._prim.size
@size.setter
def size(self, value):
self._prim.size=value
@property
def up(self):
_v=vec()
_v._set_vec(self._prim.up)
return _v
@up.setter
def up(self, value):
if isinstance(value, vec):
self._prim.up=value._vec
else:
print("Error! up must be a vec")
@property
def opacity(self):
return self._prim.opacity
@opacity.setter
def opacity(self, value):
self._prim.opacity=value
@property
def shininess(self):
return self._prim.shininess
@shininess.setter
def shininess(self, value):
self._prim.shininess=value
@property
def emissive(self):
return self._prim.emissive
@emissive.setter
def emissive(self, value):
self._prim.emissive=value
@property
def texture(self):
return self._prim.texture
@texture.setter
def texture(self, **kwargs):
self._prim.texture=kwargs
@property
def visible(self):
return self._prim.visible
@visible.setter
def visible(self, flag):
assert isinstance(flag, bool)
        self._prim.visible=flag
class arrow(primitive):
def __init__(self, **kwargs):
primitive.__init__(self, JSConstructor(glowscript.arrow), **kwargs)
class box(primitive):
def __init__(self, **kwargs):
primitive.__init__(self, JSConstructor(glowscript.box), **kwargs)
class cone(primitive):
def __init__(self, **kwargs):
primitive.__init__(self, JSConstructor(glowscript.cone), **kwargs)
class curve(primitive):
def __init__(self, **kwargs):
primitive.__init__(self, JSConstructor(glowscript.curve), **kwargs)
def push(self, v):
if isinstance(v, vec):
self._prim.push(v._vec)
elif isinstance(v, dict):
for _key in v.keys():
                if isinstance(v[_key], vec):
v[_key]=v[_key]._vec
self._prim.push(v)
def append(self, v):
self.push(v)
class cylinder(primitive):
def __init__(self, **kwargs):
primitive.__init__(self, JSConstructor(glowscript.cylinder), **kwargs)
class helix(cylinder):
def __init__(self, **kwargs):
primitive.__init__(self, JSConstructor(glowscript.helix), **kwargs)
class pyramid(primitive):
def __init__(self, **kwargs):
primitive.__init__(self, JSConstructor(glowscript.pyramid), **kwargs)
#class ring(curve):
class sphere(primitive):
def __init__(self, **kwargs):
primitive.__init__(self, JSConstructor(glowscript.sphere), **kwargs)
#triangle
#class triangle:
# def __init__(self, **kwargs):
# self._tri = JSConstructor(glowscript.triangle)(kwargs)
#vertex
#class vertex:
# def __init__(self, **kwargs):
# self._ver = JSConstructor(glowscript.vertex)(kwargs)
#quad
#compound
#class compound(box):
# def __init__(self, **kwargs):
# box.__init__(self, kwargs)
# I'm not sure if the declarations below are correct. Will fix later.
class distant_light:
    def __init__(self, **kwargs):
        self._dl=JSConstructor(glowscript.distant_light)(kwargs)
class local_light:
def __init__(self, **kwargs):
self._ll=JSConstructor(glowscript.local_light)(kwargs)
class draw:
def __init__(self, **kwargs):
self._draw=JSConstructor(glowscript.draw)(kwargs)
class label:
def __init__(self, **kwargs):
self._label=JSConstructor(glowscript.label)(kwargs)
def attach_trail(object, **kwargs):
if isinstance(object, primitive):
JSObject(glowscript.attach_trail)(object._prim, kwargs)
else:
JSObject(glowscript.attach_trail)(object, kwargs)
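# Usage sketch (assumes a Brython page with the GlowScript library loaded,
# which is what this wrapper targets); it exercises the vec-aware properties
# defined above:
#
#     from .vector import vec
#     ball = sphere(pos=vec(0, 1, 0), color=vec(1, 0, 0))
#     ball.pos = vec(0, 2, 0)                    # setter unwraps vec for JS
#     ball.rotate(angle=0.1, axis=vec(0, 1, 0))  # axis is unwrapped likewise
#     attach_trail(ball)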
| agpl-3.0 |
mushtaqak/edx-platform | common/lib/xmodule/xmodule/tests/test_html_module.py | 58 | 4458 | import unittest
from mock import Mock
from xblock.field_data import DictFieldData
from xmodule.html_module import HtmlModule, HtmlDescriptor
from . import get_test_system, get_test_descriptor_system
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from xblock.fields import ScopeIds
def instantiate_descriptor(**field_data):
"""
Instantiate descriptor with most properties.
"""
system = get_test_descriptor_system()
course_key = SlashSeparatedCourseKey('org', 'course', 'run')
usage_key = course_key.make_usage_key('html', 'SampleHtml')
return system.construct_xblock_from_class(
HtmlDescriptor,
scope_ids=ScopeIds(None, None, usage_key, usage_key),
field_data=DictFieldData(field_data),
)
class HtmlModuleSubstitutionTestCase(unittest.TestCase):
descriptor = Mock()
def test_substitution_works(self):
sample_xml = '''%%USER_ID%%'''
field_data = DictFieldData({'data': sample_xml})
module_system = get_test_system()
module = HtmlModule(self.descriptor, module_system, field_data, Mock())
self.assertEqual(module.get_html(), str(module_system.anonymous_student_id))
def test_substitution_without_magic_string(self):
sample_xml = '''
<html>
<p>Hi USER_ID!11!</p>
</html>
'''
field_data = DictFieldData({'data': sample_xml})
module_system = get_test_system()
module = HtmlModule(self.descriptor, module_system, field_data, Mock())
self.assertEqual(module.get_html(), sample_xml)
def test_substitution_without_anonymous_student_id(self):
sample_xml = '''%%USER_ID%%'''
field_data = DictFieldData({'data': sample_xml})
module_system = get_test_system()
module_system.anonymous_student_id = None
module = HtmlModule(self.descriptor, module_system, field_data, Mock())
self.assertEqual(module.get_html(), sample_xml)
class HtmlDescriptorIndexingTestCase(unittest.TestCase):
"""
Make sure that HtmlDescriptor can format data for indexing as expected.
"""
def test_index_dictionary(self):
sample_xml = '''
<html>
<p>Hello World!</p>
</html>
'''
descriptor = instantiate_descriptor(data=sample_xml)
self.assertEqual(descriptor.index_dictionary(), {
"content": {"html_content": " Hello World! ", "display_name": "Text"},
"content_type": "Text"
})
sample_xml_cdata = '''
<html>
<p>This has CDATA in it.</p>
<![CDATA[This is just a CDATA!]]>
</html>
'''
descriptor = instantiate_descriptor(data=sample_xml_cdata)
self.assertEqual(descriptor.index_dictionary(), {
"content": {"html_content": " This has CDATA in it. ", "display_name": "Text"},
"content_type": "Text"
})
sample_xml_tab_spaces = '''
<html>
<p> Text has spaces :) </p>
</html>
'''
descriptor = instantiate_descriptor(data=sample_xml_tab_spaces)
self.assertEqual(descriptor.index_dictionary(), {
"content": {"html_content": " Text has spaces :) ", "display_name": "Text"},
"content_type": "Text"
})
sample_xml_comment = '''
<html>
<p>This has HTML comment in it.</p>
<!-- Html Comment -->
</html>
'''
descriptor = instantiate_descriptor(data=sample_xml_comment)
self.assertEqual(descriptor.index_dictionary(), {
"content": {"html_content": " This has HTML comment in it. ", "display_name": "Text"},
"content_type": "Text"
})
sample_xml_mix_comment_cdata = '''
<html>
<!-- Beginning of the html -->
<p>This has HTML comment in it.<!-- Commenting Content --></p>
<!-- Here comes CDATA -->
<![CDATA[This is just a CDATA!]]>
<p>HTML end.</p>
</html>
'''
descriptor = instantiate_descriptor(data=sample_xml_mix_comment_cdata)
self.assertEqual(descriptor.index_dictionary(), {
"content": {"html_content": " This has HTML comment in it. HTML end. ", "display_name": "Text"},
"content_type": "Text"
})
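# Note (not part of the original file): these cases follow the standard
# unittest protocol, so inside a configured edx-platform checkout they are
# collected by the platform's usual test runner or, assuming the module is
# importable, can be run directly with
# `python -m unittest xmodule.tests.test_html_module`.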
| agpl-3.0 |
BYK/fb2goog | gdata/finance/service.py | 261 | 8974 | #!/usr/bin/python
#
# Copyright (C) 2009 Tan Swee Heng
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Classes to interact with the Google Finance server."""
__author__ = '[email protected]'
import gdata.service
import gdata.finance
import atom
class PortfolioQuery(gdata.service.Query):
"""A query object for the list of a user's portfolios."""
def returns(self):
return self.get('returns', False)
def set_returns(self, value):
    if value == 'true' or value is True:
self['returns'] = 'true'
returns = property(returns, set_returns, doc="The returns query parameter")
def positions(self):
return self.get('positions', False)
def set_positions(self, value):
    if value == 'true' or value is True:
self['positions'] = 'true'
positions = property(positions, set_positions,
doc="The positions query parameter")
class PositionQuery(gdata.service.Query):
"""A query object for the list of a user's positions in a portfolio."""
def returns(self):
return self.get('returns', False)
def set_returns(self, value):
    if value == 'true' or value is True:
self['returns'] = 'true'
returns = property(returns, set_returns,
doc="The returns query parameter")
def transactions(self):
return self.get('transactions', False)
def set_transactions(self, value):
    if value == 'true' or value is True:
self['transactions'] = 'true'
transactions = property(transactions, set_transactions,
doc="The transactions query parameter")
class FinanceService(gdata.service.GDataService):
def __init__(self, email=None, password=None, source=None,
server='finance.google.com', **kwargs):
"""Creates a client for the Finance service.
Args:
email: string (optional) The user's email address, used for
authentication.
password: string (optional) The user's password.
source: string (optional) The name of the user's application.
server: string (optional) The name of the server to which a connection
will be opened. Default value: 'finance.google.com'.
**kwargs: The other parameters to pass to gdata.service.GDataService
constructor.
"""
gdata.service.GDataService.__init__(self,
email=email, password=password, service='finance', server=server,
**kwargs)
def GetPortfolioFeed(self, query=None):
uri = '/finance/feeds/default/portfolios'
if query:
uri = PortfolioQuery(feed=uri, params=query).ToUri()
return self.Get(uri, converter=gdata.finance.PortfolioFeedFromString)
def GetPositionFeed(self, portfolio_entry=None, portfolio_id=None,
query=None):
"""
Args:
portfolio_entry: PortfolioEntry (optional; see Notes)
portfolio_id: string (optional; see Notes) This may be obtained
from a PortfolioEntry's portfolio_id attribute.
query: PortfolioQuery (optional)
Notes:
Either a PortfolioEntry OR a portfolio ID must be provided.
"""
if portfolio_entry:
uri = portfolio_entry.GetSelfLink().href + '/positions'
elif portfolio_id:
uri = '/finance/feeds/default/portfolios/%s/positions' % portfolio_id
if query:
uri = PositionQuery(feed=uri, params=query).ToUri()
return self.Get(uri, converter=gdata.finance.PositionFeedFromString)
def GetTransactionFeed(self, position_entry=None,
portfolio_id=None, ticker_id=None):
"""
Args:
position_entry: PositionEntry (optional; see Notes)
portfolio_id: string (optional; see Notes) This may be obtained
from a PortfolioEntry's portfolio_id attribute.
ticker_id: string (optional; see Notes) This may be obtained from
a PositionEntry's ticker_id attribute. Alternatively it can
be constructed using the security's exchange and symbol,
e.g. 'NASDAQ:GOOG'
Notes:
Either a PositionEntry OR (a portfolio ID AND ticker ID) must
be provided.
"""
if position_entry:
uri = position_entry.GetSelfLink().href + '/transactions'
elif portfolio_id and ticker_id:
uri = '/finance/feeds/default/portfolios/%s/positions/%s/transactions' \
% (portfolio_id, ticker_id)
return self.Get(uri, converter=gdata.finance.TransactionFeedFromString)
def GetPortfolio(self, portfolio_id=None, query=None):
uri = '/finance/feeds/default/portfolios/%s' % portfolio_id
if query:
uri = PortfolioQuery(feed=uri, params=query).ToUri()
return self.Get(uri, converter=gdata.finance.PortfolioEntryFromString)
def AddPortfolio(self, portfolio_entry=None):
uri = '/finance/feeds/default/portfolios'
return self.Post(portfolio_entry, uri,
converter=gdata.finance.PortfolioEntryFromString)
def UpdatePortfolio(self, portfolio_entry=None):
uri = portfolio_entry.GetEditLink().href
return self.Put(portfolio_entry, uri,
converter=gdata.finance.PortfolioEntryFromString)
def DeletePortfolio(self, portfolio_entry=None):
uri = portfolio_entry.GetEditLink().href
return self.Delete(uri)
def GetPosition(self, portfolio_id=None, ticker_id=None, query=None):
uri = '/finance/feeds/default/portfolios/%s/positions/%s' \
% (portfolio_id, ticker_id)
if query:
uri = PositionQuery(feed=uri, params=query).ToUri()
return self.Get(uri, converter=gdata.finance.PositionEntryFromString)
def DeletePosition(self, position_entry=None,
portfolio_id=None, ticker_id=None, transaction_feed=None):
"""A position is deleted by deleting all its transactions.
Args:
position_entry: PositionEntry (optional; see Notes)
portfolio_id: string (optional; see Notes) This may be obtained
from a PortfolioEntry's portfolio_id attribute.
ticker_id: string (optional; see Notes) This may be obtained from
a PositionEntry's ticker_id attribute. Alternatively it can
be constructed using the security's exchange and symbol,
e.g. 'NASDAQ:GOOG'
transaction_feed: TransactionFeed (optional; see Notes)
Notes:
Either a PositionEntry OR (a portfolio ID AND ticker ID) OR
a TransactionFeed must be provided.
"""
if transaction_feed:
feed = transaction_feed
else:
if position_entry:
feed = self.GetTransactionFeed(position_entry=position_entry)
elif portfolio_id and ticker_id:
feed = self.GetTransactionFeed(
portfolio_id=portfolio_id, ticker_id=ticker_id)
for txn in feed.entry:
self.DeleteTransaction(txn)
return True
def GetTransaction(self, portfolio_id=None, ticker_id=None,
transaction_id=None):
uri = '/finance/feeds/default/portfolios/%s/positions/%s/transactions/%s' \
% (portfolio_id, ticker_id, transaction_id)
return self.Get(uri, converter=gdata.finance.TransactionEntryFromString)
  def AddTransaction(self, transaction_entry=None, transaction_feed=None,
position_entry=None, portfolio_id=None, ticker_id=None):
"""
Args:
transaction_entry: TransactionEntry (required)
transaction_feed: TransactionFeed (optional; see Notes)
position_entry: PositionEntry (optional; see Notes)
portfolio_id: string (optional; see Notes) This may be obtained
from a PortfolioEntry's portfolio_id attribute.
ticker_id: string (optional; see Notes) This may be obtained from
a PositionEntry's ticker_id attribute. Alternatively it can
be constructed using the security's exchange and symbol,
e.g. 'NASDAQ:GOOG'
Notes:
Either a TransactionFeed OR a PositionEntry OR (a portfolio ID AND
ticker ID) must be provided.
"""
if transaction_feed:
uri = transaction_feed.GetPostLink().href
elif position_entry:
uri = position_entry.GetSelfLink().href + '/transactions'
elif portfolio_id and ticker_id:
uri = '/finance/feeds/default/portfolios/%s/positions/%s/transactions' \
% (portfolio_id, ticker_id)
return self.Post(transaction_entry, uri,
converter=gdata.finance.TransactionEntryFromString)
def UpdateTransaction(self, transaction_entry=None):
uri = transaction_entry.GetEditLink().href
return self.Put(transaction_entry, uri,
converter=gdata.finance.TransactionEntryFromString)
def DeleteTransaction(self, transaction_entry=None):
uri = transaction_entry.GetEditLink().href
return self.Delete(uri)
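# Usage sketch (illustrative only: the Google Finance portfolio API has since
# been retired, and the credentials below are placeholders). Authentication
# comes from the inherited gdata.service.GDataService machinery:
#
#   client = FinanceService(email='[email protected]', password='secret',
#                           source='example-app')
#   client.ProgrammaticLogin()
#   query = PortfolioQuery()
#   query.returns = True
#   feed = client.GetPortfolioFeed(query=query)
#   for portfolio in feed.entry:
#     positions = client.GetPositionFeed(portfolio_entry=portfolio)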
| mpl-2.0 |
40223226/2015cdbg80420 | static/Brython3.1.1-20150328-091302/Lib/pydoc.py | 637 | 102017 | #!/usr/bin/env python3
"""Generate Python documentation in HTML or text for interactive use.
In the Python interpreter, do "from pydoc import help" to provide
help. Calling help(thing) on a Python object documents the object.
Or, at the shell command line outside of Python:
Run "pydoc <name>" to show documentation on something. <name> may be
the name of a function, module, package, or a dotted reference to a
class or function within a module or module in a package. If the
argument contains a path segment delimiter (e.g. slash on Unix,
backslash on Windows) it is treated as the path to a Python source file.
Run "pydoc -k <keyword>" to search for a keyword in the synopsis lines
of all available modules.
Run "pydoc -p <port>" to start an HTTP server on the given port on the
local machine. Port number 0 can be used to get an arbitrary unused port.
Run "pydoc -b" to start an HTTP server on an arbitrary unused port and
open a Web browser to interactively browse documentation. The -p option
can be used with the -b option to explicitly specify the server port.
Run "pydoc -w <name>" to write out the HTML documentation for a module
to a file named "<name>.html".
Module docs for core modules are assumed to be in
http://docs.python.org/X.Y/library/
This can be overridden by setting the PYTHONDOCS environment variable
to a different URL or to a local directory containing the Library
Reference Manual pages.
"""
__all__ = ['help']
__author__ = "Ka-Ping Yee <[email protected]>"
__date__ = "26 February 2001"
__credits__ = """Guido van Rossum, for an excellent programming language.
Tommy Burnette, the original creator of manpy.
Paul Prescod, for all his work on onlinehelp.
Richard Chamberlain, for the first implementation of textdoc.
"""
# Known bugs that can't be fixed here:
# - imp.load_module() cannot be prevented from clobbering existing
# loaded modules, so calling synopsis() on a binary module file
# changes the contents of any existing module with the same name.
# - If the __file__ attribute on a module is a relative path and
# the current directory is changed with os.chdir(), an incorrect
# path will be displayed.
import builtins
import imp
import importlib.machinery
#brython fix me
import inspect
import io
import os
#brython fix me
#import pkgutil
import platform
import re
import sys
import time
import tokenize
import warnings
from collections import deque
from reprlib import Repr
#fix me brython
#from traceback import extract_tb, format_exception_only
# --------------------------------------------------------- common routines
def pathdirs():
"""Convert sys.path into a list of absolute, existing, unique paths."""
dirs = []
normdirs = []
for dir in sys.path:
dir = os.path.abspath(dir or '.')
normdir = os.path.normcase(dir)
if normdir not in normdirs and os.path.isdir(dir):
dirs.append(dir)
normdirs.append(normdir)
return dirs
def getdoc(object):
"""Get the doc string or comments for an object."""
result = inspect.getdoc(object) or inspect.getcomments(object)
return result and re.sub('^ *\n', '', result.rstrip()) or ''
def splitdoc(doc):
"""Split a doc string into a synopsis line (if any) and the rest."""
lines = doc.strip().split('\n')
if len(lines) == 1:
return lines[0], ''
elif len(lines) >= 2 and not lines[1].rstrip():
return lines[0], '\n'.join(lines[2:])
return '', '\n'.join(lines)
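# Example (illustrative): splitdoc('Frob the widget.\n\nLonger description.')
# returns ('Frob the widget.', 'Longer description.').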
def classname(object, modname):
"""Get a class name and qualify it with a module name if necessary."""
name = object.__name__
if object.__module__ != modname:
name = object.__module__ + '.' + name
return name
def isdata(object):
"""Check if an object is of a type that probably means it's data."""
return not (inspect.ismodule(object) or inspect.isclass(object) or
inspect.isroutine(object) or inspect.isframe(object) or
inspect.istraceback(object) or inspect.iscode(object))
def replace(text, *pairs):
"""Do a series of global replacements on a string."""
while pairs:
text = pairs[1].join(text.split(pairs[0]))
pairs = pairs[2:]
return text
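# Example (illustrative): replace('a-b c', '-', '+', ' ', '_') -> 'a+b_c';
# each (old, new) pair is applied over the whole string in turn.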
def cram(text, maxlen):
"""Omit part of a string if needed to make it fit in a maximum length."""
if len(text) > maxlen:
pre = max(0, (maxlen-3)//2)
post = max(0, maxlen-3-pre)
return text[:pre] + '...' + text[len(text)-post:]
return text
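# Example (illustrative): cram('abcdefghij', 7) -> 'ab...ij'; the middle is
# elided so both ends of the string stay visible.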
_re_stripid = re.compile(r' at 0x[0-9a-f]{6,16}(>+)$', re.IGNORECASE)
def stripid(text):
"""Remove the hexadecimal id from a Python object representation."""
# The behaviour of %p is implementation-dependent in terms of case.
#fix me brython
#return _re_stripid.sub(r'\1', text)
return text
def _is_some_method(obj):
return (inspect.isfunction(obj) or
inspect.ismethod(obj) or
inspect.isbuiltin(obj) or
inspect.ismethoddescriptor(obj))
def allmethods(cl):
methods = {}
for key, value in inspect.getmembers(cl, _is_some_method):
methods[key] = 1
for base in cl.__bases__:
methods.update(allmethods(base)) # all your base are belong to us
for key in methods.keys():
methods[key] = getattr(cl, key)
return methods
def _split_list(s, predicate):
"""Split sequence s via predicate, and return pair ([true], [false]).
The return value is a 2-tuple of lists,
([x for x in s if predicate(x)],
[x for x in s if not predicate(x)])
"""
yes = []
no = []
for x in s:
if predicate(x):
yes.append(x)
else:
no.append(x)
return yes, no
def visiblename(name, all=None, obj=None):
"""Decide whether to show documentation on a variable."""
# Certain special names are redundant or internal.
if name in {'__author__', '__builtins__', '__cached__', '__credits__',
'__date__', '__doc__', '__file__', '__initializing__',
'__loader__', '__module__', '__name__', '__package__',
'__path__', '__qualname__', '__slots__', '__version__'}:
return 0
# Private names are hidden, but special names are displayed.
if name.startswith('__') and name.endswith('__'): return 1
# Namedtuples have public fields and methods with a single leading underscore
if name.startswith('_') and hasattr(obj, '_fields'):
return True
if all is not None:
# only document that which the programmer exported in __all__
return name in all
else:
return not name.startswith('_')
def classify_class_attrs(object):
"""Wrap inspect.classify_class_attrs, with fixup for data descriptors."""
results = []
for (name, kind, cls, value) in inspect.classify_class_attrs(object):
if inspect.isdatadescriptor(value):
kind = 'data descriptor'
results.append((name, kind, cls, value))
return results
# ----------------------------------------------------- module manipulation
def ispackage(path):
"""Guess whether a path refers to a package directory."""
if os.path.isdir(path):
for ext in ('.py', '.pyc', '.pyo'):
if os.path.isfile(os.path.join(path, '__init__' + ext)):
return True
return False
def source_synopsis(file):
line = file.readline()
while line[:1] == '#' or not line.strip():
line = file.readline()
if not line: break
line = line.strip()
if line[:4] == 'r"""': line = line[1:]
if line[:3] == '"""':
line = line[3:]
if line[-1:] == '\\': line = line[:-1]
while not line.strip():
line = file.readline()
if not line: break
result = line.split('"""')[0].strip()
else: result = None
return result
def synopsis(filename, cache={}):
"""Get the one-line summary out of a module file."""
mtime = os.stat(filename).st_mtime
lastupdate, result = cache.get(filename, (None, None))
if lastupdate is None or lastupdate < mtime:
try:
file = tokenize.open(filename)
except IOError:
# module can't be opened, so skip it
return None
binary_suffixes = importlib.machinery.BYTECODE_SUFFIXES[:]
binary_suffixes += importlib.machinery.EXTENSION_SUFFIXES[:]
if any(filename.endswith(x) for x in binary_suffixes):
# binary modules have to be imported
file.close()
if any(filename.endswith(x) for x in
importlib.machinery.BYTECODE_SUFFIXES):
loader = importlib.machinery.SourcelessFileLoader('__temp__',
filename)
else:
loader = importlib.machinery.ExtensionFileLoader('__temp__',
filename)
try:
module = loader.load_module('__temp__')
except:
return None
result = (module.__doc__ or '').splitlines()[0]
del sys.modules['__temp__']
else:
# text modules can be directly examined
result = source_synopsis(file)
file.close()
cache[filename] = (mtime, result)
return result
class ErrorDuringImport(Exception):
"""Errors that occurred while trying to import something to document it."""
def __init__(self, filename, exc_info):
self.filename = filename
self.exc, self.value, self.tb = exc_info
def __str__(self):
exc = self.exc.__name__
return 'problem in %s - %s: %s' % (self.filename, exc, self.value)
def importfile(path):
"""Import a Python source file or compiled file given its path."""
magic = imp.get_magic()
with open(path, 'rb') as file:
if file.read(len(magic)) == magic:
kind = imp.PY_COMPILED
else:
kind = imp.PY_SOURCE
file.seek(0)
filename = os.path.basename(path)
name, ext = os.path.splitext(filename)
try:
module = imp.load_module(name, file, path, (ext, 'r', kind))
except:
raise ErrorDuringImport(path, sys.exc_info())
return module
def safeimport(path, forceload=0, cache={}):
"""Import a module; handle errors; return None if the module isn't found.
If the module *is* found but an exception occurs, it's wrapped in an
ErrorDuringImport exception and reraised. Unlike __import__, if a
package path is specified, the module at the end of the path is returned,
not the package at the beginning. If the optional 'forceload' argument
is 1, we reload the module from disk (unless it's a dynamic extension)."""
try:
# If forceload is 1 and the module has been previously loaded from
# disk, we always have to reload the module. Checking the file's
# mtime isn't good enough (e.g. the module could contain a class
# that inherits from another module that has changed).
if forceload and path in sys.modules:
if path not in sys.builtin_module_names:
# Remove the module from sys.modules and re-import to try
# and avoid problems with partially loaded modules.
# Also remove any submodules because they won't appear
# in the newly loaded module's namespace if they're already
# in sys.modules.
subs = [m for m in sys.modules if m.startswith(path + '.')]
for key in [path] + subs:
# Prevent garbage collection.
cache[key] = sys.modules[key]
del sys.modules[key]
module = __import__(path)
except:
# Did the error occur before or after the module was found?
(exc, value, tb) = info = sys.exc_info()
if path in sys.modules:
# An error occurred while executing the imported module.
raise ErrorDuringImport(sys.modules[path].__file__, info)
elif exc is SyntaxError:
# A SyntaxError occurred before we could execute the module.
raise ErrorDuringImport(value.filename, info)
#fix me brython
#elif exc is ImportError and value.name == path:
elif exc is ImportError and str(value) == str(path):
# No such module in the path.
return None
else:
# Some other error occurred during the importing process.
raise ErrorDuringImport(path, sys.exc_info())
for part in path.split('.')[1:]:
try: module = getattr(module, part)
except AttributeError: return None
return module
# ---------------------------------------------------- formatter base class
class Doc:
PYTHONDOCS = os.environ.get("PYTHONDOCS",
"http://docs.python.org/%d.%d/library"
% sys.version_info[:2])
def document(self, object, name=None, *args):
"""Generate documentation for an object."""
args = (object, name) + args
# 'try' clause is to attempt to handle the possibility that inspect
# identifies something in a way that pydoc itself has issues handling;
# think 'super' and how it is a descriptor (which raises the exception
# by lacking a __name__ attribute) and an instance.
if inspect.isgetsetdescriptor(object): return self.docdata(*args)
if inspect.ismemberdescriptor(object): return self.docdata(*args)
try:
if inspect.ismodule(object): return self.docmodule(*args)
if inspect.isclass(object): return self.docclass(*args)
if inspect.isroutine(object): return self.docroutine(*args)
except AttributeError:
pass
if isinstance(object, property): return self.docproperty(*args)
return self.docother(*args)
def fail(self, object, name=None, *args):
"""Raise an exception for unimplemented types."""
message = "don't know how to document object%s of type %s" % (
name and ' ' + repr(name), type(object).__name__)
raise TypeError(message)
docmodule = docclass = docroutine = docother = docproperty = docdata = fail
def getdocloc(self, object):
"""Return the location of module docs or None"""
try:
file = inspect.getabsfile(object)
except TypeError:
file = '(built-in)'
docloc = os.environ.get("PYTHONDOCS", self.PYTHONDOCS)
basedir = os.path.join(sys.base_exec_prefix, "lib",
"python%d.%d" % sys.version_info[:2])
if (isinstance(object, type(os)) and
(object.__name__ in ('errno', 'exceptions', 'gc', 'imp',
'marshal', 'posix', 'signal', 'sys',
'_thread', 'zipimport') or
(file.startswith(basedir) and
not file.startswith(os.path.join(basedir, 'site-packages')))) and
object.__name__ not in ('xml.etree', 'test.pydoc_mod')):
if docloc.startswith("http://"):
docloc = "%s/%s" % (docloc.rstrip("/"), object.__name__)
else:
docloc = os.path.join(docloc, object.__name__ + ".html")
else:
docloc = None
return docloc
# -------------------------------------------- HTML documentation generator
class HTMLRepr(Repr):
"""Class for safely making an HTML representation of a Python object."""
def __init__(self):
Repr.__init__(self)
self.maxlist = self.maxtuple = 20
self.maxdict = 10
self.maxstring = self.maxother = 100
def escape(self, text):
        return replace(text, '&', '&amp;', '<', '&lt;', '>', '&gt;')
def repr(self, object):
return Repr.repr(self, object)
def repr1(self, x, level):
if hasattr(type(x), '__name__'):
methodname = 'repr_' + '_'.join(type(x).__name__.split())
if hasattr(self, methodname):
return getattr(self, methodname)(x, level)
return self.escape(cram(stripid(repr(x)), self.maxother))
def repr_string(self, x, level):
test = cram(x, self.maxstring)
testrepr = repr(test)
if '\\' in test and '\\' not in replace(testrepr, r'\\', ''):
# Backslashes are only literal in the string and are never
# needed to make any special characters, so show a raw string.
return 'r' + testrepr[0] + self.escape(test) + testrepr[0]
return re.sub(r'((\\[\\abfnrtv\'"]|\\[0-9]..|\\x..|\\u....)+)',
r'<font color="#c040c0">\1</font>',
self.escape(testrepr))
repr_str = repr_string
def repr_instance(self, x, level):
try:
return self.escape(cram(stripid(repr(x)), self.maxstring))
except:
return self.escape('<%s instance>' % x.__class__.__name__)
repr_unicode = repr_string
class HTMLDoc(Doc):
"""Formatter class for HTML documentation."""
# ------------------------------------------- HTML formatting utilities
_repr_instance = HTMLRepr()
repr = _repr_instance.repr
escape = _repr_instance.escape
def page(self, title, contents):
"""Format an HTML page."""
return '''\
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
<html><head><title>Python: %s</title>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
</head><body bgcolor="#f0f0f8">
%s
</body></html>''' % (title, contents)
def heading(self, title, fgcol, bgcol, extras=''):
"""Format a page heading."""
return '''
<table width="100%%" cellspacing=0 cellpadding=2 border=0 summary="heading">
<tr bgcolor="%s">
<td valign=bottom>&nbsp;<br>
<font color="%s" face="helvetica, arial">&nbsp;<br>%s</font></td
><td align=right valign=bottom
><font color="%s" face="helvetica, arial">%s</font></td></tr></table>
    ''' % (bgcol, fgcol, title, fgcol, extras or '&nbsp;')
    def section(self, title, fgcol, bgcol, contents, width=6,
                prelude='', marginalia=None, gap='&nbsp;'):
        """Format a section with a heading."""
        if marginalia is None:
            marginalia = '<tt>' + '&nbsp;' * width + '</tt>'
result = '''<p>
<table width="100%%" cellspacing=0 cellpadding=2 border=0 summary="section">
<tr bgcolor="%s">
<td colspan=3 valign=bottom>&nbsp;<br>
<font color="%s" face="helvetica, arial">%s</font></td></tr>
''' % (bgcol, fgcol, title)
if prelude:
result = result + '''
<tr bgcolor="%s"><td rowspan=2>%s</td>
<td colspan=2>%s</td></tr>
<tr><td>%s</td>''' % (bgcol, marginalia, prelude, gap)
else:
result = result + '''
<tr><td bgcolor="%s">%s</td><td>%s</td>''' % (bgcol, marginalia, gap)
return result + '\n<td width="100%%">%s</td></tr></table>' % contents
def bigsection(self, title, *args):
"""Format a section with a big heading."""
title = '<big><strong>%s</strong></big>' % title
return self.section(title, *args)
def preformat(self, text):
"""Format literal preformatted text."""
text = self.escape(text.expandtabs())
        return replace(text, '\n\n', '\n \n', '\n\n', '\n \n',
                             ' ', '&nbsp;', '\n', '<br>\n')
def multicolumn(self, list, format, cols=4):
"""Format a list of items into a multi-column list."""
result = ''
rows = (len(list)+cols-1)//cols
for col in range(cols):
result = result + '<td width="%d%%" valign=top>' % (100//cols)
for i in range(rows*col, rows*col+rows):
if i < len(list):
result = result + format(list[i]) + '<br>\n'
result = result + '</td>'
return '<table width="100%%" summary="list"><tr>%s</tr></table>' % result
def grey(self, text): return '<font color="#909090">%s</font>' % text
def namelink(self, name, *dicts):
"""Make a link for an identifier, given name-to-URL mappings."""
for dict in dicts:
if name in dict:
return '<a href="%s">%s</a>' % (dict[name], name)
return name
def classlink(self, object, modname):
"""Make a link for a class."""
name, module = object.__name__, sys.modules.get(object.__module__)
if hasattr(module, name) and getattr(module, name) is object:
return '<a href="%s.html#%s">%s</a>' % (
module.__name__, name, classname(object, modname))
return classname(object, modname)
def modulelink(self, object):
"""Make a link for a module."""
return '<a href="%s.html">%s</a>' % (object.__name__, object.__name__)
def modpkglink(self, modpkginfo):
"""Make a link for a module or package to display in an index."""
name, path, ispackage, shadowed = modpkginfo
if shadowed:
return self.grey(name)
if path:
url = '%s.%s.html' % (path, name)
else:
url = '%s.html' % name
if ispackage:
text = '<strong>%s</strong> (package)' % name
else:
text = name
return '<a href="%s">%s</a>' % (url, text)
def filelink(self, url, path):
"""Make a link to source file."""
return '<a href="file:%s">%s</a>' % (url, path)
def markup(self, text, escape=None, funcs={}, classes={}, methods={}):
"""Mark up some plain text, given a context of symbols to look for.
Each context dictionary maps object names to anchor names."""
escape = escape or self.escape
results = []
here = 0
pattern = re.compile(r'\b((http|ftp)://\S+[\w/]|'
r'RFC[- ]?(\d+)|'
r'PEP[- ]?(\d+)|'
r'(self\.)?(\w+))')
while True:
match = pattern.search(text, here)
if not match: break
start, end = match.span()
results.append(escape(text[here:start]))
all, scheme, rfc, pep, selfdot, name = match.groups()
if scheme:
                url = escape(all).replace('"', '&quot;')
results.append('<a href="%s">%s</a>' % (url, url))
elif rfc:
url = 'http://www.rfc-editor.org/rfc/rfc%d.txt' % int(rfc)
results.append('<a href="%s">%s</a>' % (url, escape(all)))
elif pep:
url = 'http://www.python.org/dev/peps/pep-%04d/' % int(pep)
results.append('<a href="%s">%s</a>' % (url, escape(all)))
elif text[end:end+1] == '(':
results.append(self.namelink(name, methods, funcs, classes))
elif selfdot:
results.append('self.<strong>%s</strong>' % name)
else:
results.append(self.namelink(name, classes))
here = end
results.append(escape(text[here:]))
return ''.join(results)
# ---------------------------------------------- type-specific routines
def formattree(self, tree, modname, parent=None):
"""Produce HTML for a class tree as given by inspect.getclasstree()."""
result = ''
for entry in tree:
if type(entry) is type(()):
c, bases = entry
result = result + '<dt><font face="helvetica, arial">'
result = result + self.classlink(c, modname)
if bases and bases != (parent,):
parents = []
for base in bases:
parents.append(self.classlink(base, modname))
result = result + '(' + ', '.join(parents) + ')'
result = result + '\n</font></dt>'
elif type(entry) is type([]):
result = result + '<dd>\n%s</dd>\n' % self.formattree(
entry, modname, c)
return '<dl>\n%s</dl>\n' % result
def docmodule(self, object, name=None, mod=None, *ignored):
"""Produce HTML documentation for a module object."""
name = object.__name__ # ignore the passed-in name
try:
all = object.__all__
except AttributeError:
all = None
parts = name.split('.')
links = []
for i in range(len(parts)-1):
links.append(
'<a href="%s.html"><font color="#ffffff">%s</font></a>' %
('.'.join(parts[:i+1]), parts[i]))
linkedname = '.'.join(links + parts[-1:])
head = '<big><big><strong>%s</strong></big></big>' % linkedname
try:
path = inspect.getabsfile(object)
url = path
if sys.platform == 'win32':
import nturl2path
url = nturl2path.pathname2url(path)
filelink = self.filelink(url, path)
except TypeError:
filelink = '(built-in)'
info = []
if hasattr(object, '__version__'):
version = str(object.__version__)
if version[:11] == '$' + 'Revision: ' and version[-1:] == '$':
version = version[11:-1].strip()
info.append('version %s' % self.escape(version))
if hasattr(object, '__date__'):
info.append(self.escape(str(object.__date__)))
if info:
head = head + ' (%s)' % ', '.join(info)
docloc = self.getdocloc(object)
if docloc is not None:
docloc = '<br><a href="%(docloc)s">Module Reference</a>' % locals()
else:
docloc = ''
result = self.heading(
head, '#ffffff', '#7799ee',
'<a href=".">index</a><br>' + filelink + docloc)
modules = inspect.getmembers(object, inspect.ismodule)
classes, cdict = [], {}
for key, value in inspect.getmembers(object, inspect.isclass):
# if __all__ exists, believe it. Otherwise use old heuristic.
if (all is not None or
(inspect.getmodule(value) or object) is object):
if visiblename(key, all, object):
classes.append((key, value))
cdict[key] = cdict[value] = '#' + key
for key, value in classes:
for base in value.__bases__:
key, modname = base.__name__, base.__module__
module = sys.modules.get(modname)
if modname != name and module and hasattr(module, key):
if getattr(module, key) is base:
if not key in cdict:
cdict[key] = cdict[base] = modname + '.html#' + key
funcs, fdict = [], {}
for key, value in inspect.getmembers(object, inspect.isroutine):
# if __all__ exists, believe it. Otherwise use old heuristic.
if (all is not None or
inspect.isbuiltin(value) or inspect.getmodule(value) is object):
if visiblename(key, all, object):
funcs.append((key, value))
fdict[key] = '#-' + key
if inspect.isfunction(value): fdict[value] = fdict[key]
data = []
for key, value in inspect.getmembers(object, isdata):
if visiblename(key, all, object):
data.append((key, value))
doc = self.markup(getdoc(object), self.preformat, fdict, cdict)
doc = doc and '<tt>%s</tt>' % doc
result = result + '<p>%s</p>\n' % doc
if hasattr(object, '__path__'):
modpkgs = []
for importer, modname, ispkg in pkgutil.iter_modules(object.__path__):
modpkgs.append((modname, name, ispkg, 0))
modpkgs.sort()
contents = self.multicolumn(modpkgs, self.modpkglink)
result = result + self.bigsection(
'Package Contents', '#ffffff', '#aa55cc', contents)
elif modules:
contents = self.multicolumn(
modules, lambda t: self.modulelink(t[1]))
result = result + self.bigsection(
'Modules', '#ffffff', '#aa55cc', contents)
if classes:
classlist = [value for (key, value) in classes]
contents = [
self.formattree(inspect.getclasstree(classlist, 1), name)]
for key, value in classes:
contents.append(self.document(value, key, name, fdict, cdict))
result = result + self.bigsection(
'Classes', '#ffffff', '#ee77aa', ' '.join(contents))
if funcs:
contents = []
for key, value in funcs:
contents.append(self.document(value, key, name, fdict, cdict))
result = result + self.bigsection(
'Functions', '#ffffff', '#eeaa77', ' '.join(contents))
if data:
contents = []
for key, value in data:
contents.append(self.document(value, key))
result = result + self.bigsection(
'Data', '#ffffff', '#55aa55', '<br>\n'.join(contents))
if hasattr(object, '__author__'):
contents = self.markup(str(object.__author__), self.preformat)
result = result + self.bigsection(
'Author', '#ffffff', '#7799ee', contents)
if hasattr(object, '__credits__'):
contents = self.markup(str(object.__credits__), self.preformat)
result = result + self.bigsection(
'Credits', '#ffffff', '#7799ee', contents)
return result
def docclass(self, object, name=None, mod=None, funcs={}, classes={},
*ignored):
"""Produce HTML documentation for a class object."""
realname = object.__name__
name = name or realname
bases = object.__bases__
contents = []
push = contents.append
# Cute little class to pump out a horizontal rule between sections.
class HorizontalRule:
def __init__(self):
self.needone = 0
def maybe(self):
if self.needone:
push('<hr>\n')
self.needone = 1
hr = HorizontalRule()
# List the mro, if non-trivial.
mro = deque(inspect.getmro(object))
if len(mro) > 2:
hr.maybe()
push('<dl><dt>Method resolution order:</dt>\n')
for base in mro:
push('<dd>%s</dd>\n' % self.classlink(base,
object.__module__))
push('</dl>\n')
def spill(msg, attrs, predicate):
ok, attrs = _split_list(attrs, predicate)
if ok:
hr.maybe()
push(msg)
for name, kind, homecls, value in ok:
try:
value = getattr(object, name)
except Exception:
# Some descriptors may meet a failure in their __get__.
# (bug #1785)
push(self._docdescriptor(name, value, mod))
else:
push(self.document(value, name, mod,
funcs, classes, mdict, object))
push('\n')
return attrs
def spilldescriptors(msg, attrs, predicate):
ok, attrs = _split_list(attrs, predicate)
if ok:
hr.maybe()
push(msg)
for name, kind, homecls, value in ok:
push(self._docdescriptor(name, value, mod))
return attrs
def spilldata(msg, attrs, predicate):
ok, attrs = _split_list(attrs, predicate)
if ok:
hr.maybe()
push(msg)
for name, kind, homecls, value in ok:
base = self.docother(getattr(object, name), name, mod)
if callable(value) or inspect.isdatadescriptor(value):
doc = getattr(value, "__doc__", None)
else:
doc = None
if doc is None:
push('<dl><dt>%s</dl>\n' % base)
else:
doc = self.markup(getdoc(value), self.preformat,
funcs, classes, mdict)
doc = '<dd><tt>%s</tt>' % doc
push('<dl><dt>%s%s</dl>\n' % (base, doc))
push('\n')
return attrs
attrs = [(name, kind, cls, value)
for name, kind, cls, value in classify_class_attrs(object)
if visiblename(name, obj=object)]
mdict = {}
for key, kind, homecls, value in attrs:
mdict[key] = anchor = '#' + name + '-' + key
try:
value = getattr(object, name)
except Exception:
# Some descriptors may meet a failure in their __get__.
# (bug #1785)
pass
try:
# The value may not be hashable (e.g., a data attr with
# a dict or list value).
mdict[value] = anchor
except TypeError:
pass
while attrs:
if mro:
thisclass = mro.popleft()
else:
thisclass = attrs[0][2]
attrs, inherited = _split_list(attrs, lambda t: t[2] is thisclass)
if thisclass is builtins.object:
attrs = inherited
continue
elif thisclass is object:
tag = 'defined here'
else:
tag = 'inherited from %s' % self.classlink(thisclass,
object.__module__)
tag += ':<br>\n'
# Sort attrs by name.
attrs.sort(key=lambda t: t[0])
# Pump out the attrs, segregated by kind.
attrs = spill('Methods %s' % tag, attrs,
lambda t: t[1] == 'method')
attrs = spill('Class methods %s' % tag, attrs,
lambda t: t[1] == 'class method')
attrs = spill('Static methods %s' % tag, attrs,
lambda t: t[1] == 'static method')
attrs = spilldescriptors('Data descriptors %s' % tag, attrs,
lambda t: t[1] == 'data descriptor')
attrs = spilldata('Data and other attributes %s' % tag, attrs,
lambda t: t[1] == 'data')
assert attrs == []
attrs = inherited
contents = ''.join(contents)
if name == realname:
title = '<a name="%s">class <strong>%s</strong></a>' % (
name, realname)
else:
title = '<strong>%s</strong> = <a name="%s">class %s</a>' % (
name, name, realname)
if bases:
parents = []
for base in bases:
parents.append(self.classlink(base, object.__module__))
title = title + '(%s)' % ', '.join(parents)
doc = self.markup(getdoc(object), self.preformat, funcs, classes, mdict)
        doc = doc and '<tt>%s<br>&nbsp;</tt>' % doc
return self.section(title, '#000000', '#ffc8d8', contents, 3, doc)
def formatvalue(self, object):
"""Format an argument default value as text."""
return self.grey('=' + self.repr(object))
def docroutine(self, object, name=None, mod=None,
funcs={}, classes={}, methods={}, cl=None):
"""Produce HTML documentation for a function or method object."""
realname = object.__name__
name = name or realname
anchor = (cl and cl.__name__ or '') + '-' + name
note = ''
skipdocs = 0
if inspect.ismethod(object):
imclass = object.__self__.__class__
if cl:
if imclass is not cl:
note = ' from ' + self.classlink(imclass, mod)
else:
if object.__self__ is not None:
note = ' method of %s instance' % self.classlink(
object.__self__.__class__, mod)
else:
note = ' unbound %s method' % self.classlink(imclass,mod)
object = object.__func__
if name == realname:
title = '<a name="%s"><strong>%s</strong></a>' % (anchor, realname)
else:
if (cl and realname in cl.__dict__ and
cl.__dict__[realname] is object):
reallink = '<a href="#%s">%s</a>' % (
cl.__name__ + '-' + realname, realname)
skipdocs = 1
else:
reallink = realname
title = '<a name="%s"><strong>%s</strong></a> = %s' % (
anchor, name, reallink)
if inspect.isfunction(object):
            args, varargs, varkw, defaults, kwonlyargs, kwdefaults, ann = \
                inspect.getfullargspec(object)
            argspec = inspect.formatargspec(
                args, varargs, varkw, defaults, kwonlyargs, kwdefaults, ann,
formatvalue=self.formatvalue,
formatannotation=inspect.formatannotationrelativeto(object))
if realname == '<lambda>':
title = '<strong>%s</strong> <em>lambda</em> ' % name
# XXX lambda's won't usually have func_annotations['return']
# since the syntax doesn't support but it is possible.
# So removing parentheses isn't truly safe.
argspec = argspec[1:-1] # remove parentheses
else:
argspec = '(...)'
decl = title + argspec + (note and self.grey(
'<font face="helvetica, arial">%s</font>' % note))
if skipdocs:
return '<dl><dt>%s</dt></dl>\n' % decl
else:
doc = self.markup(
getdoc(object), self.preformat, funcs, classes, methods)
doc = doc and '<dd><tt>%s</tt></dd>' % doc
return '<dl><dt>%s</dt>%s</dl>\n' % (decl, doc)
def _docdescriptor(self, name, value, mod):
results = []
push = results.append
if name:
push('<dl><dt><strong>%s</strong></dt>\n' % name)
if value.__doc__ is not None:
doc = self.markup(getdoc(value), self.preformat)
push('<dd><tt>%s</tt></dd>\n' % doc)
push('</dl>\n')
return ''.join(results)
def docproperty(self, object, name=None, mod=None, cl=None):
"""Produce html documentation for a property."""
return self._docdescriptor(name, object, mod)
def docother(self, object, name=None, mod=None, *ignored):
"""Produce HTML documentation for a data object."""
lhs = name and '<strong>%s</strong> = ' % name or ''
return lhs + self.repr(object)
def docdata(self, object, name=None, mod=None, cl=None):
"""Produce html documentation for a data descriptor."""
return self._docdescriptor(name, object, mod)
def index(self, dir, shadowed=None):
"""Generate an HTML index for a directory of modules."""
modpkgs = []
if shadowed is None: shadowed = {}
for importer, name, ispkg in pkgutil.iter_modules([dir]):
if any((0xD800 <= ord(ch) <= 0xDFFF) for ch in name):
# ignore a module if its name contains a surrogate character
continue
modpkgs.append((name, '', ispkg, name in shadowed))
shadowed[name] = 1
modpkgs.sort()
contents = self.multicolumn(modpkgs, self.modpkglink)
return self.bigsection(dir, '#ffffff', '#ee77aa', contents)
# -------------------------------------------- text documentation generator
class TextRepr(Repr):
"""Class for safely making a text representation of a Python object."""
def __init__(self):
Repr.__init__(self)
self.maxlist = self.maxtuple = 20
self.maxdict = 10
self.maxstring = self.maxother = 100
#def repr1(self, x, level):
# if hasattr(type(x), '__name__'):
# methodname = 'repr_' + '_'.join(type(x).__name__.split())
# if hasattr(self, methodname):
# return getattr(self, methodname)(x, level)
# return cram(stripid(repr(x)), self.maxother)
def repr_string(self, x, level):
test = cram(x, self.maxstring)
testrepr = repr(test)
if '\\' in test and '\\' not in replace(testrepr, r'\\', ''):
# Backslashes are only literal in the string and are never
# needed to make any special characters, so show a raw string.
return 'r' + testrepr[0] + test + testrepr[0]
return testrepr
repr_str = repr_string
def repr_instance(self, x, level):
try:
return cram(stripid(repr(x)), self.maxstring)
except:
return '<%s instance>' % x.__class__.__name__
class TextDoc(Doc):
"""Formatter class for text documentation."""
# ------------------------------------------- text formatting utilities
_repr_instance = TextRepr()
repr = _repr_instance.repr
def bold(self, text):
"""Format a string in bold by overstriking."""
return ''.join(ch + '\b' + ch for ch in text)
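    # Example (illustrative): bold('hi') -> 'h\x08hi\x08i'; terminal pagers
    # render these character/backspace overstrike pairs as bold text.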
def indent(self, text, prefix=' '):
"""Indent text by prepending a given prefix to each line."""
if not text: return ''
lines = [prefix + line for line in text.split('\n')]
if lines: lines[-1] = lines[-1].rstrip()
return '\n'.join(lines)
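    # Example (illustrative): indent('a\nb') -> '    a\n    b'; only the
    # final line is right-stripped, so interior spacing is preserved.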
def section(self, title, contents):
"""Format a section with a given heading."""
clean_contents = self.indent(contents).rstrip()
return self.bold(title) + '\n' + clean_contents + '\n\n'
# ---------------------------------------------- type-specific routines
def formattree(self, tree, modname, parent=None, prefix=''):
"""Render in text a class tree as returned by inspect.getclasstree()."""
result = ''
for entry in tree:
if type(entry) is type(()):
c, bases = entry
result = result + prefix + classname(c, modname)
if bases and bases != (parent,):
parents = (classname(c, modname) for c in bases)
result = result + '(%s)' % ', '.join(parents)
result = result + '\n'
elif type(entry) is type([]):
result = result + self.formattree(
entry, modname, c, prefix + ' ')
return result
def docmodule(self, object, name=None, mod=None):
"""Produce text documentation for a given module object."""
name = object.__name__ # ignore the passed-in name
synop, desc = splitdoc(getdoc(object))
result = self.section('NAME', name + (synop and ' - ' + synop))
all = getattr(object, '__all__', None)
docloc = self.getdocloc(object)
if docloc is not None:
result = result + self.section('MODULE REFERENCE', docloc + """
The following documentation is automatically generated from the Python
source files. It may be incomplete, incorrect or include features that
are considered implementation detail and may vary between Python
implementations. When in doubt, consult the module reference at the
location listed above.
""")
if desc:
result = result + self.section('DESCRIPTION', desc)
classes = []
for key, value in inspect.getmembers(object, inspect.isclass):
# if __all__ exists, believe it. Otherwise use old heuristic.
if (all is not None
or (inspect.getmodule(value) or object) is object):
if visiblename(key, all, object):
classes.append((key, value))
funcs = []
for key, value in inspect.getmembers(object, inspect.isroutine):
# if __all__ exists, believe it. Otherwise use old heuristic.
if (all is not None or
inspect.isbuiltin(value) or inspect.getmodule(value) is object):
if visiblename(key, all, object):
funcs.append((key, value))
data = []
for key, value in inspect.getmembers(object, isdata):
if visiblename(key, all, object):
data.append((key, value))
modpkgs = []
modpkgs_names = set()
if hasattr(object, '__path__'):
for importer, modname, ispkg in pkgutil.iter_modules(object.__path__):
modpkgs_names.add(modname)
if ispkg:
modpkgs.append(modname + ' (package)')
else:
modpkgs.append(modname)
modpkgs.sort()
result = result + self.section(
'PACKAGE CONTENTS', '\n'.join(modpkgs))
# Detect submodules as sometimes created by C extensions
submodules = []
for key, value in inspect.getmembers(object, inspect.ismodule):
if value.__name__.startswith(name + '.') and key not in modpkgs_names:
submodules.append(key)
if submodules:
submodules.sort()
result = result + self.section(
'SUBMODULES', '\n'.join(submodules))
if classes:
classlist = [value for key, value in classes]
contents = [self.formattree(
inspect.getclasstree(classlist, 1), name)]
for key, value in classes:
contents.append(self.document(value, key, name))
result = result + self.section('CLASSES', '\n'.join(contents))
if funcs:
contents = []
for key, value in funcs:
contents.append(self.document(value, key, name))
result = result + self.section('FUNCTIONS', '\n'.join(contents))
if data:
contents = []
for key, value in data:
contents.append(self.docother(value, key, name, maxlen=70))
result = result + self.section('DATA', '\n'.join(contents))
if hasattr(object, '__version__'):
version = str(object.__version__)
if version[:11] == '$' + 'Revision: ' and version[-1:] == '$':
version = version[11:-1].strip()
result = result + self.section('VERSION', version)
if hasattr(object, '__date__'):
result = result + self.section('DATE', str(object.__date__))
if hasattr(object, '__author__'):
result = result + self.section('AUTHOR', str(object.__author__))
if hasattr(object, '__credits__'):
result = result + self.section('CREDITS', str(object.__credits__))
try:
file = inspect.getabsfile(object)
except TypeError:
file = '(built-in)'
result = result + self.section('FILE', file)
return result
def docclass(self, object, name=None, mod=None, *ignored):
"""Produce text documentation for a given class object."""
realname = object.__name__
name = name or realname
bases = object.__bases__
def makename(c, m=object.__module__):
return classname(c, m)
if name == realname:
title = 'class ' + self.bold(realname)
else:
title = self.bold(name) + ' = class ' + realname
if bases:
parents = map(makename, bases)
title = title + '(%s)' % ', '.join(parents)
doc = getdoc(object)
contents = doc and [doc + '\n'] or []
push = contents.append
# List the mro, if non-trivial.
mro = deque(inspect.getmro(object))
if len(mro) > 2:
push("Method resolution order:")
for base in mro:
push(' ' + makename(base))
push('')
# Cute little class to pump out a horizontal rule between sections.
class HorizontalRule:
def __init__(self):
self.needone = 0
def maybe(self):
if self.needone:
push('-' * 70)
self.needone = 1
hr = HorizontalRule()
def spill(msg, attrs, predicate):
ok, attrs = _split_list(attrs, predicate)
if ok:
hr.maybe()
push(msg)
for name, kind, homecls, value in ok:
try:
value = getattr(object, name)
except Exception:
# Some descriptors may meet a failure in their __get__.
# (bug #1785)
push(self._docdescriptor(name, value, mod))
else:
push(self.document(value,
name, mod, object))
return attrs
def spilldescriptors(msg, attrs, predicate):
ok, attrs = _split_list(attrs, predicate)
if ok:
hr.maybe()
push(msg)
for name, kind, homecls, value in ok:
push(self._docdescriptor(name, value, mod))
return attrs
def spilldata(msg, attrs, predicate):
ok, attrs = _split_list(attrs, predicate)
if ok:
hr.maybe()
push(msg)
for name, kind, homecls, value in ok:
if callable(value) or inspect.isdatadescriptor(value):
doc = getdoc(value)
else:
doc = None
push(self.docother(getattr(object, name),
name, mod, maxlen=70, doc=doc) + '\n')
return attrs
attrs = [(name, kind, cls, value)
for name, kind, cls, value in classify_class_attrs(object)
if visiblename(name, obj=object)]
while attrs:
if mro:
thisclass = mro.popleft()
else:
thisclass = attrs[0][2]
attrs, inherited = _split_list(attrs, lambda t: t[2] is thisclass)
if thisclass is builtins.object:
attrs = inherited
continue
elif thisclass is object:
tag = "defined here"
else:
tag = "inherited from %s" % classname(thisclass,
object.__module__)
# Sort attrs by name.
attrs.sort()
# Pump out the attrs, segregated by kind.
attrs = spill("Methods %s:\n" % tag, attrs,
lambda t: t[1] == 'method')
attrs = spill("Class methods %s:\n" % tag, attrs,
lambda t: t[1] == 'class method')
attrs = spill("Static methods %s:\n" % tag, attrs,
lambda t: t[1] == 'static method')
attrs = spilldescriptors("Data descriptors %s:\n" % tag, attrs,
lambda t: t[1] == 'data descriptor')
attrs = spilldata("Data and other attributes %s:\n" % tag, attrs,
lambda t: t[1] == 'data')
assert attrs == []
attrs = inherited
contents = '\n'.join(contents)
if not contents:
return title + '\n'
return title + '\n' + self.indent(contents.rstrip(), ' | ') + '\n'
def formatvalue(self, object):
"""Format an argument default value as text."""
return '=' + self.repr(object)
def docroutine(self, object, name=None, mod=None, cl=None):
"""Produce text documentation for a function or method object."""
realname = object.__name__
name = name or realname
note = ''
skipdocs = 0
if inspect.ismethod(object):
imclass = object.__self__.__class__
if cl:
if imclass is not cl:
note = ' from ' + classname(imclass, mod)
else:
if object.__self__ is not None:
note = ' method of %s instance' % classname(
object.__self__.__class__, mod)
else:
note = ' unbound %s method' % classname(imclass,mod)
object = object.__func__
if name == realname:
title = self.bold(realname)
else:
if (cl and realname in cl.__dict__ and
cl.__dict__[realname] is object):
skipdocs = 1
title = self.bold(name) + ' = ' + realname
if inspect.isfunction(object):
args, varargs, varkw, defaults, kwonlyargs, kwdefaults, ann = \
inspect.getfullargspec(object)
argspec = inspect.formatargspec(
args, varargs, varkw, defaults, kwonlyargs, kwdefaults, ann,
formatvalue=self.formatvalue,
formatannotation=inspect.formatannotationrelativeto(object))
if realname == '<lambda>':
title = self.bold(name) + ' lambda '
                # XXX lambda's won't usually have func_annotations['return']
                # since the syntax doesn't support it, but it is possible.
                # So removing parentheses isn't truly safe.
argspec = argspec[1:-1] # remove parentheses
else:
argspec = '(...)'
decl = title + argspec + note
if skipdocs:
return decl + '\n'
else:
doc = getdoc(object) or ''
return decl + '\n' + (doc and self.indent(doc).rstrip() + '\n')
def _docdescriptor(self, name, value, mod):
results = []
push = results.append
if name:
push(self.bold(name))
push('\n')
doc = getdoc(value) or ''
if doc:
push(self.indent(doc))
push('\n')
return ''.join(results)
def docproperty(self, object, name=None, mod=None, cl=None):
"""Produce text documentation for a property."""
return self._docdescriptor(name, object, mod)
def docdata(self, object, name=None, mod=None, cl=None):
"""Produce text documentation for a data descriptor."""
return self._docdescriptor(name, object, mod)
def docother(self, object, name=None, mod=None, parent=None, maxlen=None, doc=None):
"""Produce text documentation for a data object."""
repr = self.repr(object)
if maxlen:
line = (name and name + ' = ' or '') + repr
chop = maxlen - len(line)
if chop < 0: repr = repr[:chop] + '...'
line = (name and self.bold(name) + ' = ' or '') + repr
if doc is not None:
line += '\n' + self.indent(str(doc))
return line
class _PlainTextDoc(TextDoc):
"""Subclass of TextDoc which overrides string styling"""
def bold(self, text):
return text
# --------------------------------------------------------- user interfaces
def pager(text):
"""The first time this is called, determine what kind of pager to use."""
global pager
pager = getpager()
pager(text)
def getpager():
"""Decide what method to use for paging through text."""
if not hasattr(sys.stdout, "isatty"):
return plainpager
if not sys.stdin.isatty() or not sys.stdout.isatty():
return plainpager
if 'PAGER' in os.environ:
if sys.platform == 'win32': # pipes completely broken in Windows
return lambda text: tempfilepager(plain(text), os.environ['PAGER'])
elif os.environ.get('TERM') in ('dumb', 'emacs'):
return lambda text: pipepager(plain(text), os.environ['PAGER'])
else:
return lambda text: pipepager(text, os.environ['PAGER'])
if os.environ.get('TERM') in ('dumb', 'emacs'):
return plainpager
if sys.platform == 'win32' or sys.platform.startswith('os2'):
return lambda text: tempfilepager(plain(text), 'more <')
if hasattr(os, 'system') and os.system('(less) 2>/dev/null') == 0:
return lambda text: pipepager(text, 'less')
import tempfile
(fd, filename) = tempfile.mkstemp()
os.close(fd)
try:
if hasattr(os, 'system') and os.system('more "%s"' % filename) == 0:
return lambda text: pipepager(text, 'more')
else:
return ttypager
finally:
os.unlink(filename)
def plain(text):
"""Remove boldface formatting from text."""
return re.sub('.\b', '', text)
def pipepager(text, cmd):
"""Page through text by feeding it to another program."""
pipe = os.popen(cmd, 'w')
try:
pipe.write(text)
pipe.close()
except IOError:
pass # Ignore broken pipes caused by quitting the pager program.
def tempfilepager(text, cmd):
"""Page through text by invoking a program on a temporary file."""
import tempfile
filename = tempfile.mktemp()
file = open(filename, 'w')
file.write(text)
file.close()
try:
os.system(cmd + ' "' + filename + '"')
finally:
os.unlink(filename)
def ttypager(text):
"""Page through text on a text terminal."""
lines = plain(text).split('\n')
try:
import tty
fd = sys.stdin.fileno()
old = tty.tcgetattr(fd)
tty.setcbreak(fd)
getchar = lambda: sys.stdin.read(1)
except (ImportError, AttributeError):
tty = None
getchar = lambda: sys.stdin.readline()[:-1][:1]
try:
        r = inc = int(os.environ.get('LINES', 25)) - 1
sys.stdout.write('\n'.join(lines[:inc]) + '\n')
while lines[r:]:
sys.stdout.write('-- more --')
sys.stdout.flush()
c = getchar()
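            # 'q' quits, Enter advances a single line, 'b'/ESC page
            # backwards, and any other key shows the next screenful.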
if c in ('q', 'Q'):
sys.stdout.write('\r \r')
break
elif c in ('\r', '\n'):
sys.stdout.write('\r \r' + lines[r] + '\n')
r = r + 1
continue
if c in ('b', 'B', '\x1b'):
r = r - inc - inc
if r < 0: r = 0
sys.stdout.write('\n' + '\n'.join(lines[r:r+inc]) + '\n')
r = r + inc
finally:
if tty:
tty.tcsetattr(fd, tty.TCSAFLUSH, old)
def plainpager(text):
"""Simply print unformatted text. This is the ultimate fallback."""
sys.stdout.write(plain(text))
def describe(thing):
"""Produce a short description of the given thing."""
if inspect.ismodule(thing):
if thing.__name__ in sys.builtin_module_names:
return 'built-in module ' + thing.__name__
if hasattr(thing, '__path__'):
return 'package ' + thing.__name__
else:
return 'module ' + thing.__name__
if inspect.isbuiltin(thing):
return 'built-in function ' + thing.__name__
if inspect.isgetsetdescriptor(thing):
return 'getset descriptor %s.%s.%s' % (
thing.__objclass__.__module__, thing.__objclass__.__name__,
thing.__name__)
if inspect.ismemberdescriptor(thing):
return 'member descriptor %s.%s.%s' % (
thing.__objclass__.__module__, thing.__objclass__.__name__,
thing.__name__)
if inspect.isclass(thing):
return 'class ' + thing.__name__
if inspect.isfunction(thing):
return 'function ' + thing.__name__
if inspect.ismethod(thing):
return 'method ' + thing.__name__
return type(thing).__name__
def locate(path, forceload=0):
"""Locate an object by name or dotted path, importing as necessary."""
parts = [part for part in path.split('.') if part]
module, n = None, 0
while n < len(parts):
nextmodule = safeimport('.'.join(parts[:n+1]), forceload)
if nextmodule: module, n = nextmodule, n + 1
else: break
if module:
object = module
else:
object = builtins
for part in parts[n:]:
try:
object = getattr(object, part)
except AttributeError:
return None
return object
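# Example: locate('os.path.join') imports 'os' then 'os.path', then walks
# the remaining attribute ('join'); unknown attributes make it return None.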
# --------------------------------------- interactive interpreter interface
text = TextDoc()
plaintext = _PlainTextDoc()
html = HTMLDoc()
def resolve(thing, forceload=0):
"""Given an object or a path to an object, get the object and its name."""
if isinstance(thing, str):
object = locate(thing, forceload)
if not object:
raise ImportError('no Python documentation found for %r' % thing)
return object, thing
else:
name = getattr(thing, '__name__', None)
return thing, name if isinstance(name, str) else None
def render_doc(thing, title='Python Library Documentation: %s', forceload=0,
renderer=None):
"""Render text documentation, given an object or a path to an object."""
if renderer is None:
renderer = text
object, name = resolve(thing, forceload)
desc = describe(object)
module = inspect.getmodule(object)
if name and '.' in name:
desc += ' in ' + name[:name.rfind('.')]
elif module and module is not object:
desc += ' in module ' + module.__name__
if not (inspect.ismodule(object) or
inspect.isclass(object) or
inspect.isroutine(object) or
inspect.isgetsetdescriptor(object) or
inspect.ismemberdescriptor(object) or
isinstance(object, property)):
# If the passed object is a piece of data or an instance,
# document its available methods instead of its value.
object = type(object)
desc += ' object'
return title % desc + '\n\n' + renderer.document(object, name)
def doc(thing, title='Python Library Documentation: %s', forceload=0,
output=None):
"""Display text documentation, given an object or a path to an object."""
try:
if output is None:
pager(render_doc(thing, title, forceload))
else:
output.write(render_doc(thing, title, forceload, plaintext))
except (ImportError, ErrorDuringImport) as value:
print(value)
def writedoc(thing, forceload=0):
"""Write HTML documentation to a file in the current directory."""
try:
object, name = resolve(thing, forceload)
page = html.page(describe(object), html.document(object, name))
file = open(name + '.html', 'w', encoding='utf-8')
file.write(page)
file.close()
print('wrote', name + '.html')
except (ImportError, ErrorDuringImport) as value:
print(value)
def writedocs(dir, pkgpath='', done=None):
"""Write out HTML documentation for all modules in a directory tree."""
if done is None: done = {}
for importer, modname, ispkg in pkgutil.walk_packages([dir], pkgpath):
writedoc(modname)
return
class Helper:
# These dictionaries map a topic name to either an alias, or a tuple
# (label, seealso-items). The "label" is the label of the corresponding
# section in the .rst file under Doc/ and an index into the dictionary
# in pydoc_data/topics.py.
#
# CAUTION: if you change one of these dictionaries, be sure to adapt the
# list of needed labels in Doc/tools/sphinxext/pyspecific.py and
# regenerate the pydoc_data/topics.py file by running
# make pydoc-topics
# in Doc/ and copying the output file into the Lib/ directory.
keywords = {
'False': '',
'None': '',
'True': '',
'and': 'BOOLEAN',
'as': 'with',
'assert': ('assert', ''),
'break': ('break', 'while for'),
'class': ('class', 'CLASSES SPECIALMETHODS'),
'continue': ('continue', 'while for'),
'def': ('function', ''),
'del': ('del', 'BASICMETHODS'),
'elif': 'if',
'else': ('else', 'while for'),
'except': 'try',
'finally': 'try',
'for': ('for', 'break continue while'),
'from': 'import',
'global': ('global', 'nonlocal NAMESPACES'),
'if': ('if', 'TRUTHVALUE'),
'import': ('import', 'MODULES'),
'in': ('in', 'SEQUENCEMETHODS'),
'is': 'COMPARISON',
'lambda': ('lambda', 'FUNCTIONS'),
'nonlocal': ('nonlocal', 'global NAMESPACES'),
'not': 'BOOLEAN',
'or': 'BOOLEAN',
'pass': ('pass', ''),
'raise': ('raise', 'EXCEPTIONS'),
'return': ('return', 'FUNCTIONS'),
'try': ('try', 'EXCEPTIONS'),
'while': ('while', 'break continue if TRUTHVALUE'),
'with': ('with', 'CONTEXTMANAGERS EXCEPTIONS yield'),
'yield': ('yield', ''),
}
# Either add symbols to this dictionary or to the symbols dictionary
# directly: Whichever is easier. They are merged later.
_symbols_inverse = {
'STRINGS' : ("'", "'''", "r'", "b'", '"""', '"', 'r"', 'b"'),
'OPERATORS' : ('+', '-', '*', '**', '/', '//', '%', '<<', '>>', '&',
'|', '^', '~', '<', '>', '<=', '>=', '==', '!=', '<>'),
'COMPARISON' : ('<', '>', '<=', '>=', '==', '!=', '<>'),
'UNARY' : ('-', '~'),
'AUGMENTEDASSIGNMENT' : ('+=', '-=', '*=', '/=', '%=', '&=', '|=',
'^=', '<<=', '>>=', '**=', '//='),
'BITWISE' : ('<<', '>>', '&', '|', '^', '~'),
'COMPLEX' : ('j', 'J')
}
symbols = {
'%': 'OPERATORS FORMATTING',
'**': 'POWER',
',': 'TUPLES LISTS FUNCTIONS',
'.': 'ATTRIBUTES FLOAT MODULES OBJECTS',
'...': 'ELLIPSIS',
':': 'SLICINGS DICTIONARYLITERALS',
'@': 'def class',
'\\': 'STRINGS',
'_': 'PRIVATENAMES',
'__': 'PRIVATENAMES SPECIALMETHODS',
'`': 'BACKQUOTES',
'(': 'TUPLES FUNCTIONS CALLS',
')': 'TUPLES FUNCTIONS CALLS',
'[': 'LISTS SUBSCRIPTS SLICINGS',
']': 'LISTS SUBSCRIPTS SLICINGS'
}
for topic, symbols_ in _symbols_inverse.items():
for symbol in symbols_:
topics = symbols.get(symbol, topic)
if topic not in topics:
topics = topics + ' ' + topic
symbols[symbol] = topics
topics = {
'TYPES': ('types', 'STRINGS UNICODE NUMBERS SEQUENCES MAPPINGS '
'FUNCTIONS CLASSES MODULES FILES inspect'),
'STRINGS': ('strings', 'str UNICODE SEQUENCES STRINGMETHODS '
'FORMATTING TYPES'),
'STRINGMETHODS': ('string-methods', 'STRINGS FORMATTING'),
'FORMATTING': ('formatstrings', 'OPERATORS'),
'UNICODE': ('strings', 'encodings unicode SEQUENCES STRINGMETHODS '
'FORMATTING TYPES'),
'NUMBERS': ('numbers', 'INTEGER FLOAT COMPLEX TYPES'),
'INTEGER': ('integers', 'int range'),
'FLOAT': ('floating', 'float math'),
'COMPLEX': ('imaginary', 'complex cmath'),
'SEQUENCES': ('typesseq', 'STRINGMETHODS FORMATTING range LISTS'),
'MAPPINGS': 'DICTIONARIES',
'FUNCTIONS': ('typesfunctions', 'def TYPES'),
'METHODS': ('typesmethods', 'class def CLASSES TYPES'),
'CODEOBJECTS': ('bltin-code-objects', 'compile FUNCTIONS TYPES'),
'TYPEOBJECTS': ('bltin-type-objects', 'types TYPES'),
'FRAMEOBJECTS': 'TYPES',
'TRACEBACKS': 'TYPES',
'NONE': ('bltin-null-object', ''),
'ELLIPSIS': ('bltin-ellipsis-object', 'SLICINGS'),
'FILES': ('bltin-file-objects', ''),
'SPECIALATTRIBUTES': ('specialattrs', ''),
'CLASSES': ('types', 'class SPECIALMETHODS PRIVATENAMES'),
'MODULES': ('typesmodules', 'import'),
'PACKAGES': 'import',
'EXPRESSIONS': ('operator-summary', 'lambda or and not in is BOOLEAN '
'COMPARISON BITWISE SHIFTING BINARY FORMATTING POWER '
'UNARY ATTRIBUTES SUBSCRIPTS SLICINGS CALLS TUPLES '
'LISTS DICTIONARIES'),
'OPERATORS': 'EXPRESSIONS',
'PRECEDENCE': 'EXPRESSIONS',
'OBJECTS': ('objects', 'TYPES'),
'SPECIALMETHODS': ('specialnames', 'BASICMETHODS ATTRIBUTEMETHODS '
'CALLABLEMETHODS SEQUENCEMETHODS MAPPINGMETHODS '
'NUMBERMETHODS CLASSES'),
'BASICMETHODS': ('customization', 'hash repr str SPECIALMETHODS'),
'ATTRIBUTEMETHODS': ('attribute-access', 'ATTRIBUTES SPECIALMETHODS'),
'CALLABLEMETHODS': ('callable-types', 'CALLS SPECIALMETHODS'),
'SEQUENCEMETHODS': ('sequence-types', 'SEQUENCES SEQUENCEMETHODS '
'SPECIALMETHODS'),
'MAPPINGMETHODS': ('sequence-types', 'MAPPINGS SPECIALMETHODS'),
'NUMBERMETHODS': ('numeric-types', 'NUMBERS AUGMENTEDASSIGNMENT '
'SPECIALMETHODS'),
'EXECUTION': ('execmodel', 'NAMESPACES DYNAMICFEATURES EXCEPTIONS'),
'NAMESPACES': ('naming', 'global nonlocal ASSIGNMENT DELETION DYNAMICFEATURES'),
'DYNAMICFEATURES': ('dynamic-features', ''),
'SCOPING': 'NAMESPACES',
'FRAMES': 'NAMESPACES',
'EXCEPTIONS': ('exceptions', 'try except finally raise'),
'CONVERSIONS': ('conversions', ''),
'IDENTIFIERS': ('identifiers', 'keywords SPECIALIDENTIFIERS'),
'SPECIALIDENTIFIERS': ('id-classes', ''),
'PRIVATENAMES': ('atom-identifiers', ''),
'LITERALS': ('atom-literals', 'STRINGS NUMBERS TUPLELITERALS '
'LISTLITERALS DICTIONARYLITERALS'),
'TUPLES': 'SEQUENCES',
'TUPLELITERALS': ('exprlists', 'TUPLES LITERALS'),
'LISTS': ('typesseq-mutable', 'LISTLITERALS'),
'LISTLITERALS': ('lists', 'LISTS LITERALS'),
'DICTIONARIES': ('typesmapping', 'DICTIONARYLITERALS'),
'DICTIONARYLITERALS': ('dict', 'DICTIONARIES LITERALS'),
'ATTRIBUTES': ('attribute-references', 'getattr hasattr setattr ATTRIBUTEMETHODS'),
'SUBSCRIPTS': ('subscriptions', 'SEQUENCEMETHODS'),
'SLICINGS': ('slicings', 'SEQUENCEMETHODS'),
'CALLS': ('calls', 'EXPRESSIONS'),
'POWER': ('power', 'EXPRESSIONS'),
'UNARY': ('unary', 'EXPRESSIONS'),
'BINARY': ('binary', 'EXPRESSIONS'),
'SHIFTING': ('shifting', 'EXPRESSIONS'),
'BITWISE': ('bitwise', 'EXPRESSIONS'),
'COMPARISON': ('comparisons', 'EXPRESSIONS BASICMETHODS'),
'BOOLEAN': ('booleans', 'EXPRESSIONS TRUTHVALUE'),
'ASSERTION': 'assert',
'ASSIGNMENT': ('assignment', 'AUGMENTEDASSIGNMENT'),
'AUGMENTEDASSIGNMENT': ('augassign', 'NUMBERMETHODS'),
'DELETION': 'del',
'RETURNING': 'return',
'IMPORTING': 'import',
'CONDITIONAL': 'if',
'LOOPING': ('compound', 'for while break continue'),
'TRUTHVALUE': ('truth', 'if while and or not BASICMETHODS'),
'DEBUGGING': ('debugger', 'pdb'),
'CONTEXTMANAGERS': ('context-managers', 'with'),
}
def __init__(self, input=None, output=None):
self._input = input
self._output = output
#fix me brython
self.input = self._input or sys.stdin
self.output = self._output or sys.stdout
#fix me brython
#input = property(lambda self: self._input or sys.stdin)
#output = property(lambda self: self._output or sys.stdout)
def __repr__(self):
if inspect.stack()[1][3] == '?':
self()
return ''
return '<pydoc.Helper instance>'
_GoInteractive = object()
def __call__(self, request=_GoInteractive):
if request is not self._GoInteractive:
self.help(request)
else:
self.intro()
self.interact()
self.output.write('''
You are now leaving help and returning to the Python interpreter.
If you want to ask for help on a particular object directly from the
interpreter, you can type "help(object)". Executing "help('string')"
has the same effect as typing a particular string at the help> prompt.
''')
def interact(self):
self.output.write('\n')
while True:
try:
request = self.getline('help> ')
if not request: break
except (KeyboardInterrupt, EOFError):
break
request = replace(request, '"', '', "'", '').strip()
if request.lower() in ('q', 'quit'): break
self.help(request)
def getline(self, prompt):
"""Read one line, using input() when appropriate."""
if self.input is sys.stdin:
return input(prompt)
else:
self.output.write(prompt)
self.output.flush()
return self.input.readline()
def help(self, request):
if type(request) is type(''):
request = request.strip()
if request == 'help': self.intro()
elif request == 'keywords': self.listkeywords()
elif request == 'symbols': self.listsymbols()
elif request == 'topics': self.listtopics()
elif request == 'modules': self.listmodules()
elif request[:8] == 'modules ':
self.listmodules(request.split()[1])
elif request in self.symbols: self.showsymbol(request)
elif request in ['True', 'False', 'None']:
# special case these keywords since they are objects too
doc(eval(request), 'Help on %s:')
elif request in self.keywords: self.showtopic(request)
elif request in self.topics: self.showtopic(request)
elif request: doc(request, 'Help on %s:', output=self._output)
elif isinstance(request, Helper): self()
else: doc(request, 'Help on %s:', output=self._output)
self.output.write('\n')
def intro(self):
self.output.write('''
Welcome to Python %s! This is the interactive help utility.
If this is your first time using Python, you should definitely check out
the tutorial on the Internet at http://docs.python.org/%s/tutorial/.
Enter the name of any module, keyword, or topic to get help on writing
Python programs and using Python modules. To quit this help utility and
return to the interpreter, just type "quit".
To get a list of available modules, keywords, or topics, type "modules",
"keywords", or "topics". Each module also comes with a one-line summary
of what it does; to list the modules whose summaries contain a given word
such as "spam", type "modules spam".
''' % tuple([sys.version[:3]]*2))
def list(self, items, columns=4, width=80):
items = list(sorted(items))
colw = width // columns
rows = (len(items) + columns - 1) // columns
for row in range(rows):
for col in range(columns):
i = col * rows + row
if i < len(items):
self.output.write(items[i])
if col < columns - 1:
self.output.write(' ' + ' ' * (colw - 1 - len(items[i])))
self.output.write('\n')
def listkeywords(self):
self.output.write('''
Here is a list of the Python keywords. Enter any keyword to get more help.
''')
self.list(self.keywords.keys())
def listsymbols(self):
self.output.write('''
Here is a list of the punctuation symbols which Python assigns special meaning
to. Enter any symbol to get more help.
''')
self.list(self.symbols.keys())
def listtopics(self):
self.output.write('''
Here is a list of available topics. Enter any topic name to get more help.
''')
self.list(self.topics.keys())
def showtopic(self, topic, more_xrefs=''):
try:
import pydoc_data.topics
except ImportError:
self.output.write('''
Sorry, topic and keyword documentation is not available because the
module "pydoc_data.topics" could not be found.
''')
return
target = self.topics.get(topic, self.keywords.get(topic))
if not target:
self.output.write('no documentation found for %s\n' % repr(topic))
return
if type(target) is type(''):
return self.showtopic(target, more_xrefs)
label, xrefs = target
try:
doc = pydoc_data.topics.topics[label]
except KeyError:
self.output.write('no documentation found for %s\n' % repr(topic))
return
pager(doc.strip() + '\n')
if more_xrefs:
xrefs = (xrefs or '') + ' ' + more_xrefs
if xrefs:
import formatter
buffer = io.StringIO()
formatter.DumbWriter(buffer).send_flowing_data(
'Related help topics: ' + ', '.join(xrefs.split()) + '\n')
self.output.write('\n%s\n' % buffer.getvalue())
def _gettopic(self, topic, more_xrefs=''):
"""Return unbuffered tuple of (topic, xrefs).
If an error occurs here, the exception is caught and displayed by
the url handler.
This function duplicates the showtopic method but returns its
result directly so it can be formatted for display in an html page.
"""
try:
import pydoc_data.topics
except ImportError:
return('''
Sorry, topic and keyword documentation is not available because the
module "pydoc_data.topics" could not be found.
''' , '')
target = self.topics.get(topic, self.keywords.get(topic))
if not target:
raise ValueError('could not find topic')
if isinstance(target, str):
return self._gettopic(target, more_xrefs)
label, xrefs = target
doc = pydoc_data.topics.topics[label]
if more_xrefs:
xrefs = (xrefs or '') + ' ' + more_xrefs
return doc, xrefs
def showsymbol(self, symbol):
target = self.symbols[symbol]
topic, _, xrefs = target.partition(' ')
self.showtopic(topic, xrefs)
def listmodules(self, key=''):
if key:
self.output.write('''
Here is a list of matching modules. Enter any module name to get more help.
''')
apropos(key)
else:
self.output.write('''
Please wait a moment while I gather a list of all available modules...
''')
modules = {}
def callback(path, modname, desc, modules=modules):
if modname and modname[-9:] == '.__init__':
modname = modname[:-9] + ' (package)'
if modname.find('.') < 0:
modules[modname] = 1
def onerror(modname):
callback(None, modname, None)
ModuleScanner().run(callback, onerror=onerror)
self.list(modules.keys())
self.output.write('''
Enter any module name to get more help. Or, type "modules spam" to search
for modules whose descriptions contain the word "spam".
''')
help = Helper()
class Scanner:
"""A generic tree iterator."""
def __init__(self, roots, children, descendp):
self.roots = roots[:]
self.state = []
self.children = children
self.descendp = descendp
def next(self):
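        # Iterative depth-first walk: self.state stacks (node, children)
        # pairs, and descendp decides whether a child's subtree is entered.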
if not self.state:
if not self.roots:
return None
root = self.roots.pop(0)
self.state = [(root, self.children(root))]
node, children = self.state[-1]
if not children:
self.state.pop()
return self.next()
child = children.pop(0)
if self.descendp(child):
self.state.append((child, self.children(child)))
return child
class ModuleScanner:
"""An interruptible scanner that searches module synopses."""
def run(self, callback, key=None, completer=None, onerror=None):
if key: key = key.lower()
self.quit = False
seen = {}
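        # Phase 1: built-in modules.  Phase 2: walk sys.path via pkgutil,
        # taking a one-line synopsis from the source or the imported module.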
for modname in sys.builtin_module_names:
if modname != '__main__':
seen[modname] = 1
if key is None:
callback(None, modname, '')
else:
name = __import__(modname).__doc__ or ''
desc = name.split('\n')[0]
name = modname + ' - ' + desc
if name.lower().find(key) >= 0:
callback(None, modname, desc)
for importer, modname, ispkg in pkgutil.walk_packages(onerror=onerror):
if self.quit:
break
if key is None:
callback(None, modname, '')
else:
try:
loader = importer.find_module(modname)
except SyntaxError:
# raised by tests for bad coding cookies or BOM
continue
if hasattr(loader, 'get_source'):
try:
source = loader.get_source(modname)
except Exception:
if onerror:
onerror(modname)
continue
desc = source_synopsis(io.StringIO(source)) or ''
if hasattr(loader, 'get_filename'):
path = loader.get_filename(modname)
else:
path = None
else:
try:
module = loader.load_module(modname)
except ImportError:
if onerror:
onerror(modname)
continue
desc = (module.__doc__ or '').splitlines()[0]
path = getattr(module,'__file__',None)
name = modname + ' - ' + desc
if name.lower().find(key) >= 0:
callback(path, modname, desc)
if completer:
completer()
def apropos(key):
"""Print all the one-line module summaries that contain a substring."""
def callback(path, modname, desc):
if modname[-9:] == '.__init__':
modname = modname[:-9] + ' (package)'
print(modname, desc and '- ' + desc)
def onerror(modname):
pass
with warnings.catch_warnings():
warnings.filterwarnings('ignore') # ignore problems during import
ModuleScanner().run(callback, key, onerror=onerror)
# --------------------------------------- enhanced Web browser interface
def _start_server(urlhandler, port):
"""Start an HTTP server thread on a specific port.
Start an HTML/text server thread, so HTML or text documents can be
browsed dynamically and interactively with a Web browser. Example use:
>>> import time
>>> import pydoc
Define a URL handler. To determine what the client is asking
for, check the URL and content_type.
Then get or generate some text or HTML code and return it.
>>> def my_url_handler(url, content_type):
... text = 'the URL sent was: (%s, %s)' % (url, content_type)
... return text
Start server thread on port 0.
If you use port 0, the server will pick a random port number.
You can then use serverthread.port to get the port number.
>>> port = 0
>>> serverthread = pydoc._start_server(my_url_handler, port)
Check that the server is really started. If it is, open browser
and get first page. Use serverthread.url as the starting page.
>>> if serverthread.serving:
... import webbrowser
The next two lines are commented out so a browser doesn't open if
doctest is run on this module.
#... webbrowser.open(serverthread.url)
#True
Let the server do its thing. We just need to monitor its status.
Use time.sleep so the loop doesn't hog the CPU.
>>> starttime = time.time()
>>> timeout = 1 #seconds
This is a short timeout for testing purposes.
>>> while serverthread.serving:
... time.sleep(.01)
... if serverthread.serving and time.time() - starttime > timeout:
... serverthread.stop()
... break
Print any errors that may have occurred.
>>> print(serverthread.error)
None
"""
import http.server
import email.message
import select
import threading
class DocHandler(http.server.BaseHTTPRequestHandler):
def do_GET(self):
"""Process a request from an HTML browser.
The URL received is in self.path.
Get an HTML page from self.urlhandler and send it.
"""
if self.path.endswith('.css'):
content_type = 'text/css'
else:
content_type = 'text/html'
self.send_response(200)
self.send_header('Content-Type', '%s; charset=UTF-8' % content_type)
self.end_headers()
self.wfile.write(self.urlhandler(
self.path, content_type).encode('utf-8'))
def log_message(self, *args):
# Don't log messages.
pass
class DocServer(http.server.HTTPServer):
def __init__(self, port, callback):
self.host = (sys.platform == 'mac') and '127.0.0.1' or 'localhost'
self.address = ('', port)
self.callback = callback
self.base.__init__(self, self.address, self.handler)
self.quit = False
def serve_until_quit(self):
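            # select() with a 1 second timeout lets the loop re-check
            # self.quit regularly instead of blocking in handle_request().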
while not self.quit:
rd, wr, ex = select.select([self.socket.fileno()], [], [], 1)
if rd:
self.handle_request()
self.server_close()
def server_activate(self):
self.base.server_activate(self)
if self.callback:
self.callback(self)
class ServerThread(threading.Thread):
def __init__(self, urlhandler, port):
self.urlhandler = urlhandler
self.port = int(port)
threading.Thread.__init__(self)
self.serving = False
self.error = None
def run(self):
"""Start the server."""
try:
DocServer.base = http.server.HTTPServer
DocServer.handler = DocHandler
DocHandler.MessageClass = email.message.Message
DocHandler.urlhandler = staticmethod(self.urlhandler)
docsvr = DocServer(self.port, self.ready)
self.docserver = docsvr
docsvr.serve_until_quit()
except Exception as e:
self.error = e
def ready(self, server):
self.serving = True
self.host = server.host
self.port = server.server_port
self.url = 'http://%s:%d/' % (self.host, self.port)
def stop(self):
"""Stop the server and this thread nicely"""
self.docserver.quit = True
self.serving = False
self.url = None
thread = ServerThread(urlhandler, port)
thread.start()
# Wait until thread.serving is True to make sure we are
# really up before returning.
while not thread.error and not thread.serving:
time.sleep(.01)
return thread
def _url_handler(url, content_type="text/html"):
"""The pydoc url handler for use with the pydoc server.
If the content_type is 'text/css', the _pydoc.css style
    sheet is read and returned if it exists.
If the content_type is 'text/html', then the result of
get_html_page(url) is returned.
"""
class _HTMLDoc(HTMLDoc):
def page(self, title, contents):
"""Format an HTML page."""
css_path = "pydoc_data/_pydoc.css"
css_link = (
'<link rel="stylesheet" type="text/css" href="%s">' %
css_path)
return '''\
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
<html><head><title>Pydoc: %s</title>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
%s</head><body bgcolor="#f0f0f8">%s<div style="clear:both;padding-top:.5em;">%s</div>
</body></html>''' % (title, css_link, html_navbar(), contents)
def filelink(self, url, path):
return '<a href="getfile?key=%s">%s</a>' % (url, path)
html = _HTMLDoc()
def html_navbar():
version = html.escape("%s [%s, %s]" % (platform.python_version(),
platform.python_build()[0],
platform.python_compiler()))
return """
<div style='float:left'>
Python %s<br>%s
</div>
<div style='float:right'>
<div style='text-align:center'>
<a href="index.html">Module Index</a>
: <a href="topics.html">Topics</a>
: <a href="keywords.html">Keywords</a>
</div>
<div>
<form action="get" style='display:inline;'>
<input type=text name=key size=15>
<input type=submit value="Get">
</form>
<form action="search" style='display:inline;'>
<input type=text name=key size=15>
<input type=submit value="Search">
</form>
</div>
</div>
""" % (version, html.escape(platform.platform(terse=True)))
def html_index():
"""Module Index page."""
def bltinlink(name):
return '<a href="%s.html">%s</a>' % (name, name)
heading = html.heading(
'<big><big><strong>Index of Modules</strong></big></big>',
'#ffffff', '#7799ee')
names = [name for name in sys.builtin_module_names
if name != '__main__']
contents = html.multicolumn(names, bltinlink)
contents = [heading, '<p>' + html.bigsection(
'Built-in Modules', '#ffffff', '#ee77aa', contents)]
seen = {}
for dir in sys.path:
contents.append(html.index(dir, seen))
contents.append(
'<p align=right><font color="#909090" face="helvetica,'
'arial"><strong>pydoc</strong> by Ka-Ping Yee'
'<[email protected]></font>')
return 'Index of Modules', ''.join(contents)
def html_search(key):
"""Search results page."""
# scan for modules
search_result = []
def callback(path, modname, desc):
if modname[-9:] == '.__init__':
modname = modname[:-9] + ' (package)'
search_result.append((modname, desc and '- ' + desc))
with warnings.catch_warnings():
warnings.filterwarnings('ignore') # ignore problems during import
ModuleScanner().run(callback, key)
# format page
def bltinlink(name):
return '<a href="%s.html">%s</a>' % (name, name)
results = []
heading = html.heading(
'<big><big><strong>Search Results</strong></big></big>',
'#ffffff', '#7799ee')
for name, desc in search_result:
results.append(bltinlink(name) + desc)
contents = heading + html.bigsection(
'key = %s' % key, '#ffffff', '#ee77aa', '<br>'.join(results))
return 'Search Results', contents
def html_getfile(path):
"""Get and display a source file listing safely."""
path = path.replace('%20', ' ')
with tokenize.open(path) as fp:
lines = html.escape(fp.read())
body = '<pre>%s</pre>' % lines
heading = html.heading(
'<big><big><strong>File Listing</strong></big></big>',
'#ffffff', '#7799ee')
contents = heading + html.bigsection(
'File: %s' % path, '#ffffff', '#ee77aa', body)
return 'getfile %s' % path, contents
def html_topics():
"""Index of topic texts available."""
def bltinlink(name):
return '<a href="topic?key=%s">%s</a>' % (name, name)
heading = html.heading(
'<big><big><strong>INDEX</strong></big></big>',
'#ffffff', '#7799ee')
names = sorted(Helper.topics.keys())
contents = html.multicolumn(names, bltinlink)
contents = heading + html.bigsection(
'Topics', '#ffffff', '#ee77aa', contents)
return 'Topics', contents
def html_keywords():
"""Index of keywords."""
heading = html.heading(
'<big><big><strong>INDEX</strong></big></big>',
'#ffffff', '#7799ee')
names = sorted(Helper.keywords.keys())
def bltinlink(name):
return '<a href="topic?key=%s">%s</a>' % (name, name)
contents = html.multicolumn(names, bltinlink)
contents = heading + html.bigsection(
'Keywords', '#ffffff', '#ee77aa', contents)
return 'Keywords', contents
def html_topicpage(topic):
"""Topic or keyword help page."""
buf = io.StringIO()
htmlhelp = Helper(buf, buf)
contents, xrefs = htmlhelp._gettopic(topic)
if topic in htmlhelp.keywords:
title = 'KEYWORD'
else:
title = 'TOPIC'
heading = html.heading(
'<big><big><strong>%s</strong></big></big>' % title,
'#ffffff', '#7799ee')
contents = '<pre>%s</pre>' % html.markup(contents)
contents = html.bigsection(topic , '#ffffff','#ee77aa', contents)
if xrefs:
xrefs = sorted(xrefs.split())
def bltinlink(name):
return '<a href="topic?key=%s">%s</a>' % (name, name)
xrefs = html.multicolumn(xrefs, bltinlink)
xrefs = html.section('Related help topics: ',
'#ffffff', '#ee77aa', xrefs)
return ('%s %s' % (title, topic),
''.join((heading, contents, xrefs)))
def html_getobj(url):
obj = locate(url, forceload=1)
if obj is None and url != 'None':
raise ValueError('could not find object')
title = describe(obj)
content = html.document(obj, url)
return title, content
def html_error(url, exc):
heading = html.heading(
'<big><big><strong>Error</strong></big></big>',
'#ffffff', '#7799ee')
contents = '<br>'.join(html.escape(line) for line in
format_exception_only(type(exc), exc))
contents = heading + html.bigsection(url, '#ffffff', '#bb0000',
contents)
return "Error - %s" % url, contents
def get_html_page(url):
"""Generate an HTML page for url."""
complete_url = url
if url.endswith('.html'):
url = url[:-5]
try:
if url in ("", "index"):
title, content = html_index()
elif url == "topics":
title, content = html_topics()
elif url == "keywords":
title, content = html_keywords()
elif '=' in url:
op, _, url = url.partition('=')
if op == "search?key":
title, content = html_search(url)
elif op == "getfile?key":
title, content = html_getfile(url)
elif op == "topic?key":
# try topics first, then objects.
try:
title, content = html_topicpage(url)
except ValueError:
title, content = html_getobj(url)
elif op == "get?key":
# try objects first, then topics.
if url in ("", "index"):
title, content = html_index()
else:
try:
title, content = html_getobj(url)
except ValueError:
title, content = html_topicpage(url)
else:
raise ValueError('bad pydoc url')
else:
title, content = html_getobj(url)
except Exception as exc:
# Catch any errors and display them in an error page.
title, content = html_error(complete_url, exc)
return html.page(title, content)
if url.startswith('/'):
url = url[1:]
if content_type == 'text/css':
path_here = os.path.dirname(os.path.realpath(__file__))
css_path = os.path.join(path_here, url)
with open(css_path) as fp:
return ''.join(fp.readlines())
elif content_type == 'text/html':
return get_html_page(url)
# Errors outside the url handler are caught by the server.
raise TypeError('unknown content type %r for url %s' % (content_type, url))
def browse(port=0, *, open_browser=True):
"""Start the enhanced pydoc Web server and open a Web browser.
Use port '0' to start the server on an arbitrary port.
Set open_browser to False to suppress opening a browser.
"""
import webbrowser
serverthread = _start_server(_url_handler, port)
if serverthread.error:
print(serverthread.error)
return
if serverthread.serving:
server_help_msg = 'Server commands: [b]rowser, [q]uit'
if open_browser:
webbrowser.open(serverthread.url)
try:
print('Server ready at', serverthread.url)
print(server_help_msg)
while serverthread.serving:
cmd = input('server> ')
cmd = cmd.lower()
if cmd == 'q':
break
elif cmd == 'b':
webbrowser.open(serverthread.url)
else:
print(server_help_msg)
except (KeyboardInterrupt, EOFError):
print()
finally:
if serverthread.serving:
serverthread.stop()
print('Server stopped')
# -------------------------------------------------- command-line interface
def ispath(x):
return isinstance(x, str) and x.find(os.sep) >= 0
def cli():
"""Command-line interface (looks at sys.argv to decide what to do)."""
import getopt
class BadUsage(Exception): pass
# Scripts don't get the current directory in their path by default
# unless they are run with the '-m' switch
if '' not in sys.path:
scriptdir = os.path.dirname(sys.argv[0])
if scriptdir in sys.path:
sys.path.remove(scriptdir)
sys.path.insert(0, '.')
try:
opts, args = getopt.getopt(sys.argv[1:], 'bk:p:w')
writing = False
start_server = False
open_browser = False
port = None
for opt, val in opts:
if opt == '-b':
start_server = True
open_browser = True
if opt == '-k':
apropos(val)
return
if opt == '-p':
start_server = True
port = val
if opt == '-w':
writing = True
if start_server:
if port is None:
port = 0
browse(port, open_browser=open_browser)
return
if not args: raise BadUsage
for arg in args:
if ispath(arg) and not os.path.exists(arg):
print('file %r does not exist' % arg)
break
try:
if ispath(arg) and os.path.isfile(arg):
arg = importfile(arg)
if writing:
if ispath(arg) and os.path.isdir(arg):
writedocs(arg)
else:
writedoc(arg)
else:
help.help(arg)
except ErrorDuringImport as value:
print(value)
except (getopt.error, BadUsage):
cmd = os.path.splitext(os.path.basename(sys.argv[0]))[0]
print("""pydoc - the Python documentation tool
{cmd} <name> ...
Show text documentation on something. <name> may be the name of a
Python keyword, topic, function, module, or package, or a dotted
reference to a class or function within a module or module in a
package. If <name> contains a '{sep}', it is used as the path to a
Python source file to document. If name is 'keywords', 'topics',
or 'modules', a listing of these things is displayed.
{cmd} -k <keyword>
Search for a keyword in the synopsis lines of all available modules.
{cmd} -p <port>
Start an HTTP server on the given port on the local machine. Port
number 0 can be used to get an arbitrary unused port.
{cmd} -b
Start an HTTP server on an arbitrary unused port and open a Web browser
to interactively browse documentation. The -p option can be used with
the -b option to explicitly specify the server port.
{cmd} -w <name> ...
Write out the HTML documentation for a module to a file in the current
directory. If <name> contains a '{sep}', it is treated as a filename; if
it names a directory, documentation is written for all the contents.
""".format(cmd=cmd, sep=os.sep))
if __name__ == '__main__':
cli()
| gpl-3.0 |
Greennut/ostproject | django/contrib/sites/models.py | 387 | 2867 | from django.db import models
from django.utils.translation import ugettext_lazy as _
SITE_CACHE = {}
class SiteManager(models.Manager):
def get_current(self):
"""
Returns the current ``Site`` based on the SITE_ID in the
project's settings. The ``Site`` object is cached the first
time it's retrieved from the database.
"""
from django.conf import settings
try:
sid = settings.SITE_ID
except AttributeError:
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured("You're using the Django \"sites framework\" without having set the SITE_ID setting. Create a site in your database and set the SITE_ID setting to fix this error.")
try:
current_site = SITE_CACHE[sid]
except KeyError:
current_site = self.get(pk=sid)
SITE_CACHE[sid] = current_site
return current_site
def clear_cache(self):
"""Clears the ``Site`` object cache."""
global SITE_CACHE
SITE_CACHE = {}
class Site(models.Model):
domain = models.CharField(_('domain name'), max_length=100)
name = models.CharField(_('display name'), max_length=50)
objects = SiteManager()
class Meta:
db_table = 'django_site'
verbose_name = _('site')
verbose_name_plural = _('sites')
ordering = ('domain',)
def __unicode__(self):
return self.domain
def save(self, *args, **kwargs):
super(Site, self).save(*args, **kwargs)
# Cached information will likely be incorrect now.
if self.id in SITE_CACHE:
del SITE_CACHE[self.id]
def delete(self):
pk = self.pk
super(Site, self).delete()
try:
del SITE_CACHE[pk]
except KeyError:
pass
class RequestSite(object):
"""
A class that shares the primary interface of Site (i.e., it has
``domain`` and ``name`` attributes) but gets its data from a Django
HttpRequest object rather than from a database.
The save() and delete() methods raise NotImplementedError.
"""
def __init__(self, request):
self.domain = self.name = request.get_host()
def __unicode__(self):
return self.domain
def save(self, force_insert=False, force_update=False):
raise NotImplementedError('RequestSite cannot be saved.')
def delete(self):
raise NotImplementedError('RequestSite cannot be deleted.')
def get_current_site(request):
"""
Checks if contrib.sites is installed and returns either the current
``Site`` object or a ``RequestSite`` object based on the request.
"""
if Site._meta.installed:
current_site = Site.objects.get_current()
else:
current_site = RequestSite(request)
return current_site
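# Illustrative usage only (not part of this module): get_current_site lets
# a view work whether or not the sites framework is installed, e.g.
#   def my_view(request):
#       site = get_current_site(request)  # Site or RequestSite
#       return HttpResponse("Served by %s" % site.domain)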
| bsd-3-clause |
aburleigh94/LehighPV | doc/ext/pvsample.py | 6 | 2202 | """
Adds the pv-sample directive, which shows the code for an
example while also executing it live on the web page.
"""
from docutils import nodes
from docutils.parsers.rst import Directive
from sphinx.util.compat import make_admonition
from sphinx.util.nodes import set_source_info
from sphinx.locale import _
import os
class PVSample(nodes.Admonition, nodes.Element):
pass
RAW_CODE_PRELUDE = '''
<script type='text/javascript' src='%s/bio-pv.min.js'></script>
<style>
#viewer {
border-width:1px;
border-style:solid;
border-color:#eee;
padding : 0px;
width : 300px;
height : 300px;
margin-left : auto; margin-right: auto;
}
#viewer-wrap {
text-align:center;
width: 100%%;
}
</style>
<div id=viewer-wrap>
<div id=viewer></div>
</div>
'''
class PVSampleDirective(Directive):
# this enables content in the directive
has_content = True
def run(self):
env = self.state.document.settings.env
code = '\n'.join(self.content)
literal = nodes.literal_block(code, code)
literal['language' ] = 'html'
print env.docname
doc_dir = os.path.dirname(env.docname)
relative_static_path = os.path.relpath(env.config.html_static_path[0],
doc_dir)
prelude = RAW_CODE_PRELUDE % relative_static_path
raw_html_code = nodes.raw(code, prelude + code + '</br>',
format='html')
set_source_info(self, literal)
set_source_info(self, raw_html_code)
return [raw_html_code, nodes.subtitle('', 'Source Code'), literal]
class PVSampleListDirective(Directive):
def run(self):
        # PVSampleList is undefined in this module; use the existing PVSample
        # node so this (never registered) directive cannot raise a NameError.
        return [PVSample('')]
def visit_pv_sample(self, node):
self.visit_admonition(node)
def depart_pv_sample(self, node):
self.depart_admonition(node)
def setup(app):
app.add_node(PVSample,
html=(visit_pv_sample, depart_pv_sample),
latex=(visit_pv_sample, depart_pv_sample),
text=(visit_pv_sample, depart_pv_sample))
app.add_directive('pv-sample', PVSampleDirective)
return {'version': '0.1'} # identifies the version of our extension
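# Illustrative usage only: in an .rst page, the directive body is arbitrary
# HTML/JS that is both shown as source and executed live, e.g.
#   .. pv-sample::
#
#      <script type='text/javascript'>
#        var viewer = pv.Viewer(document.getElementById('viewer'),
#                               {width: 300, height: 300});
#      </script>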
| mit |
yasoob/youtube-dl-GUI | youtube_dl/extractor/discoveryvr.py | 59 | 2129 | # coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
from ..utils import parse_duration
class DiscoveryVRIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?discoveryvr\.com/watch/(?P<id>[^/?#]+)'
_TEST = {
'url': 'http://www.discoveryvr.com/watch/discovery-vr-an-introduction',
'md5': '32b1929798c464a54356378b7912eca4',
'info_dict': {
'id': 'discovery-vr-an-introduction',
'ext': 'mp4',
'title': 'Discovery VR - An Introduction',
'description': 'md5:80d418a10efb8899d9403e61d8790f06',
}
}
def _real_extract(self, url):
display_id = self._match_id(url)
webpage = self._download_webpage(url, display_id)
bootstrap_data = self._search_regex(
r'root\.DVR\.bootstrapData\s+=\s+"({.+?})";',
webpage, 'bootstrap data')
bootstrap_data = self._parse_json(
bootstrap_data.encode('utf-8').decode('unicode_escape'),
display_id)
videos = self._parse_json(bootstrap_data['videos'], display_id)['allVideos']
video_data = next(video for video in videos if video.get('slug') == display_id)
series = video_data.get('showTitle')
title = episode = video_data.get('title') or series
if series and series != title:
title = '%s - %s' % (series, title)
formats = []
for f, format_id in (('cdnUriM3U8', 'mobi'), ('webVideoUrlSd', 'sd'), ('webVideoUrlHd', 'hd')):
f_url = video_data.get(f)
if not f_url:
continue
formats.append({
'format_id': format_id,
'url': f_url,
})
return {
'id': display_id,
'display_id': display_id,
'title': title,
'description': video_data.get('description'),
'thumbnail': video_data.get('thumbnail'),
'duration': parse_duration(video_data.get('runTime')),
'formats': formats,
'episode': episode,
'series': series,
}
| mit |
wujuguang/sentry | src/sentry/interfaces/base.py | 13 | 3049 | from __future__ import absolute_import
from django.conf import settings
from django.utils.html import escape
from django.utils.translation import ugettext as _
from sentry.utils.imports import import_string
def get_interface(name):
try:
import_path = settings.SENTRY_INTERFACES[name]
except KeyError:
raise ValueError('Invalid interface name: %s' % (name,))
try:
interface = import_string(import_path)
except Exception:
raise ValueError('Unable to load interface: %s' % (name,))
return interface
class Interface(object):
"""
An interface is a structured representation of data, which may
render differently than the default ``extra`` metadata in an event.
"""
_data = None
score = 0
display_score = None
__slots__ = ['_data']
def __init__(self, **data):
self._data = data or {}
def __eq__(self, other):
if type(self) != type(other):
return False
return self._data == other._data
def __getstate__(self):
return dict(
(slot, self.__dict__.get(slot))
for slot in self.__slots__
)
def __setstate__(self, state):
self.__dict__.update(state)
if not hasattr(self, '_data'):
self._data = {}
def __getattr__(self, name):
return self._data[name]
def __setattr__(self, name, value):
if name == '_data':
self.__dict__['_data'] = value
else:
self._data[name] = value
@classmethod
def to_python(cls, data):
return cls(data)
def get_api_context(self):
return self.to_json()
def to_json(self):
# eliminate empty values for serialization to compress the keyspace
# and save (seriously) ridiculous amounts of bytes
# XXX(dcramer): its important that we keep zero values here, but empty
# lists and strings get discarded as we've deemed them not important
return dict(
(k, v) for k, v in self._data.iteritems() if (v == 0 or v)
)
def get_path(self):
cls = type(self)
return '%s.%s' % (cls.__module__, cls.__name__)
def get_alias(self):
return self.get_slug()
def get_hash(self):
return []
def compute_hashes(self, platform):
result = self.get_hash()
if not result:
return []
return [result]
def get_slug(self):
return type(self).__name__.lower()
def get_title(self):
return _(type(self).__name__)
def get_display_score(self):
return self.display_score or self.score
def get_score(self):
return self.score
def to_html(self, event, is_public=False, **kwargs):
return ''
def to_string(self, event, is_public=False, **kwargs):
return ''
def to_email_html(self, event, **kwargs):
body = self.to_string(event)
if not body:
return ''
return '<pre>%s</pre>' % (escape(body).replace('\n', '<br>'),)
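# Illustrative only -- a minimal, hypothetical subclass (not part of Sentry's
# actual interface set) showing how get_hash() feeds compute_hashes():
class ExampleMessage(Interface):
    score = 100
    def get_hash(self):
        # Group events by the raw message string, assuming a 'message' key.
        message = self._data.get('message')
        return [message] if message else []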
| bsd-3-clause |
zakuro9715/lettuce | tests/integration/lib/Django-1.3/django/contrib/sitemaps/tests/urls.py | 233 | 1275 | from datetime import datetime
from django.conf.urls.defaults import *
from django.contrib.sitemaps import Sitemap, GenericSitemap, FlatPageSitemap
from django.contrib.auth.models import User
class SimpleSitemap(Sitemap):
changefreq = "never"
priority = 0.5
location = '/location/'
lastmod = datetime.now()
def items(self):
return [object()]
simple_sitemaps = {
'simple': SimpleSitemap,
}
generic_sitemaps = {
'generic': GenericSitemap({
'queryset': User.objects.all()
}),
}
flatpage_sitemaps = {
'flatpages': FlatPageSitemap,
}
urlpatterns = patterns('django.contrib.sitemaps.views',
(r'^simple/index\.xml$', 'index', {'sitemaps': simple_sitemaps}),
(r'^simple/custom-index\.xml$', 'index', {'sitemaps': simple_sitemaps, 'template_name': 'custom_sitemap_index.xml'}),
(r'^simple/sitemap-(?P<section>.+)\.xml$', 'sitemap', {'sitemaps': simple_sitemaps}),
(r'^simple/sitemap\.xml$', 'sitemap', {'sitemaps': simple_sitemaps}),
(r'^simple/custom-sitemap\.xml$', 'sitemap', {'sitemaps': simple_sitemaps, 'template_name': 'custom_sitemap.xml'}),
(r'^generic/sitemap\.xml$', 'sitemap', {'sitemaps': generic_sitemaps}),
(r'^flatpages/sitemap\.xml$', 'sitemap', {'sitemaps': flatpage_sitemaps}),
)
| gpl-3.0 |
msiedlarek/qtwebkit | Tools/Scripts/webkitpy/test/runner.py | 130 | 3330 | # Copyright (C) 2012 Google, Inc.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""code to actually run a list of python tests."""
import re
import time
import unittest
from webkitpy.common import message_pool
_test_description = re.compile(r"(\w+) \(([\w.]+)\)")
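# Turns unittest's "testName (package.module.Class)" representation into
# the dotted "package.module.Class.testName" form used for reporting.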
def unit_test_name(test):
m = _test_description.match(str(test))
return "%s.%s" % (m.group(2), m.group(1))
class Runner(object):
def __init__(self, printer, loader):
self.printer = printer
self.loader = loader
self.tests_run = 0
self.errors = []
self.failures = []
self.worker_factory = lambda caller: _Worker(caller, self.loader)
def run(self, test_names, num_workers):
if not test_names:
return
num_workers = min(num_workers, len(test_names))
with message_pool.get(self, self.worker_factory, num_workers) as pool:
pool.run(('test', test_name) for test_name in test_names)
def handle(self, message_name, source, test_name, delay=None, failures=None, errors=None):
if message_name == 'started_test':
self.printer.print_started_test(source, test_name)
return
self.tests_run += 1
if failures:
self.failures.append((test_name, failures))
if errors:
self.errors.append((test_name, errors))
self.printer.print_finished_test(source, test_name, delay, failures, errors)
class _Worker(object):
def __init__(self, caller, loader):
self._caller = caller
self._loader = loader
def handle(self, message_name, source, test_name):
assert message_name == 'test'
result = unittest.TestResult()
start = time.time()
self._caller.post('started_test', test_name)
# We will need to rework this if a test_name results in multiple tests.
self._loader.loadTestsFromName(test_name, None).run(result)
self._caller.post('finished_test', test_name, time.time() - start,
[failure[1] for failure in result.failures], [error[1] for error in result.errors])
| lgpl-3.0 |
Oteng/youtube-dl | youtube_dl/extractor/c56.py | 162 | 1365 | # coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
class C56IE(InfoExtractor):
_VALID_URL = r'https?://(?:(?:www|player)\.)?56\.com/(?:.+?/)?(?:v_|(?:play_album.+-))(?P<textid>.+?)\.(?:html|swf)'
IE_NAME = '56.com'
_TEST = {
'url': 'http://www.56.com/u39/v_OTM0NDA3MTY.html',
'md5': 'e59995ac63d0457783ea05f93f12a866',
'info_dict': {
'id': '93440716',
'ext': 'flv',
'title': '网事知多少 第32期:车怒',
'duration': 283.813,
},
}
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url, flags=re.VERBOSE)
text_id = mobj.group('textid')
page = self._download_json(
'http://vxml.56.com/json/%s/' % text_id, text_id, 'Downloading video info')
info = page['info']
formats = [
{
'format_id': f['type'],
'filesize': int(f['filesize']),
'url': f['url']
} for f in info['rfiles']
]
self._sort_formats(formats)
return {
'id': info['vid'],
'title': info['Subject'],
'duration': int(info['duration']) / 1000.0,
'formats': formats,
'thumbnail': info.get('bimg') or info.get('img'),
}
| unlicense |
manaris/jythonMusic | library/jython2.5.3/Lib/test/test_generators.py | 9 | 50380 | tutorial_tests = """
Let's try a simple generator:
>>> def f():
... yield 1
... yield 2
>>> for i in f():
... print i
1
2
>>> g = f()
>>> g.next()
1
>>> g.next()
2
"Falling off the end" stops the generator:
>>> g.next()
Traceback (most recent call last):
File "<stdin>", line 1, in ?
File "<stdin>", line 2, in g
StopIteration
"return" also stops the generator:
>>> def f():
... yield 1
... return
... yield 2 # never reached
...
>>> g = f()
>>> g.next()
1
>>> g.next()
Traceback (most recent call last):
File "<stdin>", line 1, in ?
File "<stdin>", line 3, in f
StopIteration
>>> g.next() # once stopped, can't be resumed
Traceback (most recent call last):
File "<stdin>", line 1, in ?
StopIteration
"raise StopIteration" stops the generator too:
>>> def f():
... yield 1
... raise StopIteration
... yield 2 # never reached
...
>>> g = f()
>>> g.next()
1
>>> g.next()
Traceback (most recent call last):
File "<stdin>", line 1, in ?
StopIteration
>>> g.next()
Traceback (most recent call last):
File "<stdin>", line 1, in ?
StopIteration
However, they are not exactly equivalent:
>>> def g1():
... try:
... return
... except:
... yield 1
...
>>> list(g1())
[]
>>> def g2():
... try:
... raise StopIteration
... except:
... yield 42
>>> print list(g2())
[42]
This may be surprising at first:
>>> def g3():
... try:
... return
... finally:
... yield 1
...
>>> list(g3())
[1]
Let's create an alternate range() function implemented as a generator:
>>> def yrange(n):
... for i in range(n):
... yield i
...
>>> list(yrange(5))
[0, 1, 2, 3, 4]
Generators always return to the most recent caller:
>>> def creator():
... r = yrange(5)
... print "creator", r.next()
... return r
...
>>> def caller():
... r = creator()
... for i in r:
... print "caller", i
...
>>> caller()
creator 0
caller 1
caller 2
caller 3
caller 4
Generators can call other generators:
>>> def zrange(n):
... for i in yrange(n):
... yield i
...
>>> list(zrange(5))
[0, 1, 2, 3, 4]
"""
# The examples from PEP 255.
pep_tests = """
Specification: Yield
Restriction: A generator cannot be resumed while it is actively
running:
>>> def g():
... i = me.next()
... yield i
>>> me = g()
>>> me.next()
Traceback (most recent call last):
...
File "<string>", line 2, in g
ValueError: generator already executing
Specification: Return
Note that return isn't always equivalent to raising StopIteration: the
difference lies in how enclosing try/except constructs are treated.
For example,
>>> def f1():
... try:
... return
... except:
... yield 1
>>> print list(f1())
[]
because, as in any function, return simply exits, but
>>> def f2():
... try:
... raise StopIteration
... except:
... yield 42
>>> print list(f2())
[42]
because StopIteration is captured by a bare "except", as is any
exception.
Specification: Generators and Exception Propagation
>>> def f():
... return 1//0
>>> def g():
... yield f() # the zero division exception propagates
... yield 42 # and we'll never get here
>>> k = g()
>>> k.next()
Traceback (most recent call last):
File "<stdin>", line 1, in ?
File "<stdin>", line 2, in g
File "<stdin>", line 2, in f
ZeroDivisionError: integer division or modulo by zero
>>> k.next() # and the generator cannot be resumed
Traceback (most recent call last):
File "<stdin>", line 1, in ?
StopIteration
>>>
Specification: Try/Except/Finally
>>> def f():
... try:
... yield 1
... try:
... yield 2
... 1//0
... yield 3 # never get here
... except ZeroDivisionError:
... yield 4
... yield 5
... raise
... except:
... yield 6
... yield 7 # the "raise" above stops this
... except:
... yield 8
... yield 9
... try:
... x = 12
... finally:
... yield 10
... yield 11
>>> print list(f())
[1, 2, 4, 5, 8, 9, 10, 11]
>>>
Guido's binary tree example.
>>> # A binary tree class.
>>> class Tree:
...
... def __init__(self, label, left=None, right=None):
... self.label = label
... self.left = left
... self.right = right
...
... def __repr__(self, level=0, indent=" "):
... s = level*indent + repr(self.label)
... if self.left:
... s = s + "\\n" + self.left.__repr__(level+1, indent)
... if self.right:
... s = s + "\\n" + self.right.__repr__(level+1, indent)
... return s
...
... def __iter__(self):
... return inorder(self)
>>> # Create a Tree from a list.
>>> def tree(list):
... n = len(list)
... if n == 0:
... return []
... i = n // 2
... return Tree(list[i], tree(list[:i]), tree(list[i+1:]))
>>> # Show it off: create a tree.
>>> t = tree("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
>>> # A recursive generator that generates Tree labels in in-order.
>>> def inorder(t):
... if t:
... for x in inorder(t.left):
... yield x
... yield t.label
... for x in inorder(t.right):
... yield x
>>> # Show it off: create a tree.
>>> t = tree("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
>>> # Print the nodes of the tree in in-order.
>>> for x in t:
... print x,
A B C D E F G H I J K L M N O P Q R S T U V W X Y Z
>>> # A non-recursive generator.
>>> def inorder(node):
... stack = []
... while node:
... while node.left:
... stack.append(node)
... node = node.left
... yield node.label
... while not node.right:
... try:
... node = stack.pop()
... except IndexError:
... return
... yield node.label
... node = node.right
>>> # Exercise the non-recursive generator.
>>> for x in t:
... print x,
A B C D E F G H I J K L M N O P Q R S T U V W X Y Z
"""
# Examples from Iterator-List and Python-Dev and c.l.py.
email_tests = """
The difference between yielding None and returning it.
>>> def g():
... for i in range(3):
... yield None
... yield None
... return
>>> list(g())
[None, None, None, None]
Ensure that explicitly raising StopIteration acts like any other exception
in try/except, not like a return.
>>> def g():
... yield 1
... try:
... raise StopIteration
... except:
... yield 2
... yield 3
>>> list(g())
[1, 2, 3]
Next one was posted to c.l.py.
>>> def gcomb(x, k):
... "Generate all combinations of k elements from list x."
...
... if k > len(x):
... return
... if k == 0:
... yield []
... else:
... first, rest = x[0], x[1:]
... # A combination does or doesn't contain first.
... # If it does, the remainder is a k-1 comb of rest.
... for c in gcomb(rest, k-1):
... c.insert(0, first)
... yield c
... # If it doesn't contain first, it's a k comb of rest.
... for c in gcomb(rest, k):
... yield c
>>> seq = range(1, 5)
>>> for k in range(len(seq) + 2):
... print "%d-combs of %s:" % (k, seq)
... for c in gcomb(seq, k):
... print " ", c
0-combs of [1, 2, 3, 4]:
[]
1-combs of [1, 2, 3, 4]:
[1]
[2]
[3]
[4]
2-combs of [1, 2, 3, 4]:
[1, 2]
[1, 3]
[1, 4]
[2, 3]
[2, 4]
[3, 4]
3-combs of [1, 2, 3, 4]:
[1, 2, 3]
[1, 2, 4]
[1, 3, 4]
[2, 3, 4]
4-combs of [1, 2, 3, 4]:
[1, 2, 3, 4]
5-combs of [1, 2, 3, 4]:
From the Iterators list, about the types of these things.
>>> def g():
... yield 1
...
>>> type(g)
<type 'function'>
>>> i = g()
>>> type(i)
<type 'generator'>
>>> [s for s in dir(i) if not s.startswith('_')]
['close', 'gi_frame', 'gi_running', 'next', 'send', 'throw']
>>> print i.next.__doc__
x.next() -> the next value, or raise StopIteration
>>> iter(i) is i
True
>>> import types
>>> isinstance(i, types.GeneratorType)
True
And more, added later.
>>> i.gi_running
0
>>> type(i.gi_frame)
<type 'frame'>
>>> i.gi_running = 42
Traceback (most recent call last):
...
TypeError: readonly attribute
>>> def g():
... yield me.gi_running
>>> me = g()
>>> me.gi_running
0
>>> me.next()
1
>>> me.gi_running
0
A clever union-find implementation from c.l.py, due to David Eppstein.
Sent: Friday, June 29, 2001 12:16 PM
To: [email protected]
Subject: Re: PEP 255: Simple Generators
>>> class disjointSet:
... def __init__(self, name):
... self.name = name
... self.parent = None
... self.generator = self.generate()
...
... def generate(self):
... while not self.parent:
... yield self
... for x in self.parent.generator:
... yield x
...
... def find(self):
... return self.generator.next()
...
... def union(self, parent):
... if self.parent:
... raise ValueError("Sorry, I'm not a root!")
... self.parent = parent
...
... def __str__(self):
... return self.name
>>> names = "ABCDEFGHIJKLM"
>>> sets = [disjointSet(name) for name in names]
>>> roots = sets[:]
>>> import random
>>> gen = random.WichmannHill(42)
>>> while 1:
... for s in sets:
... print "%s->%s" % (s, s.find()),
... print
... if len(roots) > 1:
... s1 = gen.choice(roots)
... roots.remove(s1)
... s2 = gen.choice(roots)
... s1.union(s2)
... print "merged", s1, "into", s2
... else:
... break
A->A B->B C->C D->D E->E F->F G->G H->H I->I J->J K->K L->L M->M
merged D into G
A->A B->B C->C D->G E->E F->F G->G H->H I->I J->J K->K L->L M->M
merged C into F
A->A B->B C->F D->G E->E F->F G->G H->H I->I J->J K->K L->L M->M
merged L into A
A->A B->B C->F D->G E->E F->F G->G H->H I->I J->J K->K L->A M->M
merged H into E
A->A B->B C->F D->G E->E F->F G->G H->E I->I J->J K->K L->A M->M
merged B into E
A->A B->E C->F D->G E->E F->F G->G H->E I->I J->J K->K L->A M->M
merged J into G
A->A B->E C->F D->G E->E F->F G->G H->E I->I J->G K->K L->A M->M
merged E into G
A->A B->G C->F D->G E->G F->F G->G H->G I->I J->G K->K L->A M->M
merged M into G
A->A B->G C->F D->G E->G F->F G->G H->G I->I J->G K->K L->A M->G
merged I into K
A->A B->G C->F D->G E->G F->F G->G H->G I->K J->G K->K L->A M->G
merged K into A
A->A B->G C->F D->G E->G F->F G->G H->G I->A J->G K->A L->A M->G
merged F into A
A->A B->G C->A D->G E->G F->A G->G H->G I->A J->G K->A L->A M->G
merged A into G
A->G B->G C->G D->G E->G F->G G->G H->G I->G J->G K->G L->G M->G
"""
# Emacs turd '
# Fun tests (for sufficiently warped notions of "fun").
fun_tests = """
Build up to a recursive Sieve of Eratosthenes generator.
>>> def firstn(g, n):
... return [g.next() for i in range(n)]
>>> def intsfrom(i):
... while 1:
... yield i
... i += 1
>>> firstn(intsfrom(5), 7)
[5, 6, 7, 8, 9, 10, 11]
>>> def exclude_multiples(n, ints):
... for i in ints:
... if i % n:
... yield i
>>> firstn(exclude_multiples(3, intsfrom(1)), 6)
[1, 2, 4, 5, 7, 8]
>>> def sieve(ints):
... prime = ints.next()
... yield prime
... not_divisible_by_prime = exclude_multiples(prime, ints)
... for p in sieve(not_divisible_by_prime):
... yield p
>>> primes = sieve(intsfrom(2))
>>> firstn(primes, 20)
[2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71]
Another famous problem: generate all integers of the form
2**i * 3**j * 5**k
in increasing order, where i,j,k >= 0. Trickier than it may look at first!
Try writing it without generators, and correctly, and without generating
3 internal results for each result output.
>>> def times(n, g):
... for i in g:
... yield n * i
>>> firstn(times(10, intsfrom(1)), 10)
[10, 20, 30, 40, 50, 60, 70, 80, 90, 100]
>>> def merge(g, h):
... ng = g.next()
... nh = h.next()
... while 1:
... if ng < nh:
... yield ng
... ng = g.next()
... elif ng > nh:
... yield nh
... nh = h.next()
... else:
... yield ng
... ng = g.next()
... nh = h.next()
The following works, but is doing a whale of a lot of redundant work --
it's not clear how to get the internal uses of m235 to share a single
generator. Note that me_times2 (etc) each need to see every element in the
result sequence. So this is an example where lazy lists are more natural
(you can look at the head of a lazy list any number of times).
>>> def m235():
... yield 1
... me_times2 = times(2, m235())
... me_times3 = times(3, m235())
... me_times5 = times(5, m235())
... for i in merge(merge(me_times2,
... me_times3),
... me_times5):
... yield i
Don't print "too many" of these -- the implementation above is extremely
inefficient: each call of m235() leads to 3 recursive calls, and in
turn each of those 3 more, and so on, and so on, until we've descended
enough levels to satisfy the print stmts. Very odd: when I printed 5
lines of results below, this managed to screw up Win98's malloc in "the
usual" way, i.e. the heap grew over 4Mb so Win98 started fragmenting
address space, and it *looked* like a very slow leak.
>>> result = m235()
>>> for i in range(3):
... print firstn(result, 15)
[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
Heh. Here's one way to get a shared list, complete with an excruciating
namespace renaming trick. The *pretty* part is that the times() and merge()
functions can be reused as-is, because they only assume their stream
arguments are iterable -- a LazyList is the same as a generator to times().
>>> class LazyList:
... def __init__(self, g):
... self.sofar = []
... self.fetch = g.next
...
... def __getitem__(self, i):
... sofar, fetch = self.sofar, self.fetch
... while i >= len(sofar):
... sofar.append(fetch())
... return sofar[i]
>>> def m235():
... yield 1
... # Gack: m235 below actually refers to a LazyList.
... me_times2 = times(2, m235)
... me_times3 = times(3, m235)
... me_times5 = times(5, m235)
... for i in merge(merge(me_times2,
... me_times3),
... me_times5):
... yield i
Print as many of these as you like -- *this* implementation is memory-
efficient.
>>> m235 = LazyList(m235())
>>> for i in range(5):
... print [m235[j] for j in range(15*i, 15*(i+1))]
[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
[200, 216, 225, 240, 243, 250, 256, 270, 288, 300, 320, 324, 360, 375, 384]
[400, 405, 432, 450, 480, 486, 500, 512, 540, 576, 600, 625, 640, 648, 675]
Ye olde Fibonacci generator, LazyList style.
>>> def fibgen(a, b):
...
... def sum(g, h):
... while 1:
... yield g.next() + h.next()
...
... def tail(g):
... g.next() # throw first away
... for x in g:
... yield x
...
... yield a
... yield b
... for s in sum(iter(fib),
... tail(iter(fib))):
... yield s
>>> fib = LazyList(fibgen(1, 2))
>>> firstn(iter(fib), 17)
[1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584]
Running after your tail with itertools.tee (new in version 2.4)
The algorithms "m235" (Hamming) and Fibonacci presented above are both
examples of a whole family of FP (functional programming) algorithms
where a function produces and returns a list while the production algorithm
presumes the list has already been produced by recursively calling itself.
For these algorithms to work, they must:
- produce at least a first element without presupposing the existence of
the rest of the list
- produce their elements in a lazy manner
To work efficiently, the beginning of the list must not be recomputed over
and over again. This is ensured in most FP languages as a built-in feature.
In Python, we have to explicitly maintain a list of already computed results
and abandon genuine recursion.
This is what had been attempted above with the LazyList class. One problem
with that class is that it keeps a list of all of the generated results and
therefore continually grows. This partially defeats the goal of the generator
concept, viz. produce the results only as needed instead of producing them
all and thereby wasting memory.
Thanks to itertools.tee, it is now clear "how to get the internal uses of
m235 to share a single generator".
>>> from itertools import tee
>>> def m235():
... def _m235():
... yield 1
... for n in merge(times(2, m2),
... merge(times(3, m3),
... times(5, m5))):
... yield n
... m1 = _m235()
... m2, m3, m5, mRes = tee(m1, 4)
... return mRes
>>> it = m235()
>>> for i in range(5):
... print firstn(it, 15)
[1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20, 24]
[25, 27, 30, 32, 36, 40, 45, 48, 50, 54, 60, 64, 72, 75, 80]
[81, 90, 96, 100, 108, 120, 125, 128, 135, 144, 150, 160, 162, 180, 192]
[200, 216, 225, 240, 243, 250, 256, 270, 288, 300, 320, 324, 360, 375, 384]
[400, 405, 432, 450, 480, 486, 500, 512, 540, 576, 600, 625, 640, 648, 675]
The "tee" function does just what we want. It internally keeps a generated
result for as long as it has not been "consumed" from all of the duplicated
iterators, whereupon it is deleted. You can therefore print the Hamming
sequence for hours with little or no increase in memory usage.
The beauty of it is that recursive running-after-their-tail FP algorithms
are quite straightforwardly expressed with this Python idiom.
Ye olde Fibonacci generator, tee style.
>>> def fib():
...
... def _isum(g, h):
... while 1:
... yield g.next() + h.next()
...
... def _fib():
... yield 1
... yield 2
... fibTail.next() # throw first away
... for res in _isum(fibHead, fibTail):
... yield res
...
... realfib = _fib()
... fibHead, fibTail, fibRes = tee(realfib, 3)
... return fibRes
>>> firstn(fib(), 17)
[1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584]
"""
# syntax_tests mostly provokes SyntaxErrors. Also fiddling with #if 0
# hackery.
syntax_tests = """
>>> def f(): #doctest: +IGNORE_EXCEPTION_DETAIL, +NORMALIZE_WHITESPACE
... return 22
... yield 1
Traceback (most recent call last):
..
SyntaxError: 'return' with argument inside generator (<doctest test.test_generators.__test__.syntax[0]>, line 3)
>>> def f(): #doctest: +IGNORE_EXCEPTION_DETAIL, +NORMALIZE_WHITESPACE
... yield 1
... return 22
Traceback (most recent call last):
..
SyntaxError: 'return' with argument inside generator (<doctest test.test_generators.__test__.syntax[1]>, line 3)
"return None" is not the same as "return" in a generator:
>>> def f(): #doctest: +IGNORE_EXCEPTION_DETAIL, +NORMALIZE_WHITESPACE
... yield 1
... return None
Traceback (most recent call last):
..
SyntaxError: 'return' with argument inside generator (<doctest test.test_generators.__test__.syntax[2]>, line 3)
These are fine:
>>> def f():
... yield 1
... return
>>> def f():
... try:
... yield 1
... finally:
... pass
>>> def f():
... try:
... try:
... 1//0
... except ZeroDivisionError:
... yield 666
... except:
... pass
... finally:
... pass
>>> def f():
... try:
... try:
... yield 12
... 1//0
... except ZeroDivisionError:
... yield 666
... except:
... try:
... x = 12
... finally:
... yield 12
... except:
... return
>>> list(f())
[12, 666]
>>> def f():
... yield
>>> type(f())
<type 'generator'>
>>> def f():
... if 0:
... yield
>>> type(f())
<type 'generator'>
>>> def f():
... if 0:
... yield 1
>>> type(f())
<type 'generator'>
>>> def f():
... if "":
... yield None
>>> type(f())
<type 'generator'>
>>> def f():
... return
... try:
... if x==4:
... pass
... elif 0:
... try:
... 1//0
... except SyntaxError:
... pass
... else:
... if 0:
... while 12:
... x += 1
... yield 2 # don't blink
... f(a, b, c, d, e)
... else:
... pass
... except:
... x = 1
... return
>>> type(f())
<type 'generator'>
>>> def f():
... if 0:
... def g():
... yield 1
...
>>> type(f())
<type 'NoneType'>
>>> def f():
... if 0:
... class C:
... def __init__(self):
... yield 1
... def f(self):
... yield 2
>>> type(f())
<type 'NoneType'>
>>> def f():
... if 0:
... return
... if 0:
... yield 2
>>> type(f())
<type 'generator'>
>>> def f(): #doctest: +IGNORE_EXCEPTION_DETAIL, +NORMALIZE_WHITESPACE
... if 0:
... lambda x: x # shouldn't trigger here
... return # or here
... def f(i):
... return 2*i # or here
... if 0:
... return 3 # but *this* sucks (line 8)
... if 0:
... yield 2 # because it's a generator (line 10)
Traceback (most recent call last):
SyntaxError: 'return' with argument inside generator (<doctest test.test_generators.__test__.syntax[24]>, line 10)
This one caused a crash (see SF bug 567538):
>>> def f():
... for i in range(3):
... try:
... continue
... finally:
... yield i
...
>>> g = f()
>>> print g.next()
0
>>> print g.next()
1
>>> print g.next()
2
>>> print g.next()
Traceback (most recent call last):
StopIteration
"""
# conjoin is a simple backtracking generator, named in honor of Icon's
# "conjunction" control structure. Pass a list of no-argument functions
# that return iterable objects. Easiest to explain by example: assume the
# function list [x, y, z] is passed. Then conjoin acts like:
#
# def g():
# values = [None] * 3
# for values[0] in x():
# for values[1] in y():
# for values[2] in z():
# yield values
#
# So some 3-lists of values *may* be generated, each time we successfully
# get into the innermost loop. If an iterator fails (is exhausted) before
# then, it "backtracks" to get the next value from the nearest enclosing
# iterator (the one "to the left"), and starts all over again at the next
# slot (pumps a fresh iterator). Of course this is most useful when the
# iterators have side-effects, so that which values *can* be generated at
# each slot depend on the values iterated at previous slots.
def conjoin(gs):
values = [None] * len(gs)
def gen(i, values=values):
if i >= len(gs):
yield values
else:
for values[i] in gs[i]():
for x in gen(i+1):
yield x
for x in gen(0):
yield x
# That works fine, but recursing a level and checking i against len(gs) for
# each item produced is inefficient. By doing manual loop unrolling across
# generator boundaries, it's possible to eliminate most of that overhead.
# This isn't worth the bother *in general* for generators, but conjoin() is
# a core building block for some CPU-intensive generator applications.
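# In miniature (illustrative only): for nine iterators, the work is done by
# three nested-loop levels of _gen3 below instead of nine recursive gen()
# calls, so most of the per-item recursion overhead disappears.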
def conjoin(gs):
n = len(gs)
values = [None] * n
# Do one loop nest at time recursively, until the # of loop nests
# remaining is divisible by 3.
def gen(i, values=values):
if i >= n:
yield values
elif (n-i) % 3:
ip1 = i+1
for values[i] in gs[i]():
for x in gen(ip1):
yield x
else:
for x in _gen3(i):
yield x
# Do three loop nests at a time, recursing only if at least three more
# remain. Don't call directly: this is an internal optimization for
# gen's use.
def _gen3(i, values=values):
assert i < n and (n-i) % 3 == 0
ip1, ip2, ip3 = i+1, i+2, i+3
g, g1, g2 = gs[i : ip3]
if ip3 >= n:
# These are the last three, so we can yield values directly.
for values[i] in g():
for values[ip1] in g1():
for values[ip2] in g2():
yield values
else:
# At least 6 loop nests remain; peel off 3 and recurse for the
# rest.
for values[i] in g():
for values[ip1] in g1():
for values[ip2] in g2():
for x in _gen3(ip3):
yield x
for x in gen(0):
yield x
# And one more approach: For backtracking apps like the Knight's Tour
# solver below, the number of backtracking levels can be enormous (one
# level per square, for the Knight's Tour, so that e.g. a 100x100 board
# needs 10,000 levels). In such cases Python is likely to run out of
# stack space due to recursion. So here's a recursion-free version of
# conjoin too.
# NOTE WELL: This allows large problems to be solved with only trivial
# demands on stack space. Without explicitly resumable generators, this is
# much harder to achieve. OTOH, this is much slower (up to a factor of 2)
# than the fancy unrolled recursive conjoin.
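# Hypothetical usage sketch (not executed by this test file): to solve huge
# boards with bounded stack depth, rebind the name and run the solver, e.g.
#
#     conjoin = flat_conjoin
#     k = Knights(200, 200, hard=1)      # see the class further below
#     first_tour = k.solve().next()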
def flat_conjoin(gs): # rename to conjoin to run tests with this instead
n = len(gs)
values = [None] * n
iters = [None] * n
_StopIteration = StopIteration # make local because caught a *lot*
i = 0
while 1:
# Descend.
try:
while i < n:
it = iters[i] = gs[i]().next
values[i] = it()
i += 1
except _StopIteration:
pass
else:
assert i == n
yield values
# Backtrack until an older iterator can be resumed.
i -= 1
while i >= 0:
try:
values[i] = iters[i]()
# Success! Start fresh at next level.
i += 1
break
except _StopIteration:
# Continue backtracking.
i -= 1
else:
assert i < 0
break
# A conjoin-based N-Queens solver.
class Queens:
def __init__(self, n):
self.n = n
rangen = range(n)
# Assign a unique int to each column and diagonal.
# columns: n of those, range(n).
# NW-SE diagonals: 2n-1 of these, i-j unique and invariant along
# each, smallest i-j is 0-(n-1) = 1-n, so add n-1 to shift to 0-
# based.
# NE-SW diagonals: 2n-1 of these, i+j unique and invariant along
# each, smallest i+j is 0, largest is 2n-2.
# For each square, compute a bit vector of the columns and
# diagonals it covers, and for each row compute a function that
        # generates the possibilities for the columns in that row.
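        # Worked example (added for illustration): with n=4, the square at
        # i=1, j=2 covers column bit 2, NW-SE bit n + (i-j) + n-1 = 6, and
        # NE-SW bit n + 2n-1 + (i+j) = 14, so its rowuses entry below is
        # (1 << 2) | (1 << 6) | (1 << 14).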
self.rowgenerators = []
for i in rangen:
rowuses = [(1L << j) | # column ordinal
(1L << (n + i-j + n-1)) | # NW-SE ordinal
(1L << (n + 2*n-1 + i+j)) # NE-SW ordinal
for j in rangen]
def rowgen(rowuses=rowuses):
for j in rangen:
uses = rowuses[j]
if uses & self.used == 0:
self.used |= uses
yield j
self.used &= ~uses
self.rowgenerators.append(rowgen)
# Generate solutions.
def solve(self):
self.used = 0
for row2col in conjoin(self.rowgenerators):
yield row2col
def printsolution(self, row2col):
n = self.n
assert n == len(row2col)
sep = "+" + "-+" * n
print sep
for i in range(n):
squares = [" " for j in range(n)]
squares[row2col[i]] = "Q"
print "|" + "|".join(squares) + "|"
print sep
# A conjoin-based Knight's Tour solver. This is pretty sophisticated
# (e.g., when used with flat_conjoin above, and passing hard=1 to the
# constructor, a 200x200 Knight's Tour was found quickly -- note that we're
# creating 10s of thousands of generators then!), and is lengthy.
class Knights:
def __init__(self, m, n, hard=0):
self.m, self.n = m, n
# solve() will set up succs[i] to be a list of square #i's
# successors.
succs = self.succs = []
# Remove i0 from each of its successor's successor lists, i.e.
# successors can't go back to i0 again. Return 0 if we can
# detect this makes a solution impossible, else return 1.
def remove_from_successors(i0, len=len):
# If we remove all exits from a free square, we're dead:
# even if we move to it next, we can't leave it again.
# If we create a square with one exit, we must visit it next;
# else somebody else will have to visit it, and since there's
# only one adjacent, there won't be a way to leave it again.
            # Finally, if we create more than one free square with a
# single exit, we can only move to one of them next, leaving
# the other one a dead end.
ne0 = ne1 = 0
for i in succs[i0]:
s = succs[i]
s.remove(i0)
e = len(s)
if e == 0:
ne0 += 1
elif e == 1:
ne1 += 1
return ne0 == 0 and ne1 < 2
# Put i0 back in each of its successor's successor lists.
def add_to_successors(i0):
for i in succs[i0]:
succs[i].append(i0)
# Generate the first move.
def first():
if m < 1 or n < 1:
return
# Since we're looking for a cycle, it doesn't matter where we
# start. Starting in a corner makes the 2nd move easy.
corner = self.coords2index(0, 0)
remove_from_successors(corner)
self.lastij = corner
yield corner
add_to_successors(corner)
# Generate the second moves.
def second():
corner = self.coords2index(0, 0)
assert self.lastij == corner # i.e., we started in the corner
if m < 3 or n < 3:
return
assert len(succs[corner]) == 2
assert self.coords2index(1, 2) in succs[corner]
assert self.coords2index(2, 1) in succs[corner]
# Only two choices. Whichever we pick, the other must be the
# square picked on move m*n, as it's the only way to get back
# to (0, 0). Save its index in self.final so that moves before
# the last know it must be kept free.
for i, j in (1, 2), (2, 1):
this = self.coords2index(i, j)
final = self.coords2index(3-i, 3-j)
self.final = final
remove_from_successors(this)
succs[final].append(corner)
self.lastij = this
yield this
succs[final].remove(corner)
add_to_successors(this)
# Generate moves 3 thru m*n-1.
def advance(len=len):
# If some successor has only one exit, must take it.
# Else favor successors with fewer exits.
candidates = []
for i in succs[self.lastij]:
e = len(succs[i])
assert e > 0, "else remove_from_successors() pruning flawed"
if e == 1:
candidates = [(e, i)]
break
candidates.append((e, i))
else:
candidates.sort()
for e, i in candidates:
if i != self.final:
if remove_from_successors(i):
self.lastij = i
yield i
add_to_successors(i)
# Generate moves 3 thru m*n-1. Alternative version using a
# stronger (but more expensive) heuristic to order successors.
# Since the # of backtracking levels is m*n, a poor move early on
# can take eons to undo. Smallest square board for which this
# matters a lot is 52x52.
def advance_hard(vmid=(m-1)/2.0, hmid=(n-1)/2.0, len=len):
# If some successor has only one exit, must take it.
# Else favor successors with fewer exits.
# Break ties via max distance from board centerpoint (favor
# corners and edges whenever possible).
candidates = []
for i in succs[self.lastij]:
e = len(succs[i])
assert e > 0, "else remove_from_successors() pruning flawed"
if e == 1:
candidates = [(e, 0, i)]
break
i1, j1 = self.index2coords(i)
d = (i1 - vmid)**2 + (j1 - hmid)**2
candidates.append((e, -d, i))
else:
candidates.sort()
for e, d, i in candidates:
if i != self.final:
if remove_from_successors(i):
self.lastij = i
yield i
add_to_successors(i)
# Generate the last move.
def last():
assert self.final in succs[self.lastij]
yield self.final
if m*n < 4:
self.squaregenerators = [first]
else:
self.squaregenerators = [first, second] + \
[hard and advance_hard or advance] * (m*n - 3) + \
[last]
def coords2index(self, i, j):
assert 0 <= i < self.m
assert 0 <= j < self.n
return i * self.n + j
def index2coords(self, index):
assert 0 <= index < self.m * self.n
return divmod(index, self.n)
def _init_board(self):
succs = self.succs
del succs[:]
m, n = self.m, self.n
c2i = self.coords2index
offsets = [( 1, 2), ( 2, 1), ( 2, -1), ( 1, -2),
(-1, -2), (-2, -1), (-2, 1), (-1, 2)]
rangen = range(n)
for i in range(m):
for j in rangen:
s = [c2i(i+io, j+jo) for io, jo in offsets
if 0 <= i+io < m and
0 <= j+jo < n]
succs.append(s)
# Generate solutions.
def solve(self):
self._init_board()
for x in conjoin(self.squaregenerators):
yield x
def printsolution(self, x):
m, n = self.m, self.n
assert len(x) == m*n
w = len(str(m*n))
format = "%" + str(w) + "d"
squares = [[None] * n for i in range(m)]
k = 1
for i in x:
i1, j1 = self.index2coords(i)
squares[i1][j1] = format % k
k += 1
sep = "+" + ("-" * w + "+") * n
print sep
for i in range(m):
row = squares[i]
print "|" + "|".join(row) + "|"
print sep
conjoin_tests = """
Generate the 3-bit binary numbers in order. This illustrates dumbest-
possible use of conjoin, just to generate the full cross-product.
>>> for c in conjoin([lambda: iter((0, 1))] * 3):
... print c
[0, 0, 0]
[0, 0, 1]
[0, 1, 0]
[0, 1, 1]
[1, 0, 0]
[1, 0, 1]
[1, 1, 0]
[1, 1, 1]
For efficiency in typical backtracking apps, conjoin() yields the same list
object each time. So if you want to save away a full account of its
generated sequence, you need to copy its results.
>>> def gencopy(iterator):
... for x in iterator:
... yield x[:]
>>> for n in range(10):
... all = list(gencopy(conjoin([lambda: iter((0, 1))] * n)))
... print n, len(all), all[0] == [0] * n, all[-1] == [1] * n
0 1 True True
1 2 True True
2 4 True True
3 8 True True
4 16 True True
5 32 True True
6 64 True True
7 128 True True
8 256 True True
9 512 True True
And run an 8-queens solver.
>>> q = Queens(8)
>>> LIMIT = 2
>>> count = 0
>>> for row2col in q.solve():
... count += 1
... if count <= LIMIT:
... print "Solution", count
... q.printsolution(row2col)
Solution 1
+-+-+-+-+-+-+-+-+
|Q| | | | | | | |
+-+-+-+-+-+-+-+-+
| | | | |Q| | | |
+-+-+-+-+-+-+-+-+
| | | | | | | |Q|
+-+-+-+-+-+-+-+-+
| | | | | |Q| | |
+-+-+-+-+-+-+-+-+
| | |Q| | | | | |
+-+-+-+-+-+-+-+-+
| | | | | | |Q| |
+-+-+-+-+-+-+-+-+
| |Q| | | | | | |
+-+-+-+-+-+-+-+-+
| | | |Q| | | | |
+-+-+-+-+-+-+-+-+
Solution 2
+-+-+-+-+-+-+-+-+
|Q| | | | | | | |
+-+-+-+-+-+-+-+-+
| | | | | |Q| | |
+-+-+-+-+-+-+-+-+
| | | | | | | |Q|
+-+-+-+-+-+-+-+-+
| | |Q| | | | | |
+-+-+-+-+-+-+-+-+
| | | | | | |Q| |
+-+-+-+-+-+-+-+-+
| | | |Q| | | | |
+-+-+-+-+-+-+-+-+
| |Q| | | | | | |
+-+-+-+-+-+-+-+-+
| | | | |Q| | | |
+-+-+-+-+-+-+-+-+
>>> print count, "solutions in all."
92 solutions in all.
And run a Knight's Tour on a 10x10 board. Note that there are about
20,000 solutions even on a 6x6 board, so don't dare run this to exhaustion.
>>> k = Knights(10, 10)
>>> LIMIT = 2
>>> count = 0
>>> for x in k.solve():
... count += 1
... if count <= LIMIT:
... print "Solution", count
... k.printsolution(x)
... else:
... break
Solution 1
+---+---+---+---+---+---+---+---+---+---+
| 1| 58| 27| 34| 3| 40| 29| 10| 5| 8|
+---+---+---+---+---+---+---+---+---+---+
| 26| 35| 2| 57| 28| 33| 4| 7| 30| 11|
+---+---+---+---+---+---+---+---+---+---+
| 59|100| 73| 36| 41| 56| 39| 32| 9| 6|
+---+---+---+---+---+---+---+---+---+---+
| 74| 25| 60| 55| 72| 37| 42| 49| 12| 31|
+---+---+---+---+---+---+---+---+---+---+
| 61| 86| 99| 76| 63| 52| 47| 38| 43| 50|
+---+---+---+---+---+---+---+---+---+---+
| 24| 75| 62| 85| 54| 71| 64| 51| 48| 13|
+---+---+---+---+---+---+---+---+---+---+
| 87| 98| 91| 80| 77| 84| 53| 46| 65| 44|
+---+---+---+---+---+---+---+---+---+---+
| 90| 23| 88| 95| 70| 79| 68| 83| 14| 17|
+---+---+---+---+---+---+---+---+---+---+
| 97| 92| 21| 78| 81| 94| 19| 16| 45| 66|
+---+---+---+---+---+---+---+---+---+---+
| 22| 89| 96| 93| 20| 69| 82| 67| 18| 15|
+---+---+---+---+---+---+---+---+---+---+
Solution 2
+---+---+---+---+---+---+---+---+---+---+
| 1| 58| 27| 34| 3| 40| 29| 10| 5| 8|
+---+---+---+---+---+---+---+---+---+---+
| 26| 35| 2| 57| 28| 33| 4| 7| 30| 11|
+---+---+---+---+---+---+---+---+---+---+
| 59|100| 73| 36| 41| 56| 39| 32| 9| 6|
+---+---+---+---+---+---+---+---+---+---+
| 74| 25| 60| 55| 72| 37| 42| 49| 12| 31|
+---+---+---+---+---+---+---+---+---+---+
| 61| 86| 99| 76| 63| 52| 47| 38| 43| 50|
+---+---+---+---+---+---+---+---+---+---+
| 24| 75| 62| 85| 54| 71| 64| 51| 48| 13|
+---+---+---+---+---+---+---+---+---+---+
| 87| 98| 89| 80| 77| 84| 53| 46| 65| 44|
+---+---+---+---+---+---+---+---+---+---+
| 90| 23| 92| 95| 70| 79| 68| 83| 14| 17|
+---+---+---+---+---+---+---+---+---+---+
| 97| 88| 21| 78| 81| 94| 19| 16| 45| 66|
+---+---+---+---+---+---+---+---+---+---+
| 22| 91| 96| 93| 20| 69| 82| 67| 18| 15|
+---+---+---+---+---+---+---+---+---+---+
"""
weakref_tests = """\
Generators are weakly referencable:
>>> import weakref
>>> def gen():
... yield 'foo!'
...
>>> wr = weakref.ref(gen)
>>> wr() is gen
True
>>> p = weakref.proxy(gen)
Generator-iterators are weakly referencable as well:
>>> gi = gen()
>>> wr = weakref.ref(gi)
>>> wr() is gi
True
>>> p = weakref.proxy(gi)
>>> list(p)
['foo!']
"""
coroutine_tests = """\
Sending a value into a started generator:
>>> def f():
... print (yield 1)
... yield 2
>>> g = f()
>>> g.next()
1
>>> g.send(42)
42
2
Sending a value into a new generator produces a TypeError:
>>> f().send("foo")
Traceback (most recent call last):
...
TypeError: can't send non-None value to a just-started generator
Yield by itself yields None:
>>> def f(): yield
>>> list(f())
[None]
An obscene abuse of a yield expression within a generator expression:
>>> list((yield 21) for i in range(4))
[21, None, 21, None, 21, None, 21, None]
And a more sane, but still weird usage:
>>> def f(): list(i for i in [(yield 26)])
>>> type(f())
<type 'generator'>
A yield expression with augmented assignment.
>>> def coroutine(seq):
... count = 0
... while count < 200:
... count += yield
... seq.append(count)
>>> seq = []
>>> c = coroutine(seq)
>>> c.next()
>>> print seq
[]
>>> c.send(10)
>>> print seq
[10]
>>> c.send(10)
>>> print seq
[10, 20]
>>> c.send(10)
>>> print seq
[10, 20, 30]
Check some syntax errors for yield expressions:
>>> f=lambda: (yield 1),(yield 2) #doctest: +IGNORE_EXCEPTION_DETAIL, +NORMALIZE_WHITESPACE
Traceback (most recent call last):
...
SyntaxError: 'yield' outside function (<doctest test.test_generators.__test__.coroutine[21]>, line 1)
>>> def f(): return lambda x=(yield): 1 #doctest: +IGNORE_EXCEPTION_DETAIL, +NORMALIZE_WHITESPACE
Traceback (most recent call last):
...
SyntaxError: 'return' with argument inside generator (<doctest test.test_generators.__test__.coroutine[22]>, line 1)
>>> def f(): x = yield = y #doctest: +IGNORE_EXCEPTION_DETAIL, +NORMALIZE_WHITESPACE
Traceback (most recent call last):
...
SyntaxError: assignment to yield expression not possible (<doctest test.test_generators.__test__.coroutine[23]>, line 1)
>>> def f(): (yield bar) = y #doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
SyntaxError: can't assign to yield expression (<doctest test.test_generators.__test__.coroutine[24]>, line 1)
>>> def f(): (yield bar) += y #doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
SyntaxError: augmented assignment to yield expression not possible (<doctest test.test_generators.__test__.coroutine[25]>, line 1)
Now check some throw() conditions:
>>> def f():
... while True:
... try:
... print (yield)
... except ValueError,v:
... print "caught ValueError (%s)" % (v),
>>> import sys
>>> g = f()
>>> g.next()
>>> g.throw(ValueError) # type only
caught ValueError ()
>>> g.throw(ValueError("xyz")) # value only
caught ValueError (xyz)
>>> g.throw(ValueError, ValueError(1)) # value+matching type
caught ValueError (1)
>>> g.throw(ValueError, TypeError(1)) # mismatched type, rewrapped
caught ValueError (1)
>>> g.throw(ValueError, ValueError(1), None) # explicit None traceback
caught ValueError (1)
>>> g.throw(ValueError(1), "foo") # bad args
Traceback (most recent call last):
...
TypeError: instance exception may not have a separate value
>>> g.throw(ValueError, "foo", 23) # bad args #doctest: +IGNORE_EXCEPTION_DETAIL
Traceback (most recent call last):
...
TypeError: throw() third argument must be a traceback object
>>> def throw(g,exc):
... try:
... raise exc
... except:
... g.throw(*sys.exc_info())
>>> throw(g,ValueError) # do it with traceback included
caught ValueError ()
>>> g.send(1)
1
>>> throw(g,TypeError) # terminate the generator
Traceback (most recent call last):
...
TypeError
>>> print g.gi_frame
None
>>> g.send(2)
Traceback (most recent call last):
...
StopIteration
>>> g.throw(ValueError,6) # throw on closed generator
Traceback (most recent call last):
...
ValueError: 6
>>> f().throw(ValueError,7) # throw on just-opened generator
Traceback (most recent call last):
...
ValueError: 7
>>> f().throw("abc") # throw on just-opened generator
Traceback (most recent call last):
...
abc
Now let's try closing a generator:
>>> def f():
... try: yield
... except GeneratorExit:
... print "exiting"
>>> g = f()
>>> g.next()
>>> g.close()
exiting
>>> g.close() # should be no-op now
>>> f().close() # close on just-opened generator should be fine
>>> def f(): yield # an even simpler generator
>>> f().close() # close before opening
>>> g = f()
>>> g.next()
>>> g.close() # close normally
And finalization. But we have to force the timing of GC here, since we are running on Jython:
>>> def f():
... try: yield
... finally:
... print "exiting"
>>> g = f()
>>> g.next()
>>> del g; extra_collect()
exiting
Now let's try some ill-behaved generators:
>>> def f():
... try: yield
... except GeneratorExit:
... yield "foo!"
>>> g = f()
>>> g.next()
>>> g.close()
Traceback (most recent call last):
...
RuntimeError: generator ignored GeneratorExit
>>> g.close()
Our ill-behaved code should be invoked during GC:
>>> import sys, StringIO
>>> old, sys.stderr = sys.stderr, StringIO.StringIO()
>>> g = f()
>>> g.next()
>>> del g; extra_collect()
>>> sys.stderr.getvalue().startswith(
... "Exception RuntimeError"
... )
True
>>> sys.stderr = old
And errors thrown during closing should propagate:
>>> def f():
... try: yield
... except GeneratorExit:
... raise TypeError("fie!")
>>> g = f()
>>> g.next()
>>> g.close()
Traceback (most recent call last):
...
TypeError: fie!
Ensure that various yield expression constructs make their
enclosing function a generator:
>>> def f(): x += yield
>>> type(f())
<type 'generator'>
>>> def f(): x = yield
>>> type(f())
<type 'generator'>
>>> def f(): lambda x=(yield): 1
>>> type(f())
<type 'generator'>
>>> def f(): x=(i for i in (yield) if (yield))
>>> type(f())
<type 'generator'>
>>> def f(d): d[(yield "a")] = d[(yield "b")] = 27
>>> data = [1,2]
>>> g = f(data)
>>> type(g)
<type 'generator'>
>>> g.send(None)
'a'
>>> data
[1, 2]
>>> g.send(0)
'b'
>>> data
[27, 2]
>>> try: g.send(1)
... except StopIteration: pass
>>> data
[27, 27]
"""
refleaks_tests = """
Prior to adding cycle-GC support to itertools.tee, this code would leak
references. We add it to the standard suite so the routine refleak-tests
would trigger if it starts being uncleanable again.
>>> import itertools
>>> def leak():
... class gen:
... def __iter__(self):
... return self
... def next(self):
... return self.item
... g = gen()
... head, tail = itertools.tee(g)
... g.item = head
... return head
>>> it = leak()
Make sure to also test the involvement of the tee-internal teedataobject,
which stores returned items.
>>> item = it.next()
This test leaked at one point due to generator finalization/destruction.
It was copied from Lib/test/leakers/test_generator_cycle.py before the file
was removed.
>>> def leak():
... def gen():
... while True:
... yield g
... g = gen()
>>> leak()
This test isn't really generator related, but rather exception-in-cleanup
related. The coroutine tests (above) just happen to cause an exception in
the generator's __del__ (tp_del) method. We can also test for this
explicitly, without generators. We do have to redirect stderr to avoid
printing warnings and to doublecheck that we actually tested what we wanted
to test.
>>> import sys, StringIO
>>> from time import sleep
>>> old = sys.stderr
>>> try:
... sys.stderr = StringIO.StringIO()
... class Leaker:
... def __del__(self):
... raise RuntimeError
...
... l = Leaker()
... del l; extra_collect()
... err = sys.stderr.getvalue().strip()
... err.startswith(
... "Exception RuntimeError in <"
... )
... err.endswith("> ignored")
... len(err.splitlines())
... finally:
... sys.stderr = old
True
True
1
These refleak tests should perhaps be in a testfile of their own,
test_generators just happened to be the test that drew these out.
"""
__test__ = {"tut": tutorial_tests,
"pep": pep_tests,
"email": email_tests,
"fun": fun_tests,
"syntax": syntax_tests,
"conjoin": conjoin_tests,
"weakref": weakref_tests,
"coroutine": coroutine_tests,
"refleaks": refleaks_tests,
}
# Magic test name that regrtest.py invokes *after* importing this module.
# This worms around a bootstrap problem.
# Note that doctest and regrtest both look in sys.argv for a "-v" argument,
# so this works as expected in both ways of running regrtest.
def test_main(verbose=None):
from test import test_support, test_generators
test_support.run_doctest(test_generators, verbose)
def extra_collect():
import gc
from time import sleep
gc.collect(); sleep(1); gc.collect(); sleep(0.1); gc.collect()
# This part isn't needed for regrtest, but for running the test directly.
if __name__ == "__main__":
test_main(1)
| gpl-3.0 |
dnerdy/namesync | namesync/packages/requests/packages/chardet/utf8prober.py | 2919 | 2652 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from . import constants
from .charsetprober import CharSetProber
from .codingstatemachine import CodingStateMachine
from .mbcssm import UTF8SMModel
ONE_CHAR_PROB = 0.5
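# Rough intuition (comment added for clarity, not from the original source):
# each observed multi-byte character halves the chance that the data is *not*
# UTF-8, so get_confidence() below returns 1 - 0.99 * 0.5**n for n < 6
# multi-byte chars seen (e.g. ~0.876 for n=3) and saturates at 0.99 from
# six characters onward.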
class UTF8Prober(CharSetProber):
def __init__(self):
CharSetProber.__init__(self)
self._mCodingSM = CodingStateMachine(UTF8SMModel)
self.reset()
def reset(self):
CharSetProber.reset(self)
self._mCodingSM.reset()
self._mNumOfMBChar = 0
def get_charset_name(self):
return "utf-8"
def feed(self, aBuf):
for c in aBuf:
codingState = self._mCodingSM.next_state(c)
if codingState == constants.eError:
self._mState = constants.eNotMe
break
elif codingState == constants.eItsMe:
self._mState = constants.eFoundIt
break
elif codingState == constants.eStart:
if self._mCodingSM.get_current_charlen() >= 2:
self._mNumOfMBChar += 1
if self.get_state() == constants.eDetecting:
if self.get_confidence() > constants.SHORTCUT_THRESHOLD:
self._mState = constants.eFoundIt
return self.get_state()
def get_confidence(self):
unlike = 0.99
if self._mNumOfMBChar < 6:
for i in range(0, self._mNumOfMBChar):
unlike = unlike * ONE_CHAR_PROB
return 1.0 - unlike
else:
return unlike
| mit |
stephane-martin/salt-debian-packaging | salt-2016.3.3/salt/modules/ini_manage.py | 2 | 11110 | # -*- coding: utf-8 -*-
'''
Edit ini files (for example /etc/sysctl.conf)
:maintainer: <[email protected]>
:maturity: new
:depends: re
:platform: all
'''
# Import Python libs
from __future__ import print_function
from __future__ import absolute_import
import re
import json
from salt.utils.odict import OrderedDict
from salt.utils import fopen as _fopen
__virtualname__ = 'ini'
def __virtual__():
'''
Rename to ini
'''
return __virtualname__
ini_regx = re.compile(r'^\s*\[(.+?)\]\s*$', flags=re.M)
com_regx = re.compile(r'^\s*(#|;)\s*(.*)')
indented_regx = re.compile(r'(\s+)(.*)')
opt_regx = re.compile(r'(\s*)(.+?)\s*(\=|\:)\s*(.*)\s*')
def set_option(file_name, sections=None):
'''
Edit an ini file, replacing one or more sections. Returns a dictionary
containing the changes made.
file_name
path of ini_file
sections : None
A dictionary representing the sections to be edited ini file
The keys are the section names and the values are the dictionary
containing the options
If the Ini does not contain sections the keys and values represent the
options
API Example:
.. code-block:: python
import salt
sc = salt.client.get_local_client()
sc.cmd('target', 'ini.set_option',
['path_to_ini_file', '{"section_to_change": {"key": "value"}}'])
CLI Example:
.. code-block:: bash
salt '*' ini.set_option /path/to/ini '{section_foo: {key: value}}'
'''
sections = sections or {}
changes = {}
inifile = _Ini.get_ini_file(file_name)
if not inifile:
changes.update({'error': 'ini file not found'})
return changes
changes = inifile.update(sections)
inifile.flush()
return changes
def get_option(file_name, section, option):
'''
Get value of a key from a section in an ini file. Returns ``None`` if
no matching key was found.
API Example:
.. code-block:: python
import salt
sc = salt.client.get_local_client()
sc.cmd('target', 'ini.get_option',
[path_to_ini_file, section_name, option])
CLI Example:
.. code-block:: bash
salt '*' ini.get_option /path/to/ini section_name option_name
'''
inifile = _Ini.get_ini_file(file_name)
return inifile.get(section, {}).get(option, None)
def remove_option(file_name, section, option):
'''
Remove a key/value pair from a section in an ini file. Returns the value of
the removed key, or ``None`` if nothing was removed.
API Example:
.. code-block:: python
import salt
sc = salt.client.get_local_client()
sc.cmd('target', 'ini.remove_option',
[path_to_ini_file, section_name, option])
CLI Example:
.. code-block:: bash
salt '*' ini.remove_option /path/to/ini section_name option_name
'''
inifile = _Ini.get_ini_file(file_name)
value = inifile.get(section, {}).pop(option, None)
inifile.flush()
return value
def get_section(file_name, section):
'''
Retrieve a section from an ini file. Returns the section as dictionary. If
the section is not found, an empty dictionary is returned.
API Example:
.. code-block:: python
import salt
sc = salt.client.get_local_client()
sc.cmd('target', 'ini.get_section',
[path_to_ini_file, section_name])
CLI Example:
.. code-block:: bash
salt '*' ini.get_section /path/to/ini section_name
'''
inifile = _Ini.get_ini_file(file_name)
ret = {}
for key, value in inifile.get(section, {}).iteritems():
if key[0] != '#':
ret.update({key: value})
return ret
def remove_section(file_name, section):
'''
Remove a section in an ini file. Returns the removed section as dictionary,
or ``None`` if nothing was removed.
API Example:
.. code-block:: python
import salt
sc = salt.client.get_local_client()
sc.cmd('target', 'ini.remove_section',
[path_to_ini_file, section_name])
CLI Example:
.. code-block:: bash
salt '*' ini.remove_section /path/to/ini section_name
'''
inifile = _Ini.get_ini_file(file_name)
section = inifile.pop(section, {})
inifile.flush()
ret = {}
for key, value in section.iteritems():
if key[0] != '#':
ret.update({key: value})
return ret
class _Section(OrderedDict):
def __init__(self, name, inicontents='', seperator='=', commenter='#'):
super(_Section, self).__init__(self)
self.name = name
self.inicontents = inicontents
self.sep = seperator
self.com = commenter
def refresh(self, inicontents=None):
comment_count = 1
unknown_count = 1
curr_indent = ''
inicontents = inicontents or self.inicontents
inicontents = inicontents.strip('\n')
if not inicontents:
return
for opt in self:
self.pop(opt)
for opt_str in inicontents.split('\n'):
com_match = com_regx.match(opt_str)
if com_match:
name = '#comment{0}'.format(comment_count)
self.com = com_match.group(1)
comment_count += 1
self.update({name: opt_str})
continue
indented_match = indented_regx.match(opt_str)
if indented_match:
indent = indented_match.group(1).replace('\t', ' ')
if indent > curr_indent:
options = self.keys()
if options:
prev_opt = options[-1]
value = self.get(prev_opt)
self.update({prev_opt: '\n'.join((value, opt_str))})
continue
opt_match = opt_regx.match(opt_str)
if opt_match:
curr_indent, name, self.sep, value = opt_match.groups()
curr_indent = curr_indent.replace('\t', ' ')
self.update({name: value})
continue
name = '#unknown{0}'.format(unknown_count)
self.update({name: opt_str})
unknown_count += 1
def _uncomment_if_commented(self, opt_key):
# should be called only if opt_key is not already present
# will uncomment the key if commented and create a place holder
# for the key where the correct value can be update later
# used to preserve the ordering of comments and commented options
        # and to make sure options without sections go above any section
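        # Illustrative example (added comment): if the section contains the
        # line "# foo = old" and we now set foo, that commented line is
        # replaced in place, so the eventual "foo = <value>" keeps the
        # commented option's original position instead of landing at the end.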
options_backup = OrderedDict()
comment_index = None
for key, value in self.iteritems():
if comment_index is not None:
options_backup.update({key: value})
continue
if '#comment' not in key:
continue
opt_match = opt_regx.match(value.lstrip('#'))
if opt_match and opt_match.group(2) == opt_key:
comment_index = key
for key in options_backup:
self.pop(key)
self.pop(comment_index, None)
super(_Section, self).update({opt_key: None})
for key, value in options_backup.iteritems():
super(_Section, self).update({key: value})
def update(self, update_dict):
changes = {}
for key, value in update_dict.iteritems():
if key not in self:
changes.update({key: {'before': None,
'after': value}})
if hasattr(value, 'iteritems'):
sect = _Section(key, '', self.sep, self.com)
sect.update(value)
super(_Section, self).update({key: sect})
else:
self._uncomment_if_commented(key)
super(_Section, self).update({key: value})
else:
curr_value = self.get(key, None)
if isinstance(curr_value, _Section):
sub_changes = curr_value.update(value)
if sub_changes:
changes.update({key: sub_changes})
else:
if not curr_value == value:
changes.update({key: {'before': curr_value,
'after': value}})
super(_Section, self).update({key: value})
return changes
def gen_ini(self):
yield '\n[{0}]\n'.format(self.name)
sections_dict = OrderedDict()
for name, value in self.iteritems():
if com_regx.match(name):
yield '{0}\n'.format(value)
elif isinstance(value, _Section):
sections_dict.update({name: value})
else:
yield '{0} {1} {2}\n'.format(name, self.sep, value)
for name, value in sections_dict.iteritems():
for line in value.gen_ini():
yield line
def as_ini(self):
return ''.join(self.gen_ini())
def as_dict(self):
return dict(self)
def dump(self):
print(str(self))
def __repr__(self, _repr_running=None):
_repr_running = _repr_running or {}
super_repr = super(_Section, self).__repr__(_repr_running)
return '\n'.join((super_repr, json.dumps(self, indent=4)))
def __str__(self):
return json.dumps(self, indent=4)
def __eq__(self, item):
return (isinstance(item, self.__class__) and
self.name == item.name)
def __ne__(self, item):
return not (isinstance(item, self.__class__) and
self.name == item.name)
class _Ini(_Section):
def __init__(self, name, inicontents='', seperator='=', commenter='#'):
super(_Ini, self).__init__(name, inicontents, seperator, commenter)
def refresh(self, inicontents=None):
inicontents = inicontents or _fopen(self.name).read()
if not inicontents:
return
for opt in self:
self.pop(opt)
inicontents = ini_regx.split(inicontents)
inicontents.reverse()
super(_Ini, self).refresh(inicontents.pop())
for section_name, sect_ini in self._gen_tuples(inicontents):
sect_obj = _Section(section_name, sect_ini)
sect_obj.refresh()
self.update({sect_obj.name: sect_obj})
def flush(self):
with _fopen(self.name, 'w') as outfile:
ini_gen = self.gen_ini()
next(ini_gen)
outfile.writelines(ini_gen)
@staticmethod
def get_ini_file(file_name):
inifile = _Ini(file_name)
inifile.refresh()
return inifile
@staticmethod
def _gen_tuples(list_object):
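        # Illustrative walk-through (added comment): refresh() splits the file
        # with ini_regx and reverses the pieces, so after the preamble is
        # popped the remaining pops alternate section name / section body,
        # e.g. 'a', '\nx=1\n', 'b', '\ny=2' -> yields ('a', '\nx=1\n'),
        # then ('b', '\ny=2').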
while True:
try:
key = list_object.pop()
value = list_object.pop()
except IndexError:
raise StopIteration
else:
yield key, value
| apache-2.0 |
GbalsaC/bitnamiP | venv/lib/python2.7/site-packages/nltk/tag/api.py | 17 | 3616 | # Natural Language Toolkit: Tagger Interface
#
# Copyright (C) 2001-2012 NLTK Project
# Author: Edward Loper <[email protected]>
# Steven Bird <[email protected]> (minor additions)
# URL: <http://www.nltk.org/>
# For license information, see LICENSE.TXT
"""
Interface for tagging each token in a sentence with supplementary
information, such as its part of speech.
"""
from nltk.internals import overridden
from nltk.metrics import accuracy
from nltk.tag.util import untag
class TaggerI(object):
"""
A processing interface for assigning a tag to each token in a list.
Tags are case sensitive strings that identify some property of each
token, such as its part of speech or its sense.
Some taggers require specific types for their tokens. This is
generally indicated by the use of a sub-interface to ``TaggerI``.
For example, featureset taggers, which are subclassed from
``FeaturesetTagger``, require that each token be a ``featureset``.
Subclasses must define:
- either ``tag()`` or ``batch_tag()`` (or both)
"""
def tag(self, tokens):
"""
Determine the most appropriate tag sequence for the given
token sequence, and return a corresponding list of tagged
tokens. A tagged token is encoded as a tuple ``(token, tag)``.
:rtype: list(tuple(str, str))
"""
if overridden(self.batch_tag):
return self.batch_tag([tokens])[0]
else:
raise NotImplementedError()
def batch_tag(self, sentences):
"""
Apply ``self.tag()`` to each element of *sentences*. I.e.:
return [self.tag(sent) for sent in sentences]
"""
return [self.tag(sent) for sent in sentences]
def evaluate(self, gold):
"""
Score the accuracy of the tagger against the gold standard.
Strip the tags from the gold standard text, retag it using
the tagger, then compute the accuracy score.
:type gold: list(list(tuple(str, str)))
:param gold: The list of tagged sentences to score the tagger on.
:rtype: float
"""
tagged_sents = self.batch_tag([untag(sent) for sent in gold])
gold_tokens = sum(gold, [])
test_tokens = sum(tagged_sents, [])
return accuracy(gold_tokens, test_tokens)
def _check_params(self, train, model):
if (train and model) or (not train and not model):
raise ValueError('Must specify either training data or trained model.')
class FeaturesetTaggerI(TaggerI):
"""
A tagger that requires tokens to be ``featuresets``. A featureset
is a dictionary that maps from feature names to feature
values. See ``nltk.classify`` for more information about features
and featuresets.
"""
class HiddenMarkovModelTaggerTransformI(object):
"""
An interface for a transformation to be used as the transform parameter
of ``HiddenMarkovModelTagger``.
"""
def __init__(self):
if self.__class__ == HiddenMarkovModelTaggerTransformI:
raise NotImplementedError("Interfaces can't be instantiated")
def transform(self, labeled_symbols):
"""
:return: a list of transformed symbols
:rtype: list
:param labeled_symbols: a list of labeled untransformed symbols,
i.e. symbols that are not (token, tag) or (word, tag)
:type labeled_symbols: list
"""
raise NotImplementedError()
if __name__ == "__main__":
import doctest
doctest.testmod(optionflags=doctest.NORMALIZE_WHITESPACE)
| agpl-3.0 |
GunoH/intellij-community | python/helpers/coveragepy/coverage/parser.py | 39 | 39537 | # Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://bitbucket.org/ned/coveragepy/src/default/NOTICE.txt
"""Code parsing for coverage.py."""
import ast
import collections
import os
import re
import token
import tokenize
from coverage import env
from coverage.backward import range # pylint: disable=redefined-builtin
from coverage.backward import bytes_to_ints, string_class
from coverage.bytecode import CodeObjects
from coverage.debug import short_stack
from coverage.misc import contract, new_contract, nice_pair, join_regex
from coverage.misc import CoverageException, NoSource, NotPython
from coverage.phystokens import compile_unicode, generate_tokens, neuter_encoding_declaration
class PythonParser(object):
"""Parse code to find executable lines, excluded lines, etc.
This information is all based on static analysis: no code execution is
involved.
"""
@contract(text='unicode|None')
def __init__(self, text=None, filename=None, exclude=None):
"""
Source can be provided as `text`, the text itself, or `filename`, from
which the text will be read. Excluded lines are those that match
`exclude`, a regex.
"""
assert text or filename, "PythonParser needs either text or filename"
self.filename = filename or "<code>"
self.text = text
if not self.text:
from coverage.python import get_python_source
try:
self.text = get_python_source(self.filename)
except IOError as err:
raise NoSource(
"No source for code: '%s': %s" % (self.filename, err)
)
self.exclude = exclude
# The text lines of the parsed code.
self.lines = self.text.split('\n')
# The normalized line numbers of the statements in the code. Exclusions
# are taken into account, and statements are adjusted to their first
# lines.
self.statements = set()
# The normalized line numbers of the excluded lines in the code,
# adjusted to their first lines.
self.excluded = set()
# The raw_* attributes are only used in this class, and in
# lab/parser.py to show how this class is working.
# The line numbers that start statements, as reported by the line
# number table in the bytecode.
self.raw_statements = set()
# The raw line numbers of excluded lines of code, as marked by pragmas.
self.raw_excluded = set()
# The line numbers of class and function definitions.
self.raw_classdefs = set()
# The line numbers of docstring lines.
self.raw_docstrings = set()
# Internal detail, used by lab/parser.py.
self.show_tokens = False
# A dict mapping line numbers to lexical statement starts for
# multi-line statements.
self._multiline = {}
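        # For example (illustrative): a statement spanning source lines 3-4,
        # such as "x = (1 +\n     2)", produces {3: 3, 4: 3} here, so
        # first_line(4) == 3.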
# Lazily-created ByteParser, arc data, and missing arc descriptions.
self._byte_parser = None
self._all_arcs = None
self._missing_arc_fragments = None
@property
def byte_parser(self):
"""Create a ByteParser on demand."""
if not self._byte_parser:
self._byte_parser = ByteParser(self.text, filename=self.filename)
return self._byte_parser
def lines_matching(self, *regexes):
"""Find the lines matching one of a list of regexes.
Returns a set of line numbers, the lines that contain a match for one
of the regexes in `regexes`. The entire line needn't match, just a
part of it.
"""
combined = join_regex(regexes)
if env.PY2:
# pylint: disable=redefined-variable-type
combined = combined.decode("utf8")
regex_c = re.compile(combined)
matches = set()
for i, ltext in enumerate(self.lines, start=1):
if regex_c.search(ltext):
matches.add(i)
return matches
def _raw_parse(self):
"""Parse the source to find the interesting facts about its lines.
A handful of attributes are updated.
"""
# Find lines which match an exclusion pattern.
if self.exclude:
self.raw_excluded = self.lines_matching(self.exclude)
# Tokenize, to find excluded suites, to find docstrings, and to find
# multi-line statements.
indent = 0
exclude_indent = 0
excluding = False
excluding_decorators = False
prev_toktype = token.INDENT
first_line = None
empty = True
first_on_line = True
tokgen = generate_tokens(self.text)
for toktype, ttext, (slineno, _), (elineno, _), ltext in tokgen:
if self.show_tokens: # pragma: not covered
print("%10s %5s %-20r %r" % (
tokenize.tok_name.get(toktype, toktype),
nice_pair((slineno, elineno)), ttext, ltext
))
if toktype == token.INDENT:
indent += 1
elif toktype == token.DEDENT:
indent -= 1
elif toktype == token.NAME:
if ttext == 'class':
# Class definitions look like branches in the bytecode, so
# we need to exclude them. The simplest way is to note the
# lines with the 'class' keyword.
self.raw_classdefs.add(slineno)
elif toktype == token.OP:
if ttext == ':':
should_exclude = (elineno in self.raw_excluded) or excluding_decorators
if not excluding and should_exclude:
# Start excluding a suite. We trigger off of the colon
# token so that the #pragma comment will be recognized on
# the same line as the colon.
self.raw_excluded.add(elineno)
exclude_indent = indent
excluding = True
excluding_decorators = False
elif ttext == '@' and first_on_line:
# A decorator.
if elineno in self.raw_excluded:
excluding_decorators = True
if excluding_decorators:
self.raw_excluded.add(elineno)
elif toktype == token.STRING and prev_toktype == token.INDENT:
# Strings that are first on an indented line are docstrings.
# (a trick from trace.py in the stdlib.) This works for
# 99.9999% of cases. For the rest (!) see:
# http://stackoverflow.com/questions/1769332/x/1769794#1769794
self.raw_docstrings.update(range(slineno, elineno+1))
elif toktype == token.NEWLINE:
if first_line is not None and elineno != first_line:
# We're at the end of a line, and we've ended on a
# different line than the first line of the statement,
# so record a multi-line range.
for l in range(first_line, elineno+1):
self._multiline[l] = first_line
first_line = None
first_on_line = True
if ttext.strip() and toktype != tokenize.COMMENT:
# A non-whitespace token.
empty = False
if first_line is None:
# The token is not whitespace, and is the first in a
# statement.
first_line = slineno
# Check whether to end an excluded suite.
if excluding and indent <= exclude_indent:
excluding = False
if excluding:
self.raw_excluded.add(elineno)
first_on_line = False
prev_toktype = toktype
# Find the starts of the executable statements.
if not empty:
self.raw_statements.update(self.byte_parser._find_statements())
def first_line(self, line):
"""Return the first line number of the statement including `line`."""
return self._multiline.get(line, line)
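    # For example, if line 3 starts the multi-line statement
    #
    #     x = (1 +
    #          2)
    #
    # then _raw_parse records self._multiline == {3: 3, 4: 3}, so
    # first_line(4) == 3, while first_line(5) == 5 because lines not in
    # the map fall back to themselves.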
def first_lines(self, lines):
"""Map the line numbers in `lines` to the correct first line of the
statement.
Returns a set of the first lines.
"""
return set(self.first_line(l) for l in lines)
def translate_lines(self, lines):
"""Implement `FileReporter.translate_lines`."""
return self.first_lines(lines)
def translate_arcs(self, arcs):
"""Implement `FileReporter.translate_arcs`."""
return [(self.first_line(a), self.first_line(b)) for (a, b) in arcs]
def parse_source(self):
"""Parse source text to find executable lines, excluded lines, etc.
Sets the .excluded and .statements attributes, normalized to the first
line of multi-line statements.
"""
try:
self._raw_parse()
except (tokenize.TokenError, IndentationError) as err:
if hasattr(err, "lineno"):
lineno = err.lineno # IndentationError
else:
lineno = err.args[1][0] # TokenError
raise NotPython(
u"Couldn't parse '%s' as Python source: '%s' at line %d" % (
self.filename, err.args[0], lineno
)
)
self.excluded = self.first_lines(self.raw_excluded)
ignore = self.excluded | self.raw_docstrings
starts = self.raw_statements - ignore
self.statements = self.first_lines(starts) - ignore
def arcs(self):
"""Get information about the arcs available in the code.
Returns a set of line number pairs. Line numbers have been normalized
to the first line of multi-line statements.
"""
if self._all_arcs is None:
self._analyze_ast()
return self._all_arcs
def _analyze_ast(self):
"""Run the AstArcAnalyzer and save its results.
`_all_arcs` is the set of arcs in the code.
"""
aaa = AstArcAnalyzer(self.text, self.raw_statements, self._multiline)
aaa.analyze()
self._all_arcs = set()
for l1, l2 in aaa.arcs:
fl1 = self.first_line(l1)
fl2 = self.first_line(l2)
if fl1 != fl2:
self._all_arcs.add((fl1, fl2))
self._missing_arc_fragments = aaa.missing_arc_fragments
def exit_counts(self):
"""Get a count of exits from that each line.
Excluded lines are excluded.
"""
exit_counts = collections.defaultdict(int)
for l1, l2 in self.arcs():
if l1 < 0:
# Don't ever report -1 as a line number
continue
if l1 in self.excluded:
# Don't report excluded lines as line numbers.
continue
if l2 in self.excluded:
# Arcs to excluded lines shouldn't count.
continue
exit_counts[l1] += 1
# Class definitions have one extra exit, so remove one for each:
for l in self.raw_classdefs:
# Ensure key is there: class definitions can include excluded lines.
if l in exit_counts:
exit_counts[l] -= 1
return exit_counts
def missing_arc_description(self, start, end, executed_arcs=None):
"""Provide an English sentence describing a missing arc."""
if self._missing_arc_fragments is None:
self._analyze_ast()
actual_start = start
if (
executed_arcs and
end < 0 and end == -start and
(end, start) not in executed_arcs and
(end, start) in self._missing_arc_fragments
):
# It's a one-line callable, and we never even started it,
# and we have a message about not starting it.
start, end = end, start
fragment_pairs = self._missing_arc_fragments.get((start, end), [(None, None)])
msgs = []
for fragment_pair in fragment_pairs:
smsg, emsg = fragment_pair
if emsg is None:
if end < 0:
# Hmm, maybe we have a one-line callable, let's check.
if (-end, end) in self._missing_arc_fragments:
return self.missing_arc_description(-end, end)
emsg = "didn't jump to the function exit"
else:
emsg = "didn't jump to line {lineno}"
emsg = emsg.format(lineno=end)
msg = "line {start} {emsg}".format(start=actual_start, emsg=emsg)
if smsg is not None:
msg += ", because {smsg}".format(smsg=smsg.format(lineno=actual_start))
msgs.append(msg)
return " or ".join(msgs)
class ByteParser(object):
"""Parse bytecode to understand the structure of code."""
@contract(text='unicode')
def __init__(self, text, code=None, filename=None):
self.text = text
if code:
self.code = code
else:
try:
self.code = compile_unicode(text, filename, "exec")
except SyntaxError as synerr:
raise NotPython(
u"Couldn't parse '%s' as Python source: '%s' at line %d" % (
filename, synerr.msg, synerr.lineno
)
)
# Alternative Python implementations don't always provide all the
# attributes on code objects that we need to do the analysis.
for attr in ['co_lnotab', 'co_firstlineno', 'co_consts']:
if not hasattr(self.code, attr):
raise CoverageException(
"This implementation of Python doesn't support code analysis.\n"
"Run coverage.py under CPython for this command."
)
def child_parsers(self):
"""Iterate over all the code objects nested within this one.
The iteration includes `self` as its first value.
"""
children = CodeObjects(self.code)
return (ByteParser(self.text, code=c) for c in children)
def _bytes_lines(self):
"""Map byte offsets to line numbers in `code`.
Uses co_lnotab described in Python/compile.c to map byte offsets to
line numbers. Produces a sequence: (b0, l0), (b1, l1), ...
Only byte offsets that correspond to line numbers are included in the
results.
"""
# Adapted from dis.py in the standard library.
byte_increments = bytes_to_ints(self.code.co_lnotab[0::2])
line_increments = bytes_to_ints(self.code.co_lnotab[1::2])
last_line_num = None
line_num = self.code.co_firstlineno
byte_num = 0
for byte_incr, line_incr in zip(byte_increments, line_increments):
if byte_incr:
if line_num != last_line_num:
yield (byte_num, line_num)
last_line_num = line_num
byte_num += byte_incr
line_num += line_incr
if line_num != last_line_num:
yield (byte_num, line_num)
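    # An illustrative decoding (assuming the classic unsigned co_lnotab
    # packing used before CPython 3.6): co_lnotab == b'\x06\x01\x08\x02'
    # with co_firstlineno == 1 encodes (byte, line) increment pairs
    # (6, 1) and (8, 2), so _bytes_lines() yields:
    #
    #     (0, 1), (6, 2), (14, 4)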
def _find_statements(self):
"""Find the statements in `self.code`.
Produce a sequence of line numbers that start statements. Recurses
into all code objects reachable from `self.code`.
"""
for bp in self.child_parsers():
# Get all of the lineno information from this code.
for _, l in bp._bytes_lines():
yield l
#
# AST analysis
#
class LoopBlock(object):
"""A block on the block stack representing a `for` or `while` loop."""
def __init__(self, start):
self.start = start
self.break_exits = set()
class FunctionBlock(object):
"""A block on the block stack representing a function definition."""
def __init__(self, start, name):
self.start = start
self.name = name
class TryBlock(object):
"""A block on the block stack representing a `try` block."""
def __init__(self, handler_start=None, final_start=None):
self.handler_start = handler_start
self.final_start = final_start
self.break_from = set()
self.continue_from = set()
self.return_from = set()
self.raise_from = set()
class ArcStart(collections.namedtuple("Arc", "lineno, cause")):
"""The information needed to start an arc.
`lineno` is the line number the arc starts from. `cause` is a fragment
used as the startmsg for AstArcAnalyzer.missing_arc_fragments.
"""
def __new__(cls, lineno, cause=None):
return super(ArcStart, cls).__new__(cls, lineno, cause)
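# A small sketch of how an ArcStart carries its message fragment:
#
#     >>> a = ArcStart(12, cause="the condition on line {lineno} was never true")
#     >>> a.lineno, a.cause.format(lineno=a.lineno)
#     (12, 'the condition on line 12 was never true')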
# Define contract words that PyContract doesn't have.
# ArcStarts is for a list or set of ArcStart's.
new_contract('ArcStarts', lambda seq: all(isinstance(x, ArcStart) for x in seq))
class AstArcAnalyzer(object):
"""Analyze source text with an AST to find executable code paths."""
@contract(text='unicode', statements=set)
def __init__(self, text, statements, multiline):
self.root_node = ast.parse(neuter_encoding_declaration(text))
# TODO: I think this is happening in too many places.
self.statements = set(multiline.get(l, l) for l in statements)
self.multiline = multiline
if int(os.environ.get("COVERAGE_ASTDUMP", 0)): # pragma: debugging
# Dump the AST so that failing tests have helpful output.
print("Statements: {}".format(self.statements))
print("Multiline map: {}".format(self.multiline))
ast_dump(self.root_node)
self.arcs = set()
# A map from arc pairs to a pair of sentence fragments: (startmsg, endmsg).
# For an arc from line 17, they should be usable like:
# "Line 17 {endmsg}, because {startmsg}"
self.missing_arc_fragments = collections.defaultdict(list)
self.block_stack = []
self.debug = bool(int(os.environ.get("COVERAGE_TRACK_ARCS", 0)))
def analyze(self):
"""Examine the AST tree from `root_node` to determine possible arcs.
This sets the `arcs` attribute to be a set of (from, to) line number
pairs.
"""
for node in ast.walk(self.root_node):
node_name = node.__class__.__name__
code_object_handler = getattr(self, "_code_object__" + node_name, None)
if code_object_handler is not None:
code_object_handler(node)
def add_arc(self, start, end, smsg=None, emsg=None):
"""Add an arc, including message fragments to use if it is missing."""
if self.debug:
print("\nAdding arc: ({}, {}): {!r}, {!r}".format(start, end, smsg, emsg))
print(short_stack(limit=6))
self.arcs.add((start, end))
if smsg is not None or emsg is not None:
self.missing_arc_fragments[(start, end)].append((smsg, emsg))
def nearest_blocks(self):
"""Yield the blocks in nearest-to-farthest order."""
return reversed(self.block_stack)
@contract(returns=int)
def line_for_node(self, node):
"""What is the right line number to use for this node?
This dispatches to _line__Node functions where needed.
"""
node_name = node.__class__.__name__
handler = getattr(self, "_line__" + node_name, None)
if handler is not None:
return handler(node)
else:
return node.lineno
def _line__Assign(self, node):
return self.line_for_node(node.value)
def _line__Dict(self, node):
# Python 3.5 changed how dict literals are made.
if env.PYVERSION >= (3, 5) and node.keys:
if node.keys[0] is not None:
return node.keys[0].lineno
else:
# Unpacked dict literals `{**{'a':1}}` have None as the key,
# use the value in that case.
return node.values[0].lineno
else:
return node.lineno
def _line__List(self, node):
if node.elts:
return self.line_for_node(node.elts[0])
else:
return node.lineno
def _line__Module(self, node):
if node.body:
return self.line_for_node(node.body[0])
else:
# Modules have no line number, they always start at 1.
return 1
OK_TO_DEFAULT = set([
"Assign", "Assert", "AugAssign", "Delete", "Exec", "Expr", "Global",
"Import", "ImportFrom", "Nonlocal", "Pass", "Print",
])
@contract(returns='ArcStarts')
def add_arcs(self, node):
"""Add the arcs for `node`.
Return a set of ArcStarts, exits from this node to the next.
"""
node_name = node.__class__.__name__
handler = getattr(self, "_handle__" + node_name, None)
if handler is not None:
return handler(node)
if 0:
node_name = node.__class__.__name__
if node_name not in self.OK_TO_DEFAULT:
print("*** Unhandled: {0}".format(node))
return set([ArcStart(self.line_for_node(node), cause=None)])
@contract(returns='ArcStarts')
def add_body_arcs(self, body, from_start=None, prev_starts=None):
"""Add arcs for the body of a compound statement.
`body` is the body node. `from_start` is a single `ArcStart` that can
be the previous line in flow before this body. `prev_starts` is a set
of ArcStarts that can be the previous line. Only one of them should be
given.
Returns a set of ArcStarts, the exits from this body.
"""
if prev_starts is None:
prev_starts = set([from_start])
for body_node in body:
lineno = self.line_for_node(body_node)
first_line = self.multiline.get(lineno, lineno)
if first_line not in self.statements:
continue
for prev_start in prev_starts:
self.add_arc(prev_start.lineno, lineno, prev_start.cause)
prev_starts = self.add_arcs(body_node)
return prev_starts
def is_constant_expr(self, node):
"""Is this a compile-time constant?"""
node_name = node.__class__.__name__
if node_name in ["NameConstant", "Num"]:
return True
elif node_name == "Name":
if env.PY3 and node.id in ["True", "False", "None"]:
return True
return False
# tests to write:
# TODO: while EXPR:
# TODO: while False:
# TODO: listcomps hidden deep in other expressions
# TODO: listcomps hidden in lists: x = [[i for i in range(10)]]
# TODO: nested function definitions
@contract(exits='ArcStarts')
def process_break_exits(self, exits):
"""Add arcs due to jumps from `exits` being breaks."""
for block in self.nearest_blocks():
if isinstance(block, LoopBlock):
block.break_exits.update(exits)
break
elif isinstance(block, TryBlock) and block.final_start is not None:
block.break_from.update(exits)
break
@contract(exits='ArcStarts')
def process_continue_exits(self, exits):
"""Add arcs due to jumps from `exits` being continues."""
for block in self.nearest_blocks():
if isinstance(block, LoopBlock):
for xit in exits:
self.add_arc(xit.lineno, block.start, xit.cause)
break
elif isinstance(block, TryBlock) and block.final_start is not None:
block.continue_from.update(exits)
break
@contract(exits='ArcStarts')
def process_raise_exits(self, exits):
"""Add arcs due to jumps from `exits` being raises."""
for block in self.nearest_blocks():
if isinstance(block, TryBlock):
if block.handler_start is not None:
for xit in exits:
self.add_arc(xit.lineno, block.handler_start, xit.cause)
break
elif block.final_start is not None:
block.raise_from.update(exits)
break
elif isinstance(block, FunctionBlock):
for xit in exits:
self.add_arc(
xit.lineno, -block.start, xit.cause,
"didn't except from function '{0}'".format(block.name),
)
break
@contract(exits='ArcStarts')
def process_return_exits(self, exits):
"""Add arcs due to jumps from `exits` being returns."""
for block in self.nearest_blocks():
if isinstance(block, TryBlock) and block.final_start is not None:
block.return_from.update(exits)
break
elif isinstance(block, FunctionBlock):
for xit in exits:
self.add_arc(
xit.lineno, -block.start, xit.cause,
"didn't return from function '{0}'".format(block.name),
)
break
## Handlers
@contract(returns='ArcStarts')
def _handle__Break(self, node):
here = self.line_for_node(node)
break_start = ArcStart(here, cause="the break on line {lineno} wasn't executed")
self.process_break_exits([break_start])
return set()
@contract(returns='ArcStarts')
def _handle_decorated(self, node):
"""Add arcs for things that can be decorated (classes and functions)."""
last = self.line_for_node(node)
if node.decorator_list:
for dec_node in node.decorator_list:
dec_start = self.line_for_node(dec_node)
if dec_start != last:
self.add_arc(last, dec_start)
last = dec_start
# The definition line may have been missed, but we should have it
# in `self.statements`. For some constructs, `line_for_node` is
# not what we'd think of as the first line in the statement, so map
# it to the first one.
body_start = self.line_for_node(node.body[0])
body_start = self.multiline.get(body_start, body_start)
for lineno in range(last+1, body_start):
if lineno in self.statements:
self.add_arc(last, lineno)
last = lineno
# The body is handled in collect_arcs.
return set([ArcStart(last, cause=None)])
_handle__ClassDef = _handle_decorated
@contract(returns='ArcStarts')
def _handle__Continue(self, node):
here = self.line_for_node(node)
continue_start = ArcStart(here, cause="the continue on line {lineno} wasn't executed")
self.process_continue_exits([continue_start])
return set()
@contract(returns='ArcStarts')
def _handle__For(self, node):
start = self.line_for_node(node.iter)
self.block_stack.append(LoopBlock(start=start))
from_start = ArcStart(start, cause="the loop on line {lineno} never started")
exits = self.add_body_arcs(node.body, from_start=from_start)
# Any exit from the body will go back to the top of the loop.
for xit in exits:
self.add_arc(xit.lineno, start, xit.cause)
my_block = self.block_stack.pop()
exits = my_block.break_exits
from_start = ArcStart(start, cause="the loop on line {lineno} didn't complete")
if node.orelse:
else_exits = self.add_body_arcs(node.orelse, from_start=from_start)
exits |= else_exits
else:
# no else clause: exit from the for line.
exits.add(from_start)
return exits
_handle__AsyncFor = _handle__For
_handle__FunctionDef = _handle_decorated
_handle__AsyncFunctionDef = _handle_decorated
@contract(returns='ArcStarts')
def _handle__If(self, node):
start = self.line_for_node(node.test)
from_start = ArcStart(start, cause="the condition on line {lineno} was never true")
exits = self.add_body_arcs(node.body, from_start=from_start)
from_start = ArcStart(start, cause="the condition on line {lineno} was never false")
exits |= self.add_body_arcs(node.orelse, from_start=from_start)
return exits
@contract(returns='ArcStarts')
def _handle__Raise(self, node):
here = self.line_for_node(node)
raise_start = ArcStart(here, cause="the raise on line {lineno} wasn't executed")
self.process_raise_exits([raise_start])
# `raise` statement jumps away, no exits from here.
return set()
@contract(returns='ArcStarts')
def _handle__Return(self, node):
here = self.line_for_node(node)
return_start = ArcStart(here, cause="the return on line {lineno} wasn't executed")
self.process_return_exits([return_start])
# `return` statement jumps away, no exits from here.
return set()
@contract(returns='ArcStarts')
def _handle__Try(self, node):
if node.handlers:
handler_start = self.line_for_node(node.handlers[0])
else:
handler_start = None
if node.finalbody:
final_start = self.line_for_node(node.finalbody[0])
else:
final_start = None
try_block = TryBlock(handler_start=handler_start, final_start=final_start)
self.block_stack.append(try_block)
start = self.line_for_node(node)
exits = self.add_body_arcs(node.body, from_start=ArcStart(start, cause=None))
# We're done with the `try` body, so this block no longer handles
# exceptions. We keep the block so the `finally` clause can pick up
# flows from the handlers and `else` clause.
if node.finalbody:
try_block.handler_start = None
if node.handlers:
# If there are `except` clauses, then raises in the try body
# will already jump to them. Start this set over for raises in
# `except` and `else`.
try_block.raise_from = set([])
else:
self.block_stack.pop()
handler_exits = set()
if node.handlers:
last_handler_start = None
for handler_node in node.handlers:
handler_start = self.line_for_node(handler_node)
if last_handler_start is not None:
self.add_arc(last_handler_start, handler_start)
last_handler_start = handler_start
from_cause = "the exception caught by line {lineno} didn't happen"
from_start = ArcStart(handler_start, cause=from_cause)
handler_exits |= self.add_body_arcs(handler_node.body, from_start=from_start)
if node.orelse:
exits = self.add_body_arcs(node.orelse, prev_starts=exits)
exits |= handler_exits
if node.finalbody:
self.block_stack.pop()
final_from = ( # You can get to the `finally` clause from:
exits | # the exits of the body or `else` clause,
try_block.break_from | # or a `break`,
try_block.continue_from | # or a `continue`,
try_block.raise_from | # or a `raise`,
try_block.return_from # or a `return`.
)
exits = self.add_body_arcs(node.finalbody, prev_starts=final_from)
if try_block.break_from:
break_exits = self._combine_finally_starts(try_block.break_from, exits)
self.process_break_exits(break_exits)
if try_block.continue_from:
continue_exits = self._combine_finally_starts(try_block.continue_from, exits)
self.process_continue_exits(continue_exits)
if try_block.raise_from:
raise_exits = self._combine_finally_starts(try_block.raise_from, exits)
self.process_raise_exits(raise_exits)
if try_block.return_from:
return_exits = self._combine_finally_starts(try_block.return_from, exits)
self.process_return_exits(return_exits)
return exits
def _combine_finally_starts(self, starts, exits):
"""Helper for building the cause of `finally` branches."""
causes = []
for lineno, cause in sorted(starts):
if cause is not None:
causes.append(cause.format(lineno=lineno))
cause = " or ".join(causes)
exits = set(ArcStart(ex.lineno, cause) for ex in exits)
return exits
@contract(returns='ArcStarts')
def _handle__TryExcept(self, node):
# Python 2.7 uses separate TryExcept and TryFinally nodes. If we get
# TryExcept, it means there was no finally, so fake it, and treat as
# a general Try node.
node.finalbody = []
return self._handle__Try(node)
@contract(returns='ArcStarts')
def _handle__TryFinally(self, node):
# Python 2.7 uses separate TryExcept and TryFinally nodes. If we get
# TryFinally, see if there's a TryExcept nested inside. If so, merge
# them. Otherwise, fake fields to complete a Try node.
node.handlers = []
node.orelse = []
first = node.body[0]
if first.__class__.__name__ == "TryExcept" and node.lineno == first.lineno:
assert len(node.body) == 1
node.body = first.body
node.handlers = first.handlers
node.orelse = first.orelse
return self._handle__Try(node)
@contract(returns='ArcStarts')
def _handle__While(self, node):
constant_test = self.is_constant_expr(node.test)
start = to_top = self.line_for_node(node.test)
if constant_test:
to_top = self.line_for_node(node.body[0])
self.block_stack.append(LoopBlock(start=start))
from_start = ArcStart(start, cause="the condition on line {lineno} was never true")
exits = self.add_body_arcs(node.body, from_start=from_start)
for xit in exits:
self.add_arc(xit.lineno, to_top, xit.cause)
exits = set()
my_block = self.block_stack.pop()
exits.update(my_block.break_exits)
from_start = ArcStart(start, cause="the condition on line {lineno} was never false")
if node.orelse:
else_exits = self.add_body_arcs(node.orelse, from_start=from_start)
exits |= else_exits
else:
# No `else` clause: you can exit from the start.
if not constant_test:
exits.add(from_start)
return exits
@contract(returns='ArcStarts')
def _handle__With(self, node):
start = self.line_for_node(node)
exits = self.add_body_arcs(node.body, from_start=ArcStart(start))
return exits
_handle__AsyncWith = _handle__With
def _code_object__Module(self, node):
start = self.line_for_node(node)
if node.body:
exits = self.add_body_arcs(node.body, from_start=ArcStart(-start))
for xit in exits:
self.add_arc(xit.lineno, -start, xit.cause, "didn't exit the module")
else:
# Empty module.
self.add_arc(-start, start)
self.add_arc(start, -start)
def _code_object__FunctionDef(self, node):
start = self.line_for_node(node)
self.block_stack.append(FunctionBlock(start=start, name=node.name))
exits = self.add_body_arcs(node.body, from_start=ArcStart(-start))
self.process_return_exits(exits)
self.block_stack.pop()
_code_object__AsyncFunctionDef = _code_object__FunctionDef
def _code_object__ClassDef(self, node):
start = self.line_for_node(node)
self.add_arc(-start, start)
exits = self.add_body_arcs(node.body, from_start=ArcStart(start))
for xit in exits:
self.add_arc(
xit.lineno, -start, xit.cause,
"didn't exit the body of class '{0}'".format(node.name),
)
def _make_oneline_code_method(noun): # pylint: disable=no-self-argument
"""A function to make methods for online callable _code_object__ methods."""
def _code_object__oneline_callable(self, node):
start = self.line_for_node(node)
self.add_arc(-start, start, None, "didn't run the {0} on line {1}".format(noun, start))
self.add_arc(
start, -start, None,
"didn't finish the {0} on line {1}".format(noun, start),
)
return _code_object__oneline_callable
_code_object__Lambda = _make_oneline_code_method("lambda")
_code_object__GeneratorExp = _make_oneline_code_method("generator expression")
_code_object__DictComp = _make_oneline_code_method("dictionary comprehension")
_code_object__SetComp = _make_oneline_code_method("set comprehension")
if env.PY3:
_code_object__ListComp = _make_oneline_code_method("list comprehension")
SKIP_DUMP_FIELDS = ["ctx"]
def _is_simple_value(value):
"""Is `value` simple enough to be displayed on a single line?"""
return (
value in [None, [], (), {}, set()] or
isinstance(value, (string_class, int, float))
)
# TODO: a test of ast_dump?
def ast_dump(node, depth=0):
"""Dump the AST for `node`.
This recursively walks the AST, printing a readable version.
"""
indent = " " * depth
if not isinstance(node, ast.AST):
print("{0}<{1} {2!r}>".format(indent, node.__class__.__name__, node))
return
lineno = getattr(node, "lineno", None)
if lineno is not None:
linemark = " @ {0}".format(node.lineno)
else:
linemark = ""
head = "{0}<{1}{2}".format(indent, node.__class__.__name__, linemark)
named_fields = [
(name, value)
for name, value in ast.iter_fields(node)
if name not in SKIP_DUMP_FIELDS
]
if not named_fields:
print("{0}>".format(head))
elif len(named_fields) == 1 and _is_simple_value(named_fields[0][1]):
field_name, value = named_fields[0]
print("{0} {1}: {2!r}>".format(head, field_name, value))
else:
print(head)
if 0:
print("{0}# mro: {1}".format(
indent, ", ".join(c.__name__ for c in node.__class__.__mro__[1:]),
))
next_indent = indent + " "
for field_name, value in named_fields:
prefix = "{0}{1}:".format(next_indent, field_name)
if _is_simple_value(value):
print("{0} {1!r}".format(prefix, value))
elif isinstance(value, list):
print("{0} [".format(prefix))
for n in value:
ast_dump(n, depth + 8)
print("{0}]".format(next_indent))
else:
print(prefix)
ast_dump(value, depth + 8)
print("{0}>".format(indent))
| apache-2.0 |
robblack007/clase-dinamica-robot | Practicas/practica2/robots/graficacion.py | 4 | 3522 | def rotacion_geom_3d(pathpatch, rotacion):
'''
    This function takes an object attached to a matplotlib plot and applies a
    rotation matrix to it, converting the patch to three dimensions.
    >>> from robots.utilidades import DH
    >>> from robots.graficacion import rotacion_geom_3d
    >>> from matplotlib.pyplot import figure
    >>> from matplotlib.patches import Wedge
    >>> from numpy import pi, array
    >>> mag = pi/3
    >>> fig = figure()
    >>> ax = fig.gca()
    >>> marcador = Wedge([0, 0], 0.2, 0, mag*180/pi)
    >>> ax.add_patch(marcador)
    >>> ori = DH([0, 0, pi/2, pi/3])[:3, :3]
    >>> rotacion_geom_3d(marcador, array(ori))
'''
from numpy import array, eye, dot
from mpl_toolkits.mplot3d import art3d
path = pathpatch.get_path()
trans = pathpatch.get_patch_transform()
path = trans.transform_path(path)
pathpatch.__class__ = art3d.PathPatch3D
pathpatch._code3d = path.codes
pathpatch._facecolor3d = pathpatch.get_facecolor
verts = path.vertices
pathpatch._segment3d = array([dot(rotacion, (x, y, 0)) for x, y in verts])
def traslacion_geom_3d(pathpatch, delta):
'''
    This function takes an object attached to a matplotlib plot and applies a
    translation given as a numpy array (the object must already be
    three-dimensional).
    >>> from robots.utilidades import DH
    >>> from robots.graficacion import rotacion_geom_3d, traslacion_geom_3d
    >>> from matplotlib.pyplot import figure
    >>> from matplotlib.patches import Wedge
    >>> from numpy import pi, array
    >>> mag = pi/3
    >>> fig = figure()
    >>> ax = fig.gca()
    >>> marcador = Wedge([0, 0], 0.2, 0, mag*180/pi)
    >>> ax.add_patch(marcador)
    >>> ori = DH([0, 0, pi/2, pi/3])[:3, :3]
    >>> rotacion_geom_3d(marcador, array(ori))
    >>> traslacion_geom_3d(marcador, array([0,0,1]))
'''
pathpatch._segment3d += delta
def eje_rotacional(eje, pos, ori, mag):
'''
    This function takes a matplotlib axes and draws an indicator of the
    rotational degree of freedom, normal to the axis of rotation.
    >>> from robots.utilidades import DH
    >>> from robots.graficacion import eje_rotacional
    >>> from matplotlib.pyplot import figure
    >>> from numpy import pi
    >>> fig = figure()
    >>> ax = fig.gca()
    >>> ori = DH([0, 0, pi/2, pi/3])[:3, :3]
    >>> pos = DH([0, 0, pi/2, pi/3])[:3, 3:]
    >>> eje_rotacional(ax, pos, ori, pi/3)
'''
from numpy import pi, array
from matplotlib.patches import Wedge
marcador = Wedge([0, 0], 0.2, 0, mag*180/pi, alpha=0.8)
eje.add_patch(marcador)
rotacion_geom_3d(marcador, array(ori))
traslacion_geom_3d(marcador, array(pos.T.tolist()))
marcador = Wedge([0, 0], 0.2, mag*180/pi, 360, ec="None", color="k", alpha=0.2)
eje.add_patch(marcador)
rotacion_geom_3d(marcador, array(ori))
traslacion_geom_3d(marcador, array(pos.T.tolist()))
def configuracion_grafica(eje):
'''
    This function configures a plot's axes so the figure is displayed with a
    consistent style.
    >>> from robots.graficacion import configuracion_grafica
    >>> from matplotlib.pyplot import figure
    >>> fig = figure()
    >>> ax = fig.gca()
    >>> configuracion_grafica(ax)
'''
eje.set_facecolor((1.0, 1.0, 1.0, 1.0))
#eje._axis3don = False
eje.view_init(elev=30., azim=45.)
eje.set_xlim3d([-0.6, 0.6])
eje.set_ylim3d([-0.6, 0.6])
eje.set_zlim3d([-0.1, 1.1]) | mit |
huobaowangxi/scikit-learn | examples/text/hashing_vs_dict_vectorizer.py | 284 | 3265 | """
===========================================
FeatureHasher and DictVectorizer Comparison
===========================================
Compares FeatureHasher and DictVectorizer by using both to vectorize
text documents.
The example demonstrates syntax and speed only; it doesn't actually do
anything useful with the extracted vectors. See the example scripts
{document_classification_20newsgroups,clustering}.py for actual learning
on text documents.
A discrepancy between the number of terms reported for DictVectorizer and
for FeatureHasher is to be expected due to hash collisions.
"""
# Author: Lars Buitinck <[email protected]>
# License: BSD 3 clause
from __future__ import print_function
from collections import defaultdict
import re
import sys
from time import time
import numpy as np
from sklearn.datasets import fetch_20newsgroups
from sklearn.feature_extraction import DictVectorizer, FeatureHasher
def n_nonzero_columns(X):
"""Returns the number of non-zero columns in a CSR matrix X."""
return len(np.unique(X.nonzero()[1]))
def tokens(doc):
"""Extract tokens from doc.
This uses a simple regex to break strings into tokens. For a more
principled approach, see CountVectorizer or TfidfVectorizer.
"""
return (tok.lower() for tok in re.findall(r"\w+", doc))
def token_freqs(doc):
"""Extract a dict mapping tokens from doc to their frequencies."""
freq = defaultdict(int)
for tok in tokens(doc):
freq[tok] += 1
return freq
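# For example (repr shown for Python 2; dict ordering may vary):
#
#     >>> token_freqs("To be or not to be")
#     defaultdict(<type 'int'>, {'to': 2, 'be': 2, 'or': 1, 'not': 1})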
categories = [
'alt.atheism',
'comp.graphics',
'comp.sys.ibm.pc.hardware',
'misc.forsale',
'rec.autos',
'sci.space',
'talk.religion.misc',
]
# Uncomment the following line to use a larger set (11k+ documents)
#categories = None
print(__doc__)
print("Usage: %s [n_features_for_hashing]" % sys.argv[0])
print(" The default number of features is 2**18.")
print()
try:
n_features = int(sys.argv[1])
except IndexError:
n_features = 2 ** 18
except ValueError:
print("not a valid number of features: %r" % sys.argv[1])
sys.exit(1)
print("Loading 20 newsgroups training data")
raw_data = fetch_20newsgroups(subset='train', categories=categories).data
data_size_mb = sum(len(s.encode('utf-8')) for s in raw_data) / 1e6
print("%d documents - %0.3fMB" % (len(raw_data), data_size_mb))
print()
print("DictVectorizer")
t0 = time()
vectorizer = DictVectorizer()
vectorizer.fit_transform(token_freqs(d) for d in raw_data)
duration = time() - t0
print("done in %fs at %0.3fMB/s" % (duration, data_size_mb / duration))
print("Found %d unique terms" % len(vectorizer.get_feature_names()))
print()
print("FeatureHasher on frequency dicts")
t0 = time()
hasher = FeatureHasher(n_features=n_features)
X = hasher.transform(token_freqs(d) for d in raw_data)
duration = time() - t0
print("done in %fs at %0.3fMB/s" % (duration, data_size_mb / duration))
print("Found %d unique terms" % n_nonzero_columns(X))
print()
print("FeatureHasher on raw tokens")
t0 = time()
hasher = FeatureHasher(n_features=n_features, input_type="string")
X = hasher.transform(tokens(d) for d in raw_data)
duration = time() - t0
print("done in %fs at %0.3fMB/s" % (duration, data_size_mb / duration))
print("Found %d unique terms" % n_nonzero_columns(X))
| bsd-3-clause |
mikedanese/contrib | diurnal/Godeps/_workspace/src/github.com/ugorji/go/codec/test.py | 1138 | 3876 | #!/usr/bin/env python
# This will create golden files in a directory passed to it.
# A Test calls this internally to create the golden files
# So it can process them (so we don't have to checkin the files).
# Ensure msgpack-python and cbor are installed first, using:
# sudo apt-get install python-dev
# sudo apt-get install python-pip
# pip install --user msgpack-python msgpack-rpc-python cbor
import cbor, msgpack, msgpackrpc, sys, os, threading
def get_test_data_list():
# get list with all primitive types, and a combo type
l0 = [
-8,
-1616,
-32323232,
-6464646464646464,
192,
1616,
32323232,
6464646464646464,
192,
-3232.0,
-6464646464.0,
3232.0,
6464646464.0,
False,
True,
None,
u"someday",
u"",
u"bytestring",
1328176922000002000,
-2206187877999998000,
270,
-2013855847999995777,
#-6795364578871345152,
]
l1 = [
{ "true": True,
"false": False },
{ "true": "True",
"false": False,
"uint16(1616)": 1616 },
{ "list": [1616, 32323232, True, -3232.0, {"TRUE":True, "FALSE":False}, [True, False] ],
"int32":32323232, "bool": True,
"LONG STRING": "123456789012345678901234567890123456789012345678901234567890",
"SHORT STRING": "1234567890" },
{ True: "true", 8: False, "false": 0 }
]
l = []
l.extend(l0)
l.append(l0)
l.extend(l1)
return l
def build_test_data(destdir):
l = get_test_data_list()
for i in range(len(l)):
# packer = msgpack.Packer()
serialized = msgpack.dumps(l[i])
f = open(os.path.join(destdir, str(i) + '.msgpack.golden'), 'wb')
f.write(serialized)
f.close()
serialized = cbor.dumps(l[i])
f = open(os.path.join(destdir, str(i) + '.cbor.golden'), 'wb')
f.write(serialized)
f.close()
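# A minimal sketch of reading one golden file back (the path is hypothetical,
# assuming build_test_data was pointed at a 'testdata/' directory; element 0
# of the test data list is -8):
#
#     >>> with open('testdata/0.msgpack.golden', 'rb') as f:
#     ...     msgpack.loads(f.read())
#     -8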
def doRpcServer(port, stopTimeSec):
class EchoHandler(object):
def Echo123(self, msg1, msg2, msg3):
return ("1:%s 2:%s 3:%s" % (msg1, msg2, msg3))
def EchoStruct(self, msg):
return ("%s" % msg)
addr = msgpackrpc.Address('localhost', port)
server = msgpackrpc.Server(EchoHandler())
server.listen(addr)
# run thread to stop it after stopTimeSec seconds if > 0
if stopTimeSec > 0:
def myStopRpcServer():
server.stop()
t = threading.Timer(stopTimeSec, myStopRpcServer)
t.start()
server.start()
def doRpcClientToPythonSvc(port):
address = msgpackrpc.Address('localhost', port)
client = msgpackrpc.Client(address, unpack_encoding='utf-8')
print client.call("Echo123", "A1", "B2", "C3")
print client.call("EchoStruct", {"A" :"Aa", "B":"Bb", "C":"Cc"})
def doRpcClientToGoSvc(port):
# print ">>>> port: ", port, " <<<<<"
address = msgpackrpc.Address('localhost', port)
client = msgpackrpc.Client(address, unpack_encoding='utf-8')
print client.call("TestRpcInt.Echo123", ["A1", "B2", "C3"])
print client.call("TestRpcInt.EchoStruct", {"A" :"Aa", "B":"Bb", "C":"Cc"})
def doMain(args):
if len(args) == 2 and args[0] == "testdata":
build_test_data(args[1])
elif len(args) == 3 and args[0] == "rpc-server":
doRpcServer(int(args[1]), int(args[2]))
elif len(args) == 2 and args[0] == "rpc-client-python-service":
doRpcClientToPythonSvc(int(args[1]))
elif len(args) == 2 and args[0] == "rpc-client-go-service":
doRpcClientToGoSvc(int(args[1]))
else:
print("Usage: test.py " +
"[testdata|rpc-server|rpc-client-python-service|rpc-client-go-service] ...")
if __name__ == "__main__":
doMain(sys.argv[1:])
| apache-2.0 |
louisLouL/pair_trading | capstone_env/lib/python3.6/site-packages/matplotlib/lines.py | 2 | 50873 | """
This module contains all the 2D line class which can draw with a
variety of line styles, markers and colors.
"""
# TODO: expose cap and join style attrs
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import warnings
import numpy as np
from . import artist, colors as mcolors, docstring, rcParams
from .artist import Artist, allow_rasterization
from .cbook import (
_to_unmasked_float_array, iterable, is_numlike, ls_mapper, ls_mapper_r,
STEP_LOOKUP_MAP)
from .markers import MarkerStyle
from .path import Path
from .transforms import Bbox, TransformedPath, IdentityTransform
# Imported here for backward compatibility, even though they don't
# really belong.
from numpy import ma
from . import _path
from .markers import (
CARETLEFT, CARETRIGHT, CARETUP, CARETDOWN,
CARETLEFTBASE, CARETRIGHTBASE, CARETUPBASE, CARETDOWNBASE,
TICKLEFT, TICKRIGHT, TICKUP, TICKDOWN)
def _get_dash_pattern(style):
"""Convert linestyle -> dash pattern
"""
# go from short hand -> full strings
if isinstance(style, six.string_types):
style = ls_mapper.get(style, style)
# un-dashed styles
if style in ['solid', 'None']:
offset, dashes = None, None
# dashed styles
elif style in ['dashed', 'dashdot', 'dotted']:
offset = 0
dashes = tuple(rcParams['lines.{}_pattern'.format(style)])
#
elif isinstance(style, tuple):
offset, dashes = style
else:
raise ValueError('Unrecognized linestyle: %s' % str(style))
# normalize offset to be positive and shorter than the dash cycle
if dashes is not None and offset is not None:
dsum = sum(dashes)
if dsum:
offset %= dsum
return offset, dashes
def _scale_dashes(offset, dashes, lw):
if not rcParams['lines.scale_dashes']:
return offset, dashes
scaled_offset = scaled_dashes = None
if offset is not None:
scaled_offset = offset * lw
if dashes is not None:
scaled_dashes = [x * lw if x is not None else None
for x in dashes]
return scaled_offset, scaled_dashes
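# Illustrative behaviour of the two helpers above. The tuple form is
# rcParams-independent; the scaling example assumes
# rcParams['lines.scale_dashes'] is True (its default):
#
#     >>> _get_dash_pattern('solid')
#     (None, None)
#     >>> _get_dash_pattern((0, (5, 2)))    # explicit (offset, on/off) pattern
#     (0, (5, 2))
#     >>> _scale_dashes(0, [5, 2], 2)       # dash lengths scale with linewidth
#     (0, [10, 4])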
def segment_hits(cx, cy, x, y, radius):
"""
Determine if any line segments are within radius of a
point. Returns the list of line segments that are within that
radius.
"""
# Process single points specially
if len(x) < 2:
res, = np.nonzero((cx - x) ** 2 + (cy - y) ** 2 <= radius ** 2)
return res
# We need to lop the last element off a lot.
xr, yr = x[:-1], y[:-1]
# Only look at line segments whose nearest point to C on the line
# lies within the segment.
dx, dy = x[1:] - xr, y[1:] - yr
Lnorm_sq = dx ** 2 + dy ** 2 # Possibly want to eliminate Lnorm==0
u = ((cx - xr) * dx + (cy - yr) * dy) / Lnorm_sq
candidates = (u >= 0) & (u <= 1)
#if any(candidates): print "candidates",xr[candidates]
# Note that there is a little area near one side of each point
# which will be near neither segment, and another which will
# be near both, depending on the angle of the lines. The
# following radius test eliminates these ambiguities.
point_hits = (cx - x) ** 2 + (cy - y) ** 2 <= radius ** 2
#if any(point_hits): print "points",xr[candidates]
candidates = candidates & ~(point_hits[:-1] | point_hits[1:])
# For those candidates which remain, determine how far they lie away
# from the line.
px, py = xr + u * dx, yr + u * dy
line_hits = (cx - px) ** 2 + (cy - py) ** 2 <= radius ** 2
#if any(line_hits): print "lines",xr[candidates]
line_hits = line_hits & candidates
points, = point_hits.ravel().nonzero()
lines, = line_hits.ravel().nonzero()
#print points,lines
return np.concatenate((points, lines))
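# A quick sketch of segment_hits on a tiny polyline: the point (1, 0.5)
# lies 0.5 px from the first segment and 1 px from the second, so with
# radius 0.6 only segment 0 is reported:
#
#     >>> import numpy as np
#     >>> segment_hits(1.0, 0.5, np.array([0., 2., 2.]),
#     ...              np.array([0., 0., 2.]), 0.6)
#     array([0])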
def _mark_every_path(markevery, tpath, affine, ax_transform):
"""
Helper function that sorts out how to deal the input
`markevery` and returns the points where markers should be drawn.
Takes in the `markevery` value and the line path and returns the
sub-sampled path.
"""
# pull out the two bits of data we want from the path
codes, verts = tpath.codes, tpath.vertices
def _slice_or_none(in_v, slc):
'''
Helper function to cope with `codes` being an
ndarray or `None`
'''
if in_v is None:
return None
return in_v[slc]
# if just a float, assume starting at 0.0 and make a tuple
if isinstance(markevery, float):
markevery = (0.0, markevery)
# if just an int, assume starting at 0 and make a tuple
elif isinstance(markevery, int):
markevery = (0, markevery)
# if just an numpy int, assume starting at 0 and make a tuple
elif isinstance(markevery, np.integer):
markevery = (0, markevery.item())
if isinstance(markevery, tuple):
if len(markevery) != 2:
raise ValueError('`markevery` is a tuple but its '
'len is not 2; '
'markevery=%s' % (markevery,))
start, step = markevery
# if step is an int, old behavior
if isinstance(step, int):
#tuple of 2 int is for backwards compatibility,
if not(isinstance(start, int)):
raise ValueError('`markevery` is a tuple with '
'len 2 and second element is an int, but '
'the first element is not an int; '
'markevery=%s' % (markevery,))
# just return, we are done here
return Path(verts[slice(start, None, step)],
_slice_or_none(codes, slice(start, None, step)))
elif isinstance(step, float):
if not (isinstance(start, int) or
isinstance(start, float)):
raise ValueError('`markevery` is a tuple with '
'len 2 and second element is a float, but '
'the first element is not a float or an '
'int; '
'markevery=%s' % (markevery,))
#calc cumulative distance along path (in display
# coords):
disp_coords = affine.transform(tpath.vertices)
delta = np.empty((len(disp_coords), 2),
dtype=float)
delta[0, :] = 0.0
delta[1:, :] = (disp_coords[1:, :] -
disp_coords[:-1, :])
delta = np.sum(delta**2, axis=1)
delta = np.sqrt(delta)
delta = np.cumsum(delta)
#calc distance between markers along path based on
# the axes bounding box diagonal being a distance
# of unity:
scale = ax_transform.transform(
np.array([[0, 0], [1, 1]]))
scale = np.diff(scale, axis=0)
scale = np.sum(scale**2)
scale = np.sqrt(scale)
marker_delta = np.arange(start * scale,
delta[-1],
step * scale)
#find closest actual data point that is closest to
# the theoretical distance along the path:
inds = np.abs(delta[np.newaxis, :] -
marker_delta[:, np.newaxis])
inds = inds.argmin(axis=1)
inds = np.unique(inds)
# return, we are done here
return Path(verts[inds],
_slice_or_none(codes, inds))
else:
raise ValueError('`markevery` is a tuple with '
'len 2, but its second element is not an int '
'or a float; '
'markevery=%s' % (markevery,))
elif isinstance(markevery, slice):
# mazol tov, it's already a slice, just return
return Path(verts[markevery],
_slice_or_none(codes, markevery))
elif iterable(markevery):
#fancy indexing
try:
return Path(verts[markevery],
_slice_or_none(codes, markevery))
except (ValueError, IndexError):
raise ValueError('`markevery` is iterable but '
'not a valid form of numpy fancy indexing; '
'markevery=%s' % (markevery,))
else:
raise ValueError('Value of `markevery` is not '
'recognized; '
'markevery=%s' % (markevery,))
class Line2D(Artist):
"""
A line - the line can have both a solid linestyle connecting all
the vertices, and a marker at each vertex. Additionally, the
drawing of the solid line is influenced by the drawstyle, e.g., one
can create "stepped" lines in various styles.
"""
lineStyles = _lineStyles = { # hidden names deprecated
'-': '_draw_solid',
'--': '_draw_dashed',
'-.': '_draw_dash_dot',
':': '_draw_dotted',
'None': '_draw_nothing',
' ': '_draw_nothing',
'': '_draw_nothing',
}
_drawStyles_l = {
'default': '_draw_lines',
'steps-mid': '_draw_steps_mid',
'steps-pre': '_draw_steps_pre',
'steps-post': '_draw_steps_post',
}
_drawStyles_s = {
'steps': '_draw_steps_pre',
}
# drawStyles should now be deprecated.
drawStyles = {}
drawStyles.update(_drawStyles_l)
drawStyles.update(_drawStyles_s)
# Need a list ordered with long names first:
drawStyleKeys = list(_drawStyles_l) + list(_drawStyles_s)
# Referenced here to maintain API. These are defined in
# MarkerStyle
markers = MarkerStyle.markers
filled_markers = MarkerStyle.filled_markers
fillStyles = MarkerStyle.fillstyles
zorder = 2
validCap = ('butt', 'round', 'projecting')
validJoin = ('miter', 'round', 'bevel')
def __str__(self):
if self._label != "":
return "Line2D(%s)" % (self._label)
elif self._x is None:
return "Line2D()"
elif len(self._x) > 3:
return "Line2D((%g,%g),(%g,%g),...,(%g,%g))"\
% (self._x[0], self._y[0], self._x[0],
self._y[0], self._x[-1], self._y[-1])
else:
return "Line2D(%s)"\
% (",".join(["(%g,%g)" % (x, y) for x, y
in zip(self._x, self._y)]))
def __init__(self, xdata, ydata,
linewidth=None, # all Nones default to rc
linestyle=None,
color=None,
marker=None,
markersize=None,
markeredgewidth=None,
markeredgecolor=None,
markerfacecolor=None,
markerfacecoloralt='none',
fillstyle=None,
antialiased=None,
dash_capstyle=None,
solid_capstyle=None,
dash_joinstyle=None,
solid_joinstyle=None,
pickradius=5,
drawstyle=None,
markevery=None,
**kwargs
):
"""
Create a :class:`~matplotlib.lines.Line2D` instance with *x*
and *y* data in sequences *xdata*, *ydata*.
The kwargs are :class:`~matplotlib.lines.Line2D` properties:
%(Line2D)s
        See :meth:`set_linestyle` for a description of the line styles,
:meth:`set_marker` for a description of the markers, and
:meth:`set_drawstyle` for a description of the draw styles.
"""
Artist.__init__(self)
#convert sequences to numpy arrays
if not iterable(xdata):
raise RuntimeError('xdata must be a sequence')
if not iterable(ydata):
raise RuntimeError('ydata must be a sequence')
if linewidth is None:
linewidth = rcParams['lines.linewidth']
if linestyle is None:
linestyle = rcParams['lines.linestyle']
if marker is None:
marker = rcParams['lines.marker']
if color is None:
color = rcParams['lines.color']
if markersize is None:
markersize = rcParams['lines.markersize']
if antialiased is None:
antialiased = rcParams['lines.antialiased']
if dash_capstyle is None:
dash_capstyle = rcParams['lines.dash_capstyle']
if dash_joinstyle is None:
dash_joinstyle = rcParams['lines.dash_joinstyle']
if solid_capstyle is None:
solid_capstyle = rcParams['lines.solid_capstyle']
if solid_joinstyle is None:
solid_joinstyle = rcParams['lines.solid_joinstyle']
if isinstance(linestyle, six.string_types):
ds, ls = self._split_drawstyle_linestyle(linestyle)
if ds is not None and drawstyle is not None and ds != drawstyle:
raise ValueError("Inconsistent drawstyle ({0!r}) and "
"linestyle ({1!r})".format(drawstyle,
linestyle)
)
linestyle = ls
if ds is not None:
drawstyle = ds
if drawstyle is None:
drawstyle = 'default'
self._dashcapstyle = None
self._dashjoinstyle = None
self._solidjoinstyle = None
self._solidcapstyle = None
self.set_dash_capstyle(dash_capstyle)
self.set_dash_joinstyle(dash_joinstyle)
self.set_solid_capstyle(solid_capstyle)
self.set_solid_joinstyle(solid_joinstyle)
self._linestyles = None
self._drawstyle = None
self._linewidth = linewidth
# scaled dash + offset
self._dashSeq = None
self._dashOffset = 0
# unscaled dash + offset
# this is needed scaling the dash pattern by linewidth
self._us_dashSeq = None
self._us_dashOffset = 0
self.set_linestyle(linestyle)
self.set_drawstyle(drawstyle)
self.set_linewidth(linewidth)
self._color = None
self.set_color(color)
self._marker = MarkerStyle(marker, fillstyle)
self._markevery = None
self._markersize = None
self._antialiased = None
self.set_markevery(markevery)
self.set_antialiased(antialiased)
self.set_markersize(markersize)
self._markeredgecolor = None
self._markeredgewidth = None
self._markerfacecolor = None
self._markerfacecoloralt = None
self.set_markerfacecolor(markerfacecolor)
self.set_markerfacecoloralt(markerfacecoloralt)
self.set_markeredgecolor(markeredgecolor)
self.set_markeredgewidth(markeredgewidth)
self.verticalOffset = None
# update kwargs before updating data to give the caller a
# chance to init axes (and hence unit support)
self.update(kwargs)
self.pickradius = pickradius
self.ind_offset = 0
if is_numlike(self._picker):
self.pickradius = self._picker
self._xorig = np.asarray([])
self._yorig = np.asarray([])
self._invalidx = True
self._invalidy = True
self._x = None
self._y = None
self._xy = None
self._path = None
self._transformed_path = None
self._subslice = False
self._x_filled = None # used in subslicing; only x is needed
self.set_data(xdata, ydata)
def contains(self, mouseevent):
"""
Test whether the mouse event occurred on the line. The pick
radius determines the precision of the location test (usually
within five points of the value). Use
:meth:`~matplotlib.lines.Line2D.get_pickradius` or
:meth:`~matplotlib.lines.Line2D.set_pickradius` to view or
modify it.
Returns *True* if any values are within the radius along with
``{'ind': pointlist}``, where *pointlist* is the set of points
within the radius.
TODO: sort returned indices by distance
"""
if callable(self._contains):
return self._contains(self, mouseevent)
if not is_numlike(self.pickradius):
raise ValueError("pick radius should be a distance")
# Make sure we have data to plot
if self._invalidy or self._invalidx:
self.recache()
if len(self._xy) == 0:
return False, {}
# Convert points to pixels
transformed_path = self._get_transformed_path()
path, affine = transformed_path.get_transformed_path_and_affine()
path = affine.transform_path(path)
xy = path.vertices
xt = xy[:, 0]
yt = xy[:, 1]
# Convert pick radius from points to pixels
if self.figure is None:
warnings.warn('no figure set when check if mouse is on line')
pixels = self.pickradius
else:
pixels = self.figure.dpi / 72. * self.pickradius
# the math involved in checking for containment (here and inside of
# segment_hits) assumes that it is OK to overflow. In case the
# application has set the error flags such that an exception is raised
# on overflow, we temporarily set the appropriate error flags here and
# set them back when we are finished.
with np.errstate(all='ignore'):
# Check for collision
if self._linestyle in ['None', None]:
# If no line, return the nearby point(s)
d = (xt - mouseevent.x) ** 2 + (yt - mouseevent.y) ** 2
ind, = np.nonzero(np.less_equal(d, pixels ** 2))
else:
# If line, return the nearby segment(s)
ind = segment_hits(mouseevent.x, mouseevent.y, xt, yt, pixels)
if self._drawstyle.startswith("steps"):
ind //= 2
ind += self.ind_offset
# Return the point(s) within radius
return len(ind) > 0, dict(ind=ind)
def get_pickradius(self):
"""return the pick radius used for containment tests"""
return self.pickradius
def set_pickradius(self, d):
"""Sets the pick radius used for containment tests
ACCEPTS: float distance in points
"""
self.pickradius = d
def get_fillstyle(self):
"""
return the marker fillstyle
"""
return self._marker.get_fillstyle()
def set_fillstyle(self, fs):
"""
Set the marker fill style; 'full' means fill the whole marker.
'none' means no filling; other options are for half-filled markers.
ACCEPTS: ['full' | 'left' | 'right' | 'bottom' | 'top' | 'none']
"""
self._marker.set_fillstyle(fs)
self.stale = True
def set_markevery(self, every):
"""Set the markevery property to subsample the plot when using markers.
e.g., if `every=5`, every 5-th marker will be plotted.
ACCEPTS: [None | int | length-2 tuple of int | slice |
list/array of int | float | length-2 tuple of float]
Parameters
----------
every: None | int | length-2 tuple of int | slice | list/array of int |
float | length-2 tuple of float
Which markers to plot.
- every=None, every point will be plotted.
- every=N, every N-th marker will be plotted starting with
marker 0.
- every=(start, N), every N-th marker, starting at point
start, will be plotted.
- every=slice(start, end, N), every N-th marker, starting at
point start, upto but not including point end, will be plotted.
- every=[i, j, m, n], only markers at points i, j, m, and n
will be plotted.
- every=0.1, (i.e. a float) then markers will be spaced at
approximately equal distances along the line; the distance
along the line between markers is determined by multiplying the
display-coordinate distance of the axes bounding-box diagonal
by the value of every.
- every=(0.5, 0.1) (i.e. a length-2 tuple of float), the
same functionality as every=0.1 is exhibited but the first
marker will be 0.5 multiplied by the
              display-coordinate diagonal distance along the line.
Notes
-----
Setting the markevery property will only show markers at actual data
points. When using float arguments to set the markevery property
on irregularly spaced data, the markers will likely not appear evenly
spaced because the actual data points do not coincide with the
theoretical spacing between markers.
When using a start offset to specify the first marker, the offset will
        be from the first data point, which may be different from the first
        visible data point if the plot is zoomed in.
If zooming in on a plot when using float arguments then the actual
data points that have markers will change because the distance between
markers is always determined from the display-coordinates
axes-bounding-box-diagonal regardless of the actual axes data limits.
"""
if self._markevery != every:
self.stale = True
self._markevery = every
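    # A minimal usage sketch (hypothetical data):
    #
    #     >>> import numpy as np
    #     >>> import matplotlib.pyplot as plt
    #     >>> x = np.linspace(0, 2 * np.pi, 100)
    #     >>> line, = plt.plot(x, np.sin(x), '-o')
    #     >>> line.set_markevery(10)        # every 10th data point
    #     >>> line.set_markevery((5, 10))   # every 10th point, starting at point 5
    #     >>> line.set_markevery(0.1)       # roughly even on-screen spacing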
def get_markevery(self):
"""return the markevery setting"""
return self._markevery
def set_picker(self, p):
"""Sets the event picker details for the line.
ACCEPTS: float distance in points or callable pick function
``fn(artist, event)``
"""
if callable(p):
self._contains = p
else:
self.pickradius = p
self._picker = p
def get_window_extent(self, renderer):
bbox = Bbox([[0, 0], [0, 0]])
trans_data_to_xy = self.get_transform().transform
bbox.update_from_data_xy(trans_data_to_xy(self.get_xydata()),
ignore=True)
# correct for marker size, if any
if self._marker:
ms = (self._markersize / 72.0 * self.figure.dpi) * 0.5
bbox = bbox.padded(ms)
return bbox
@Artist.axes.setter
def axes(self, ax):
# call the set method from the base-class property
Artist.axes.fset(self, ax)
if ax is not None:
# connect unit-related callbacks
if ax.xaxis is not None:
self._xcid = ax.xaxis.callbacks.connect('units',
self.recache_always)
if ax.yaxis is not None:
self._ycid = ax.yaxis.callbacks.connect('units',
self.recache_always)
def set_data(self, *args):
"""
Set the x and y data
ACCEPTS: 2D array (rows are x, y) or two 1D arrays
"""
if len(args) == 1:
x, y = args[0]
else:
x, y = args
self.set_xdata(x)
self.set_ydata(y)
def recache_always(self):
self.recache(always=True)
def recache(self, always=False):
if always or self._invalidx:
xconv = self.convert_xunits(self._xorig)
x = _to_unmasked_float_array(xconv).ravel()
else:
x = self._x
if always or self._invalidy:
yconv = self.convert_yunits(self._yorig)
y = _to_unmasked_float_array(yconv).ravel()
else:
y = self._y
self._xy = np.column_stack(np.broadcast_arrays(x, y)).astype(float)
self._x, self._y = self._xy.T # views
self._subslice = False
if (self.axes and len(x) > 1000 and self._is_sorted(x) and
self.axes.name == 'rectilinear' and
self.axes.get_xscale() == 'linear' and
self._markevery is None and
self.get_clip_on() is True):
self._subslice = True
nanmask = np.isnan(x)
if nanmask.any():
self._x_filled = self._x.copy()
indices = np.arange(len(x))
self._x_filled[nanmask] = np.interp(indices[nanmask],
indices[~nanmask], self._x[~nanmask])
else:
self._x_filled = self._x
if self._path is not None:
interpolation_steps = self._path._interpolation_steps
else:
interpolation_steps = 1
xy = STEP_LOOKUP_MAP[self._drawstyle](*self._xy.T)
self._path = Path(np.asarray(xy).T,
_interpolation_steps=interpolation_steps)
self._transformed_path = None
self._invalidx = False
self._invalidy = False
def _transform_path(self, subslice=None):
"""
Puts a TransformedPath instance at self._transformed_path;
all invalidation of the transform is then handled by the
TransformedPath instance.
"""
# Masked arrays are now handled by the Path class itself
if subslice is not None:
xy = STEP_LOOKUP_MAP[self._drawstyle](*self._xy[subslice, :].T)
_path = Path(np.asarray(xy).T,
_interpolation_steps=self._path._interpolation_steps)
else:
_path = self._path
self._transformed_path = TransformedPath(_path, self.get_transform())
def _get_transformed_path(self):
"""
Return the :class:`~matplotlib.transforms.TransformedPath` instance
of this line.
"""
if self._transformed_path is None:
self._transform_path()
return self._transformed_path
def set_transform(self, t):
"""
set the Transformation instance used by this artist
ACCEPTS: a :class:`matplotlib.transforms.Transform` instance
"""
Artist.set_transform(self, t)
self._invalidx = True
self._invalidy = True
self.stale = True
def _is_sorted(self, x):
"""return True if x is sorted in ascending order"""
# We don't handle the monotonically decreasing case.
return _path.is_sorted(x)
@allow_rasterization
def draw(self, renderer):
"""draw the Line with `renderer` unless visibility is False"""
if not self.get_visible():
return
if self._invalidy or self._invalidx:
self.recache()
self.ind_offset = 0 # Needed for contains() method.
if self._subslice and self.axes:
x0, x1 = self.axes.get_xbound()
i0, = self._x_filled.searchsorted([x0], 'left')
i1, = self._x_filled.searchsorted([x1], 'right')
subslice = slice(max(i0 - 1, 0), i1 + 1)
self.ind_offset = subslice.start
self._transform_path(subslice)
transf_path = self._get_transformed_path()
if self.get_path_effects():
from matplotlib.patheffects import PathEffectRenderer
renderer = PathEffectRenderer(self.get_path_effects(), renderer)
renderer.open_group('line2d', self.get_gid())
if self._lineStyles[self._linestyle] != '_draw_nothing':
tpath, affine = transf_path.get_transformed_path_and_affine()
if len(tpath.vertices):
gc = renderer.new_gc()
self._set_gc_clip(gc)
ln_color_rgba = self._get_rgba_ln_color()
gc.set_foreground(ln_color_rgba, isRGBA=True)
gc.set_alpha(ln_color_rgba[3])
gc.set_antialiased(self._antialiased)
gc.set_linewidth(self._linewidth)
if self.is_dashed():
cap = self._dashcapstyle
join = self._dashjoinstyle
else:
cap = self._solidcapstyle
join = self._solidjoinstyle
gc.set_joinstyle(join)
gc.set_capstyle(cap)
gc.set_snap(self.get_snap())
if self.get_sketch_params() is not None:
gc.set_sketch_params(*self.get_sketch_params())
gc.set_dashes(self._dashOffset, self._dashSeq)
renderer.draw_path(gc, tpath, affine.frozen())
gc.restore()
if self._marker and self._markersize > 0:
gc = renderer.new_gc()
self._set_gc_clip(gc)
rgbaFace = self._get_rgba_face()
rgbaFaceAlt = self._get_rgba_face(alt=True)
edgecolor = self.get_markeredgecolor()
if (isinstance(edgecolor, six.string_types)
and edgecolor.lower() == 'none'):
gc.set_linewidth(0)
gc.set_foreground(rgbaFace, isRGBA=True)
else:
gc.set_foreground(edgecolor)
gc.set_linewidth(self._markeredgewidth)
mec = self._markeredgecolor
if (isinstance(mec, six.string_types) and mec == 'auto' and
rgbaFace is not None):
gc.set_alpha(rgbaFace[3])
else:
gc.set_alpha(self.get_alpha())
marker = self._marker
tpath, affine = transf_path.get_transformed_points_and_affine()
if len(tpath.vertices):
# subsample the markers if markevery is not None
markevery = self.get_markevery()
if markevery is not None:
subsampled = _mark_every_path(markevery, tpath,
affine, self.axes.transAxes)
else:
subsampled = tpath
snap = marker.get_snap_threshold()
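                    # A float threshold means: only snap to pixel centers
                    # when the rendered marker is at least that many
                    # pixels across.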
if type(snap) == float:
snap = renderer.points_to_pixels(self._markersize) >= snap
gc.set_snap(snap)
gc.set_joinstyle(marker.get_joinstyle())
gc.set_capstyle(marker.get_capstyle())
marker_path = marker.get_path()
marker_trans = marker.get_transform()
w = renderer.points_to_pixels(self._markersize)
if (isinstance(marker.get_marker(), six.string_types) and
marker.get_marker() == ','):
gc.set_linewidth(0)
else:
                        # The pixel marker ',' handled above is neither
                        # scaled nor stroked; every other marker is
                        # scaled to the marker size w.
marker_trans = marker_trans.scale(w)
renderer.draw_markers(gc, marker_path, marker_trans,
subsampled, affine.frozen(),
rgbaFace)
alt_marker_path = marker.get_alt_path()
if alt_marker_path:
alt_marker_trans = marker.get_alt_transform()
alt_marker_trans = alt_marker_trans.scale(w)
if (isinstance(mec, six.string_types) and mec == 'auto' and
rgbaFaceAlt is not None):
gc.set_alpha(rgbaFaceAlt[3])
else:
gc.set_alpha(self.get_alpha())
renderer.draw_markers(
gc, alt_marker_path, alt_marker_trans, subsampled,
affine.frozen(), rgbaFaceAlt)
gc.restore()
renderer.close_group('line2d')
self.stale = False
def get_antialiased(self):
return self._antialiased
def get_color(self):
return self._color
def get_drawstyle(self):
return self._drawstyle
def get_linestyle(self):
return self._linestyle
def get_linewidth(self):
return self._linewidth
def get_marker(self):
return self._marker.get_marker()
def get_markeredgecolor(self):
mec = self._markeredgecolor
if isinstance(mec, six.string_types) and mec == 'auto':
if rcParams['_internal.classic_mode']:
if self._marker.get_marker() in ('.', ','):
return self._color
if self._marker.is_filled() and self.get_fillstyle() != 'none':
return 'k' # Bad hard-wired default...
return self._color
else:
return mec
def get_markeredgewidth(self):
return self._markeredgewidth
def _get_markerfacecolor(self, alt=False):
if alt:
fc = self._markerfacecoloralt
else:
fc = self._markerfacecolor
if (isinstance(fc, six.string_types) and fc.lower() == 'auto'):
if self.get_fillstyle() == 'none':
return 'none'
else:
return self._color
else:
return fc
def get_markerfacecolor(self):
return self._get_markerfacecolor(alt=False)
def get_markerfacecoloralt(self):
return self._get_markerfacecolor(alt=True)
def get_markersize(self):
return self._markersize
def get_data(self, orig=True):
"""
Return the xdata, ydata.
If *orig* is *True*, return the original data.
"""
return self.get_xdata(orig=orig), self.get_ydata(orig=orig)
def get_xdata(self, orig=True):
"""
Return the xdata.
If *orig* is *True*, return the original data, else the
processed data.
"""
if orig:
return self._xorig
if self._invalidx:
self.recache()
return self._x
def get_ydata(self, orig=True):
"""
Return the ydata.
If *orig* is *True*, return the original data, else the
processed data.
"""
if orig:
return self._yorig
if self._invalidy:
self.recache()
return self._y
def get_path(self):
"""
Return the :class:`~matplotlib.path.Path` object associated
with this line.
"""
if self._invalidy or self._invalidx:
self.recache()
return self._path
def get_xydata(self):
"""
        Return the *xy* data as an Nx2 numpy array.
"""
if self._invalidy or self._invalidx:
self.recache()
return self._xy
def set_antialiased(self, b):
"""
        True if line should be drawn with antialiased rendering
ACCEPTS: [True | False]
"""
if self._antialiased != b:
self.stale = True
self._antialiased = b
def set_color(self, color):
"""
Set the color of the line
ACCEPTS: any matplotlib color
"""
self._color = color
self.stale = True
def set_drawstyle(self, drawstyle):
"""
Set the drawstyle of the plot
'default' connects the points with lines. The steps variants
produce step-plots. 'steps' is equivalent to 'steps-pre' and
is maintained for backward-compatibility.
ACCEPTS: ['default' | 'steps' | 'steps-pre' | 'steps-mid' |
'steps-post']
"""
if drawstyle is None:
drawstyle = 'default'
if drawstyle not in self.drawStyles:
raise ValueError('Unrecognized drawstyle {!r}'.format(drawstyle))
if self._drawstyle != drawstyle:
self.stale = True
self._drawstyle = drawstyle
def set_linewidth(self, w):
"""
Set the line width in points
ACCEPTS: float value in points
"""
w = float(w)
if self._linewidth != w:
self.stale = True
self._linewidth = w
# rescale the dashes + offset
self._dashOffset, self._dashSeq = _scale_dashes(
self._us_dashOffset, self._us_dashSeq, self._linewidth)
def _split_drawstyle_linestyle(self, ls):
        '''Split drawstyle from linestyle string.
        If *ls* contains only a drawstyle, the returned linestyle
        defaults to '-'.
Parameters
----------
ls : str
The linestyle to be processed
Returns
-------
ret_ds : str or None
            If the linestyle string contains a drawstyle prefix, return
            it; otherwise return None.
ls : str
The linestyle with the drawstyle (if any) stripped.
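        Examples
        --------
        Illustrative only: ``'steps--'`` splits into the drawstyle
        ``'steps'`` and the linestyle ``'--'``, while a plain ``'--'``
        yields ``(None, '--')``.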
'''
ret_ds = None
for ds in self.drawStyleKeys: # long names are first in the list
if ls.startswith(ds):
ret_ds = ds
if len(ls) > len(ds):
ls = ls[len(ds):]
else:
ls = '-'
break
return ret_ds, ls
def set_linestyle(self, ls):
"""
Set the linestyle of the line (also accepts drawstyles,
e.g., ``'steps--'``)
=========================== =================
linestyle description
=========================== =================
``'-'`` or ``'solid'`` solid line
``'--'`` or ``'dashed'`` dashed line
``'-.'`` or ``'dashdot'`` dash-dotted line
``':'`` or ``'dotted'`` dotted line
``'None'`` draw nothing
``' '`` draw nothing
``''`` draw nothing
=========================== =================
'steps' is equivalent to 'steps-pre' and is maintained for
backward-compatibility.
Alternatively a dash tuple of the following form can be provided::
(offset, onoffseq),
where ``onoffseq`` is an even length tuple of on and off ink
in points.
        ACCEPTS: ['solid' | 'dashed' | 'dashdot' | 'dotted' |
(offset, on-off-dash-seq) |
``'-'`` | ``'--'`` | ``'-.'`` | ``':'`` | ``'None'`` |
``' '`` | ``''``]
.. seealso::
:meth:`set_drawstyle`
To set the drawing style (stepping) of the plot.
Parameters
----------
        ls : {``'-'``, ``'--'``, ``'-.'``, ``':'``} and more; see description
The line style.
"""
if isinstance(ls, six.string_types):
ds, ls = self._split_drawstyle_linestyle(ls)
if ds is not None:
self.set_drawstyle(ds)
if ls in [' ', '', 'none']:
ls = 'None'
if ls not in self._lineStyles:
try:
ls = ls_mapper_r[ls]
except KeyError:
raise ValueError(("You passed in an invalid linestyle, "
"`{0}`. See "
"docs of Line2D.set_linestyle for "
"valid values.").format(ls))
self._linestyle = ls
else:
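            # *ls* is an (offset, onoffseq) dash tuple: record a generic
            # dashed linestyle here; _get_dash_pattern() below extracts
            # the actual offset and sequence.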
self._linestyle = '--'
# get the unscaled dashes
self._us_dashOffset, self._us_dashSeq = _get_dash_pattern(ls)
# compute the linewidth scaled dashes
self._dashOffset, self._dashSeq = _scale_dashes(
self._us_dashOffset, self._us_dashSeq, self._linewidth)
@docstring.dedent_interpd
def set_marker(self, marker):
"""
Set the line marker
ACCEPTS: :mod:`A valid marker style <matplotlib.markers>`
Parameters
----------
        marker : marker style
            See `~matplotlib.markers` for a full description of the
            possible arguments.
"""
self._marker.set_marker(marker)
self.stale = True
def set_markeredgecolor(self, ec):
"""
Set the marker edge color
ACCEPTS: any matplotlib color
"""
if ec is None:
ec = 'auto'
if self._markeredgecolor is None or \
np.any(self._markeredgecolor != ec):
self.stale = True
self._markeredgecolor = ec
def set_markeredgewidth(self, ew):
"""
Set the marker edge width in points
ACCEPTS: float value in points
"""
if ew is None:
ew = rcParams['lines.markeredgewidth']
if self._markeredgewidth != ew:
self.stale = True
self._markeredgewidth = ew
def set_markerfacecolor(self, fc):
"""
Set the marker face color.
ACCEPTS: any matplotlib color
"""
if fc is None:
fc = 'auto'
if np.any(self._markerfacecolor != fc):
self.stale = True
self._markerfacecolor = fc
def set_markerfacecoloralt(self, fc):
"""
Set the alternate marker face color.
ACCEPTS: any matplotlib color
"""
if fc is None:
fc = 'auto'
if np.any(self._markerfacecoloralt != fc):
self.stale = True
self._markerfacecoloralt = fc
def set_markersize(self, sz):
"""
Set the marker size in points
ACCEPTS: float
"""
sz = float(sz)
if self._markersize != sz:
self.stale = True
self._markersize = sz
def set_xdata(self, x):
"""
Set the data np.array for x
ACCEPTS: 1D array
"""
self._xorig = x
self._invalidx = True
self.stale = True
def set_ydata(self, y):
"""
Set the data np.array for y
ACCEPTS: 1D array
"""
self._yorig = y
self._invalidy = True
self.stale = True
def set_dashes(self, seq):
"""
        Set the dash sequence: a sequence of alternating on/off ink
        lengths, in points. If *seq* is empty or equals ``(None, None)``,
        the linestyle will be set to solid.
ACCEPTS: sequence of on/off ink in points
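        For example (illustrative), ``line.set_dashes([4, 1, 1, 1])``
        yields a dash-dot pattern: nominally 4pt on, 1pt off, 1pt on,
        1pt off, with the lengths scaled by the line width.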
"""
if seq == (None, None) or len(seq) == 0:
self.set_linestyle('-')
else:
self.set_linestyle((0, seq))
def update_from(self, other):
"""copy properties from other to self"""
Artist.update_from(self, other)
self._linestyle = other._linestyle
self._linewidth = other._linewidth
self._color = other._color
self._markersize = other._markersize
self._markerfacecolor = other._markerfacecolor
self._markerfacecoloralt = other._markerfacecoloralt
self._markeredgecolor = other._markeredgecolor
self._markeredgewidth = other._markeredgewidth
self._dashSeq = other._dashSeq
self._us_dashSeq = other._us_dashSeq
self._dashOffset = other._dashOffset
self._us_dashOffset = other._us_dashOffset
self._dashcapstyle = other._dashcapstyle
self._dashjoinstyle = other._dashjoinstyle
self._solidcapstyle = other._solidcapstyle
self._solidjoinstyle = other._solidjoinstyle
self._marker = MarkerStyle(other._marker.get_marker(),
other._marker.get_fillstyle())
self._drawstyle = other._drawstyle
def _get_rgba_face(self, alt=False):
facecolor = self._get_markerfacecolor(alt=alt)
if (isinstance(facecolor, six.string_types)
and facecolor.lower() == 'none'):
rgbaFace = None
else:
rgbaFace = mcolors.to_rgba(facecolor, self._alpha)
return rgbaFace
def _get_rgba_ln_color(self, alt=False):
return mcolors.to_rgba(self._color, self._alpha)
# some aliases....
def set_aa(self, val):
'alias for set_antialiased'
self.set_antialiased(val)
def set_c(self, val):
'alias for set_color'
self.set_color(val)
def set_ls(self, val):
"""alias for set_linestyle"""
self.set_linestyle(val)
def set_lw(self, val):
"""alias for set_linewidth"""
self.set_linewidth(val)
def set_mec(self, val):
"""alias for set_markeredgecolor"""
self.set_markeredgecolor(val)
def set_mew(self, val):
"""alias for set_markeredgewidth"""
self.set_markeredgewidth(val)
def set_mfc(self, val):
"""alias for set_markerfacecolor"""
self.set_markerfacecolor(val)
def set_mfcalt(self, val):
"""alias for set_markerfacecoloralt"""
self.set_markerfacecoloralt(val)
def set_ms(self, val):
"""alias for set_markersize"""
self.set_markersize(val)
def get_aa(self):
"""alias for get_antialiased"""
return self.get_antialiased()
def get_c(self):
"""alias for get_color"""
return self.get_color()
def get_ls(self):
"""alias for get_linestyle"""
return self.get_linestyle()
def get_lw(self):
"""alias for get_linewidth"""
return self.get_linewidth()
def get_mec(self):
"""alias for get_markeredgecolor"""
return self.get_markeredgecolor()
def get_mew(self):
"""alias for get_markeredgewidth"""
return self.get_markeredgewidth()
def get_mfc(self):
"""alias for get_markerfacecolor"""
return self.get_markerfacecolor()
    def get_mfcalt(self):
"""alias for get_markerfacecoloralt"""
return self.get_markerfacecoloralt()
def get_ms(self):
"""alias for get_markersize"""
return self.get_markersize()
def set_dash_joinstyle(self, s):
"""
Set the join style for dashed linestyles
ACCEPTS: ['miter' | 'round' | 'bevel']
"""
s = s.lower()
if s not in self.validJoin:
raise ValueError('set_dash_joinstyle passed "%s";\n' % (s,)
+ 'valid joinstyles are %s' % (self.validJoin,))
if self._dashjoinstyle != s:
self.stale = True
self._dashjoinstyle = s
def set_solid_joinstyle(self, s):
"""
Set the join style for solid linestyles
ACCEPTS: ['miter' | 'round' | 'bevel']
"""
s = s.lower()
if s not in self.validJoin:
raise ValueError('set_solid_joinstyle passed "%s";\n' % (s,)
+ 'valid joinstyles are %s' % (self.validJoin,))
if self._solidjoinstyle != s:
self.stale = True
self._solidjoinstyle = s
def get_dash_joinstyle(self):
"""
Get the join style for dashed linestyles
"""
return self._dashjoinstyle
def get_solid_joinstyle(self):
"""
Get the join style for solid linestyles
"""
return self._solidjoinstyle
def set_dash_capstyle(self, s):
"""
Set the cap style for dashed linestyles
ACCEPTS: ['butt' | 'round' | 'projecting']
"""
s = s.lower()
if s not in self.validCap:
raise ValueError('set_dash_capstyle passed "%s";\n' % (s,)
+ 'valid capstyles are %s' % (self.validCap,))
if self._dashcapstyle != s:
self.stale = True
self._dashcapstyle = s
def set_solid_capstyle(self, s):
"""
Set the cap style for solid linestyles
ACCEPTS: ['butt' | 'round' | 'projecting']
"""
s = s.lower()
if s not in self.validCap:
raise ValueError('set_solid_capstyle passed "%s";\n' % (s,)
+ 'valid capstyles are %s' % (self.validCap,))
if self._solidcapstyle != s:
self.stale = True
self._solidcapstyle = s
def get_dash_capstyle(self):
"""
Get the cap style for dashed linestyles
"""
return self._dashcapstyle
def get_solid_capstyle(self):
"""
Get the cap style for solid linestyles
"""
return self._solidcapstyle
def is_dashed(self):
        'Return True if the linestyle is a dashed style'
return self._linestyle in ('--', '-.', ':')
class VertexSelector(object):
"""
Manage the callbacks to maintain a list of selected vertices for
:class:`matplotlib.lines.Line2D`. Derived classes should override
:meth:`~matplotlib.lines.VertexSelector.process_selected` to do
something with the picks.
Here is an example which highlights the selected verts with red
circles::
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.lines as lines
class HighlightSelected(lines.VertexSelector):
def __init__(self, line, fmt='ro', **kwargs):
lines.VertexSelector.__init__(self, line)
self.markers, = self.axes.plot([], [], fmt, **kwargs)
def process_selected(self, ind, xs, ys):
self.markers.set_data(xs, ys)
self.canvas.draw()
fig = plt.figure()
ax = fig.add_subplot(111)
x, y = np.random.rand(2, 30)
line, = ax.plot(x, y, 'bs-', picker=5)
selector = HighlightSelected(line)
plt.show()
"""
def __init__(self, line):
"""
Initialize the class with a :class:`matplotlib.lines.Line2D`
instance. The line should already be added to some
:class:`matplotlib.axes.Axes` instance and should have the
picker property set.
"""
if line.axes is None:
raise RuntimeError('You must first add the line to the Axes')
if line.get_picker() is None:
raise RuntimeError('You must first set the picker property '
'of the line')
self.axes = line.axes
self.line = line
self.canvas = self.axes.figure.canvas
self.cid = self.canvas.mpl_connect('pick_event', self.onpick)
self.ind = set()
def process_selected(self, ind, xs, ys):
"""
Default "do nothing" implementation of the
:meth:`process_selected` method.
*ind* are the indices of the selected vertices. *xs* and *ys*
are the coordinates of the selected vertices.
"""
pass
def onpick(self, event):
"""When the line is picked, update the set of selected indicies."""
if event.artist is not self.line:
return
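        # Symmetric difference toggles the selection: newly picked
        # vertices are added, previously selected ones are removed.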
self.ind ^= set(event.ind)
ind = sorted(self.ind)
xdata, ydata = self.line.get_data()
self.process_selected(ind, xdata[ind], ydata[ind])
lineStyles = Line2D._lineStyles
lineMarkers = MarkerStyle.markers
drawStyles = Line2D.drawStyles
fillStyles = MarkerStyle.fillstyles
docstring.interpd.update(Line2D=artist.kwdoc(Line2D))
# You can not set the docstring of an instancemethod,
# but you can on the underlying function. Go figure.
docstring.dedent_interpd(Line2D.__init__)
| mit |
tsw-apropos/mapbiographer | mapBiographer/ui_mapbio_manager.py | 1 | 98907 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'ui_mapbio_manager.ui'
#
# Created by: PyQt4 UI code generator 4.11.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_mapbioManager(object):
def setupUi(self, mapbioManager):
mapbioManager.setObjectName(_fromUtf8("mapbioManager"))
mapbioManager.resize(788, 711)
self.gridLayout_14 = QtGui.QGridLayout(mapbioManager)
self.gridLayout_14.setObjectName(_fromUtf8("gridLayout_14"))
self.twMapBioSettings = QtGui.QTabWidget(mapbioManager)
self.twMapBioSettings.setObjectName(_fromUtf8("twMapBioSettings"))
self.tbSettings = QtGui.QWidget()
self.tbSettings.setObjectName(_fromUtf8("tbSettings"))
self.gridLayout_3 = QtGui.QGridLayout(self.tbSettings)
self.gridLayout_3.setObjectName(_fromUtf8("gridLayout_3"))
self.glMapBioSettings = QtGui.QGridLayout()
self.glMapBioSettings.setObjectName(_fromUtf8("glMapBioSettings"))
self.hlProjectDir = QtGui.QHBoxLayout()
self.hlProjectDir.setObjectName(_fromUtf8("hlProjectDir"))
self.leProjectDir = QtGui.QLineEdit(self.tbSettings)
self.leProjectDir.setEnabled(True)
self.leProjectDir.setReadOnly(True)
self.leProjectDir.setObjectName(_fromUtf8("leProjectDir"))
self.hlProjectDir.addWidget(self.leProjectDir)
self.tbSelectProjectDir = QtGui.QToolButton(self.tbSettings)
self.tbSelectProjectDir.setObjectName(_fromUtf8("tbSelectProjectDir"))
self.hlProjectDir.addWidget(self.tbSelectProjectDir)
self.glMapBioSettings.addLayout(self.hlProjectDir, 0, 1, 1, 1)
self.cbCurrentProject = QtGui.QComboBox(self.tbSettings)
self.cbCurrentProject.setObjectName(_fromUtf8("cbCurrentProject"))
self.glMapBioSettings.addWidget(self.cbCurrentProject, 1, 1, 1, 1)
self.lblCurrentProject = QtGui.QLabel(self.tbSettings)
self.lblCurrentProject.setObjectName(_fromUtf8("lblCurrentProject"))
self.glMapBioSettings.addWidget(self.lblCurrentProject, 1, 0, 1, 1)
self.lblProjectsDir = QtGui.QLabel(self.tbSettings)
self.lblProjectsDir.setObjectName(_fromUtf8("lblProjectsDir"))
self.glMapBioSettings.addWidget(self.lblProjectsDir, 0, 0, 1, 1)
self.lblOggEnc = QtGui.QLabel(self.tbSettings)
self.lblOggEnc.setObjectName(_fromUtf8("lblOggEnc"))
self.glMapBioSettings.addWidget(self.lblOggEnc, 2, 0, 1, 1)
self.horizontalLayout_2 = QtGui.QHBoxLayout()
self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
self.leOggEnc = QtGui.QLineEdit(self.tbSettings)
self.leOggEnc.setObjectName(_fromUtf8("leOggEnc"))
self.horizontalLayout_2.addWidget(self.leOggEnc)
self.tbOggEnc = QtGui.QToolButton(self.tbSettings)
self.tbOggEnc.setObjectName(_fromUtf8("tbOggEnc"))
self.horizontalLayout_2.addWidget(self.tbOggEnc)
self.glMapBioSettings.addLayout(self.horizontalLayout_2, 2, 1, 1, 1)
self.gridLayout_3.addLayout(self.glMapBioSettings, 0, 0, 1, 1)
self.hlSettingsButtons = QtGui.QHBoxLayout()
self.hlSettingsButtons.setObjectName(_fromUtf8("hlSettingsButtons"))
self.pbSaveSettings = QtGui.QPushButton(self.tbSettings)
self.pbSaveSettings.setObjectName(_fromUtf8("pbSaveSettings"))
self.hlSettingsButtons.addWidget(self.pbSaveSettings)
self.pbCancelSettings = QtGui.QPushButton(self.tbSettings)
self.pbCancelSettings.setObjectName(_fromUtf8("pbCancelSettings"))
self.hlSettingsButtons.addWidget(self.pbCancelSettings)
self.pbDeleteProject = QtGui.QPushButton(self.tbSettings)
self.pbDeleteProject.setEnabled(False)
self.pbDeleteProject.setObjectName(_fromUtf8("pbDeleteProject"))
self.hlSettingsButtons.addWidget(self.pbDeleteProject)
self.pbTransfer = QtGui.QPushButton(self.tbSettings)
self.pbTransfer.setObjectName(_fromUtf8("pbTransfer"))
self.hlSettingsButtons.addWidget(self.pbTransfer)
self.pbSystemTest = QtGui.QPushButton(self.tbSettings)
self.pbSystemTest.setObjectName(_fromUtf8("pbSystemTest"))
self.hlSettingsButtons.addWidget(self.pbSystemTest)
self.gridLayout_3.addLayout(self.hlSettingsButtons, 1, 0, 1, 1)
self.lnBelowLMBSettings = QtGui.QFrame(self.tbSettings)
self.lnBelowLMBSettings.setFrameShape(QtGui.QFrame.HLine)
self.lnBelowLMBSettings.setFrameShadow(QtGui.QFrame.Sunken)
self.lnBelowLMBSettings.setObjectName(_fromUtf8("lnBelowLMBSettings"))
self.gridLayout_3.addWidget(self.lnBelowLMBSettings, 2, 0, 1, 1)
self.lblProjectMapSettings = QtGui.QLabel(self.tbSettings)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.lblProjectMapSettings.setFont(font)
self.lblProjectMapSettings.setObjectName(_fromUtf8("lblProjectMapSettings"))
self.gridLayout_3.addWidget(self.lblProjectMapSettings, 3, 0, 1, 1)
self.frProjectMapSettings = QtGui.QFrame(self.tbSettings)
self.frProjectMapSettings.setObjectName(_fromUtf8("frProjectMapSettings"))
self.flQGISSettings = QtGui.QFormLayout(self.frProjectMapSettings)
self.flQGISSettings.setFieldGrowthPolicy(QtGui.QFormLayout.AllNonFixedFieldsGrow)
self.flQGISSettings.setMargin(0)
self.flQGISSettings.setObjectName(_fromUtf8("flQGISSettings"))
self.lblQgsProject = QtGui.QLabel(self.frProjectMapSettings)
self.lblQgsProject.setObjectName(_fromUtf8("lblQgsProject"))
self.flQGISSettings.setWidget(3, QtGui.QFormLayout.LabelRole, self.lblQgsProject)
self.hlQgsProject = QtGui.QHBoxLayout()
self.hlQgsProject.setObjectName(_fromUtf8("hlQgsProject"))
self.leQgsProject = QtGui.QLineEdit(self.frProjectMapSettings)
self.leQgsProject.setEnabled(True)
self.leQgsProject.setReadOnly(True)
self.leQgsProject.setObjectName(_fromUtf8("leQgsProject"))
self.hlQgsProject.addWidget(self.leQgsProject)
self.tbSelectQgsProject = QtGui.QToolButton(self.frProjectMapSettings)
self.tbSelectQgsProject.setObjectName(_fromUtf8("tbSelectQgsProject"))
self.hlQgsProject.addWidget(self.tbSelectQgsProject)
self.flQGISSettings.setLayout(3, QtGui.QFormLayout.FieldRole, self.hlQgsProject)
self.lblBaseLayerGroups = QtGui.QLabel(self.frProjectMapSettings)
self.lblBaseLayerGroups.setMinimumSize(QtCore.QSize(176, 0))
self.lblBaseLayerGroups.setObjectName(_fromUtf8("lblBaseLayerGroups"))
self.flQGISSettings.setWidget(4, QtGui.QFormLayout.LabelRole, self.lblBaseLayerGroups)
self.hlBaseLayers = QtGui.QHBoxLayout()
self.hlBaseLayers.setObjectName(_fromUtf8("hlBaseLayers"))
self.tblBaseGroups = QtGui.QTableWidget(self.frProjectMapSettings)
self.tblBaseGroups.setEditTriggers(QtGui.QAbstractItemView.NoEditTriggers)
self.tblBaseGroups.setSelectionMode(QtGui.QAbstractItemView.SingleSelection)
self.tblBaseGroups.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows)
self.tblBaseGroups.setObjectName(_fromUtf8("tblBaseGroups"))
self.tblBaseGroups.setColumnCount(0)
self.tblBaseGroups.setRowCount(0)
self.hlBaseLayers.addWidget(self.tblBaseGroups)
self.vlBaseGroupActions = QtGui.QVBoxLayout()
self.vlBaseGroupActions.setObjectName(_fromUtf8("vlBaseGroupActions"))
self.lblBaseGroupOptions = QtGui.QLabel(self.frProjectMapSettings)
self.lblBaseGroupOptions.setMinimumSize(QtCore.QSize(200, 0))
self.lblBaseGroupOptions.setObjectName(_fromUtf8("lblBaseGroupOptions"))
self.vlBaseGroupActions.addWidget(self.lblBaseGroupOptions)
self.cbProjectGroups = QtGui.QComboBox(self.frProjectMapSettings)
self.cbProjectGroups.setObjectName(_fromUtf8("cbProjectGroups"))
self.vlBaseGroupActions.addWidget(self.cbProjectGroups)
self.hlBaseGroups = QtGui.QHBoxLayout()
self.hlBaseGroups.setObjectName(_fromUtf8("hlBaseGroups"))
self.pbAddBaseGroup = QtGui.QPushButton(self.frProjectMapSettings)
self.pbAddBaseGroup.setObjectName(_fromUtf8("pbAddBaseGroup"))
self.hlBaseGroups.addWidget(self.pbAddBaseGroup)
self.pbRemoveBaseGroup = QtGui.QPushButton(self.frProjectMapSettings)
self.pbRemoveBaseGroup.setEnabled(False)
self.pbRemoveBaseGroup.setObjectName(_fromUtf8("pbRemoveBaseGroup"))
self.hlBaseGroups.addWidget(self.pbRemoveBaseGroup)
self.vlBaseGroupActions.addLayout(self.hlBaseGroups)
self.hlBaseLayers.addLayout(self.vlBaseGroupActions)
self.flQGISSettings.setLayout(4, QtGui.QFormLayout.FieldRole, self.hlBaseLayers)
self.lblMaxScale = QtGui.QLabel(self.frProjectMapSettings)
self.lblMaxScale.setObjectName(_fromUtf8("lblMaxScale"))
self.flQGISSettings.setWidget(8, QtGui.QFormLayout.LabelRole, self.lblMaxScale)
self.cbMaxScale = QtGui.QComboBox(self.frProjectMapSettings)
self.cbMaxScale.setObjectName(_fromUtf8("cbMaxScale"))
self.cbMaxScale.addItem(_fromUtf8(""))
self.cbMaxScale.addItem(_fromUtf8(""))
self.cbMaxScale.addItem(_fromUtf8(""))
self.cbMaxScale.addItem(_fromUtf8(""))
self.cbMaxScale.addItem(_fromUtf8(""))
self.cbMaxScale.addItem(_fromUtf8(""))
self.cbMaxScale.addItem(_fromUtf8(""))
self.cbMaxScale.addItem(_fromUtf8(""))
self.cbMaxScale.addItem(_fromUtf8(""))
self.cbMaxScale.addItem(_fromUtf8(""))
self.cbMaxScale.addItem(_fromUtf8(""))
self.cbMaxScale.addItem(_fromUtf8(""))
self.cbMaxScale.addItem(_fromUtf8(""))
self.flQGISSettings.setWidget(8, QtGui.QFormLayout.FieldRole, self.cbMaxScale)
self.lblMinScale = QtGui.QLabel(self.frProjectMapSettings)
self.lblMinScale.setObjectName(_fromUtf8("lblMinScale"))
self.flQGISSettings.setWidget(9, QtGui.QFormLayout.LabelRole, self.lblMinScale)
self.cbMinScale = QtGui.QComboBox(self.frProjectMapSettings)
self.cbMinScale.setObjectName(_fromUtf8("cbMinScale"))
self.cbMinScale.addItem(_fromUtf8(""))
self.cbMinScale.addItem(_fromUtf8(""))
self.cbMinScale.addItem(_fromUtf8(""))
self.cbMinScale.addItem(_fromUtf8(""))
self.cbMinScale.addItem(_fromUtf8(""))
self.cbMinScale.addItem(_fromUtf8(""))
self.cbMinScale.addItem(_fromUtf8(""))
self.cbMinScale.addItem(_fromUtf8(""))
self.cbMinScale.addItem(_fromUtf8(""))
self.cbMinScale.addItem(_fromUtf8(""))
self.cbMinScale.addItem(_fromUtf8(""))
self.cbMinScale.addItem(_fromUtf8(""))
self.cbMinScale.addItem(_fromUtf8(""))
self.flQGISSettings.setWidget(9, QtGui.QFormLayout.FieldRole, self.cbMinScale)
self.lblZoomNotices = QtGui.QLabel(self.frProjectMapSettings)
self.lblZoomNotices.setObjectName(_fromUtf8("lblZoomNotices"))
self.flQGISSettings.setWidget(10, QtGui.QFormLayout.LabelRole, self.lblZoomNotices)
self.cbZoomRangeNotices = QtGui.QComboBox(self.frProjectMapSettings)
self.cbZoomRangeNotices.setObjectName(_fromUtf8("cbZoomRangeNotices"))
self.cbZoomRangeNotices.addItem(_fromUtf8(""))
self.cbZoomRangeNotices.addItem(_fromUtf8(""))
self.flQGISSettings.setWidget(10, QtGui.QFormLayout.FieldRole, self.cbZoomRangeNotices)
self.lblProjectBoundary = QtGui.QLabel(self.frProjectMapSettings)
self.lblProjectBoundary.setObjectName(_fromUtf8("lblProjectBoundary"))
self.flQGISSettings.setWidget(5, QtGui.QFormLayout.LabelRole, self.lblProjectBoundary)
self.lblEnableReference = QtGui.QLabel(self.frProjectMapSettings)
self.lblEnableReference.setObjectName(_fromUtf8("lblEnableReference"))
self.flQGISSettings.setWidget(6, QtGui.QFormLayout.LabelRole, self.lblEnableReference)
self.lblReferenceLayer = QtGui.QLabel(self.frProjectMapSettings)
self.lblReferenceLayer.setObjectName(_fromUtf8("lblReferenceLayer"))
self.flQGISSettings.setWidget(7, QtGui.QFormLayout.LabelRole, self.lblReferenceLayer)
self.cbBoundaryLayer = QtGui.QComboBox(self.frProjectMapSettings)
self.cbBoundaryLayer.setObjectName(_fromUtf8("cbBoundaryLayer"))
self.flQGISSettings.setWidget(5, QtGui.QFormLayout.FieldRole, self.cbBoundaryLayer)
self.cbEnableReference = QtGui.QComboBox(self.frProjectMapSettings)
self.cbEnableReference.setObjectName(_fromUtf8("cbEnableReference"))
self.cbEnableReference.addItem(_fromUtf8(""))
self.cbEnableReference.addItem(_fromUtf8(""))
self.flQGISSettings.setWidget(6, QtGui.QFormLayout.FieldRole, self.cbEnableReference)
self.cbReferenceLayer = QtGui.QComboBox(self.frProjectMapSettings)
self.cbReferenceLayer.setObjectName(_fromUtf8("cbReferenceLayer"))
self.flQGISSettings.setWidget(7, QtGui.QFormLayout.FieldRole, self.cbReferenceLayer)
self.gridLayout_3.addWidget(self.frProjectMapSettings, 4, 0, 1, 1)
self.hlnSettingsAboveButtons = QtGui.QFrame(self.tbSettings)
self.hlnSettingsAboveButtons.setFrameShape(QtGui.QFrame.HLine)
self.hlnSettingsAboveButtons.setFrameShadow(QtGui.QFrame.Sunken)
self.hlnSettingsAboveButtons.setObjectName(_fromUtf8("hlnSettingsAboveButtons"))
self.gridLayout_3.addWidget(self.hlnSettingsAboveButtons, 5, 0, 1, 1)
self.hlLMBProjectDefaults = QtGui.QHBoxLayout()
self.hlLMBProjectDefaults.setObjectName(_fromUtf8("hlLMBProjectDefaults"))
self.pbSaveProjectMapSettings = QtGui.QPushButton(self.tbSettings)
self.pbSaveProjectMapSettings.setObjectName(_fromUtf8("pbSaveProjectMapSettings"))
self.hlLMBProjectDefaults.addWidget(self.pbSaveProjectMapSettings)
self.pbCancelProjectMapSettings = QtGui.QPushButton(self.tbSettings)
self.pbCancelProjectMapSettings.setObjectName(_fromUtf8("pbCancelProjectMapSettings"))
self.hlLMBProjectDefaults.addWidget(self.pbCancelProjectMapSettings)
self.gridLayout_3.addLayout(self.hlLMBProjectDefaults, 6, 0, 1, 1)
self.twMapBioSettings.addTab(self.tbSettings, _fromUtf8(""))
self.tbProjectDetails = QtGui.QWidget()
self.tbProjectDetails.setObjectName(_fromUtf8("tbProjectDetails"))
self.gridLayout_15 = QtGui.QGridLayout(self.tbProjectDetails)
self.gridLayout_15.setObjectName(_fromUtf8("gridLayout_15"))
self.frContentCode = QtGui.QFrame(self.tbProjectDetails)
self.frContentCode.setFrameShape(QtGui.QFrame.NoFrame)
self.frContentCode.setFrameShadow(QtGui.QFrame.Plain)
self.frContentCode.setObjectName(_fromUtf8("frContentCode"))
self.gridLayout_2 = QtGui.QGridLayout(self.frContentCode)
self.gridLayout_2.setMargin(0)
self.gridLayout_2.setObjectName(_fromUtf8("gridLayout_2"))
self.vlProjectDetails = QtGui.QVBoxLayout()
self.vlProjectDetails.setSpacing(2)
self.vlProjectDetails.setObjectName(_fromUtf8("vlProjectDetails"))
self.lblProjCode = QtGui.QLabel(self.frContentCode)
self.lblProjCode.setObjectName(_fromUtf8("lblProjCode"))
self.vlProjectDetails.addWidget(self.lblProjCode)
self.leProjectCode = QtGui.QLineEdit(self.frContentCode)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.leProjectCode.sizePolicy().hasHeightForWidth())
self.leProjectCode.setSizePolicy(sizePolicy)
self.leProjectCode.setObjectName(_fromUtf8("leProjectCode"))
self.vlProjectDetails.addWidget(self.leProjectCode)
self.lblProjDescription = QtGui.QLabel(self.frContentCode)
self.lblProjDescription.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop)
self.lblProjDescription.setObjectName(_fromUtf8("lblProjDescription"))
self.vlProjectDetails.addWidget(self.lblProjDescription)
self.pteProjectDescription = QtGui.QPlainTextEdit(self.frContentCode)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.MinimumExpanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pteProjectDescription.sizePolicy().hasHeightForWidth())
self.pteProjectDescription.setSizePolicy(sizePolicy)
self.pteProjectDescription.setMinimumSize(QtCore.QSize(0, 50))
self.pteProjectDescription.setMaximumSize(QtCore.QSize(16777215, 100))
self.pteProjectDescription.setObjectName(_fromUtf8("pteProjectDescription"))
self.vlProjectDetails.addWidget(self.pteProjectDescription)
self.cbUseHeritage = QtGui.QCheckBox(self.frContentCode)
self.cbUseHeritage.setObjectName(_fromUtf8("cbUseHeritage"))
self.vlProjectDetails.addWidget(self.cbUseHeritage)
self.lblProjOther = QtGui.QLabel(self.frContentCode)
self.lblProjOther.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop)
self.lblProjOther.setObjectName(_fromUtf8("lblProjOther"))
self.vlProjectDetails.addWidget(self.lblProjOther)
self.pteProjectOther = QtGui.QPlainTextEdit(self.frContentCode)
self.pteProjectOther.setEnabled(True)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.MinimumExpanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pteProjectOther.sizePolicy().hasHeightForWidth())
self.pteProjectOther.setSizePolicy(sizePolicy)
self.pteProjectOther.setMinimumSize(QtCore.QSize(0, 100))
self.pteProjectOther.setMaximumSize(QtCore.QSize(16777215, 200))
self.pteProjectOther.setAutoFillBackground(False)
self.pteProjectOther.setReadOnly(True)
self.pteProjectOther.setBackgroundVisible(False)
self.pteProjectOther.setObjectName(_fromUtf8("pteProjectOther"))
self.vlProjectDetails.addWidget(self.pteProjectOther)
self.lblCustomFields = QtGui.QLabel(self.frContentCode)
self.lblCustomFields.setObjectName(_fromUtf8("lblCustomFields"))
self.vlProjectDetails.addWidget(self.lblCustomFields)
self.pteCustomFields = QtGui.QPlainTextEdit(self.frContentCode)
self.pteCustomFields.setEnabled(True)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.MinimumExpanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pteCustomFields.sizePolicy().hasHeightForWidth())
self.pteCustomFields.setSizePolicy(sizePolicy)
self.pteCustomFields.setMinimumSize(QtCore.QSize(0, 50))
self.pteCustomFields.setMaximumSize(QtCore.QSize(16777215, 100))
self.pteCustomFields.setAutoFillBackground(False)
self.pteCustomFields.setFrameShape(QtGui.QFrame.StyledPanel)
self.pteCustomFields.setReadOnly(True)
self.pteCustomFields.setBackgroundVisible(False)
self.pteCustomFields.setObjectName(_fromUtf8("pteCustomFields"))
self.vlProjectDetails.addWidget(self.pteCustomFields)
self.horizontalLayout_4 = QtGui.QHBoxLayout()
self.horizontalLayout_4.setObjectName(_fromUtf8("horizontalLayout_4"))
self.lblDateAndTime = QtGui.QLabel(self.frContentCode)
self.lblDateAndTime.setObjectName(_fromUtf8("lblDateAndTime"))
self.horizontalLayout_4.addWidget(self.lblDateAndTime)
self.cbUsePeriod = QtGui.QCheckBox(self.frContentCode)
self.cbUsePeriod.setObjectName(_fromUtf8("cbUsePeriod"))
self.horizontalLayout_4.addWidget(self.cbUsePeriod)
self.vlProjectDetails.addLayout(self.horizontalLayout_4)
self.pteDateAndTime = QtGui.QPlainTextEdit(self.frContentCode)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.MinimumExpanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pteDateAndTime.sizePolicy().hasHeightForWidth())
self.pteDateAndTime.setSizePolicy(sizePolicy)
self.pteDateAndTime.setMinimumSize(QtCore.QSize(0, 75))
self.pteDateAndTime.setMaximumSize(QtCore.QSize(16777215, 100))
self.pteDateAndTime.setObjectName(_fromUtf8("pteDateAndTime"))
self.vlProjectDetails.addWidget(self.pteDateAndTime)
self.gridLayout_2.addLayout(self.vlProjectDetails, 0, 0, 1, 1)
self.vlContentCoding = QtGui.QVBoxLayout()
self.vlContentCoding.setSpacing(2)
self.vlContentCoding.setObjectName(_fromUtf8("vlContentCoding"))
self.horizontalLayout_5 = QtGui.QHBoxLayout()
self.horizontalLayout_5.setObjectName(_fromUtf8("horizontalLayout_5"))
self.lblTimeOfYear = QtGui.QLabel(self.frContentCode)
self.lblTimeOfYear.setObjectName(_fromUtf8("lblTimeOfYear"))
self.horizontalLayout_5.addWidget(self.lblTimeOfYear)
self.cbTimeOfYear = QtGui.QCheckBox(self.frContentCode)
self.cbTimeOfYear.setLayoutDirection(QtCore.Qt.LeftToRight)
self.cbTimeOfYear.setObjectName(_fromUtf8("cbTimeOfYear"))
self.horizontalLayout_5.addWidget(self.cbTimeOfYear)
self.vlContentCoding.addLayout(self.horizontalLayout_5)
self.pteTimeOfYear = QtGui.QPlainTextEdit(self.frContentCode)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.MinimumExpanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pteTimeOfYear.sizePolicy().hasHeightForWidth())
self.pteTimeOfYear.setSizePolicy(sizePolicy)
self.pteTimeOfYear.setMinimumSize(QtCore.QSize(0, 75))
self.pteTimeOfYear.setMaximumSize(QtCore.QSize(16777215, 100))
self.pteTimeOfYear.setObjectName(_fromUtf8("pteTimeOfYear"))
self.vlContentCoding.addWidget(self.pteTimeOfYear)
self.horizontalLayout_3 = QtGui.QHBoxLayout()
self.horizontalLayout_3.setObjectName(_fromUtf8("horizontalLayout_3"))
self.lblContentCodes = QtGui.QLabel(self.frContentCode)
self.lblContentCodes.setObjectName(_fromUtf8("lblContentCodes"))
self.horizontalLayout_3.addWidget(self.lblContentCodes)
self.tbSortCodes = QtGui.QToolButton(self.frContentCode)
self.tbSortCodes.setObjectName(_fromUtf8("tbSortCodes"))
self.horizontalLayout_3.addWidget(self.tbSortCodes)
self.vlContentCoding.addLayout(self.horizontalLayout_3)
self.pteContentCodes = QtGui.QPlainTextEdit(self.frContentCode)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.MinimumExpanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pteContentCodes.sizePolicy().hasHeightForWidth())
self.pteContentCodes.setSizePolicy(sizePolicy)
self.pteContentCodes.setMinimumSize(QtCore.QSize(0, 250))
self.pteContentCodes.setMaximumSize(QtCore.QSize(16777215, 500))
self.pteContentCodes.setObjectName(_fromUtf8("pteContentCodes"))
self.vlContentCoding.addWidget(self.pteContentCodes)
self.lblDefaultCodes = QtGui.QLabel(self.frContentCode)
self.lblDefaultCodes.setObjectName(_fromUtf8("lblDefaultCodes"))
self.vlContentCoding.addWidget(self.lblDefaultCodes)
self.glContentCodes = QtGui.QGridLayout()
self.glContentCodes.setObjectName(_fromUtf8("glContentCodes"))
self.lblDefaultCode = QtGui.QLabel(self.frContentCode)
self.lblDefaultCode.setObjectName(_fromUtf8("lblDefaultCode"))
self.glContentCodes.addWidget(self.lblDefaultCode, 0, 0, 1, 1)
self.lblDefaultPointCode = QtGui.QLabel(self.frContentCode)
self.lblDefaultPointCode.setObjectName(_fromUtf8("lblDefaultPointCode"))
self.glContentCodes.addWidget(self.lblDefaultPointCode, 1, 0, 1, 1)
self.cbDefaultCode = QtGui.QComboBox(self.frContentCode)
self.cbDefaultCode.setObjectName(_fromUtf8("cbDefaultCode"))
self.glContentCodes.addWidget(self.cbDefaultCode, 0, 1, 1, 1)
self.cbPointCode = QtGui.QComboBox(self.frContentCode)
self.cbPointCode.setObjectName(_fromUtf8("cbPointCode"))
self.glContentCodes.addWidget(self.cbPointCode, 1, 1, 1, 1)
self.lblDefaultLineCode = QtGui.QLabel(self.frContentCode)
self.lblDefaultLineCode.setObjectName(_fromUtf8("lblDefaultLineCode"))
self.glContentCodes.addWidget(self.lblDefaultLineCode, 0, 2, 1, 1)
self.cbLineCode = QtGui.QComboBox(self.frContentCode)
self.cbLineCode.setObjectName(_fromUtf8("cbLineCode"))
self.glContentCodes.addWidget(self.cbLineCode, 0, 3, 1, 1)
self.lblDefaultPolygonCode = QtGui.QLabel(self.frContentCode)
self.lblDefaultPolygonCode.setObjectName(_fromUtf8("lblDefaultPolygonCode"))
self.glContentCodes.addWidget(self.lblDefaultPolygonCode, 1, 2, 1, 1)
self.cbPolygonCode = QtGui.QComboBox(self.frContentCode)
self.cbPolygonCode.setObjectName(_fromUtf8("cbPolygonCode"))
self.glContentCodes.addWidget(self.cbPolygonCode, 1, 3, 1, 1)
self.vlContentCoding.addLayout(self.glContentCodes)
self.gridLayout_2.addLayout(self.vlContentCoding, 0, 1, 1, 1)
self.gridLayout_15.addWidget(self.frContentCode, 0, 0, 1, 1)
self.hlProjectButtons = QtGui.QHBoxLayout()
self.hlProjectButtons.setObjectName(_fromUtf8("hlProjectButtons"))
self.pbProjectDetailsSave = QtGui.QPushButton(self.tbProjectDetails)
self.pbProjectDetailsSave.setObjectName(_fromUtf8("pbProjectDetailsSave"))
self.hlProjectButtons.addWidget(self.pbProjectDetailsSave)
self.pbProjectDetailsCancel = QtGui.QPushButton(self.tbProjectDetails)
self.pbProjectDetailsCancel.setEnabled(True)
self.pbProjectDetailsCancel.setObjectName(_fromUtf8("pbProjectDetailsCancel"))
self.hlProjectButtons.addWidget(self.pbProjectDetailsCancel)
self.gridLayout_15.addLayout(self.hlProjectButtons, 1, 0, 1, 1)
self.twMapBioSettings.addTab(self.tbProjectDetails, _fromUtf8(""))
self.tbPeople = QtGui.QWidget()
self.tbPeople.setObjectName(_fromUtf8("tbPeople"))
self.gridLayout_13 = QtGui.QGridLayout(self.tbPeople)
self.gridLayout_13.setObjectName(_fromUtf8("gridLayout_13"))
self.verticalLayout_2 = QtGui.QVBoxLayout()
self.verticalLayout_2.setObjectName(_fromUtf8("verticalLayout_2"))
self.lblParticipants = QtGui.QLabel(self.tbPeople)
self.lblParticipants.setMaximumSize(QtCore.QSize(300, 16777215))
self.lblParticipants.setObjectName(_fromUtf8("lblParticipants"))
self.verticalLayout_2.addWidget(self.lblParticipants)
self.tblParticipants = QtGui.QTableWidget(self.tbPeople)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.MinimumExpanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.tblParticipants.sizePolicy().hasHeightForWidth())
self.tblParticipants.setSizePolicy(sizePolicy)
self.tblParticipants.setMinimumSize(QtCore.QSize(0, 100))
self.tblParticipants.setMaximumSize(QtCore.QSize(300, 16777215))
self.tblParticipants.setEditTriggers(QtGui.QAbstractItemView.NoEditTriggers)
self.tblParticipants.setTabKeyNavigation(False)
self.tblParticipants.setProperty("showDropIndicator", False)
self.tblParticipants.setDragDropOverwriteMode(False)
self.tblParticipants.setSelectionMode(QtGui.QAbstractItemView.SingleSelection)
self.tblParticipants.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows)
self.tblParticipants.setObjectName(_fromUtf8("tblParticipants"))
self.tblParticipants.setColumnCount(0)
self.tblParticipants.setRowCount(0)
self.verticalLayout_2.addWidget(self.tblParticipants)
self.frPeopleControls = QtGui.QFrame(self.tbPeople)
self.frPeopleControls.setMaximumSize(QtCore.QSize(300, 16777215))
self.frPeopleControls.setObjectName(_fromUtf8("frPeopleControls"))
self.hzParticipantList = QtGui.QHBoxLayout(self.frPeopleControls)
self.hzParticipantList.setObjectName(_fromUtf8("hzParticipantList"))
self.pbParticipantNew = QtGui.QPushButton(self.frPeopleControls)
self.pbParticipantNew.setObjectName(_fromUtf8("pbParticipantNew"))
self.hzParticipantList.addWidget(self.pbParticipantNew)
self.pbParticipantSave = QtGui.QPushButton(self.frPeopleControls)
self.pbParticipantSave.setEnabled(False)
self.pbParticipantSave.setObjectName(_fromUtf8("pbParticipantSave"))
self.hzParticipantList.addWidget(self.pbParticipantSave)
self.pbParticipantCancel = QtGui.QPushButton(self.frPeopleControls)
self.pbParticipantCancel.setEnabled(False)
self.pbParticipantCancel.setObjectName(_fromUtf8("pbParticipantCancel"))
self.hzParticipantList.addWidget(self.pbParticipantCancel)
self.pbParticipantDelete = QtGui.QPushButton(self.frPeopleControls)
self.pbParticipantDelete.setEnabled(False)
self.pbParticipantDelete.setObjectName(_fromUtf8("pbParticipantDelete"))
self.hzParticipantList.addWidget(self.pbParticipantDelete)
self.verticalLayout_2.addWidget(self.frPeopleControls)
self.gridLayout_13.addLayout(self.verticalLayout_2, 0, 0, 1, 1)
self.tbxParticipants = QtGui.QToolBox(self.tbPeople)
self.tbxParticipants.setEnabled(True)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.MinimumExpanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.tbxParticipants.sizePolicy().hasHeightForWidth())
self.tbxParticipants.setSizePolicy(sizePolicy)
self.tbxParticipants.setMinimumSize(QtCore.QSize(440, 365))
self.tbxParticipants.setObjectName(_fromUtf8("tbxParticipants"))
self.pgBasicInfo = QtGui.QWidget()
self.pgBasicInfo.setGeometry(QtCore.QRect(0, 0, 400, 404))
self.pgBasicInfo.setObjectName(_fromUtf8("pgBasicInfo"))
self.gridLayout_12 = QtGui.QGridLayout(self.pgBasicInfo)
self.gridLayout_12.setMargin(0)
self.gridLayout_12.setObjectName(_fromUtf8("gridLayout_12"))
self.glParticipants = QtGui.QGridLayout()
self.glParticipants.setObjectName(_fromUtf8("glParticipants"))
self.leParticipantCode = QtGui.QLineEdit(self.pgBasicInfo)
self.leParticipantCode.setObjectName(_fromUtf8("leParticipantCode"))
self.glParticipants.addWidget(self.leParticipantCode, 0, 1, 1, 1)
self.leEmail = QtGui.QLineEdit(self.pgBasicInfo)
self.leEmail.setObjectName(_fromUtf8("leEmail"))
self.glParticipants.addWidget(self.leEmail, 3, 1, 1, 1)
self.lblEmail = QtGui.QLabel(self.pgBasicInfo)
self.lblEmail.setObjectName(_fromUtf8("lblEmail"))
self.glParticipants.addWidget(self.lblEmail, 3, 0, 1, 1)
self.lblFamilyGroup = QtGui.QLabel(self.pgBasicInfo)
self.lblFamilyGroup.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.lblFamilyGroup.setObjectName(_fromUtf8("lblFamilyGroup"))
self.glParticipants.addWidget(self.lblFamilyGroup, 5, 0, 1, 1)
self.lblFirstName = QtGui.QLabel(self.pgBasicInfo)
self.lblFirstName.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop)
self.lblFirstName.setObjectName(_fromUtf8("lblFirstName"))
self.glParticipants.addWidget(self.lblFirstName, 1, 0, 1, 1)
self.lblLastName = QtGui.QLabel(self.pgBasicInfo)
self.lblLastName.setObjectName(_fromUtf8("lblLastName"))
self.glParticipants.addWidget(self.lblLastName, 2, 0, 1, 1)
self.lblParticipantCode = QtGui.QLabel(self.pgBasicInfo)
self.lblParticipantCode.setObjectName(_fromUtf8("lblParticipantCode"))
self.glParticipants.addWidget(self.lblParticipantCode, 0, 0, 1, 1)
self.lblCommunityName = QtGui.QLabel(self.pgBasicInfo)
self.lblCommunityName.setObjectName(_fromUtf8("lblCommunityName"))
self.glParticipants.addWidget(self.lblCommunityName, 4, 0, 1, 1)
self.leFamily = QtGui.QLineEdit(self.pgBasicInfo)
self.leFamily.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.leFamily.setObjectName(_fromUtf8("leFamily"))
self.glParticipants.addWidget(self.leFamily, 5, 1, 1, 1)
self.leMaidenName = QtGui.QLineEdit(self.pgBasicInfo)
self.leMaidenName.setObjectName(_fromUtf8("leMaidenName"))
self.glParticipants.addWidget(self.leMaidenName, 6, 1, 1, 1)
self.lblMaidenName = QtGui.QLabel(self.pgBasicInfo)
self.lblMaidenName.setObjectName(_fromUtf8("lblMaidenName"))
self.glParticipants.addWidget(self.lblMaidenName, 6, 0, 1, 1)
self.leLastName = QtGui.QLineEdit(self.pgBasicInfo)
self.leLastName.setObjectName(_fromUtf8("leLastName"))
self.glParticipants.addWidget(self.leLastName, 2, 1, 1, 1)
self.leFirstName = QtGui.QLineEdit(self.pgBasicInfo)
self.leFirstName.setObjectName(_fromUtf8("leFirstName"))
self.glParticipants.addWidget(self.leFirstName, 1, 1, 1, 1)
self.leCommunity = QtGui.QLineEdit(self.pgBasicInfo)
self.leCommunity.setObjectName(_fromUtf8("leCommunity"))
self.glParticipants.addWidget(self.leCommunity, 4, 1, 1, 1)
self.gridLayout_12.addLayout(self.glParticipants, 0, 0, 1, 1)
self.horizontalLayout = QtGui.QHBoxLayout()
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.lblGender = QtGui.QLabel(self.pgBasicInfo)
self.lblGender.setMinimumSize(QtCore.QSize(107, 0))
self.lblGender.setObjectName(_fromUtf8("lblGender"))
self.horizontalLayout.addWidget(self.lblGender)
self.cbGender = QtGui.QComboBox(self.pgBasicInfo)
self.cbGender.setObjectName(_fromUtf8("cbGender"))
self.cbGender.addItem(_fromUtf8(""))
self.cbGender.addItem(_fromUtf8(""))
self.cbGender.addItem(_fromUtf8(""))
self.cbGender.addItem(_fromUtf8(""))
self.cbGender.addItem(_fromUtf8(""))
self.horizontalLayout.addWidget(self.cbGender)
self.lblMaritalStatus = QtGui.QLabel(self.pgBasicInfo)
self.lblMaritalStatus.setObjectName(_fromUtf8("lblMaritalStatus"))
self.horizontalLayout.addWidget(self.lblMaritalStatus)
self.cbMaritalStatus = QtGui.QComboBox(self.pgBasicInfo)
self.cbMaritalStatus.setObjectName(_fromUtf8("cbMaritalStatus"))
self.cbMaritalStatus.addItem(_fromUtf8(""))
self.cbMaritalStatus.addItem(_fromUtf8(""))
self.cbMaritalStatus.addItem(_fromUtf8(""))
self.cbMaritalStatus.addItem(_fromUtf8(""))
self.cbMaritalStatus.addItem(_fromUtf8(""))
self.cbMaritalStatus.addItem(_fromUtf8(""))
self.horizontalLayout.addWidget(self.cbMaritalStatus)
self.gridLayout_12.addLayout(self.horizontalLayout, 1, 0, 1, 1)
self.gridLayout_11 = QtGui.QGridLayout()
self.gridLayout_11.setObjectName(_fromUtf8("gridLayout_11"))
self.lblParticipantNote = QtGui.QLabel(self.pgBasicInfo)
self.lblParticipantNote.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop)
self.lblParticipantNote.setObjectName(_fromUtf8("lblParticipantNote"))
self.gridLayout_11.addWidget(self.lblParticipantNote, 2, 0, 1, 1)
self.lblParticipantTags = QtGui.QLabel(self.pgBasicInfo)
self.lblParticipantTags.setObjectName(_fromUtf8("lblParticipantTags"))
self.gridLayout_11.addWidget(self.lblParticipantTags, 1, 0, 1, 1)
self.lblBirthDate = QtGui.QLabel(self.pgBasicInfo)
self.lblBirthDate.setMinimumSize(QtCore.QSize(107, 0))
self.lblBirthDate.setObjectName(_fromUtf8("lblBirthDate"))
self.gridLayout_11.addWidget(self.lblBirthDate, 0, 0, 1, 1)
self.pteParticipantNote = QtGui.QPlainTextEdit(self.pgBasicInfo)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.MinimumExpanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pteParticipantNote.sizePolicy().hasHeightForWidth())
self.pteParticipantNote.setSizePolicy(sizePolicy)
self.pteParticipantNote.setMinimumSize(QtCore.QSize(0, 50))
self.pteParticipantNote.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.pteParticipantNote.setObjectName(_fromUtf8("pteParticipantNote"))
self.gridLayout_11.addWidget(self.pteParticipantNote, 2, 1, 1, 1)
self.leParticipantTags = QtGui.QLineEdit(self.pgBasicInfo)
self.leParticipantTags.setObjectName(_fromUtf8("leParticipantTags"))
self.gridLayout_11.addWidget(self.leParticipantTags, 1, 1, 1, 1)
self.leBirthDate = QtGui.QLineEdit(self.pgBasicInfo)
self.leBirthDate.setObjectName(_fromUtf8("leBirthDate"))
self.gridLayout_11.addWidget(self.leBirthDate, 0, 1, 1, 1)
self.gridLayout_12.addLayout(self.gridLayout_11, 2, 0, 1, 1)
self.tbxParticipants.addItem(self.pgBasicInfo, _fromUtf8(""))
self.pgAddresses = QtGui.QWidget()
self.pgAddresses.setGeometry(QtCore.QRect(0, 0, 226, 170))
self.pgAddresses.setObjectName(_fromUtf8("pgAddresses"))
self.gridLayout_9 = QtGui.QGridLayout(self.pgAddresses)
self.gridLayout_9.setMargin(0)
self.gridLayout_9.setObjectName(_fromUtf8("gridLayout_9"))
self.vlAddressList = QtGui.QVBoxLayout()
self.vlAddressList.setObjectName(_fromUtf8("vlAddressList"))
self.tblAddresses = QtGui.QTableWidget(self.pgAddresses)
self.tblAddresses.setEditTriggers(QtGui.QAbstractItemView.NoEditTriggers)
self.tblAddresses.setTabKeyNavigation(False)
self.tblAddresses.setProperty("showDropIndicator", False)
self.tblAddresses.setDragDropOverwriteMode(False)
self.tblAddresses.setSelectionMode(QtGui.QAbstractItemView.SingleSelection)
self.tblAddresses.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows)
self.tblAddresses.setObjectName(_fromUtf8("tblAddresses"))
self.tblAddresses.setColumnCount(0)
self.tblAddresses.setRowCount(0)
self.vlAddressList.addWidget(self.tblAddresses)
self.hlAddressList = QtGui.QHBoxLayout()
self.hlAddressList.setObjectName(_fromUtf8("hlAddressList"))
self.pbAddNew = QtGui.QPushButton(self.pgAddresses)
self.pbAddNew.setObjectName(_fromUtf8("pbAddNew"))
self.hlAddressList.addWidget(self.pbAddNew)
self.pbAddSave = QtGui.QPushButton(self.pgAddresses)
self.pbAddSave.setEnabled(False)
self.pbAddSave.setObjectName(_fromUtf8("pbAddSave"))
self.hlAddressList.addWidget(self.pbAddSave)
self.pbAddCancel = QtGui.QPushButton(self.pgAddresses)
self.pbAddCancel.setEnabled(False)
self.pbAddCancel.setObjectName(_fromUtf8("pbAddCancel"))
self.hlAddressList.addWidget(self.pbAddCancel)
self.pbAddDelete = QtGui.QPushButton(self.pgAddresses)
self.pbAddDelete.setEnabled(False)
self.pbAddDelete.setObjectName(_fromUtf8("pbAddDelete"))
self.hlAddressList.addWidget(self.pbAddDelete)
self.vlAddressList.addLayout(self.hlAddressList)
self.gridLayout_9.addLayout(self.vlAddressList, 0, 0, 1, 1)
self.grAddresses = QtGui.QGridLayout()
self.grAddresses.setObjectName(_fromUtf8("grAddresses"))
self.lblAddress = QtGui.QLabel(self.pgAddresses)
self.lblAddress.setObjectName(_fromUtf8("lblAddress"))
self.grAddresses.addWidget(self.lblAddress, 1, 0, 1, 1)
self.lblAddType = QtGui.QLabel(self.pgAddresses)
self.lblAddType.setObjectName(_fromUtf8("lblAddType"))
self.grAddresses.addWidget(self.lblAddType, 0, 0, 1, 1)
self.pteAddress = QtGui.QPlainTextEdit(self.pgAddresses)
self.pteAddress.setMaximumSize(QtCore.QSize(16777215, 200))
self.pteAddress.setObjectName(_fromUtf8("pteAddress"))
self.grAddresses.addWidget(self.pteAddress, 1, 1, 1, 1)
self.cbAddType = QtGui.QComboBox(self.pgAddresses)
self.cbAddType.setObjectName(_fromUtf8("cbAddType"))
self.cbAddType.addItem(_fromUtf8(""))
self.cbAddType.addItem(_fromUtf8(""))
self.cbAddType.addItem(_fromUtf8(""))
self.cbAddType.addItem(_fromUtf8(""))
self.grAddresses.addWidget(self.cbAddType, 0, 1, 1, 1)
self.gridLayout_9.addLayout(self.grAddresses, 1, 0, 1, 1)
self.tbxParticipants.addItem(self.pgAddresses, _fromUtf8(""))
self.pgTelecoms = QtGui.QWidget()
self.pgTelecoms.setGeometry(QtCore.QRect(0, 0, 226, 153))
self.pgTelecoms.setObjectName(_fromUtf8("pgTelecoms"))
self.gridLayout_10 = QtGui.QGridLayout(self.pgTelecoms)
self.gridLayout_10.setMargin(0)
self.gridLayout_10.setObjectName(_fromUtf8("gridLayout_10"))
self.vlTelecomList = QtGui.QVBoxLayout()
self.vlTelecomList.setObjectName(_fromUtf8("vlTelecomList"))
self.tblTelecoms = QtGui.QTableWidget(self.pgTelecoms)
self.tblTelecoms.setEditTriggers(QtGui.QAbstractItemView.NoEditTriggers)
self.tblTelecoms.setTabKeyNavigation(False)
self.tblTelecoms.setProperty("showDropIndicator", False)
self.tblTelecoms.setDragDropOverwriteMode(False)
self.tblTelecoms.setSelectionMode(QtGui.QAbstractItemView.SingleSelection)
self.tblTelecoms.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows)
self.tblTelecoms.setObjectName(_fromUtf8("tblTelecoms"))
self.tblTelecoms.setColumnCount(0)
self.tblTelecoms.setRowCount(0)
self.vlTelecomList.addWidget(self.tblTelecoms)
self.hlTelecomList = QtGui.QHBoxLayout()
self.hlTelecomList.setObjectName(_fromUtf8("hlTelecomList"))
self.pbTelNew = QtGui.QPushButton(self.pgTelecoms)
self.pbTelNew.setObjectName(_fromUtf8("pbTelNew"))
self.hlTelecomList.addWidget(self.pbTelNew)
self.pbTelSave = QtGui.QPushButton(self.pgTelecoms)
self.pbTelSave.setEnabled(False)
self.pbTelSave.setObjectName(_fromUtf8("pbTelSave"))
self.hlTelecomList.addWidget(self.pbTelSave)
self.pbTelCancel = QtGui.QPushButton(self.pgTelecoms)
self.pbTelCancel.setEnabled(False)
self.pbTelCancel.setObjectName(_fromUtf8("pbTelCancel"))
self.hlTelecomList.addWidget(self.pbTelCancel)
self.pbTelDelete = QtGui.QPushButton(self.pgTelecoms)
self.pbTelDelete.setEnabled(False)
self.pbTelDelete.setObjectName(_fromUtf8("pbTelDelete"))
self.hlTelecomList.addWidget(self.pbTelDelete)
self.vlTelecomList.addLayout(self.hlTelecomList)
self.gridLayout_10.addLayout(self.vlTelecomList, 0, 0, 1, 1)
self.glTelecoms = QtGui.QGridLayout()
self.glTelecoms.setObjectName(_fromUtf8("glTelecoms"))
self.lblTelType = QtGui.QLabel(self.pgTelecoms)
self.lblTelType.setObjectName(_fromUtf8("lblTelType"))
self.glTelecoms.addWidget(self.lblTelType, 0, 0, 1, 1)
self.lblTelecom = QtGui.QLabel(self.pgTelecoms)
self.lblTelecom.setObjectName(_fromUtf8("lblTelecom"))
self.glTelecoms.addWidget(self.lblTelecom, 1, 0, 1, 1)
self.leTelNumber = QtGui.QLineEdit(self.pgTelecoms)
self.leTelNumber.setObjectName(_fromUtf8("leTelNumber"))
self.glTelecoms.addWidget(self.leTelNumber, 1, 1, 1, 1)
self.cbTelType = QtGui.QComboBox(self.pgTelecoms)
self.cbTelType.setObjectName(_fromUtf8("cbTelType"))
self.cbTelType.addItem(_fromUtf8(""))
self.cbTelType.addItem(_fromUtf8(""))
self.cbTelType.addItem(_fromUtf8(""))
self.cbTelType.addItem(_fromUtf8(""))
self.cbTelType.addItem(_fromUtf8(""))
self.cbTelType.addItem(_fromUtf8(""))
self.glTelecoms.addWidget(self.cbTelType, 0, 1, 1, 1)
self.gridLayout_10.addLayout(self.glTelecoms, 1, 0, 1, 1)
self.tbxParticipants.addItem(self.pgTelecoms, _fromUtf8(""))
self.gridLayout_13.addWidget(self.tbxParticipants, 0, 1, 1, 1)
self.twMapBioSettings.addTab(self.tbPeople, _fromUtf8(""))
self.tbInterviews = QtGui.QWidget()
self.tbInterviews.setObjectName(_fromUtf8("tbInterviews"))
self.gridLayout_6 = QtGui.QGridLayout(self.tbInterviews)
self.gridLayout_6.setObjectName(_fromUtf8("gridLayout_6"))
self.verticalLayout = QtGui.QVBoxLayout()
self.verticalLayout.setSpacing(0)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.lblInterviews = QtGui.QLabel(self.tbInterviews)
self.lblInterviews.setMaximumSize(QtCore.QSize(300, 16777215))
self.lblInterviews.setObjectName(_fromUtf8("lblInterviews"))
self.verticalLayout.addWidget(self.lblInterviews)
self.tblInterviews = QtGui.QTableWidget(self.tbInterviews)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.MinimumExpanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.tblInterviews.sizePolicy().hasHeightForWidth())
self.tblInterviews.setSizePolicy(sizePolicy)
self.tblInterviews.setMinimumSize(QtCore.QSize(0, 100))
self.tblInterviews.setMaximumSize(QtCore.QSize(300, 16777215))
self.tblInterviews.setEditTriggers(QtGui.QAbstractItemView.NoEditTriggers)
self.tblInterviews.setSelectionMode(QtGui.QAbstractItemView.SingleSelection)
self.tblInterviews.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows)
self.tblInterviews.setObjectName(_fromUtf8("tblInterviews"))
self.tblInterviews.setColumnCount(0)
self.tblInterviews.setRowCount(0)
self.verticalLayout.addWidget(self.tblInterviews)
self.frInterviewControls = QtGui.QFrame(self.tbInterviews)
self.frInterviewControls.setMaximumSize(QtCore.QSize(300, 16777215))
self.frInterviewControls.setObjectName(_fromUtf8("frInterviewControls"))
self.hlInterviewList = QtGui.QHBoxLayout(self.frInterviewControls)
self.hlInterviewList.setObjectName(_fromUtf8("hlInterviewList"))
self.pbIntNew = QtGui.QPushButton(self.frInterviewControls)
self.pbIntNew.setObjectName(_fromUtf8("pbIntNew"))
self.hlInterviewList.addWidget(self.pbIntNew)
self.pbIntCopy = QtGui.QPushButton(self.frInterviewControls)
self.pbIntCopy.setObjectName(_fromUtf8("pbIntCopy"))
self.hlInterviewList.addWidget(self.pbIntCopy)
self.pbIntSave = QtGui.QPushButton(self.frInterviewControls)
self.pbIntSave.setEnabled(False)
self.pbIntSave.setObjectName(_fromUtf8("pbIntSave"))
self.hlInterviewList.addWidget(self.pbIntSave)
self.pbIntCancel = QtGui.QPushButton(self.frInterviewControls)
self.pbIntCancel.setEnabled(False)
self.pbIntCancel.setObjectName(_fromUtf8("pbIntCancel"))
self.hlInterviewList.addWidget(self.pbIntCancel)
self.pbIntDelete = QtGui.QPushButton(self.frInterviewControls)
self.pbIntDelete.setEnabled(False)
self.pbIntDelete.setObjectName(_fromUtf8("pbIntDelete"))
self.hlInterviewList.addWidget(self.pbIntDelete)
self.verticalLayout.addWidget(self.frInterviewControls)
self.gridLayout_6.addLayout(self.verticalLayout, 0, 0, 1, 1)
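# Toolbox holding the three detail pages for the selected interview.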
self.tbxInterview = QtGui.QToolBox(self.tbInterviews)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.MinimumExpanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.tbxInterview.sizePolicy().hasHeightForWidth())
self.tbxInterview.setSizePolicy(sizePolicy)
self.tbxInterview.setMinimumSize(QtCore.QSize(440, 315))
self.tbxInterview.setObjectName(_fromUtf8("tbxInterview"))
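# Toolbox page 1: basic interview information (code, title, description,
# location, note, tags, security, status, start/end date and time).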
self.pgIntBasic = QtGui.QWidget()
self.pgIntBasic.setGeometry(QtCore.QRect(0, 0, 236, 418))
self.pgIntBasic.setObjectName(_fromUtf8("pgIntBasic"))
self.gridLayout_8 = QtGui.QGridLayout(self.pgIntBasic)
self.gridLayout_8.setMargin(0)
self.gridLayout_8.setObjectName(_fromUtf8("gridLayout_8"))
self.glInterview = QtGui.QGridLayout()
self.glInterview.setHorizontalSpacing(6)
self.glInterview.setVerticalSpacing(2)
self.glInterview.setObjectName(_fromUtf8("glInterview"))
self.lblStartDateTime = QtGui.QLabel(self.pgIntBasic)
self.lblStartDateTime.setMaximumSize(QtCore.QSize(16777215, 33))
self.lblStartDateTime.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.lblStartDateTime.setObjectName(_fromUtf8("lblStartDateTime"))
self.glInterview.addWidget(self.lblStartDateTime, 8, 0, 1, 1)
self.horizontalLayout_6 = QtGui.QHBoxLayout()
self.horizontalLayout_6.setObjectName(_fromUtf8("horizontalLayout_6"))
self.dteStart = QtGui.QDateTimeEdit(self.pgIntBasic)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.dteStart.sizePolicy().hasHeightForWidth())
self.dteStart.setSizePolicy(sizePolicy)
self.dteStart.setMinimumSize(QtCore.QSize(50, 0))
self.dteStart.setObjectName(_fromUtf8("dteStart"))
self.horizontalLayout_6.addWidget(self.dteStart)
self.tbUDStart = QtGui.QToolButton(self.pgIntBasic)
self.tbUDStart.setObjectName(_fromUtf8("tbUDStart"))
self.horizontalLayout_6.addWidget(self.tbUDStart)
self.glInterview.addLayout(self.horizontalLayout_6, 8, 1, 1, 1)
self.cbInterviewSecurity = QtGui.QComboBox(self.pgIntBasic)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.cbInterviewSecurity.sizePolicy().hasHeightForWidth())
self.cbInterviewSecurity.setSizePolicy(sizePolicy)
self.cbInterviewSecurity.setMinimumSize(QtCore.QSize(50, 0))
self.cbInterviewSecurity.setObjectName(_fromUtf8("cbInterviewSecurity"))
self.cbInterviewSecurity.addItem(_fromUtf8(""))
self.cbInterviewSecurity.addItem(_fromUtf8(""))
self.cbInterviewSecurity.addItem(_fromUtf8(""))
self.glInterview.addWidget(self.cbInterviewSecurity, 6, 1, 1, 1)
self.lblCode = QtGui.QLabel(self.pgIntBasic)
self.lblCode.setObjectName(_fromUtf8("lblCode"))
self.glInterview.addWidget(self.lblCode, 0, 0, 1, 1)
self.leInterviewCode = QtGui.QLineEdit(self.pgIntBasic)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.leInterviewCode.sizePolicy().hasHeightForWidth())
self.leInterviewCode.setSizePolicy(sizePolicy)
self.leInterviewCode.setMinimumSize(QtCore.QSize(50, 0))
self.leInterviewCode.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.leInterviewCode.setObjectName(_fromUtf8("leInterviewCode"))
self.glInterview.addWidget(self.leInterviewCode, 0, 1, 1, 1)
self.lblTitle = QtGui.QLabel(self.pgIntBasic)
self.lblTitle.setObjectName(_fromUtf8("lblTitle"))
self.glInterview.addWidget(self.lblTitle, 1, 0, 1, 1)
self.leInterviewTitle = QtGui.QLineEdit(self.pgIntBasic)
self.leInterviewTitle.setObjectName(_fromUtf8("leInterviewTitle"))
self.glInterview.addWidget(self.leInterviewTitle, 1, 1, 1, 1)
self.lblLocation = QtGui.QLabel(self.pgIntBasic)
self.lblLocation.setObjectName(_fromUtf8("lblLocation"))
self.glInterview.addWidget(self.lblLocation, 3, 0, 1, 1)
self.lblSecurity = QtGui.QLabel(self.pgIntBasic)
self.lblSecurity.setObjectName(_fromUtf8("lblSecurity"))
self.glInterview.addWidget(self.lblSecurity, 6, 0, 1, 1)
self.pteInterviewNote = QtGui.QPlainTextEdit(self.pgIntBasic)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.MinimumExpanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pteInterviewNote.sizePolicy().hasHeightForWidth())
self.pteInterviewNote.setSizePolicy(sizePolicy)
self.pteInterviewNote.setMinimumSize(QtCore.QSize(50, 0))
self.pteInterviewNote.setMaximumSize(QtCore.QSize(16777215, 80))
self.pteInterviewNote.setObjectName(_fromUtf8("pteInterviewNote"))
self.glInterview.addWidget(self.pteInterviewNote, 4, 1, 1, 1)
self.lblNote = QtGui.QLabel(self.pgIntBasic)
self.lblNote.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop)
self.lblNote.setObjectName(_fromUtf8("lblNote"))
self.glInterview.addWidget(self.lblNote, 4, 0, 1, 1)
self.leInterviewTags = QtGui.QLineEdit(self.pgIntBasic)
self.leInterviewTags.setObjectName(_fromUtf8("leInterviewTags"))
self.glInterview.addWidget(self.leInterviewTags, 5, 1, 1, 1)
self.leInterviewLocation = QtGui.QLineEdit(self.pgIntBasic)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.leInterviewLocation.sizePolicy().hasHeightForWidth())
self.leInterviewLocation.setSizePolicy(sizePolicy)
self.leInterviewLocation.setMinimumSize(QtCore.QSize(50, 0))
self.leInterviewLocation.setObjectName(_fromUtf8("leInterviewLocation"))
self.glInterview.addWidget(self.leInterviewLocation, 3, 1, 1, 1)
self.lblTags = QtGui.QLabel(self.pgIntBasic)
self.lblTags.setObjectName(_fromUtf8("lblTags"))
self.glInterview.addWidget(self.lblTags, 5, 0, 1, 1)
self.lblDescription = QtGui.QLabel(self.pgIntBasic)
self.lblDescription.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop)
self.lblDescription.setObjectName(_fromUtf8("lblDescription"))
self.glInterview.addWidget(self.lblDescription, 2, 0, 1, 1)
self.pteInterviewDescription = QtGui.QPlainTextEdit(self.pgIntBasic)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.MinimumExpanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pteInterviewDescription.sizePolicy().hasHeightForWidth())
self.pteInterviewDescription.setSizePolicy(sizePolicy)
self.pteInterviewDescription.setMinimumSize(QtCore.QSize(50, 0))
self.pteInterviewDescription.setMaximumSize(QtCore.QSize(16777215, 80))
self.pteInterviewDescription.setObjectName(_fromUtf8("pteInterviewDescription"))
self.glInterview.addWidget(self.pteInterviewDescription, 2, 1, 1, 1)
self.lbInterviewStatus = QtGui.QLabel(self.pgIntBasic)
self.lbInterviewStatus.setObjectName(_fromUtf8("lbInterviewStatus"))
self.glInterview.addWidget(self.lbInterviewStatus, 7, 0, 1, 1)
self.cbInterviewStatus = QtGui.QComboBox(self.pgIntBasic)
self.cbInterviewStatus.setObjectName(_fromUtf8("cbInterviewStatus"))
self.cbInterviewStatus.addItem(_fromUtf8(""))
self.cbInterviewStatus.addItem(_fromUtf8(""))
self.cbInterviewStatus.addItem(_fromUtf8(""))
self.cbInterviewStatus.addItem(_fromUtf8(""))
self.glInterview.addWidget(self.cbInterviewStatus, 7, 1, 1, 1)
self.lblEndDateTime = QtGui.QLabel(self.pgIntBasic)
self.lblEndDateTime.setMaximumSize(QtCore.QSize(16777215, 33))
self.lblEndDateTime.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.lblEndDateTime.setObjectName(_fromUtf8("lblEndDateTime"))
self.glInterview.addWidget(self.lblEndDateTime, 9, 0, 1, 1)
self.horizontalLayout_7 = QtGui.QHBoxLayout()
self.horizontalLayout_7.setObjectName(_fromUtf8("horizontalLayout_7"))
self.dteEnd = QtGui.QDateTimeEdit(self.pgIntBasic)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.dteEnd.sizePolicy().hasHeightForWidth())
self.dteEnd.setSizePolicy(sizePolicy)
self.dteEnd.setMinimumSize(QtCore.QSize(50, 0))
self.dteEnd.setObjectName(_fromUtf8("dteEnd"))
self.horizontalLayout_7.addWidget(self.dteEnd)
self.tbUDEnd = QtGui.QToolButton(self.pgIntBasic)
self.tbUDEnd.setObjectName(_fromUtf8("tbUDEnd"))
self.horizontalLayout_7.addWidget(self.tbUDEnd)
self.glInterview.addLayout(self.horizontalLayout_7, 9, 1, 1, 1)
self.gridLayout_8.addLayout(self.glInterview, 0, 0, 1, 1)
self.tbxInterview.addItem(self.pgIntBasic, _fromUtf8(""))
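# Toolbox page 2: extra descriptive metadata (subject, language, source,
# citation, rights statement and holder, creator, publisher).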
self.pgIntExtra = QtGui.QWidget()
self.pgIntExtra.setGeometry(QtCore.QRect(0, 0, 184, 327))
self.pgIntExtra.setObjectName(_fromUtf8("pgIntExtra"))
self.gridLayout_5 = QtGui.QGridLayout(self.pgIntExtra)
self.gridLayout_5.setMargin(0)
self.gridLayout_5.setObjectName(_fromUtf8("gridLayout_5"))
self.gridLayout_7 = QtGui.QGridLayout()
self.gridLayout_7.setObjectName(_fromUtf8("gridLayout_7"))
self.lblCitation = QtGui.QLabel(self.pgIntExtra)
self.lblCitation.setObjectName(_fromUtf8("lblCitation"))
self.gridLayout_7.addWidget(self.lblCitation, 3, 0, 1, 1)
self.lblRightsStatement = QtGui.QLabel(self.pgIntExtra)
self.lblRightsStatement.setObjectName(_fromUtf8("lblRightsStatement"))
self.gridLayout_7.addWidget(self.lblRightsStatement, 4, 0, 1, 1)
self.lblSource = QtGui.QLabel(self.pgIntExtra)
self.lblSource.setObjectName(_fromUtf8("lblSource"))
self.gridLayout_7.addWidget(self.lblSource, 2, 0, 1, 1)
self.lblSubject = QtGui.QLabel(self.pgIntExtra)
self.lblSubject.setObjectName(_fromUtf8("lblSubject"))
self.gridLayout_7.addWidget(self.lblSubject, 0, 0, 1, 1)
self.leLanguage = QtGui.QLineEdit(self.pgIntExtra)
self.leLanguage.setObjectName(_fromUtf8("leLanguage"))
self.gridLayout_7.addWidget(self.leLanguage, 1, 1, 1, 1)
self.leSubject = QtGui.QLineEdit(self.pgIntExtra)
self.leSubject.setObjectName(_fromUtf8("leSubject"))
self.gridLayout_7.addWidget(self.leSubject, 0, 1, 1, 1)
self.lblLanguage = QtGui.QLabel(self.pgIntExtra)
self.lblLanguage.setObjectName(_fromUtf8("lblLanguage"))
self.gridLayout_7.addWidget(self.lblLanguage, 1, 0, 1, 1)
self.lblRightsHolder = QtGui.QLabel(self.pgIntExtra)
self.lblRightsHolder.setObjectName(_fromUtf8("lblRightsHolder"))
self.gridLayout_7.addWidget(self.lblRightsHolder, 5, 0, 1, 1)
self.leRightsHolder = QtGui.QLineEdit(self.pgIntExtra)
self.leRightsHolder.setObjectName(_fromUtf8("leRightsHolder"))
self.gridLayout_7.addWidget(self.leRightsHolder, 5, 1, 1, 1)
self.pteRightsStatement = QtGui.QPlainTextEdit(self.pgIntExtra)
self.pteRightsStatement.setObjectName(_fromUtf8("pteRightsStatement"))
self.gridLayout_7.addWidget(self.pteRightsStatement, 4, 1, 1, 1)
self.pteCitation = QtGui.QPlainTextEdit(self.pgIntExtra)
self.pteCitation.setObjectName(_fromUtf8("pteCitation"))
self.gridLayout_7.addWidget(self.pteCitation, 3, 1, 1, 1)
self.pteSource = QtGui.QPlainTextEdit(self.pgIntExtra)
self.pteSource.setObjectName(_fromUtf8("pteSource"))
self.gridLayout_7.addWidget(self.pteSource, 2, 1, 1, 1)
self.lblInterviewCreator = QtGui.QLabel(self.pgIntExtra)
self.lblInterviewCreator.setObjectName(_fromUtf8("lblInterviewCreator"))
self.gridLayout_7.addWidget(self.lblInterviewCreator, 6, 0, 1, 1)
self.leInterviewPublisher = QtGui.QLineEdit(self.pgIntExtra)
self.leInterviewPublisher.setObjectName(_fromUtf8("leInterviewPublisher"))
self.gridLayout_7.addWidget(self.leInterviewPublisher, 6, 1, 1, 1)
self.leInterviewCreator = QtGui.QLineEdit(self.pgIntExtra)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.leInterviewCreator.sizePolicy().hasHeightForWidth())
self.leInterviewCreator.setSizePolicy(sizePolicy)
self.leInterviewCreator.setMinimumSize(QtCore.QSize(50, 0))
self.leInterviewCreator.setObjectName(_fromUtf8("leInterviewCreator"))
self.gridLayout_7.addWidget(self.leInterviewCreator, 7, 1, 1, 1)
self.lblInterviewPublisher = QtGui.QLabel(self.pgIntExtra)
self.lblInterviewPublisher.setObjectName(_fromUtf8("lblInterviewPublisher"))
self.gridLayout_7.addWidget(self.lblInterviewPublisher, 7, 0, 1, 1)
self.gridLayout_5.addLayout(self.gridLayout_7, 0, 0, 1, 1)
self.tbxInterview.addItem(self.pgIntExtra, _fromUtf8(""))
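# Toolbox page 3: participants linked to the selected interview.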
self.pgIntParticipants = QtGui.QWidget()
self.pgIntParticipants.setGeometry(QtCore.QRect(0, 0, 226, 209))
self.pgIntParticipants.setObjectName(_fromUtf8("pgIntParticipants"))
self.gridLayout_4 = QtGui.QGridLayout(self.pgIntParticipants)
self.gridLayout_4.setMargin(0)
self.gridLayout_4.setObjectName(_fromUtf8("gridLayout_4"))
self.vlInterviewParticipants = QtGui.QVBoxLayout()
self.vlInterviewParticipants.setObjectName(_fromUtf8("vlInterviewParticipants"))
self.lblInterviewParticipants = QtGui.QLabel(self.pgIntParticipants)
self.lblInterviewParticipants.setObjectName(_fromUtf8("lblInterviewParticipants"))
self.vlInterviewParticipants.addWidget(self.lblInterviewParticipants)
self.tblInterviewParticipants = QtGui.QTableWidget(self.pgIntParticipants)
self.tblInterviewParticipants.setEditTriggers(QtGui.QAbstractItemView.NoEditTriggers)
self.tblInterviewParticipants.setSelectionMode(QtGui.QAbstractItemView.SingleSelection)
self.tblInterviewParticipants.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows)
self.tblInterviewParticipants.setObjectName(_fromUtf8("tblInterviewParticipants"))
self.tblInterviewParticipants.setColumnCount(0)
self.tblInterviewParticipants.setRowCount(0)
self.vlInterviewParticipants.addWidget(self.tblInterviewParticipants)
self.hlInterviewParticipants = QtGui.QHBoxLayout()
self.hlInterviewParticipants.setObjectName(_fromUtf8("hlInterviewParticipants"))
self.pbIntPartNew = QtGui.QPushButton(self.pgIntParticipants)
self.pbIntPartNew.setObjectName(_fromUtf8("pbIntPartNew"))
self.hlInterviewParticipants.addWidget(self.pbIntPartNew)
self.pbIntPartSave = QtGui.QPushButton(self.pgIntParticipants)
self.pbIntPartSave.setEnabled(False)
self.pbIntPartSave.setObjectName(_fromUtf8("pbIntPartSave"))
self.hlInterviewParticipants.addWidget(self.pbIntPartSave)
self.pbIntPartCancel = QtGui.QPushButton(self.pgIntParticipants)
self.pbIntPartCancel.setEnabled(False)
self.pbIntPartCancel.setObjectName(_fromUtf8("pbIntPartCancel"))
self.hlInterviewParticipants.addWidget(self.pbIntPartCancel)
self.pbIntPartDelete = QtGui.QPushButton(self.pgIntParticipants)
self.pbIntPartDelete.setEnabled(False)
self.pbIntPartDelete.setObjectName(_fromUtf8("pbIntPartDelete"))
self.hlInterviewParticipants.addWidget(self.pbIntPartDelete)
self.vlInterviewParticipants.addLayout(self.hlInterviewParticipants)
self.gridLayout_4.addLayout(self.vlInterviewParticipants, 0, 0, 1, 1)
self.glIntCont = QtGui.QGridLayout()
self.glIntCont.setObjectName(_fromUtf8("glIntCont"))
self.leIntPartFamily = QtGui.QLineEdit(self.pgIntParticipants)
self.leIntPartFamily.setObjectName(_fromUtf8("leIntPartFamily"))
self.glIntCont.addWidget(self.leIntPartFamily, 2, 2, 1, 1)
self.lblIntPartCommunity = QtGui.QLabel(self.pgIntParticipants)
self.lblIntPartCommunity.setObjectName(_fromUtf8("lblIntPartCommunity"))
self.glIntCont.addWidget(self.lblIntPartCommunity, 1, 0, 1, 1)
self.lblIntPartFamily = QtGui.QLabel(self.pgIntParticipants)
self.lblIntPartFamily.setObjectName(_fromUtf8("lblIntPartFamily"))
self.glIntCont.addWidget(self.lblIntPartFamily, 2, 0, 1, 1)
self.lblIntPartName = QtGui.QLabel(self.pgIntParticipants)
self.lblIntPartName.setObjectName(_fromUtf8("lblIntPartName"))
self.glIntCont.addWidget(self.lblIntPartName, 0, 0, 1, 1)
self.cbIntPartName = QtGui.QComboBox(self.pgIntParticipants)
self.cbIntPartName.setObjectName(_fromUtf8("cbIntPartName"))
self.glIntCont.addWidget(self.cbIntPartName, 0, 2, 1, 1)
self.leIntPartCommunity = QtGui.QLineEdit(self.pgIntParticipants)
self.leIntPartCommunity.setObjectName(_fromUtf8("leIntPartCommunity"))
self.glIntCont.addWidget(self.leIntPartCommunity, 1, 2, 1, 1)
self.gridLayout_4.addLayout(self.glIntCont, 1, 0, 1, 1)
self.tbxInterview.addItem(self.pgIntParticipants, _fromUtf8(""))
self.gridLayout_6.addWidget(self.tbxInterview, 0, 1, 1, 1)
self.twMapBioSettings.addTab(self.tbInterviews, _fromUtf8(""))
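# "About" tab: read-only rich-text description of the plugin.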
self.tbAbout = QtGui.QWidget()
self.tbAbout.setObjectName(_fromUtf8("tbAbout"))
self.gridLayout = QtGui.QGridLayout(self.tbAbout)
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.teAbout = QtGui.QTextEdit(self.tbAbout)
self.teAbout.setEnabled(True)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.MinimumExpanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.teAbout.sizePolicy().hasHeightForWidth())
self.teAbout.setSizePolicy(sizePolicy)
self.teAbout.setAcceptDrops(False)
self.teAbout.setUndoRedoEnabled(False)
self.teAbout.setReadOnly(True)
self.teAbout.setTextInteractionFlags(QtCore.Qt.TextBrowserInteraction)
self.teAbout.setObjectName(_fromUtf8("teAbout"))
self.gridLayout.addWidget(self.teAbout, 0, 0, 1, 1)
self.twMapBioSettings.addTab(self.tbAbout, _fromUtf8(""))
self.gridLayout_14.addWidget(self.twMapBioSettings, 0, 0, 1, 1)
self.pbDialogClose = QtGui.QPushButton(mapbioManager)
self.pbDialogClose.setObjectName(_fromUtf8("pbDialogClose"))
self.gridLayout_14.addWidget(self.pbDialogClose, 1, 0, 1, 1)
self.retranslateUi(mapbioManager)
self.twMapBioSettings.setCurrentIndex(0)
self.tbxParticipants.setCurrentIndex(0)
self.tbxInterview.setCurrentIndex(0)
QtCore.QMetaObject.connectSlotsByName(mapbioManager)
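# Keyboard tab traversal order for every widget in the dialog.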
mapbioManager.setTabOrder(self.twMapBioSettings, self.leProjectDir)
mapbioManager.setTabOrder(self.leProjectDir, self.tbSelectProjectDir)
mapbioManager.setTabOrder(self.tbSelectProjectDir, self.cbCurrentProject)
mapbioManager.setTabOrder(self.cbCurrentProject, self.pbSaveSettings)
mapbioManager.setTabOrder(self.pbSaveSettings, self.pbCancelSettings)
mapbioManager.setTabOrder(self.pbCancelSettings, self.pbDeleteProject)
mapbioManager.setTabOrder(self.pbDeleteProject, self.leQgsProject)
mapbioManager.setTabOrder(self.leQgsProject, self.tbSelectQgsProject)
mapbioManager.setTabOrder(self.tbSelectQgsProject, self.tblBaseGroups)
mapbioManager.setTabOrder(self.tblBaseGroups, self.cbProjectGroups)
mapbioManager.setTabOrder(self.cbProjectGroups, self.pbAddBaseGroup)
mapbioManager.setTabOrder(self.pbAddBaseGroup, self.pbRemoveBaseGroup)
mapbioManager.setTabOrder(self.pbRemoveBaseGroup, self.pbSaveProjectMapSettings)
mapbioManager.setTabOrder(self.pbSaveProjectMapSettings, self.pbCancelProjectMapSettings)
mapbioManager.setTabOrder(self.pbCancelProjectMapSettings, self.leProjectCode)
mapbioManager.setTabOrder(self.leProjectCode, self.pteProjectDescription)
mapbioManager.setTabOrder(self.pteProjectDescription, self.pteContentCodes)
mapbioManager.setTabOrder(self.pteContentCodes, self.cbDefaultCode)
mapbioManager.setTabOrder(self.cbDefaultCode, self.cbPointCode)
mapbioManager.setTabOrder(self.cbPointCode, self.cbLineCode)
mapbioManager.setTabOrder(self.cbLineCode, self.cbPolygonCode)
mapbioManager.setTabOrder(self.cbPolygonCode, self.pbProjectDetailsSave)
mapbioManager.setTabOrder(self.pbProjectDetailsSave, self.pbProjectDetailsCancel)
mapbioManager.setTabOrder(self.pbProjectDetailsCancel, self.tblParticipants)
mapbioManager.setTabOrder(self.tblParticipants, self.pbParticipantNew)
mapbioManager.setTabOrder(self.pbParticipantNew, self.pbParticipantSave)
mapbioManager.setTabOrder(self.pbParticipantSave, self.pbParticipantCancel)
mapbioManager.setTabOrder(self.pbParticipantCancel, self.pbParticipantDelete)
mapbioManager.setTabOrder(self.pbParticipantDelete, self.leParticipantCode)
mapbioManager.setTabOrder(self.leParticipantCode, self.leFirstName)
mapbioManager.setTabOrder(self.leFirstName, self.leLastName)
mapbioManager.setTabOrder(self.leLastName, self.leEmail)
mapbioManager.setTabOrder(self.leEmail, self.leCommunity)
mapbioManager.setTabOrder(self.leCommunity, self.leFamily)
mapbioManager.setTabOrder(self.leFamily, self.leMaidenName)
mapbioManager.setTabOrder(self.leMaidenName, self.cbGender)
mapbioManager.setTabOrder(self.cbGender, self.cbMaritalStatus)
mapbioManager.setTabOrder(self.cbMaritalStatus, self.leBirthDate)
mapbioManager.setTabOrder(self.leBirthDate, self.leParticipantTags)
mapbioManager.setTabOrder(self.leParticipantTags, self.pteParticipantNote)
mapbioManager.setTabOrder(self.pteParticipantNote, self.tblAddresses)
mapbioManager.setTabOrder(self.tblAddresses, self.pbAddNew)
mapbioManager.setTabOrder(self.pbAddNew, self.pbAddSave)
mapbioManager.setTabOrder(self.pbAddSave, self.pbAddCancel)
mapbioManager.setTabOrder(self.pbAddCancel, self.pbAddDelete)
mapbioManager.setTabOrder(self.pbAddDelete, self.tblTelecoms)
mapbioManager.setTabOrder(self.tblTelecoms, self.pbTelNew)
mapbioManager.setTabOrder(self.pbTelNew, self.pbTelSave)
mapbioManager.setTabOrder(self.pbTelSave, self.pbTelCancel)
mapbioManager.setTabOrder(self.pbTelCancel, self.pbTelDelete)
mapbioManager.setTabOrder(self.pbTelDelete, self.cbTelType)
mapbioManager.setTabOrder(self.cbTelType, self.leTelNumber)
mapbioManager.setTabOrder(self.leTelNumber, self.tblInterviews)
mapbioManager.setTabOrder(self.tblInterviews, self.pbIntNew)
mapbioManager.setTabOrder(self.pbIntNew, self.pbIntSave)
mapbioManager.setTabOrder(self.pbIntSave, self.pbIntCancel)
mapbioManager.setTabOrder(self.pbIntCancel, self.pbIntDelete)
mapbioManager.setTabOrder(self.pbIntDelete, self.leInterviewCode)
mapbioManager.setTabOrder(self.leInterviewCode, self.leInterviewTitle)
mapbioManager.setTabOrder(self.leInterviewTitle, self.pteInterviewDescription)
mapbioManager.setTabOrder(self.pteInterviewDescription, self.leInterviewLocation)
mapbioManager.setTabOrder(self.leInterviewLocation, self.pteInterviewNote)
mapbioManager.setTabOrder(self.pteInterviewNote, self.leInterviewTags)
mapbioManager.setTabOrder(self.leInterviewTags, self.cbInterviewSecurity)
mapbioManager.setTabOrder(self.cbInterviewSecurity, self.cbInterviewStatus)
mapbioManager.setTabOrder(self.cbInterviewStatus, self.leSubject)
mapbioManager.setTabOrder(self.leSubject, self.leLanguage)
mapbioManager.setTabOrder(self.leLanguage, self.pteSource)
mapbioManager.setTabOrder(self.pteSource, self.pteCitation)
mapbioManager.setTabOrder(self.pteCitation, self.pteRightsStatement)
mapbioManager.setTabOrder(self.pteRightsStatement, self.leRightsHolder)
mapbioManager.setTabOrder(self.leRightsHolder, self.tblInterviewParticipants)
mapbioManager.setTabOrder(self.tblInterviewParticipants, self.pbIntPartNew)
mapbioManager.setTabOrder(self.pbIntPartNew, self.pbIntPartSave)
mapbioManager.setTabOrder(self.pbIntPartSave, self.pbIntPartCancel)
mapbioManager.setTabOrder(self.pbIntPartCancel, self.pbIntPartDelete)
mapbioManager.setTabOrder(self.pbIntPartDelete, self.cbIntPartName)
mapbioManager.setTabOrder(self.cbIntPartName, self.leIntPartCommunity)
mapbioManager.setTabOrder(self.leIntPartCommunity, self.leIntPartFamily)
mapbioManager.setTabOrder(self.leIntPartFamily, self.teAbout)
mapbioManager.setTabOrder(self.teAbout, self.pbDialogClose)
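# Sets all user-visible strings; called once from setupUi and again whenever
# the application language changes.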
def retranslateUi(self, mapbioManager):
mapbioManager.setWindowTitle(_translate("mapbioManager", "Dialog", None))
self.tbSelectProjectDir.setToolTip(_translate("mapbioManager", "<html><head/><body><p>Click here to set the LOUIS Map Biographer default projects directory.</p></body></html>", None))
self.tbSelectProjectDir.setText(_translate("mapbioManager", "...", None))
self.lblCurrentProject.setText(_translate("mapbioManager", "Select or Create Project", None))
self.lblProjectsDir.setText(_translate("mapbioManager", "Projects Directory", None))
self.lblOggEnc.setText(_translate("mapbioManager", "Location of oggenc program", None))
self.tbOggEnc.setText(_translate("mapbioManager", "...", None))
self.pbSaveSettings.setText(_translate("mapbioManager", "Save Settings", None))
self.pbCancelSettings.setText(_translate("mapbioManager", "Cancel Settings", None))
self.pbDeleteProject.setText(_translate("mapbioManager", "Delete Current Project", None))
self.pbTransfer.setText(_translate("mapbioManager", "Transfer Data", None))
self.pbSystemTest.setToolTip(_translate("mapbioManager", "Test if audio and other libraries are present and available", None))
self.pbSystemTest.setText(_translate("mapbioManager", "Check System", None))
self.lblProjectMapSettings.setText(_translate("mapbioManager", "Project Map Settings", None))
self.lblQgsProject.setText(_translate("mapbioManager", "QGIS Project", None))
self.tbSelectQgsProject.setToolTip(_translate("mapbioManager", "<html><head/><body><p>Click here to select the QGIS project to use with LOUIS Map Biographer</p></body></html>", None))
self.tbSelectQgsProject.setText(_translate("mapbioManager", "...", None))
self.lblBaseLayerGroups.setText(_translate("mapbioManager", "Base Layer Groups ", None))
self.lblBaseGroupOptions.setText(_translate("mapbioManager", "Use project groups as base layers ", None))
self.pbAddBaseGroup.setText(_translate("mapbioManager", "Add", None))
self.pbRemoveBaseGroup.setText(_translate("mapbioManager", "Remove", None))
self.lblMaxScale.setText(_translate("mapbioManager", "Maximum Map Scale", None))
self.cbMaxScale.setItemText(0, _translate("mapbioManager", "1:1,100", None))
self.cbMaxScale.setItemText(1, _translate("mapbioManager", "1:2,200", None))
self.cbMaxScale.setItemText(2, _translate("mapbioManager", "1:4,500", None))
self.cbMaxScale.setItemText(3, _translate("mapbioManager", "1:9,000", None))
self.cbMaxScale.setItemText(4, _translate("mapbioManager", "1:18,000", None))
self.cbMaxScale.setItemText(5, _translate("mapbioManager", "1:36,000", None))
self.cbMaxScale.setItemText(6, _translate("mapbioManager", "1:72,000", None))
self.cbMaxScale.setItemText(7, _translate("mapbioManager", "1:144,000", None))
self.cbMaxScale.setItemText(8, _translate("mapbioManager", "1:288,000", None))
self.cbMaxScale.setItemText(9, _translate("mapbioManager", "1:577,000", None))
self.cbMaxScale.setItemText(10, _translate("mapbioManager", "1:1,150,000", None))
self.cbMaxScale.setItemText(11, _translate("mapbioManager", "1:2,300,000", None))
self.cbMaxScale.setItemText(12, _translate("mapbioManager", "1:4,600,000", None))
self.lblMinScale.setText(_translate("mapbioManager", "Minimum Map Scale", None))
self.cbMinScale.setItemText(0, _translate("mapbioManager", "1:2,200", None))
self.cbMinScale.setItemText(1, _translate("mapbioManager", "1:4,500", None))
self.cbMinScale.setItemText(2, _translate("mapbioManager", "1:9,000", None))
self.cbMinScale.setItemText(3, _translate("mapbioManager", "1:18,000", None))
self.cbMinScale.setItemText(4, _translate("mapbioManager", "1:36,000", None))
self.cbMinScale.setItemText(5, _translate("mapbioManager", "1:72,000", None))
self.cbMinScale.setItemText(6, _translate("mapbioManager", "1:144,000", None))
self.cbMinScale.setItemText(7, _translate("mapbioManager", "1:288,000", None))
self.cbMinScale.setItemText(8, _translate("mapbioManager", "1:577,000", None))
self.cbMinScale.setItemText(9, _translate("mapbioManager", "1:1,150,000", None))
self.cbMinScale.setItemText(10, _translate("mapbioManager", "1:2,300,000", None))
self.cbMinScale.setItemText(11, _translate("mapbioManager", "1:4,600,000", None))
self.cbMinScale.setItemText(12, _translate("mapbioManager", "1:9,200,000", None))
self.lblZoomNotices.setText(_translate("mapbioManager", "Show Zoom Range Notices", None))
self.cbZoomRangeNotices.setItemText(0, _translate("mapbioManager", "No", None))
self.cbZoomRangeNotices.setItemText(1, _translate("mapbioManager", "Yes", None))
self.lblProjectBoundary.setText(_translate("mapbioManager", "Project Boundary Layer ", None))
self.lblEnableReference.setText(_translate("mapbioManager", "Enable Reference Layer", None))
self.lblReferenceLayer.setText(_translate("mapbioManager", "Project Reference Layer", None))
self.cbEnableReference.setItemText(0, _translate("mapbioManager", "No", None))
self.cbEnableReference.setItemText(1, _translate("mapbioManager", "Yes", None))
self.pbSaveProjectMapSettings.setText(_translate("mapbioManager", "Save Project Map Settings", None))
self.pbCancelProjectMapSettings.setText(_translate("mapbioManager", "Cancel", None))
self.twMapBioSettings.setTabText(self.twMapBioSettings.indexOf(self.tbSettings), _translate("mapbioManager", "Map Biographer Settings", None))
self.lblProjCode.setText(_translate("mapbioManager", "Project Code", None))
self.leProjectCode.setToolTip(_translate("mapbioManager", "<html><head/><body><p>Enter a code for this project.</p></body></html>", None))
self.lblProjDescription.setText(_translate("mapbioManager", "Project Description", None))
self.pteProjectDescription.setToolTip(_translate("mapbioManager", "<html><head/><body><p>Enter a brief description of the project here.</p></body></html>", None))
self.cbUseHeritage.setText(_translate("mapbioManager", "Check if this project is used with LOUIS Heritage", None))
self.lblProjOther.setText(_translate("mapbioManager", "LOUIS Heritage Other Information", None))
self.pteProjectOther.setToolTip(_translate("mapbioManager", "<html><head/><body><p>Additional information about the project recorded in LOUIS Heritage is shown here. It cannot be edited.</p></body></html>", None))
self.lblCustomFields.setText(_translate("mapbioManager", "LOUIS Heritage Custom Fields", None))
self.pteCustomFields.setToolTip(_translate("mapbioManager", "<html><head/><body><p>Custom fields defined in LOUIS Heritage are listed here. They cannot be edited.</p></body></html>", None))
self.lblDateAndTime.setText(_translate("mapbioManager", "Project Use Period Definitions", None))
self.cbUsePeriod.setText(_translate("mapbioManager", "Enabled", None))
self.pteDateAndTime.setToolTip(_translate("mapbioManager", "<html><head/><body><p>Enter use period ranges using the following format:</p><p>1900-01-01 : 1967-12-31 = Before Bennett Dam<br/>1968-01-01 : 2014-08-31 = After Bennett Dam until today</p></body></html>", None))
self.lblTimeOfYear.setText(_translate("mapbioManager", "Project Times of Year Definitions", None))
self.cbTimeOfYear.setText(_translate("mapbioManager", "Enabled", None))
self.pteTimeOfYear.setToolTip(_translate("mapbioManager", "<html><head/><body><p>Enter time of year options (one per line) as a list of months and a name, as in the following example:</p><p>12,1,2 = Winter<br/>3,4,5 = Spring<br/>6,7,8 = Summer<br/>9,10,11 = Autumn</p></body></html>", None))
self.lblContentCodes.setText(_translate("mapbioManager", "Project Content Code Definitions", None))
self.tbSortCodes.setText(_translate("mapbioManager", "Sort Codes", None))
self.pteContentCodes.setToolTip(_translate("mapbioManager", "<html><head/><body><p>Enter a list of codes (one per line) in the following format:</p><p>BP = Berry Picking<br/>CB = Cabin</p></body></html>", None))
self.lblDefaultCodes.setText(_translate("mapbioManager", "Project Default Feature Codes", None))
self.lblDefaultCode.setText(_translate("mapbioManager", "Non-Spatial", None))
self.lblDefaultPointCode.setText(_translate("mapbioManager", "Point", None))
self.lblDefaultLineCode.setText(_translate("mapbioManager", "Line", None))
self.lblDefaultPolygonCode.setText(_translate("mapbioManager", "Polygon", None))
self.pbProjectDetailsSave.setText(_translate("mapbioManager", "Save", None))
self.pbProjectDetailsCancel.setText(_translate("mapbioManager", "Cancel", None))
self.twMapBioSettings.setTabText(self.twMapBioSettings.indexOf(self.tbProjectDetails), _translate("mapbioManager", "Project Details", None))
self.lblParticipants.setText(_translate("mapbioManager", "Participants", None))
self.tblParticipants.setSortingEnabled(True)
self.pbParticipantNew.setText(_translate("mapbioManager", "New", None))
self.pbParticipantSave.setText(_translate("mapbioManager", "Save", None))
self.pbParticipantCancel.setText(_translate("mapbioManager", "Cancel", None))
self.pbParticipantDelete.setText(_translate("mapbioManager", "Delete", None))
self.lblEmail.setText(_translate("mapbioManager", "Email Address", None))
self.lblFamilyGroup.setText(_translate("mapbioManager", "Family Group", None))
self.lblFirstName.setText(_translate("mapbioManager", "First Name", None))
self.lblLastName.setText(_translate("mapbioManager", "Last Name", None))
self.lblParticipantCode.setText(_translate("mapbioManager", "Participant Code", None))
self.lblCommunityName.setText(_translate("mapbioManager", "Community", None))
self.lblMaidenName.setText(_translate("mapbioManager", "Maiden Name", None))
self.lblGender.setText(_translate("mapbioManager", "Gender", None))
self.cbGender.setItemText(0, _translate("mapbioManager", "Unknown", None))
self.cbGender.setItemText(1, _translate("mapbioManager", "Male", None))
self.cbGender.setItemText(2, _translate("mapbioManager", "Female", None))
self.cbGender.setItemText(3, _translate("mapbioManager", "Refused", None))
self.cbGender.setItemText(4, _translate("mapbioManager", "Other", None))
self.lblMaritalStatus.setText(_translate("mapbioManager", "Marital Status", None))
self.cbMaritalStatus.setItemText(0, _translate("mapbioManager", "Unknown", None))
self.cbMaritalStatus.setItemText(1, _translate("mapbioManager", "Single", None))
self.cbMaritalStatus.setItemText(2, _translate("mapbioManager", "Married", None))
self.cbMaritalStatus.setItemText(3, _translate("mapbioManager", "Divorced", None))
self.cbMaritalStatus.setItemText(4, _translate("mapbioManager", "Refused", None))
self.cbMaritalStatus.setItemText(5, _translate("mapbioManager", "Other", None))
self.lblParticipantNote.setText(_translate("mapbioManager", "Note", None))
self.lblParticipantTags.setText(_translate("mapbioManager", "Tags", None))
self.lblBirthDate.setText(_translate("mapbioManager", "Birth Date", None))
self.leBirthDate.setInputMask(_translate("mapbioManager", "9999-99-99; ", None))
self.tbxParticipants.setItemText(self.tbxParticipants.indexOf(self.pgBasicInfo), _translate("mapbioManager", "Basic Information", None))
self.tblAddresses.setSortingEnabled(True)
self.pbAddNew.setText(_translate("mapbioManager", "New", None))
self.pbAddSave.setText(_translate("mapbioManager", "Save", None))
self.pbAddCancel.setText(_translate("mapbioManager", "Cancel", None))
self.pbAddDelete.setText(_translate("mapbioManager", "Delete", None))
self.lblAddress.setText(_translate("mapbioManager", "Address ", None))
self.lblAddType.setText(_translate("mapbioManager", "Type", None))
self.cbAddType.setItemText(0, _translate("mapbioManager", "Home", None))
self.cbAddType.setItemText(1, _translate("mapbioManager", "Work", None))
self.cbAddType.setItemText(2, _translate("mapbioManager", "PO Box", None))
self.cbAddType.setItemText(3, _translate("mapbioManager", "Other", None))
self.tbxParticipants.setItemText(self.tbxParticipants.indexOf(self.pgAddresses), _translate("mapbioManager", "Addresses", None))
self.tblTelecoms.setSortingEnabled(True)
self.pbTelNew.setText(_translate("mapbioManager", "New", None))
self.pbTelSave.setText(_translate("mapbioManager", "Save", None))
self.pbTelCancel.setText(_translate("mapbioManager", "Cancel", None))
self.pbTelDelete.setText(_translate("mapbioManager", "Delete", None))
self.lblTelType.setText(_translate("mapbioManager", "Type", None))
self.lblTelecom.setText(_translate("mapbioManager", "Number", None))
self.cbTelType.setItemText(0, _translate("mapbioManager", "Home", None))
self.cbTelType.setItemText(1, _translate("mapbioManager", "Work", None))
self.cbTelType.setItemText(2, _translate("mapbioManager", "Mobile", None))
self.cbTelType.setItemText(3, _translate("mapbioManager", "Pager", None))
self.cbTelType.setItemText(4, _translate("mapbioManager", "Fax", None))
self.cbTelType.setItemText(5, _translate("mapbioManager", "Other", None))
self.tbxParticipants.setItemText(self.tbxParticipants.indexOf(self.pgTelecoms), _translate("mapbioManager", "Phones / Faxes", None))
self.twMapBioSettings.setTabText(self.twMapBioSettings.indexOf(self.tbPeople), _translate("mapbioManager", "Participants", None))
self.lblInterviews.setText(_translate("mapbioManager", "Interview List", None))
self.tblInterviews.setSortingEnabled(False)
self.pbIntNew.setText(_translate("mapbioManager", "New", None))
self.pbIntCopy.setText(_translate("mapbioManager", "Copy", None))
self.pbIntSave.setText(_translate("mapbioManager", "Save", None))
self.pbIntCancel.setText(_translate("mapbioManager", "Cancel", None))
self.pbIntDelete.setText(_translate("mapbioManager", "Delete", None))
self.lblStartDateTime.setText(_translate("mapbioManager", "Start Date / Time", None))
self.dteStart.setDisplayFormat(_translate("mapbioManager", "yyyy-MM-dd HH:mm", None))
self.tbUDStart.setToolTip(_translate("mapbioManager", "Update start from interview", None))
self.tbUDStart.setText(_translate("mapbioManager", "S", None))
self.cbInterviewSecurity.setItemText(0, _translate("mapbioManager", "Public", None))
self.cbInterviewSecurity.setItemText(1, _translate("mapbioManager", "Community", None))
self.cbInterviewSecurity.setItemText(2, _translate("mapbioManager", "Private", None))
self.lblCode.setText(_translate("mapbioManager", "Interview Code", None))
self.lblTitle.setText(_translate("mapbioManager", "Interview Title", None))
self.lblLocation.setText(_translate("mapbioManager", "Interview Location", None))
self.lblSecurity.setText(_translate("mapbioManager", "Default Security", None))
self.lblNote.setText(_translate("mapbioManager", "Note", None))
self.lblTags.setText(_translate("mapbioManager", "Tags", None))
self.lblDescription.setText(_translate("mapbioManager", "Description", None))
self.lbInterviewStatus.setText(_translate("mapbioManager", "Interview Status", None))
self.cbInterviewStatus.setItemText(0, _translate("mapbioManager", "New", None))
self.cbInterviewStatus.setItemText(1, _translate("mapbioManager", "Completed", None))
self.cbInterviewStatus.setItemText(2, _translate("mapbioManager", "Transcribed", None))
self.cbInterviewStatus.setItemText(3, _translate("mapbioManager", "Uploaded", None))
self.lblEndDateTime.setText(_translate("mapbioManager", "End Date / Time", None))
self.dteEnd.setDisplayFormat(_translate("mapbioManager", "yyyy-MM-dd HH:mm", None))
self.tbUDEnd.setToolTip(_translate("mapbioManager", "Update end from interview", None))
self.tbUDEnd.setText(_translate("mapbioManager", "E", None))
self.tbxInterview.setItemText(self.tbxInterview.indexOf(self.pgIntBasic), _translate("mapbioManager", "Interview Basic Information", None))
self.lblCitation.setText(_translate("mapbioManager", "Citation", None))
self.lblRightsStatement.setText(_translate("mapbioManager", "Rights Statement", None))
self.lblSource.setText(_translate("mapbioManager", "Source", None))
self.lblSubject.setText(_translate("mapbioManager", "Subject", None))
self.lblLanguage.setText(_translate("mapbioManager", "Language", None))
self.lblRightsHolder.setText(_translate("mapbioManager", "Rights Holder", None))
self.lblInterviewCreator.setText(_translate("mapbioManager", "Interview Creator", None))
self.lblInterviewPublisher.setText(_translate("mapbioManager", "Interview Publisher", None))
self.tbxInterview.setItemText(self.tbxInterview.indexOf(self.pgIntExtra), _translate("mapbioManager", "Interview Extra Information", None))
self.lblInterviewParticipants.setText(_translate("mapbioManager", "Participants", None))
self.tblInterviewParticipants.setSortingEnabled(True)
self.pbIntPartNew.setText(_translate("mapbioManager", "New", None))
self.pbIntPartSave.setText(_translate("mapbioManager", "Save", None))
self.pbIntPartCancel.setText(_translate("mapbioManager", "Cancel", None))
self.pbIntPartDelete.setText(_translate("mapbioManager", "Delete", None))
self.lblIntPartCommunity.setText(_translate("mapbioManager", "Community", None))
self.lblIntPartFamily.setText(_translate("mapbioManager", "Family Group", None))
self.lblIntPartName.setText(_translate("mapbioManager", "Name", None))
self.tbxInterview.setItemText(self.tbxInterview.indexOf(self.pgIntParticipants), _translate("mapbioManager", "Interview Participants", None))
self.twMapBioSettings.setTabText(self.twMapBioSettings.indexOf(self.tbInterviews), _translate("mapbioManager", "Interviews", None))
self.teAbout.setHtml(_translate("mapbioManager", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Sans Serif\'; font-size:10pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'Cantarell\'; font-size:11pt; font-weight:600;\">LOUIS Map Biographer Version 1.3.2</span></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-family:\'Cantarell\'; font-size:11pt; font-weight:600;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'Cantarell\'; font-size:11pt;\">For additional information about this tool please visit </span><a href=\"https://www.louistoolkit.ca/ourtools/map-biographer/\"><span style=\" font-family:\'Cantarell\'; font-size:11pt; text-decoration: underline; color:#0000ff;\">https://www.louistoolkit.ca/ourtools/map-biographer/</span></a><span style=\" font-family:\'Cantarell\'; font-size:11pt;\">.</span></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-family:\'Cantarell\'; font-size:11pt;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'Cantarell\'; font-size:11pt;\">To post questions or provide suggestions see our contact page at </span><a href=\"https://www.louistoolkit.ca/contact-us/\"><span style=\" font-family:\'Cantarell\'; font-size:11pt; text-decoration: underline; color:#0000ff;\">https://www.louistoolkit.ca/contact-us/</span></a><span style=\" font-family:\'Cantarell\'; font-size:11pt;\">.</span></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-family:\'Cantarell\'; font-size:11pt;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'Cantarell\'; font-size:11pt; font-weight:600;\">ABOUT LOUIS Map Biographer</span></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-family:\'Cantarell\'; font-size:11pt; font-weight:600;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'Cantarell\'; font-size:11pt;\">LOUIS Map Biographer is a free / open source solution for direct to digital capture of land use information through in-person interviews. LOUIS Map Biographer is designed to support the best practice ideas for Use and Occupancy mapping studies outlined in </span><span style=\" font-family:\'Cantarell\'; font-size:11pt; font-style:italic;\">Living Proof</span><span style=\" font-family:\'Cantarell\'; font-size:11pt; vertical-align:super;\">1</span><span style=\" font-family:\'Cantarell\'; font-size:11pt;\">. </span></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-family:\'Cantarell\'; font-size:11pt;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'Cantarell\'; font-size:11pt;\">LOUIS Map Biographer is part of the Land Occupancy and Use Information Systems (LOUIS) Toolkit. LOUIS Map Biographer can be used on its own or together with other parts of the LOUIS tool kit. For more information about LOUIS Toolkit visit: </span><a href=\"https://www.louistoolkit.ca/\"><span style=\" font-family:\'Cantarell\'; font-size:11pt; text-decoration: underline; color:#0000ff;\">https://www.louistoolkit.ca/</span></a></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-family:\'Cantarell\'; font-size:11pt;\"><br /></p>\n"
"<hr />\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'Cantarell\'; font-size:11pt;\">1 - Tobias, Terry N., 2009. </span><a href=\"http://ecotrust.ca/report/living-proof-use-and-occupancy-mapping/\"><span style=\" font-family:\'Cantarell\'; font-size:11pt; text-decoration: underline; color:#0000ff;\">Living Proof: The essential data-collection guide for Indigenous Use-and-Occupancy map surveys</span></a><span style=\" font-family:\'Cantarell\'; font-size:11pt;\">. Ecostrust Canada and Union of BC Indian Chiefs, British Columbia, Canada.</span></p></body></html>", None))
self.twMapBioSettings.setTabText(self.twMapBioSettings.indexOf(self.tbAbout), _translate("mapbioManager", "About", None))
self.pbDialogClose.setText(_translate("mapbioManager", "Close", None))
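
# ---------------------------------------------------------------------------
# Minimal manual test harness. This is a sketch, not part of the pyuic4
# output: it assumes the enclosing generated class above follows the pyuic4
# naming convention (Ui_mapbioManager) and that PyQt4's QtGui is imported at
# the top of this module, as the code above requires. Running the module
# directly then displays the manager dialog for a quick visual check.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    import sys
    app = QtGui.QApplication(sys.argv)
    dialog = QtGui.QDialog()
    ui = Ui_mapbioManager()
    ui.setupUi(dialog)
    dialog.show()
    sys.exit(app.exec_())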
| gpl-2.0 |