| repo_name (stringlengths 5-100) | path (stringlengths 4-375) | copies (stringclasses, 991 values) | size (stringlengths 4-7) | content (stringlengths 666-1M) | license (stringclasses, 15 values) |
---|---|---|---|---|---|
wogsland/QSTK | build/lib.linux-x86_64-2.7/QSTK/qstkfeat/classes.py | 8 | 1658 | '''
(c) 2011, 2012 Georgia Tech Research Corporation
This source code is released under the New BSD license. Please see
http://wiki.quantsoftware.org/index.php?title=QSTK_License
for license details.
Created on Nov 7, 2011
@author: John Cornwell
@contact: [email protected]
@summary: File containing various classification functions
'''
# 3rd Party Imports
import pandas as pand
import numpy as np
def class_fut_ret( d_data, i_lookforward=21, s_rel=None, b_use_open=False ):
'''
@summary: Calculate classification values based on future returns
@param d_data: Dictionary of data to use
@param i_lookforward: Number of days to look into the future
@param s_rel: Stock symbol that this should be relative to, usually $SPX.
@param b_use_open: If True, stock will be purchased at T+1 open and sold at
T+i_lookforward close
@return: DataFrame of future returns; the final i_lookforward rows are NaN
'''
if b_use_open:
df_val = d_data['open'].copy()
else:
df_val = d_data['close'].copy()
na_val = df_val.values
if b_use_open:
na_val[:-(i_lookforward + 1), :] = ((na_val[i_lookforward + 1:, :] -
na_val[1:-(i_lookforward), :]) /
na_val[1:-(i_lookforward), :])
na_val[-(i_lookforward+1):, :] = np.nan
else:
na_val[:-i_lookforward, :] = ((na_val[i_lookforward:, :] -
na_val[:-i_lookforward, :]) /
na_val[:-i_lookforward, :])
na_val[-i_lookforward:, :] = np.nan
return df_val
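# Hedged usage sketch (not part of the original module): assuming d_data maps
# 'open' and 'close' to aligned pandas price DataFrames, a 21-day
# future-return classification could be computed like this:
#
#   import pandas as pand
#   prices = pand.DataFrame({'AAPL': range(1, 61)}, dtype=float)
#   d_data = {'open': prices, 'close': prices}
#   df_labels = class_fut_ret(d_data, i_lookforward=21)
#
# The final 21 rows of df_labels are NaN because the lookforward window runs
# past the end of the data.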
if __name__ == '__main__':
pass
| bsd-3-clause |
Gui13/CouchPotatoServer | libs/subliminal/core.py | 46 | 12840 | # -*- coding: utf-8 -*-
# Copyright 2011-2012 Antoine Bertin <[email protected]>
#
# This file is part of subliminal.
#
# subliminal is free software; you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# subliminal is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with subliminal. If not, see <http://www.gnu.org/licenses/>.
from .exceptions import DownloadFailedError
from .services import ServiceConfig
from .tasks import DownloadTask, ListTask
from .utils import get_keywords
from .videos import Episode, Movie, scan
from .language import Language
from collections import defaultdict
from itertools import groupby
import bs4
import guessit
import logging
__all__ = ['SERVICES', 'LANGUAGE_INDEX', 'SERVICE_INDEX', 'SERVICE_CONFIDENCE', 'MATCHING_CONFIDENCE',
'create_list_tasks', 'create_download_tasks', 'consume_task', 'matching_confidence',
'key_subtitles', 'group_by_video']
logger = logging.getLogger(__name__)
SERVICES = ['opensubtitles', 'bierdopje', 'subswiki', 'subtitulos', 'thesubdb', 'addic7ed', 'tvsubtitles']
LANGUAGE_INDEX, SERVICE_INDEX, SERVICE_CONFIDENCE, MATCHING_CONFIDENCE = range(4)
def create_list_tasks(paths, languages, services, force, multi, cache_dir, max_depth, scan_filter):
"""Create a list of :class:`~subliminal.tasks.ListTask` from one or more paths using the given criteria
:param paths: path(s) to video file or folder
:type paths: string or list
:param set languages: languages to search for
:param list services: services to use for the search
:param bool force: force searching for subtitles even if some are detected
:param bool multi: search multiple languages for the same video
:param string cache_dir: path to the cache directory to use
:param int max_depth: maximum depth for scanning entries
:param function scan_filter: filter function that takes a path as argument and returns a boolean indicating whether it has to be filtered out (``True``) or not (``False``)
:return: the created tasks
:rtype: list of :class:`~subliminal.tasks.ListTask`
"""
scan_result = []
for p in paths:
scan_result.extend(scan(p, max_depth, scan_filter))
logger.debug(u'Found %d videos in %r with maximum depth %d' % (len(scan_result), paths, max_depth))
tasks = []
config = ServiceConfig(multi, cache_dir)
services = filter_services(services)
for video, detected_subtitles in scan_result:
detected_languages = set(s.language for s in detected_subtitles)
wanted_languages = languages.copy()
if not force and multi:
wanted_languages -= detected_languages
if not wanted_languages:
logger.debug(u'No need to list multi subtitles %r for %r because %r detected' % (languages, video, detected_languages))
continue
if not force and not multi and Language('Undetermined') in detected_languages:
logger.debug(u'No need to list single subtitles %r for %r because one detected' % (languages, video))
continue
logger.debug(u'Listing subtitles %r for %r with services %r' % (wanted_languages, video, services))
for service_name in services:
mod = __import__('services.' + service_name, globals=globals(), locals=locals(), fromlist=['Service'], level=-1)
service = mod.Service
if not service.check_validity(video, wanted_languages):
continue
task = ListTask(video, wanted_languages & service.languages, service_name, config)
logger.debug(u'Created task %r' % task)
tasks.append(task)
return tasks
def create_download_tasks(subtitles_by_video, languages, multi):
"""Create a list of :class:`~subliminal.tasks.DownloadTask` from a list results grouped by video
:param subtitles_by_video: :class:`~subliminal.tasks.ListTask` results with ordered subtitles
:type subtitles_by_video: dict of :class:`~subliminal.videos.Video` => [:class:`~subliminal.subtitles.Subtitle`]
:param languages: languages in preferred order
:type languages: :class:`~subliminal.language.language_list`
:param bool multi: download multiple languages for the same video
:return: the created tasks
:rtype: list of :class:`~subliminal.tasks.DownloadTask`
"""
tasks = []
for video, subtitles in subtitles_by_video.iteritems():
if not subtitles:
continue
if not multi:
task = DownloadTask(video, list(subtitles))
logger.debug(u'Created task %r' % task)
tasks.append(task)
continue
for _, by_language in groupby(subtitles, lambda s: languages.index(s.language)):
task = DownloadTask(video, list(by_language))
logger.debug(u'Created task %r' % task)
tasks.append(task)
return tasks
def consume_task(task, services=None):
"""Consume a task. If the ``services`` parameter is given, the function will attempt
to get the service from it. In case the service is not in ``services``, it will be initialized
and put in ``services``
:param task: task to consume
:type task: :class:`~subliminal.tasks.ListTask` or :class:`~subliminal.tasks.DownloadTask`
:param dict services: mapping between the service name and an instance of this service
:return: the result of the task
:rtype: list of :class:`~subliminal.subtitles.ResultSubtitle`
"""
if services is None:
services = {}
logger.info(u'Consuming %r' % task)
result = None
if isinstance(task, ListTask):
service = get_service(services, task.service, config=task.config)
result = service.list(task.video, task.languages)
elif isinstance(task, DownloadTask):
for subtitle in task.subtitles:
service = get_service(services, subtitle.service)
try:
service.download(subtitle)
result = [subtitle]
break
except DownloadFailedError:
logger.warning(u'Could not download subtitle %r, trying next' % subtitle)
continue
if result is None:
logger.error(u'No subtitles could be downloaded for video %r' % task.video)
return result
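# Illustrative usage (hypothetical `tasks` list): sharing one `services` dict
# across calls lets consume_task reuse initialized service instances instead
# of re-creating them per task:
#
#   services = {}
#   results = [consume_task(task, services) for task in tasks]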
def matching_confidence(video, subtitle):
"""Compute the probability (confidence) that the subtitle matches the video
:param video: video to match
:type video: :class:`~subliminal.videos.Video`
:param subtitle: subtitle to match
:type subtitle: :class:`~subliminal.subtitles.Subtitle`
:return: the matching probability
:rtype: float
"""
guess = guessit.guess_file_info(subtitle.release, 'autodetect')
video_keywords = get_keywords(video.guess)
subtitle_keywords = get_keywords(guess) | subtitle.keywords
logger.debug(u'Video keywords %r - Subtitle keywords %r' % (video_keywords, subtitle_keywords))
replacement = {'keywords': len(video_keywords & subtitle_keywords)}
if isinstance(video, Episode):
replacement.update({'series': 0, 'season': 0, 'episode': 0})
matching_format = '{series:b}{season:b}{episode:b}{keywords:03b}'
best = matching_format.format(series=1, season=1, episode=1, keywords=len(video_keywords))
if guess['type'] in ['episode', 'episodesubtitle']:
if 'series' in guess and guess['series'].lower() == video.series.lower():
replacement['series'] = 1
if 'season' in guess and guess['season'] == video.season:
replacement['season'] = 1
if 'episodeNumber' in guess and guess['episodeNumber'] == video.episode:
replacement['episode'] = 1
elif isinstance(video, Movie):
replacement.update({'title': 0, 'year': 0})
matching_format = '{title:b}{year:b}{keywords:03b}'
best = matching_format.format(title=1, year=1, keywords=len(video_keywords))
if guess['type'] in ['movie', 'moviesubtitle']:
if 'title' in guess and guess['title'].lower() == video.title.lower():
replacement['title'] = 1
if 'year' in guess and guess['year'] == video.year:
replacement['year'] = 1
else:
logger.debug(u'Not able to compute confidence for %r' % video)
return 0.0
logger.debug(u'Found %r' % replacement)
confidence = float(int(matching_format.format(**replacement), 2)) / float(int(best, 2))
logger.info(u'Computed confidence %.4f for %r and %r' % (confidence, video, subtitle))
return confidence
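# Worked example (assumed values, not from the source): for an Episode where
# series, season and episode all match and 3 of 5 video keywords are shared,
# replacement formats to '111' + '011' = 0b111011 = 59 while best is
# '111' + '101' = 0b111101 = 61, giving a confidence of 59.0 / 61.0 ~ 0.967.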
def get_service(services, service_name, config=None):
"""Get a service from its name in the service dict with the specified config.
If the service does not exist in the service dict, it is created and added to the dict.
:param dict services: dict where to get existing services or put created ones
:param string service_name: name of the service to get
:param config: config to use for the service
:type config: :class:`~subliminal.services.ServiceConfig` or None
:return: the corresponding service
:rtype: :class:`~subliminal.services.ServiceBase`
"""
if service_name not in services:
mod = __import__('services.' + service_name, globals=globals(), locals=locals(), fromlist=['Service'], level=-1)
services[service_name] = mod.Service()
services[service_name].init()
services[service_name].config = config
return services[service_name]
def key_subtitles(subtitle, video, languages, services, order):
"""Create a key to sort subtitle using the given order
:param subtitle: subtitle to sort
:type subtitle: :class:`~subliminal.subtitles.ResultSubtitle`
:param video: video to match
:type video: :class:`~subliminal.videos.Video`
:param list languages: languages in preferred order
:param list services: services in preferred order
:param order: preferred order for subtitles sorting
:type list: list of :data:`LANGUAGE_INDEX`, :data:`SERVICE_INDEX`, :data:`SERVICE_CONFIDENCE`, :data:`MATCHING_CONFIDENCE`
:return: a key ready to use for subtitles sorting
:rtype: int
"""
key = ''
for sort_item in order:
if sort_item == LANGUAGE_INDEX:
key += '{0:03d}'.format(len(languages) - languages.index(subtitle.language) - 1)
key += '{0:01d}'.format(subtitle.language == languages[languages.index(subtitle.language)])
elif sort_item == SERVICE_INDEX:
key += '{0:02d}'.format(len(services) - services.index(subtitle.service) - 1)
elif sort_item == SERVICE_CONFIDENCE:
key += '{0:04d}'.format(int(subtitle.confidence * 1000))
elif sort_item == MATCHING_CONFIDENCE:
confidence = 0
if subtitle.release:
confidence = matching_confidence(video, subtitle)
key += '{0:04d}'.format(int(confidence * 1000))
return int(key)
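# Illustrative key (assumed values): with order = [LANGUAGE_INDEX,
# SERVICE_CONFIDENCE], three preferred languages, a subtitle in the top
# language (exact match) and confidence 0.85, the key string is
# '002' + '1' + '0850', so int('00210850') == 210850; larger keys sort as
# better subtitles.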
def group_by_video(list_results):
"""Group the results of :class:`ListTasks <subliminal.tasks.ListTask>` into a
dictionary of :class:`~subliminal.videos.Video` => :class:`~subliminal.subtitles.Subtitle`
:param list_results: results to group
:type list_results: list of results of :class:`~subliminal.tasks.ListTask`
:return: subtitles grouped by videos
:rtype: dict of :class:`~subliminal.videos.Video` => [:class:`~subliminal.subtitles.Subtitle`]
"""
result = defaultdict(list)
for video, subtitles in list_results:
result[video] += subtitles or []
return result
def filter_services(services):
"""Filter out services that are not available because of a missing feature
:param list services: service names to filter
:return: a copy of the initial list of service names without unavailable ones
:rtype: list
"""
filtered_services = services[:]
for service_name in services:
mod = __import__('services.' + service_name, globals=globals(), locals=locals(), fromlist=['Service'], level=-1)
service = mod.Service
if service.required_features is not None and bs4.builder_registry.lookup(*service.required_features) is None:
logger.warning(u'Service %s not available: none of available features could be used. One of %r required' % (service_name, service.required_features))
filtered_services.remove(service_name)
return filtered_services
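# Illustrative call (service names from SERVICES above): filter_services(
# ['opensubtitles', 'addic7ed']) returns the same names minus any service
# whose `required_features` cannot be satisfied by an installed bs4 builder.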
| gpl-3.0 |
ioram7/keystone-federado-pgid2013 | build/paste/paste/util/ip4.py | 27 | 9271 | # -*- coding: iso-8859-15 -*-
"""IP4 address range set implementation.
Implements an IPv4-range type.
Copyright (C) 2006, Heiko Wundram.
Released under the MIT-license.
"""
# Version information
# -------------------
__author__ = "Heiko Wundram <[email protected]>"
__version__ = "0.2"
__revision__ = "3"
__date__ = "2006-01-20"
# Imports
# -------
import intset
import socket
# IP4Range class
# --------------
class IP4Range(intset.IntSet):
"""IP4 address range class with efficient storage of address ranges.
Supports all set operations."""
_MINIP4 = 0
_MAXIP4 = (1<<32) - 1
_UNITYTRANS = "".join([chr(n) for n in range(256)])
_IPREMOVE = "0123456789."
def __init__(self,*args):
"""Initialize an ip4range class. The constructor accepts an unlimited
number of arguments that may either be tuples in the form (start,stop),
integers, longs or strings, where start and stop in a tuple may
also be of the form integer, long or string.
Passing an integer or long means passing an IPv4-address that's already
been converted to integer notation, whereas passing a string specifies
an address where this conversion still has to be done. A string
address may be in the following formats:
- 1.2.3.4 - a plain address, interpreted as a single address
- 1.2.3 - a set of addresses, interpreted as 1.2.3.0-1.2.3.255
- localhost - hostname to look up, interpreted as single address
- 1.2.3<->5 - a set of addresses, interpreted as 1.2.3.0-1.2.5.255
- 1.2.0.0/16 - a set of addresses, interpreted as 1.2.0.0-1.2.255.255
Only the first three notations are valid if you use a string address in
a tuple, whereby notation 2 is interpreted as 1.2.3.0 if specified as
lower bound and 1.2.3.255 if specified as upper bound, not as a range
of addresses.
Specifying a range is done with the <-> operator. This is necessary
because '-' might be present in a hostname. '<->' shouldn't be, ever.
"""
# Special case copy constructor.
if len(args) == 1 and isinstance(args[0],IP4Range):
super(IP4Range,self).__init__(args[0])
return
# Convert arguments to tuple syntax.
args = list(args)
for i in range(len(args)):
argval = args[i]
if isinstance(argval,str):
if "<->" in argval:
# Type 4 address.
args[i] = self._parseRange(*argval.split("<->",1))
continue
elif "/" in argval:
# Type 5 address.
args[i] = self._parseMask(*argval.split("/",1))
else:
# Type 1, 2 or 3.
args[i] = self._parseAddrRange(argval)
elif isinstance(argval,tuple):
if len(argval) != 2:
raise ValueError("Tuple is of invalid length.")
addr1, addr2 = argval
if isinstance(addr1,str):
addr1 = self._parseAddrRange(addr1)[0]
elif not isinstance(addr1,(int,long)):
raise TypeError("Invalid argument.")
if isinstance(addr2,str):
addr2 = self._parseAddrRange(addr2)[1]
elif not isinstance(addr2,(int,long)):
raise TypeError("Invalid argument.")
args[i] = (addr1,addr2)
elif not isinstance(argval,(int,long)):
raise TypeError("Invalid argument.")
# Initialize the integer set.
super(IP4Range,self).__init__(min=self._MINIP4,max=self._MAXIP4,*args)
# Parsing functions
# -----------------
def _parseRange(self,addr1,addr2):
naddr1, naddr1len = _parseAddr(addr1)
naddr2, naddr2len = _parseAddr(addr2)
if naddr2len < naddr1len:
naddr2 += naddr1&(((1<<((naddr1len-naddr2len)*8))-1)<<
(naddr2len*8))
naddr2len = naddr1len
elif naddr2len > naddr1len:
raise ValueError("Range has more dots than address.")
naddr1 <<= (4-naddr1len)*8
naddr2 <<= (4-naddr2len)*8
naddr2 += (1<<((4-naddr2len)*8))-1
return (naddr1,naddr2)
def _parseMask(self,addr,mask):
naddr, naddrlen = _parseAddr(addr)
naddr <<= (4-naddrlen)*8
try:
if not mask:
masklen = 0
else:
masklen = int(mask)
if not 0 <= masklen <= 32:
raise ValueError
except ValueError:
try:
mask = _parseAddr(mask,False)
except ValueError:
raise ValueError("Mask isn't parseable.")
remaining = 0
masklen = 0
if not mask:
masklen = 0
else:
while not (mask&1):
remaining += 1
while (mask&1):
mask >>= 1
masklen += 1
if remaining+masklen != 32:
raise ValueError("Mask isn't a proper host mask.")
naddr1 = naddr & (((1<<masklen)-1)<<(32-masklen))
naddr2 = naddr1 + (1<<(32-masklen)) - 1
return (naddr1,naddr2)
def _parseAddrRange(self,addr):
naddr, naddrlen = _parseAddr(addr)
naddr1 = naddr<<((4-naddrlen)*8)
naddr2 = ( (naddr<<((4-naddrlen)*8)) +
(1<<((4-naddrlen)*8)) - 1 )
return (naddr1,naddr2)
# Utility functions
# -----------------
def _int2ip(self,num):
rv = []
for i in range(4):
rv.append(str(num&255))
num >>= 8
return ".".join(reversed(rv))
# Iterating
# ---------
def iteraddresses(self):
"""Returns an iterator which iterates over ips in this iprange. An
IP is returned in string form (e.g. '1.2.3.4')."""
for v in super(IP4Range,self).__iter__():
yield self._int2ip(v)
def iterranges(self):
"""Returns an iterator which iterates over ip-ip ranges which build
this iprange if combined. An ip-ip pair is returned in string form
(e.g. '1.2.3.4-2.3.4.5')."""
for r in self._ranges:
if r[1]-r[0] == 1:
yield self._int2ip(r[0])
else:
yield '%s-%s' % (self._int2ip(r[0]),self._int2ip(r[1]-1))
def itermasks(self):
"""Returns an iterator which iterates over ip/mask pairs which build
this iprange if combined. An IP/Mask pair is returned in string form
(e.g. '1.2.3.0/24')."""
for r in self._ranges:
for v in self._itermasks(r):
yield v
def _itermasks(self,r):
ranges = [r]
while ranges:
cur = ranges.pop()
curmask = 0
while True:
curmasklen = 1<<(32-curmask)
start = (cur[0]+curmasklen-1)&(((1<<curmask)-1)<<(32-curmask))
if start >= cur[0] and start+curmasklen <= cur[1]:
break
else:
curmask += 1
yield "%s/%s" % (self._int2ip(start),curmask)
if cur[0] < start:
ranges.append((cur[0],start))
if cur[1] > start+curmasklen:
ranges.append((start+curmasklen,cur[1]))
__iter__ = iteraddresses
# Printing
# --------
def __repr__(self):
"""Returns a string which can be used to reconstruct this iprange."""
rv = []
for start, stop in self._ranges:
if stop-start == 1:
rv.append("%r" % (self._int2ip(start),))
else:
rv.append("(%r,%r)" % (self._int2ip(start),
self._int2ip(stop-1)))
return "%s(%s)" % (self.__class__.__name__,",".join(rv))
def _parseAddr(addr,lookup=True):
if lookup and addr.translate(IP4Range._UNITYTRANS, IP4Range._IPREMOVE):
try:
addr = socket.gethostbyname(addr)
except socket.error:
raise ValueError("Invalid Hostname as argument.")
naddr = 0
for naddrpos, part in enumerate(addr.split(".")):
if naddrpos >= 4:
raise ValueError("Address contains more than four parts.")
try:
if not part:
part = 0
else:
part = int(part)
if not 0 <= part < 256:
raise ValueError
except ValueError:
raise ValueError("Address part out of range.")
naddr <<= 8
naddr += part
return naddr, naddrpos+1
def ip2int(addr, lookup=True):
return _parseAddr(addr, lookup=lookup)[0]
if __name__ == "__main__":
# Little test script.
x = IP4Range("172.22.162.250/24")
y = IP4Range("172.22.162.250","172.22.163.250","172.22.163.253<->255")
print x
for val in x.itermasks():
print val
for val in y.itermasks():
print val
for val in (x|y).itermasks():
print val
for val in (x^y).iterranges():
print val
for val in x:
print val
| apache-2.0 |
Lujeni/ansible | lib/ansible/inventory/manager.py | 4 | 24970 | # (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#############################################
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import fnmatch
import os
import sys
import re
import itertools
import traceback
from operator import attrgetter
from random import shuffle
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleOptionsError, AnsibleParserError
from ansible.inventory.data import InventoryData
from ansible.module_utils.six import string_types
from ansible.module_utils._text import to_bytes, to_text
from ansible.parsing.utils.addresses import parse_address
from ansible.plugins.loader import inventory_loader
from ansible.utils.helpers import deduplicate_list
from ansible.utils.path import unfrackpath
from ansible.utils.display import Display
from ansible.utils.vars import combine_vars
from ansible.vars.plugins import get_vars_from_inventory_sources
display = Display()
IGNORED_ALWAYS = [br"^\.", b"^host_vars$", b"^group_vars$", b"^vars_plugins$"]
IGNORED_PATTERNS = [to_bytes(x) for x in C.INVENTORY_IGNORE_PATTERNS]
IGNORED_EXTS = [b'%s$' % to_bytes(re.escape(x)) for x in C.INVENTORY_IGNORE_EXTS]
IGNORED = re.compile(b'|'.join(IGNORED_ALWAYS + IGNORED_PATTERNS + IGNORED_EXTS))
PATTERN_WITH_SUBSCRIPT = re.compile(
r'''^
(.+) # A pattern expression ending with...
\[(?: # A [subscript] expression comprising:
(-?[0-9]+)| # A single positive or negative number
([0-9]+)([:-]) # Or an x:y or x: range.
([0-9]*)
)\]
$
''', re.X
)
def order_patterns(patterns):
''' takes a list of patterns and reorders them by modifier to apply them consistently '''
# FIXME: this goes away if we apply patterns incrementally or by groups
pattern_regular = []
pattern_intersection = []
pattern_exclude = []
for p in patterns:
if not p:
continue
if p[0] == "!":
pattern_exclude.append(p)
elif p[0] == "&":
pattern_intersection.append(p)
else:
pattern_regular.append(p)
# if no regular pattern was given, hence only exclude and/or intersection
# make that magically work
if pattern_regular == []:
pattern_regular = ['all']
# when applying the host selectors, run those without the "&" or "!"
# first, then the &s, then the !s.
return pattern_regular + pattern_intersection + pattern_exclude
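# Example: order_patterns(['!excluded', '&shared', 'web*']) returns
# ['web*', '&shared', '!excluded'], so plain selections apply before
# intersections and exclusions.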
def split_host_pattern(pattern):
"""
Takes a string containing host patterns separated by commas (or a list
thereof) and returns a list of single patterns (which may not contain
commas). Whitespace is ignored.
Also accepts ':' as a separator for backwards compatibility, but it is
not recommended due to the conflict with IPv6 addresses and host ranges.
Example: 'a,b[1], c[2:3] , d' -> ['a', 'b[1]', 'c[2:3]', 'd']
"""
if isinstance(pattern, list):
return list(itertools.chain(*map(split_host_pattern, pattern)))
elif not isinstance(pattern, string_types):
pattern = to_text(pattern, errors='surrogate_or_strict')
# If it's got commas in it, we'll treat it as a straightforward
# comma-separated list of patterns.
if u',' in pattern:
patterns = pattern.split(u',')
# If it doesn't, it could still be a single pattern. This accounts for
# non-separator uses of colons: IPv6 addresses and [x:y] host ranges.
else:
try:
(base, port) = parse_address(pattern, allow_ranges=True)
patterns = [pattern]
except Exception:
# The only other case we accept is a ':'-separated list of patterns.
# This mishandles IPv6 addresses, and is retained only for backwards
# compatibility.
patterns = re.findall(
to_text(r'''(?: # We want to match something comprising:
[^\s:\[\]] # (anything other than whitespace or ':[]'
| # ...or...
\[[^\]]*\] # a single complete bracketed expression)
)+ # occurring once or more
'''), pattern, re.X
)
return [p.strip() for p in patterns if p.strip()]
class InventoryManager(object):
''' Creates and manages inventory '''
def __init__(self, loader, sources=None):
# base objects
self._loader = loader
self._inventory = InventoryData()
# a list of host(names) to contain current inquiries to
self._restriction = None
self._subset = None
# caches
self._hosts_patterns_cache = {} # resolved full patterns
self._pattern_cache = {} # resolved individual patterns
# the inventory dirs, files, script paths or lists of hosts
if sources is None:
self._sources = []
elif isinstance(sources, string_types):
self._sources = [sources]
else:
self._sources = sources
# get to work!
self.parse_sources(cache=True)
@property
def localhost(self):
return self._inventory.localhost
@property
def groups(self):
return self._inventory.groups
@property
def hosts(self):
return self._inventory.hosts
def add_host(self, host, group=None, port=None):
return self._inventory.add_host(host, group, port)
def add_group(self, group):
return self._inventory.add_group(group)
def get_groups_dict(self):
return self._inventory.get_groups_dict()
def reconcile_inventory(self):
self.clear_caches()
return self._inventory.reconcile_inventory()
def get_host(self, hostname):
return self._inventory.get_host(hostname)
def _fetch_inventory_plugins(self):
''' sets up loaded inventory plugins for usage '''
display.vvvv('setting up inventory plugins')
plugins = []
for name in C.INVENTORY_ENABLED:
plugin = inventory_loader.get(name)
if plugin:
plugins.append(plugin)
else:
display.warning('Failed to load inventory plugin, skipping %s' % name)
if not plugins:
raise AnsibleError("No inventory plugins available to generate inventory, make sure you have at least one whitelisted.")
return plugins
def parse_sources(self, cache=False):
''' iterate over inventory sources and parse each one to populate it'''
parsed = False
# allow for multiple inventory parsing
for source in self._sources:
if source:
if ',' not in source:
source = unfrackpath(source, follow=False)
parse = self.parse_source(source, cache=cache)
if parse and not parsed:
parsed = True
if parsed:
# do post processing
self._inventory.reconcile_inventory()
else:
if C.INVENTORY_UNPARSED_IS_FAILED:
raise AnsibleError("No inventory was parsed, please check your configuration and options.")
else:
display.warning("No inventory was parsed, only implicit localhost is available")
for group in self.groups.values():
group.vars = combine_vars(group.vars, get_vars_from_inventory_sources(self._loader, self._sources, [group], 'inventory'))
for host in self.hosts.values():
host.vars = combine_vars(host.vars, get_vars_from_inventory_sources(self._loader, self._sources, [host], 'inventory'))
def parse_source(self, source, cache=False):
''' Generate or update inventory for the source provided '''
parsed = False
display.debug(u'Examining possible inventory source: %s' % source)
# use binary for path functions
b_source = to_bytes(source)
# process directories as a collection of inventories
if os.path.isdir(b_source):
display.debug(u'Searching for inventory files in directory: %s' % source)
for i in sorted(os.listdir(b_source)):
display.debug(u'Considering %s' % i)
# Skip hidden files and stuff we explicitly ignore
if IGNORED.search(i):
continue
# recursively deal with directory entries
fullpath = to_text(os.path.join(b_source, i), errors='surrogate_or_strict')
parsed_this_one = self.parse_source(fullpath, cache=cache)
display.debug(u'parsed %s as %s' % (fullpath, parsed_this_one))
if not parsed:
parsed = parsed_this_one
else:
# left with strings or files, let plugins figure it out
# set so new hosts can use for inventory_file/dir vars
self._inventory.current_source = source
# try source with each plugin
failures = []
for plugin in self._fetch_inventory_plugins():
plugin_name = to_text(getattr(plugin, '_load_name', getattr(plugin, '_original_path', '')))
display.debug(u'Attempting to use plugin %s (%s)' % (plugin_name, plugin._original_path))
# initialize and figure out if plugin wants to attempt parsing this file
try:
plugin_wants = bool(plugin.verify_file(source))
except Exception:
plugin_wants = False
if plugin_wants:
try:
# FIXME in case plugin fails 1/2 way we have partial inventory
plugin.parse(self._inventory, self._loader, source, cache=cache)
try:
plugin.update_cache_if_changed()
except AttributeError:
# some plugins might not implement caching
pass
parsed = True
display.vvv('Parsed %s inventory source with %s plugin' % (source, plugin_name))
break
except AnsibleParserError as e:
display.debug('%s was not parsable by %s' % (source, plugin_name))
tb = ''.join(traceback.format_tb(sys.exc_info()[2]))
failures.append({'src': source, 'plugin': plugin_name, 'exc': e, 'tb': tb})
except Exception as e:
display.debug('%s failed while attempting to parse %s' % (plugin_name, source))
tb = ''.join(traceback.format_tb(sys.exc_info()[2]))
failures.append({'src': source, 'plugin': plugin_name, 'exc': AnsibleError(e), 'tb': tb})
else:
display.vvv("%s declined parsing %s as it did not pass its verify_file() method" % (plugin_name, source))
else:
if not parsed and failures:
# only if no plugin processed files should we show errors.
for fail in failures:
display.warning(u'\n* Failed to parse %s with %s plugin: %s' % (to_text(fail['src']), fail['plugin'], to_text(fail['exc'])))
if 'tb' in fail:
display.vvv(to_text(fail['tb']))
if C.INVENTORY_ANY_UNPARSED_IS_FAILED:
raise AnsibleError(u'Completely failed to parse inventory source %s' % (source))
if not parsed:
if source != '/etc/ansible/hosts' or os.path.exists(source):
# only warn if NOT using the default and if using it, only if the file is present
display.warning("Unable to parse %s as an inventory source" % source)
# clear up, jic
self._inventory.current_source = None
return parsed
def clear_caches(self):
''' clear all caches '''
self._hosts_patterns_cache = {}
self._pattern_cache = {}
# FIXME: flush inventory cache
def refresh_inventory(self):
''' recalculate inventory '''
self.clear_caches()
self._inventory = InventoryData()
self.parse_sources(cache=False)
def _match_list(self, items, pattern_str):
# compile patterns
try:
if not pattern_str[0] == '~':
pattern = re.compile(fnmatch.translate(pattern_str))
else:
pattern = re.compile(pattern_str[1:])
except Exception:
raise AnsibleError('Invalid host list pattern: %s' % pattern_str)
# apply patterns
results = []
for item in items:
if pattern.match(item):
results.append(item)
return results
def get_hosts(self, pattern="all", ignore_limits=False, ignore_restrictions=False, order=None):
"""
Takes a pattern or list of patterns and returns a list of matching
inventory host names, taking into account any active restrictions
or applied subsets
"""
hosts = []
# Check if pattern already computed
if isinstance(pattern, list):
pattern_list = pattern[:]
else:
pattern_list = [pattern]
if pattern_list:
if not ignore_limits and self._subset:
pattern_list.extend(self._subset)
if not ignore_restrictions and self._restriction:
pattern_list.extend(self._restriction)
# This is only used as a hash key in the self._hosts_patterns_cache dict
# a tuple is faster than stringifying
pattern_hash = tuple(pattern_list)
if pattern_hash not in self._hosts_patterns_cache:
patterns = split_host_pattern(pattern)
hosts[:] = self._evaluate_patterns(patterns)
# mainly useful for hostvars[host] access
if not ignore_limits and self._subset:
# exclude hosts not in a subset, if defined
subset_uuids = set(s._uuid for s in self._evaluate_patterns(self._subset))
hosts[:] = [h for h in hosts if h._uuid in subset_uuids]
if not ignore_restrictions and self._restriction:
# exclude hosts mentioned in any restriction (ex: failed hosts)
hosts[:] = [h for h in hosts if h.name in self._restriction]
self._hosts_patterns_cache[pattern_hash] = deduplicate_list(hosts)
# sort hosts list if needed (should only happen when called from strategy)
if order in ['sorted', 'reverse_sorted']:
hosts[:] = sorted(self._hosts_patterns_cache[pattern_hash][:], key=attrgetter('name'), reverse=(order == 'reverse_sorted'))
elif order == 'reverse_inventory':
hosts[:] = self._hosts_patterns_cache[pattern_hash][::-1]
else:
hosts[:] = self._hosts_patterns_cache[pattern_hash][:]
if order == 'shuffle':
shuffle(hosts)
elif order not in [None, 'inventory']:
raise AnsibleOptionsError("Invalid 'order' specified for inventory hosts: %s" % order)
return hosts
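# Illustrative call (hypothetical inventory): get_hosts('webservers:&staging',
# order='sorted') resolves both patterns, keeps only hosts in the
# intersection, and returns the Host objects sorted by name.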
def _evaluate_patterns(self, patterns):
"""
Takes a list of patterns and returns a list of matching host names,
taking into account any negative and intersection patterns.
"""
patterns = order_patterns(patterns)
hosts = []
for p in patterns:
# avoid resolving a pattern that is a plain host
if p in self._inventory.hosts:
hosts.append(self._inventory.get_host(p))
else:
that = self._match_one_pattern(p)
if p[0] == "!":
that = set(that)
hosts = [h for h in hosts if h not in that]
elif p[0] == "&":
that = set(that)
hosts = [h for h in hosts if h in that]
else:
existing_hosts = set(y.name for y in hosts)
hosts.extend([h for h in that if h.name not in existing_hosts])
return hosts
def _match_one_pattern(self, pattern):
"""
Takes a single pattern and returns a list of matching host names.
Ignores intersection (&) and exclusion (!) specifiers.
The pattern may be:
1. A regex starting with ~, e.g. '~[abc]*'
2. A shell glob pattern with ?/*/[chars]/[!chars], e.g. 'foo*'
3. An ordinary word that matches itself only, e.g. 'foo'
The pattern is matched using the following rules:
1. If it's 'all', it matches all hosts in all groups.
2. Otherwise, for each known group name:
(a) if it matches the group name, the results include all hosts
in the group or any of its children.
(b) otherwise, if it matches any hosts in the group, the results
include the matching hosts.
This means that 'foo*' may match one or more groups (thus including all
hosts therein) but also hosts in other groups.
The built-in groups 'all' and 'ungrouped' are special. No pattern can
match these group names (though 'all' behaves as though it matches, as
described above). The word 'ungrouped' can match a host of that name,
and patterns like 'ungr*' and 'al*' can match either hosts or groups
other than all and ungrouped.
If the pattern matches one or more group names according to these rules,
it may have an optional range suffix to select a subset of the results.
This is allowed only if the pattern is not a regex, i.e. '~foo[1]' does
not work (the [1] is interpreted as part of the regex), but 'foo*[1]'
would work if 'foo*' matched the name of one or more groups.
Duplicate matches are always eliminated from the results.
"""
if pattern[0] in ("&", "!"):
pattern = pattern[1:]
if pattern not in self._pattern_cache:
(expr, slice) = self._split_subscript(pattern)
hosts = self._enumerate_matches(expr)
try:
hosts = self._apply_subscript(hosts, slice)
except IndexError:
raise AnsibleError("No hosts matched the subscripted pattern '%s'" % pattern)
self._pattern_cache[pattern] = hosts
return self._pattern_cache[pattern]
def _split_subscript(self, pattern):
"""
Takes a pattern, checks if it has a subscript, and returns the pattern
without the subscript and a (start,end) tuple representing the given
subscript (or None if there is no subscript).
Validates that the subscript is in the right syntax, but doesn't make
sure the actual indices make sense in context.
"""
# Do not parse regexes for enumeration info
if pattern[0] == '~':
return (pattern, None)
# We want a pattern followed by an integer or range subscript.
# (We can't be more restrictive about the expression because the
# fnmatch semantics permit [\[:\]] to occur.)
subscript = None
m = PATTERN_WITH_SUBSCRIPT.match(pattern)
if m:
(pattern, idx, start, sep, end) = m.groups()
if idx:
subscript = (int(idx), None)
else:
if not end:
end = -1
subscript = (int(start), int(end))
if sep == '-':
display.warning("Use [x:y] inclusive subscripts instead of [x-y] which has been removed")
return (pattern, subscript)
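# Examples of the subscript parsing above (illustrative patterns):
#   _split_subscript('webservers[0]')   -> ('webservers', (0, None))
#   _split_subscript('webservers[2:5]') -> ('webservers', (2, 5))
#   _split_subscript('webservers[2:]')  -> ('webservers', (2, -1))
#   _split_subscript('~web[0-9]+')      -> ('~web[0-9]+', None)   # regexes pass through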
def _apply_subscript(self, hosts, subscript):
"""
Takes a list of hosts and a (start,end) tuple and returns the subset of
hosts based on the subscript (which may be None to return all hosts).
"""
if not hosts or not subscript:
return hosts
(start, end) = subscript
if end:
if end == -1:
end = len(hosts) - 1
return hosts[start:end + 1]
else:
return [hosts[start]]
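# Example: _apply_subscript(hosts, (1, 2)) returns hosts[1:3] (the subscript
# is inclusive), while _apply_subscript(hosts, (1, None)) returns [hosts[1]].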
def _enumerate_matches(self, pattern):
"""
Returns a list of host names matching the given pattern according to the
rules explained above in _match_one_pattern.
"""
results = []
# check if pattern matches group
matching_groups = self._match_list(self._inventory.groups, pattern)
if matching_groups:
for groupname in matching_groups:
results.extend(self._inventory.groups[groupname].get_hosts())
# check hosts if no groups matched or it is a regex/glob pattern
if not matching_groups or pattern[0] == '~' or any(special in pattern for special in ('.', '?', '*', '[')):
# pattern might match host
matching_hosts = self._match_list(self._inventory.hosts, pattern)
if matching_hosts:
for hostname in matching_hosts:
results.append(self._inventory.hosts[hostname])
if not results and pattern in C.LOCALHOST:
# get_host autocreates implicit when needed
implicit = self._inventory.get_host(pattern)
if implicit:
results.append(implicit)
# Display warning if specified host pattern did not match any groups or hosts
if not results and not matching_groups and pattern != 'all':
msg = "Could not match supplied host pattern, ignoring: %s" % pattern
display.debug(msg)
if C.HOST_PATTERN_MISMATCH == 'warning':
display.warning(msg)
elif C.HOST_PATTERN_MISMATCH == 'error':
raise AnsibleError(msg)
# no need to write 'ignore' state
return results
def list_hosts(self, pattern="all"):
""" return a list of hostnames for a pattern """
# FIXME: cache?
result = [h for h in self.get_hosts(pattern)]
# allow implicit localhost if pattern matches and no other results
if len(result) == 0 and pattern in C.LOCALHOST:
result = [pattern]
return result
def list_groups(self):
# FIXME: cache?
return sorted(self._inventory.groups.keys(), key=lambda x: x)
def restrict_to_hosts(self, restriction):
"""
Restrict list operations to the hosts given in restriction. This is used
to batch serial operations in main playbook code, don't use this for other
reasons.
"""
if restriction is None:
return
elif not isinstance(restriction, list):
restriction = [restriction]
self._restriction = set(to_text(h.name) for h in restriction)
def subset(self, subset_pattern):
"""
Limits inventory results to a subset of inventory that matches a given
pattern, such as selecting a given geographic or numeric slice from
a previous 'hosts' selection that only selects roles, or vice versa.
Corresponds to --limit parameter to ansible-playbook
"""
if subset_pattern is None:
self._subset = None
else:
subset_patterns = split_host_pattern(subset_pattern)
results = []
# allow Unix style @filename data
for x in subset_patterns:
if x[0] == "@":
fd = open(x[1:])
results.extend([to_text(l.strip()) for l in fd.read().split("\n")])
fd.close()
else:
results.append(to_text(x))
self._subset = results
def remove_restriction(self):
""" Do not restrict list operations """
self._restriction = None
def clear_pattern_cache(self):
self._pattern_cache = {}
| gpl-3.0 |
evansd/django | django/conf/locale/cs/formats.py | 65 | 1635 | # This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j. E Y'
TIME_FORMAT = 'G:i'
DATETIME_FORMAT = 'j. E Y G:i'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j. F'
SHORT_DATE_FORMAT = 'd.m.Y'
SHORT_DATETIME_FORMAT = 'd.m.Y G:i'
FIRST_DAY_OF_WEEK = 1 # Monday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = [
'%d.%m.%Y', '%d.%m.%y', # '05.01.2006', '05.01.06'
'%d. %m. %Y', '%d. %m. %y', # '5. 1. 2006', '5. 1. 06'
# '%d. %B %Y', '%d. %b. %Y', # '25. October 2006', '25. Oct. 2006'
]
# Kept ISO formats as one is in first position
TIME_INPUT_FORMATS = [
'%H:%M:%S', # '04:30:59'
'%H.%M', # '04.30'
'%H:%M', # '04:30'
]
DATETIME_INPUT_FORMATS = [
'%d.%m.%Y %H:%M:%S', # '05.01.2006 04:30:59'
'%d.%m.%Y %H:%M:%S.%f', # '05.01.2006 04:30:59.000200'
'%d.%m.%Y %H.%M', # '05.01.2006 04.30'
'%d.%m.%Y %H:%M', # '05.01.2006 04:30'
'%d.%m.%Y', # '05.01.2006'
'%d. %m. %Y %H:%M:%S', # '05. 01. 2006 04:30:59'
'%d. %m. %Y %H:%M:%S.%f', # '05. 01. 2006 04:30:59.000200'
'%d. %m. %Y %H.%M', # '05. 01. 2006 04.30'
'%d. %m. %Y %H:%M', # '05. 01. 2006 04:30'
'%d. %m. %Y', # '05. 01. 2006'
'%Y-%m-%d %H.%M', # '2006-01-05 04.30'
]
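# Example (illustrative): datetime.strptime('05. 01. 2006 04:30',
# '%d. %m. %Y %H:%M') yields datetime(2006, 1, 5, 4, 30).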
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '\xa0' # non-breaking space
NUMBER_GROUPING = 3
| bsd-3-clause |
AICP/external_chromium_org | tools/win/split_link/install_split_link.py | 146 | 3104 | # Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import _winreg
import os
import shutil
import subprocess
import sys
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
def IsExe(fpath):
return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
def FindInPath(program):
fpath, _ = os.path.split(program)
if fpath:
if IsExe(program):
return program
else:
for path in os.environ['PATH'].split(os.pathsep):
path = path.strip('"')
exe_file = os.path.join(path, program)
if not path or not os.path.isabs(path):
continue
if IsExe(exe_file):
return exe_file
return None
def EscapeForCommandLineAndCString(path):
"""Quoted sufficiently to be passed on the compile command line as a define
to be turned into a string in the target C program."""
path = '"' + path + '"'
return path.replace('\\', '\\\\').replace('"', '\\"')
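# Example: EscapeForCommandLineAndCString(r'C:\tools') returns
# r'\"C:\\tools\"', which survives both shell and C-string unescaping.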
def main():
# Switch to our own dir.
os.chdir(BASE_DIR)
link = FindInPath('link.exe')
mt = FindInPath('mt.exe')
if not link or not mt:
print("Couldn't find link.exe or mt.exe in PATH. "
"Must run from Administrator Visual Studio Command Prompt.")
return 1
link_backup = os.path.join(os.path.split(link)[0], 'link.exe.split_link.exe')
# Don't re-backup link.exe, so only copy link.exe to backup if it's
# not there already.
if not os.path.exists(link_backup):
try:
print 'Saving original link.exe...'
shutil.copyfile(link, link_backup)
except IOError:
print(("Wasn't able to back up %s to %s. "
"Not running with Administrator privileges?")
% (link, link_backup))
return 1
# Build our linker shim.
print 'Building split_link.exe...'
split_link_py = os.path.abspath('split_link.py')
script_path = EscapeForCommandLineAndCString(split_link_py)
python = EscapeForCommandLineAndCString(sys.executable)
subprocess.check_call('cl.exe /nologo /Ox /Zi /W4 /WX /D_UNICODE /DUNICODE'
' /D_CRT_SECURE_NO_WARNINGS /EHsc split_link.cc'
' /DPYTHON_PATH="%s"'
' /DSPLIT_LINK_SCRIPT_PATH="%s"'
' /link shell32.lib shlwapi.lib /out:split_link.exe' % (
python, script_path))
# Copy shim into place.
print 'Copying split_link.exe over link.exe...'
try:
shutil.copyfile('split_link.exe', link)
_winreg.SetValue(_winreg.HKEY_CURRENT_USER,
'Software\\Chromium\\split_link_installed',
_winreg.REG_SZ,
link_backup)
_winreg.SetValue(_winreg.HKEY_CURRENT_USER,
'Software\\Chromium\\split_link_mt_path',
_winreg.REG_SZ,
mt)
except IOError:
print("Wasn't able to copy split_link.exe over %s. "
"Not running with Administrator privileges?" % link)
return 1
return 0
if __name__ == '__main__':
sys.exit(main())
| bsd-3-clause |
imsparsh/python-for-android | python-build/python-libs/gdata/src/gdata/tlslite/errors.py | 333 | 5795 | """Exception classes.
@sort: TLSError, TLSAbruptCloseError, TLSAlert, TLSLocalAlert, TLSRemoteAlert,
TLSAuthenticationError, TLSNoAuthenticationError, TLSAuthenticationTypeError,
TLSFingerprintError, TLSAuthorizationError, TLSValidationError, TLSFaultError
"""
from constants import AlertDescription, AlertLevel
class TLSError(Exception):
"""Base class for all TLS Lite exceptions."""
pass
class TLSAbruptCloseError(TLSError):
"""The socket was closed without a proper TLS shutdown.
The TLS specification mandates that an alert of some sort
must be sent before the underlying socket is closed. If the socket
is closed without this, it could signify that an attacker is trying
to truncate the connection. It could also signify a misbehaving
TLS implementation, or a random network failure.
"""
pass
class TLSAlert(TLSError):
"""A TLS alert has been signalled."""
pass
_descriptionStr = {\
AlertDescription.close_notify: "close_notify",\
AlertDescription.unexpected_message: "unexpected_message",\
AlertDescription.bad_record_mac: "bad_record_mac",\
AlertDescription.decryption_failed: "decryption_failed",\
AlertDescription.record_overflow: "record_overflow",\
AlertDescription.decompression_failure: "decompression_failure",\
AlertDescription.handshake_failure: "handshake_failure",\
AlertDescription.no_certificate: "no certificate",\
AlertDescription.bad_certificate: "bad_certificate",\
AlertDescription.unsupported_certificate: "unsupported_certificate",\
AlertDescription.certificate_revoked: "certificate_revoked",\
AlertDescription.certificate_expired: "certificate_expired",\
AlertDescription.certificate_unknown: "certificate_unknown",\
AlertDescription.illegal_parameter: "illegal_parameter",\
AlertDescription.unknown_ca: "unknown_ca",\
AlertDescription.access_denied: "access_denied",\
AlertDescription.decode_error: "decode_error",\
AlertDescription.decrypt_error: "decrypt_error",\
AlertDescription.export_restriction: "export_restriction",\
AlertDescription.protocol_version: "protocol_version",\
AlertDescription.insufficient_security: "insufficient_security",\
AlertDescription.internal_error: "internal_error",\
AlertDescription.user_canceled: "user_canceled",\
AlertDescription.no_renegotiation: "no_renegotiation",\
AlertDescription.unknown_srp_username: "unknown_srp_username",\
AlertDescription.missing_srp_username: "missing_srp_username"}
class TLSLocalAlert(TLSAlert):
"""A TLS alert has been signalled by the local implementation.
@type description: int
@ivar description: Set to one of the constants in
L{tlslite.constants.AlertDescription}
@type level: int
@ivar level: Set to one of the constants in
L{tlslite.constants.AlertLevel}
@type message: str
@ivar message: Description of what went wrong.
"""
def __init__(self, alert, message=None):
self.description = alert.description
self.level = alert.level
self.message = message
def __str__(self):
alertStr = TLSAlert._descriptionStr.get(self.description)
if alertStr == None:
alertStr = str(self.description)
if self.message:
return alertStr + ": " + self.message
else:
return alertStr
class TLSRemoteAlert(TLSAlert):
"""A TLS alert has been signalled by the remote implementation.
@type description: int
@ivar description: Set to one of the constants in
L{tlslite.constants.AlertDescription}
@type level: int
@ivar level: Set to one of the constants in
L{tlslite.constants.AlertLevel}
"""
def __init__(self, alert):
self.description = alert.description
self.level = alert.level
def __str__(self):
alertStr = TLSAlert._descriptionStr.get(self.description)
if alertStr == None:
alertStr = str(self.description)
return alertStr
class TLSAuthenticationError(TLSError):
"""The handshake succeeded, but the other party's authentication
was inadequate.
This exception will only be raised when a
L{tlslite.Checker.Checker} has been passed to a handshake function.
The Checker will be invoked once the handshake completes, and if
the Checker objects to how the other party authenticated, a
subclass of this exception will be raised.
"""
pass
class TLSNoAuthenticationError(TLSAuthenticationError):
"""The Checker was expecting the other party to authenticate with a
certificate chain, but this did not occur."""
pass
class TLSAuthenticationTypeError(TLSAuthenticationError):
"""The Checker was expecting the other party to authenticate with a
different type of certificate chain."""
pass
class TLSFingerprintError(TLSAuthenticationError):
"""The Checker was expecting the other party to authenticate with a
certificate chain that matches a different fingerprint."""
pass
class TLSAuthorizationError(TLSAuthenticationError):
"""The Checker was expecting the other party to authenticate with a
certificate chain that has a different authorization."""
pass
class TLSValidationError(TLSAuthenticationError):
"""The Checker has determined that the other party's certificate
chain is invalid."""
pass
class TLSFaultError(TLSError):
"""The other party responded incorrectly to an induced fault.
This exception will only occur during fault testing, when a
TLSConnection's fault variable is set to induce some sort of
faulty behavior, and the other party doesn't respond appropriately.
"""
pass
| apache-2.0 |
meduz/scikit-learn | examples/linear_model/plot_ransac.py | 73 | 1859 | """
===========================================
Robust linear model estimation using RANSAC
===========================================
In this example we see how to robustly fit a linear model to faulty data using
the RANSAC algorithm.
"""
import numpy as np
from matplotlib import pyplot as plt
from sklearn import linear_model, datasets
n_samples = 1000
n_outliers = 50
X, y, coef = datasets.make_regression(n_samples=n_samples, n_features=1,
n_informative=1, noise=10,
coef=True, random_state=0)
# Add outlier data
np.random.seed(0)
X[:n_outliers] = 3 + 0.5 * np.random.normal(size=(n_outliers, 1))
y[:n_outliers] = -3 + 10 * np.random.normal(size=n_outliers)
# Fit line using all data
model = linear_model.LinearRegression()
model.fit(X, y)
# Robustly fit linear model with RANSAC algorithm
model_ransac = linear_model.RANSACRegressor(linear_model.LinearRegression())
model_ransac.fit(X, y)
inlier_mask = model_ransac.inlier_mask_
outlier_mask = np.logical_not(inlier_mask)
# Predict data of estimated models
line_X = np.arange(-5, 5)
line_y = model.predict(line_X[:, np.newaxis])
line_y_ransac = model_ransac.predict(line_X[:, np.newaxis])
# Compare estimated coefficients
print("Estimated coefficients (true, normal, RANSAC):")
print(coef, model.coef_, model_ransac.estimator_.coef_)
lw = 2
plt.scatter(X[inlier_mask], y[inlier_mask], color='yellowgreen', marker='.',
label='Inliers')
plt.scatter(X[outlier_mask], y[outlier_mask], color='gold', marker='.',
label='Outliers')
plt.plot(line_X, line_y, color='navy', linestyle='-', linewidth=lw,
label='Linear regressor')
plt.plot(line_X, line_y_ransac, color='cornflowerblue', linestyle='-',
linewidth=lw, label='RANSAC regressor')
plt.legend(loc='lower right')
plt.show()
| bsd-3-clause |
sandeepraju/git-talk | cli.py | 1 | 1686 | import argparse
import os
from gittalk import GitTalk
from gittalk.utils import which, make_sure_path_exists
def run():
"""
`run` drives the command line interface for Git Talk.
It exposes a command line interface through which users
can interact with Git Talk to configure or invoke various
functionalities.
"""
# do explict dependency checks
try:
import Tkinter
except Exception as e:
print 'Make sure your Python has Tkinter installed before using GitTalk!'
if not which('ffmpeg'):
print 'Please make sure FFmpeg is installed before using GitTalk!'
# create a folder to be used by GitTalk
make_sure_path_exists(os.path.join(os.environ['HOME'], '.gittalk'))
parser = argparse.ArgumentParser(description='Audio & Video annotations to your code via Git')
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument('-e', '--enable', action='store_true', required=False,
help='Enable Git Talk in the current Git repository.',
dest='enable')
group.add_argument('-d', '--disable', action='store_true', required=False,
help='Disable Git Talk in the current Git repository.',
dest='disable')
group.add_argument('-t', '--trigger', action='store_true', required=False,
help='Trigger Git Talk.',
dest='trigger')
args = parser.parse_args()
gt = GitTalk()
if args.enable:
gt.enable()
elif args.disable:
gt.disable()
elif args.trigger:
gt.trigger()
if __name__ == '__main__':
run()
| bsd-3-clause |
jimi-c/ansible | lib/ansible/plugins/action/add_host.py | 49 | 3384 | # (c) 2012-2014, Michael DeHaan <[email protected]>
# Copyright 2012, Seth Vidal <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.errors import AnsibleError
from ansible.module_utils.six import string_types
from ansible.plugins.action import ActionBase
from ansible.parsing.utils.addresses import parse_address
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class ActionModule(ActionBase):
''' Create inventory hosts and groups in the memory inventory'''
# We need to be able to modify the inventory
BYPASS_HOST_LOOP = True
TRANSFERS_FILES = False
def run(self, tmp=None, task_vars=None):
self._supports_check_mode = True
result = super(ActionModule, self).run(tmp, task_vars)
del tmp # tmp no longer has any effect
# Parse out any hostname:port patterns
new_name = self._task.args.get('name', self._task.args.get('hostname', self._task.args.get('host', None)))
if new_name is None:
result['failed'] = True
result['msg'] = 'name or hostname arg needs to be provided'
return result
display.vv("creating host via 'add_host': hostname=%s" % new_name)
try:
name, port = parse_address(new_name, allow_ranges=False)
except:
# not a parsable hostname, but might still be usable
name = new_name
port = None
if port:
self._task.args['ansible_ssh_port'] = port
groups = self._task.args.get('groupname', self._task.args.get('groups', self._task.args.get('group', '')))
# add it to the group if that was specified
new_groups = []
if groups:
if isinstance(groups, list):
group_list = groups
elif isinstance(groups, string_types):
group_list = groups.split(",")
else:
raise AnsibleError("Groups must be specified as a list.", obj=self._task)
for group_name in group_list:
if group_name not in new_groups:
new_groups.append(group_name.strip())
# Add any variables to the new_host
host_vars = dict()
special_args = frozenset(('name', 'hostname', 'groupname', 'groups'))
for k in self._task.args.keys():
if k not in special_args:
host_vars[k] = self._task.args[k]
result['changed'] = True
result['add_host'] = dict(host_name=name, groups=new_groups, host_vars=host_vars)
return result
| gpl-3.0 |
eliangidoni/rethinkdb | test/regression/issue_4383.py | 12 | 4815 | #!/usr/bin/env python
# Copyright 2015-2016 RethinkDB, all rights reserved.
'''Test that a backfill will resume after restarting a cluster'''
import os, pprint, sys, time
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir, 'common')))
import driver, scenario_common, utils, vcoptparse
op = vcoptparse.OptParser()
op["num_rows"] = vcoptparse.IntFlag("--num-rows", 50000)
scenario_common.prepare_option_parser_mode_flags(op)
opts = op.parse(sys.argv)
_, command_prefix, server_options = scenario_common.parse_mode_flags(opts)
r = utils.import_python_driver()
dbName, tableName = utils.get_test_db_table()
num_shards = 16
utils.print_with_time("Starting cluster of three servers")
with driver.Cluster(initial_servers=['source1', 'source2', 'target'], output_folder='.', console_output=True, command_prefix=command_prefix, extra_options=server_options) as cluster:
source_a = cluster['source1']
source_b = cluster['source2']
target = cluster['target']
conn = r.connect(host=source_a.host, port=source_a.driver_port)
utils.print_with_time("Creating a table")
if dbName not in r.db_list().run(conn):
r.db_create(dbName).run(conn)
if tableName in r.db(dbName).table_list().run(conn):
        r.db(dbName).table_drop(tableName).run(conn)
r.db("rethinkdb").table("table_config").insert({
"name":tableName, "db": dbName,
"shards": [{"primary_replica":"source1", "replicas":["source1", "source2"]}] * num_shards
}).run(conn)
tbl = r.db(dbName).table(tableName)
tbl.wait(wait_for="all_replicas_ready").run(conn)
utils.print_with_time("Inserting %d documents" % opts["num_rows"])
chunkSize = 2000
for startId in range(0, opts["num_rows"], chunkSize):
endId = min(startId + chunkSize, opts["num_rows"])
res = tbl.insert(r.range(startId, endId).map({
"value": r.row,
"padding": "x" * 100
}), durability="soft").run(conn)
assert res["inserted"] == endId - startId
utils.print_with_time(" Progress: %d/%d" % (endId, opts["num_rows"]))
tbl.sync().run(conn)
utils.print_with_time("Beginning replication to second server")
tbl.config().update({
"shards": [{"primary_replica": "source1", "replicas": ["source1", "source2", "target"]}] * num_shards
}).run(conn)
utils.print_with_time("Waiting a few seconds for backfill to get going")
deadline = time.time() + 2
while True:
status = tbl.status().run(conn)
try:
assert status["status"]["ready_for_writes"] == True, 'Table is not ready for writes:\n' + pprint.pformat(status)
assert status["status"]["all_replicas_ready"] == False, 'All replicas incorrectly reporting ready:\n' + pprint.pformat(status)
break
except AssertionError:
if time.time() > deadline:
raise
else:
time.sleep(.05)
utils.print_with_time("Shutting down servers")
cluster.check_and_stop()
utils.print_with_time("Restarting servers")
source_a.start()
source_b.start()
target.start()
conn = r.connect(host=source_a.host, port=source_a.driver_port)
conn_target = r.connect(host=target.host, port=target.driver_port)
utils.print_with_time("Checking that table is available for writes")
try:
tbl.wait(wait_for="ready_for_writes", timeout=30).run(conn)
    except r.ReqlRuntimeError as e:
status = r.db("rethinkdb").table("_debug_table_status").nth(0).run(conn)
pprint.pprint(status)
raise
try:
tbl.wait(wait_for="ready_for_writes", timeout=3).run(conn_target)
    except r.ReqlRuntimeError as e:
pprint.pprint(r.db("rethinkdb").table("_debug_table_status").nth(0).run(conn_target))
raise
utils.print_with_time("Making sure the backfill didn't end")
status = tbl.status().run(conn)
for shard in status['shards']:
for server in shard['replicas']:
if server['server'] == 'target' and server['state'] == 'backfilling':
break # this will cause a double break, bypassing the outer else
else:
continue
break
else:
raise AssertionError('There were no shards listed as backfilling:\n' + pprint.pformat(status))
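    # Editor's sketch: the nested for/else above implements a "double
    # break"; a hypothetical flat equivalent (not used by the test) is:
    #
    #   assert any(server['server'] == 'target'
    #              and server['state'] == 'backfilling'
    #              for shard in status['shards']
    #              for server in shard['replicas'])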
for job in r.db('rethinkdb').table('jobs').filter({'type':'backfill'}).run(conn):
if job['info']['db'] == dbName and job['info']['table'] == tableName:
break
else:
raise AssertionError('Did not find any job backfilling this table')
assert not status["status"]["all_replicas_ready"], 'All replicas incorrectly reporting ready:\n' + pprint.pformat(status)
utils.print_with_time("Cleaning up")
utils.print_with_time("Done.")
| agpl-3.0 |
prculley/gramps | gramps/gen/filters/rules/person/_matchidof.py | 4 | 1777 | #
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2002-2006 Donald N. Allingham
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
#-------------------------------------------------------------------------
#
# Standard Python modules
#
#-------------------------------------------------------------------------
from ....const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from .. import Rule
#-------------------------------------------------------------------------
#
# HasIdOf
#
#-------------------------------------------------------------------------
class MatchIdOf(Rule):
"""Rule that checks for a person with a specific Gramps ID"""
labels = [ _('ID:') ]
name = _('Person with <Id>')
description = _("Matches person with a specified Gramps ID")
category = _('General filters')
    def apply(self, db, person):
        return person.gramps_id.find(self.list[0]) != -1
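#-------------------------------------------------------------------------
#
# Editor's hedged usage sketch (not part of Gramps): assumes the Rule base
# class stores its constructor argument list on `self.list`, and that `db`
# and `person` come from an open Gramps database.
#
#   rule = MatchIdOf(['I0042'])
#   rule.apply(db, person)   # True if 'I0042' occurs in person.gramps_id
#
#-------------------------------------------------------------------------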
| gpl-2.0 |
optima-ict/odoo | openerp/workflow/service.py | 50 | 4084 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from helpers import Session, Record
from openerp.workflow.instance import WorkflowInstance
# import instance
class WorkflowService(object):
CACHE = {}
@classmethod
def clear_cache(cls, dbname):
cls.CACHE[dbname] = {}
@classmethod
def new(cls, cr, uid, model_name, record_id):
return cls(Session(cr, uid), Record(model_name, record_id))
def __init__(self, session, record):
assert isinstance(session, Session)
assert isinstance(record, Record)
self.session = session
self.record = record
self.cr = self.session.cr
def write(self):
self.cr.execute('select id from wkf_instance where res_id=%s and res_type=%s and state=%s',
(self.record.id or None, self.record.model or None, 'active')
)
for (instance_id,) in self.cr.fetchall():
WorkflowInstance(self.session, self.record, {'id': instance_id}).update()
def trigger(self):
self.cr.execute('select instance_id from wkf_triggers where res_id=%s and model=%s', (self.record.id, self.record.model))
res = self.cr.fetchall()
for (instance_id,) in res:
self.cr.execute('select %s,res_type,res_id from wkf_instance where id=%s', (self.session.uid, instance_id,))
current_uid, current_model_name, current_record_id = self.cr.fetchone()
current_session = Session(self.session.cr, current_uid)
current_record = Record(current_model_name, current_record_id)
WorkflowInstance(current_session, current_record, {'id': instance_id}).update()
def delete(self):
WorkflowInstance(self.session, self.record, {}).delete()
def create(self):
WorkflowService.CACHE.setdefault(self.cr.dbname, {})
wkf_ids = WorkflowService.CACHE[self.cr.dbname].get(self.record.model, None)
if not wkf_ids:
self.cr.execute('select id from wkf where osv=%s and on_create=True', (self.record.model,))
wkf_ids = self.cr.fetchall()
WorkflowService.CACHE[self.cr.dbname][self.record.model] = wkf_ids
for (wkf_id, ) in wkf_ids:
WorkflowInstance.create(self.session, self.record, wkf_id)
def validate(self, signal):
result = False
        # ids of all active workflow instances for the corresponding resource (res_id, model_name)
self.cr.execute('select id from wkf_instance where res_id=%s and res_type=%s and state=%s', (self.record.id, self.record.model, 'active'))
# TODO: Refactor the workflow instance object
for (instance_id,) in self.cr.fetchall():
wi = WorkflowInstance(self.session, self.record, {'id': instance_id})
res2 = wi.validate(signal)
result = result or res2
return result
def redirect(self, new_rid):
# get ids of wkf instances for the old resource (res_id)
# CHECKME: shouldn't we get only active instances?
self.cr.execute('select id, wkf_id from wkf_instance where res_id=%s and res_type=%s', (self.record.id, self.record.model))
for old_inst_id, workflow_id in self.cr.fetchall():
# first active instance for new resource (new_rid), using same wkf
self.cr.execute(
'SELECT id '\
'FROM wkf_instance '\
'WHERE res_id=%s AND res_type=%s AND wkf_id=%s AND state=%s',
(new_rid, self.record.model, workflow_id, 'active'))
new_id = self.cr.fetchone()
if new_id:
# select all workitems which "wait" for the old instance
self.cr.execute('select id from wkf_workitem where subflow_id=%s', (old_inst_id,))
for (item_id,) in self.cr.fetchall():
# redirect all those workitems to the wkf instance of the new resource
self.cr.execute('update wkf_workitem set subflow_id=%s where id=%s', (new_id[0], item_id))
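# Editor's hedged usage sketch (not part of Odoo): `cr` and `uid` come from
# a live ORM environment, and 'sale.order' / 'order_confirm' are purely
# illustrative names.
#
#   service = WorkflowService.new(cr, uid, 'sale.order', record_id)
#   service.create()                    # start any on_create workflows
#   service.validate('order_confirm')   # fire a signal on active instances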
| agpl-3.0 |
lukas-krecan/tensorflow | tensorflow/python/summary/event_accumulator_test.py | 2 | 21287 | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import tensorflow.python.platform
from six.moves import xrange # pylint: disable=redefined-builtin
import tensorflow as tf
from tensorflow.core.framework import graph_pb2
from tensorflow.core.util.event_pb2 import SessionLog
from tensorflow.python.platform import gfile
from tensorflow.python.platform import googletest
from tensorflow.python.platform import logging
from tensorflow.python.summary import event_accumulator as ea
class _EventGenerator(object):
def __init__(self):
self.items = []
def Load(self):
while self.items:
yield self.items.pop(0)
def AddScalar(self, tag, wall_time=0, step=0, value=0):
event = tf.Event(
wall_time=wall_time, step=step,
summary=tf.Summary(
value=[tf.Summary.Value(tag=tag, simple_value=value)]
)
)
self.AddEvent(event)
def AddHistogram(self, tag, wall_time=0, step=0, hmin=1, hmax=2, hnum=3,
hsum=4, hsum_squares=5, hbucket_limit=None, hbucket=None):
histo = tf.HistogramProto(min=hmin, max=hmax, num=hnum, sum=hsum,
sum_squares=hsum_squares,
bucket_limit=hbucket_limit,
bucket=hbucket)
event = tf.Event(
wall_time=wall_time,
step=step,
summary=tf.Summary(value=[tf.Summary.Value(tag=tag, histo=histo)]))
self.AddEvent(event)
def AddImage(self, tag, wall_time=0, step=0, encoded_image_string=b'imgstr',
width=150, height=100):
image = tf.Summary.Image(encoded_image_string=encoded_image_string,
width=width, height=height)
event = tf.Event(
wall_time=wall_time,
step=step,
summary=tf.Summary(
value=[tf.Summary.Value(tag=tag, image=image)]))
self.AddEvent(event)
def AddEvent(self, event):
self.items.append(event)
class EventAccumulatorTest(tf.test.TestCase):
def assertTagsEqual(self, tags1, tags2):
# Make sure the two dictionaries have the same keys.
self.assertItemsEqual(tags1, tags2)
# Additionally, make sure each key in the dictionary maps to the same value.
for key in tags1:
if isinstance(tags1[key], list):
        # We don't care about the order of the values in lists, so we only
        # assert that the same items are present.
self.assertItemsEqual(tags1[key], tags2[key])
else:
# Make sure the values are equal.
self.assertEqual(tags1[key], tags2[key])
class MockingEventAccumulatorTest(EventAccumulatorTest):
def setUp(self):
super(MockingEventAccumulatorTest, self).setUp()
self.stubs = googletest.StubOutForTesting()
self.empty = {ea.IMAGES: [],
ea.SCALARS: [],
ea.HISTOGRAMS: [],
ea.COMPRESSED_HISTOGRAMS: [],
ea.GRAPH: False}
self._real_constructor = ea.EventAccumulator
self._real_generator = ea._GeneratorFromPath
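    # Editor's note: the stub below monkeypatches ea._GeneratorFromPath so
    # that ea.EventAccumulator(gen) feeds events from the injected fake
    # generator instead of reading event files from disk; tearDown restores
    # the real constructor and generator saved above.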
def _FakeAccumulatorConstructor(generator, *args, **kwargs):
ea._GeneratorFromPath = lambda x: generator
return self._real_constructor(generator, *args, **kwargs)
ea.EventAccumulator = _FakeAccumulatorConstructor
def tearDown(self):
self.stubs.CleanUp()
ea.EventAccumulator = self._real_constructor
ea._GeneratorFromPath = self._real_generator
def testEmptyAccumulator(self):
gen = _EventGenerator()
x = ea.EventAccumulator(gen)
x.Reload()
self.assertEqual(x.Tags(), self.empty)
def testTags(self):
gen = _EventGenerator()
gen.AddScalar('s1')
gen.AddScalar('s2')
gen.AddHistogram('hst1')
gen.AddHistogram('hst2')
gen.AddImage('im1')
gen.AddImage('im2')
acc = ea.EventAccumulator(gen)
acc.Reload()
self.assertTagsEqual(
acc.Tags(), {
ea.IMAGES: ['im1', 'im2'],
ea.SCALARS: ['s1', 's2'],
ea.HISTOGRAMS: ['hst1', 'hst2'],
ea.COMPRESSED_HISTOGRAMS: ['hst1', 'hst2'],
ea.GRAPH: False})
def testReload(self):
gen = _EventGenerator()
acc = ea.EventAccumulator(gen)
acc.Reload()
self.assertEqual(acc.Tags(), self.empty)
gen.AddScalar('s1')
gen.AddScalar('s2')
gen.AddHistogram('hst1')
gen.AddHistogram('hst2')
gen.AddImage('im1')
gen.AddImage('im2')
self.assertEqual(acc.Tags(), self.empty)
acc.Reload()
self.assertTagsEqual(acc.Tags(), {
ea.IMAGES: ['im1', 'im2'],
ea.SCALARS: ['s1', 's2'],
ea.HISTOGRAMS: ['hst1', 'hst2'],
ea.COMPRESSED_HISTOGRAMS: ['hst1', 'hst2'],
ea.GRAPH: False})
def testScalars(self):
gen = _EventGenerator()
acc = ea.EventAccumulator(gen)
s1 = ea.ScalarEvent(wall_time=1, step=10, value=32)
s2 = ea.ScalarEvent(wall_time=2, step=12, value=64)
gen.AddScalar('s1', wall_time=1, step=10, value=32)
gen.AddScalar('s2', wall_time=2, step=12, value=64)
acc.Reload()
self.assertEqual(acc.Scalars('s1'), [s1])
self.assertEqual(acc.Scalars('s2'), [s2])
def testHistograms(self):
gen = _EventGenerator()
acc = ea.EventAccumulator(gen)
val1 = ea.HistogramValue(min=1, max=2, num=3, sum=4, sum_squares=5,
bucket_limit=[1, 2, 3], bucket=[0, 3, 0])
val2 = ea.HistogramValue(min=-2, max=3, num=4, sum=5, sum_squares=6,
bucket_limit=[2, 3, 4], bucket=[1, 3, 0])
hst1 = ea.HistogramEvent(wall_time=1, step=10, histogram_value=val1)
hst2 = ea.HistogramEvent(wall_time=2, step=12, histogram_value=val2)
gen.AddHistogram('hst1', wall_time=1, step=10, hmin=1, hmax=2, hnum=3,
hsum=4, hsum_squares=5, hbucket_limit=[1, 2, 3],
hbucket=[0, 3, 0])
gen.AddHistogram('hst2', wall_time=2, step=12, hmin=-2, hmax=3, hnum=4,
hsum=5, hsum_squares=6, hbucket_limit=[2, 3, 4],
hbucket=[1, 3, 0])
acc.Reload()
self.assertEqual(acc.Histograms('hst1'), [hst1])
self.assertEqual(acc.Histograms('hst2'), [hst2])
def testCompressedHistograms(self):
gen = _EventGenerator()
acc = ea.EventAccumulator(gen, compression_bps=(0, 2500, 5000, 7500, 10000))
gen.AddHistogram('hst1', wall_time=1, step=10, hmin=1, hmax=2, hnum=3,
hsum=4, hsum_squares=5, hbucket_limit=[1, 2, 3],
hbucket=[0, 3, 0])
gen.AddHistogram('hst2', wall_time=2, step=12, hmin=-2, hmax=3, hnum=4,
hsum=5, hsum_squares=6, hbucket_limit=[2, 3, 4],
hbucket=[1, 3, 0])
acc.Reload()
# Create the expected values after compressing hst1
expected_vals1 = [ea.CompressedHistogramValue(bp, val) for bp, val in [(
0, 1.0), (2500, 1.25), (5000, 1.5), (7500, 1.75), (10000, 2.0)]]
expected_cmphst1 = ea.CompressedHistogramEvent(
wall_time=1,
step=10,
compressed_histogram_values=expected_vals1)
self.assertEqual(acc.CompressedHistograms('hst1'), [expected_cmphst1])
# Create the expected values after compressing hst2
expected_vals2 = [
ea.CompressedHistogramValue(bp, val)
for bp, val in [(0, -2), (2500, 2), (5000, 2 + 1 / 3), (7500, 2 + 2 / 3
), (10000, 3)]
]
expected_cmphst2 = ea.CompressedHistogramEvent(
wall_time=2,
step=12,
compressed_histogram_values=expected_vals2)
self.assertEqual(acc.CompressedHistograms('hst2'), [expected_cmphst2])
def testPercentile(self):
def AssertExpectedForBps(bps, expected):
output = acc._Percentile(
bps, bucket_limit, cumsum_weights, histo_min, histo_max, histo_num)
self.assertAlmostEqual(expected, output)
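    # Editor's note: ea._Remap(x, x0, x1, y0, y1) is assumed to do linear
    # interpolation, mapping x from [x0, x1] onto [y0, y1]; each assertion
    # below pins a basis-point position to its interpolated histogram value.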
gen = _EventGenerator()
acc = ea.EventAccumulator(gen)
bucket_limit = [1, 2, 3, 4]
histo_num = 100
## All weights in the first bucket
cumsum_weights = [10000, 10000, 10000, 10000]
histo_min = -1
histo_max = .9
AssertExpectedForBps(0, histo_min)
AssertExpectedForBps(2500, ea._Remap(2500, 0, 10000, histo_min, histo_max))
AssertExpectedForBps(5000, ea._Remap(5000, 0, 10000, histo_min, histo_max))
AssertExpectedForBps(7500, ea._Remap(7500, 0, 10000, histo_min, histo_max))
AssertExpectedForBps(10000, histo_max)
## All weights in second bucket
cumsum_weights = [0, 10000, 10000, 10000]
histo_min = 1.1
histo_max = 1.8
AssertExpectedForBps(0, histo_min)
AssertExpectedForBps(2500, ea._Remap(2500, 0, 10000, histo_min, histo_max))
AssertExpectedForBps(5000, ea._Remap(5000, 0, 10000, histo_min, histo_max))
AssertExpectedForBps(7500, ea._Remap(7500, 0, 10000, histo_min, histo_max))
AssertExpectedForBps(10000, histo_max)
## All weights in the last bucket
cumsum_weights = [0, 0, 0, 10000]
histo_min = 3.1
histo_max = 3.6
AssertExpectedForBps(0, histo_min)
AssertExpectedForBps(2500, ea._Remap(2500, 0, 10000, histo_min, histo_max))
AssertExpectedForBps(5000, ea._Remap(5000, 0, 10000, histo_min, histo_max))
AssertExpectedForBps(7500, ea._Remap(7500, 0, 10000, histo_min, histo_max))
AssertExpectedForBps(10000, histo_max)
## Weights distributed between two buckets
cumsum_weights = [0, 4000, 10000, 10000]
histo_min = 1.1
histo_max = 2.9
AssertExpectedForBps(0, histo_min)
AssertExpectedForBps(2500, ea._Remap(2500, 0, 4000, histo_min,
bucket_limit[1]))
AssertExpectedForBps(5000, ea._Remap(5000, 4000, 10000, bucket_limit[1],
histo_max))
AssertExpectedForBps(7500, ea._Remap(7500, 4000, 10000, bucket_limit[1],
histo_max))
AssertExpectedForBps(10000, histo_max)
## Weights distributed between all buckets
cumsum_weights = [1000, 4000, 8000, 10000]
histo_min = -1
histo_max = 3.9
AssertExpectedForBps(0, histo_min)
AssertExpectedForBps(2500, ea._Remap(2500, 1000, 4000, bucket_limit[0],
bucket_limit[1]))
AssertExpectedForBps(5000, ea._Remap(5000, 4000, 8000, bucket_limit[1],
bucket_limit[2]))
AssertExpectedForBps(7500, ea._Remap(7500, 4000, 8000, bucket_limit[1],
bucket_limit[2]))
AssertExpectedForBps(9000, ea._Remap(9000, 8000, 10000, bucket_limit[2],
histo_max))
AssertExpectedForBps(10000, histo_max)
## Most weight in first bucket
cumsum_weights = [9000, 10000, 10000, 10000]
histo_min = -1
histo_max = 1.1
AssertExpectedForBps(0, histo_min)
AssertExpectedForBps(2500, ea._Remap(2500, 0, 9000, histo_min,
bucket_limit[0]))
AssertExpectedForBps(5000, ea._Remap(5000, 0, 9000, histo_min,
bucket_limit[0]))
AssertExpectedForBps(7500, ea._Remap(7500, 0, 9000, histo_min,
bucket_limit[0]))
AssertExpectedForBps(9500, ea._Remap(9500, 9000, 10000, bucket_limit[0],
histo_max))
AssertExpectedForBps(10000, histo_max)
def testImages(self):
gen = _EventGenerator()
acc = ea.EventAccumulator(gen)
im1 = ea.ImageEvent(wall_time=1, step=10, encoded_image_string=b'big',
width=400, height=300)
im2 = ea.ImageEvent(wall_time=2, step=12, encoded_image_string=b'small',
width=40, height=30)
gen.AddImage('im1', wall_time=1, step=10, encoded_image_string=b'big',
width=400, height=300)
gen.AddImage('im2', wall_time=2, step=12, encoded_image_string=b'small',
width=40, height=30)
acc.Reload()
self.assertEqual(acc.Images('im1'), [im1])
self.assertEqual(acc.Images('im2'), [im2])
def testActivation(self):
gen = _EventGenerator()
acc = ea.EventAccumulator(gen)
self.assertFalse(acc._activated)
with self.assertRaises(RuntimeError):
acc.Tags()
with self.assertRaises(RuntimeError):
acc.Scalars('s1')
acc.Reload()
self.assertTrue(acc._activated)
acc._activated = False
def testKeyError(self):
gen = _EventGenerator()
acc = ea.EventAccumulator(gen)
acc.Reload()
with self.assertRaises(KeyError):
acc.Scalars('s1')
with self.assertRaises(KeyError):
acc.Scalars('hst1')
with self.assertRaises(KeyError):
acc.Scalars('im1')
with self.assertRaises(KeyError):
acc.Histograms('s1')
with self.assertRaises(KeyError):
acc.Histograms('im1')
with self.assertRaises(KeyError):
acc.Images('s1')
with self.assertRaises(KeyError):
acc.Images('hst1')
def testNonValueEvents(self):
"""Tests that non-value events in the generator don't cause early exits."""
gen = _EventGenerator()
acc = ea.EventAccumulator(gen)
gen.AddScalar('s1', wall_time=1, step=10, value=20)
gen.AddEvent(tf.Event(
wall_time=2, step=20, file_version='nots2'))
gen.AddScalar('s3', wall_time=3, step=100, value=1)
gen.AddHistogram('hst1')
gen.AddImage('im1')
acc.Reload()
self.assertTagsEqual(acc.Tags(), {
ea.IMAGES: ['im1'],
ea.SCALARS: ['s1', 's3'],
ea.HISTOGRAMS: ['hst1'],
ea.COMPRESSED_HISTOGRAMS: ['hst1'],
ea.GRAPH: False})
def testExpiredDataDiscardedAfterRestartForFileVersionLessThan2(self):
"""Tests that events are discarded after a restart is detected.
If a step value is observed to be lower than what was previously seen,
this should force a discard of all previous items with the same tag
that are outdated.
Only file versions < 2 use this out-of-order discard logic. Later versions
discard events based on the step value of SessionLog.START.
"""
warnings = []
self.stubs.Set(logging, 'warn', warnings.append)
gen = _EventGenerator()
acc = ea.EventAccumulator(gen)
gen.AddEvent(tf.Event(wall_time=0, step=0, file_version='brain.Event:1'))
gen.AddScalar('s1', wall_time=1, step=100, value=20)
gen.AddScalar('s1', wall_time=1, step=200, value=20)
gen.AddScalar('s1', wall_time=1, step=300, value=20)
acc.Reload()
## Check that number of items are what they should be
self.assertEqual([x.step for x in acc.Scalars('s1')], [100, 200, 300])
gen.AddScalar('s1', wall_time=1, step=101, value=20)
gen.AddScalar('s1', wall_time=1, step=201, value=20)
gen.AddScalar('s1', wall_time=1, step=301, value=20)
acc.Reload()
## Check that we have discarded 200 and 300 from s1
self.assertEqual([x.step for x in acc.Scalars('s1')], [100, 101, 201, 301])
def testEventsDiscardedPerTagAfterRestartForFileVersionLessThan2(self):
"""Tests that event discards after restart, only affect the misordered tag.
If a step value is observed to be lower than what was previously seen,
this should force a discard of all previous items that are outdated, but
only for the out of order tag. Other tags should remain unaffected.
Only file versions < 2 use this out-of-order discard logic. Later versions
discard events based on the step value of SessionLog.START.
"""
warnings = []
self.stubs.Set(logging, 'warn', warnings.append)
gen = _EventGenerator()
acc = ea.EventAccumulator(gen)
gen.AddEvent(tf.Event(wall_time=0, step=0, file_version='brain.Event:1'))
gen.AddScalar('s1', wall_time=1, step=100, value=20)
gen.AddScalar('s1', wall_time=1, step=200, value=20)
gen.AddScalar('s1', wall_time=1, step=300, value=20)
gen.AddScalar('s1', wall_time=1, step=101, value=20)
gen.AddScalar('s1', wall_time=1, step=201, value=20)
gen.AddScalar('s1', wall_time=1, step=301, value=20)
gen.AddScalar('s2', wall_time=1, step=101, value=20)
gen.AddScalar('s2', wall_time=1, step=201, value=20)
gen.AddScalar('s2', wall_time=1, step=301, value=20)
acc.Reload()
## Check that we have discarded 200 and 300
self.assertEqual([x.step for x in acc.Scalars('s1')], [100, 101, 201, 301])
## Check that s1 discards do not affect s2
## i.e. check that only events from the out of order tag are discarded
self.assertEqual([x.step for x in acc.Scalars('s2')], [101, 201, 301])
def testOnlySummaryEventsTriggerDiscards(self):
"""Test that file version event does not trigger data purge."""
gen = _EventGenerator()
acc = ea.EventAccumulator(gen)
gen.AddScalar('s1', wall_time=1, step=100, value=20)
ev1 = tf.Event(wall_time=2, step=0, file_version='brain.Event:1')
graph_bytes = graph_pb2.GraphDef().SerializeToString()
ev2 = tf.Event(wall_time=3, step=0, graph_def=graph_bytes)
gen.AddEvent(ev1)
gen.AddEvent(ev2)
acc.Reload()
self.assertEqual([x.step for x in acc.Scalars('s1')], [100])
def testSessionLogStartMessageDiscardsExpiredEvents(self):
"""Test that SessionLog.START message discards expired events.
This discard logic is preferred over the out-of-order step discard logic,
but this logic can only be used for event protos which have the SessionLog
enum, which was introduced to event.proto for file_version >= brain.Event:2.
"""
gen = _EventGenerator()
acc = ea.EventAccumulator(gen)
gen.AddEvent(tf.Event(wall_time=0, step=1, file_version='brain.Event:2'))
gen.AddScalar('s1', wall_time=1, step=100, value=20)
gen.AddScalar('s1', wall_time=1, step=200, value=20)
gen.AddScalar('s1', wall_time=1, step=300, value=20)
gen.AddScalar('s1', wall_time=1, step=400, value=20)
gen.AddScalar('s2', wall_time=1, step=202, value=20)
gen.AddScalar('s2', wall_time=1, step=203, value=20)
slog = SessionLog(status=SessionLog.START)
gen.AddEvent(tf.Event(wall_time=2, step=201, session_log=slog))
acc.Reload()
self.assertEqual([x.step for x in acc.Scalars('s1')], [100, 200])
self.assertEqual([x.step for x in acc.Scalars('s2')], [])
class RealisticEventAccumulatorTest(EventAccumulatorTest):
def setUp(self):
super(RealisticEventAccumulatorTest, self).setUp()
def testScalarsRealistically(self):
"""Test accumulator by writing values and then reading them."""
def FakeScalarSummary(tag, value):
value = tf.Summary.Value(tag=tag, simple_value=value)
summary = tf.Summary(value=[value])
return summary
directory = os.path.join(self.get_temp_dir(), 'values_dir')
if gfile.IsDirectory(directory):
gfile.DeleteRecursively(directory)
gfile.MkDir(directory)
writer = tf.train.SummaryWriter(directory, max_queue=100)
graph_def = tf.GraphDef(node=[tf.NodeDef(name='A', op='Mul')])
# Add a graph to the summary writer.
writer.add_graph(graph_def)
# Write a bunch of events using the writer
for i in xrange(30):
summ_id = FakeScalarSummary('id', i)
summ_sq = FakeScalarSummary('sq', i*i)
writer.add_summary(summ_id, i*5)
writer.add_summary(summ_sq, i*5)
writer.flush()
# Verify that we can load those events properly
acc = ea.EventAccumulator(directory)
acc.Reload()
self.assertTagsEqual(acc.Tags(), {
ea.IMAGES: [],
ea.SCALARS: ['id', 'sq'],
ea.HISTOGRAMS: [],
ea.COMPRESSED_HISTOGRAMS: [],
ea.GRAPH: True})
id_events = acc.Scalars('id')
sq_events = acc.Scalars('sq')
self.assertEqual(30, len(id_events))
self.assertEqual(30, len(sq_events))
for i in xrange(30):
self.assertEqual(i*5, id_events[i].step)
self.assertEqual(i*5, sq_events[i].step)
self.assertEqual(i, id_events[i].value)
self.assertEqual(i*i, sq_events[i].value)
# Write a few more events to test incremental reloading
for i in xrange(30, 40):
summ_id = FakeScalarSummary('id', i)
summ_sq = FakeScalarSummary('sq', i*i)
writer.add_summary(summ_id, i*5)
writer.add_summary(summ_sq, i*5)
writer.flush()
# Verify we can now see all of the data
acc.Reload()
self.assertEqual(40, len(id_events))
self.assertEqual(40, len(sq_events))
for i in xrange(40):
self.assertEqual(i*5, id_events[i].step)
self.assertEqual(i*5, sq_events[i].step)
self.assertEqual(i, id_events[i].value)
self.assertEqual(i*i, sq_events[i].value)
self.assertProtoEquals(graph_def, acc.Graph())
if __name__ == '__main__':
tf.test.main()
| apache-2.0 |
nenel83/fuzzy-avenger | src/ibmiotf/device.py | 2 | 6759 | # *****************************************************************************
# Copyright (c) 2014 IBM Corporation and other Contributors.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Eclipse Public License v1.0
# which accompanies this distribution, and is available at
# http://www.eclipse.org/legal/epl-v10.html
#
# Contributors:
# David Parker - Initial Contribution
# *****************************************************************************
import json
import re
import pytz
from datetime import datetime
from ibmiotf import AbstractClient, Message, InvalidEventException, UnsupportedAuthenticationMethod, ConfigurationException, ConnectionException, MissingMessageEncoderException, MissingMessageDecoderException
from ibmiotf.codecs import jsonCodec, jsonIotfCodec
# Support Python 2.7 and 3.4 versions of configparser
try:
import configparser
except ImportError:
import ConfigParser as configparser
COMMAND_RE = re.compile("iot-2/cmd/(.+)/fmt/(.+)")
class Command:
def __init__(self, pahoMessage, messageEncoderModules):
result = COMMAND_RE.match(pahoMessage.topic)
if result:
self.command = result.group(1)
self.format = result.group(2)
if self.format in messageEncoderModules:
message = messageEncoderModules[self.format].decode(pahoMessage)
self.timestamp = message.timestamp
self.data = message.data
else:
raise MissingMessageDecoderException(self.format)
else:
raise InvalidEventException("Received command on invalid topic: %s" % (pahoMessage.topic))
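# Editor's hedged sketch: a message on topic 'iot-2/cmd/reboot/fmt/json'
# parses to Command.command == 'reboot' and Command.format == 'json'; the
# payload is then decoded by whichever codec was registered for 'json'.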
class Client(AbstractClient):
def __init__(self, options):
self.__options = options
if self.__options['org'] == None:
raise ConfigurationException("Missing required property: org")
if self.__options['type'] == None:
raise ConfigurationException("Missing required property: type")
if self.__options['id'] == None:
raise ConfigurationException("Missing required property: id")
if self.__options['org'] != "quickstart":
if self.__options['auth-method'] == None:
raise ConfigurationException("Missing required property: auth-method")
if (self.__options['auth-method'] == "token"):
if self.__options['auth-token'] == None:
raise ConfigurationException("Missing required property for token based authentication: auth-token")
else:
                raise UnsupportedAuthenticationMethod(self.__options['auth-method'])
AbstractClient.__init__(
self,
organization = options['org'],
clientId = "d:" + options['org'] + ":" + options['type'] + ":" + options['id'],
username = "use-token-auth" if (options['auth-method'] == "token") else None,
password = options['auth-token']
)
# Add handler for commands if not connected to QuickStart
if self.__options['org'] != "quickstart":
self.client.message_callback_add("iot-2/cmd/+/fmt/+", self.__onCommand)
# Initialize user supplied callback
self.commandCallback = None
self.client.on_connect = self.on_connect
self.setMessageEncoderModule('json', jsonCodec)
self.setMessageEncoderModule('json-iotf', jsonIotfCodec)
'''
This is called after the client has received a CONNACK message from the broker in response to calling connect().
The parameter rc is an integer giving the return code:
0: Success
1: Refused - unacceptable protocol version
2: Refused - identifier rejected
3: Refused - server unavailable
4: Refused - bad user name or password
5: Refused - not authorised
'''
def on_connect(self, client, userdata, flags, rc):
if rc == 0:
self.connectEvent.set()
self.logger.info("Connected successfully: %s" % self.clientId)
if self.__options['org'] != "quickstart":
self.__subscribeToCommands()
elif rc == 5:
self.logAndRaiseException(ConnectionException("Not authorized: s (%s, %s, %s)" % (self.clientId, self.username, self.password)))
else:
self.logAndRaiseException(ConnectionException("Connection failed: RC= %s" % (rc)))
def publishEvent(self, event, msgFormat, data, qos=0):
if not self.connectEvent.wait():
self.logger.warning("Unable to send event %s because device is not currently connected")
return False
else:
self.logger.debug("Sending event %s with data %s" % (event, json.dumps(data)))
topic = 'iot-2/evt/'+event+'/fmt/' + msgFormat
if msgFormat in self.messageEncoderModules:
payload = self.messageEncoderModules[msgFormat].encode(data, datetime.now(pytz.timezone('UTC')))
self.client.publish(topic, payload=payload, qos=qos, retain=False)
return True
else:
raise MissingMessageEncoderException(msgFormat)
def __subscribeToCommands(self):
if self.__options['org'] == "quickstart":
self.logger.warning("QuickStart applications do not support commands")
return False
if not self.connectEvent.wait():
self.logger.warning("Unable to subscribe to commands because device is not currently connected")
return False
else:
topic = 'iot-2/cmd/+/fmt/json'
self.client.subscribe(topic, qos=2)
return True
'''
Internal callback for device command messages, parses source device from topic string and
passes the information on to the registerd device command callback
'''
def __onCommand(self, client, userdata, pahoMessage):
self.recv = self.recv + 1
try:
command = Command(pahoMessage, self.messageEncoderModules)
self.logger.debug("Received command '%s'" % (command.command))
if self.commandCallback: self.commandCallback(command)
except InvalidEventException as e:
self.logger.critical(str(e))
def ParseConfigFile(configFilePath):
parms = configparser.ConfigParser()
sectionHeader = "device"
try:
with open(configFilePath) as f:
try:
parms.read_file(f)
organization = parms.get(sectionHeader, "org", fallback=None)
deviceType = parms.get(sectionHeader, "type", fallback=None)
deviceId = parms.get(sectionHeader, "id", fallback=None)
authMethod = parms.get(sectionHeader, "auth-method", fallback=None)
authToken = parms.get(sectionHeader, "auth-token", fallback=None)
except AttributeError:
# Python 2.7 support
# https://docs.python.org/3/library/configparser.html#configparser.ConfigParser.read_file
parms.readfp(f)
organization = parms.get(sectionHeader, "org", None)
deviceType = parms.get(sectionHeader, "type", None)
deviceId = parms.get(sectionHeader, "id", None)
authMethod = parms.get(sectionHeader, "auth-method", None)
authToken = parms.get(sectionHeader, "auth-token", None)
except IOError as e:
reason = "Error reading device configuration file '%s' (%s)" % (configFilePath,e[1])
raise ConfigurationException(reason)
return {'org': organization, 'type': deviceType, 'id': deviceId, 'auth-method': authMethod, 'auth-token': authToken}
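# Editor's hedged usage sketch (not part of ibmiotf): assumes AbstractClient
# provides connect()/disconnect() and that 'device.cfg' holds real
# credentials.
#
#   options = ParseConfigFile('device.cfg')
#   client = Client(options)
#   client.connect()
#   client.commandCallback = lambda cmd: print(cmd.command, cmd.data)
#   client.publishEvent('status', 'json', {'temp': 21.5}, qos=1)
#   client.disconnect()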
| epl-1.0 |
sunzhxjs/JobGIS | lib/python2.7/site-packages/jinja2/nodes.py | 342 | 28954 | # -*- coding: utf-8 -*-
"""
jinja2.nodes
~~~~~~~~~~~~
This module implements additional nodes derived from the ast base node.
It also provides some node tree helper functions like `in_lineno` and
`get_nodes` used by the parser and translator in order to normalize
python and jinja nodes.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import types
import operator
from collections import deque
from jinja2.utils import Markup
from jinja2._compat import izip, with_metaclass, text_type
#: the types we support for context functions
_context_function_types = (types.FunctionType, types.MethodType)
_binop_to_func = {
'*': operator.mul,
'/': operator.truediv,
'//': operator.floordiv,
'**': operator.pow,
'%': operator.mod,
'+': operator.add,
'-': operator.sub
}
_uaop_to_func = {
'not': operator.not_,
'+': operator.pos,
'-': operator.neg
}
_cmpop_to_func = {
'eq': operator.eq,
'ne': operator.ne,
'gt': operator.gt,
'gteq': operator.ge,
'lt': operator.lt,
'lteq': operator.le,
'in': lambda a, b: a in b,
'notin': lambda a, b: a not in b
}
class Impossible(Exception):
"""Raised if the node could not perform a requested action."""
class NodeType(type):
"""A metaclass for nodes that handles the field and attribute
inheritance. fields and attributes from the parent class are
automatically forwarded to the child."""
def __new__(cls, name, bases, d):
for attr in 'fields', 'attributes':
storage = []
storage.extend(getattr(bases[0], attr, ()))
storage.extend(d.get(attr, ()))
assert len(bases) == 1, 'multiple inheritance not allowed'
assert len(storage) == len(set(storage)), 'layout conflict'
d[attr] = tuple(storage)
d.setdefault('abstract', False)
return type.__new__(cls, name, bases, d)
class EvalContext(object):
"""Holds evaluation time information. Custom attributes can be attached
to it in extensions.
"""
def __init__(self, environment, template_name=None):
self.environment = environment
if callable(environment.autoescape):
self.autoescape = environment.autoescape(template_name)
else:
self.autoescape = environment.autoescape
self.volatile = False
def save(self):
return self.__dict__.copy()
def revert(self, old):
self.__dict__.clear()
self.__dict__.update(old)
def get_eval_context(node, ctx):
if ctx is None:
if node.environment is None:
raise RuntimeError('if no eval context is passed, the '
'node must have an attached '
'environment.')
return EvalContext(node.environment)
return ctx
class Node(with_metaclass(NodeType, object)):
"""Baseclass for all Jinja2 nodes. There are a number of nodes available
of different types. There are four major types:
- :class:`Stmt`: statements
- :class:`Expr`: expressions
- :class:`Helper`: helper nodes
- :class:`Template`: the outermost wrapper node
All nodes have fields and attributes. Fields may be other nodes, lists,
or arbitrary values. Fields are passed to the constructor as regular
positional arguments, attributes as keyword arguments. Each node has
two attributes: `lineno` (the line number of the node) and `environment`.
The `environment` attribute is set at the end of the parsing process for
all nodes automatically.
"""
fields = ()
attributes = ('lineno', 'environment')
abstract = True
def __init__(self, *fields, **attributes):
if self.abstract:
raise TypeError('abstract nodes are not instanciable')
if fields:
if len(fields) != len(self.fields):
if not self.fields:
raise TypeError('%r takes 0 arguments' %
self.__class__.__name__)
raise TypeError('%r takes 0 or %d argument%s' % (
self.__class__.__name__,
len(self.fields),
len(self.fields) != 1 and 's' or ''
))
for name, arg in izip(self.fields, fields):
setattr(self, name, arg)
for attr in self.attributes:
setattr(self, attr, attributes.pop(attr, None))
if attributes:
raise TypeError('unknown attribute %r' %
next(iter(attributes)))
def iter_fields(self, exclude=None, only=None):
"""This method iterates over all fields that are defined and yields
``(key, value)`` tuples. Per default all fields are returned, but
it's possible to limit that to some fields by providing the `only`
parameter or to exclude some using the `exclude` parameter. Both
should be sets or tuples of field names.
"""
for name in self.fields:
if (exclude is only is None) or \
(exclude is not None and name not in exclude) or \
(only is not None and name in only):
try:
yield name, getattr(self, name)
except AttributeError:
pass
def iter_child_nodes(self, exclude=None, only=None):
"""Iterates over all direct child nodes of the node. This iterates
over all fields and yields the values of they are nodes. If the value
of a field is a list all the nodes in that list are returned.
"""
for field, item in self.iter_fields(exclude, only):
if isinstance(item, list):
for n in item:
if isinstance(n, Node):
yield n
elif isinstance(item, Node):
yield item
def find(self, node_type):
"""Find the first node of a given type. If no such node exists the
return value is `None`.
"""
for result in self.find_all(node_type):
return result
def find_all(self, node_type):
"""Find all the nodes of a given type. If the type is a tuple,
the check is performed for any of the tuple items.
"""
for child in self.iter_child_nodes():
if isinstance(child, node_type):
yield child
for result in child.find_all(node_type):
yield result
def set_ctx(self, ctx):
"""Reset the context of a node and all child nodes. Per default the
parser will all generate nodes that have a 'load' context as it's the
most common one. This method is used in the parser to set assignment
targets and other nodes to a store context.
"""
todo = deque([self])
while todo:
node = todo.popleft()
if 'ctx' in node.fields:
node.ctx = ctx
todo.extend(node.iter_child_nodes())
return self
def set_lineno(self, lineno, override=False):
"""Set the line numbers of the node and children."""
todo = deque([self])
while todo:
node = todo.popleft()
if 'lineno' in node.attributes:
if node.lineno is None or override:
node.lineno = lineno
todo.extend(node.iter_child_nodes())
return self
def set_environment(self, environment):
"""Set the environment for all nodes."""
todo = deque([self])
while todo:
node = todo.popleft()
node.environment = environment
todo.extend(node.iter_child_nodes())
return self
def __eq__(self, other):
return type(self) is type(other) and \
tuple(self.iter_fields()) == tuple(other.iter_fields())
def __ne__(self, other):
return not self.__eq__(other)
# Restore Python 2 hashing behavior on Python 3
__hash__ = object.__hash__
def __repr__(self):
return '%s(%s)' % (
self.__class__.__name__,
', '.join('%s=%r' % (arg, getattr(self, arg, None)) for
arg in self.fields)
)
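# Editor's hedged sketch: hand-building a tiny expression tree and
# constant-folding it (assumes a stock jinja2 Environment, which is not
# sandboxed):
#
#   from jinja2 import Environment
#   tree = Add(Const(1), Const(2))
#   tree.set_environment(Environment())
#   tree.as_const()   # -> 3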
class Stmt(Node):
"""Base node for all statements."""
abstract = True
class Helper(Node):
"""Nodes that exist in a specific context only."""
abstract = True
class Template(Node):
"""Node that represents a template. This must be the outermost node that
is passed to the compiler.
"""
fields = ('body',)
class Output(Stmt):
"""A node that holds multiple expressions which are then printed out.
This is used both for the `print` statement and the regular template data.
"""
fields = ('nodes',)
class Extends(Stmt):
"""Represents an extends statement."""
fields = ('template',)
class For(Stmt):
"""The for loop. `target` is the target for the iteration (usually a
:class:`Name` or :class:`Tuple`), `iter` the iterable. `body` is a list
of nodes that are used as loop-body, and `else_` a list of nodes for the
`else` block. If no else node exists it has to be an empty list.
For filtered nodes an expression can be stored as `test`, otherwise `None`.
"""
fields = ('target', 'iter', 'body', 'else_', 'test', 'recursive')
class If(Stmt):
"""If `test` is true, `body` is rendered, else `else_`."""
fields = ('test', 'body', 'else_')
class Macro(Stmt):
"""A macro definition. `name` is the name of the macro, `args` a list of
arguments and `defaults` a list of defaults if there are any. `body` is
a list of nodes for the macro body.
"""
fields = ('name', 'args', 'defaults', 'body')
class CallBlock(Stmt):
"""Like a macro without a name but a call instead. `call` is called with
the unnamed macro as `caller` argument this node holds.
"""
fields = ('call', 'args', 'defaults', 'body')
class FilterBlock(Stmt):
"""Node for filter sections."""
fields = ('body', 'filter')
class Block(Stmt):
"""A node that represents a block."""
fields = ('name', 'body', 'scoped')
class Include(Stmt):
"""A node that represents the include tag."""
fields = ('template', 'with_context', 'ignore_missing')
class Import(Stmt):
"""A node that represents the import tag."""
fields = ('template', 'target', 'with_context')
class FromImport(Stmt):
"""A node that represents the from import tag. It's important to not
pass unsafe names to the name attribute. The compiler translates the
attribute lookups directly into getattr calls and does *not* use the
subscript callback of the interface. As exported variables may not
start with double underscores (which the parser asserts) this is not a
problem for regular Jinja code, but if this node is used in an extension
extra care must be taken.
The list of names may contain tuples if aliases are wanted.
"""
fields = ('template', 'names', 'with_context')
class ExprStmt(Stmt):
"""A statement that evaluates an expression and discards the result."""
fields = ('node',)
class Assign(Stmt):
"""Assigns an expression to a target."""
fields = ('target', 'node')
class AssignBlock(Stmt):
"""Assigns a block to a target."""
fields = ('target', 'body')
class Expr(Node):
"""Baseclass for all expressions."""
abstract = True
def as_const(self, eval_ctx=None):
"""Return the value of the expression as constant or raise
:exc:`Impossible` if this was not possible.
An :class:`EvalContext` can be provided, if none is given
a default context is created which requires the nodes to have
an attached environment.
.. versionchanged:: 2.4
the `eval_ctx` parameter was added.
"""
raise Impossible()
def can_assign(self):
"""Check if it's possible to assign something to this node."""
return False
class BinExpr(Expr):
"""Baseclass for all binary expressions."""
fields = ('left', 'right')
operator = None
abstract = True
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
# intercepted operators cannot be folded at compile time
if self.environment.sandboxed and \
self.operator in self.environment.intercepted_binops:
raise Impossible()
f = _binop_to_func[self.operator]
try:
return f(self.left.as_const(eval_ctx), self.right.as_const(eval_ctx))
except Exception:
raise Impossible()
class UnaryExpr(Expr):
"""Baseclass for all unary expressions."""
fields = ('node',)
operator = None
abstract = True
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
# intercepted operators cannot be folded at compile time
if self.environment.sandboxed and \
self.operator in self.environment.intercepted_unops:
raise Impossible()
f = _uaop_to_func[self.operator]
try:
return f(self.node.as_const(eval_ctx))
except Exception:
raise Impossible()
class Name(Expr):
"""Looks up a name or stores a value in a name.
The `ctx` of the node can be one of the following values:
- `store`: store a value in the name
- `load`: load that name
- `param`: like `store` but if the name was defined as function parameter.
"""
fields = ('name', 'ctx')
def can_assign(self):
return self.name not in ('true', 'false', 'none',
'True', 'False', 'None')
class Literal(Expr):
"""Baseclass for literals."""
abstract = True
class Const(Literal):
"""All constant values. The parser will return this node for simple
constants such as ``42`` or ``"foo"`` but it can be used to store more
complex values such as lists too. Only constants with a safe
representation (objects where ``eval(repr(x)) == x`` is true).
"""
fields = ('value',)
def as_const(self, eval_ctx=None):
return self.value
@classmethod
def from_untrusted(cls, value, lineno=None, environment=None):
"""Return a const object if the value is representable as
constant value in the generated code, otherwise it will raise
an `Impossible` exception.
"""
from .compiler import has_safe_repr
if not has_safe_repr(value):
raise Impossible()
return cls(value, lineno=lineno, environment=environment)
class TemplateData(Literal):
"""A constant template string."""
fields = ('data',)
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
if eval_ctx.volatile:
raise Impossible()
if eval_ctx.autoescape:
return Markup(self.data)
return self.data
class Tuple(Literal):
"""For loop unpacking and some other things like multiple arguments
for subscripts. Like for :class:`Name` `ctx` specifies if the tuple
is used for loading the names or storing.
"""
fields = ('items', 'ctx')
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return tuple(x.as_const(eval_ctx) for x in self.items)
def can_assign(self):
for item in self.items:
if not item.can_assign():
return False
return True
class List(Literal):
"""Any list literal such as ``[1, 2, 3]``"""
fields = ('items',)
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return [x.as_const(eval_ctx) for x in self.items]
class Dict(Literal):
"""Any dict literal such as ``{1: 2, 3: 4}``. The items must be a list of
:class:`Pair` nodes.
"""
fields = ('items',)
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return dict(x.as_const(eval_ctx) for x in self.items)
class Pair(Helper):
"""A key, value pair for dicts."""
fields = ('key', 'value')
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return self.key.as_const(eval_ctx), self.value.as_const(eval_ctx)
class Keyword(Helper):
"""A key, value pair for keyword arguments where key is a string."""
fields = ('key', 'value')
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return self.key, self.value.as_const(eval_ctx)
class CondExpr(Expr):
"""A conditional expression (inline if expression). (``{{
foo if bar else baz }}``)
"""
fields = ('test', 'expr1', 'expr2')
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
if self.test.as_const(eval_ctx):
return self.expr1.as_const(eval_ctx)
# if we evaluate to an undefined object, we better do that at runtime
if self.expr2 is None:
raise Impossible()
return self.expr2.as_const(eval_ctx)
class Filter(Expr):
"""This node applies a filter on an expression. `name` is the name of
the filter, the rest of the fields are the same as for :class:`Call`.
If the `node` of a filter is `None` the contents of the last buffer are
filtered. Buffers are created by macros and filter blocks.
"""
fields = ('node', 'name', 'args', 'kwargs', 'dyn_args', 'dyn_kwargs')
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
if eval_ctx.volatile or self.node is None:
raise Impossible()
# we have to be careful here because we call filter_ below.
        # if this variable were named filter, 2to3 would wrap the
        # call in a list because it assumes we are talking about the
        # builtin filter function here, which no longer returns a list in
        # python 3. because of that, do not rename filter_ to filter!
filter_ = self.environment.filters.get(self.name)
if filter_ is None or getattr(filter_, 'contextfilter', False):
raise Impossible()
obj = self.node.as_const(eval_ctx)
args = [x.as_const(eval_ctx) for x in self.args]
if getattr(filter_, 'evalcontextfilter', False):
args.insert(0, eval_ctx)
elif getattr(filter_, 'environmentfilter', False):
args.insert(0, self.environment)
kwargs = dict(x.as_const(eval_ctx) for x in self.kwargs)
if self.dyn_args is not None:
try:
args.extend(self.dyn_args.as_const(eval_ctx))
except Exception:
raise Impossible()
if self.dyn_kwargs is not None:
try:
kwargs.update(self.dyn_kwargs.as_const(eval_ctx))
except Exception:
raise Impossible()
try:
return filter_(obj, *args, **kwargs)
except Exception:
raise Impossible()
class Test(Expr):
"""Applies a test on an expression. `name` is the name of the test, the
rest of the fields are the same as for :class:`Call`.
"""
fields = ('node', 'name', 'args', 'kwargs', 'dyn_args', 'dyn_kwargs')
class Call(Expr):
"""Calls an expression. `args` is a list of arguments, `kwargs` a list
of keyword arguments (list of :class:`Keyword` nodes), and `dyn_args`
and `dyn_kwargs` has to be either `None` or a node that is used as
node for dynamic positional (``*args``) or keyword (``**kwargs``)
arguments.
"""
fields = ('node', 'args', 'kwargs', 'dyn_args', 'dyn_kwargs')
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
if eval_ctx.volatile:
raise Impossible()
obj = self.node.as_const(eval_ctx)
# don't evaluate context functions
args = [x.as_const(eval_ctx) for x in self.args]
if isinstance(obj, _context_function_types):
if getattr(obj, 'contextfunction', False):
raise Impossible()
elif getattr(obj, 'evalcontextfunction', False):
args.insert(0, eval_ctx)
elif getattr(obj, 'environmentfunction', False):
args.insert(0, self.environment)
kwargs = dict(x.as_const(eval_ctx) for x in self.kwargs)
if self.dyn_args is not None:
try:
args.extend(self.dyn_args.as_const(eval_ctx))
except Exception:
raise Impossible()
if self.dyn_kwargs is not None:
try:
kwargs.update(self.dyn_kwargs.as_const(eval_ctx))
except Exception:
raise Impossible()
try:
return obj(*args, **kwargs)
except Exception:
raise Impossible()
class Getitem(Expr):
"""Get an attribute or item from an expression and prefer the item."""
fields = ('node', 'arg', 'ctx')
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
if self.ctx != 'load':
raise Impossible()
try:
return self.environment.getitem(self.node.as_const(eval_ctx),
self.arg.as_const(eval_ctx))
except Exception:
raise Impossible()
def can_assign(self):
return False
class Getattr(Expr):
"""Get an attribute or item from an expression that is a ascii-only
bytestring and prefer the attribute.
"""
fields = ('node', 'attr', 'ctx')
def as_const(self, eval_ctx=None):
if self.ctx != 'load':
raise Impossible()
try:
eval_ctx = get_eval_context(self, eval_ctx)
return self.environment.getattr(self.node.as_const(eval_ctx),
self.attr)
except Exception:
raise Impossible()
def can_assign(self):
return False
class Slice(Expr):
"""Represents a slice object. This must only be used as argument for
:class:`Subscript`.
"""
fields = ('start', 'stop', 'step')
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
def const(obj):
if obj is None:
return None
return obj.as_const(eval_ctx)
return slice(const(self.start), const(self.stop), const(self.step))
class Concat(Expr):
"""Concatenates the list of expressions provided after converting them to
unicode.
"""
fields = ('nodes',)
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return ''.join(text_type(x.as_const(eval_ctx)) for x in self.nodes)
class Compare(Expr):
"""Compares an expression with some other expressions. `ops` must be a
list of :class:`Operand`\s.
"""
fields = ('expr', 'ops')
    def as_const(self, eval_ctx=None):
        eval_ctx = get_eval_context(self, eval_ctx)
        result = value = self.expr.as_const(eval_ctx)
        try:
            for op in self.ops:
                new_value = op.expr.as_const(eval_ctx)
                result = _cmpop_to_func[op.op](value, new_value)
                # a failed link makes the whole chain false; without this
                # short-circuit a later comparison could clobber the result
                # (e.g. 1 < 0 < 2 would wrongly fold to true)
                if not result:
                    return False
                value = new_value
        except Exception:
            raise Impossible()
        return result
class Operand(Helper):
"""Holds an operator and an expression."""
fields = ('op', 'expr')
if __debug__:
Operand.__doc__ += '\nThe following operators are available: ' + \
', '.join(sorted('``%s``' % x for x in set(_binop_to_func) |
set(_uaop_to_func) | set(_cmpop_to_func)))
class Mul(BinExpr):
"""Multiplies the left with the right node."""
operator = '*'
class Div(BinExpr):
"""Divides the left by the right node."""
operator = '/'
class FloorDiv(BinExpr):
"""Divides the left by the right node and truncates conver the
result into an integer by truncating.
"""
operator = '//'
class Add(BinExpr):
"""Add the left to the right node."""
operator = '+'
class Sub(BinExpr):
"""Subtract the right from the left node."""
operator = '-'
class Mod(BinExpr):
"""Left modulo right."""
operator = '%'
class Pow(BinExpr):
"""Left to the power of right."""
operator = '**'
class And(BinExpr):
"""Short circuited AND."""
operator = 'and'
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return self.left.as_const(eval_ctx) and self.right.as_const(eval_ctx)
class Or(BinExpr):
"""Short circuited OR."""
operator = 'or'
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return self.left.as_const(eval_ctx) or self.right.as_const(eval_ctx)
class Not(UnaryExpr):
"""Negate the expression."""
operator = 'not'
class Neg(UnaryExpr):
"""Make the expression negative."""
operator = '-'
class Pos(UnaryExpr):
"""Make the expression positive (noop for most expressions)"""
operator = '+'
# Helpers for extensions
class EnvironmentAttribute(Expr):
"""Loads an attribute from the environment object. This is useful for
extensions that want to call a callback stored on the environment.
"""
fields = ('name',)
class ExtensionAttribute(Expr):
"""Returns the attribute of an extension bound to the environment.
The identifier is the identifier of the :class:`Extension`.
This node is usually constructed by calling the
:meth:`~jinja2.ext.Extension.attr` method on an extension.
"""
fields = ('identifier', 'name')
class ImportedName(Expr):
"""If created with an import name the import name is returned on node
access. For example ``ImportedName('cgi.escape')`` returns the `escape`
function from the cgi module on evaluation. Imports are optimized by the
compiler so there is no need to assign them to local variables.
"""
fields = ('importname',)
class InternalName(Expr):
"""An internal name in the compiler. You cannot create these nodes
yourself but the parser provides a
:meth:`~jinja2.parser.Parser.free_identifier` method that creates
a new identifier for you. This identifier is not available from the
template and is not threated specially by the compiler.
"""
fields = ('name',)
def __init__(self):
raise TypeError('Can\'t create internal names. Use the '
'`free_identifier` method on a parser.')
class MarkSafe(Expr):
"""Mark the wrapped expression as safe (wrap it as `Markup`)."""
fields = ('expr',)
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return Markup(self.expr.as_const(eval_ctx))
class MarkSafeIfAutoescape(Expr):
"""Mark the wrapped expression as safe (wrap it as `Markup`) but
only if autoescaping is active.
.. versionadded:: 2.5
"""
fields = ('expr',)
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
if eval_ctx.volatile:
raise Impossible()
expr = self.expr.as_const(eval_ctx)
if eval_ctx.autoescape:
return Markup(expr)
return expr
class ContextReference(Expr):
"""Returns the current template context. It can be used like a
:class:`Name` node, with a ``'load'`` ctx and will return the
current :class:`~jinja2.runtime.Context` object.
Here an example that assigns the current template name to a
variable named `foo`::
Assign(Name('foo', ctx='store'),
Getattr(ContextReference(), 'name'))
"""
class Continue(Stmt):
"""Continue a loop."""
class Break(Stmt):
"""Break a loop."""
class Scope(Stmt):
"""An artificial scope."""
fields = ('body',)
class EvalContextModifier(Stmt):
"""Modifies the eval context. For each option that should be modified,
a :class:`Keyword` has to be added to the :attr:`options` list.
Example to change the `autoescape` setting::
EvalContextModifier(options=[Keyword('autoescape', Const(True))])
"""
fields = ('options',)
class ScopedEvalContextModifier(EvalContextModifier):
"""Modifies the eval context and reverts it later. Works exactly like
:class:`EvalContextModifier` but will only modify the
:class:`~jinja2.nodes.EvalContext` for nodes in the :attr:`body`.
"""
fields = ('body',)
# make sure nobody creates custom nodes
def _failing_new(*args, **kwargs):
raise TypeError('can\'t create custom node types')
NodeType.__new__ = staticmethod(_failing_new); del _failing_new
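# --- Illustrative usage sketch (an addition, not part of jinja2 itself) ---
# Constant folding of a short-circuited boolean tree via ``as_const``;
# ``Const`` and ``EvalContext`` are defined earlier in this module, and the
# assertion below is what the ``And`` node's folding is expected to yield.
if __name__ == '__main__':
    from jinja2 import Environment
    _node = And(Const(True), Const(False))
    assert _node.as_const(EvalContext(Environment())) is False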
| mit |
alephu5/Soundbyte | environment/lib/python3.3/site-packages/IPython/utils/tests/test_traitlets.py | 2 | 29996 | # encoding: utf-8
"""
Tests for IPython.utils.traitlets.
Authors:
* Brian Granger
* Enthought, Inc. Some of the code in this file comes from enthought.traits
and is licensed under the BSD license. Also, many of the ideas also come
from enthought.traits even though our implementation is very different.
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008-2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import re
import sys
from unittest import TestCase
import nose.tools as nt
from nose import SkipTest
from IPython.utils.traitlets import (
HasTraits, MetaHasTraits, TraitType, Any, CBytes, Dict,
Int, Long, Integer, Float, Complex, Bytes, Unicode, TraitError,
Undefined, Type, This, Instance, TCPAddress, List, Tuple,
ObjectName, DottedObjectName, CRegExp, link
)
from IPython.utils import py3compat
from IPython.testing.decorators import skipif
#-----------------------------------------------------------------------------
# Helper classes for testing
#-----------------------------------------------------------------------------
class HasTraitsStub(HasTraits):
def _notify_trait(self, name, old, new):
self._notify_name = name
self._notify_old = old
self._notify_new = new
#-----------------------------------------------------------------------------
# Test classes
#-----------------------------------------------------------------------------
class TestTraitType(TestCase):
def test_get_undefined(self):
class A(HasTraits):
a = TraitType
a = A()
self.assertEqual(a.a, Undefined)
def test_set(self):
class A(HasTraitsStub):
a = TraitType
a = A()
a.a = 10
self.assertEqual(a.a, 10)
self.assertEqual(a._notify_name, 'a')
self.assertEqual(a._notify_old, Undefined)
self.assertEqual(a._notify_new, 10)
def test_validate(self):
class MyTT(TraitType):
def validate(self, inst, value):
return -1
class A(HasTraitsStub):
tt = MyTT
a = A()
a.tt = 10
self.assertEqual(a.tt, -1)
def test_default_validate(self):
class MyIntTT(TraitType):
def validate(self, obj, value):
if isinstance(value, int):
return value
self.error(obj, value)
class A(HasTraits):
tt = MyIntTT(10)
a = A()
self.assertEqual(a.tt, 10)
# Defaults are validated when the HasTraits is instantiated
class B(HasTraits):
tt = MyIntTT('bad default')
self.assertRaises(TraitError, B)
def test_is_valid_for(self):
class MyTT(TraitType):
def is_valid_for(self, value):
return True
class A(HasTraits):
tt = MyTT
a = A()
a.tt = 10
self.assertEqual(a.tt, 10)
def test_value_for(self):
class MyTT(TraitType):
def value_for(self, value):
return 20
class A(HasTraits):
tt = MyTT
a = A()
a.tt = 10
self.assertEqual(a.tt, 20)
def test_info(self):
class A(HasTraits):
tt = TraitType
a = A()
self.assertEqual(A.tt.info(), 'any value')
def test_error(self):
class A(HasTraits):
tt = TraitType
a = A()
self.assertRaises(TraitError, A.tt.error, a, 10)
def test_dynamic_initializer(self):
class A(HasTraits):
x = Int(10)
def _x_default(self):
return 11
class B(A):
x = Int(20)
class C(A):
def _x_default(self):
return 21
a = A()
self.assertEqual(a._trait_values, {})
self.assertEqual(list(a._trait_dyn_inits.keys()), ['x'])
self.assertEqual(a.x, 11)
self.assertEqual(a._trait_values, {'x': 11})
b = B()
self.assertEqual(b._trait_values, {'x': 20})
self.assertEqual(list(a._trait_dyn_inits.keys()), ['x'])
self.assertEqual(b.x, 20)
c = C()
self.assertEqual(c._trait_values, {})
self.assertEqual(list(a._trait_dyn_inits.keys()), ['x'])
self.assertEqual(c.x, 21)
self.assertEqual(c._trait_values, {'x': 21})
# Ensure that the base class remains unmolested when the _default
# initializer gets overridden in a subclass.
a = A()
c = C()
self.assertEqual(a._trait_values, {})
self.assertEqual(list(a._trait_dyn_inits.keys()), ['x'])
self.assertEqual(a.x, 11)
self.assertEqual(a._trait_values, {'x': 11})
class TestHasTraitsMeta(TestCase):
def test_metaclass(self):
self.assertEqual(type(HasTraits), MetaHasTraits)
class A(HasTraits):
a = Int
a = A()
self.assertEqual(type(a.__class__), MetaHasTraits)
self.assertEqual(a.a,0)
a.a = 10
self.assertEqual(a.a,10)
class B(HasTraits):
b = Int()
b = B()
self.assertEqual(b.b,0)
b.b = 10
self.assertEqual(b.b,10)
class C(HasTraits):
c = Int(30)
c = C()
self.assertEqual(c.c,30)
c.c = 10
self.assertEqual(c.c,10)
def test_this_class(self):
class A(HasTraits):
t = This()
tt = This()
class B(A):
tt = This()
ttt = This()
self.assertEqual(A.t.this_class, A)
self.assertEqual(B.t.this_class, A)
self.assertEqual(B.tt.this_class, B)
self.assertEqual(B.ttt.this_class, B)
class TestHasTraitsNotify(TestCase):
def setUp(self):
self._notify1 = []
self._notify2 = []
def notify1(self, name, old, new):
self._notify1.append((name, old, new))
def notify2(self, name, old, new):
self._notify2.append((name, old, new))
def test_notify_all(self):
class A(HasTraits):
a = Int
b = Float
a = A()
a.on_trait_change(self.notify1)
a.a = 0
self.assertEqual(len(self._notify1),0)
a.b = 0.0
self.assertEqual(len(self._notify1),0)
a.a = 10
self.assertTrue(('a',0,10) in self._notify1)
a.b = 10.0
self.assertTrue(('b',0.0,10.0) in self._notify1)
self.assertRaises(TraitError,setattr,a,'a','bad string')
self.assertRaises(TraitError,setattr,a,'b','bad string')
self._notify1 = []
a.on_trait_change(self.notify1,remove=True)
a.a = 20
a.b = 20.0
self.assertEqual(len(self._notify1),0)
def test_notify_one(self):
class A(HasTraits):
a = Int
b = Float
a = A()
a.on_trait_change(self.notify1, 'a')
a.a = 0
self.assertEqual(len(self._notify1),0)
a.a = 10
self.assertTrue(('a',0,10) in self._notify1)
self.assertRaises(TraitError,setattr,a,'a','bad string')
def test_subclass(self):
class A(HasTraits):
a = Int
class B(A):
b = Float
b = B()
self.assertEqual(b.a,0)
self.assertEqual(b.b,0.0)
b.a = 100
b.b = 100.0
self.assertEqual(b.a,100)
self.assertEqual(b.b,100.0)
def test_notify_subclass(self):
class A(HasTraits):
a = Int
class B(A):
b = Float
b = B()
b.on_trait_change(self.notify1, 'a')
b.on_trait_change(self.notify2, 'b')
b.a = 0
b.b = 0.0
self.assertEqual(len(self._notify1),0)
self.assertEqual(len(self._notify2),0)
b.a = 10
b.b = 10.0
self.assertTrue(('a',0,10) in self._notify1)
self.assertTrue(('b',0.0,10.0) in self._notify2)
def test_static_notify(self):
class A(HasTraits):
a = Int
_notify1 = []
def _a_changed(self, name, old, new):
self._notify1.append((name, old, new))
a = A()
a.a = 0
# This is broken!!!
self.assertEqual(len(a._notify1),0)
a.a = 10
self.assertTrue(('a',0,10) in a._notify1)
class B(A):
b = Float
_notify2 = []
def _b_changed(self, name, old, new):
self._notify2.append((name, old, new))
b = B()
b.a = 10
b.b = 10.0
self.assertTrue(('a',0,10) in b._notify1)
self.assertTrue(('b',0.0,10.0) in b._notify2)
def test_notify_args(self):
def callback0():
self.cb = ()
def callback1(name):
self.cb = (name,)
def callback2(name, new):
self.cb = (name, new)
def callback3(name, old, new):
self.cb = (name, old, new)
class A(HasTraits):
a = Int
a = A()
a.on_trait_change(callback0, 'a')
a.a = 10
self.assertEqual(self.cb,())
a.on_trait_change(callback0, 'a', remove=True)
a.on_trait_change(callback1, 'a')
a.a = 100
self.assertEqual(self.cb,('a',))
a.on_trait_change(callback1, 'a', remove=True)
a.on_trait_change(callback2, 'a')
a.a = 1000
self.assertEqual(self.cb,('a',1000))
a.on_trait_change(callback2, 'a', remove=True)
a.on_trait_change(callback3, 'a')
a.a = 10000
self.assertEqual(self.cb,('a',1000,10000))
a.on_trait_change(callback3, 'a', remove=True)
self.assertEqual(len(a._trait_notifiers['a']),0)
def test_notify_only_once(self):
class A(HasTraits):
listen_to = ['a']
a = Int(0)
b = 0
def __init__(self, **kwargs):
super(A, self).__init__(**kwargs)
self.on_trait_change(self.listener1, ['a'])
def listener1(self, name, old, new):
self.b += 1
class B(A):
c = 0
d = 0
def __init__(self, **kwargs):
super(B, self).__init__(**kwargs)
self.on_trait_change(self.listener2)
def listener2(self, name, old, new):
self.c += 1
def _a_changed(self, name, old, new):
self.d += 1
b = B()
b.a += 1
self.assertEqual(b.b, b.c)
self.assertEqual(b.b, b.d)
b.a += 1
self.assertEqual(b.b, b.c)
self.assertEqual(b.b, b.d)
class TestHasTraits(TestCase):
def test_trait_names(self):
class A(HasTraits):
i = Int
f = Float
a = A()
self.assertEqual(sorted(a.trait_names()),['f','i'])
self.assertEqual(sorted(A.class_trait_names()),['f','i'])
def test_trait_metadata(self):
class A(HasTraits):
i = Int(config_key='MY_VALUE')
a = A()
self.assertEqual(a.trait_metadata('i','config_key'), 'MY_VALUE')
def test_traits(self):
class A(HasTraits):
i = Int
f = Float
a = A()
self.assertEqual(a.traits(), dict(i=A.i, f=A.f))
self.assertEqual(A.class_traits(), dict(i=A.i, f=A.f))
def test_traits_metadata(self):
class A(HasTraits):
i = Int(config_key='VALUE1', other_thing='VALUE2')
f = Float(config_key='VALUE3', other_thing='VALUE2')
j = Int(0)
a = A()
self.assertEqual(a.traits(), dict(i=A.i, f=A.f, j=A.j))
traits = a.traits(config_key='VALUE1', other_thing='VALUE2')
self.assertEqual(traits, dict(i=A.i))
# This passes, but it shouldn't because I am replicating a bug in
# traits.
traits = a.traits(config_key=lambda v: True)
self.assertEqual(traits, dict(i=A.i, f=A.f, j=A.j))
def test_init(self):
class A(HasTraits):
i = Int()
x = Float()
a = A(i=1, x=10.0)
self.assertEqual(a.i, 1)
self.assertEqual(a.x, 10.0)
def test_positional_args(self):
class A(HasTraits):
i = Int(0)
def __init__(self, i):
super(A, self).__init__()
self.i = i
a = A(5)
self.assertEqual(a.i, 5)
# should raise TypeError if no positional arg given
self.assertRaises(TypeError, A)
#-----------------------------------------------------------------------------
# Tests for specific trait types
#-----------------------------------------------------------------------------
class TestType(TestCase):
def test_default(self):
class B(object): pass
class A(HasTraits):
klass = Type
a = A()
self.assertEqual(a.klass, None)
a.klass = B
self.assertEqual(a.klass, B)
self.assertRaises(TraitError, setattr, a, 'klass', 10)
def test_value(self):
class B(object): pass
class C(object): pass
class A(HasTraits):
klass = Type(B)
a = A()
self.assertEqual(a.klass, B)
self.assertRaises(TraitError, setattr, a, 'klass', C)
self.assertRaises(TraitError, setattr, a, 'klass', object)
a.klass = B
def test_allow_none(self):
class B(object): pass
class C(B): pass
class A(HasTraits):
klass = Type(B, allow_none=False)
a = A()
self.assertEqual(a.klass, B)
self.assertRaises(TraitError, setattr, a, 'klass', None)
a.klass = C
self.assertEqual(a.klass, C)
def test_validate_klass(self):
class A(HasTraits):
klass = Type('no strings allowed')
self.assertRaises(ImportError, A)
class A(HasTraits):
klass = Type('rub.adub.Duck')
self.assertRaises(ImportError, A)
def test_validate_default(self):
class B(object): pass
class A(HasTraits):
klass = Type('bad default', B)
self.assertRaises(ImportError, A)
class C(HasTraits):
klass = Type(None, B, allow_none=False)
self.assertRaises(TraitError, C)
def test_str_klass(self):
class A(HasTraits):
klass = Type('IPython.utils.ipstruct.Struct')
from IPython.utils.ipstruct import Struct
a = A()
a.klass = Struct
self.assertEqual(a.klass, Struct)
self.assertRaises(TraitError, setattr, a, 'klass', 10)
class TestInstance(TestCase):
def test_basic(self):
class Foo(object): pass
class Bar(Foo): pass
class Bah(object): pass
class A(HasTraits):
inst = Instance(Foo)
a = A()
self.assertTrue(a.inst is None)
a.inst = Foo()
self.assertTrue(isinstance(a.inst, Foo))
a.inst = Bar()
self.assertTrue(isinstance(a.inst, Foo))
self.assertRaises(TraitError, setattr, a, 'inst', Foo)
self.assertRaises(TraitError, setattr, a, 'inst', Bar)
self.assertRaises(TraitError, setattr, a, 'inst', Bah())
def test_unique_default_value(self):
class Foo(object): pass
class A(HasTraits):
inst = Instance(Foo,(),{})
a = A()
b = A()
self.assertTrue(a.inst is not b.inst)
def test_args_kw(self):
class Foo(object):
def __init__(self, c): self.c = c
class Bar(object): pass
class Bah(object):
def __init__(self, c, d):
self.c = c; self.d = d
class A(HasTraits):
inst = Instance(Foo, (10,))
a = A()
self.assertEqual(a.inst.c, 10)
class B(HasTraits):
inst = Instance(Bah, args=(10,), kw=dict(d=20))
b = B()
self.assertEqual(b.inst.c, 10)
self.assertEqual(b.inst.d, 20)
class C(HasTraits):
inst = Instance(Foo)
c = C()
self.assertTrue(c.inst is None)
def test_bad_default(self):
class Foo(object): pass
class A(HasTraits):
inst = Instance(Foo, allow_none=False)
self.assertRaises(TraitError, A)
def test_instance(self):
class Foo(object): pass
def inner():
class A(HasTraits):
inst = Instance(Foo())
self.assertRaises(TraitError, inner)
class TestThis(TestCase):
def test_this_class(self):
class Foo(HasTraits):
this = This
f = Foo()
self.assertEqual(f.this, None)
g = Foo()
f.this = g
self.assertEqual(f.this, g)
self.assertRaises(TraitError, setattr, f, 'this', 10)
def test_this_inst(self):
class Foo(HasTraits):
this = This()
f = Foo()
f.this = Foo()
self.assertTrue(isinstance(f.this, Foo))
def test_subclass(self):
class Foo(HasTraits):
t = This()
class Bar(Foo):
pass
f = Foo()
b = Bar()
f.t = b
b.t = f
self.assertEqual(f.t, b)
self.assertEqual(b.t, f)
def test_subclass_override(self):
class Foo(HasTraits):
t = This()
class Bar(Foo):
t = This()
f = Foo()
b = Bar()
f.t = b
self.assertEqual(f.t, b)
self.assertRaises(TraitError, setattr, b, 't', f)
class TraitTestBase(TestCase):
"""A best testing class for basic trait types."""
def assign(self, value):
self.obj.value = value
def coerce(self, value):
return value
def test_good_values(self):
if hasattr(self, '_good_values'):
for value in self._good_values:
self.assign(value)
self.assertEqual(self.obj.value, self.coerce(value))
def test_bad_values(self):
if hasattr(self, '_bad_values'):
for value in self._bad_values:
try:
self.assertRaises(TraitError, self.assign, value)
except AssertionError:
assert False, value
def test_default_value(self):
if hasattr(self, '_default_value'):
self.assertEqual(self._default_value, self.obj.value)
def tearDown(self):
# restore default value after tests, if set
if hasattr(self, '_default_value'):
self.obj.value = self._default_value
class AnyTrait(HasTraits):
value = Any
class AnyTraitTest(TraitTestBase):
obj = AnyTrait()
_default_value = None
_good_values = [10.0, 'ten', u'ten', [10], {'ten': 10},(10,), None, 1j]
_bad_values = []
class IntTrait(HasTraits):
value = Int(99)
class TestInt(TraitTestBase):
obj = IntTrait()
_default_value = 99
_good_values = [10, -10]
_bad_values = ['ten', u'ten', [10], {'ten': 10},(10,), None, 1j,
10.1, -10.1, '10L', '-10L', '10.1', '-10.1', u'10L',
u'-10L', u'10.1', u'-10.1', '10', '-10', u'10', u'-10']
if not py3compat.PY3:
_bad_values.extend([long(10), long(-10), 10*sys.maxint, -10*sys.maxint])
class LongTrait(HasTraits):
value = Long(99 if py3compat.PY3 else long(99))
class TestLong(TraitTestBase):
obj = LongTrait()
_default_value = 99 if py3compat.PY3 else long(99)
_good_values = [10, -10]
_bad_values = ['ten', u'ten', [10], {'ten': 10},(10,),
None, 1j, 10.1, -10.1, '10', '-10', '10L', '-10L', '10.1',
'-10.1', u'10', u'-10', u'10L', u'-10L', u'10.1',
u'-10.1']
if not py3compat.PY3:
# maxint undefined on py3, because int == long
_good_values.extend([long(10), long(-10), 10*sys.maxint, -10*sys.maxint])
_bad_values.extend([[long(10)], (long(10),)])
@skipif(py3compat.PY3, "not relevant on py3")
def test_cast_small(self):
"""Long casts ints to long"""
self.obj.value = 10
self.assertEqual(type(self.obj.value), long)
class IntegerTrait(HasTraits):
value = Integer(1)
class TestInteger(TestLong):
obj = IntegerTrait()
_default_value = 1
def coerce(self, n):
return int(n)
@skipif(py3compat.PY3, "not relevant on py3")
def test_cast_small(self):
"""Integer casts small longs to int"""
self.obj.value = long(100)
self.assertEqual(type(self.obj.value), int)
class FloatTrait(HasTraits):
value = Float(99.0)
class TestFloat(TraitTestBase):
obj = FloatTrait()
_default_value = 99.0
_good_values = [10, -10, 10.1, -10.1]
_bad_values = ['ten', u'ten', [10], {'ten': 10},(10,), None,
1j, '10', '-10', '10L', '-10L', '10.1', '-10.1', u'10',
u'-10', u'10L', u'-10L', u'10.1', u'-10.1']
if not py3compat.PY3:
_bad_values.extend([long(10), long(-10)])
class ComplexTrait(HasTraits):
value = Complex(99.0-99.0j)
class TestComplex(TraitTestBase):
obj = ComplexTrait()
_default_value = 99.0-99.0j
_good_values = [10, -10, 10.1, -10.1, 10j, 10+10j, 10-10j,
10.1j, 10.1+10.1j, 10.1-10.1j]
_bad_values = [u'10L', u'-10L', 'ten', [10], {'ten': 10},(10,), None]
if not py3compat.PY3:
_bad_values.extend([long(10), long(-10)])
class BytesTrait(HasTraits):
value = Bytes(b'string')
class TestBytes(TraitTestBase):
obj = BytesTrait()
_default_value = b'string'
_good_values = [b'10', b'-10', b'10L',
b'-10L', b'10.1', b'-10.1', b'string']
_bad_values = [10, -10, 10.1, -10.1, 1j, [10],
['ten'],{'ten': 10},(10,), None, u'string']
if not py3compat.PY3:
_bad_values.extend([long(10), long(-10)])
class UnicodeTrait(HasTraits):
value = Unicode(u'unicode')
class TestUnicode(TraitTestBase):
obj = UnicodeTrait()
_default_value = u'unicode'
_good_values = ['10', '-10', '10L', '-10L', '10.1',
'-10.1', '', u'', 'string', u'string', u"€"]
_bad_values = [10, -10, 10.1, -10.1, 1j,
[10], ['ten'], [u'ten'], {'ten': 10},(10,), None]
if not py3compat.PY3:
_bad_values.extend([long(10), long(-10)])
class ObjectNameTrait(HasTraits):
value = ObjectName("abc")
class TestObjectName(TraitTestBase):
obj = ObjectNameTrait()
_default_value = "abc"
_good_values = ["a", "gh", "g9", "g_", "_G", u"a345_"]
_bad_values = [1, "", u"€", "9g", "!", "#abc", "aj@", "a.b", "a()", "a[0]",
object(), object]
if sys.version_info[0] < 3:
_bad_values.append(u"þ")
else:
_good_values.append(u"þ") # þ=1 is valid in Python 3 (PEP 3131).
class DottedObjectNameTrait(HasTraits):
value = DottedObjectName("a.b")
class TestDottedObjectName(TraitTestBase):
obj = DottedObjectNameTrait()
_default_value = "a.b"
_good_values = ["A", "y.t", "y765.__repr__", "os.path.join", u"os.path.join"]
_bad_values = [1, u"abc.€", "_.@", ".", ".abc", "abc.", ".abc."]
if sys.version_info[0] < 3:
_bad_values.append(u"t.þ")
else:
_good_values.append(u"t.þ")
class TCPAddressTrait(HasTraits):
value = TCPAddress()
class TestTCPAddress(TraitTestBase):
obj = TCPAddressTrait()
_default_value = ('127.0.0.1',0)
_good_values = [('localhost',0),('192.168.0.1',1000),('www.google.com',80)]
_bad_values = [(0,0),('localhost',10.0),('localhost',-1)]
class ListTrait(HasTraits):
value = List(Int)
class TestList(TraitTestBase):
obj = ListTrait()
_default_value = []
_good_values = [[], [1], list(range(10)), (1,2)]
_bad_values = [10, [1,'a'], 'a']
def coerce(self, value):
if value is not None:
value = list(value)
return value
class LenListTrait(HasTraits):
value = List(Int, [0], minlen=1, maxlen=2)
class TestLenList(TraitTestBase):
obj = LenListTrait()
_default_value = [0]
_good_values = [[1], [1,2], (1,2)]
_bad_values = [10, [1,'a'], 'a', [], list(range(3))]
def coerce(self, value):
if value is not None:
value = list(value)
return value
class TupleTrait(HasTraits):
value = Tuple(Int)
class TestTupleTrait(TraitTestBase):
obj = TupleTrait()
_default_value = None
_good_values = [(1,), None, (0,), [1]]
_bad_values = [10, (1,2), ('a'), ()]
def coerce(self, value):
if value is not None:
value = tuple(value)
return value
def test_invalid_args(self):
self.assertRaises(TypeError, Tuple, 5)
self.assertRaises(TypeError, Tuple, default_value='hello')
t = Tuple(Int, CBytes, default_value=(1,5))
class LooseTupleTrait(HasTraits):
value = Tuple((1,2,3))
class TestLooseTupleTrait(TraitTestBase):
obj = LooseTupleTrait()
_default_value = (1,2,3)
_good_values = [(1,), None, [1], (0,), tuple(range(5)), tuple('hello'), ('a',5), ()]
_bad_values = [10, 'hello', {}]
def coerce(self, value):
if value is not None:
value = tuple(value)
return value
def test_invalid_args(self):
self.assertRaises(TypeError, Tuple, 5)
self.assertRaises(TypeError, Tuple, default_value='hello')
t = Tuple(Int, CBytes, default_value=(1,5))
class MultiTupleTrait(HasTraits):
value = Tuple(Int, Bytes, default_value=[99,b'bottles'])
class TestMultiTuple(TraitTestBase):
obj = MultiTupleTrait()
_default_value = (99,b'bottles')
_good_values = [(1,b'a'), (2,b'b')]
_bad_values = ((),10, b'a', (1,b'a',3), (b'a',1), (1, u'a'))
class CRegExpTrait(HasTraits):
value = CRegExp(r'')
class TestCRegExp(TraitTestBase):
def coerce(self, value):
return re.compile(value)
obj = CRegExpTrait()
_default_value = re.compile(r'')
_good_values = [r'\d+', re.compile(r'\d+')]
_bad_values = [r'(', None, ()]
class DictTrait(HasTraits):
value = Dict()
def test_dict_assignment():
d = dict()
c = DictTrait()
c.value = d
d['a'] = 5
nt.assert_equal(d, c.value)
nt.assert_true(c.value is d)
class TestLink(TestCase):
def test_connect_same(self):
"""Verify two traitlets of the same type can be linked together using link."""
# Create two simple classes with Int traitlets.
class A(HasTraits):
value = Int()
a = A(value=9)
b = A(value=8)
        # Connect the two classes.
c = link((a, 'value'), (b, 'value'))
# Make sure the values are the same at the point of linking.
self.assertEqual(a.value, b.value)
# Change one of the values to make sure they stay in sync.
a.value = 5
self.assertEqual(a.value, b.value)
b.value = 6
self.assertEqual(a.value, b.value)
def test_link_different(self):
"""Verify two traitlets of different types can be linked together using link."""
# Create two simple classes with Int traitlets.
class A(HasTraits):
value = Int()
class B(HasTraits):
count = Int()
a = A(value=9)
b = B(count=8)
        # Connect the two classes.
c = link((a, 'value'), (b, 'count'))
# Make sure the values are the same at the point of linking.
self.assertEqual(a.value, b.count)
# Change one of the values to make sure they stay in sync.
a.value = 5
self.assertEqual(a.value, b.count)
b.count = 4
self.assertEqual(a.value, b.count)
def test_unlink(self):
"""Verify two linked traitlets can be unlinked."""
# Create two simple classes with Int traitlets.
class A(HasTraits):
value = Int()
a = A(value=9)
b = A(value=8)
# Connect the two classes.
c = link((a, 'value'), (b, 'value'))
a.value = 4
c.unlink()
# Change one of the values to make sure they don't stay in sync.
a.value = 5
self.assertNotEqual(a.value, b.value)
def test_callbacks(self):
"""Verify two linked traitlets have their callbacks called once."""
# Create two simple classes with Int traitlets.
class A(HasTraits):
value = Int()
class B(HasTraits):
count = Int()
a = A(value=9)
b = B(count=8)
# Register callbacks that count.
callback_count = []
def a_callback(name, old, new):
callback_count.append('a')
a.on_trait_change(a_callback, 'value')
def b_callback(name, old, new):
callback_count.append('b')
b.on_trait_change(b_callback, 'count')
# Connect the two classes.
c = link((a, 'value'), (b, 'count'))
# Make sure b's count was set to a's value once.
self.assertEqual(''.join(callback_count), 'b')
del callback_count[:]
# Make sure a's value was set to b's count once.
b.count = 5
self.assertEqual(''.join(callback_count), 'ba')
del callback_count[:]
# Make sure b's count was set to a's value once.
a.value = 4
self.assertEqual(''.join(callback_count), 'ab')
del callback_count[:]
| gpl-3.0 |
pakal/django-recurly | django_recurly/handlers.py | 1 | 3931 | """
Push notifications are not meant to be actionable and should not be used for
critical account functions like provisioning accounts. Use the receipt of a
push notification to trigger an API query, validating both the push
notification action and the details of the action.
http://docs.recurly.com/push-notifications
"""
from django_recurly import signals
# Push notification signal handlers
def new(sender, **kwargs):
"""Create the account and the subscription
We do these at the same time (rather than using
the new_account signal) to avoid concurrency problems.
"""
from django_recurly import models
models.Account.handle_notification(**kwargs)
def update(sender, **kwargs):
"""Update a subscription and account"""
from django_recurly import models
models.Account.handle_notification(**kwargs)
def payment(sender, **kwargs):
"""Update a payment and account"""
from django_recurly import models
models.Payment.handle_notification(**kwargs)
# Connect push notification signals
#signals.new_account_notification.connect(new)
signals.new_subscription_notification.connect(new)
signals.updated_subscription_notification.connect(update)
signals.expired_subscription_notification.connect(update)
signals.canceled_subscription_notification.connect(update)
signals.renewed_subscription_notification.connect(update)
signals.reactivated_account_notification.connect(update)
signals.canceled_account_notification.connect(update)
signals.billing_info_updated_notification.connect(update)
signals.successful_payment_notification.connect(payment)
signals.failed_payment_notification.connect(payment)
signals.successful_refund_notification.connect(payment)
signals.void_payment_notification.connect(payment)
## Model signal handlers ##
def account_post_save(sender, instance, created, **kwargs):
if created:
signals.account_created.send(sender=sender, account=instance)
else:
signals.account_updated.send(sender=sender, account=instance)
was_active = not created and instance._previous_state['state'] == 'active'
now_active = instance.is_active()
# Send account closed/opened signals
if was_active and not now_active:
signals.account_closed.send(sender=sender, account=instance)
elif not was_active and now_active:
signals.account_opened.send(sender=sender, account=instance)
def billing_info_post_save(sender, instance, created, **kwargs):
if created:
signals.billing_info_created.send(sender=sender, billing_info=instance)
else:
signals.billing_info_updated.send(sender=sender, billing_info=instance)
def subscription_post_save(sender, instance, created, **kwargs):
if created:
signals.subscription_created.send(sender=sender, subscription=instance)
else:
signals.subscription_updated.send(sender=sender, subscription=instance)
was_current = not created and instance._previous_state['state'] != 'expired'
now_current = instance.state != 'expired'
# Send subscription current/expired signals
if was_current and not now_current:
signals.subscription_expired.send(sender=sender, subscription=instance)
elif not was_current and now_current:
signals.subscription_current.send(sender=sender, subscription=instance)
def payment_post_save(sender, instance, created, **kwargs):
if created:
signals.payment_created.send(sender=sender, payment=instance)
else:
signals.payment_updated.send(sender=sender, payment=instance)
def token_post_save(sender, instance, created, **kwargs):
    # The original compared the builtin ``type`` against strings, which can
    # never match; the token's own type attribute is presumably what was
    # intended here.
    token_type = getattr(instance, 'type', None)
    if token_type == 'subscription':
        signals.subscription_token_created.send(sender=sender, token=instance)
    elif token_type == 'billing_info':
        signals.billing_info_token_created.send(sender=sender, payment=instance)
    elif token_type == 'invoice':
        signals.invoice_token_created.send(sender=sender, payment=instance)
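# --- Illustrative wiring sketch (an addition, not part of django-recurly) ---
# The model-signal handlers above only fire once they are connected to
# Django's ``post_save``; a minimal hookup might look like the function
# below.  The ``BillingInfo`` and ``Subscription`` model names are
# assumptions made for illustration.
def connect_model_signal_handlers():
    from django.db.models.signals import post_save
    from django_recurly import models
    post_save.connect(account_post_save, sender=models.Account)
    post_save.connect(billing_info_post_save, sender=models.BillingInfo)
    post_save.connect(subscription_post_save, sender=models.Subscription)
    post_save.connect(payment_post_save, sender=models.Payment)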
| bsd-3-clause |
kustodian/ansible-modules-core | windows/win_msi.py | 68 | 1736 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2014, Matt Martz <[email protected]>, and others
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# this is a windows documentation stub. actual code lives in the .ps1
# file of the same name
DOCUMENTATION = '''
---
module: win_msi
version_added: "1.7"
short_description: Installs and uninstalls Windows MSI files
description:
- Installs or uninstalls a Windows MSI file that is already located on the
target server
options:
path:
description:
- File system path to the MSI file to install
required: true
state:
description:
- Whether the MSI file should be installed or uninstalled
choices:
- present
- absent
default: present
creates:
description:
      - Path to a file created by installing the MSI, used to prevent
        Ansible from attempting to reinstall the package on every run
author: Matt Martz
'''
EXAMPLES = '''
# Install an MSI file
- win_msi: path=C:\\\\7z920-x64.msi
# Uninstall an MSI file
- win_msi: path=C:\\\\7z920-x64.msi state=absent
'''
| gpl-3.0 |
unaizalakain/django | tests/invalid_models_tests/test_backend_specific.py | 191 | 1024 | # -*- encoding: utf-8 -*-
from __future__ import unicode_literals
from django.core.checks import Error
from django.db import connections, models
from django.test import mock
from .base import IsolatedModelsTestCase
def dummy_allow_migrate(db, app_label, **hints):
# Prevent checks from being run on the 'other' database, which doesn't have
# its check_field() method mocked in the test.
return db == 'default'
class BackendSpecificChecksTests(IsolatedModelsTestCase):
@mock.patch('django.db.models.fields.router.allow_migrate', new=dummy_allow_migrate)
def test_check_field(self):
""" Test if backend specific checks are performed. """
error = Error('an error', hint=None)
class Model(models.Model):
field = models.IntegerField()
field = Model._meta.get_field('field')
with mock.patch.object(connections['default'].validation, 'check_field', return_value=[error]):
errors = field.check()
self.assertEqual(errors, [error])
| bsd-3-clause |
ahmadia/bokeh | bokeh/server/views/backbone.py | 29 | 11075 | from __future__ import absolute_import
import logging
log = logging.getLogger(__name__)
from flask import request, jsonify
from bokeh import protocol
from .bbauth import (
handle_auth_error
)
from ..app import bokeh_app
from ..crossdomain import crossdomain
from ..serverbb import get_temporary_docid, BokehServerTransaction
from ..views import make_json
from ..models import docs
def init_bokeh(clientdoc):
request.bokeh_server_document = clientdoc
clientdoc.autostore = False
clientdoc.autoadd = False
@bokeh_app.route("/bokeh/bb/<docid>/gc", methods=['POST'])
@handle_auth_error
def gc(docid):
# client = request.headers.get('client', 'python') # todo: not used?
doc = docs.Doc.load(bokeh_app.servermodel_storage, docid)
bokehuser = bokeh_app.current_user()
temporary_docid = get_temporary_docid(request, docid)
t = BokehServerTransaction(
bokehuser, doc, 'rw', temporary_docid=temporary_docid
)
t.load(gc=True)
t.save()
return jsonify(status='success')
# bulk upsert
@bokeh_app.route("/bokeh/bb/<docid>/bulkupsert", methods=['POST'])
@handle_auth_error
def bulk_upsert(docid):
''' Update or insert new objects for a given :class:`Document <bokeh.document.Document>`.
:param docid: id of the :class:`Document <bokeh.document.Document>`
to update or insert into
:status 200: when user is authorized
:status 401: when user is not authorized
'''
# endpoint is only used by python, therefore we don't process
# callbacks here
client = request.headers.get('client', 'python')
doc = docs.Doc.load(bokeh_app.servermodel_storage, docid)
bokehuser = bokeh_app.current_user()
temporary_docid = get_temporary_docid(request, docid)
t = BokehServerTransaction(
bokehuser, doc, 'rw', temporary_docid=temporary_docid
)
t.load()
clientdoc = t.clientdoc
data = protocol.deserialize_json(request.data.decode('utf-8'))
if client == 'python':
clientdoc.load(*data, events='none', dirty=True)
else:
clientdoc.load(*data, events='existing', dirty=True)
t.save()
msg = ws_update(clientdoc, t.write_docid, t.changed)
return make_json(msg)
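# --- Illustrative client sketch (an addition, not part of the bokeh server) --
# How a Python client could push serialized models to the endpoint above.
# The host/port default and the shape of ``model_attrs`` are assumptions
# made for illustration; ``protocol.serialize_json`` is imported at the top
# of this module.
def _example_bulk_upsert(docid, model_attrs, host='http://localhost:5006'):
    import requests as _requests
    payload = protocol.serialize_json(model_attrs)
    return _requests.post('%s/bokeh/bb/%s/bulkupsert' % (host, docid),
                          data=payload, headers={'client': 'python'})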
def ws_update(clientdoc, docid, models):
log.debug("sending wsupdate to %s", docid)
attrs = clientdoc.dump(*models)
msg = protocol.serialize_json({'msgtype' : 'modelpush',
'modelspecs' : attrs
})
bokeh_app.publisher.send("bokehplot:" + docid, msg)
return msg
def ws_delete(clientdoc, docid, models):
attrs = clientdoc.dump(*models)
msg = {
'msgtype' : 'modeldel',
'modelspecs' : attrs,
}
msg = protocol.serialize_json(msg)
bokeh_app.wsmanager.send("bokehplot:" + docid, msg)
return msg
# backbone functionality
@bokeh_app.route("/bokeh/bb/<docid>/<typename>/", methods=['POST'])
@handle_auth_error
def create(docid, typename):
    ''' Create new objects of the given type in a :class:`Document <bokeh.document.Document>`.
:param docid: id of the :class:`Document <bokeh.document.Document>`
to update or insert into
:status 200: when user is authorized
:status 401: when user is not authorized
'''
doc = docs.Doc.load(bokeh_app.servermodel_storage, docid)
bokehuser = bokeh_app.current_user()
temporary_docid = get_temporary_docid(request, docid)
t = BokehServerTransaction(
bokehuser, doc, 'rw', temporary_docid=temporary_docid
)
t.load()
modeldata = protocol.deserialize_json(request.data.decode('utf-8'))
modeldata = [{'type' : typename,
'attributes' : modeldata}]
t.clientdoc.load(*modeldata, dirty=True)
t.save()
ws_update(t.clientdoc, t.write_docid, modeldata)
return protocol.serialize_json(modeldata[0]['attributes'])
@handle_auth_error
def _bulkget(docid, typename=None):
doc = docs.Doc.load(bokeh_app.servermodel_storage, docid)
bokehuser = bokeh_app.current_user()
temporary_docid = get_temporary_docid(request, docid)
t = BokehServerTransaction(
bokehuser, doc, 'r', temporary_docid=temporary_docid
)
t.load()
clientdoc = t.clientdoc
all_models = clientdoc._models.values()
if typename is not None:
attrs = clientdoc.dump(*[x for x in all_models \
if x.__view_model__==typename])
attrs = [x['attributes'] for x in attrs]
return make_json(protocol.serialize_json(attrs))
else:
attrs = clientdoc.dump(*all_models)
return make_json(protocol.serialize_json(attrs))
@bokeh_app.route("/bokeh/bb/<docid>/", methods=['GET'])
def bulkget_without_typename(docid):
''' Retrieve all objects for a given :class:`Document <bokeh.document.Document>`.
:param docid: id of the :class:`Document <bokeh.document.Document>`
to update or insert into
:status 200: when user is authorized
:status 401: when user is not authorized
'''
return _bulkget(docid)
@bokeh_app.route("/bokeh/bb/<docid>/<typename>/", methods=['GET'])
def bulkget_with_typename(docid, typename):
''' Retrieve all objects of a specified typename for a
given :class:`Document <bokeh.document.Document>`.
:param docid: id of the :class:`Document <bokeh.document.Document>`
to update or insert into
:param typename: the type of objects to find and return
:status 200: when user is authorized
:status 401: when user is not authorized
'''
return _bulkget(docid, typename)
@crossdomain(origin="*", methods=['PATCH', 'GET', 'PUT'], headers=None)
def _handle_specific_model(docid, typename, id, method):
if method == 'PUT':
return update(docid, typename, id)
elif method == 'PATCH':
return update(docid, typename, id)
elif method == 'GET':
return getbyid(docid, typename, id)
elif method == 'DELETE':
return delete(docid, typename, id)
# route for working with individual models
@bokeh_app.route("/bokeh/bb/<docid>/<typename>/<id>/", methods=['GET', 'OPTIONS'])
def _handle_specific_model_get(docid, typename, id):
''' Retrieve a specific model with a given id and typename for a
given :class:`Document <bokeh.document.Document>`.
:param docid: id of the :class:`Document <bokeh.document.Document>`
to update or insert into
:param typename: the type of objects to find and return
:param id: unique id of the object to retrieve
:status 200: when user is authorized
:status 401: when user is not authorized
'''
return _handle_specific_model(docid, typename, id, request.method)
@bokeh_app.route("/bokeh/bb/<docid>/<typename>/<id>/", methods=['PUT'])
def _handle_specific_model_put(docid, typename, id):
''' Update a specific model with a given id and typename for a
given :class:`Document <bokeh.document.Document>`.
:param docid: id of the :class:`Document <bokeh.document.Document>`
to update or insert into
:param typename: the type of objects to find and return
:param id: unique id of the object to retrieve
:status 200: when user is authorized
:status 401: when user is not authorized
'''
return _handle_specific_model(docid, typename, id, request.method)
@bokeh_app.route("/bokeh/bb/<docid>/<typename>/<id>/", methods=['PATCH'])
def _handle_specific_model_patch(docid, typename, id):
''' Update a specific model with a given id and typename for a
given :class:`Document <bokeh.document.Document>`.
:param docid: id of the :class:`Document <bokeh.document.Document>`
to update or insert into
:param typename: the type of objects to find and return
:param id: unique id of the object to retrieve
:status 200: when user is authorized
:status 401: when user is not authorized
'''
return _handle_specific_model(docid, typename, id, request.method)
@bokeh_app.route("/bokeh/bb/<docid>/<typename>/<id>/", methods=['DELETE'])
def _handle_specific_model_delete(docid, typename, id):
''' Delete a specific model with a given id and typename for a
given :class:`Document <bokeh.document.Document>`.
:param docid: id of the :class:`Document <bokeh.document.Document>`
to update or insert into
:param typename: the type of objects to find and return
:param id: unique id of the object to retrieve
:status 200: when user is authorized
:status 401: when user is not authorized
'''
return _handle_specific_model(docid, typename, id, request.method)
# individual model methods
@handle_auth_error
def getbyid(docid, typename, id):
doc = docs.Doc.load(bokeh_app.servermodel_storage, docid)
bokehuser = bokeh_app.current_user()
temporary_docid = get_temporary_docid(request, docid)
t = BokehServerTransaction(
bokehuser, doc, 'r', temporary_docid=temporary_docid
)
t.load()
clientdoc = t.clientdoc
attr = clientdoc.dump(clientdoc._models[id])[0]['attributes']
return make_json(protocol.serialize_json(attr))
@handle_auth_error
def update(docid, typename, id):
"""we need to distinguish between writing and patching models
namely in writing, we shouldn't remove unspecified attrs
(we currently don't handle this correctly)
"""
doc = docs.Doc.load(bokeh_app.servermodel_storage, docid)
bokehuser = bokeh_app.current_user()
temporary_docid = get_temporary_docid(request, docid)
t = BokehServerTransaction(
bokehuser, doc, 'rw', temporary_docid=temporary_docid
)
t.load()
modeldata = protocol.deserialize_json(request.data.decode('utf-8'))
### horrible hack, we need to pop off the noop object if it exists
modeldata.pop('noop', None)
clientdoc = t.clientdoc
log.info("loading done %s", len(clientdoc._models.values()))
# patch id is not passed...
modeldata['id'] = id
modeldata = {'type' : typename,
'attributes' : modeldata}
clientdoc.load(modeldata, events='existing', dirty=True)
t.save()
ws_update(clientdoc, t.write_docid, t.changed)
# backbone expects us to send back attrs of this model, but it doesn't
# make sense to do so because we modify other models, and we want this to
# all go out over the websocket channel
return make_json(protocol.serialize_json({'noop' : True}))
@handle_auth_error
def delete(docid, typename, id):
    # Upstream flagged this endpoint as broken because ``obj`` was never
    # defined; the loaded model is presumably what should be deleted, and
    # the transaction is loaded first so its client document is usable.
    doc = docs.Doc.load(bokeh_app.servermodel_storage, docid)
    bokehuser = bokeh_app.current_user()
    temporary_docid = get_temporary_docid(request, docid)
    t = BokehServerTransaction(
        bokehuser, doc, 'rw', temporary_docid=temporary_docid
    )
    t.load()
    clientdoc = t.clientdoc
    model = clientdoc._models[id]
    bokeh_app.backbone_storage.del_obj(t.write_docid, model)
t.save()
ws_delete(clientdoc, t.write_docid, [model])
return make_json(protocol.serialize_json(clientdoc.dump(model)[0]['attributes']))
| bsd-3-clause |
google/mysql-protobuf | storage/ndb/mcc/tst/unittest2/runner.py | 164 | 6757 | """Running tests"""
import sys
import time
import unittest
from unittest2 import result
try:
from unittest2.signals import registerResult
except ImportError:
def registerResult(_):
pass
__unittest = True
class _WritelnDecorator(object):
"""Used to decorate file-like objects with a handy 'writeln' method"""
def __init__(self,stream):
self.stream = stream
def __getattr__(self, attr):
if attr in ('stream', '__getstate__'):
raise AttributeError(attr)
return getattr(self.stream,attr)
def writeln(self, arg=None):
if arg:
self.write(arg)
self.write('\n') # text-mode streams translate to \r\n if needed
class TextTestResult(result.TestResult):
"""A test result class that can print formatted text results to a stream.
Used by TextTestRunner.
"""
separator1 = '=' * 70
separator2 = '-' * 70
def __init__(self, stream, descriptions, verbosity):
super(TextTestResult, self).__init__()
self.stream = stream
self.showAll = verbosity > 1
self.dots = verbosity == 1
self.descriptions = descriptions
def getDescription(self, test):
doc_first_line = test.shortDescription()
if self.descriptions and doc_first_line:
return '\n'.join((str(test), doc_first_line))
else:
return str(test)
def startTest(self, test):
super(TextTestResult, self).startTest(test)
if self.showAll:
self.stream.write(self.getDescription(test))
self.stream.write(" ... ")
self.stream.flush()
def addSuccess(self, test):
super(TextTestResult, self).addSuccess(test)
if self.showAll:
self.stream.writeln("ok")
elif self.dots:
self.stream.write('.')
self.stream.flush()
def addError(self, test, err):
super(TextTestResult, self).addError(test, err)
if self.showAll:
self.stream.writeln("ERROR")
elif self.dots:
self.stream.write('E')
self.stream.flush()
def addFailure(self, test, err):
super(TextTestResult, self).addFailure(test, err)
if self.showAll:
self.stream.writeln("FAIL")
elif self.dots:
self.stream.write('F')
self.stream.flush()
def addSkip(self, test, reason):
super(TextTestResult, self).addSkip(test, reason)
if self.showAll:
self.stream.writeln("skipped %r" % (reason,))
elif self.dots:
self.stream.write("s")
self.stream.flush()
def addExpectedFailure(self, test, err):
super(TextTestResult, self).addExpectedFailure(test, err)
if self.showAll:
self.stream.writeln("expected failure")
elif self.dots:
self.stream.write("x")
self.stream.flush()
def addUnexpectedSuccess(self, test):
super(TextTestResult, self).addUnexpectedSuccess(test)
if self.showAll:
self.stream.writeln("unexpected success")
elif self.dots:
self.stream.write("u")
self.stream.flush()
def printErrors(self):
if self.dots or self.showAll:
self.stream.writeln()
self.printErrorList('ERROR', self.errors)
self.printErrorList('FAIL', self.failures)
def printErrorList(self, flavour, errors):
for test, err in errors:
self.stream.writeln(self.separator1)
self.stream.writeln("%s: %s" % (flavour, self.getDescription(test)))
self.stream.writeln(self.separator2)
self.stream.writeln("%s" % err)
def stopTestRun(self):
super(TextTestResult, self).stopTestRun()
self.printErrors()
class TextTestRunner(unittest.TextTestRunner):
"""A test runner class that displays results in textual form.
It prints out the names of tests as they are run, errors as they
occur, and a summary of the results at the end of the test run.
"""
resultclass = TextTestResult
def __init__(self, stream=sys.stderr, descriptions=True, verbosity=1,
failfast=False, buffer=False, resultclass=None):
self.stream = _WritelnDecorator(stream)
self.descriptions = descriptions
self.verbosity = verbosity
self.failfast = failfast
self.buffer = buffer
if resultclass is not None:
self.resultclass = resultclass
def _makeResult(self):
return self.resultclass(self.stream, self.descriptions, self.verbosity)
def run(self, test):
"Run the given test case or test suite."
result = self._makeResult()
result.failfast = self.failfast
result.buffer = self.buffer
registerResult(result)
startTime = time.time()
startTestRun = getattr(result, 'startTestRun', None)
if startTestRun is not None:
startTestRun()
try:
test(result)
finally:
stopTestRun = getattr(result, 'stopTestRun', None)
if stopTestRun is not None:
stopTestRun()
else:
result.printErrors()
stopTime = time.time()
timeTaken = stopTime - startTime
if hasattr(result, 'separator2'):
self.stream.writeln(result.separator2)
run = result.testsRun
self.stream.writeln("Ran %d test%s in %.3fs" %
(run, run != 1 and "s" or "", timeTaken))
self.stream.writeln()
expectedFails = unexpectedSuccesses = skipped = 0
try:
results = map(len, (result.expectedFailures,
result.unexpectedSuccesses,
result.skipped))
expectedFails, unexpectedSuccesses, skipped = results
except AttributeError:
pass
infos = []
if not result.wasSuccessful():
self.stream.write("FAILED")
failed, errored = map(len, (result.failures, result.errors))
if failed:
infos.append("failures=%d" % failed)
if errored:
infos.append("errors=%d" % errored)
else:
self.stream.write("OK")
if skipped:
infos.append("skipped=%d" % skipped)
if expectedFails:
infos.append("expected failures=%d" % expectedFails)
if unexpectedSuccesses:
infos.append("unexpected successes=%d" % unexpectedSuccesses)
if infos:
self.stream.writeln(" (%s)" % (", ".join(infos),))
else:
self.stream.write("\n")
return result
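# --- Illustrative usage sketch (an addition, not part of unittest2) ---
# Running a trivial suite through TextTestRunner with verbose output.
if __name__ == '__main__':
    class _SmokeTest(unittest.TestCase):
        def test_truth(self):
            self.assertTrue(True)

    _suite = unittest.TestLoader().loadTestsFromTestCase(_SmokeTest)
    TextTestRunner(verbosity=2).run(_suite)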
| gpl-2.0 |
dstiert/Wox | PythonHome/Lib/site-packages/pip/_vendor/requests/__init__.py | 327 | 1856 | # -*- coding: utf-8 -*-
# __
# /__) _ _ _ _ _/ _
# / ( (- (/ (/ (- _) / _)
# /
"""
requests HTTP library
~~~~~~~~~~~~~~~~~~~~~
Requests is an HTTP library, written in Python, for human beings. Basic GET
usage:
>>> import requests
>>> r = requests.get('http://python.org')
>>> r.status_code
200
>>> 'Python is a programming language' in r.content
True
... or POST:
>>> payload = dict(key1='value1', key2='value2')
>>> r = requests.post("http://httpbin.org/post", data=payload)
>>> print(r.text)
{
...
"form": {
"key2": "value2",
"key1": "value1"
},
...
}
The other HTTP methods are supported - see `requests.api`. Full documentation
is at <http://python-requests.org>.
:copyright: (c) 2014 by Kenneth Reitz.
:license: Apache 2.0, see LICENSE for more details.
"""
__title__ = 'requests'
__version__ = '2.3.0'
__build__ = 0x020300
__author__ = 'Kenneth Reitz'
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2014 Kenneth Reitz'
# Attempt to enable urllib3's SNI support, if possible
try:
from .packages.urllib3.contrib import pyopenssl
pyopenssl.inject_into_urllib3()
except ImportError:
pass
from . import utils
from .models import Request, Response, PreparedRequest
from .api import request, get, head, post, patch, put, delete, options
from .sessions import session, Session
from .status_codes import codes
from .exceptions import (
RequestException, Timeout, URLRequired,
TooManyRedirects, HTTPError, ConnectionError
)
# Set default logging handler to avoid "No handler found" warnings.
import logging
try: # Python 2.7+
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
logging.getLogger(__name__).addHandler(NullHandler())
| mit |
felipenaselva/repo.felipe | plugin.video.salts/scrapers/couchtunerv1_scraper.py | 1 | 4132 | """
SALTS XBMC Addon
Copyright (C) 2014 tknorris
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import re
import urlparse
import log_utils
import kodi
import dom_parser
from salts_lib import scraper_utils
from salts_lib.constants import FORCE_NO_MATCH
from salts_lib.constants import QUALITIES
from salts_lib.constants import VIDEO_TYPES
import scraper
BASE_URL = 'http://www.couchtuner.ch'
BASE_URL2 = 'http://couchtuner.city'
class Scraper(scraper.Scraper):
base_url = BASE_URL
def __init__(self, timeout=scraper.DEFAULT_TIMEOUT):
self.timeout = timeout
self.base_url = kodi.get_setting('%s-base_url' % (self.get_name()))
@classmethod
def provides(cls):
return frozenset([VIDEO_TYPES.TVSHOW, VIDEO_TYPES.EPISODE])
@classmethod
def get_name(cls):
return 'CouchTunerV1'
def get_sources(self, video):
source_url = self.get_url(video)
hosters = []
if source_url and source_url != FORCE_NO_MATCH:
url = urlparse.urljoin(self.base_url, source_url)
entry = ''
while True:
html = self._http_get(url, cache_limit=.5)
if not html:
url = urlparse.urljoin(BASE_URL2, source_url)
html = self._http_get(url, cache_limit=.5)
entry = dom_parser.parse_dom(html, 'div', {'class': 'entry'})
if entry:
entry = entry[0]
match = re.search('Watch it here\s*:.*?href="([^"]+)', entry, re.I)
if match:
url = match.group(1)
else:
break
else:
entry = ''
break
for tab in dom_parser.parse_dom(entry, 'div', {'class': '''[^'"]*postTabs_divs[^'"]*'''}):
match = re.search('<iframe[^>]*src="([^"]+)', tab, re.I | re.DOTALL)
if match:
link = match.group(1)
host = urlparse.urlparse(link).hostname
hoster = {'multi-part': False, 'host': host, 'class': self, 'quality': scraper_utils.get_quality(video, host, QUALITIES.HIGH), 'views': None, 'rating': None, 'url': link, 'direct': False}
hosters.append(hoster)
return hosters
def _get_episode_url(self, show_url, video):
episode_pattern = 'href="([^"]+[sS](?:eason-)?%s-[eE](?:pisode-)?%s-[^"]+)' % (video.season, video.episode)
title_pattern = 'href="(?P<url>[^"]+season-\d+-episode-\d+-[^"]+).*?8211;\s*(?P<title>[^<]+)'
return self._default_get_episode_url(show_url, video, episode_pattern, title_pattern)
def search(self, video_type, title, year, season=''):
show_list_url = urlparse.urljoin(self.base_url, '/tv-lists/')
html = self._http_get(show_list_url, cache_limit=8)
results = []
norm_title = scraper_utils.normalize_title(title)
for item in dom_parser.parse_dom(html, 'li'):
match = re.search('href="([^"]+)">(.*?)</a>', item)
if match:
url, match_title = match.groups()
match_title = re.sub('</?strong[^>]*>', '', match_title)
if norm_title in scraper_utils.normalize_title(match_title):
result = {'url': scraper_utils.pathify_url(url), 'title': scraper_utils.cleanse_title(match_title), 'year': ''}
results.append(result)
return results
| gpl-2.0 |
sethkontny/blaze | blaze/data/tests/test_usability.py | 1 | 2230 | from unittest import TestCase
import os
from tempfile import mktemp
import gzip
from blaze.utils import filetext, filetexts, tmpfile
from blaze.data import *
from blaze.py2help import skip
class TestResource(TestCase):
def setUp(self):
self.filename = mktemp()
def tearDown(self):
if os.path.exists(self.filename):
os.remove(self.filename)
def test_resource_csv(self):
with filetext('1,1\n2,2', extension='.csv') as fn:
dd = resource(fn, schema='2 * int')
assert isinstance(dd, CSV)
self.assertEqual(list(dd), [[1, 1], [2, 2]])
def test_resource_json(self):
with filetext('[[1,1], [2,2]]', extension='.json') as fn:
dd = resource(fn, schema='2 * int')
assert isinstance(dd, JSON)
self.assertEqual(list(dd), [[1, 1], [2, 2]])
def test_resource_gz(self):
with filetext('1,1\n2,2', extension='.csv.gz', open=gzip.open) as fn:
dd = resource(fn, schema='2 * int')
assert isinstance(dd, CSV)
self.assertEqual(dd.open, gzip.open)
self.assertEqual(list(dd), [[1, 1], [2, 2]])
def test_filesystem(self):
d = {'a.csv': '1,1\n2,2', 'b.csv': '1,1\n2,2'}
with filetexts(d) as filenames:
dd = resource('*.csv', schema='2 * int')
assert isinstance(dd, Files)
def test_sql(self):
assert isinstance(resource('sqlite:///:memory:::tablename',
schema='{x: int, y: int}'),
SQL)
@skip("This runs fine in isolation, segfaults in full test")
def test_hdf5(self):
with tmpfile('.hdf5') as filename:
assert isinstance(resource(filename + '::/path/to/data/',
mode='w', schema='2 * int'),
HDF5)
class TestCopy(TestCase):
def test_copy(self):
with filetext('1,1\n2,2', extension='.csv') as a:
with tmpfile(extension='.csv') as b:
A = resource(a, schema='2 * int')
B = resource(b, schema='2 * int', mode='a')
copy(A, B)
assert list(B) == [[1, 1], [2, 2]]
| bsd-3-clause |
chainer/chainer | chainer/training/extension.py | 8 | 6662 | from chainer.utils import argument
PRIORITY_WRITER = 300
PRIORITY_EDITOR = 200
PRIORITY_READER = 100
class Extension(object):
"""Base class of trainer extensions.
Extension of :class:`Trainer` is a callable object that takes the trainer
object as the argument. It also provides some default configurations as its
attributes, e.g. the default trigger and the default priority. This class
provides a set of typical default values for these attributes.
There are three ways to define users' own extensions: inheriting this
    class, decorating closures with :func:`make_extension`, or using any
    callable, including lambda functions, as extensions. The decorator can
    slightly reduce the overhead and is much easier to use, while this class
    provides more
flexibility (for example, it can have methods to configure the behavior).
Using a lambda function allows one-line coding for simple purposes, but
users have to specify the configurations as arguments to
:meth:`Trainer.extend`. For a callable not inheriting this class, the
default configurations of this class are used unless the user explicitly
    specifies them in the :meth:`Trainer.extend` method.
Attributes:
trigger: Default value of trigger for this extension. It is set to
``(1, 'iteration')`` by default.
priority: Default priority of the extension. It is set to
``PRIORITY_READER`` by default.
~Extension.name: Name of the extension. It is set to
``None`` by default. This value will be overwritten when
registering an extension to a trainer. See
:meth:`chainer.training.Trainer.extend` for details.
"""
trigger = 1, 'iteration'
priority = PRIORITY_READER
name = None
@property
def default_name(self):
"""Default name of the extension.
It is the name of the class by default. Implementation can override
this property, or provide a class attribute to hide it.
"""
return type(self).__name__
def __call__(self, trainer):
"""Invokes the extension.
Implementations should override this operator. This method is called
at iterations which the corresponding trigger accepts.
Args:
trainer (Trainer): Trainer object that calls this operator.
"""
raise NotImplementedError(
'Extension implementation must override __call__.')
def __getattr__(self, name):
if name == 'invoke_before_training':
raise AttributeError(
'invoke_before_training has been removed since Chainer '
'v2.0.0. Use Extension.initialize instead.')
raise AttributeError('{} object has no attribute {}'.format(
type(self).__name__, name))
def finalize(self):
"""Finalizes the extension.
This method is called at the end of the training loop.
"""
pass
def initialize(self, trainer):
"""Initializes up the trainer state.
This method is called before entering the training loop. An extension
that modifies the state of :class:`~chainer.training.Trainer` can
override this method to initialize it.
When the trainer has been restored from a snapshot, this method has to
recover an appropriate part of the state of the trainer.
For example, :class:`~chainer.training.extensions.ExponentialShift`
extension changes the optimizer's hyperparameter at each invocation.
Note that the hyperparameter is not saved to the snapshot; it is the
responsibility of the extension to recover the hyperparameter.
The :class:`~chainer.training.extensions.ExponentialShift` extension
recovers it in its ``initialize`` method if it has been loaded from a
snapshot, or just setting the initial value otherwise.
Args:
trainer (Trainer): Trainer object that runs the training loop.
"""
pass
def on_error(self, trainer, exc, tb):
"""Handles the error raised during training before finalization.
This method is called when an exception is thrown during the
training loop, before finalize. An extension that needs
different error handling from finalize, can override this
method to handle errors.
Args:
trainer (Trainer): Trainer object that runs the training loop.
exc (Exception): arbitrary exception thrown during update loop.
tb (traceback): traceback object of the exception
"""
pass
def serialize(self, serializer):
"""Serializes the extension state.
It is called when a trainer that owns this extension is serialized. It
serializes nothing by default.
"""
pass
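# A hedged illustration, not part of the original source: the minimal way to
# write an extension as a subclass of Extension. Only ``__call__`` must be
# overridden; the once-per-epoch trigger is an arbitrary choice here, and the
# ``trainer.updater.iteration`` attribute is assumed from the Trainer API.
class _PrintEpochExtension(Extension):
    trigger = 1, 'epoch'
    def __call__(self, trainer):
        # Called by the trainer each time the trigger fires.
        print('iteration: %d' % trainer.updater.iteration)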
def make_extension(trigger=None, default_name=None, priority=None,
finalizer=None, initializer=None, on_error=None, **kwargs):
"""Decorator to make given functions into trainer extensions.
This decorator just adds some attributes to a given function. The values of
the attributes are given by the arguments of this decorator.
See :class:`Extension` for details of trainer extensions. Most of the
default values of arguments also follow those for this class.
Args:
trigger: Default trigger of the extension.
default_name: Default name of the extension. The name of a given
function is used by default.
priority (int): Default priority of the extension.
finalizer: Finalizer function of this extension. It is
called at the end of the training loop.
initializer: Initializer function of this extension. It is called at
the beginning of the training loop.
on_error: Error handler callback function of this extension. It is
called after an error is raised during the trainer loop.
"""
if kwargs:
msg = ('invoke_before_training has been removed since Chainer v2.0.0. '
'Use initializer= instead.')
argument.check_unexpected_kwargs(kwargs, invoke_before_training=msg)
argument.assert_kwargs_empty(kwargs)
if trigger is None:
trigger = Extension.trigger
if priority is None:
priority = Extension.priority
def decorator(ext):
ext.trigger = trigger
ext.default_name = default_name or ext.__name__
ext.priority = priority
ext.finalize = finalizer
ext.on_error = on_error
ext.initialize = initializer
return ext
return decorator
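# A hedged sketch (not in the original source) of the decorator form described
# above; the function name and the once-per-epoch trigger are illustrative
# only, and ``trainer.updater.iteration`` is assumed from the Trainer API.
@make_extension(trigger=(1, 'epoch'))
def _print_iteration(trainer):
    print('iteration: %d' % trainer.updater.iteration)
# A trainer would then register it with: trainer.extend(_print_iteration)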
| mit |
The-end-novel/Freedom-Web | app/main/views.py | 1 | 9447 | from flask import render_template, redirect, url_for, abort, flash, request,\
current_app, make_response
from flask_login import login_required, current_user
from . import main
from .forms import EditProfileForm, EditProfileAdminForm, PostForm,\
CommentForm
from .. import db
from ..models import Permission, Role, User, Post, Comment
from ..decorators import admin_required, permission_required
@main.route('/', methods=['GET', 'POST'])
def index():
pic = ('jpg', 'png', 'jpeg', 'gif')
form = PostForm()
if current_user.can(Permission.WRITE_ARTICLES) and \
form.validate_on_submit():
if form.body.data.endswith(pic):
form.body.data = "<img src="+form.body.data+">"
post = Post(body=form.body.data,
author=current_user._get_current_object())
db.session.add(post)
return redirect(url_for('.index'))
page = request.args.get('page', 1, type=int)
show_followed = False
if current_user.is_authenticated:
show_followed = bool(request.cookies.get('show_followed', ''))
if show_followed:
query = current_user.followed_posts
else:
query = Post.query
pagination = query.order_by(Post.timestamp.desc()).paginate(
page, per_page=current_app.config['FLASKY_POSTS_PER_PAGE'],
error_out=False)
posts = pagination.items
return render_template('index.html', form=form, posts=posts,
show_followed=show_followed, pagination=pagination)
@main.route('/user/<username>')
def user(username):
user = User.query.filter_by(username=username).first_or_404()
page = request.args.get('page', 1, type=int)
pagination = user.posts.order_by(Post.timestamp.desc()).paginate(
page, per_page=current_app.config['FLASKY_POSTS_PER_PAGE'],
error_out=False)
posts = pagination.items
return render_template('user.html', user=user, posts=posts,
pagination=pagination)
@main.route('/edit-profile', methods=['GET', 'POST'])
@login_required
def edit_profile():
form = EditProfileForm()
if form.validate_on_submit():
current_user.name = form.name.data
current_user.location = form.location.data
current_user.about_me = form.about_me.data
db.session.add(current_user)
flash('Your profile has been updated.')
return redirect(url_for('.user', username=current_user.username))
form.name.data = current_user.name
form.location.data = current_user.location
form.about_me.data = current_user.about_me
return render_template('edit_profile.html', form=form)
@main.route('/edit-profile/<int:id>', methods=['GET', 'POST'])
@login_required
@admin_required
def edit_profile_admin(id):
user = User.query.get_or_404(id)
form = EditProfileAdminForm(user=user)
if form.validate_on_submit():
user.email = form.email.data
user.username = form.username.data
user.confirmed = form.confirmed.data
user.role = Role.query.get(form.role.data)
user.name = form.name.data
user.location = form.location.data
user.about_me = form.about_me.data
db.session.add(user)
flash('The profile has been updated.')
return redirect(url_for('.user', username=user.username))
form.email.data = user.email
form.username.data = user.username
form.confirmed.data = user.confirmed
form.role.data = user.role_id
form.name.data = user.name
form.location.data = user.location
form.about_me.data = user.about_me
return render_template('edit_profile.html', form=form, user=user)
@main.route('/post/<int:id>', methods=['GET', 'POST'])
def post(id):
post = Post.query.get_or_404(id)
form = CommentForm()
if form.validate_on_submit():
comment = Comment(body=form.body.data,
post=post,
author=current_user._get_current_object())
db.session.add(comment)
flash('Your comment has been published.')
return redirect(url_for('.post', id=post.id, page=-1))
page = request.args.get('page', 1, type=int)
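    # page == -1 is a sentinel meaning "jump to the last page of comments",
    # so the redirect above lands on the page showing the new comment.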
if page == -1:
page = (post.comments.count() - 1) // \
current_app.config['FLASKY_COMMENTS_PER_PAGE'] + 1
pagination = post.comments.order_by(Comment.timestamp.asc()).paginate(
page, per_page=current_app.config['FLASKY_COMMENTS_PER_PAGE'],
error_out=False)
comments = pagination.items
return render_template('post.html', posts=[post], form=form,
comments=comments, pagination=pagination)
@main.route('/edit/<int:id>', methods=['GET', 'POST'])
@login_required
def edit(id):
pic = ('jpg', 'png', 'jpeg', 'gif')
post = Post.query.get_or_404(id)
if current_user != post.author and \
not current_user.can(Permission.ADMINISTER):
abort(403)
form = PostForm()
if form.validate_on_submit():
if form.body.data.endswith(pic):
form.body.data = "<img src="+form.body.data+">"
post.body = form.body.data
db.session.add(post)
flash('The post has been updated.')
return redirect(url_for('.post', id=post.id))
form.body.data = post.body
return render_template('edit_post.html', form=form)
@main.route('/follow/<username>')
@login_required
@permission_required(Permission.FOLLOW)
def follow(username):
user = User.query.filter_by(username=username).first()
if user is None:
flash('Invalid user.')
return redirect(url_for('.index'))
if current_user.is_following(user):
flash('You are already following this user.')
return redirect(url_for('.user', username=username))
current_user.follow(user)
flash('You are now following %s.' % username)
return redirect(url_for('.user', username=username))
@main.route('/unfollow/<username>')
@login_required
@permission_required(Permission.FOLLOW)
def unfollow(username):
user = User.query.filter_by(username=username).first()
if user is None:
flash('Invalid user.')
return redirect(url_for('.index'))
if not current_user.is_following(user):
flash('You are not following this user.')
return redirect(url_for('.user', username=username))
current_user.unfollow(user)
flash('You are not following %s anymore.' % username)
return redirect(url_for('.user', username=username))
@main.route('/followers/<username>')
def followers(username):
user = User.query.filter_by(username=username).first()
if user is None:
flash('Invalid user.')
return redirect(url_for('.index'))
page = request.args.get('page', 1, type=int)
pagination = user.followers.paginate(
page, per_page=current_app.config['FLASKY_FOLLOWERS_PER_PAGE'],
error_out=False)
follows = [{'user': item.follower, 'timestamp': item.timestamp}
for item in pagination.items]
return render_template('followers.html', user=user, title="Followers of",
endpoint='.followers', pagination=pagination,
follows=follows)
@main.route('/followed-by/<username>')
def followed_by(username):
user = User.query.filter_by(username=username).first()
if user is None:
flash('Invalid user.')
return redirect(url_for('.index'))
page = request.args.get('page', 1, type=int)
pagination = user.followed.paginate(
page, per_page=current_app.config['FLASKY_FOLLOWERS_PER_PAGE'],
error_out=False)
follows = [{'user': item.followed, 'timestamp': item.timestamp}
for item in pagination.items]
return render_template('followers.html', user=user, title="Followed by",
endpoint='.followed_by', pagination=pagination,
follows=follows)
@main.route('/all')
@login_required
def show_all():
resp = make_response(redirect(url_for('.index')))
resp.set_cookie('show_followed', '', max_age=30*24*60*60)
return resp
@main.route('/followed')
@login_required
def show_followed():
resp = make_response(redirect(url_for('.index')))
resp.set_cookie('show_followed', '1', max_age=30*24*60*60)
return resp
@main.route('/moderate')
@login_required
@permission_required(Permission.MODERATE_COMMENTS)
def moderate():
page = request.args.get('page', 1, type=int)
pagination = Comment.query.order_by(Comment.timestamp.desc()).paginate(
page, per_page=current_app.config['FLASKY_COMMENTS_PER_PAGE'],
error_out=False)
comments = pagination.items
return render_template('moderate.html', comments=comments,
pagination=pagination, page=page)
@main.route('/moderate/enable/<int:id>')
@login_required
@permission_required(Permission.MODERATE_COMMENTS)
def moderate_enable(id):
comment = Comment.query.get_or_404(id)
comment.disabled = False
db.session.add(comment)
return redirect(url_for('.moderate',
page=request.args.get('page', 1, type=int)))
@main.route('/moderate/disable/<int:id>')
@login_required
@permission_required(Permission.MODERATE_COMMENTS)
def moderate_disable(id):
comment = Comment.query.get_or_404(id)
comment.disabled = True
db.session.add(comment)
return redirect(url_for('.moderate',
page=request.args.get('page', 1, type=int)))
| mit |
nmercier/linux-cross-gcc | linux/lib/python2.7/lib-tk/Tkconstants.py | 375 | 1493 | # Symbolic constants for Tk
# Booleans
NO=FALSE=OFF=0
YES=TRUE=ON=1
# -anchor and -sticky
N='n'
S='s'
W='w'
E='e'
NW='nw'
SW='sw'
NE='ne'
SE='se'
NS='ns'
EW='ew'
NSEW='nsew'
CENTER='center'
# -fill
NONE='none'
X='x'
Y='y'
BOTH='both'
# -side
LEFT='left'
TOP='top'
RIGHT='right'
BOTTOM='bottom'
# -relief
RAISED='raised'
SUNKEN='sunken'
FLAT='flat'
RIDGE='ridge'
GROOVE='groove'
SOLID = 'solid'
# -orient
HORIZONTAL='horizontal'
VERTICAL='vertical'
# -tabs
NUMERIC='numeric'
# -wrap
CHAR='char'
WORD='word'
# -align
BASELINE='baseline'
# -bordermode
INSIDE='inside'
OUTSIDE='outside'
# Special tags, marks and insert positions
SEL='sel'
SEL_FIRST='sel.first'
SEL_LAST='sel.last'
END='end'
INSERT='insert'
CURRENT='current'
ANCHOR='anchor'
ALL='all' # e.g. Canvas.delete(ALL)
# Text widget and button states
NORMAL='normal'
DISABLED='disabled'
ACTIVE='active'
# Canvas state
HIDDEN='hidden'
# Menu item types
CASCADE='cascade'
CHECKBUTTON='checkbutton'
COMMAND='command'
RADIOBUTTON='radiobutton'
SEPARATOR='separator'
# Selection modes for list boxes
SINGLE='single'
BROWSE='browse'
MULTIPLE='multiple'
EXTENDED='extended'
# Activestyle for list boxes
# NONE='none' is also valid
DOTBOX='dotbox'
UNDERLINE='underline'
# Various canvas styles
PIESLICE='pieslice'
CHORD='chord'
ARC='arc'
FIRST='first'
LAST='last'
BUTT='butt'
PROJECTING='projecting'
ROUND='round'
BEVEL='bevel'
MITER='miter'
# Arguments to xview/yview
MOVETO='moveto'
SCROLL='scroll'
UNITS='units'
PAGES='pages'
| bsd-3-clause |
mrooney/metakv | website/metakv/test_helpers.py | 1 | 3165 | from django.utils import unittest
import django.test
from django.test.client import Client
from lxml import html
from cssselect import HTMLTranslator
class NotOkay(Exception):
def __init__(self, response):
Exception.__init__(self, "%r: %r" % (response.status_code, response))
self.response = response
self.status = response.status_code
class ExtendedTestCase(django.test.TestCase):
def after_setUp(self):
""" Override this to do extra setup. """
def before_tearDown(self):
""" Override this to do extra tear-down. """
def assertStatus(self, status, path, **kwargs):
try:
response = self.get(path, **kwargs)
except NotOkay, no:
response = no.response
self.assertEqual(status, response.status_code)
@classmethod
def get_client(cls, user=None):
client = Client()
if user:
assert client.login(username=user.username, password="foobar")
return client
@classmethod
def _http_verb(cls, verb, path, client=None, data=None, https=False, user=None, raise_errors=True, **kwargs):
data = data or {}
client = client or cls.get_client(user)
kwargs['HTTP_X_FORWARDED_PROTO'] = 'https' if https else 'http' # Simulates ELB
response = getattr(client, verb.lower())(path, data=data, **kwargs)
if raise_errors and response.status_code not in [200, 302]:
raise NotOkay(response)
return response
@classmethod
def get(cls, path, data=None, client=None, **kwargs):
data = data or {}
        return cls._http_verb('get', path, data=data, client=client, **kwargs)
@classmethod
def post(cls, path, data=None, client=None, **kwargs):
data = data or {}
return cls._http_verb('post', path, data=data, client=client, **kwargs)
@classmethod
def _api_call(cls, path, data=None, client=None, method="post"):
data = data or {}
response = getattr(cls, method)(path,
data=util.dumps(data),
client=client,
content_type="application/json")
try:
content = util.loads(response.content)
except ValueError:
# Probably not a JSON response, so just return a string.
content = response.content
return content
@classmethod
def api_post(cls, *args, **kwargs):
return cls._api_call(*args, **kwargs)
def parse_response(self, response):
if isinstance(response, basestring):
return html.fromstring(response)
return html.fromstring(response.content)
def css_select(self, response, css_selector):
document = self.parse_response(response)
expression = HTMLTranslator().css_to_xpath(css_selector)
return document.xpath(expression)
def assertNumCssMatches(self, num, response, css_selector):
found = len(self.css_select(response, css_selector))
self.assertEqual(num, found, "Expected {0} but found {1}.".format(num, found))
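# A hedged usage sketch, not part of the original module: a concrete test can
# lean on the helpers above. The route and CSS selector are made up for the
# example.
class ExampleSmokeTest(ExtendedTestCase):
    def test_homepage_renders_one_title(self):
        response = self.get("/")
        self.assertNumCssMatches(1, response, "h1.title")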
| mit |
40223226/2015cdbg80420 | static/Brython3.1.1-20150328-091302/Lib/_thread.py | 740 | 4879 | """Drop-in replacement for the thread module.
Meant to be used as a brain-dead substitute so that threaded code does
not need to be rewritten for when the thread module is not present.
Suggested usage is::
try:
import _thread
except ImportError:
import _dummy_thread as _thread
"""
# Exports only things specified by thread documentation;
# skipping obsolete synonyms allocate(), start_new(), exit_thread().
__all__ = ['error', 'start_new_thread', 'exit', 'get_ident', 'allocate_lock',
'interrupt_main', 'LockType']
# A dummy value
TIMEOUT_MAX = 2**31
# NOTE: this module can be imported early in the extension building process,
# and so top level imports of other modules should be avoided. Instead, all
# imports are done when needed on a function-by-function basis. Since threads
# are disabled, the import lock should not be an issue anyway (??).
error = RuntimeError
def start_new_thread(function, args, kwargs={}):
"""Dummy implementation of _thread.start_new_thread().
Compatibility is maintained by making sure that ``args`` is a
tuple and ``kwargs`` is a dictionary. If an exception is raised
and it is SystemExit (which can be done by _thread.exit()) it is
caught and nothing is done; all other exceptions are printed out
by using traceback.print_exc().
If the executed function calls interrupt_main the KeyboardInterrupt will be
raised when the function returns.
"""
if type(args) != type(tuple()):
raise TypeError("2nd arg must be a tuple")
if type(kwargs) != type(dict()):
raise TypeError("3rd arg must be a dict")
global _main
_main = False
try:
function(*args, **kwargs)
except SystemExit:
pass
except:
import traceback
traceback.print_exc()
_main = True
global _interrupt
if _interrupt:
_interrupt = False
raise KeyboardInterrupt
def exit():
"""Dummy implementation of _thread.exit()."""
raise SystemExit
def get_ident():
"""Dummy implementation of _thread.get_ident().
Since this module should only be used when _threadmodule is not
available, it is safe to assume that the current process is the
only thread. Thus a constant can be safely returned.
"""
return -1
def allocate_lock():
"""Dummy implementation of _thread.allocate_lock()."""
return LockType()
def stack_size(size=None):
"""Dummy implementation of _thread.stack_size()."""
if size is not None:
raise error("setting thread stack size not supported")
return 0
class LockType(object):
"""Class implementing dummy implementation of _thread.LockType.
Compatibility is maintained by maintaining self.locked_status
which is a boolean that stores the state of the lock. Pickling of
the lock, though, should not be done since if the _thread module is
then used with an unpickled ``lock()`` from here problems could
occur from this class not having atomic methods.
"""
def __init__(self):
self.locked_status = False
def acquire(self, waitflag=None, timeout=-1):
"""Dummy implementation of acquire().
For blocking calls, self.locked_status is automatically set to
True and returned appropriately based on value of
``waitflag``. If it is non-blocking, then the value is
actually checked and not set if it is already acquired. This
is all done so that threading.Condition's assert statements
aren't triggered and throw a little fit.
"""
if waitflag is None or waitflag:
self.locked_status = True
return True
else:
if not self.locked_status:
self.locked_status = True
return True
else:
if timeout > 0:
import time
time.sleep(timeout)
return False
__enter__ = acquire
def __exit__(self, typ, val, tb):
self.release()
def release(self):
"""Release the dummy lock."""
# XXX Perhaps shouldn't actually bother to test? Could lead
# to problems for complex, threaded code.
if not self.locked_status:
raise error
self.locked_status = False
return True
def locked(self):
return self.locked_status
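# A hedged usage sketch (not in the original module): the dummy lock mirrors
# the real _thread lock API, including context-manager use.
def _example_lock_usage():
    lock = allocate_lock()
    with lock:
        assert lock.locked()
    assert not lock.locked()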
# Used to signal that interrupt_main was called in a "thread"
_interrupt = False
# True when not executing in a "thread"
_main = True
def interrupt_main():
"""Set _interrupt flag to True to have start_new_thread raise
KeyboardInterrupt upon exiting."""
if _main:
raise KeyboardInterrupt
else:
global _interrupt
_interrupt = True
# Brython-specific to avoid circular references between threading and _threading_local
class _local:
    pass
| gpl-3.0 |
perimosocordiae/scipy | scipy/signal/tests/test_savitzky_golay.py | 21 | 10203 | import numpy as np
from numpy.testing import (assert_allclose, assert_equal,
assert_almost_equal, assert_array_equal,
assert_array_almost_equal)
from scipy.ndimage import convolve1d
from scipy.signal import savgol_coeffs, savgol_filter
from scipy.signal._savitzky_golay import _polyder
def check_polyder(p, m, expected):
dp = _polyder(p, m)
assert_array_equal(dp, expected)
def test_polyder():
cases = [
([5], 0, [5]),
([5], 1, [0]),
([3, 2, 1], 0, [3, 2, 1]),
([3, 2, 1], 1, [6, 2]),
([3, 2, 1], 2, [6]),
([3, 2, 1], 3, [0]),
([[3, 2, 1], [5, 6, 7]], 0, [[3, 2, 1], [5, 6, 7]]),
([[3, 2, 1], [5, 6, 7]], 1, [[6, 2], [10, 6]]),
([[3, 2, 1], [5, 6, 7]], 2, [[6], [10]]),
([[3, 2, 1], [5, 6, 7]], 3, [[0], [0]]),
]
for p, m, expected in cases:
check_polyder(np.array(p).T, m, np.array(expected).T)
#--------------------------------------------------------------------
# savgol_coeffs tests
#--------------------------------------------------------------------
def alt_sg_coeffs(window_length, polyorder, pos):
"""This is an alternative implementation of the SG coefficients.
It uses numpy.polyfit and numpy.polyval. The results should be
equivalent to those of savgol_coeffs(), but this implementation
is slower.
window_length should be odd.
"""
if pos is None:
pos = window_length // 2
t = np.arange(window_length)
unit = (t == pos).astype(int)
h = np.polyval(np.polyfit(t, unit, polyorder), t)
return h
def test_sg_coeffs_trivial():
# Test a trivial case of savgol_coeffs: polyorder = window_length - 1
h = savgol_coeffs(1, 0)
assert_allclose(h, [1])
h = savgol_coeffs(3, 2)
assert_allclose(h, [0, 1, 0], atol=1e-10)
h = savgol_coeffs(5, 4)
assert_allclose(h, [0, 0, 1, 0, 0], atol=1e-10)
h = savgol_coeffs(5, 4, pos=1)
assert_allclose(h, [0, 0, 0, 1, 0], atol=1e-10)
h = savgol_coeffs(5, 4, pos=1, use='dot')
assert_allclose(h, [0, 1, 0, 0, 0], atol=1e-10)
def compare_coeffs_to_alt(window_length, order):
# For the given window_length and order, compare the results
# of savgol_coeffs and alt_sg_coeffs for pos from 0 to window_length - 1.
# Also include pos=None.
for pos in [None] + list(range(window_length)):
h1 = savgol_coeffs(window_length, order, pos=pos, use='dot')
h2 = alt_sg_coeffs(window_length, order, pos=pos)
assert_allclose(h1, h2, atol=1e-10,
err_msg=("window_length = %d, order = %d, pos = %s" %
(window_length, order, pos)))
def test_sg_coeffs_compare():
# Compare savgol_coeffs() to alt_sg_coeffs().
for window_length in range(1, 8, 2):
for order in range(window_length):
compare_coeffs_to_alt(window_length, order)
def test_sg_coeffs_exact():
polyorder = 4
window_length = 9
halflen = window_length // 2
x = np.linspace(0, 21, 43)
delta = x[1] - x[0]
# The data is a cubic polynomial. We'll use an order 4
# SG filter, so the filtered values should equal the input data
# (except within half window_length of the edges).
y = 0.5 * x ** 3 - x
h = savgol_coeffs(window_length, polyorder)
y0 = convolve1d(y, h)
assert_allclose(y0[halflen:-halflen], y[halflen:-halflen])
# Check the same input, but use deriv=1. dy is the exact result.
dy = 1.5 * x ** 2 - 1
h = savgol_coeffs(window_length, polyorder, deriv=1, delta=delta)
y1 = convolve1d(y, h)
assert_allclose(y1[halflen:-halflen], dy[halflen:-halflen])
# Check the same input, but use deriv=2. d2y is the exact result.
d2y = 3.0 * x
h = savgol_coeffs(window_length, polyorder, deriv=2, delta=delta)
y2 = convolve1d(y, h)
assert_allclose(y2[halflen:-halflen], d2y[halflen:-halflen])
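# A hedged illustration, not part of the original test suite: the exactness
# property in its smallest form. A polyorder-2 filter reproduces a sampled
# parabola exactly, edges included, with mode='interp'.
def _example_savgol_exactness():
    x = np.arange(10.0) ** 2
    assert_allclose(savgol_filter(x, 5, 2, mode='interp'), x, atol=1e-10)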
def test_sg_coeffs_deriv():
# The data in `x` is a sampled parabola, so using savgol_coeffs with an
# order 2 or higher polynomial should give exact results.
i = np.array([-2.0, 0.0, 2.0, 4.0, 6.0])
x = i ** 2 / 4
dx = i / 2
d2x = np.full_like(i, 0.5)
for pos in range(x.size):
coeffs0 = savgol_coeffs(5, 3, pos=pos, delta=2.0, use='dot')
assert_allclose(coeffs0.dot(x), x[pos], atol=1e-10)
coeffs1 = savgol_coeffs(5, 3, pos=pos, delta=2.0, use='dot', deriv=1)
assert_allclose(coeffs1.dot(x), dx[pos], atol=1e-10)
coeffs2 = savgol_coeffs(5, 3, pos=pos, delta=2.0, use='dot', deriv=2)
assert_allclose(coeffs2.dot(x), d2x[pos], atol=1e-10)
def test_sg_coeffs_deriv_gt_polyorder():
"""
If deriv > polyorder, the coefficients should be all 0.
This is a regression test for a bug where, e.g.,
savgol_coeffs(5, polyorder=1, deriv=2)
raised an error.
"""
coeffs = savgol_coeffs(5, polyorder=1, deriv=2)
assert_array_equal(coeffs, np.zeros(5))
coeffs = savgol_coeffs(7, polyorder=4, deriv=6)
assert_array_equal(coeffs, np.zeros(7))
def test_sg_coeffs_large():
# Test that for large values of window_length and polyorder the array of
# coefficients returned is symmetric. The aim is to ensure that
# no potential numeric overflow occurs.
coeffs0 = savgol_coeffs(31, 9)
assert_array_almost_equal(coeffs0, coeffs0[::-1])
coeffs1 = savgol_coeffs(31, 9, deriv=1)
assert_array_almost_equal(coeffs1, -coeffs1[::-1])
#--------------------------------------------------------------------
# savgol_filter tests
#--------------------------------------------------------------------
def test_sg_filter_trivial():
""" Test some trivial edge cases for savgol_filter()."""
x = np.array([1.0])
y = savgol_filter(x, 1, 0)
assert_equal(y, [1.0])
# Input is a single value. With a window length of 3 and polyorder 1,
# the value in y is from the straight-line fit of (-1,0), (0,3) and
# (1, 0) at 0. This is just the average of the three values, hence 1.0.
x = np.array([3.0])
y = savgol_filter(x, 3, 1, mode='constant')
assert_almost_equal(y, [1.0], decimal=15)
x = np.array([3.0])
y = savgol_filter(x, 3, 1, mode='nearest')
assert_almost_equal(y, [3.0], decimal=15)
x = np.array([1.0] * 3)
y = savgol_filter(x, 3, 1, mode='wrap')
assert_almost_equal(y, [1.0, 1.0, 1.0], decimal=15)
def test_sg_filter_basic():
# Some basic test cases for savgol_filter().
x = np.array([1.0, 2.0, 1.0])
y = savgol_filter(x, 3, 1, mode='constant')
assert_allclose(y, [1.0, 4.0 / 3, 1.0])
y = savgol_filter(x, 3, 1, mode='mirror')
assert_allclose(y, [5.0 / 3, 4.0 / 3, 5.0 / 3])
y = savgol_filter(x, 3, 1, mode='wrap')
assert_allclose(y, [4.0 / 3, 4.0 / 3, 4.0 / 3])
def test_sg_filter_2d():
x = np.array([[1.0, 2.0, 1.0],
[2.0, 4.0, 2.0]])
expected = np.array([[1.0, 4.0 / 3, 1.0],
[2.0, 8.0 / 3, 2.0]])
y = savgol_filter(x, 3, 1, mode='constant')
assert_allclose(y, expected)
y = savgol_filter(x.T, 3, 1, mode='constant', axis=0)
assert_allclose(y, expected.T)
def test_sg_filter_interp_edges():
# Another test with low degree polynomial data, for which we can easily
# give the exact results. In this test, we use mode='interp', so
# savgol_filter should match the exact solution for the entire data set,
# including the edges.
t = np.linspace(-5, 5, 21)
delta = t[1] - t[0]
# Polynomial test data.
x = np.array([t,
3 * t ** 2,
t ** 3 - t])
dx = np.array([np.ones_like(t),
6 * t,
3 * t ** 2 - 1.0])
d2x = np.array([np.zeros_like(t),
np.full_like(t, 6),
6 * t])
window_length = 7
y = savgol_filter(x, window_length, 3, axis=-1, mode='interp')
assert_allclose(y, x, atol=1e-12)
y1 = savgol_filter(x, window_length, 3, axis=-1, mode='interp',
deriv=1, delta=delta)
assert_allclose(y1, dx, atol=1e-12)
y2 = savgol_filter(x, window_length, 3, axis=-1, mode='interp',
deriv=2, delta=delta)
assert_allclose(y2, d2x, atol=1e-12)
# Transpose everything, and test again with axis=0.
x = x.T
dx = dx.T
d2x = d2x.T
y = savgol_filter(x, window_length, 3, axis=0, mode='interp')
assert_allclose(y, x, atol=1e-12)
y1 = savgol_filter(x, window_length, 3, axis=0, mode='interp',
deriv=1, delta=delta)
assert_allclose(y1, dx, atol=1e-12)
y2 = savgol_filter(x, window_length, 3, axis=0, mode='interp',
deriv=2, delta=delta)
assert_allclose(y2, d2x, atol=1e-12)
def test_sg_filter_interp_edges_3d():
# Test mode='interp' with a 3-D array.
t = np.linspace(-5, 5, 21)
delta = t[1] - t[0]
x1 = np.array([t, -t])
x2 = np.array([t ** 2, 3 * t ** 2 + 5])
x3 = np.array([t ** 3, 2 * t ** 3 + t ** 2 - 0.5 * t])
dx1 = np.array([np.ones_like(t), -np.ones_like(t)])
dx2 = np.array([2 * t, 6 * t])
dx3 = np.array([3 * t ** 2, 6 * t ** 2 + 2 * t - 0.5])
# z has shape (3, 2, 21)
z = np.array([x1, x2, x3])
dz = np.array([dx1, dx2, dx3])
y = savgol_filter(z, 7, 3, axis=-1, mode='interp', delta=delta)
assert_allclose(y, z, atol=1e-10)
dy = savgol_filter(z, 7, 3, axis=-1, mode='interp', deriv=1, delta=delta)
assert_allclose(dy, dz, atol=1e-10)
# z has shape (3, 21, 2)
z = np.array([x1.T, x2.T, x3.T])
dz = np.array([dx1.T, dx2.T, dx3.T])
y = savgol_filter(z, 7, 3, axis=1, mode='interp', delta=delta)
assert_allclose(y, z, atol=1e-10)
dy = savgol_filter(z, 7, 3, axis=1, mode='interp', deriv=1, delta=delta)
assert_allclose(dy, dz, atol=1e-10)
# z has shape (21, 3, 2)
z = z.swapaxes(0, 1).copy()
dz = dz.swapaxes(0, 1).copy()
y = savgol_filter(z, 7, 3, axis=0, mode='interp', delta=delta)
assert_allclose(y, z, atol=1e-10)
dy = savgol_filter(z, 7, 3, axis=0, mode='interp', deriv=1, delta=delta)
assert_allclose(dy, dz, atol=1e-10)
| bsd-3-clause |
strobo-inc/pc-nrfutil | nordicsemi/utility/tests/__init__.py | 7 | 1579 | # Copyright (c) 2015, Nordic Semiconductor
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of Nordic Semiconductor ASA nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Package marker file."""
| bsd-3-clause |
jinzo27/infoGrabr | lib/cpp/scons/scons-local-2.0.0.final.0/SCons/__init__.py | 34 | 1629 | """SCons
The main package for the SCons software construction utility.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/__init__.py 5023 2010/06/14 22:05:46 scons"
__version__ = "2.0.0.final.0"
__build__ = "r5023"
__buildsys__ = "scons-dev"
__date__ = "2010/06/14 22:05:46"
__developer__ = "scons"
# make sure compatibility is always in place
import SCons.compat
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| mit |
stormbeard/pyvmomi-community-samples | samples/create_snapshot.py | 11 | 2756 | # Copyright 2015 Michael Rice <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import atexit
import requests
from pyVim.connect import SmartConnect, Disconnect
from tools import cli
requests.packages.urllib3.disable_warnings()
def setup_args():
parser = cli.build_arg_parser()
parser.add_argument('-j', '--uuid', required=True,
help="UUID of the VirtualMachine you want to find."
" If -i is not used BIOS UUID assumed.")
parser.add_argument('-i', '--instance', required=False,
action='store_true',
help="Flag to indicate the UUID is an instance UUID")
parser.add_argument('-d', '--description', required=False,
help="Description for the snapshot")
parser.add_argument('-n', '--name', required=True,
help="Name for the Snapshot")
my_args = parser.parse_args()
return cli.prompt_for_password(my_args)
args = setup_args()
si = None
instance_search = False
try:
si = SmartConnect(host=args.host,
user=args.user,
pwd=args.password,
port=int(args.port))
atexit.register(Disconnect, si)
except IOError:
pass
if not si:
raise SystemExit("Unable to connect to host with supplied info.")
if args.instance:
instance_search = True
vm = si.content.searchIndex.FindByUuid(None, args.uuid, True, instance_search)
if vm is None:
raise SystemExit("Unable to locate VirtualMachine.")
desc = None
if args.description:
desc = args.description
task = vm.CreateSnapshot_Task(name=args.name,
description=desc,
memory=True,
quiesce=False)
print("Snapshot Completed.")
del vm
vm = si.content.searchIndex.FindByUuid(None, args.uuid, True, instance_search)
snap_info = vm.snapshot
tree = snap_info.rootSnapshotList
while tree[0].childSnapshotList is not None:
print("Snap: {0} => {1}".format(tree[0].name, tree[0].description))
if len(tree[0].childSnapshotList) < 1:
break
tree = tree[0].childSnapshotList
| apache-2.0 |
overtherain/scriptfile | py/dropbox.py | 3 | 111457 | #!/usr/bin/python
#
# Copyright (c) Dropbox, Inc.
#
# dropbox
# Dropbox frontend script
# This file is part of nautilus-dropbox 1.6.2.
#
# nautilus-dropbox is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# nautilus-dropbox is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with nautilus-dropbox. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import with_statement
import errno
import locale
import optparse
import os
import platform
import shutil
import socket
import StringIO
import subprocess
import sys
import tarfile
import tempfile
import threading
import thread
import time
import traceback
import urllib2
try:
import gpgme
except ImportError:
gpgme = None
from contextlib import closing, contextmanager
from posixpath import curdir, sep, pardir, join, abspath, commonprefix
INFO = u"Dropbox is the easiest way to share and store your files online. Want to learn more? Head to"
LINK = u"https://www.dropbox.com/"
WARNING = u"In order to use Dropbox, you must download the proprietary daemon."
GPG_WARNING = u"Note: python-gpgme is not installed, we will not be able to verify binary signatures."
ERROR_CONNECTING = u"Trouble connecting to Dropbox servers. Maybe your internet connection is down, or you need to set your http_proxy environment variable."
ERROR_SIGNATURE = u"Downloaded binary does not match Dropbox signature, aborting install."
DOWNLOAD_LOCATION_FMT = "https://www.dropbox.com/download?plat=%s"
SIGNATURE_LOCATION_FMT = "https://www.dropbox.com/download?plat=%s&signature=1"
DOWNLOADING = u"Downloading Dropbox... %d%%"
UNPACKING = u"Unpacking Dropbox... %d%%"
PARENT_DIR = os.path.expanduser("~")
DROPBOXD_PATH = "%s/.dropbox-dist/dropboxd" % PARENT_DIR
DESKTOP_FILE = u"/usr/share/applications/dropbox.desktop"
enc = locale.getpreferredencoding()
# Available from https://linux.dropbox.com/fedora/rpm-public-key.asc
DROPBOX_PUBLIC_KEY = """
-----BEGIN PGP PUBLIC KEY BLOCK-----
Version: SKS 1.1.0
mQENBEt0ibEBCACv4hZRPqwtpU6z8+BB5YZU1a3yjEvg2W68+a6hEwxtCa2U++4dzQ+7EqaU
q5ybQnwtbDdpFpsOi9x31J+PCpufPUfIG694/0rlEpmzl2GWzY8NqfdBFGGm/SPSSwvKbeNc
FMRLu5neo7W9kwvfMbGjHmvUbzBUVpCVKD0OEEf1q/Ii0Qcekx9CMoLvWq7ZwNHEbNnij7ec
nvwNlE2MxNsOSJj+hwZGK+tM19kuYGSKw4b5mR8IyThlgiSLIfpSBh1n2KX+TDdk9GR+57TY
vlRu6nTPu98P05IlrrCP+KF0hYZYOaMvQs9Rmc09tc/eoQlN0kkaBWw9Rv/dvLVc0aUXABEB
AAG0MURyb3Bib3ggQXV0b21hdGljIFNpZ25pbmcgS2V5IDxsaW51eEBkcm9wYm94LmNvbT6J
ATYEEwECACAFAkt0ibECGwMGCwkIBwMCBBUCCAMEFgIDAQIeAQIXgAAKCRD8kYszUESRLi/z
B/wMscEa15rS+0mIpsORknD7kawKwyda+LHdtZc0hD/73QGFINR2P23UTol/R4nyAFEuYNsF
0C4IAD6y4pL49eZ72IktPrr4H27Q9eXhNZfJhD7BvQMBx75L0F5gSQwuC7GdYNlwSlCD0AAh
Qbi70VBwzeIgITBkMQcJIhLvllYo/AKD7Gv9huy4RLaIoSeofp+2Q0zUHNPl/7zymOqu+5Ox
e1ltuJT/kd/8hU+N5WNxJTSaOK0sF1/wWFM6rWd6XQUP03VyNosAevX5tBo++iD1WY2/lFVU
JkvAvge2WFk3c6tAwZT/tKxspFy4M/tNbDKeyvr685XKJw9ei6GcOGHD
=5rWG
-----END PGP PUBLIC KEY BLOCK-----
"""
# Futures
def methodcaller(name, *args, **kwargs):
def caller(obj):
return getattr(obj, name)(*args, **kwargs)
return caller
def relpath(path, start=curdir):
"""Return a relative version of a path"""
if not path:
raise ValueError("no path specified")
if type(start) is unicode:
start_list = unicode_abspath(start).split(sep)
else:
start_list = abspath(start).split(sep)
if type(path) is unicode:
path_list = unicode_abspath(path).split(sep)
else:
path_list = abspath(path).split(sep)
# Work out how much of the filepath is shared by start and path.
i = len(commonprefix([start_list, path_list]))
rel_list = [pardir] * (len(start_list)-i) + path_list[i:]
if not rel_list:
return curdir
return join(*rel_list)
# End Futures
def console_print(st=u"", f=sys.stdout, linebreak=True):
global enc
assert type(st) is unicode
f.write(st.encode(enc))
if linebreak: f.write(os.linesep)
def console_flush(f=sys.stdout):
f.flush()
def yes_no_question(question):
while True:
console_print(question, linebreak=False)
console_print(u" [y/n] ", linebreak=False)
console_flush()
text = raw_input()
if text.lower().startswith("y"):
return True
elif text.lower().startswith("n"):
return False
else:
console_print(u"Sorry, I didn't understand that. Please type yes or no.")
def plat():
if sys.platform.lower().startswith('linux'):
arch = platform.machine()
if (arch[0] == 'i' and
arch[1].isdigit() and
arch[2:4] == '86'):
plat = "x86"
elif arch == 'x86_64':
plat = arch
else:
FatalVisibleError("Platform not supported")
return "lnx.%s" % plat
else:
FatalVisibleError("Platform not supported")
def is_dropbox_running():
pidfile = os.path.expanduser("~/.dropbox/dropbox.pid")
try:
with open(pidfile, "r") as f:
pid = int(f.read())
with open("/proc/%d/cmdline" % pid, "r") as f:
cmdline = f.read().lower()
except:
cmdline = ""
return "dropbox" in cmdline
def unicode_abspath(path):
global enc
assert type(path) is unicode
# shouldn't pass unicode to this craphead, it appends with os.getcwd() which is always a str
return os.path.abspath(path.encode(sys.getfilesystemencoding())).decode(sys.getfilesystemencoding())
@contextmanager
def gpgme_context(keys):
gpg_conf_contents = ''
_gpghome = tempfile.mkdtemp(prefix='tmp.gpghome')
try:
os.environ['GNUPGHOME'] = _gpghome
fp = open(os.path.join(_gpghome, 'gpg.conf'), 'wb')
fp.write(gpg_conf_contents)
fp.close()
ctx = gpgme.Context()
loaded = []
for key_file in keys:
result = ctx.import_(key_file)
key = ctx.get_key(result.imports[0][0])
loaded.append(key)
ctx.signers = loaded
yield ctx
finally:
del os.environ['GNUPGHOME']
shutil.rmtree(_gpghome, ignore_errors=True)
class SignatureVerifyError(Exception):
pass
def verify_signature(key_file, sig_file, plain_file):
with gpgme_context([key_file]) as ctx:
sigs = ctx.verify(sig_file, plain_file, None)
return sigs[0].status == None
def download_file_chunk(url, buf):
opener = urllib2.build_opener()
opener.addheaders = [('User-Agent', "DropboxLinuxDownloader/1.6.2")]
sock = opener.open(url)
size = int(sock.info()['content-length'])
bufsize = max(size / 200, 4096)
progress = 0
with closing(sock) as f:
yield (0, True)
while True:
try:
chunk = f.read(bufsize)
progress += len(chunk)
buf.write(chunk)
yield (float(progress)/size, True)
if progress == size:
break
except OSError, e:
if hasattr(e, 'errno') and e.errno == errno.EAGAIN:
# nothing left to read
yield (float(progress)/size, False)
else:
raise
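# A hedged sketch, not part of the original script: draining the progress
# generator above and keeping the downloaded bytes in memory.
def _example_download():
    buf = StringIO.StringIO()
    for fraction, _active in download_file_chunk(DOWNLOAD_LOCATION_FMT % plat(), buf):
        console_print(DOWNLOADING % int(fraction * 100))
    return buf.getvalue()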
class DownloadState(object):
def __init__(self):
self.local_file = StringIO.StringIO()
def copy_data(self):
return download_file_chunk(DOWNLOAD_LOCATION_FMT % plat(), self.local_file)
def unpack(self):
# download signature
signature = StringIO.StringIO()
for _ in download_file_chunk(SIGNATURE_LOCATION_FMT % plat(), signature):
pass
signature.seek(0)
self.local_file.seek(0)
if gpgme:
if not verify_signature(StringIO.StringIO(DROPBOX_PUBLIC_KEY), signature, self.local_file):
raise SignatureVerifyError()
self.local_file.seek(0)
archive = tarfile.open(fileobj=self.local_file, mode='r:gz')
total_members = len(archive.getmembers())
for i, member in enumerate(archive.getmembers()):
archive.extract(member, PARENT_DIR)
yield member.name, i, total_members
archive.close()
def cancel(self):
if not self.local_file.closed:
self.local_file.close()
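# A hedged sketch, not part of the original script: how a front end might
# drive DownloadState, draining copy_data() and then unpack().
def _example_install():
    state = DownloadState()
    try:
        for fraction, _active in state.copy_data():
            console_print(DOWNLOADING % int(fraction * 100))
        for _name, i, total in state.unpack():
            console_print(UNPACKING % int(100.0 * (i + 1) / total))
    finally:
        state.cancel()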
def load_serialized_images():
global box_logo_pixbuf, window_icon
import gtk
    # [elided: the original script embeds several kilobytes of serialized RGBA
    # pixel data here, passed to gtk.gdk.pixbuf_new_from_data to build
    # box_logo_pixbuf (and window_icon); the binary payload is garbled and
    # truncated in this extract, so it is not reproduced.]
\xcc\xf1\xff\xbd\xde\xf7\xff\xd8\xec\xfb\xff\xdc\xed\xfb\xff\xdc\xed\xfb\xff\xde\xef\xfb\xff\xe0\xf0\xfb\xff\xe2\xf1\xfb\xff\xe3\xf1\xfb\xff\xe5\xf1\xfb\xff\xe4\xf2\xfb\xff\xca\xe7\xfb\xff\xc8\xe5\xfb\xff\xc9\xe6\xfb\xff\xcb\xe6\xfb\xff\xcd\xe6\xfb\xff\xcf\xe9\xfb\xff\xd1\xe9\xfb\xff\xd6\xec\xfb\xff\xda\xed\xfb\xff\xb6\xdc\xf8\xff\x80\xc1\xf2\xffo\xb9\xf0\xffy\xbd\xf1\xff~\xc0\xf1\xff\x81\xc2\xf1\xff\x85\xc3\xf1\xff\x8a\xc5\xf1\xff\x8d\xc6\xf1\xff\x90\xc9\xf1\xff\x95\xca\xf1\xff\x99\xcc\xf1\xff\x9c\xcd\xf1\xff\xa7\xd4\xf5\xff\x95\xc7\xea\xff+y\xb2\xff\x00T\x98\xff\x00[\x9e\x88\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9eQ\x00W\x9a\xee\x08a\xa2\xffL\x9d\xd8\xfft\xbf\xf6\xffq\xbb\xf2\xfft\xbc\xf1\xffy\xbd\xf1\xff|\xbf\xf1\xff\x7f\xc1\xf1\xff\x83\xc2\xf1\xff\x87\xc4\xf1\xff\x88\xc4\xf1\xff\x8d\xc6\xf0\xff\xaa\xd5\xf4\xff\xcd\xe7\xfa\xff\xd8\xed\xfb\xff\xd7\xec\xfb\xff\xda\xec\xfb\xff\xdc\xed\xfb\xff\xde\xef\xfb\xff\xe0\xf0\xfb\xff\xe2\xf1\xfb\xff\xe3\xf1\xfb\xff\xe5\xf1\xfb\xff\xe4\xf2\xfb\xff\xca\xe7\xfb\xff\xc8\xe5\xfb\xff\xc9\xe6\xfb\xff\xcb\xe6\xfb\xff\xcd\xe6\xfb\xff\xcf\xe9\xfb\xff\xd1\xe9\xfb\xff\xd3\xea\xfb\xff\xd5\xeb\xfb\xff\xdd\xef\xfc\xff\xd7\xeb\xfb\xff\xa6\xd4\xf5\xff{\xbe\xf1\xffy\xbd\xf1\xff\x81\xc2\xf1\xff\x85\xc3\xf1\xff\x8a\xc5\xf1\xff\x8d\xc6\xf1\xff\x90\xc9\xf1\xff\x95\xca\xf1\xff\x99\xcc\xf1\xff\xa6\xd4\xf6\xff~\xb6\xdf\xff\x15h\xa7\xff\x00U\x99\xf9\x00\\\x9ek\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e-\x00Y\x9c\xd2\x01Y\x9c\xff<\x8e\xca\xffu\xbe\xf4\xffv\xbe\xf2\xffy\xbd\xf1\xff|\xbf\xf1\xff\x7f\xc1\xf1\xff\x82\xc1\xf1\xff\x83\xc1\xf0\xff\x97\xcb\xf3\xff\xbe\xe0\xf8\xff\xd4\xeb\xfb\xff\xd5\xeb\xfb\xff\xd5\xeb\xfb\xff\xd7\xec\xfb\xff\xda\xec\xfb\xff\xdc\xed\xfb\xff\xde\xef\xfb\xff\xe0\xf0\xfb\xff\xe2\xf1\xfb\xff\xe3\xf1\xfb\xff\xe5\xf1\xfb\xff\xe4\xf2\xfb\xff\xca\xe7\xfb\xff\xc8\xe5\xfb\xff\xc9\xe6\xfb\xff\xcb\xe6\xfb\xff\xcd\xe6\xfb\xff\xcf\xe9\xfb\xff\xd1\xe9\xfb\xff\xd3\xea\xfb\xff\xd5\xeb\xfb\xff\xd7\xec\xfb\xff\xdc\xee\xfb\xff\xe3\xf0\xfc\xff\xcd\xe7\xf9\xff\x98\xcc\xf3\xff|\xbf\xf0\xff\x82\xc2\xf1\xff\x8a\xc5\xf1\xff\x8d\xc6\xf1\xff\x90\xc9\xf1\xff\x96\xcb\xf2\xff\xa1\xd2\xf5\xffc\xa3\xd2\xff\x06]\x9f\xff\x00W\x9b\xe5\x00\\\x9eC\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x12\x00[\x9d\xaf\x00V\x98\xff.\x81\xbe\xffv\xbd\xf2\xff|\xc0\xf4\xff|\xbf\xf1\xff{\xbf\xf0\xff\x83\xc2\xf1\xff\xaa\xd5\xf6\xff\xcc\xe6\xfb\xff\xd1\xea\xfb\xff\xd1\xe9\xfb\xff\xd3\xea\xfb\xff\xd5\xeb\xfb\xff\xd7\xec\xfb\xff\xda\xec\xfb\xff\xdc\xed\xfb\xff\xde\xef\xfb\xff\xe0\xf0\xfb\xff\xe2\xf1\xfb\xff\xe3\xf1\xfb\xff\xe5\xf1\xfb\xff\xe4\xf2\xfb\xff\xca\xe7\xfb\xff\xc8\xe5\xfb\xff\xc9\xe6\xfb\xff\xcb\xe6\xfb\xff\xcd\xe6\xfb\xff\xcf\xe9\xfb\xff\xd1\xe9\xfb\xff\xd3\xea\xfb\xff\xd5\xeb\xfb\xff\xd7\xec\xfb\xff\xda\xed\xfb\xff\xdc\xed\xfb\xff\xe2\xf1\xfb\xff\xe3\xf2\xfb\xff\xbe\xdf\xf7\xff\x8b\xc6\xf1\xff\x84\xc2\xf0\xff\x8c\xc5\xf1\xff\x94\xcb\xf3\xff\x9b\xcf\xf4\xffK\x92\xc6\xff\x00W\x9a\xff\x00Y\x9c\xc7\x00\\\x9e#\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\
x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x8c\x00V\x98\xfd u\xb3\xffn\xb4\xe8\xff~\xc0\xf3\xff\x94\xca\xf4\xff\xbe\xe0\xf9\xff\xcf\xe8\xfb\xff\xcd\xe6\xfb\xff\xce\xe9\xfb\xff\xd1\xe9\xfb\xff\xd3\xea\xfb\xff\xd5\xeb\xfb\xff\xd7\xec\xfb\xff\xda\xec\xfb\xff\xdc\xed\xfb\xff\xde\xef\xfb\xff\xe0\xf0\xfb\xff\xe2\xf1\xfb\xff\xe3\xf1\xfb\xff\xe5\xf1\xfb\xff\xe4\xf2\xfb\xff\xca\xe7\xfb\xff\xc8\xe5\xfb\xff\xc9\xe6\xfb\xff\xcb\xe6\xfb\xff\xcd\xe6\xfb\xff\xcf\xe9\xfb\xff\xd1\xe9\xfb\xff\xd3\xea\xfb\xff\xd5\xeb\xfb\xff\xd7\xec\xfb\xff\xda\xed\xfb\xff\xdc\xed\xfb\xff\xde\xef\xfb\xff\xe1\xf0\xfb\xff\xe8\xf3\xfb\xff\xdb\xed\xfa\xff\xac\xd5\xf4\xff\x8f\xc7\xf2\xff\x89\xc3\xed\xff6\x83\xbb\xff\x00U\x99\xff\x00[\x9d\xa9\x00\\\x9e\n\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x07\x00\\\x9e\xf1\x00Q\x95\xff\x18p\xb0\xff\x98\xcd\xf5\xff\xd4\xeb\xfd\xff\xce\xe8\xfb\xff\xcb\xe6\xfb\xff\xcc\xe6\xfb\xff\xce\xe9\xfb\xff\xd1\xe9\xfb\xff\xd3\xea\xfb\xff\xd5\xeb\xfb\xff\xd7\xec\xfb\xff\xda\xec\xfb\xff\xdc\xed\xfb\xff\xde\xef\xfb\xff\xe0\xf0\xfb\xff\xe2\xf1\xfb\xff\xe3\xf1\xfb\xff\xe5\xf1\xfb\xff\xe4\xf2\xfb\xff\xca\xe7\xfb\xff\xc8\xe5\xfb\xff\xc9\xe6\xfb\xff\xcb\xe6\xfb\xff\xcd\xe6\xfb\xff\xcf\xe9\xfb\xff\xd1\xe9\xfb\xff\xd3\xea\xfb\xff\xd5\xeb\xfb\xff\xd7\xec\xfb\xff\xda\xed\xfb\xff\xdc\xed\xfb\xff\xde\xef\xfb\xff\xe0\xf0\xfb\xff\xe2\xf1\xfb\xff\xe6\xf3\xfb\xff\xf2\xf8\xfd\xff\xc9\xe5\xf9\xff1\x81\xba\xff\x00O\x94\xff\x00\\\x9e\xff\x00\\\x9e\'\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e}\x00V\x99\xfc\x1ap\xae\xffc\xad\xe4\xffM\xa8\xef\xff\x83\xc2\xf3\xff\xc6\xe4\xfb\xff\xd1\xe9\xfc\xff\xcc\xe6\xfb\xff\xce\xe9\xfb\xff\xd1\xe9\xfb\xff\xd3\xea\xfb\xff\xd5\xeb\xfb\xff\xd7\xec\xfb\xff\xda\xec\xfb\xff\xdc\xed\xfb\xff\xde\xef\xfb\xff\xe0\xf0\xfb\xff\xe2\xf1\xfb\xff\xe3\xf1\xfb\xff\xe5\xf1\xfb\xff\xe4\xf2\xfb\xff\xca\xe7\xfb\xff\xc8\xe5\xfb\xff\xc9\xe6\xfb\xff\xcb\xe6\xfb\xff\xcd\xe6\xfb\xff\xcf\xe9\xfb\xff\xd1\xe9\xfb\xff\xd3\xea\xfb\xff\xd5\xeb\xfb\xff\xd7\xec\xfb\xff\xda\xed\xfb\xff\xdc\xed\xfb\xff\xde\xef\xfb\xff\xe0\xf0\xfb\xff\xe7\xf4\xfc\xff\xe7\xf3\xfb\xff\xb6\xd8\xf4\xff{\xbc\xee\xff\x7f\xbd\xe9\xff/}\xb7\xff\x00U\x99\xff\x00[\x9d\x9d\x00\\\x9e\x06\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x0b\x00[\x9d\xa2\x00U\x98\xff\'{\xb9\xffs\xbb\xef\xff{\xc0\xf4\xff@\xa1\xed\xff3\x99\xeb\xffW\xac\xee\xff\xa7\xd4\xf7\xff\xd3\xe9\xfc\xff\xd1\xeb\xfb\xff\xd1\xe9\xfb\xff\xd3\xea\xfb\xff\xd5\xeb\xfb\xff\xd7\xec\xfb\xff\xda\xec\xfb\xff\xdc\xed\xfb\xff\xde\xef\xfb\xff\xe0\xf0\xfb\xff\xe2\xf1\xfb\xff\xe3\xf1\xfb\xff\xe5\xf1\xfb\xff\xe4\xf2\xfb\xff\xca\xe7\xfb\xff\xc8\xe5\xfb\xff\xc9\xe6\xfb\xff\xcb\xe6\xfb\xff\xcd\xe6\xfb\xff\xcf\xe9\xfb\xff\xd1\xe9\xfb\xff\xd3\xea\xfb\xff\xd5\xeb\xfb\xff\xd7\xec\xfb\xff\xda\xed\xfb\xff\xdc\xed\xfb\xff\xe0\xf0\xfb\xff\xea\xf5\xfc\xff\xcc\xe5\xf8\xff~\xbe\xee\xffX\xaa\x
e9\xffc\xb0\xe9\xff\x92\xca\xf3\xff\x9a\xcd\xf3\xffC\x8d\xc2\xff\x00U\x99\xff\x00Z\x9c\xbd\x00\\\x9e\x1c\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e%\x00Z\x9c\xc9\x00X\x9b\xff6\x8a\xc6\xffs\xbd\xf3\xffw\xbe\xf3\xffv\xbc\xf1\xffC\xa2\xed\xff<\x9e\xec\xff>\x9f\xeb\xffE\xa2\xeb\xff}\xbf\xf1\xff\xc3\xe3\xfa\xff\xd8\xed\xfc\xff\xd4\xeb\xfb\xff\xd5\xeb\xfb\xff\xd7\xec\xfb\xff\xda\xec\xfb\xff\xdc\xed\xfb\xff\xde\xef\xfb\xff\xe0\xf0\xfb\xff\xe2\xf1\xfb\xff\xe3\xf1\xfb\xff\xe5\xf1\xfb\xff\xe4\xf2\xfb\xff\xca\xe7\xfb\xff\xc8\xe5\xfb\xff\xc9\xe6\xfb\xff\xcb\xe6\xfb\xff\xcd\xe6\xfb\xff\xcf\xe9\xfb\xff\xd1\xe9\xfb\xff\xd3\xea\xfb\xff\xd5\xeb\xfb\xff\xd7\xec\xfb\xff\xdb\xed\xfb\xff\xe4\xf1\xfc\xff\xda\xed\xfb\xff\x97\xca\xf2\xffV\xa9\xea\xffS\xa7\xe9\xff_\xad\xea\xffg\xb2\xea\xff\x8d\xc7\xf1\xff\x97\xcc\xf2\xff\xa1\xd1\xf5\xff\\\x9e\xcf\xff\x03[\x9d\xff\x00X\x9b\xdf\x00\\\x9e<\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9eH\x00X\x9a\xe8\x06_\xa0\xffH\x99\xd5\xffs\xbf\xf6\xffq\xbb\xf2\xffu\xbc\xf1\xffv\xbc\xf1\xffC\xa2\xed\xff<\x9e\xec\xffC\xa2\xec\xffH\xa4\xec\xffG\xa4\xeb\xff\\\xad\xed\xff\x9e\xcf\xf5\xff\xd4\xea\xfb\xff\xda\xee\xfc\xff\xd7\xec\xfb\xff\xda\xec\xfb\xff\xdc\xed\xfb\xff\xde\xef\xfb\xff\xe0\xf0\xfb\xff\xe2\xf1\xfb\xff\xe3\xf1\xfb\xff\xe5\xf1\xfb\xff\xe4\xf2\xfb\xff\xca\xe7\xfb\xff\xc8\xe5\xfb\xff\xc9\xe6\xfb\xff\xcb\xe6\xfb\xff\xcd\xe6\xfb\xff\xcf\xe9\xfb\xff\xd1\xe9\xfb\xff\xd3\xea\xfb\xff\xd5\xeb\xfb\xff\xdd\xef\xfc\xff\xe1\xf0\xfc\xff\xac\xd5\xf5\xff\\\xad\xec\xffB\xa0\xe9\xffQ\xa7\xea\xffZ\xab\xea\xff_\xad\xea\xffg\xb2\xea\xff\x8d\xc7\xf1\xff\x95\xca\xf1\xff\x9a\xcc\xf1\xff\xa6\xd4\xf6\xffy\xb2\xdd\xff\x11f\xa5\xff\x00V\x99\xf6\x00\\\x9ed\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9ef\x00V\x99\xfc\x11h\xa9\xffV\xa7\xe2\xffp\xbd\xf6\xffl\xb9\xf1\xffp\xba\xf1\xffu\xbc\xf1\xffv\xbc\xf1\xffC\xa2\xed\xff<\x9e\xec\xffC\xa2\xec\xffI\xa4\xec\xffO\xa8\xec\xffR\xa9\xec\xffS\xa9\xeb\xffx\xbb\xef\xff\xba\xdd\xf7\xff\xdd\xef\xfc\xff\xdc\xed\xfb\xff\xdc\xed\xfb\xff\xde\xef\xfb\xff\xe0\xf0\xfb\xff\xe2\xf1\xfb\xff\xe3\xf1\xfb\xff\xe5\xf1\xfb\xff\xe4\xf2\xfb\xff\xca\xe7\xfb\xff\xc8\xe5\xfb\xff\xc9\xe6\xfb\xff\xcb\xe6\xfb\xff\xcd\xe6\xfb\xff\xcf\xe9\xfb\xff\xd1\xe9\xfb\xff\xd5\xeb\xfb\xff\xdf\xf0\xfc\xff\xc1\xe1\xf9\xffk\xb5\xef\xff8\x9b\xe9\xff@\xa0\xe9\xffL\xa5\xea\xffS\xa8\xea\xffZ\xab\xea\xff_\xad\xea\xffg\xb2\xea\xff\x8d\xc7\xf1\xff\x95\xca\xf1\xff\x99\xcc\xf1\xff\x9c\xcd\xf1\xff\xa8\xd5\xf5\xff\x91\xc4\xe8\xff\'u\xb0\xff\x00T\x98\xff\x00\\\x9e\x83\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x04\x00[\x9d\x90\x00W\x98\xff\x1fv\xb6\xff_\xaf\xec\xffk\xba\xf5\xffi\xb8\xf1\xffl\xb9\xf1\xffp\xba\xf1\xffu\xbc\xf1\xffv\xbc\xf1\xffC\xa2\xed\xff<\x9e\xec\xffC\xa2\xec\xffI\xa4\xec\xffO\xa8\xec\xffU\xaa\xec\xff[\xad\xec\xff\\\xad\xeb\xffc\xb1\xeb\xff\x99\xcc\xf2\xff\xd4\xe9\xfa\xff\xe2\xf0\xfc\xff\xdf\xef\xfb\xff\xe0\xf0\xfb\xff\xe2\xf1\xfb\xff\xe3\xf1\xfb\xff\xe5\xf1\xfb\xff\xe4\xf2\xfb\xff\xca\xe7\xfb\xff\xc8\xe5\xfb\xff\xc9\xe6\xfb\xff\xcb\xe6\xfb\xff\xcd\xe6\xfb\xff\xcf\xe9\xfb\xff\xd9\xed\xfc\xff\xd
4\xea\xfb\xff\x87\xc4\xf3\xff6\x9b\xea\xff/\x98\xe9\xff>\x9f\xea\xffF\xa3\xea\xffL\xa5\xea\xffS\xa8\xea\xffZ\xab\xea\xff_\xad\xea\xffg\xb2\xea\xff\x8d\xc7\xf1\xff\x95\xca\xf1\xff\x99\xcc\xf1\xff\x9c\xcd\xf1\xff\x9f\xce\xf1\xff\xa9\xd4\xf4\xff\xa5\xd1\xf0\xffB\x89\xbd\xff\x00U\x99\xff\x00[\x9d\xab\x00\\\x9e\x10\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x19\x00Z\x9d\xb9\x00W\x99\xff*\x81\xc0\xffc\xb5\xf2\xfff\xb6\xf3\xfff\xb6\xf1\xffi\xb8\xf1\xffl\xb9\xf1\xffp\xba\xf1\xffu\xbc\xf1\xffv\xbc\xf1\xffC\xa2\xed\xff<\x9e\xec\xffC\xa2\xec\xffI\xa4\xec\xffO\xa8\xec\xffU\xaa\xec\xff[\xad\xec\xffa\xb0\xec\xffe\xb2\xec\xfff\xb2\xeb\xff}\xbd\xed\xff\xb8\xda\xf5\xff\xe1\xf0\xfb\xff\xe4\xf2\xfc\xff\xe2\xf1\xfb\xff\xe3\xf1\xfb\xff\xe5\xf1\xfb\xff\xe3\xf2\xfb\xff\xc8\xe6\xfb\xff\xc8\xe5\xfb\xff\xc9\xe6\xfb\xff\xcb\xe6\xfb\xff\xd2\xe8\xfb\xff\xd8\xee\xfc\xff\xa5\xd2\xf7\xffG\xa2\xed\xff!\x90\xe9\xff0\x98\xea\xff9\x9d\xea\xff?\x9f\xea\xffF\xa3\xea\xffL\xa5\xea\xffS\xa8\xea\xffZ\xab\xea\xff_\xad\xea\xffg\xb2\xea\xff\x8d\xc7\xf1\xff\x95\xca\xf1\xff\x99\xcc\xf1\xff\x9c\xcd\xf1\xff\x9f\xce\xf1\xff\xa3\xd0\xf1\xff\xa9\xd4\xf2\xff\xb0\xd9\xf5\xffX\x99\xc8\xff\x00W\x9b\xff\x00Y\x9c\xd1\x00\\\x9e,\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e9\x00Y\x9b\xdc\x03\\\x9d\xff9\x8f\xce\xffd\xb6\xf5\xffb\xb4\xf2\xffc\xb4\xf1\xfff\xb6\xf1\xffi\xb8\xf1\xffl\xb9\xf1\xffp\xba\xf1\xffu\xbc\xf1\xffv\xbc\xf1\xffC\xa2\xed\xff<\x9e\xec\xffC\xa2\xec\xffI\xa4\xec\xffO\xa8\xec\xffU\xaa\xec\xff[\xad\xec\xffa\xb0\xec\xfff\xb2\xec\xffm\xb5\xec\xffp\xb7\xeb\xfft\xb7\xeb\xff\x97\xc9\xf0\xff\xcf\xe7\xf8\xff\xe7\xf4\xfc\xff\xe4\xf2\xfb\xff\xe3\xf0\xfb\xff\xe9\xf4\xfb\xff\xd6\xec\xfc\xff\xc5\xe3\xfb\xff\xca\xe6\xfb\xff\xd7\xec\xfc\xff\xbb\xdd\xf9\xff]\xaf\xf0\xff\x1b\x8d\xe9\xff\x1f\x8e\xe9\xff,\x96\xea\xff3\x99\xea\xff9\x9d\xea\xff?\x9f\xea\xffF\xa3\xea\xffL\xa5\xea\xffS\xa8\xea\xffZ\xab\xea\xff_\xad\xea\xffg\xb2\xea\xff\x8d\xc7\xf1\xff\x95\xca\xf1\xff\x99\xcc\xf1\xff\x9c\xcd\xf1\xff\x9f\xce\xf1\xff\xa3\xd0\xf1\xff\xa6\xd2\xf1\xff\xaa\xd4\xf2\xff\xb7\xdb\xf6\xffv\xae\xd6\xff\n`\xa0\xff\x00W\x9a\xed\x00\\\x9eQ\x00\\\x9e\x00\x00\\\x9eP\x00Y\x9b\xf5\x0cd\xa6\xffF\x9c\xdc\xffb\xb6\xf6\xff_\xb2\xf1\xffa\xb3\xf1\xffc\xb4\xf1\xfff\xb6\xf1\xffi\xb8\xf1\xffl\xb9\xf1\xffp\xba\xf1\xffu\xbc\xf1\xffv\xbc\xf1\xffC\xa2\xed\xff<\x9e\xec\xffC\xa2\xec\xffI\xa4\xec\xffO\xa8\xec\xffU\xaa\xec\xff[\xad\xec\xffa\xb0\xec\xfff\xb2\xec\xffm\xb5\xec\xffs\xb8\xec\xffy\xba\xec\xffy\xba\xeb\xff\x83\xbf\xec\xff\xb0\xd6\xf2\xff\xe1\xf0\xfa\xff\xf5\xfa\xfe\xff\xfe\xff\xff\xff\xff\xff\xff\xff\xf1\xf8\xfe\xff\xd2\xe9\xfb\xffz\xbd\xf4\xff \x90\xeb\xff\x10\x87\xe9\xff 
\x8e\xea\xff&\x92\xea\xff,\x96\xea\xff3\x99\xea\xff9\x9d\xea\xff?\x9f\xea\xffF\xa3\xea\xffL\xa5\xea\xffS\xa8\xea\xffZ\xab\xea\xff_\xad\xea\xffg\xb2\xea\xff\x8d\xc7\xf1\xff\x95\xca\xf1\xff\x99\xcc\xf1\xff\x9c\xcd\xf1\xff\x9f\xce\xf1\xff\xa3\xd0\xf1\xff\xa6\xd2\xf1\xff\xa9\xd3\xf1\xff\xac\xd4\xf1\xff\xba\xdd\xf6\xff\x93\xc1\xe3\xff\x1eo\xaa\xff\x00X\x9b\xff\x00\\\x9ep\x00Z\x9c\xd6\x0bc\xa4\xffQ\xa7\xe7\xffb\xb8\xf9\xff\\\xb1\xf1\xff_\xb2\xf1\xffa\xb3\xf1\xffc\xb4\xf1\xfff\xb6\xf1\xffi\xb8\xf1\xffl\xb9\xf1\xffp\xba\xf1\xffu\xbc\xf1\xffv\xbc\xf1\xffC\xa2\xed\xff<\x9e\xec\xffC\xa2\xec\xffI\xa4\xec\xffO\xa8\xec\xffU\xaa\xec\xff[\xad\xec\xffa\xb0\xec\xfff\xb2\xec\xffm\xb5\xec\xffs\xb8\xec\xffy\xba\xec\xff~\xbd\xec\xff\x82\xbe\xec\xff\x84\xbf\xeb\xff\xa1\xce\xef\xff\xdf\xee\xf9\xff\xff\xff\xff\xff\xff\xff\xff\xff\xce\xe7\xfb\xffJ\xa5\xee\xff\x08\x83\xe9\xff\x12\x89\xea\xff\x1b\x8d\xea\xff!\x8f\xea\xff&\x92\xea\xff,\x96\xea\xff3\x99\xea\xff9\x9d\xea\xff?\x9f\xea\xffF\xa3\xea\xffL\xa5\xea\xffS\xa8\xea\xffZ\xab\xea\xff_\xad\xea\xffg\xb2\xea\xff\x8d\xc7\xf1\xff\x95\xca\xf1\xff\x99\xcc\xf1\xff\x9c\xcd\xf1\xff\x9f\xce\xf1\xff\xa3\xd0\xf1\xff\xa6\xd2\xf1\xff\xa9\xd3\xf1\xff\xac\xd4\xf1\xff\xae\xd5\xf1\xff\xbf\xe0\xf7\xff\xab\xd2\xed\xff\x1en\xaa\xff\x00X\x9b\xeb\x00[\x9d\x90\x02\\\x9e\xff\x1bs\xb4\xffM\xa2\xe2\xffb\xb7\xf6\xff`\xb3\xf2\xffa\xb3\xf1\xffc\xb4\xf1\xfff\xb6\xf1\xffi\xb8\xf1\xffl\xb9\xf1\xffp\xba\xf1\xffu\xbc\xf1\xffv\xbc\xf1\xffC\xa2\xed\xff<\x9e\xec\xffC\xa2\xec\xffI\xa4\xec\xffO\xa8\xec\xffU\xaa\xec\xff[\xad\xec\xffa\xb0\xec\xfff\xb2\xec\xffm\xb5\xec\xffs\xb8\xec\xffy\xba\xec\xff\x7f\xbd\xec\xff\x89\xc1\xed\xff\x92\xc6\xed\xff\x93\xc7\xed\xff\x95\xc7\xec\xff\xc0\xde\xf3\xff\x7f\xbf\xf4\xff\x0f\x87\xe9\xff\r\x87\xe9\xff\x1c\x8e\xea\xff\x1d\x8e\xea\xff\x1d\x8e\xea\xff \x8f\xea\xff&\x92\xea\xff,\x96\xea\xff3\x99\xea\xff9\x9d\xea\xff?\x9f\xea\xffF\xa3\xea\xffL\xa5\xea\xffS\xa8\xea\xffZ\xab\xea\xff_\xad\xea\xffg\xb2\xea\xff\x8d\xc7\xf1\xff\x95\xca\xf1\xff\x99\xcc\xf1\xff\x9c\xcd\xf1\xff\x9f\xce\xf1\xff\xa3\xd0\xf1\xff\xa6\xd2\xf1\xff\xa9\xd3\xf1\xff\xad\xd5\xf2\xff\xba\xdd\xf6\xff\x9b\xc7\xe7\xff=\x84\xb9\xff\x05]\x9f\xff\x00[\x9d\xb1\x00\\\x9e\x03\x00\\\x9ey\x00X\x9a\xf3\x05^\x9f\xff6\x8c\xcc\xff`\xb3\xf2\xffe\xb7\xf5\xffc\xb4\xf1\xfff\xb6\xf1\xffi\xb8\xf1\xffl\xb9\xf1\xffp\xba\xf1\xffu\xbc\xf1\xffv\xbc\xf1\xffC\xa2\xed\xff<\x9e\xec\xffC\xa2\xec\xffI\xa4\xec\xffO\xa8\xec\xffU\xaa\xec\xff[\xad\xec\xffa\xb0\xec\xfff\xb2\xec\xffm\xb5\xec\xffr\xb8\xec\xff|\xbb\xec\xff\x87\xc1\xed\xff\x8e\xc4\xed\xff\x92\xc6\xed\xff\x94\xc7\xed\xff\x9d\xcb\xed\xff\xb7\xd9\xf3\xffP\xaa\xf0\xff\x18\x8c\xea\xff\x13\x89\xea\xff\x1b\x8e\xea\xff \x90\xea\xff$\x92\xea\xff$\x91\xea\xff&\x92\xea\xff,\x96\xea\xff3\x99\xea\xff9\x9d\xea\xff?\x9f\xea\xffF\xa3\xea\xffL\xa5\xea\xffS\xa8\xea\xffZ\xab\xea\xff_\xad\xea\xffg\xb2\xea\xff\x8d\xc7\xf1\xff\x95\xca\xf1\xff\x99\xcc\xf1\xff\x9c\xcd\xf1\xff\x9f\xce\xf1\xff\xa3\xd0\xf1\xff\xa6\xd2\xf1\xff\xaf\xd7\xf4\xff\xb1\xd8\xf4\xffn\xa8\xd2\xff\x0ec\xa2\xff\x00U\x99\xfd\x00\\\x9e\x91\x00\\\x9e\r\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e)\x00Z\x9c\xb2\x00V\x99\xff\x18o\xb0\xffR\xa4\xe2\xffj\xb9\xf6\xffg\xb7\xf2\xffi\xb8\xf1\xffl\xb9\xf1\xffp\xba\xf1\xffu\xbc\xf1\xffv\xbc\xf1\xffC\xa2\xed\xff<\x9e\xec\xffC\xa2\xec\xffI\xa4\xec\xffO\xa8\xec\xffU\xaa\xec\xff[\xad\xec\xffa\xb0\xec\xffe\xb2\xec\xffm\xb5\xec\xffz\xbb\xed\xff\x84\xc0\xed\xff\x89\xc2\xed\xff\x8d\xc4\xed\xff\x8f\xc4\xec\xff\x9c\xcc\xee\xff\xc5\xe1\xf5\xff\xca\xe4\xf7\xffi\xb7\xf2\xffS\xac\xf0\xff(\x94\xeb\xff\x16\x8b\xea\xff 
\x90\xea\xff$\x92\xea\xff*\x95\xea\xff,\x96\xea\xff-\x97\xea\xff2\x99\xea\xff9\x9d\xea\xff?\x9f\xea\xffF\xa3\xea\xffL\xa5\xea\xffS\xa8\xea\xffZ\xab\xea\xff_\xad\xea\xffg\xb2\xea\xff\x8d\xc7\xf1\xff\x95\xca\xf1\xff\x99\xcc\xf1\xff\x9c\xcd\xf1\xff\x9f\xce\xf1\xff\xa5\xd1\xf2\xff\xb1\xd9\xf6\xff\x94\xc4\xe7\xff2}\xb5\xff\x00U\x99\xff\x00Y\x9c\xc6\x00\\\x9e;\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e[\x00X\x9a\xe1\x04]\x9e\xff3\x87\xc5\xffd\xb4\xef\xffo\xbd\xf5\xffl\xb9\xf1\xffp\xba\xf1\xffu\xbc\xf1\xffv\xbc\xf1\xffC\xa2\xed\xff<\x9e\xec\xffC\xa2\xec\xffI\xa4\xec\xffO\xa8\xec\xffU\xaa\xec\xffZ\xad\xec\xff`\xaf\xec\xffj\xb5\xec\xffx\xba\xed\xff\x80\xbe\xed\xff\x83\xc0\xed\xff\x89\xc2\xed\xff\x8a\xc2\xec\xff\x9b\xcb\xef\xff\xc4\xe0\xf6\xff\xd0\xe7\xf7\xff\xc8\xe3\xf7\xffe\xb5\xf2\xff[\xb0\xf1\xff^\xb1\xf1\xff2\x9a\xec\xff\x1b\x8d\xea\xff#\x92\xea\xff)\x95\xea\xff/\x98\xea\xff4\x9a\xea\xff6\x9b\xea\xff9\x9c\xea\xff?\x9f\xea\xffF\xa3\xea\xffL\xa5\xea\xffS\xa8\xea\xffZ\xab\xea\xff_\xad\xea\xffg\xb2\xea\xff\x8d\xc7\xf1\xff\x95\xca\xf1\xff\x99\xcc\xf1\xff\x9c\xcd\xf1\xff\xa6\xd3\xf5\xff\xa4\xd1\xf2\xff^\x9e\xcc\xff\x0b`\xa1\xff\x00V\x99\xef\x00\\\x9er\x00\\\x9e\x04\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x17\x00[\x9d\xa0\x00V\x98\xff\x12i\xaa\xffM\x9e\xd9\xffr\xbe\xf5\xffs\xbc\xf3\xffu\xbc\xf1\xffv\xbc\xf1\xffC\xa2\xed\xff<\x9e\xec\xffC\xa2\xec\xffI\xa4\xec\xffO\xa8\xec\xffT\xa9\xec\xff\\\xad\xec\xffi\xb4\xed\xffs\xba\xed\xffy\xbb\xed\xff\x7f\xbe\xed\xff\x83\xc0\xed\xff\x85\xc0\xec\xff\x9e\xcd\xf0\xff\xc3\xe1\xf7\xff\xcb\xe4\xf7\xff\xcd\xe6\xf7\xff\xc8\xe3\xf7\xffe\xb5\xf2\xffY\xaf\xf1\xffa\xb3\xf1\xfff\xb5\xf1\xffA\xa2\xed\xff \x90\xea\xff(\x94\xea\xff.\x98\xea\xff4\x9a\xea\xff;\x9d\xea\xff?\x9f\xea\xffA\xa0\xea\xffE\xa2\xea\xffL\xa5\xea\xffS\xa8\xea\xffZ\xab\xea\xff_\xad\xea\xffg\xb2\xea\xff\x8d\xc7\xf1\xff\x95\xca\xf1\xff\x9c\xce\xf3\xff\xa5\xd4\xf6\xff~\xb6\xdf\xff%t\xaf\xff\x00U\x99\xff\x00Z\x9c\xb6\x00\\\x9e&\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9eI\x00X\x9b\xd3\x00X\x9b\xff)}\xbb\xffg\xb2\xea\xff|\xc2\xf6\xffv\xbc\xf1\xffC\xa2\xed\xff<\x9e\xec\xffC\xa2\xec\xffH\xa4\xec\xffO\xa7\xec\xff[\xae\xec\xffh\xb3\xed\xffn\xb7\xed\xffs\xba\xed\xffy\xbb\xed\xff~\xbd\xed\xff\x80\xbf\xec\xff\xa1\xcf\xf1\xff\xc3\xe1\xf7\xff\xc6\xe3\xf7\xff\xc9\xe3\xf7\xff\xcd\xe6\xf7\xff\xc8\xe3\xf7\xffe\xb5\xf2\xffY\xaf\xf1\xff_\xb2\xf1\xffe\xb5\xf1\xffm\xb9\xf1\xffQ\xaa\xee\xff)\x94\xea\xff,\x97\xea\xff4\x9a\xea\xff:\x9d\xea\xffA\xa0\xea\xffG\xa3\xea\xffJ\xa5\xea\xffM\xa5\xea\xffS\xa8\xea\xffZ\xab\xea\xff_\xad\xea\xffg\xb2\xea\xff\x8d\xc7\xf1\xff\x9d\xd0\xf6\xff\x93\xc7\xed\xffF\x8d\xc2\xff\x02Z\x9d\xff\x00W\x9a\xe4\x00\\\x9e`\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\n\x00\\\x9e\x81\x00V\x99\xf6\x0cc\xa4\xffM\x9a\xd3\xffz\xbf\xf3\xffD\xa3\xee\xff;\x9e\xec\xffB\xa2\xec\xffM\xa6\xec\xff[\xae\xed\xffd\xb2\xed\xffh\xb4\xed\xffm\xb6\xed\xffs\xba\xed\xffw\xba\xed\xff~\xbd\xed\xff\xa5\xd2\xf3\xff\xc1\xe0\xf7\xff\xc2\xe0\xf7\xff\xc5\xe2\xf7\xff\xc9\xe3\xf7\xff\xcd\xe6\xf7\xff\xc8\xe3\xf7\xffe\xb5\xf2\xffY\xaf\xf1\xff_\xb2\xf1\xffd\xb4\xf1\xffi\xb7\xf1\xffs\xbb\xf1\xffa\xb2\xef\xff3\x9a\xeb\
xff0\x98\xea\xff:\x9d\xea\xffA\xa0\xea\xffG\xa3\xea\xffM\xa6\xea\xffR\xa8\xea\xffV\xa9\xea\xffZ\xab\xea\xff_\xad\xea\xffg\xb2\xeb\xff\x94\xcc\xf5\xffn\xad\xda\xff\x18k\xa9\xff\x00T\x98\xff\x00[\x9d\x9b\x00\\\x9e\x18\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e/\x00Y\x9c\xbd\x00W\x99\xff%z\xb7\xffB\xa0\xea\xff;\x9e\xec\xffK\xa6\xed\xffY\xad\xed\xff_\xaf\xed\xffc\xb2\xed\xffh\xb4\xed\xffm\xb6\xed\xffp\xb8\xec\xff}\xbd\xee\xff\xa8\xd4\xf4\xff\xbe\xdf\xf8\xff\xbd\xde\xf7\xff\xc2\xe0\xf7\xff\xc5\xe2\xf7\xff\xc9\xe3\xf7\xff\xcd\xe6\xf7\xff\xc8\xe3\xf7\xffe\xb5\xf2\xffY\xaf\xf1\xff_\xb2\xf1\xffd\xb4\xf1\xffi\xb7\xf1\xffn\xb9\xf1\xffx\xbe\xf1\xffq\xb9\xf0\xff@\xa0\xeb\xff5\x9b\xea\xffA\xa0\xea\xffG\xa3\xea\xffM\xa6\xea\xffS\xa9\xea\xffZ\xab\xea\xff]\xae\xea\xff]\xac\xea\xffk\xb3\xeb\xff:\x87\xbf\xff\x00X\x9a\xff\x00X\x9b\xd1\x00\\\x9eD\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x03\x00[\x9d\xb6\x00[\x9e\xffi\xb2\xe8\xffq\xbc\xf4\xffT\xaa\xed\xffV\xaa\xed\xff^\xaf\xed\xffc\xb2\xed\xffh\xb4\xed\xffj\xb5\xec\xffz\xbd\xee\xff\xa9\xd5\xf5\xff\xb9\xdd\xf8\xff\xba\xdd\xf7\xff\xbd\xde\xf7\xff\xc2\xe0\xf7\xff\xc5\xe2\xf7\xff\xc9\xe3\xf7\xff\xcd\xe6\xf7\xff\xc8\xe3\xf7\xffe\xb5\xf2\xffY\xaf\xf1\xff_\xb2\xf1\xffd\xb4\xf1\xffi\xb7\xf1\xffn\xb9\xf1\xfft\xbc\xf1\xff~\xbf\xf1\xff|\xbe\xf0\xffM\xa6\xeb\xff;\x9d\xea\xffG\xa3\xea\xffM\xa6\xea\xffS\xa9\xea\xffW\xaa\xea\xff]\xae\xea\xff\x84\xc1\xee\xff\x9c\xc9\xea\xff\ta\xa1\xff\x00Y\x9b\xcd\x00\\\x9e\x0e\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00[\x9d\x8c\x04_\xa1\xff~\xbc\xea\xff\x9c\xd2\xfa\xff\x8a\xc7\xf5\xffg\xb4\xef\xffY\xad\xec\xffa\xb1\xed\xffd\xb2\xec\xff|\xbe\xef\xff\xa9\xd4\xf6\xff\xb3\xda\xf7\xff\xb6\xdb\xf7\xff\xba\xdd\xf7\xff\xbd\xde\xf7\xff\xc2\xe0\xf7\xff\xc5\xe2\xf7\xff\xc9\xe3\xf7\xff\xcd\xe6\xf7\xff\xc8\xe3\xf7\xffe\xb5\xf2\xffY\xaf\xf1\xff_\xb2\xf1\xffd\xb4\xf1\xffi\xb7\xf1\xffn\xb9\xf1\xfft\xbc\xf1\xff{\xbe\xf1\xff\x83\xc2\xf1\xff\x88\xc4\xf1\xff^\xae\xed\xffB\xa0\xea\xffK\xa5\xea\xffN\xa6\xea\xffd\xb0\xeb\xff\x97\xca\xef\xff\xb7\xda\xf3\xff\xa9\xd0\xeb\xff\x0ef\xa4\xff\x00Z\x9c\xa7\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00[\x9d\x92\x04_\xa1\xff\x82\xbf\xee\xff\x98\xd0\xfb\xff\x99\xce\xf7\xff\x9a\xce\xf7\xff\x81\xc1\xf2\xffd\xb2\xed\xff~\xbf\xf0\xff\xa8\xd5\xf7\xff\xae\xd8\xf7\xff\xb1\xd9\xf7\xff\xb6\xdb\xf7\xff\xba\xdd\xf7\xff\xbd\xde\xf7\xff\xc2\xe0\xf7\xff\xc5\xe2\xf7\xff\xc9\xe3\xf7\xff\xcd\xe6\xf7\xff\xc8\xe3\xf7\xffe\xb5\xf2\xffY\xaf\xf1\xff_\xb2\xf1\xffd\xb4\xf1\xffi\xb7\xf1\xffn\xb9\xf1\xfft\xbc\xf1\x
ff{\xbe\xf1\xff\x81\xc1\xf1\xff\x89\xc5\xf1\xff\x91\xc9\xf1\xffo\xb7\xed\xffN\xa6\xea\xffu\xb9\xed\xff\xa3\xd0\xf0\xff\xaf\xd6\xf1\xff\xb5\xda\xf4\xff\xaf\xd5\xef\xff\x0ef\xa4\xff\x00Z\x9c\xaa\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9em\x01[\x9d\xffT\x9c\xd1\xff\x98\xd0\xfb\xff\x9b\xd1\xfa\xff\x99\xce\xf7\xff\x9f\xd2\xf7\xff\x9a\xcf\xf6\xff\xa4\xd4\xf7\xff\xa9\xd6\xf7\xff\xac\xd7\xf7\xff\xb1\xd9\xf7\xff\xb6\xdb\xf7\xff\xba\xdd\xf7\xff\xbd\xde\xf7\xff\xc2\xe0\xf7\xff\xc5\xe2\xf7\xff\xc9\xe3\xf7\xff\xcd\xe6\xf7\xff\xc8\xe3\xf7\xffe\xb5\xf2\xffY\xaf\xf1\xff_\xb2\xf1\xffd\xb4\xf1\xffi\xb7\xf1\xffn\xb9\xf1\xfft\xbc\xf1\xff{\xbe\xf1\xff\x81\xc1\xf1\xff\x88\xc4\xf1\xff\x8e\xc7\xf1\xff\x96\xcb\xf1\xff\x93\xc8\xf0\xff\xa3\xd0\xf1\xff\xa7\xd3\xf1\xff\xb3\xd9\xf5\xff\xae\xd4\xf0\xffZ\x98\xc7\xff\x03\\\x9d\xff\x00[\x9d{\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x16\x00[\x9d\xcd\x00W\x99\xff*z\xb6\xff\x85\xc0\xec\xff\xa3\xd6\xfd\xff\x9d\xd1\xf8\xff\xa0\xd3\xf7\xff\xa4\xd4\xf7\xff\xa8\xd5\xf7\xff\xac\xd7\xf7\xff\xb1\xd9\xf7\xff\xb6\xdb\xf7\xff\xba\xdd\xf7\xff\xbd\xde\xf7\xff\xc2\xe0\xf7\xff\xc5\xe2\xf7\xff\xc9\xe3\xf7\xff\xcd\xe6\xf7\xff\xc8\xe3\xf7\xffe\xb5\xf2\xffY\xaf\xf1\xff_\xb2\xf1\xffd\xb4\xf1\xffi\xb7\xf1\xffn\xb9\xf1\xfft\xbc\xf1\xff{\xbe\xf1\xff\x81\xc1\xf1\xff\x88\xc4\xf1\xff\x8e\xc7\xf1\xff\x94\xca\xf1\xff\x9c\xcd\xf1\xff\xa3\xd1\xf2\xff\xb1\xd9\xf6\xff\x87\xba\xdf\xff!p\xac\xff\x00T\x99\xff\x00[\x9e\xb9\x00\\\x9e\x13\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x0b\x00\\\x9e\x81\x00V\x99\xf5\na\xa2\xff\\\x9f\xd2\xff\xa0\xd3\xf9\xff\xa6\xd7\xfb\xff\xa4\xd4\xf7\xff\xa8\xd5\xf7\xff\xac\xd7\xf7\xff\xb1\xd9\xf7\xff\xb6\xdb\xf7\xff\xba\xdd\xf7\xff\xbd\xde\xf7\xff\xc2\xe0\xf7\xff\xc5\xe2\xf7\xff\xc9\xe3\xf7\xff\xcd\xe6\xf7\xff\xc8\xe3\xf7\xffe\xb5\xf2\xffY\xaf\xf1\xff_\xb2\xf1\xffd\xb4\xf1\xffi\xb7\xf1\xffn\xb9\xf1\xfft\xbc\xf1\xff{\xbe\xf1\xff\x81\xc1\xf1\xff\x88\xc4\xf1\xff\x8e\xc7\xf1\xff\x94\xca\xf1\xff\xa3\xd2\xf5\xff\x9e\xcd\xf0\xffQ\x95\xc6\xff\x03[\x9d\xff\x00W\x9a\xe9\x00\\\x9ei\x00\\\x9e\x01\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e.\x00Z\x9c\xb6\x00U\x99\xff(x\xb3\xff\x81\xbc\xe6\xff\xae\xdb\xfc\xff\xab\xd7\xf9\xff\xac\xd7\xf7\xff\xb1\xd9\xf7\xff\xb6\xdb\xf7\xff\xba\xdd\xf7\xff\xbd\xde\xf7\xff\xc2\xe0\xf7\xff\xc5\xe2\xf7\xff\xc9\x
e3\xf7\xff\xcd\xe6\xf7\xff\xc8\xe3\xf7\xffe\xb5\xf2\xffY\xaf\xf1\xff_\xb2\xf1\xffd\xb4\xf1\xffi\xb7\xf1\xffn\xb9\xf1\xfft\xbc\xf1\xff{\xbe\xf1\xff\x81\xc1\xf1\xff\x88\xc4\xf1\xff\x92\xca\xf3\xff\x9c\xd0\xf5\xffr\xae\xda\xff\x1dn\xab\xff\x00U\x99\xff\x00[\x9d\x9f\x00\\\x9e\x1d\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9ed\x00V\x99\xe9\x03[\x9e\xffL\x92\xc6\xff\xa1\xd0\xf3\xff\xb6\xde\xfc\xff\xb2\xda\xf7\xff\xb6\xdb\xf7\xff\xba\xdd\xf7\xff\xbd\xde\xf7\xff\xc2\xe0\xf7\xff\xc5\xe2\xf7\xff\xc9\xe3\xf7\xff\xcd\xe6\xf7\xff\xc8\xe3\xf7\xffe\xb5\xf2\xffY\xaf\xf1\xff_\xb2\xf1\xffd\xb4\xf1\xffi\xb7\xf1\xffn\xb9\xf1\xfft\xbc\xf1\xff{\xbe\xf1\xff\x82\xc2\xf2\xff\x90\xca\xf6\xff\x82\xbe\xe9\xff8\x84\xbc\xff\x00X\x9b\xff\x00W\x9b\xd9\x00\\\x9eO\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x1c\x00[\x9d\x9f\x00T\x98\xff\x18j\xa8\xfft\xaf\xd9\xff\xb8\xde\xfa\xff\xbb\xdf\xfa\xff\xba\xdd\xf7\xff\xbd\xde\xf7\xff\xc2\xe0\xf7\xff\xc5\xe2\xf7\xff\xc9\xe3\xf7\xff\xcd\xe6\xf7\xff\xc8\xe3\xf7\xffe\xb5\xf2\xffY\xaf\xf1\xff_\xb2\xf1\xffd\xb4\xf1\xffi\xb7\xf1\xffn\xb9\xf1\xfft\xbc\xf1\xff\x80\xc2\xf4\xff\x85\xc4\xf3\xffR\x9a\xcf\xff\rc\xa3\xff\x00U\x99\xf9\x00[\x9e\x88\x00\\\x9e\x0f\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9eG\x00X\x9b\xd0\x00V\x9a\xff=\x85\xbb\xff\xa3\xce\xee\xff\xc6\xe5\xfd\xff\xbf\xdf\xf8\xff\xc2\xe0\xf7\xff\xc5\xe2\xf7\xff\xc9\xe3\xf7\xff\xcd\xe6\xf7\xff\xc8\xe3\xf7\xffe\xb5\xf2\xffY\xaf\xf1\xff_\xb2\xf1\xffd\xb4\xf1\xffi\xb7\xf1\xffo\xba\xf2\xff|\xc2\xf6\xffj\xb0\xe4\xff%w\xb4\xff\x00U\x99\xff\x00Z\x9c\xbc\x00\\\x9e3\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x08\x00\\\x9e{\x00T\x99\xf5\rb\xa2\xffk\xa5\xd0\xff\xbe\xdf\xf7\xff\xc
b\xe6\xfb\xff\xc5\xe2\xf7\xff\xc9\xe3\xf7\xff\xcd\xe6\xf7\xff\xc8\xe3\xf7\xffe\xb5\xf2\xffY\xaf\xf1\xff_\xb2\xf1\xffd\xb4\xf1\xffn\xbc\xf5\xffm\xb8\xf0\xff=\x8d\xc8\xff\x06^\x9f\xff\x00W\x99\xe9\x00\\\x9ed\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e0\x00Y\x9c\xbc\x00T\x98\xff*v\xb0\xff\x95\xc2\xe2\xff\xd0\xea\xfc\xff\xcd\xe6\xf9\xff\xcd\xe6\xf7\xff\xc8\xe3\xf7\xffe\xb5\xf2\xffY\xaf\xf1\xffa\xb4\xf3\xffj\xb9\xf5\xffN\x9f\xda\xff\x15k\xab\xff\x00V\x98\xff\x00Z\x9d\xa7\x00\\\x9e \x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9ec\x00V\x9a\xe4\x01Y\x9c\xffS\x93\xc2\xff\xbf\xdd\xf2\xff\xd9\xee\xfc\xff\xc9\xe3\xf7\xffe\xb5\xf2\xff^\xb4\xf6\xffZ\xad\xeb\xff(}\xbc\xff\x00Y\x9b\xff\x00X\x9b\xd7\x00\\\x9eO\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x18\x00[\x9d\x9a\x00S\x98\xff\x1cl\xa8\xff\x8d\xb9\xda\xff\xd1\xe9\xfb\xffk\xba\xf5\xff;\x92\xd3\xff\x0cd\xa5\xff\x00V\x99\xf7\x00[\x9d\x85\x00\\\x9e\r\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00
\\\x9e\x00\x00\\\x9eA\x00X\x9b\xd2\x00V\x9a\xff?\x87\xbb\xff&z\xb8\xff\x00W\x99\xff\x00Y\x9c\xc3\x00\\\x9e1\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x0b\x00\\\x9e\x81\x00V\x9a\xf0\x00X\x9a\xf2\x00\\\x9eu\x00\\\x9e\x03\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x0f\x00[\x9et\x00\\\x9ex\x00\\\x9e\x0b\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff
\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00\xff\xff\xff\x00', gtk.gdk.COLORSPACE_RGB, True, 8, 64, 64, 256)
window_icon = gtk.gdk.pixbuf_new_from_data('\x00\\\x9e\x00\x00\\\x9e\x00\x00^\xa0\x00\x00V\x99\x00\x00L\x91g\x00N\x93q\x00X\x9c\x00\x00^\x9f\x00\x00]\x9f\x00\x00Y\x9c\x00\x00P\x94o\x00M\x92i\x00V\x99\x00\x00^\xa0\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00^\xa0\x00\x00T\x97\x00\x00F\x8c_1\x81\xba\xff+|\xb6\xff\x00F\x8e?\x00\\\x9e\x00\x00\\\x9e\x00\x00J\x8e;$y\xb6\xff-\x7f\xbc\xff\x00E\x8cb\x00R\x97\x00\x00^\xa0\x00\x00\\\x9e\x00\x00[\x9d\x00\x00I\x8c*\x05X\x9b\xc0P\x9b\xd5\xff\xa7\xdd\xff\xff\xbb\xe5\xff\xff@\x86\xbb\xff\x00>\x89D\x00D\x89B\'{\xbb\xff\x89\xcf\xff\xff\xa2\xdb\xff\xffg\xa6\xd5\xff\x07Y\x9b\xc3\x00C\x8c,\x00[\x9d\x00\x05\\\x9e\x971\x84\xc5\xffk\xbb\xf6\xff\x8e\xd0\xff\xff\x86\xc3\xf0\xff\xa2\xd2\xf5\xff\xc4\xe7\xff\xffP\x90\xc0\xff\x15u\xbf\xfff\xbf\xff\xffx\xc1\xf6\xff\x80\xc0\xf0\xff\xb0\xdf\xff\xff\xa9\xd7\xf6\xff\\\x97\xc5\xff\x0b]\x9e\x98\x08[\x9e\xdcX\xb0\xf0\xff\x84\xd0\xff\xffx\xbc\xf0\xff\x83\xc2\xf0\xff\x88\xc3\xee\xff\xb1\xd7\xf3\xff\xf9\xff\xff\xff\xca\xec\xff\xffm\xba\xf3\xffX\xae\xee\xff{\xbe\xf0\xff\x91\xc7\xf0\xff\xd2\xf2\xff\xff\xa6\xd4\xf0\xff\x11]\x9e\xde\x00T\x96\x00\x00N\x91\x9eD\x98\xd5\xff\x84\xc9\xfc\xff\x85\xc3\xf1\xff\xb7\xdb\xf6\xff\xe9\xf4\xfc\xff\xe9\xf5\xfd\xff\xdb\xee\xfd\xff\xdf\xef\xfc\xff\xa8\xd5\xf6\xff|\xbf\xf1\xff\xa3\xd6\xfc\xffl\xaa\xd6\xff\x00J\x91\xa1\x00Q\x96\x00\x00^\xa0\x00\x00T\x97\x00\x008\x7f\x9eC\x94\xd1\xff\xde\xf6\xff\xff\xf5\xfc\xff\xff\xe0\xef\xfb\xff\xe0\xf0\xfb\xff\xc8\xe5\xfb\xff\xcf\xe7\xfb\xff\xff\xff\xff\xff\xfe\xff\xff\xffV\x9d\xd2\xff\x002\x80\xa2\x00Q\x96\x00\x00_\xa0\x00\x00W\x99\x00\x00I\x8cq9\x89\xc3\xf1Y\xb0\xf2\xffR\xaa\xef\xff\xbc\xde\xf7\xff\xf9\xfc\xfe\xff\xe3\xf2\xfb\xff\xd3\xea\xfc\xff\xf5\xfb\xff\xff\xb7\xdb\xf7\xffd\xb1\xed\xff\x86\xc3\xf2\xffR\x93\xc4\xf3\x00D\x8du\x00T\x99\x00\x06Z\x9d\xb3I\xa0\xe0\xff\x8a\xd2\xff\xffe\xb5\xf2\xff/\x97\xe8\xffK\xa4\xe9\xff\x9c\xcd\xf0\xff\xf6\xf9\xfc\xff\xd6\xec\xfc\xffX\xab\xf0\xff\x15\x8a\xe6\xff9\x9b\xe6\xff\x8c\xc6\xf1\xff\xd1\xf0\xff\xff\x8b\xbe\xe1\xff\x0e\\\x9d\xb6\x07]\x9f\xc1D\x98\xd9\xff\x85\xcd\xff\xffm\xbc\xf9\xff;\x9d\xe9\xff^\xae\xec\xffl\xb3\xe8\xff\xb7\xd9\xf2\xffC\xa2\xef\xff\x00s\xe5\xff3\x99\xea\xffL\xa3\xe7\xff\x96\xce\xf9\xff\xc7\xeb\xff\xff\x81\xb3\xd9\xff\x10_\x9f\xc4\x00X\x9a\x00\x00H\x8bU\x1eq\xad\xeeR\xa8\xe8\xffA\xa4\xf1\xff`\xae\xea\xff\xa9\xd3\xf2\xff\xc8\xe4\xf8\xffh\xb7\xf2\xff@\xa2\xed\xff,\x95\xe8\xffQ\xaa\xef\xff|\xba\xe9\xff*u\xae\xf1\x00A\x8bX\x00V\x9a\x00\x00\\\x9e\x00\x00]\x9f\x00\x00>\x84\x0c"v\xb3\xff\x9b\xdb\xff\xff\x97\xcf\xf8\xff\xce\xe6\xf8\xff\xc5\xe1\xf7\xffe\xb5\xf1\xfft\xbc\xf0\xffu\xbe\xf5\xff\xa9\xde\xff\xff0{\xb0\xff\x00:\x85\x0f\x00]\x9f\x00\x00]\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00Y\x9c\x00\x02S\x97zH\x89\xbf\xff\xb8\xe3\xfd\xff\xe8\xfb\xff\xff\xc2\xdf\xf7\xff`\xb3\xf1\xff\x82\xcb\xff\xff\xa1\xd3\xf7\xffJ\x88\xb8\xff\x00S\x96r\x00Z\x9d\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00]\x9f\x00\x00[\x9d\x00\x00C\x8b*\x08W\x9b\xc5\x8c\xb9\xda\xff\xea\xfd\xff\xff\x80\xcb\xff\xffG\x97\xd4\xff\x03W\x99\xbc\x00E\x8d"\x00[\x9e\x00\x00]\x9f\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00_\xa0\x00\x00Q\x96\x00\x00C\x8di>\x88\xbd\xff,\x7f\xbb\xff\x00G\x8c`\x00T\x98\x00\x00^\xa0\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00_\xa0\x00\x00R\x98\x00\x00I\x92r\x00P\x92n\x00V\x99\x00\x00^\xa0\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00\x00\\\x9e\x00', 
gtk.gdk.COLORSPACE_RGB, True, 8, 16, 16, 64)
GUI_AVAILABLE = os.environ.get("DISPLAY", '')
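# choose the GTK installer when an X display is available, the console one otherwise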
if GUI_AVAILABLE:
def download():
import pygtk
pygtk.require("2.0")
import gtk
import gobject
import pango
import webbrowser
gtk.gdk.threads_init()
load_serialized_images()
global FatalVisibleError
def FatalVisibleError(s):
error = gtk.MessageDialog(parent = None,
flags = gtk.DIALOG_MODAL,
type = gtk.MESSAGE_ERROR,
buttons = gtk.BUTTONS_OK,
message_format = s)
error.set_title("Error")
error.run()
gtk.main_quit()
sys.exit(-1)
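        # runs a generator on a background thread and marshals each yielded
        # tuple back to the GTK main loop via gobject.idle_add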
class GeneratorTask(object):
def __init__(self, generator, loop_callback, on_done=None, on_exception=None):
self.generator = generator
self.loop_callback = loop_callback
self.on_done = on_done
self.on_exception = on_exception
def _run(self, *args, **kwargs):
self._stopped = False
try:
for ret in self.generator(*args, **kwargs):
if ret is None:
ret = ()
if not isinstance(ret, tuple):
ret = (ret,)
gobject.idle_add(self.loop_callback, *ret)
if self._stopped:
thread.exit()
except Exception, ex:
print ex
if self.on_exception is not None:
gobject.idle_add(self.on_exception, ex)
else:
if self.on_done is not None:
gobject.idle_add(self.on_done)
            def start(self, *args, **kwargs):
                t = threading.Thread(target=self._run, args=args, kwargs=kwargs)
                t.setDaemon(True)
                t.start()
                # return self so that `task = GeneratorTask(...).start()` keeps a
                # handle to the task; without this the assignment is None and
                # task.stop() in handle_cancel silently does nothing
                return self
def stop(self):
self._stopped = True
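        # dialog that downloads and unpacks the Dropbox daemon, showing
        # progress and a clickable link to more information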
class DownloadDialog(gtk.Dialog):
def handle_delete_event(self, wid, ev, data=None):
self.handle_cancel(wid)
def handle_dont_show_toggle(self, button, data=None):
reroll_autostart(not button.get_active())
def handle_cancel(self, button):
if self.task:
self.task.stop()
if self.download:
self.download.cancel()
gtk.main_quit()
self.user_cancelled = True
def handle_ok(self, button):
# begin download
self.ok.hide()
self.download = DownloadState()
self.label.hide()
if self.dont_show_again_align is not None:
self.dont_show_again_align.hide()
self.progress.show()
def download_progress(progress, status):
if not status:
self.task.stop()
self.update_progress(DOWNLOADING, progress)
def finished():
self.update_progress(DOWNLOADING, 1.0)
self.unpack_dropbox()
def error(ex):
FatalVisibleError(ERROR_CONNECTING)
self.update_progress(DOWNLOADING, 0)
self.task = GeneratorTask(self.download.copy_data,
download_progress,
finished, error).start()
def update_progress(self, text, fraction):
self.progress.set_text(text % int(fraction*100))
self.progress.set_fraction(fraction)
def unpack_dropbox(self):
def unpack_progress(name, i, total):
self.update_progress(UNPACKING, float(i)/total)
def finished():
self.update_progress(UNPACKING, 1.0)
gtk.main_quit()
def error(ex):
if isinstance(ex, SignatureVerifyError):
FatalVisibleError(ERROR_SIGNATURE)
else:
FatalVisibleError(ERROR_CONNECTING)
self.task = GeneratorTask(self.download.unpack,
unpack_progress,
finished, error).start()
def mouse_down(self, widget, event):
if self.hovering:
self.clicked_link = True
def mouse_up(self, widget, event):
if self.clicked_link:
webbrowser.open(LINK)
self.clicked_link = False
def label_motion(self, widget, event):
offx, offy = self.label.get_layout_offsets()
layout = self.label.get_layout()
index = layout.xy_to_index(int((offx+event.x)*pango.SCALE),
int((offy+event.y)*pango.SCALE))[0]
link_index = layout.get_text().find(LINK)
if index >= link_index and index < link_index+len(LINK):
self.hovering = True
self.label_box.window.set_cursor(gtk.gdk.Cursor(gtk.gdk.HAND2))
else:
self.hovering = False
self.label_box.window.set_cursor(gtk.gdk.Cursor(gtk.gdk.ARROW))
def __init__(self):
super(DownloadDialog, self).__init__(parent = None,
title = "Dropbox Installation")
self.download = None
self.hovering = False
self.clicked_link = False
self.user_cancelled = False
self.task = None
self.ok = ok = gtk.Button(stock=gtk.STOCK_OK)
ok.connect('clicked', self.handle_ok)
self.action_area.add(ok)
ok.show()
cancel = gtk.Button(stock=gtk.STOCK_CANCEL)
cancel.connect('clicked', self.handle_cancel)
self.action_area.add(cancel)
cancel.show()
self.connect('delete_event', self.handle_delete_event)
self.box_logo = gtk.image_new_from_pixbuf(box_logo_pixbuf)
self.box_logo.show()
self.set_icon(window_icon)
self.progress = gtk.ProgressBar()
self.progress.set_property('width-request', 300)
self.label = gtk.Label()
GPG_WARNING_MSG = (u"\n\n" + GPG_WARNING) if not gpgme else u""
self.label.set_markup('%s <span foreground="#000099" underline="single" weight="bold">%s</span>\n\n%s%s' % (INFO, LINK, WARNING, GPG_WARNING_MSG))
self.label.set_line_wrap(True)
self.label.set_property('width-request', 300)
self.label.show()
self.label_box = gtk.EventBox()
self.label_box.add(self.label)
self.label_box.connect("button-release-event", self.mouse_up)
self.label_box.connect("button-press-event", self.mouse_down)
self.label_box.connect("motion-notify-event", self.label_motion)
self.label_box.show()
def on_realize(widget):
self.label_box.add_events(gtk.gdk.POINTER_MOTION_MASK)
self.label_box.connect("realize", on_realize)
self.hbox = gtk.HBox(spacing=10)
self.hbox.set_property('border-width',10)
self.hbox.pack_start(self.box_logo, False, False)
self.hbox.pack_start(self.label_box, False, False)
self.hbox.pack_start(self.progress, False, False)
self.hbox.show()
self.vbox.add(self.hbox)
self.dont_show_again_align = None
try:
if can_reroll_autostart():
dont_show_again = gtk.CheckButton("_Don't show this again")
dont_show_again.connect('toggled', self.handle_dont_show_toggle)
dont_show_again.show()
self.dont_show_again_align = gtk.Alignment(xalign=1.0, yalign=0.0, xscale=0.0, yscale=0.0)
self.dont_show_again_align.add(dont_show_again)
self.dont_show_again_align.show()
hbox = gtk.HBox()
hbox.set_property('border-width', 10)
hbox.pack_start(self.dont_show_again_align, True, True)
hbox.show()
self.vbox.add(hbox)
self.set_resizable(False)
except:
traceback.print_exc()
self.ok.grab_focus()
dialog = DownloadDialog()
dialog.show()
gtk.main()
if dialog.user_cancelled:
raise Exception("user cancelled download!!!")
else:
def download():
global FatalVisibleError
def FatalVisibleError(s):
console_print(u"\nError: %s" % s, f=sys.stderr)
sys.exit(-1)
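        # VT100 escape sequences used to redraw the progress line in place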
ESC = "\x1b"
save = ESC+"7"
unsave = ESC+"8"
clear = ESC+"[2J"
erase_to_start = ESC+"[1K"
write = sys.stdout.write
flush = sys.stdout.flush
last_progress = [None, None]
def setprogress(text, frac):
if last_progress == [text, frac]:
return
if sys.stdout.isatty():
write(erase_to_start)
write(unsave)
console_print(text % int(100*frac), linebreak=not sys.stdout.isatty())
if sys.stdout.isatty():
flush()
last_progress[0], last_progress[1] = text, frac
console_print()
if sys.stdout.isatty():
write(save)
flush()
console_print(u"%s %s\n" % (INFO, LINK))
GPG_WARNING_MSG = (u"\n%s" % GPG_WARNING) if not gpgme else u""
if not yes_no_question("%s%s" % (WARNING, GPG_WARNING_MSG)):
return
download = DownloadState()
try:
for progress, status in download.copy_data():
if not status:
break
setprogress(DOWNLOADING, progress)
except Exception:
FatalVisibleError(ERROR_CONNECTING)
else:
setprogress(DOWNLOADING, 1.0)
console_print()
write(save)
try:
for name, i, total in download.unpack():
setprogress(UNPACKING, float(i)/total)
except SignatureVerifyError:
FatalVisibleError(ERROR_SIGNATURE)
except Exception:
FatalVisibleError(ERROR_CONNECTING)
else:
setprogress(UNPACKING, 1.0)
console_print()
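# background thread that animates a "[.  ]" ticker on stderr while a
# potentially slow daemon command is in flight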
class CommandTicker(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
self.stop_event = threading.Event()
def stop(self):
self.stop_event.set()
def run(self):
ticks = ['[. ]', '[.. ]', '[...]', '[ ..]', '[ .]', '[ ]']
i = 0
first = True
while True:
self.stop_event.wait(0.25)
if self.stop_event.isSet(): break
if i == len(ticks):
first = False
i = 0
if not first:
sys.stderr.write("\r%s\r" % ticks[i])
sys.stderr.flush()
i += 1
sys.stderr.flush()
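# client for the daemon's line-oriented protocol on the unix socket at
# ~/.dropbox/command_socket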
class DropboxCommand(object):
class CouldntConnectError(Exception): pass
class BadConnectionError(Exception): pass
class EOFError(Exception): pass
class CommandError(Exception): pass
def __init__(self, timeout=5):
self.s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
self.s.settimeout(timeout)
try:
self.s.connect(os.path.expanduser(u'~/.dropbox/command_socket'))
except socket.error, e:
raise DropboxCommand.CouldntConnectError()
self.f = self.s.makefile("r+", 4096)
def close(self):
self.f.close()
self.s.close()
def __readline(self):
try:
toret = self.f.readline().decode('utf8').rstrip(u"\n")
except socket.error, e:
raise DropboxCommand.BadConnectionError()
if toret == '':
raise DropboxCommand.EOFError()
else:
return toret
    # sends one command: the name, tab-separated key/value argument lines,
    # then "done"; blocks until the daemon's reply has been read
def send_command(self, name, args):
self.f.write(name.encode('utf8'))
self.f.write(u"\n".encode('utf8'))
self.f.writelines((u"\t".join([k] + (list(v)
if hasattr(v, '__iter__') else
[v])) + u"\n").encode('utf8')
for k,v in args.iteritems())
self.f.write(u"done\n".encode('utf8'))
self.f.flush()
# Start a ticker
ticker_thread = CommandTicker()
ticker_thread.start()
# This is the potentially long-running call.
try:
ok = self.__readline() == u"ok"
except KeyboardInterrupt:
raise DropboxCommand.BadConnectionError("Keyboard interruption detected")
finally:
# Tell the ticker to stop.
ticker_thread.stop()
ticker_thread.join()
if ok:
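            # read at most 20 tab-separated reply lines terminated by "done";
            # a longer reply means the connection is wedged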
toret = {}
for i in range(21):
if i == 20:
raise Exception(u"close this connection!")
line = self.__readline()
if line == u"done":
break
argval = line.split(u"\t")
toret[argval[0]] = argval[1:]
return toret
else:
problems = []
for i in range(21):
if i == 20:
raise Exception(u"close this connection!")
line = self.__readline()
if line == u"done":
break
problems.append(line)
raise DropboxCommand.CommandError(u"\n".join(problems))
# this is the hotness, auto marshalling
def __getattr__(self, name):
try:
return super(DropboxCommand, self).__getattr__(name)
except:
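                # attribute doesn't exist, i know what you want: synthesize a
                # method that sends the command of the same name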
def __spec_command(**kw):
return self.send_command(unicode(name), kw)
self.__setattr__(name, __spec_command)
return __spec_command
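# registry of CLI subcommands and their aliases, populated by the
# @command and @alias decorators below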
commands = {}
aliases = {}
def command(meth):
global commands, aliases
assert meth.__doc__, "All commands need properly formatted docstrings (even %r!!)" % meth
if hasattr(meth, 'im_func'): # bound method, if we ever have one
meth = meth.im_func
commands[meth.func_name] = meth
meth_aliases = [unicode(alias) for alias in aliases.iterkeys() if aliases[alias].func_name == meth.func_name]
if meth_aliases:
meth.__doc__ += u"\nAliases: %s" % ",".join(meth_aliases)
return meth
def alias(name):
def decorator(meth):
global commands, aliases
assert name not in commands, "This alias is the name of a command."
aliases[name] = meth
return meth
return decorator
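# decorator: run the wrapped command only if the daemon is reachable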
def requires_dropbox_running(meth):
def newmeth(*n, **kw):
if is_dropbox_running():
return meth(*n, **kw)
else:
console_print(u"Dropbox isn't running!")
newmeth.func_name = meth.func_name
newmeth.__doc__ = meth.__doc__
return newmeth
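# starts ~/.dropbox-dist/dropboxd detached from this process, then polls
# is_dropbox_running() for up to a minute before giving up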
def start_dropbox():
db_path = os.path.expanduser(u"~/.dropbox-dist/dropboxd").encode(sys.getfilesystemencoding())
if os.access(db_path, os.X_OK):
f = open("/dev/null", "w")
# we don't reap the child because we're gonna die anyway, let init do it
a = subprocess.Popen([db_path], preexec_fn=os.setsid, cwd=os.path.expanduser("~"),
stderr=sys.stderr, stdout=f, close_fds=True)
# in seconds
interval = 0.5
wait_for = 60
for i in xrange(int(wait_for / interval)):
if is_dropbox_running():
return True
# back off from connect for a while
time.sleep(interval)
return False
else:
return False
# Extracted and modified from os.cmd.Cmd
def columnize(list, display_list=None, display_width=None):
if not list:
console_print(u"<empty>")
return
non_unicode = [i for i in range(len(list)) if not (isinstance(list[i], unicode))]
if non_unicode:
raise TypeError, ("list[i] not a string for i in %s" %
", ".join(map(unicode, non_unicode)))
if not display_width:
d = os.popen('stty size', 'r').read().split()
if d:
display_width = int(d[1])
else:
for item in list:
console_print(item)
return
if not display_list:
display_list = list
size = len(list)
if size == 1:
console_print(display_list[0])
return
for nrows in range(1, len(list)):
ncols = (size+nrows-1) // nrows
colwidths = []
totwidth = -2
for col in range(ncols):
colwidth = 0
for row in range(nrows):
i = row + nrows*col
if i >= size:
break
x = list[i]
colwidth = max(colwidth, len(x))
colwidths.append(colwidth)
totwidth += colwidth + 2
if totwidth > display_width:
break
if totwidth <= display_width:
break
else:
nrows = len(list)
ncols = 1
colwidths = [0]
lines = []
for row in range(nrows):
texts = []
display_texts = []
for col in range(ncols):
i = row + nrows*col
if i >= size:
x = ""
y = ""
else:
x = list[i]
y = display_list[i]
texts.append(x)
display_texts.append(y)
while texts and not texts[-1]:
del texts[-1]
original_texts = texts[:]
for col in range(len(texts)):
texts[col] = texts[col].ljust(colwidths[col])
texts[col] = texts[col].replace(original_texts[col], display_texts[col])
line = u" ".join(texts)
lines.append(line)
for line in lines:
console_print(line)
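# Illustrative sketch: columnize() lays items out column-major, like ls.
# With a display width of 20, columnize([u"alpha", u"beta", u"gamma",
# u"delta"]) would print:
#
#   alpha  gamma
#   beta   delta
#
# display_list lets callers substitute decorated (e.g. colorized) strings
# while the column widths are still computed from the plain ones.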
@command
@requires_dropbox_running
@alias('stat')
def filestatus(args):
u"""get current sync status of one or more files
dropbox filestatus [-l] [-a] [FILE]...
Prints the current status of each FILE.
options:
-l --list prints out information in a format similar to ls. works best when your console supports color :)
-a --all do not ignore entries starting with .
"""
global enc
oparser = optparse.OptionParser()
oparser.add_option("-l", "--list", action="store_true", dest="list")
oparser.add_option("-a", "--all", action="store_true", dest="all")
(options, args) = oparser.parse_args(args)
try:
with closing(DropboxCommand()) as dc:
if options.list:
# Listing.
# Separate directories from files.
if len(args) == 0:
dirs, nondirs = [u"."], []
else:
dirs, nondirs = [], []
for a in args:
try:
(dirs if os.path.isdir(a) else nondirs).append(a.decode(enc))
except UnicodeDecodeError:
continue
if len(dirs) == 0 and len(nondirs) == 0:
#TODO: why?
exit(1)
dirs.sort(key=methodcaller('lower'))
nondirs.sort(key=methodcaller('lower'))
# Gets a string representation for a path.
def path_to_string(file_path):
if not os.path.exists(file_path):
path = u"%s (File doesn't exist!)" % os.path.basename(file_path)
return (path, path)
try:
status = dc.icon_overlay_file_status(path=file_path).get(u'status', [None])[0]
except DropboxCommand.CommandError, e:
path = u"%s (%s)" % (os.path.basename(file_path), e)
return (path, path)
env_term = os.environ.get('TERM','')
supports_color = (sys.stderr.isatty() and (
env_term.startswith('vt') or
env_term.startswith('linux') or
'xterm' in env_term or
'color' in env_term
)
)
# TODO: Test when you don't support color.
if not supports_color:
path = os.path.basename(file_path)
return (path, path)
if status == u"up to date":
init, cleanup = "\x1b[32;1m", "\x1b[0m"
elif status == u"syncing":
init, cleanup = "\x1b[36;1m", "\x1b[0m"
elif status == u"unsyncable":
init, cleanup = "\x1b[41;1m", "\x1b[0m"
elif status == u"selsync":
init, cleanup = "\x1b[37;1m", "\x1b[0m"
else:
init, cleanup = '', ''
path = os.path.basename(file_path)
return (path, u"%s%s%s" % (init, path, cleanup))
# Prints a directory.
def print_directory(name):
clean_paths = []
formatted_paths = []
for subname in sorted(os.listdir(name), key=methodcaller('lower')):
if type(subname) != unicode:
continue
if not options.all and subname[0] == u'.':
continue
try:
clean, formatted = path_to_string(unicode_abspath(os.path.join(name, subname)))
clean_paths.append(clean)
formatted_paths.append(formatted)
except (UnicodeEncodeError, UnicodeDecodeError), e:
continue
columnize(clean_paths, formatted_paths)
try:
if len(dirs) == 1 and len(nondirs) == 0:
print_directory(dirs[0])
else:
nondir_formatted_paths = []
nondir_clean_paths = []
for name in nondirs:
try:
clean, formatted = path_to_string(unicode_abspath(name))
nondir_clean_paths.append(clean)
nondir_formatted_paths.append(formatted)
except (UnicodeEncodeError, UnicodeDecodeError), e:
continue
if nondir_clean_paths:
columnize(nondir_clean_paths, nondir_formatted_paths)
if len(nondirs) == 0:
console_print(dirs[0] + u":")
print_directory(dirs[0])
dirs = dirs[1:]
for name in dirs:
console_print()
console_print(name + u":")
print_directory(name)
except DropboxCommand.EOFError:
console_print(u"Dropbox daemon stopped.")
except DropboxCommand.BadConnectionError, e:
console_print(u"Dropbox isn't responding!")
else:
if len(args) == 0:
args = [name for name in sorted(os.listdir(u"."), key=methodcaller('lower')) if type(name) == unicode]
if len(args) == 0:
# Bail early if there's nothing to list to avoid crashing on indent below
console_print(u"<empty>")
return
indent = max(len(st)+1 for st in args)
for file in args:
try:
if type(file) is not unicode:
file = file.decode(enc)
fp = unicode_abspath(file)
except (UnicodeEncodeError, UnicodeDecodeError), e:
continue
if not os.path.exists(fp):
console_print(u"%-*s %s" % \
(indent, file+':', "File doesn't exist"))
continue
try:
status = dc.icon_overlay_file_status(path=fp).get(u'status', [u'unknown'])[0]
console_print(u"%-*s %s" % (indent, file+':', status))
except DropboxCommand.CommandError, e:
console_print(u"%-*s %s" % (indent, file+':', e))
except DropboxCommand.CouldntConnectError, e:
console_print(u"Dropbox isn't running!")
@command
@requires_dropbox_running
def ls(args):
u"""list directory contents with current sync status
dropbox ls [FILE]...
This is an alias for filestatus -l
"""
return filestatus(["-l"] + args)
@command
@requires_dropbox_running
def puburl(args):
u"""get public url of a file in your dropbox
dropbox puburl FILE
Prints out a public url for FILE.
"""
if len(args) != 1:
console_print(puburl.__doc__,linebreak=False)
return
try:
with closing(DropboxCommand()) as dc:
try:
console_print(dc.get_public_link(path=unicode_abspath(args[0].decode(sys.getfilesystemencoding()))).get(u'link', [u'No Link'])[0])
except DropboxCommand.CommandError, e:
console_print(u"Couldn't get public url: " + str(e))
except DropboxCommand.BadConnectionError, e:
console_print(u"Dropbox isn't responding!")
except DropboxCommand.EOFError:
console_print(u"Dropbox daemon stopped.")
except DropboxCommand.CouldntConnectError, e:
console_print(u"Dropbox isn't running!")
@command
@requires_dropbox_running
def status(args):
u"""get current status of the dropboxd
dropbox status
Prints out the current status of the Dropbox daemon.
"""
if len(args) != 0:
console_print(status.__doc__,linebreak=False)
return
try:
with closing(DropboxCommand()) as dc:
try:
lines = dc.get_dropbox_status()[u'status']
if len(lines) == 0:
console_print(u'Idle')
else:
for line in lines:
console_print(line)
except KeyError:
console_print(u"Couldn't get status: daemon isn't responding")
except DropboxCommand.CommandError, e:
console_print(u"Couldn't get status: " + str(e))
except DropboxCommand.BadConnectionError, e:
console_print(u"Dropbox isn't responding!")
except DropboxCommand.EOFError:
console_print(u"Dropbox daemon stopped.")
except DropboxCommand.CouldntConnectError, e:
console_print(u"Dropbox isn't running!")
@command
def running(argv):
u"""return whether dropbox is running
dropbox running
Returns 1 if running 0 if not running.
"""
return int(is_dropbox_running())
@command
@requires_dropbox_running
def stop(args):
u"""stop dropboxd
dropbox stop
Stops the dropbox daemon.
"""
try:
with closing(DropboxCommand()) as dc:
try:
dc.tray_action_hard_exit()
except DropboxCommand.BadConnectionError, e:
console_print(u"Dropbox isn't responding!")
except DropboxCommand.EOFError:
console_print(u"Dropbox daemon stopped.")
except DropboxCommand.CouldntConnectError, e:
console_print(u"Dropbox isn't running!")
#returns true if link is necessary
def grab_link_url_if_necessary():
try:
with closing(DropboxCommand()) as dc:
try:
link_url = dc.needs_link().get(u"link_url", None)
if link_url is not None:
console_print(u"To link this computer to a dropbox account, visit the following url:\n%s" % link_url[0])
return True
else:
return False
except DropboxCommand.CommandError, e:
pass
except DropboxCommand.BadConnectionError, e:
console_print(u"Dropbox isn't responding!")
except DropboxCommand.EOFError:
console_print(u"Dropbox daemon stopped.")
except DropboxCommand.CouldntConnectError, e:
console_print(u"Dropbox isn't running!")
@command
@requires_dropbox_running
def lansync(argv):
u"""enables or disables LAN sync
dropbox lansync [y/n]
options:
y dropbox will use LAN sync (default)
n dropbox will not use LAN sync
"""
if len(argv) != 1:
console_print(lansync.__doc__, linebreak=False)
return
s = argv[0].lower()
if s.startswith('y') or s.startswith('-y'):
should_lansync = True
elif s.startswith('n') or s.startswith('-n'):
should_lansync = False
else:
should_lansync = None
if should_lansync is None:
console_print(lansync.__doc__,linebreak=False)
else:
with closing(DropboxCommand()) as dc:
dc.set_lan_sync(lansync='enabled' if should_lansync else 'disabled')
@command
@requires_dropbox_running
def exclude(args):
u"""ignores/excludes a directory from syncing
dropbox exclude [list]
dropbox exclude add [DIRECTORY] [DIRECTORY] ...
dropbox exclude remove [DIRECTORY] [DIRECTORY] ...
"list" prints a list of directories currently excluded from syncing.
"add" adds one or more directories to the exclusion list, then resynchronizes Dropbox.
"remove" removes one or more directories from the exclusion list, then resynchronizes Dropbox.
With no arguments, executes "list".
Any specified path must be within Dropbox.
"""
if len(args) == 0:
try:
with closing(DropboxCommand()) as dc:
try:
lines = [relpath(path) for path in dc.get_ignore_set()[u'ignore_set']]
lines.sort()
if len(lines) == 0:
console_print(u'No directories are being ignored.')
else:
console_print(u'Excluded: ')
for line in lines:
console_print(unicode(line))
except KeyError:
console_print(u"Couldn't get ignore set: daemon isn't responding")
except DropboxCommand.CommandError, e:
if e.args[0].startswith(u"No command exists by that name"):
console_print(u"This version of the client does not support this command.")
else:
console_print(u"Couldn't get ignore set: " + str(e))
except DropboxCommand.BadConnectionError, e:
console_print(u"Dropbox isn't responding!")
except DropboxCommand.EOFError:
console_print(u"Dropbox daemon stopped.")
except DropboxCommand.CouldntConnectError, e:
console_print(u"Dropbox isn't running!")
elif len(args) == 1 and args[0] == u"list":
exclude([])
elif len(args) >= 2:
sub_command = args[0]
paths = args[1:]
absolute_paths = [unicode_abspath(path.decode(sys.getfilesystemencoding())) for path in paths]
if sub_command == u"add":
try:
with closing(DropboxCommand(timeout=None)) as dc:
try:
result = dc.ignore_set_add(paths=absolute_paths)
if result[u"ignored"]:
console_print(u"Excluded: ")
lines = [relpath(path) for path in result[u"ignored"]]
for line in lines:
console_print(unicode(line))
except KeyError:
console_print(u"Couldn't add ignore path: daemon isn't responding")
except DropboxCommand.CommandError, e:
if e.args[0].startswith(u"No command exists by that name"):
console_print(u"This version of the client does not support this command.")
else:
console_print(u"Couldn't get ignore set: " + str(e))
except DropboxCommand.BadConnectionError, e:
console_print(u"Dropbox isn't responding! [%s]" % e)
except DropboxCommand.EOFError:
console_print(u"Dropbox daemon stopped.")
except DropboxCommand.CouldntConnectError, e:
console_print(u"Dropbox isn't running!")
elif sub_command == u"remove":
try:
with closing(DropboxCommand(timeout=None)) as dc:
try:
result = dc.ignore_set_remove(paths=absolute_paths)
if result[u"removed"]:
console_print(u"No longer excluded: ")
lines = [relpath(path) for path in result[u"removed"]]
for line in lines:
console_print(unicode(line))
except KeyError:
console_print(u"Couldn't remove ignore path: daemon isn't responding")
except DropboxCommand.CommandError, e:
if e.args[0].startswith(u"No command exists by that name"):
console_print(u"This version of the client does not support this command.")
else:
console_print(u"Couldn't get ignore set: " + str(e))
except DropboxCommand.BadConnectionError, e:
console_print(u"Dropbox isn't responding! [%s]" % e)
except DropboxCommand.EOFError:
console_print(u"Dropbox daemon stopped.")
except DropboxCommand.CouldntConnectError, e:
console_print(u"Dropbox isn't running!")
else:
console_print(exclude.__doc__, linebreak=False)
return
else:
console_print(exclude.__doc__, linebreak=False)
return
@command
def start(argv):
u"""start dropboxd
dropbox start [-i]
Starts the dropbox daemon, dropboxd. If dropboxd is already running, this will do nothing.
options:
-i --install auto install dropboxd if not available on the system
"""
should_install = "-i" in argv or "--install" in argv
# first check if dropbox is already running
if is_dropbox_running():
if not grab_link_url_if_necessary():
console_print(u"Dropbox is already running!")
return
console_print(u"Starting Dropbox...", linebreak=False)
console_flush()
if not start_dropbox():
if not should_install:
console_print()
console_print(u"The Dropbox daemon is not installed!")
console_print(u"Run \"dropbox start -i\" to install the daemon")
return
# install dropbox!!!
try:
download()
except:
traceback.print_exc()
else:
if GUI_AVAILABLE:
start_dropbox()
console_print(u"Done!")
else:
if start_dropbox():
if not grab_link_url_if_necessary():
console_print(u"Done!")
else:
if not grab_link_url_if_necessary():
console_print(u"Done!")
def can_reroll_autostart():
return u".config" in os.listdir(os.path.expanduser(u'~'))
def reroll_autostart(should_autostart):
home_dir = os.path.expanduser(u'~')
contents = os.listdir(home_dir)
# UBUNTU
if u".config" in contents:
autostart_dir = os.path.join(home_dir, u".config", u"autostart")
autostart_link = os.path.join(autostart_dir, u"dropbox.desktop")
if should_autostart:
if os.path.exists(DESKTOP_FILE):
if not os.path.exists(autostart_dir):
os.makedirs(autostart_dir)
shutil.copyfile(DESKTOP_FILE, autostart_link)
elif os.path.exists(autostart_link):
os.remove(autostart_link)
@command
def autostart(argv):
u"""automatically start dropbox at login
dropbox autostart [y/n]
options:
n dropbox will not start automatically at login
y dropbox will start automatically at login (default)
Note: May only work on current Ubuntu distributions.
"""
if len(argv) != 1:
console_print(''.join(autostart.__doc__.split('\n', 1)[1:]).decode('ascii'))
return
s = argv[0].lower()
if s.startswith('y') or s.startswith('-y'):
should_autostart = True
elif s.startswith('n') or s.startswith('-n'):
should_autostart = False
else:
should_autostart = None
if should_autostart is None:
console_print(autostart.__doc__,linebreak=False)
else:
reroll_autostart(should_autostart)
@command
def help(argv):
u"""provide help
dropbox help [COMMAND]
With no arguments, print a list of commands and a short description of each. With a command, print descriptive help on how to use the command.
"""
if not argv:
return usage(argv)
for command in commands:
if command == argv[0]:
console_print(commands[command].__doc__.split('\n', 1)[1].decode('ascii'))
return
for alias in aliases:
if alias == argv[0]:
console_print(aliases[alias].__doc__.split('\n', 1)[1].decode('ascii'))
return
console_print(u"unknown command '%s'" % argv[0], f=sys.stderr)
def usage(argv):
console_print(u"Dropbox command-line interface\n")
console_print(u"commands:\n")
console_print(u"Note: use dropbox help <command> to view usage for a specific command.\n")
out = []
for command in commands:
out.append((command, commands[command].__doc__.splitlines()[0]))
spacing = max(len(o[0])+3 for o in out)
for o in out:
console_print(" %-*s%s" % (spacing, o[0], o[1]))
console_print()
def main(argv):
global commands
# now we need to find out if one of the commands are in the
# argv list, and if so split the list at the point to
# separate the argv list at that point
cut = None
for i in range(len(argv)):
if argv[i] in commands or argv[i] in aliases:
cut = i
break
    if cut is None:
usage(argv)
os._exit(0)
return
# lol no options for now
globaloptionparser = optparse.OptionParser()
    globaloptionparser.parse_args(argv[0:cut])
    # now dispatch and run
    result = None
    if argv[cut] in commands:
        result = commands[argv[cut]](argv[cut+1:])
    elif argv[cut] in aliases:
        result = aliases[argv[cut]](argv[cut+1:])
# flush, in case output is rerouted to a file.
console_flush()
# done
return result
if __name__ == "__main__":
ret = main(sys.argv)
if ret is not None:
sys.exit(ret)
| mit |
trustedanalytics/platform-appstack | env_vars_fetcher/cdh_utilities.py | 1 | 15698 | try:
from sshtunnel import SSHTunnelForwarder
except ImportError:
from sshtunnel.sshtunnel import SSHTunnelForwarder
from cm_api.api_client import ApiResource, ApiException
from cm_api.endpoints.services import ApiService, ApiServiceSetupInfo
import paramiko
import json
import yaml
import requests
import subprocess
import zipfile
import shutil
import os
import logger
import base64
class CdhConfExtractor(object):
def __init__(self, config_filename=None):
self._logger = logger.get_info_logger(__name__)
self.config_filename = config_filename if config_filename else 'fetcher_config.yml'
config = self._load_config_yaml(self.config_filename)
self._hostname = config['machines']['cdh-launcher']['hostname']
self._hostport = config['machines']['cdh-launcher']['hostport']
self._username = config['machines']['cdh-launcher']['username']
self._key_filename = config['machines']['cdh-launcher']['key_filename']
self._key = os.path.expanduser(self._key_filename)
self._key_password = config['machines']['cdh-launcher']['key_password']
self._is_openstack = config['openstack_env']
self._is_kerberos = config['kerberos_used']
self._cdh_manager_ip = config['machines']['cdh-manager']['ip']
self._cdh_manager_user = config['machines']['cdh-manager']['user']
self._cdh_manager_sshtunnel_required = config['machines']['cdh-manager']['sshtunnel_required']
self._cdh_manager_password = config['machines']['cdh-manager']['password']
def __enter__(self):
extractor = self
try:
if self._cdh_manager_sshtunnel_required:
self._logger.info('Creating tunnel to CDH-Manager.')
extractor.create_tunnel_to_cdh_manager()
extractor.start_cdh_manager_tunneling()
self._logger.info('Tunnel to CDH-Manager has been created.')
else:
self._logger.info('Connection to CDH-Manager host without ssh tunnel.')
self._local_bind_address = self.extract_cdh_manager_host()
self._local_bind_port = 7180
return extractor
except Exception as exc:
            self._logger.error('Cannot create tunnel to CDH-Manager machine.')
raise exc
def __exit__(self, exc_type, exc_val, exc_tb):
try:
if self._cdh_manager_sshtunnel_required:
self.stop_cdh_manager_tunneling()
                self._logger.info('Tunnelling to CDH-Manager stopped.')
except Exception as exc:
self._logger.error('Cannot close tunnel to CDH-Manager machine.')
raise exc
# Cdh launcher methods
def create_ssh_connection(self, hostname, username, key_filename, key_password):
try:
self._logger.info('Creating connection to remote host {0}.'.format(hostname))
self.ssh_connection = paramiko.SSHClient()
self.ssh_connection.set_missing_host_key_policy(paramiko.AutoAddPolicy())
self.ssh_connection.connect(hostname, username=username, key_filename=key_filename, password=key_password)
self._logger.info('Connection to host {0} established.'.format(hostname))
except Exception as exc:
            self._logger.error('Cannot create connection to host {0}. Check your settings '
                               'in fetcher_config.yml file.'.format(hostname))
raise exc
def close_ssh_connection(self):
try:
self.ssh_connection.close()
self._logger.info('Connection to remote host closed.')
except Exception as exc:
self._logger.error('Cannot close connection to the remote host.')
raise exc
def ssh_call_command(self, command, subcommands=None):
self._logger.info('Calling remote command: "{0}" with subcommands "{1}"'.format(command, subcommands))
ssh_in, ssh_out, ssh_err = self.ssh_connection.exec_command(command, get_pty=True)
        if subcommands is not None:
for subcommand in subcommands:
ssh_in.write(subcommand + '\n')
ssh_in.flush()
return ssh_out.read() if ssh_out is not None else ssh_err.read()
def extract_cdh_manager_host(self):
self._logger.info('Extracting CDH-Manager address.')
if self._cdh_manager_ip is None:
self.create_ssh_connection(self._hostname, self._username, self._key_filename, self._key_password)
if self._is_openstack:
ansible_ini = self.ssh_call_command('cat ansible-cdh/platform-ansible/inventory/cdh')
else:
ansible_ini = self.ssh_call_command('cat ansible-cdh/inventory/cdh')
self._cdh_manager_ip = self._get_host_ip('cdh-manager', ansible_ini)
self.close_ssh_connection()
        self._logger.info('CDH-Manager address extracted: {}'.format(self._cdh_manager_ip))
return self._cdh_manager_ip
# Cdh manager methods
def create_tunnel_to_cdh_manager(self, local_bind_address='localhost', local_bind_port=7180, remote_bind_port=7180):
self._local_bind_address = local_bind_address
self._local_bind_port = local_bind_port
self.cdh_manager_tunnel = SSHTunnelForwarder(
(self._hostname, self._hostport),
ssh_username=self._username,
local_bind_address=(local_bind_address, local_bind_port),
remote_bind_address=(self.extract_cdh_manager_host(), remote_bind_port),
ssh_private_key_password=self._key_password,
ssh_private_key=self._key
)
def start_cdh_manager_tunneling(self):
try:
self.cdh_manager_tunnel.start()
except Exception as e:
self._logger.error('Cannot start tunnel: ' + e.message)
def stop_cdh_manager_tunneling(self):
try:
self.cdh_manager_tunnel.stop()
except Exception as e:
self._logger.error('Cannot stop tunnel: ' + e.message)
def extract_cdh_manager_details(self, settings):
for host in settings['hosts']:
if 'cdh-manager' in host['hostname']:
return host
def extract_nodes_info(self, name, settings):
nodes = []
for host in settings['hosts']:
if name in host['hostname']:
nodes.append(host)
return nodes
def extract_service_namenode(self, service_name, role_name, settings):
hdfs_service = self._find_item_by_attr_value(service_name, 'name', settings['clusters'][0]['services'])
hdfs_namenode = self._find_item_by_attr_value(role_name, 'name', hdfs_service['roles'])
host_id = hdfs_namenode['hostRef']['hostId']
return self._find_item_by_attr_value(host_id, 'hostId', settings['hosts'])['hostname']
def get_client_config_for_service(self, service_name):
result = requests.get('http://{0}:{1}/api/v10/clusters/CDH-cluster/services/{2}/clientConfig'.format(self._local_bind_address, self._local_bind_port, service_name))
return base64.standard_b64encode(result.content)
def generate_keytab(self, principal_name):
self._logger.info('Generating keytab for {} principal.'.format(principal_name))
self.create_ssh_connection(self._hostname, self._username, self._key_filename, self._key_password)
sftp = self.ssh_connection.open_sftp()
sftp.put('utils/generate_keytab_script.sh', '/tmp/generate_keytab_script.sh')
self.ssh_call_command('scp -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no /tmp/generate_keytab_script.sh {0}:/tmp/'.format(self._cdh_manager_ip))
self.ssh_call_command('ssh -t {0} -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no "chmod 700 /tmp/generate_keytab_script.sh"'.format(self._cdh_manager_ip))
keytab_hash = self.ssh_call_command('ssh -t {0} -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no "/tmp/generate_keytab_script.sh {1}"'
.format(self._cdh_manager_ip, principal_name))
self.close_ssh_connection()
lines = keytab_hash.splitlines()
self._logger.info('Keytab for {} principal has been generated.'.format(principal_name))
return '"{}"'.format(''.join(lines[2:-2]))
def generate_base64_for_file(self, file_path, hostname):
self._logger.info('Generating base64 for {} file.'.format(file_path))
self.create_ssh_connection(self._hostname, self._username, self._key_filename, self._key_password)
base64_file_hash = self.ssh_call_command('ssh -t {0} -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no "base64 {1}"'.format(hostname, file_path))
self.close_ssh_connection()
lines = base64_file_hash.splitlines()
self._logger.info('Base64 hash for {0} file on {1} machine has been generated.'.format(file_path, hostname))
return '"{}"'.format(''.join(lines[2:-2]))
def get_all_deployments_conf(self):
result = {}
deployments_settings = json.loads(requests.get('http://' + self._local_bind_address + ':'
+ str(self._local_bind_port) + '/api/v10/cm/deployment',
auth=(self._cdh_manager_user, self._cdh_manager_password)).content)
result['cloudera_manager_internal_host'] = self.extract_cdh_manager_details(deployments_settings)['hostname']
if self._is_kerberos:
result['kerberos_host'] = result['cloudera_manager_internal_host']
result['hdfs_keytab_value'] = self.generate_keytab('hdfs')
result['auth_gateway_keytab_value'] = self.generate_keytab('authgateway/sys')
result['hgm_keytab_value'] = self.generate_keytab('hgm/sys')
result['vcap_keytab_value'] = self.generate_keytab('vcap')
result['krb5_base64'] = self.generate_base64_for_file('/etc/krb5.conf', self._cdh_manager_ip)
result['kerberos_cacert'] = self.generate_base64_for_file('/var/krb5kdc/cacert.pem', self._cdh_manager_ip)
helper = CdhApiHelper(ApiResource(self._local_bind_address, username=self._cdh_manager_user, password=self._cdh_manager_password, version=9))
hgm_service = helper.get_service_from_cdh('HADOOPGROUPSMAPPING')
result['hgm_adress'] = 'http://' + helper.get_host(hgm_service, 'HADOOPGROUPSMAPPING-HADOOPGROUPSMAPPING_RESTSERVER') + ':' \
+ helper.get_entry_from_group(hgm_service, 'rest_port', 'HADOOPGROUPSMAPPING-HADOOPGROUPSMAPPING_RESTSERVER-BASE')
result['hgm_password'] = helper.get_entry_from_group(hgm_service, 'basic_auth_pass', 'HADOOPGROUPSMAPPING-HADOOPGROUPSMAPPING_RESTSERVER-BASE')
result['hgm_username'] = helper.get_entry_from_group(hgm_service, 'basic_auth_user', 'HADOOPGROUPSMAPPING-HADOOPGROUPSMAPPING_RESTSERVER-BASE')
sentry_service = helper.get_service_from_cdh('SENTRY')
result['sentry_port'] = helper.get_entry(sentry_service, 'sentry_service_server_rpc_port')
result['sentry_address'] = helper.get_host(sentry_service)
result['sentry_keytab_value'] = self.generate_keytab('hive/sys')
result['auth_gateway_profile'] = 'cloud,zookeeper-auth-gateway,hdfs-auth-gateway,kerberos-hgm-auth-gateway,sentry-auth-gateway'
else:
result['sentry_port'] = "''"
result['sentry_address'] = "''"
result['sentry_keytab_value'] = "''"
result['hdfs_keytab_value'] = "''"
result['auth_gateway_keytab_value'] = "''"
result['vcap_keytab_value'] = '""'
result['hgm_keytab_value'] = '""'
result['krb5_base64'] = '""'
result['kerberos_cacert'] = '""'
result['auth_gateway_profile'] = 'cloud,zookeeper-auth-gateway,hdfs-auth-gateway,https-hgm-auth-gateway'
helper = CdhApiHelper(ApiResource(self._local_bind_address, username=self._cdh_manager_user, password=self._cdh_manager_password, version=9))
hgm_service = helper.get_service_from_cdh('HADOOPGROUPSMAPPING')
result['hgm_adress'] = 'https://' + helper.get_host(hgm_service, 'HADOOPGROUPSMAPPING-HADOOPGROUPSMAPPING_RESTSERVER') + ':' \
+ helper.get_entry_from_group(hgm_service, 'rest_port', 'HADOOPGROUPSMAPPING-HADOOPGROUPSMAPPING_RESTSERVER-BASE')
result['hgm_password'] = helper.get_entry_from_group(hgm_service, 'basic_auth_pass', 'HADOOPGROUPSMAPPING-HADOOPGROUPSMAPPING_RESTSERVER-BASE')
result['hgm_username'] = helper.get_entry_from_group(hgm_service, 'basic_auth_user', 'HADOOPGROUPSMAPPING-HADOOPGROUPSMAPPING_RESTSERVER-BASE')
master_nodes = self.extract_nodes_info('cdh-master', deployments_settings)
for i, node in enumerate(master_nodes):
result['master_node_host_' + str(i+1)] = node['hostname']
result['namenode_internal_host'] = self.extract_service_namenode('HDFS', 'HDFS-NAMENODE', deployments_settings)
result['hue_node'] = self.extract_service_namenode('HUE', 'HUE-HUE_SERVER', deployments_settings)
result['h2o_node'] = self.extract_nodes_info('cdh-worker-0', deployments_settings)[0]['hostname']
result['arcadia_node'] = self.extract_nodes_info('cdh-worker-0', deployments_settings)[0]['hostname']
result['import_hadoop_conf_hdfs'] = self.get_client_config_for_service('HDFS')
result['import_hadoop_conf_hbase'] = self.get_client_config_for_service('HBASE')
result['import_hadoop_conf_yarn'] = self.get_client_config_for_service('YARN')
return result
# helpful methods
def _find_item_by_attr_value(self, attr_value, attr_name, array_with_dicts):
return next(item for item in array_with_dicts if item[attr_name] == attr_value)
def _get_host_ip(self, host, ansible_ini):
host_info = []
for line in ansible_ini.split('\n'):
if host in line:
host_info.append(line.strip())
return host_info[host_info.index('[' + host + ']') + 1].split(' ')[1].split('=')[1]
def _load_config_yaml(self, filename):
        with open(filename, 'r') as stream:
            # safe_load avoids constructing arbitrary Python objects from the YAML
            return yaml.safe_load(stream)
class CdhApiHelper(object):
def __init__(self, cdhApi):
self.cdhApi = cdhApi
def get_service_from_cdh(self, name):
cluster = self.cdhApi.get_all_clusters()[0]
try:
return next(service for service in cluster.get_all_services() if service.type == name)
except StopIteration:
raise NoCdhServiceError('No {} in CDH services.'.format(name))
# get host ip for service or specified service role
def get_host(self, service, role = None):
if role is None:
id = service.get_all_roles()[0].hostRef.hostId
else:
id = service.get_role(role).hostRef.hostId
return self.cdhApi.get_host(id).hostname
def get_entry(self, service, name):
config = service.get_all_roles()[0].get_config('full')
for config_entry in config:
if name == config_entry:
entry = config[config_entry].value or config[config_entry].default
return entry
def get_entry_from_group(self, service, name, group):
config = service.get_role_config_group(group).get_config('full')
for config_entry in config:
if name == config_entry:
entry = config[config_entry].value or config[config_entry].default
return entry
class NoCdhServiceError(Exception):
pass
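# Illustrative usage sketch (not part of the original module). Assuming a
# valid fetcher_config.yml next to the script, the extractor is meant to be
# used as a context manager so the optional SSH tunnel is opened and closed
# around the Cloudera Manager API calls:
#
#   if __name__ == '__main__':
#       with CdhConfExtractor('fetcher_config.yml') as extractor:
#           conf = extractor.get_all_deployments_conf()
#           print(conf['namenode_internal_host'])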
| apache-2.0 |
richard-willowit/odoo | addons/l10n_fr_hr_payroll/report/fiche_paye.py | 14 | 1711 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, models
class FichePayeParser(models.AbstractModel):
_name = 'report.l10n_fr_hr_payroll.report_l10n_fr_fiche_paye'
def get_payslip_lines(self, objs):
res = []
ids = []
for item in objs:
            if item.appears_on_payslip and not item.salary_rule_id.parent_rule_id:
ids.append(item.id)
if ids:
res = self.env['hr.payslip.line'].browse(ids)
return res
def get_total_by_rule_category(self, obj, code):
category_total = 0
category_id = self.env['hr.salary.rule.category'].search([('code', '=', code)], limit=1).id
if category_id:
line_ids = self.env['hr.payslip.line'].search([('slip_id', '=', obj.id), ('category_id', 'child_of', category_id)])
for line in line_ids:
category_total += line.total
return category_total
def get_employer_line(self, obj, parent_line):
return self.env['hr.payslip.line'].search([('slip_id', '=', obj.id), ('salary_rule_id.parent_rule_id.id', '=', parent_line.salary_rule_id.id)], limit=1)
@api.model
def get_report_values(self, docids, data=None):
payslip = self.env['hr.payslip'].browse(docids)
return {
'doc_ids': docids,
'doc_model': 'hr.payslip',
'data': data,
'docs': payslip,
'lang': "fr_FR",
'get_payslip_lines': self.get_payslip_lines,
'get_total_by_rule_category': self.get_total_by_rule_category,
'get_employer_line': self.get_employer_line,
}
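# Illustrative note (an assumption, not part of the original module): Odoo
# resolves QWeb report renderers by model name, so this AbstractModel backs
# the 'l10n_fr_hr_payroll.report_l10n_fr_fiche_paye' report template. A
# minimal direct call for testing might look like:
#
#   payslip_ids = env['hr.payslip'].search([], limit=1).ids
#   report_model = env['report.l10n_fr_hr_payroll.report_l10n_fr_fiche_paye']
#   values = report_model.get_report_values(payslip_ids)
#   assert values['lang'] == 'fr_FR'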
| gpl-3.0 |
BT-ojossen/odoo | addons/mail/mail_group_menu.py | 334 | 2631 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2012-today OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
from openerp.osv import osv
from openerp.osv import fields
class ir_ui_menu(osv.osv):
""" Override of ir.ui.menu class. When adding mail_thread module, each
        new mail.group will create a menu entry. This override checks that
the current user is in the mail.group followers. If not, the menu
entry is taken off the list of menu ids. This way the user will see
menu entries for the mail.group he is following.
"""
_inherit = 'ir.ui.menu'
_columns = {
'mail_group_id': fields.many2one('mail.group', 'Mail Group')
}
def search(self, cr, uid, args, offset=0, limit=None, order=None, context=None, count=False):
""" Remove mail.group menu entries when the user is not a follower."""
ids = super(ir_ui_menu, self).search(cr, uid, args, offset=offset,
limit=limit, order=order,
context=context, count=False)
if ids:
cr.execute("""
SELECT id FROM ir_ui_menu m
WHERE m.mail_group_id IS NULL OR EXISTS (
SELECT 1 FROM mail_followers
WHERE res_model = 'mail.group' AND res_id = m.mail_group_id
AND partner_id = (SELECT partner_id FROM res_users WHERE id = %s)
) AND id in %s
""", (uid, tuple(ids)))
# Preserve original search order
visible_ids = set(x[0] for x in cr.fetchall())
ids = [i for i in ids if i in visible_ids]
if count:
return len(ids)
return ids
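# Illustrative sketch (an assumption, not part of the original module):
# from any old-API method with (cr, uid) available, the override is
# transparent to callers; menus of non-followed groups simply vanish:
#
#   menu_obj = self.pool.get('ir.ui.menu')
#   ids = menu_obj.search(cr, uid, [('mail_group_id', '!=', False)])
#   # 'ids' now contains only menus whose mail.group 'uid' follows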
| agpl-3.0 |
synergeticsedx/deployment-wipro | lms/djangoapps/commerce/api/v1/views.py | 60 | 2633 | """ API v1 views. """
import logging
from django.http import Http404
from edx_rest_api_client import exceptions
from rest_framework.authentication import SessionAuthentication
from rest_framework.views import APIView
from rest_framework.generics import RetrieveUpdateAPIView, ListAPIView
from rest_framework.permissions import IsAuthenticated
from rest_framework_oauth.authentication import OAuth2Authentication
from commerce.api.v1.models import Course
from commerce.api.v1.permissions import ApiKeyOrModelPermission
from commerce.api.v1.serializers import CourseSerializer
from course_modes.models import CourseMode
from openedx.core.djangoapps.commerce.utils import ecommerce_api_client
from openedx.core.lib.api.mixins import PutAsCreateMixin
from util.json_request import JsonResponse
log = logging.getLogger(__name__)
class CourseListView(ListAPIView):
""" List courses and modes. """
authentication_classes = (OAuth2Authentication, SessionAuthentication,)
permission_classes = (IsAuthenticated,)
serializer_class = CourseSerializer
pagination_class = None
def get_queryset(self):
return list(Course.iterator())
class CourseRetrieveUpdateView(PutAsCreateMixin, RetrieveUpdateAPIView):
""" Retrieve, update, or create courses/modes. """
lookup_field = 'id'
lookup_url_kwarg = 'course_id'
model = CourseMode
authentication_classes = (OAuth2Authentication, SessionAuthentication,)
permission_classes = (ApiKeyOrModelPermission,)
serializer_class = CourseSerializer
# Django Rest Framework v3 requires that we provide a queryset.
# Note that we're overriding `get_object()` below to return a `Course`
# rather than a CourseMode, so this isn't really used.
queryset = CourseMode.objects.all()
def get_object(self, queryset=None):
course_id = self.kwargs.get(self.lookup_url_kwarg)
course = Course.get(course_id)
if course:
return course
raise Http404
def pre_save(self, obj):
# There is nothing to pre-save. The default behavior changes the Course.id attribute from
# a CourseKey to a string, which is not desired.
pass
class OrderView(APIView):
""" Retrieve order details. """
authentication_classes = (SessionAuthentication,)
permission_classes = (IsAuthenticated,)
def get(self, request, number):
""" HTTP handler. """
try:
order = ecommerce_api_client(request.user).orders(number).get()
return JsonResponse(order)
except exceptions.HttpNotFoundError:
return JsonResponse(status=404)
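# Illustrative usage sketch (the URL path is an assumption; it depends on
# the project's urls.py). A logged-in user fetching one of their orders
# might issue
#
#   GET /api/commerce/v1/orders/EDX-100001
#
# which routes to OrderView.get(request, number='EDX-100001') and proxies
# the lookup to the ecommerce service, returning 404 on HttpNotFoundError.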
| agpl-3.0 |
tombstone/models | research/fivo/fivo/models/base.py | 4 | 14517 | # Copyright 2018 The TensorFlow Authors All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Reusable model classes for FIVO."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sonnet as snt
import tensorflow as tf
from fivo import nested_utils as nested
tfd = tf.contrib.distributions
class ELBOTrainableSequenceModel(object):
"""An abstract class for ELBO-trainable sequence models to extend.
Because the ELBO, IWAE, and FIVO bounds all accept the same arguments,
any model that is ELBO-trainable is also IWAE- and FIVO-trainable.
"""
def zero_state(self, batch_size, dtype):
"""Returns the initial state of the model as a Tensor or tuple of Tensors.
Args:
batch_size: The batch size.
dtype: The datatype to use for the state.
"""
raise NotImplementedError("zero_state not yet implemented.")
def set_observations(self, observations, seq_lengths):
"""Sets the observations for the model.
This method provides the model with all observed variables including both
inputs and targets. It will be called before running any computations with
the model that require the observations, e.g. training the model or
computing bounds, and should be used to run any necessary preprocessing
steps.
Args:
observations: A potentially nested set of Tensors containing
all observations for the model, both inputs and targets. Typically
a set of Tensors with shape [max_seq_len, batch_size, data_size].
seq_lengths: A [batch_size] Tensor of ints encoding the length of each
sequence in the batch (sequences can be padded to a common length).
"""
self.observations = observations
self.max_seq_len = tf.reduce_max(seq_lengths)
self.observations_ta = nested.tas_for_tensors(
observations, self.max_seq_len, clear_after_read=False)
self.seq_lengths = seq_lengths
def propose_and_weight(self, state, t):
"""Propogates model state one timestep and computes log weights.
This method accepts the current state of the model and computes the state
for the next timestep as well as the incremental log weight of each
element in the batch.
Args:
state: The current state of the model.
t: A scalar integer Tensor representing the current timestep.
Returns:
next_state: The state of the model after one timestep.
log_weights: A [batch_size] Tensor containing the incremental log weights.
"""
raise NotImplementedError("propose_and_weight not yet implemented.")
DEFAULT_INITIALIZERS = {"w": tf.contrib.layers.xavier_initializer(),
"b": tf.zeros_initializer()}
class ConditionalNormalDistribution(object):
"""A Normal distribution conditioned on Tensor inputs via a fc network."""
def __init__(self, size, hidden_layer_sizes, sigma_min=0.0,
raw_sigma_bias=0.25, hidden_activation_fn=tf.nn.relu,
initializers=None, name="conditional_normal_distribution"):
"""Creates a conditional Normal distribution.
Args:
size: The dimension of the random variable.
hidden_layer_sizes: The sizes of the hidden layers of the fully connected
network used to condition the distribution on the inputs.
sigma_min: The minimum standard deviation allowed, a scalar.
raw_sigma_bias: A scalar that is added to the raw standard deviation
output from the fully connected network. Set to 0.25 by default to
prevent standard deviations close to 0.
hidden_activation_fn: The activation function to use on the hidden layers
of the fully connected network.
      initializers: The variable initializers to use for the fully connected
network. The network is implemented using snt.nets.MLP so it must
be a dictionary mapping the keys 'w' and 'b' to the initializers for
the weights and biases. Defaults to xavier for the weights and zeros
for the biases when initializers is None.
name: The name of this distribution, used for sonnet scoping.
"""
self.sigma_min = sigma_min
self.raw_sigma_bias = raw_sigma_bias
self.name = name
self.size = size
if initializers is None:
initializers = DEFAULT_INITIALIZERS
self.fcnet = snt.nets.MLP(
output_sizes=hidden_layer_sizes + [2*size],
activation=hidden_activation_fn,
initializers=initializers,
activate_final=False,
use_bias=True,
name=name + "_fcnet")
def condition(self, tensor_list, **unused_kwargs):
"""Computes the parameters of a normal distribution based on the inputs."""
inputs = tf.concat(tensor_list, axis=1)
outs = self.fcnet(inputs)
mu, sigma = tf.split(outs, 2, axis=1)
sigma = tf.maximum(tf.nn.softplus(sigma + self.raw_sigma_bias),
self.sigma_min)
return mu, sigma
def __call__(self, *args, **kwargs):
"""Creates a normal distribution conditioned on the inputs."""
mu, sigma = self.condition(args, **kwargs)
return tf.contrib.distributions.Normal(loc=mu, scale=sigma)
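# Illustrative usage sketch: conditioning a Normal on two input Tensors.
# The shapes below are assumptions chosen for the example:
#
#   dist_fn = ConditionalNormalDistribution(size=8, hidden_layer_sizes=[32, 32])
#   latents = tf.zeros([16, 4])      # [batch_size, latent_size]
#   inputs = tf.zeros([16, 10])      # [batch_size, input_size]
#   dist = dist_fn(latents, inputs)  # a tfd.Normal with event size 8
#   sample = dist.sample()           # shape [16, 8]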
class ConditionalBernoulliDistribution(object):
"""A Bernoulli distribution conditioned on Tensor inputs via a fc net."""
def __init__(self, size, hidden_layer_sizes, hidden_activation_fn=tf.nn.relu,
initializers=None, bias_init=0.0,
name="conditional_bernoulli_distribution"):
"""Creates a conditional Bernoulli distribution.
Args:
size: The dimension of the random variable.
hidden_layer_sizes: The sizes of the hidden layers of the fully connected
network used to condition the distribution on the inputs.
hidden_activation_fn: The activation function to use on the hidden layers
of the fully connected network.
      initializers: The variable initializers to use for the fully connected
network. The network is implemented using snt.nets.MLP so it must
be a dictionary mapping the keys 'w' and 'b' to the initializers for
the weights and biases. Defaults to xavier for the weights and zeros
for the biases when initializers is None.
bias_init: A scalar or vector Tensor that is added to the output of the
fully-connected network that parameterizes the mean of this
distribution.
name: The name of this distribution, used for sonnet scoping.
"""
self.bias_init = bias_init
self.size = size
if initializers is None:
initializers = DEFAULT_INITIALIZERS
self.fcnet = snt.nets.MLP(
output_sizes=hidden_layer_sizes + [size],
activation=hidden_activation_fn,
initializers=initializers,
activate_final=False,
use_bias=True,
name=name + "_fcnet")
def condition(self, tensor_list):
"""Computes the p parameter of the Bernoulli distribution."""
inputs = tf.concat(tensor_list, axis=1)
return self.fcnet(inputs) + self.bias_init
def __call__(self, *args):
p = self.condition(args)
return tf.contrib.distributions.Bernoulli(logits=p)
class NormalApproximatePosterior(ConditionalNormalDistribution):
"""A Normally-distributed approx. posterior with res_q parameterization."""
def __init__(self, size, hidden_layer_sizes, sigma_min=0.0,
raw_sigma_bias=0.25, hidden_activation_fn=tf.nn.relu,
initializers=None, smoothing=False,
name="conditional_normal_distribution"):
super(NormalApproximatePosterior, self).__init__(
size, hidden_layer_sizes, sigma_min=sigma_min,
raw_sigma_bias=raw_sigma_bias,
hidden_activation_fn=hidden_activation_fn, initializers=initializers,
name=name)
self.smoothing = smoothing
def condition(self, tensor_list, prior_mu, smoothing_tensors=None):
"""Generates the mean and variance of the normal distribution.
Args:
tensor_list: The list of Tensors to condition on. Will be concatenated and
fed through a fully connected network.
prior_mu: The mean of the prior distribution associated with this
approximate posterior. Will be added to the mean produced by
this approximate posterior, in res_q fashion.
smoothing_tensors: A list of Tensors. If smoothing is True, these Tensors
will be concatenated with the tensors in tensor_list.
Returns:
mu: The mean of the approximate posterior.
sigma: The standard deviation of the approximate posterior.
"""
if self.smoothing:
tensor_list.extend(smoothing_tensors)
mu, sigma = super(NormalApproximatePosterior, self).condition(tensor_list)
return mu + prior_mu, sigma
class NonstationaryLinearDistribution(object):
"""A set of loc-scale distributions that are linear functions of inputs.
This class defines a series of location-scale distributions such that
the means are learnable linear functions of the inputs and the log variances
are learnable constants. The functions and log variances are different across
timesteps, allowing the distributions to be nonstationary.
"""
def __init__(self,
num_timesteps,
inputs_per_timestep=None,
outputs_per_timestep=None,
initializers=None,
variance_min=0.0,
output_distribution=tfd.Normal,
dtype=tf.float32):
"""Creates a NonstationaryLinearDistribution.
Args:
num_timesteps: The number of timesteps, i.e. the number of distributions.
inputs_per_timestep: A list of python ints, the dimension of inputs to the
linear function at each timestep. If not provided, the dimension at each
timestep is assumed to be 1.
outputs_per_timestep: A list of python ints, the dimension of the output
distribution at each timestep. If not provided, the dimension at each
timestep is assumed to be 1.
      initializers: A dictionary containing initializers for the variables. The
initializer under the key 'w' is used for the weights in the linear
function and the initializer under the key 'b' is used for the biases.
Defaults to xavier initialization for the weights and zeros for the
biases.
variance_min: Python float, the minimum variance of each distribution.
      output_distribution: A location-scale subclass of tfd.Distribution that
defines the output distribution, e.g. Normal.
dtype: The dtype of the weights and biases.
"""
if not initializers:
initializers = DEFAULT_INITIALIZERS
if not inputs_per_timestep:
inputs_per_timestep = [1] * num_timesteps
if not outputs_per_timestep:
outputs_per_timestep = [1] * num_timesteps
self.num_timesteps = num_timesteps
self.variance_min = variance_min
self.initializers = initializers
self.dtype = dtype
self.output_distribution = output_distribution
def _get_variables_ta(shapes, name, initializer, trainable=True):
"""Creates a sequence of variables and stores them in a TensorArray."""
# Infer shape if all shapes are equal.
first_shape = shapes[0]
infer_shape = all(shape == first_shape for shape in shapes)
ta = tf.TensorArray(
dtype=dtype, size=len(shapes), dynamic_size=False,
clear_after_read=False, infer_shape=infer_shape)
for t, shape in enumerate(shapes):
var = tf.get_variable(
name % t, shape=shape, initializer=initializer, trainable=trainable)
ta = ta.write(t, var)
return ta
bias_shapes = [[num_outputs] for num_outputs in outputs_per_timestep]
self.log_variances = _get_variables_ta(
bias_shapes, "proposal_log_variance_%d", initializers["b"])
self.mean_biases = _get_variables_ta(
bias_shapes, "proposal_b_%d", initializers["b"])
weight_shapes = zip(inputs_per_timestep, outputs_per_timestep)
self.mean_weights = _get_variables_ta(
weight_shapes, "proposal_w_%d", initializers["w"])
self.shapes = tf.TensorArray(
dtype=tf.int32, size=num_timesteps,
dynamic_size=False, clear_after_read=False).unstack(weight_shapes)
def __call__(self, t, inputs):
"""Computes the distribution at timestep t.
Args:
t: Scalar integer Tensor, the current timestep. Must be in
[0, num_timesteps).
inputs: The inputs to the linear function parameterizing the mean of
the current distribution. A Tensor of shape [batch_size, num_inputs_t].
Returns:
A tfd.Distribution subclass representing the distribution at timestep t.
"""
b = self.mean_biases.read(t)
w = self.mean_weights.read(t)
shape = self.shapes.read(t)
w = tf.reshape(w, shape)
b = tf.reshape(b, [shape[1], 1])
log_variance = self.log_variances.read(t)
scale = tf.sqrt(tf.maximum(tf.exp(log_variance), self.variance_min))
loc = tf.matmul(w, inputs, transpose_a=True) + b
return self.output_distribution(loc=loc, scale=scale)
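# Illustrative usage sketch for the nonstationary proposal above. Note that
# __call__ expects inputs shaped [num_inputs_t, batch_size] (the mean is
# computed as w^T inputs + b), which is an easy detail to miss:
#
#   proposal = NonstationaryLinearDistribution(num_timesteps=5)
#   inputs_t = tf.ones([1, 32])                # [num_inputs_t, batch_size]
#   dist = proposal(t=tf.constant(2), inputs=inputs_t)
#   sample = dist.sample()                     # [num_outputs_t, batch_size]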
def encode_all(inputs, encoder):
"""Encodes a timeseries of inputs with a time independent encoder.
Args:
inputs: A [time, batch, feature_dimensions] tensor.
encoder: A network that takes a [batch, features_dimensions] input and
encodes the input.
Returns:
A [time, batch, encoded_feature_dimensions] output tensor.
"""
input_shape = tf.shape(inputs)
num_timesteps, batch_size = input_shape[0], input_shape[1]
reshaped_inputs = tf.reshape(inputs, [-1, inputs.shape[-1]])
inputs_encoded = encoder(reshaped_inputs)
inputs_encoded = tf.reshape(inputs_encoded,
[num_timesteps, batch_size, encoder.output_size])
return inputs_encoded
def ta_for_tensor(x, **kwargs):
"""Creates a TensorArray for the input tensor."""
return tf.TensorArray(
x.dtype, tf.shape(x)[0], dynamic_size=False, **kwargs).unstack(x)
| apache-2.0 |
rhdedgar/openshift-tools | openshift/installer/vendored/openshift-ansible-3.6.173/roles/lib_openshift/src/class/oc_serviceaccount_secret.py | 66 | 4640 | # pylint: skip-file
# flake8: noqa
class OCServiceAccountSecret(OpenShiftCLI):
''' Class to wrap the oc command line tools '''
kind = 'sa'
def __init__(self, config, verbose=False):
''' Constructor for OpenshiftOC '''
super(OCServiceAccountSecret, self).__init__(config.namespace, kubeconfig=config.kubeconfig, verbose=verbose)
self.config = config
self.verbose = verbose
self._service_account = None
@property
def service_account(self):
''' Property for the service account '''
if not self._service_account:
self.get()
return self._service_account
@service_account.setter
def service_account(self, data):
''' setter for the service account '''
self._service_account = data
def exists(self, in_secret):
''' verifies if secret exists in the service account '''
result = self.service_account.find_secret(in_secret)
if not result:
return False
return True
def get(self):
''' get the service account definition from the master '''
sao = self._get(OCServiceAccountSecret.kind, self.config.name)
if sao['returncode'] == 0:
self.service_account = ServiceAccount(content=sao['results'][0])
sao['results'] = self.service_account.get('secrets')
return sao
def delete(self):
''' delete secrets '''
modified = []
for rem_secret in self.config.secrets:
modified.append(self.service_account.delete_secret(rem_secret))
if any(modified):
return self._replace_content(OCServiceAccountSecret.kind, self.config.name, self.service_account.yaml_dict)
return {'returncode': 0, 'changed': False}
def put(self):
''' place secrets into sa '''
modified = False
for add_secret in self.config.secrets:
if not self.service_account.find_secret(add_secret):
self.service_account.add_secret(add_secret)
modified = True
if modified:
return self._replace_content(OCServiceAccountSecret.kind, self.config.name, self.service_account.yaml_dict)
return {'returncode': 0, 'changed': False}
@staticmethod
# pylint: disable=too-many-return-statements,too-many-branches
# TODO: This function should be refactored into its individual parts.
def run_ansible(params, check_mode):
''' run the ansible idempotent code '''
sconfig = ServiceAccountConfig(params['service_account'],
params['namespace'],
params['kubeconfig'],
[params['secret']],
None)
oc_sa_sec = OCServiceAccountSecret(sconfig, verbose=params['debug'])
state = params['state']
api_rval = oc_sa_sec.get()
#####
# Get
#####
if state == 'list':
return {'changed': False, 'results': api_rval['results'], 'state': "list"}
########
# Delete
########
if state == 'absent':
if oc_sa_sec.exists(params['secret']):
if check_mode:
                    return {'changed': True, 'msg': 'Would have removed the '
                                                    'secret from the service account.'}
api_rval = oc_sa_sec.delete()
return {'changed': True, 'results': api_rval, 'state': "absent"}
return {'changed': False, 'state': "absent"}
if state == 'present':
########
# Create
########
if not oc_sa_sec.exists(params['secret']):
if check_mode:
return {'changed': True, 'msg': 'Would have added the ' + \
'secret to the service account.'}
# Create it here
api_rval = oc_sa_sec.put()
if api_rval['returncode'] != 0:
return {'failed': True, 'msg': api_rval}
# return the created object
api_rval = oc_sa_sec.get()
if api_rval['returncode'] != 0:
return {'failed': True, 'msg': api_rval}
return {'changed': True, 'results': api_rval, 'state': "present"}
return {'changed': False, 'results': api_rval, 'state': "present"}
return {'failed': True,
'changed': False,
'msg': 'Unknown state passed. %s' % state,
'state': 'unknown'}
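# Illustrative sketch (the parameter values are assumptions): run_ansible
# expects the Ansible module's parameter dict, e.g.:
#
#   params = {
#       'state': 'present',
#       'namespace': 'default',
#       'service_account': 'builder',
#       'secret': 'my-registry-secret',
#       'kubeconfig': '/etc/origin/master/admin.kubeconfig',
#       'debug': False,
#   }
#   result = OCServiceAccountSecret.run_ansible(params, check_mode=False)
#   # -> {'changed': True, 'results': ..., 'state': 'present'} on success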
| apache-2.0 |
toontownfunserver/Panda3D-1.9.0 | direct/leveleditor/AnimMgrBase.py | 3 | 17489 | """
Defines AnimMgrBase
"""
import os, wx, math
from direct.interval.IntervalGlobal import *
from panda3d.core import VBase3,VBase4
import ObjectGlobals as OG
import AnimGlobals as AG
class AnimMgrBase:
""" AnimMgr will create, manage, update animations in the scene """
def __init__(self, editor):
self.editor = editor
self.graphEditorCounter = 0
self.keyFramesInfo = {}
self.curveAnimation = {}
#normal properties
self.lerpFuncs={
'H' : self.lerpFuncH,
'P' : self.lerpFuncP,
'R' : self.lerpFuncR,
'SX' : self.lerpFuncSX,
'SY' : self.lerpFuncSY,
'SZ' : self.lerpFuncSZ,
'CR' : self.lerpFuncCR,
'CG' : self.lerpFuncCG,
'CB' : self.lerpFuncCB,
'CA' : self.lerpFuncCA
}
        #Properties which have animation curves
self.curveLerpFuncs={
'X' : [ self.lerpFuncX, self.lerpCurveFuncX ],
'Y' : [ self.lerpFuncY, self.lerpCurveFuncY ],
'Z' : [ self.lerpFuncZ, self.lerpCurveFuncZ ]
}
def reset(self):
self.keyFramesInfo = {}
self.curveAnimation = {}
def generateKeyFrames(self):
#generate keyFrame list
self.keyFrames = []
for property in self.keyFramesInfo.keys():
for frameInfo in self.keyFramesInfo[property]:
                frame = frameInfo[AG.FRAME]
                if frame not in self.keyFrames:
                    self.keyFrames.append(frame)
    def generateSlope(self, frames):
        #generate tangent slopes for every keyframe of the animation curve
        listLen = len(frames)
        if listLen == 2:
            slope = [float(frames[1][AG.FRAME]-frames[0][AG.FRAME]),(float(frames[1][AG.VALUE])-float(frames[0][AG.VALUE]))]
            frames[0][AG.INSLOPE] = slope
            frames[1][AG.INSLOPE] = slope
            frames[0][AG.OUTSLOPE] = frames[0][AG.INSLOPE]
            frames[1][AG.OUTSLOPE] = frames[1][AG.INSLOPE]
            return
        if listLen >= 3:
            frames[0][AG.INSLOPE] = [float(frames[1][AG.FRAME] - frames[0][AG.FRAME]),(float(frames[1][AG.VALUE]) - float(frames[0][AG.VALUE]))]
            frames[0][AG.OUTSLOPE] = frames[0][AG.INSLOPE]
            for i in range(1, listLen-1):
                frames[i][AG.INSLOPE] = [float(frames[i+1][AG.FRAME] - frames[i-1][AG.FRAME]),(float(frames[i+1][AG.VALUE]) - float(frames[i-1][AG.VALUE]))]
                frames[i][AG.OUTSLOPE] = frames[i][AG.INSLOPE]
            frames[listLen-1][AG.INSLOPE] = [float(frames[listLen-1][AG.FRAME] - frames[listLen-2][AG.FRAME]),(float(frames[listLen-1][AG.VALUE]) - float(frames[listLen-2][AG.VALUE]))]
            frames[listLen-1][AG.OUTSLOPE] = frames[listLen-1][AG.INSLOPE]
            return
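    # Illustrative example of the central-difference slopes computed above
    # (frames on x, values on y). For keyframes (0, 0), (10, 5), (20, 0):
    #   first:  [10 - 0,  5 - 0]  -> [10.0,  5.0]
    #   middle: [20 - 0,  0 - 0]  -> [20.0,  0.0]
    #   last:   [20 - 10, 0 - 5]  -> [10.0, -5.0]
    # and each keyframe's OUTSLOPE mirrors its INSLOPE.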
def removeAnimInfo(self, uid):
for property in self.keyFramesInfo.keys():
if property[AG.UID] == uid:
del self.keyFramesInfo[property]
self.generateKeyFrames()
if self.editor.mode == self.editor.ANIM_MODE:
self.editor.ui.animUI.OnPropKey()
def singleCurveAnimation(self, nodePath, curve, time):
rope = curve[OG.OBJ_NP]
self.points = rope.getPoints(time)
self.hprs = []
temp = render.attachNewNode("temp")
temp.setHpr(0,0,0)
for i in range(len(self.points)-1):
temp.setPos(self.points[i])
temp.lookAt(self.points[i+1])
hpr = temp.getHpr()
## self.hprs.append(hpr)
self.hprs.append(VBase3(hpr[0]+180,hpr[1],hpr[2]))
self.hprs.append(self.hprs[len(self.points)-2])
curveSequenceName = str(nodePath[OG.OBJ_UID])+' '+str(curve[OG.OBJ_UID])+' '+str(time)
self.curveSequence = Sequence(name = curveSequenceName)
for i in range(len(self.points)-1):
myLerp = LerpPosHprInterval(nodePath[OG.OBJ_NP], float(1)/float(24), self.points[i+1], self.hprs[i+1], self.points[i], self.hprs[i])
self.curveSequence.append(myLerp)
return self.curveSequence
def createParallel(self, startFrame, endFrame):
self.parallel = []
self.parallel = Parallel(name="Current Parallel")
self.createCurveAnimation(self.parallel)
self.createActorAnimation(self.parallel, startFrame, endFrame)
self.createKeyFrameAnimation(self.parallel, startFrame, endFrame)
self.createCurveKeyFrameAnimation(self.parallel, startFrame, endFrame)
return self.parallel
def createCurveAnimation(self, parallel):
for key in self.curveAnimation:
curveInfo = self.curveAnimation[key]
nodePath = self.editor.objectMgr.findObjectById(curveInfo[AG.NODE])
curve = self.editor.objectMgr.findObjectById(curveInfo[AG.CURVE])
time = curveInfo[AG.TIME]
sequence = self.singleCurveAnimation(nodePath, curve, time)
parallel.append(sequence)
def createActorAnimation(self, parallel, startFrame, endFrame):
self.editor.objectMgr.findActors(render)
for actor in self.editor.objectMgr.Actor:
actorAnim = os.path.basename(actor[OG.OBJ_ANIM])
myInterval = ActorInterval(actor[OG.OBJ_NP], actorAnim, loop=1, duration = float(endFrame-startFrame+1)/float(24))
parallel.append(myInterval)
def createKeyFrameAnimation(self, parallel, startFrame, endFrame):
#generate key frame animation for normal property
self.editor.objectMgr.findNodes(render)
for node in self.editor.objectMgr.Nodes:
for property in self.keyFramesInfo.keys():
if property[AG.UID] == node[OG.OBJ_UID] and property[AG.PROP_NAME] != 'X' and property[AG.PROP_NAME] != 'Y' and property[AG.PROP_NAME] != 'Z':
mysequence = Sequence(name = node[OG.OBJ_UID])
keyFramesInfo = self.keyFramesInfo[property]
if len(keyFramesInfo) == 1:
myLerp = LerpFunc(self.lerpFuncs[property[AG.PROP_NAME]],fromData=float(keyFramesInfo[0][AG.VALUE]),toData=float(keyFramesInfo[0][AG.VALUE]),duration = float(endFrame-startFrame)/float(24),blendType = 'noBlend',extraArgs = [node[OG.OBJ_NP]])
mysequence.append(myLerp)
parallel.append(mysequence)
if len(keyFramesInfo) != 1:
myLerp = LerpFunc(self.lerpFuncs[property[AG.PROP_NAME]],fromData=float(keyFramesInfo[0][AG.VALUE]),toData=float(keyFramesInfo[0][AG.VALUE]),duration = float(keyFramesInfo[0][AG.FRAME]-startFrame)/float(24),blendType = 'noBlend',extraArgs = [node[OG.OBJ_NP]])
mysequence.append(myLerp)
for key in range(0,len(keyFramesInfo)-1):
myLerp = LerpFunc(self.lerpFuncs[property[AG.PROP_NAME]],fromData=float(keyFramesInfo[key][AG.VALUE]),toData=float(keyFramesInfo[key+1][AG.VALUE]),duration = float(keyFramesInfo[key+1][AG.FRAME]-keyFramesInfo[key][AG.FRAME])/float(24),blendType = 'noBlend',extraArgs = [node[OG.OBJ_NP]])
mysequence.append(myLerp)
myLerp = LerpFunc(self.lerpFuncs[property[AG.PROP_NAME]],fromData=float(keyFramesInfo[len(keyFramesInfo)-1][AG.VALUE]),toData=float(keyFramesInfo[len(keyFramesInfo)-1][AG.VALUE]),duration = float(endFrame-keyFramesInfo[len(keyFramesInfo)-1][AG.FRAME])/float(24),blendType = 'noBlend',extraArgs = [node[OG.OBJ_NP]])
mysequence.append(myLerp)
parallel.append(mysequence)
def createCurveKeyFrameAnimation(self, parallel, startFrame, endFrame):
        #generate keyframe animation for properties controlled by an animation curve
self.editor.objectMgr.findNodes(render)
for node in self.editor.objectMgr.Nodes:
for property in self.keyFramesInfo.keys():
if property[AG.UID] == node[OG.OBJ_UID]:
if property[AG.PROP_NAME] == 'X' or property[AG.PROP_NAME] == 'Y' or property[AG.PROP_NAME] == 'Z':
mysequence = Sequence(name = node[OG.OBJ_UID])
keyFramesInfo = self.keyFramesInfo[property]
if len(keyFramesInfo) == 1:
myLerp = LerpFunc(self.curveLerpFuncs[property[AG.PROP_NAME]][0],fromData=float(keyFramesInfo[0][AG.VALUE]),toData=float(keyFramesInfo[0][AG.VALUE]),duration = float(endFrame-startFrame)/float(24),blendType = 'noBlend',extraArgs = [node[OG.OBJ_NP]])
mysequence.append(myLerp)
parallel.append(mysequence)
if len(keyFramesInfo) == 2:
myLerp = LerpFunc(self.curveLerpFuncs[property[AG.PROP_NAME]][0],fromData=float(keyFramesInfo[0][AG.VALUE]),toData=float(keyFramesInfo[0][AG.VALUE]),duration = float(keyFramesInfo[0][AG.FRAME]-startFrame)/float(24),blendType = 'noBlend',extraArgs = [node[OG.OBJ_NP]])
mysequence.append(myLerp)
for key in range(0,len(keyFramesInfo)-1):
self.keyFrameInfoForSingleLerp = keyFramesInfo
self.keyInfoForSingleLerp = key
myLerp = LerpFunc(self.curveLerpFuncs[property[AG.PROP_NAME]][0],fromData=float(keyFramesInfo[key][AG.VALUE]),toData=float(keyFramesInfo[key+1][AG.VALUE]),duration = float(keyFramesInfo[key+1][AG.FRAME]-keyFramesInfo[key][AG.FRAME])/float(24),blendType = 'noBlend',extraArgs = [node[OG.OBJ_NP]])
mysequence.append(myLerp)
myLerp = LerpFunc(self.curveLerpFuncs[property[AG.PROP_NAME]][0],fromData=float(keyFramesInfo[len(keyFramesInfo)-1][AG.VALUE]),toData=float(keyFramesInfo[len(keyFramesInfo)-1][AG.VALUE]),duration = float(endFrame-keyFramesInfo[len(keyFramesInfo)-1][AG.FRAME])/float(24),blendType = 'noBlend',extraArgs = [node[OG.OBJ_NP]])
mysequence.append(myLerp)
parallel.append(mysequence)
if len(keyFramesInfo) > 2:
myLerp = LerpFunc(self.curveLerpFuncs[property[AG.PROP_NAME]][0],fromData=float(keyFramesInfo[0][AG.VALUE]),toData=float(keyFramesInfo[0][1]),duration = float(keyFramesInfo[0][AG.FRAME]-startFrame)/float(24),blendType = 'noBlend',extraArgs = [node[OG.OBJ_NP]])
mysequence.append(myLerp)
for key in range(0,len(keyFramesInfo)-1):
myLerp = LerpFunc(self.curveLerpFuncs[property[AG.PROP_NAME]][1],fromData=float(keyFramesInfo[key][AG.FRAME]),toData=float(keyFramesInfo[key+1][AG.FRAME]),duration = float(keyFramesInfo[key+1][AG.FRAME]-keyFramesInfo[key][AG.FRAME])/float(24),blendType = 'noBlend',extraArgs = [[node[OG.OBJ_NP], keyFramesInfo, key]])
mysequence.append(myLerp)
myLerp = LerpFunc(self.curveLerpFuncs[property[AG.PROP_NAME]][0],fromData=float(keyFramesInfo[len(keyFramesInfo)-1][AG.VALUE]),toData=float(keyFramesInfo[len(keyFramesInfo)-1][AG.VALUE]),duration = float(endFrame-keyFramesInfo[len(keyFramesInfo)-1][AG.FRAME])/float(24),blendType = 'noBlend',extraArgs = [node[OG.OBJ_NP]])
mysequence.append(myLerp)
parallel.append(mysequence)
    def getPos(self, x, frames, i):
        #get the value from animation curve
        x1 = float(frames[i][AG.FRAME])
        y1 = float(frames[i][AG.VALUE])
        x4 = float(frames[i+1][AG.FRAME])
        y4 = float(frames[i+1][AG.VALUE])
        t1x = frames[i][AG.OUTSLOPE][0]
        t1y = frames[i][AG.OUTSLOPE][1]
        t2x = frames[i+1][AG.INSLOPE][0]
        t2y = frames[i+1][AG.INSLOPE][1]
x2 = x1 + (x4 - x1) / float(3)
scale1 = (x2 - x1) / t1x
y2 = y1 + t1y * scale1
x3 = x4 - (x4 - x1) / float(3)
scale2 = (x4 - x3) / t2x
y3 = y4 - t2y * scale2
ax = - float(1) * x1 + float(3) * x2 - float(3) * x3 + float(1) * x4
bx = float(3) * x1 - float(6) * x2 + float(3) * x3 + float(0) * x4
cx = - float(3) * x1 + float(3) * x2 + float(0) * x3 + float(0) * x4
dx = float(1) * x1 + float(0) * x2 - float(0) * x3 + float(0) * x4
ay = - float(1) * y1 + float(3) * y2 - float(3) * y3 + float(1) * y4
by = float(3) * y1 - float(6) * y2 + float(3) * y3 + float(0) * y4
cy = - float(3) * y1 + float(3) * y2 + float(0) * y3 + float(0) * y4
dy = float(1) * y1 + float(0) * y2 - float(0) * y3 + float(0) * y4
if ax == 0 and bx == 0 and cx == 0:
return 0
if ax == 0 and bx == 0 and cx != 0:
a = cx
b = dx-x
t = -b/a
y = ay * t*t*t + by * t*t + cy * t + dy
return y
        if ax == 0 and bx != 0:
            #quadratic case: solve bx*t^2 + cx*t + (dx - x) = 0 for t
            a = bx
            b = cx
            c = dx - x
            t = (-b + math.sqrt(b**2 - 4.0*a*c)) / (2*a)
            if 0 <= t <= 1:
                y = ay * t*t*t + by * t*t + cy * t + dy
                return y
            else:
                t = (-b - math.sqrt(b**2 - 4.0*a*c)) / (2*a)
                y = ay * t*t*t + by * t*t + cy * t + dy
                return y
if ax != 0:
a = ax
b = bx
c = cx
d = dx - float(x)
t = self.calculateT(a, b, c, d, x)
y = ay * t*t*t + by * t*t + cy * t + dy
return y
def calculateT(self, a, b, c, d, x):
        #solve a*t^3 + b*t^2 + c*t + d = 0 for t using Newton's method
t = float(1)
t2 = t
t -= (a*t*t*t+b*t*t+c*t+d)/(float(3)*a*t*t+float(2)*b*t+c)
if abs(t-t2) <= 0.000001:
return t
else:
while abs(t - t2) > 0.000001:
t2 = t
t -= (a*t*t*t+b*t*t+c*t+d)/(float(3)*a*t*t+float(2)*b*t+c)
return t
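    # Illustrative sketch (not part of the original editor): getPos() and
    # calculateT() together evaluate a cubic Bezier segment between two
    # keyframes. Assuming the AG index layout [FRAME, VALUE, INSLOPE,
    # OUTSLOPE], a made-up two-key curve could be sampled like this:
    #
    #   kf = [[0, 0.0, None, None], [24, 10.0, None, None]]
    #   mgr.generateSlope(kf)        # fills in the handle slopes
    #   mid = mgr.getPos(12, kf, 0)  # curve value halfway between the keys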
def lerpFuncX(self,pos,np):
np.setX(pos)
def lerpFuncY(self,pos,np):
np.setY(pos)
def lerpFuncZ(self,pos,np):
np.setZ(pos)
def lerpCurveFuncX(self,t,extraArgs):
np = extraArgs[0]
pos = self.getPos(t, extraArgs[1], extraArgs[2])
np.setX(pos)
def lerpCurveFuncY(self,t,extraArgs):
np = extraArgs[0]
pos = self.getPos(t, extraArgs[1], extraArgs[2])
np.setY(pos)
def lerpCurveFuncZ(self,t,extraArgs):
np = extraArgs[0]
pos = self.getPos(t, extraArgs[1], extraArgs[2])
np.setZ(pos)
def lerpFuncH(self,angle,np):
np.setH(angle)
def lerpFuncP(self,angle,np):
np.setP(angle)
def lerpFuncR(self,angle,np):
np.setR(angle)
def lerpFuncSX(self,scale,np):
np.setSx(scale)
def lerpFuncSY(self,scale,np):
np.setSy(scale)
def lerpFuncSZ(self,scale,np):
np.setSz(scale)
def lerpFuncCR(self,R,np):
obj = self.editor.objectMgr.findObjectByNodePath(np)
r = obj[OG.OBJ_RGBA][0]
g = obj[OG.OBJ_RGBA][1]
b = obj[OG.OBJ_RGBA][2]
a = obj[OG.OBJ_RGBA][3]
self.colorUpdate(R,g,b,a,np)
def lerpFuncCG(self,G,np):
obj = self.editor.objectMgr.findObjectByNodePath(np)
r = obj[OG.OBJ_RGBA][0]
g = obj[OG.OBJ_RGBA][1]
b = obj[OG.OBJ_RGBA][2]
a = obj[OG.OBJ_RGBA][3]
self.colorUpdate(r,G,b,a,np)
def lerpFuncCB(self,B,np):
obj = self.editor.objectMgr.findObjectByNodePath(np)
r = obj[OG.OBJ_RGBA][0]
g = obj[OG.OBJ_RGBA][1]
b = obj[OG.OBJ_RGBA][2]
a = obj[OG.OBJ_RGBA][3]
self.colorUpdate(r,g,B,a,np)
def lerpFuncCA(self,A,np):
obj = self.editor.objectMgr.findObjectByNodePath(np)
r = obj[OG.OBJ_RGBA][0]
g = obj[OG.OBJ_RGBA][1]
b = obj[OG.OBJ_RGBA][2]
a = obj[OG.OBJ_RGBA][3]
self.colorUpdate(r,g,b,A,np)
def colorUpdate(self, r, g, b, a, np):
if base.direct.selected.last == None:
self.editor.objectMgr.updateObjectColor(r, g, b, a, np)
elif self.editor.objectMgr.findObjectByNodePath(np) == self.editor.objectMgr.findObjectByNodePath(base.direct.selected.last):
self.editor.ui.objectPropertyUI.propCR.setValue(r)
self.editor.ui.objectPropertyUI.propCG.setValue(g)
self.editor.ui.objectPropertyUI.propCB.setValue(b)
self.editor.ui.objectPropertyUI.propCA.setValue(a)
self.editor.objectMgr.updateObjectColor(r, g, b, a, np)
else:
self.editor.objectMgr.updateObjectColor(r, g, b, a, np)
| bsd-3-clause |
h2oai/h2o-dev | h2o-py/h2o/exceptions.py | 4 | 6460 | # -*- encoding: utf-8 -*-
# Copyright: (c) 2016 H2O.ai
# License: Apache License Version 2.0 (see LICENSE for details)
"""
:mod:`h2o.exceptions` -- all exceptions classes in h2o module.
All H2O exceptions derive from :class:`H2OError`.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
__all__ = ("H2OStartupError", "H2OConnectionError", "H2OServerError", "H2OResponseError",
"H2OValueError", "H2OTypeError", "H2OJobCancelled")
class H2OError(Exception):
"""Base class for all H2O exceptions."""
class H2OSoftError(H2OError):
"""Base class for exceptions that trigger "soft" exception handling hook."""
#-----------------------------------------------------------------------------------------------------------------------
# H2OValueError
#-----------------------------------------------------------------------------------------------------------------------
class H2OValueError(H2OSoftError, ValueError):
"""Error indicating that wrong parameter value was passed to a function."""
def __init__(self, message, var_name=None, skip_frames=0):
"""Create an H2OValueError exception object."""
super(H2OValueError, self).__init__(message)
self.var_name = var_name
self.skip_frames = skip_frames
#-----------------------------------------------------------------------------------------------------------------------
# H2OTypeError
#-----------------------------------------------------------------------------------------------------------------------
class H2OTypeError(H2OSoftError, TypeError):
"""
Error indicating that the user passed a parameter of wrong type.
This error will trigger "soft" exception handling, in the sense that the stack trace will be much more compact
than usual.
"""
def __init__(self, var_name=None, var_value=None, var_type_name=None, exp_type_name=None, message=None,
skip_frames=0):
"""
Create an H2OTypeError exception object.
:param message: error message that will be shown to the user. If not given, this message will be constructed
from ``var_name``, ``var_value``, etc.
:param var_name: name of the variable whose type is wrong (can be used for highlighting etc).
:param var_value: the value of the variable.
:param var_type_name: the name of the variable's actual type.
:param exp_type_name: the name of the variable's expected type.
:param skip_frames: how many auxiliary function calls have been made since the moment of the exception. This
many local frames will be skipped in the output of the exception message. For example if you want to check
a variables type, and call a helper function ``assert_is_type()`` to do that job for you, then
``skip_frames`` should be 1 (thus making the call to ``assert_is_type`` invisible).
"""
super(H2OTypeError, self).__init__(message)
self._var_name = var_name
self._var_value = var_value
self._var_type_name = var_type_name or str(type(var_value))
self._exp_type_name = exp_type_name
self._message = message
self._skip_frames = skip_frames
def __str__(self):
"""Used when printing out the exception message."""
if self._message:
return self._message
# Otherwise construct the message
var = self._var_name
val = self._var_value
atn = self._var_type_name
etn = self._exp_type_name or ""
article = "an" if etn.lstrip("?")[0] in "aioeH" else "a"
return "Argument `{var}` should be {an} {expected_type}, got {actual_type} {value}".\
format(var=var, an=article, expected_type=etn, actual_type=atn, value=val)
@property
def var_name(self):
"""Variable name."""
return self._var_name
@property
def skip_frames(self):
"""Number of local frames to skip when printing our the stacktrace."""
return self._skip_frames
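# Minimal usage sketch (illustrative only, not part of this module): a
# hypothetical type-check helper that raises H2OTypeError while hiding its
# own stack frame from the soft exception hook.
#
#   def assert_is_int(name, value):
#       if not isinstance(value, int):
#           raise H2OTypeError(var_name=name, var_value=value,
#                              exp_type_name="integer", skip_frames=1)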
#-----------------------------------------------------------------------------------------------------------------------
# Backend exceptions
#-----------------------------------------------------------------------------------------------------------------------
class H2OStartupError(H2OSoftError):
"""Raised by H2OLocalServer when the class fails to launch a server."""
class H2OConnectionError(H2OSoftError):
"""
Raised when connection to an H2O server cannot be established.
This can be raised if the connection was not initialized; or the server cannot be reached at the specified address;
or there is an authentication error; or the request times out; etc.
"""
# This should have been extending from Exception as well; however in older code versions all exceptions were
# EnvironmentError's, so for old code to work we extend H2OResponseError from EnvironmentError.
class H2OResponseError(H2OError, EnvironmentError):
"""Raised when the server encounters a user error and sends back an H2OErrorV3 response."""
class H2OServerError(H2OError):
"""
Raised when any kind of server error is encountered.
This includes: server returning HTTP status 500; or server sending malformed JSON; or server returning an
unexpected response (e.g. lacking a "__schema" field); or server indicating that it is in an unhealthy state; etc.
"""
def __init__(self, message, stacktrace=None):
"""
Instantiate a new H2OServerError exception.
:param message: error message describing the exception.
:param stacktrace: (optional, list(str)) server-side stacktrace, if available. This will be printed out by
our custom except hook (see debugging.py).
"""
super(H2OServerError, self).__init__(message)
self.stacktrace = stacktrace
#-----------------------------------------------------------------------------------------------------------------------
# H2OJobCancelled
#-----------------------------------------------------------------------------------------------------------------------
class H2OJobCancelled(H2OError):
"""
Raised when the user interrupts a running job.
By default, this exception will not trigger any output (as if it is caught and ignored), however the user still
has an ability to catch this explicitly and perform a custom action.
"""
| apache-2.0 |
recap/pumpkin | examples/tweeter/filters-cat2/filterhaikus.py | 1 | 3418 | __author__ = 'reggie'
###START-CONF
##{
##"object_name": "filterhaikus",
##"object_poi": "qpwo-2345",
##"parameters": [
## {
## "name": "tweet",
## "description": "english tweets",
## "required": true,
## "type": "TweetString",
## "format": "",
## "state" : "ENGLISH"
## }
## ],
##"return": [
## {
## "name": "tweet",
## "description": "haiku tweet",
## "required": true,
## "type": "TweetString",
## "format": "",
## "state" : "HAIKU|NO_HAIKU"
## }
##
## ] }
##END-CONF
import re
import nltk
from nltk.corpus import cmudict
from curses.ascii import isdigit
from pumpkin import PmkSeed
class filterhaikus(PmkSeed.Seed):
def __init__(self, context, poi=None):
PmkSeed.Seed.__init__(self, context,poi)
self.d = None
pass
def on_load(self):
print "Loading: " + self.__class__.__name__
wd = self.context.getWorkingDir()
nltk.data.path.append(wd + "nltk_data")
self.d = cmudict.dict()
pass
def run(self, pkt, tweet):
#print "RECEIVED TWEET: "+tweet
m = re.search('W(\s+)(.*)(\n)', tweet, re.S)
try:
if m:
tw = m.group(2)
if self.is_haiku(tw):
self.dispatch(pkt, tweet, "HAIKU")
#else:
# self.dispatch(pkt, tweet, "RUBBSIH")
except:
pass
pass
def is_haiku(self, text):
text_orig = text
text = text.lower()
if filter(str.isdigit, str(text)):
return False
words = nltk.wordpunct_tokenize(re.sub('[^a-zA-Z_ ]', '',text))
#print words
syl_count = 0
word_count = 0
haiku_line_count = 0
lines = []
d = self.d
for word in words:
if word.lower() in d.keys():
syl_count += [len(list(y for y in x if isdigit(y[-1]))) for x in
d[word.lower()]][0]
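                # syl_count accumulates across the whole tweet, so the three
                # haiku lines end at 5, 12 (5+7) and 17 (5+7+5) syllables.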
if haiku_line_count == 0:
if syl_count == 5:
lines.append(word)
haiku_line_count += 1
elif haiku_line_count == 1:
if syl_count == 12:
lines.append(word)
haiku_line_count += 1
else:
if syl_count == 17:
lines.append(word)
haiku_line_count += 1
if syl_count == 17:
try:
final_lines = []
str_tmp = ""
counter = 0
for word in text_orig.split():
str_tmp += str(word) + " "
if lines[counter].lower() in str(word).lower():
final_lines.append(str_tmp.strip())
counter += 1
str_tmp = ""
if len(str_tmp) > 0:
final_lines.append(str_tmp.strip())
return True
except Exception as e:
print e
return False
else:
return False
return True
| mit |
Diegojnb/JdeRobot | src/drivers/MAVLinkServer/MAVProxy/modules/mavproxy_misc.py | 11 | 8845 | #!/usr/bin/env python
'''miscellaneous commands'''
import time, math
from pymavlink import mavutil
from MAVProxy.modules.lib import mp_module
from os import kill
from signal import signal
from subprocess import PIPE, Popen
class RepeatCommand(object):
'''repeated command object'''
def __init__(self, period, cmd):
self.period = period
self.cmd = cmd
self.event = mavutil.periodic_event(1.0/period)
def __str__(self):
return "Every %.1f seconds: %s" % (self.period, self.cmd)
def run_command(args, cwd = None, shell = False, timeout = None, env = None):
'''
Run a shell command with a timeout.
See http://stackoverflow.com/questions/1191374/subprocess-with-timeout
'''
from subprocess import PIPE, Popen
from StringIO import StringIO
import fcntl, os, signal
p = Popen(args, shell = shell, cwd = cwd, stdout = PIPE, stderr = PIPE, env = env)
tstart = time.time()
buf = StringIO()
# try to make it non-blocking
try:
fcntl.fcntl(p.stdout, fcntl.F_SETFL, fcntl.fcntl(p.stdout, fcntl.F_GETFL) | os.O_NONBLOCK)
except Exception:
pass
while True:
time.sleep(0.1)
retcode = p.poll()
try:
buf.write(p.stdout.read())
except Exception:
pass
if retcode is not None:
break
if timeout is not None and time.time() > tstart + timeout:
print("timeout in process %u" % p.pid)
try:
os.kill(p.pid, signal.SIGKILL)
except OSError:
pass
p.wait()
return buf.getvalue()
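# Illustrative use of the helper above (hypothetical command): capture the
# output of a short shell command, killing it if it runs past 3 seconds.
#
#   output = run_command(["uptime"], shell=False, timeout=3)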
class MiscModule(mp_module.MPModule):
def __init__(self, mpstate):
super(MiscModule, self).__init__(mpstate, "misc", "misc commands")
self.add_command('alt', self.cmd_alt, "show altitude information")
self.add_command('up', self.cmd_up, "adjust pitch trim by up to 5 degrees")
self.add_command('reboot', self.cmd_reboot, "reboot autopilot")
self.add_command('time', self.cmd_time, "show autopilot time")
self.add_command('shell', self.cmd_shell, "run shell command")
self.add_command('changealt', self.cmd_changealt, "change target altitude")
self.add_command('land', self.cmd_land, "auto land")
self.add_command('repeat', self.cmd_repeat, "repeat a command at regular intervals",
["<add|remove|clear>"])
self.add_command('version', self.cmd_version, "show version")
self.add_command('rcbind', self.cmd_rcbind, "bind RC receiver")
self.repeats = []
def altitude_difference(self, pressure1, pressure2, ground_temp):
'''calculate barometric altitude'''
scaling = pressure2 / pressure1
temp = ground_temp + 273.15
return 153.8462 * temp * (1.0 - math.exp(0.190259 * math.log(scaling)))
def qnh_estimate(self):
'''estimate QNH pressure from GPS altitude and scaled pressure'''
alt_gps = self.master.field('GPS_RAW_INT', 'alt', 0) * 0.001
pressure2 = self.master.field('SCALED_PRESSURE', 'press_abs', 0)
ground_temp = self.get_mav_param('GND_TEMP', 21)
temp = ground_temp + 273.15
pressure1 = pressure2 / math.exp(math.log(1.0 - (alt_gps / (153.8462 * temp))) / 0.190259)
return pressure1
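    # Sanity check on the inversion above: with a GPS altitude of 0,
    # log(1.0 - 0) == 0, so the QNH estimate reduces to the raw
    # SCALED_PRESSURE reading itself.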
def cmd_alt(self, args):
'''show altitude'''
print("Altitude: %.1f" % self.status.altitude)
qnh_pressure = self.get_mav_param('AFS_QNH_PRESSURE', None)
if qnh_pressure is not None and qnh_pressure > 0:
ground_temp = self.get_mav_param('GND_TEMP', 21)
pressure = self.master.field('SCALED_PRESSURE', 'press_abs', 0)
qnh_alt = self.altitude_difference(qnh_pressure, pressure, ground_temp)
print("QNH Alt: %u meters %u feet for QNH pressure %.1f" % (qnh_alt, qnh_alt*3.2808, qnh_pressure))
print("QNH Estimate: %.1f millibars" % self.qnh_estimate())
def cmd_shell(self, args):
'''shell command'''
print(run_command(args, shell=False, timeout=3))
def cmd_up(self, args):
'''adjust TRIM_PITCH_CD up by 5 degrees'''
if len(args) == 0:
adjust = 5.0
else:
adjust = float(args[0])
old_trim = self.get_mav_param('TRIM_PITCH_CD', None)
if old_trim is None:
print("Existing trim value unknown!")
return
new_trim = int(old_trim + (adjust*100))
if math.fabs(new_trim - old_trim) > 1000:
print("Adjustment by %d too large (from %d to %d)" % (adjust*100, old_trim, new_trim))
return
print("Adjusting TRIM_PITCH_CD from %d to %d" % (old_trim, new_trim))
self.param_set('TRIM_PITCH_CD', new_trim)
def cmd_reboot(self, args):
'''reboot autopilot'''
self.master.reboot_autopilot()
def cmd_time(self, args):
'''show autopilot time'''
tusec = self.master.field('SYSTEM_TIME', 'time_unix_usec', 0)
if tusec == 0:
print("No SYSTEM_TIME time available")
return
print("%s (%s)\n" % (time.ctime(tusec * 1.0e-6), time.ctime()))
def cmd_changealt(self, args):
'''change target altitude'''
if len(args) < 1:
print("usage: changealt <relaltitude>")
return
relalt = float(args[0])
self.master.mav.mission_item_send(self.settings.target_system,
self.settings.target_component,
0,
3,
mavutil.mavlink.MAV_CMD_NAV_WAYPOINT,
3, 1, 0, 0, 0, 0,
0, 0, relalt)
print("Sent change altitude command for %.1f meters" % relalt)
def cmd_land(self, args):
'''auto land commands'''
if len(args) < 1:
self.master.mav.command_long_send(self.settings.target_system,
0,
mavutil.mavlink.MAV_CMD_DO_LAND_START,
0, 0, 0, 0, 0, 0, 0, 0)
elif args[0] == 'abort':
self.master.mav.command_long_send(self.settings.target_system,
0,
mavutil.mavlink.MAV_CMD_DO_GO_AROUND,
0, 0, 0, 0, 0, 0, 0, 0)
else:
print("Usage: land [abort]")
def cmd_version(self, args):
'''show version'''
self.master.mav.command_long_send(self.settings.target_system,
self.settings.target_component,
mavutil.mavlink.MAV_CMD_REQUEST_AUTOPILOT_CAPABILITIES,
0,
1, 0, 0, 0, 0, 0, 0)
def cmd_rcbind(self, args):
'''start RC bind'''
if len(args) < 1:
print("Usage: rcbind <dsmmode>")
return
self.master.mav.command_long_send(self.settings.target_system,
self.settings.target_component,
mavutil.mavlink.MAV_CMD_START_RX_PAIR,
0,
float(args[0]), 0, 0, 0, 0, 0, 0)
def cmd_repeat(self, args):
'''repeat a command at regular intervals'''
if len(args) == 0:
if len(self.repeats) == 0:
print("No repeats")
return
for i in range(len(self.repeats)):
print("%u: %s" % (i, self.repeats[i]))
return
if args[0] == 'add':
if len(args) < 3:
print("Usage: repeat add PERIOD CMD")
return
self.repeats.append(RepeatCommand(float(args[1]), " ".join(args[2:])))
elif args[0] == 'remove':
if len(args) < 2:
print("Usage: repeat remove INDEX")
return
i = int(args[1])
if i < 0 or i >= len(self.repeats):
print("Invalid index %d" % i)
return
self.repeats.pop(i)
return
elif args[0] == 'clean':
self.repeats = []
else:
print("Usage: repeat <add|remove|clean>")
def idle_task(self):
'''called on idle'''
for r in self.repeats:
if r.event.trigger():
self.mpstate.functions.process_stdin(r.cmd, immediate=True)
def init(mpstate):
'''initialise module'''
return MiscModule(mpstate)
| gpl-3.0 |
cjellick/rancher | tests/integration/suite/test_globaldns.py | 4 | 12409 | from .common import random_str
from rancher import ApiError
from kubernetes.client import CustomObjectsApi
from kubernetes.client import CoreV1Api
import pytest
import time
import kubernetes
import base64
def test_dns_fqdn_unique(admin_mc):
client = admin_mc.client
provider_name = random_str()
access = random_str()
secret = random_str()
globaldns_provider = \
client.create_global_dns_provider(
name=provider_name,
rootDomain="example.com",
route53ProviderConfig={
'accessKey': access,
'secretKey': secret})
fqdn = random_str() + ".example.com"
globaldns_entry = \
client.create_global_dns(fqdn=fqdn, providerId=provider_name)
with pytest.raises(ApiError) as e:
client.create_global_dns(fqdn=fqdn, providerId=provider_name)
assert e.value.error.status == 422
client.delete(globaldns_entry)
client.delete(globaldns_provider)
def test_dns_provider_deletion(admin_mc):
client = admin_mc.client
provider_name = random_str()
access = random_str()
secret = random_str()
globaldns_provider = \
client.create_global_dns_provider(
name=provider_name,
rootDomain="example.com",
route53ProviderConfig={
'accessKey': access,
'secretKey': secret})
fqdn = random_str() + ".example.com"
provider_id = "cattle-global-data:"+provider_name
globaldns_entry = \
client.create_global_dns(fqdn=fqdn, providerId=provider_id)
with pytest.raises(ApiError) as e:
client.delete(globaldns_provider)
assert e.value.error.status == 403
client.delete(globaldns_entry)
client.delete(globaldns_provider)
def test_share_globaldns_provider_entry(admin_mc, user_factory,
remove_resource):
client = admin_mc.client
provider_name = random_str()
access = random_str()
secret = random_str()
# Add regular user as member to gdns provider
user_member = user_factory()
remove_resource(user_member)
user_client = user_member.client
members = [{"userPrincipalId": "local://" + user_member.user.id,
"accessType": "owner"}]
globaldns_provider = \
client.create_global_dns_provider(
name=provider_name,
rootDomain="example.com",
route53ProviderConfig={
'accessKey': access,
'secretKey': secret},
members=members)
remove_resource(globaldns_provider)
fqdn = random_str() + ".example.com"
globaldns_entry = \
client.create_global_dns(fqdn=fqdn, providerId=provider_name,
members=members)
remove_resource(globaldns_entry)
# Make sure creator can access both, provider and entry
gdns_provider_id = "cattle-global-data:" + provider_name
gdns_provider = client.by_id_global_dns_provider(gdns_provider_id)
assert gdns_provider is not None
gdns_entry_id = "cattle-global-data:" + globaldns_entry.name
gdns = client.by_id_global_dns(gdns_entry_id)
assert gdns is not None
# user should be able to list this gdns provider
api_instance = kubernetes.client.RbacAuthorizationV1Api(
admin_mc.k8s_client)
provider_rb_name = provider_name + "-gp-a"
wait_to_ensure_user_in_rb_subject(api_instance, provider_rb_name,
user_member.user.id)
gdns_provider = user_client.by_id_global_dns_provider(gdns_provider_id)
assert gdns_provider is not None
# user should be able to list this gdns entry
entry_rb_name = globaldns_entry.name + "-g-a"
wait_to_ensure_user_in_rb_subject(api_instance, entry_rb_name,
user_member.user.id)
gdns = user_client.by_id_global_dns(gdns_entry_id)
assert gdns is not None
def test_user_access_global_dns(admin_mc, user_factory, remove_resource):
user1 = user_factory()
remove_resource(user1)
user_client = user1.client
provider_name = random_str()
access = random_str()
secret = random_str()
globaldns_provider = \
user_client.create_global_dns_provider(
name=provider_name,
rootDomain="example.com",
route53ProviderConfig={
'accessKey': access,
'secretKey': secret})
remove_resource(globaldns_provider)
fqdn = random_str() + ".example.com"
globaldns_entry = \
user_client.create_global_dns(fqdn=fqdn, providerId=provider_name)
remove_resource(globaldns_entry)
# Make sure creator can access both, provider and entry
api_instance = kubernetes.client.RbacAuthorizationV1Api(
admin_mc.k8s_client)
provider_rb_name = provider_name + "-gp-a"
wait_to_ensure_user_in_rb_subject(api_instance, provider_rb_name,
user1.user.id)
gdns_provider_id = "cattle-global-data:" + provider_name
gdns_provider = user_client.by_id_global_dns_provider(gdns_provider_id)
assert gdns_provider is not None
entry_rb_name = globaldns_entry.name + "-g-a"
wait_to_ensure_user_in_rb_subject(api_instance, entry_rb_name,
user1.user.id)
gdns_entry_id = "cattle-global-data:" + globaldns_entry.name
gdns = user_client.by_id_global_dns(gdns_entry_id)
assert gdns is not None
def test_update_gdns_entry(admin_mc, remove_resource):
client = admin_mc.client
provider_name = random_str()
access = random_str()
secret = random_str()
globaldns_provider = \
client.create_global_dns_provider(
name=provider_name,
rootDomain="example.com",
route53ProviderConfig={
'accessKey': access,
'secretKey': secret})
remove_resource(globaldns_provider)
fqdn = random_str() + ".example.com"
gdns_entry_name = random_str()
globaldns_entry = \
client.create_global_dns(name=gdns_entry_name,
fqdn=fqdn, providerId=provider_name)
remove_resource(globaldns_entry)
new_fqdn = random_str()
wait_for_gdns_entry_creation(admin_mc, gdns_entry_name)
client.update(globaldns_entry, fqdn=new_fqdn)
wait_for_gdns_update(admin_mc, gdns_entry_name, new_fqdn)
def test_create_globaldns_provider_regular_user(remove_resource,
user_factory):
provider_name = random_str()
access = random_str()
secret = random_str()
user = user_factory()
globaldns_provider = \
user.client.create_global_dns_provider(
name=provider_name,
rootDomain="example.com",
route53ProviderConfig={
'accessKey': access,
'secretKey': secret})
remove_resource(globaldns_provider)
def wait_to_ensure_user_in_rb_subject(api, name,
userId, timeout=60):
found = False
interval = 0.5
start = time.time()
while not found:
time.sleep(interval)
interval *= 2
try:
rb = api.read_namespaced_role_binding(name, "cattle-global-data")
            for subject in rb.subjects:
                if subject.name == userId:
                    found = True
except kubernetes.client.rest.ApiException:
found = False
if time.time() - start > timeout:
raise AssertionError(
"Timed out waiting for user to get added to rb")
def wait_for_gdns_update(admin_mc, gdns_entry_name, new_fqdn, timeout=60):
client = admin_mc.client
updated = False
interval = 0.5
start = time.time()
id = "cattle-global-data:" + gdns_entry_name
while not updated:
if time.time() - start > timeout:
raise Exception('Timeout waiting for gdns entry to update')
gdns = client.by_id_global_dns(id)
if gdns is not None and gdns.fqdn == new_fqdn:
updated = True
time.sleep(interval)
interval *= 2
def wait_for_gdns_entry_creation(admin_mc, gdns_name, timeout=60):
start = time.time()
interval = 0.5
client = admin_mc.client
found = False
while not found:
if time.time() - start > timeout:
raise Exception('Timeout waiting for globalDNS entry creation')
gdns = client.list_global_dns(name=gdns_name)
if len(gdns) > 0:
found = True
time.sleep(interval)
interval *= 2
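# Note: the wait helpers above poll with a doubling interval (0.5s, 1s,
# 2s, ...), backing off instead of busy-waiting until the timeout expires.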
def test_cloudflare_provider_proxy_setting(admin_mc, remove_resource):
client = admin_mc.client
provider_name = random_str()
apiEmail = random_str()
apiKey = random_str()
globaldns_provider = \
client.create_global_dns_provider(
name=provider_name,
rootDomain="example.com",
cloudflareProviderConfig={
'proxySetting': True,
'apiEmail': apiEmail,
'apiKey': apiKey})
gdns_provider_id = "cattle-global-data:" + provider_name
gdns_provider = client.by_id_global_dns_provider(gdns_provider_id)
assert gdns_provider is not None
assert gdns_provider.cloudflareProviderConfig.proxySetting is True
remove_resource(globaldns_provider)
def test_dns_fqdn_hostname(admin_mc, remove_resource):
client = admin_mc.client
provider_name = random_str()
access = random_str()
secret = random_str()
globaldns_provider = \
client.create_global_dns_provider(
name=provider_name,
rootDomain="example.com",
route53ProviderConfig={
'accessKey': access,
'secretKey': secret})
remove_resource(globaldns_provider)
fqdn = random_str() + ".example!!!*.com"
with pytest.raises(ApiError) as e:
client.create_global_dns(fqdn=fqdn, providerId=provider_name)
assert e.value.error.status == 422
def test_globaldnsprovider_secret(admin_mc, remove_resource):
client = admin_mc.client
provider_name = random_str()
access_key = random_str()
secret_key = random_str()
globaldns_provider = \
client.create_global_dns_provider(
name=provider_name,
rootDomain="example.com",
route53ProviderConfig={
'accessKey': access_key,
'secretKey': secret_key})
# Test password not present in api
assert globaldns_provider is not None
assert globaldns_provider.route53ProviderConfig.get('secretKey') is None
crdClient, k8sclient = getClients(admin_mc)
ns, name = globaldns_provider["id"].split(":")
# Test password is in k8s secret after creation
verifyGDNSPassword(crdClient, k8sclient, ns, name, secret_key)
# Test updating password
newSecretPassword = random_str()
_ = client.update(globaldns_provider, route53ProviderConfig={
'accessKey': access_key,
'secretKey': newSecretPassword})
verifyGDNSPassword(crdClient, k8sclient, ns, name, newSecretPassword)
def getClients(admin_mc):
return CustomObjectsApi(admin_mc.k8s_client), \
CoreV1Api(admin_mc.k8s_client)
def verifyGDNSPassword(crdClient, k8sclient, ns, name, secretPassword):
k8es = crdClient.get_namespaced_custom_object(
"management.cattle.io", "v3", ns, 'globaldnsproviders', name)
secretName = k8es['spec']['route53ProviderConfig']['secretKey']
ns, name = secretName.split(":")
assert ns is not None
assert name is not None
secret = k8sclient.read_namespaced_secret(name, ns)
assert base64.b64decode(secret.data[name]).\
decode("utf-8") == secretPassword
| apache-2.0 |
bioinfo-center-pasteur-fr/ReGaTE | regate/remag.py | 2 | 7997 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Created on Jun. 16, 2014
@author: Olivia Doppelt-Azeroual, CIB-C3BI, Institut Pasteur, Paris
@author: Fabien Mareuil, CIB-C3BI, Institut Pasteur, Paris
@author: Hervé Ménager, CIB-C3BI, Institut Pasteur, Paris
@contact: [email protected]
@project: ReGaTE
@githuborganization: bioinfo-center-pasteur-fr
"""
import string
import os
import sys
import ruamel.yaml
import rdflib
import argparse
import regate
from bioblend.galaxy import GalaxyInstance
from bioblend.galaxy.client import ConnectionError
from bioblend.galaxy.datatypes import DatatypesClient
from bioblend.galaxy.client import Client
class EdamDatatypesClient(DatatypesClient):
"""
Override of the bioblend DatatypesClient class to add a get_edam_formats method
"""
def get_edam_formats(self):
"""
Displays a collection (dict) of edam formats.
:rtype: dict
:return: A dict of individual edam_format.
For example::
{
"RData": "format_2333",
"Roadmaps": "format_2561",
"Sequences": "format_1929",
"ab1": "format_2333",
"acedb": "format_2330",
"affybatch": "format_2331",
"afg": "format_2561",
"arff": "format_2330",
"asn1": "format_2330",
"asn1-binary": "format_2333"}
"""
url = self.gi._make_url(self)
url = '/'.join([url, "edam_formats"])
return Client._get(self, url=url)
def is_true(value):
"""
:param value:
:return:
"""
return value.lower() == "true"
def is_edamtype(dic_child):
"""
:param dic_child:
:return:
"""
if 'edam' in dic_child:
if dic_child['edam'] not in ['', "None", "Null"]:
return True
else:
return False
else:
return False
def return_formatted_edam(edam):
"""
:param edam:
:return:
"""
edam = string.split(edam, '_')
edam = "EDAM_{}:{:0>4d}".format(edam[0], int(edam[1]))
return edam
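# e.g. return_formatted_edam("format_2333") returns "EDAM_format:2333"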
def http_to_edamform(url):
"""
:param url:
:return:
"""
base = string.split(os.path.basename(url), '_')
return str("EDAM_{}:{:0>4d}").format(base[0], int(base[1]))
def edam_to_dict(edam_file):
"""
:param edam_file:
:return:
"""
g = rdflib.Graph()
g.parse(edam_file)
query1 = """SELECT ?format ?is_format_of WHERE {
?format rdfs:subClassOf ?format_sc .
?format_sc owl:onProperty
<http://edamontology.org/is_format_of> .
?format_sc owl:someValuesFrom ?is_format_of
}"""
query2 = """SELECT ?format ?superformat WHERE {
?format rdfs:subClassOf ?superformat .
?superformat oboInOwl:inSubset <http://purl.obolibrary.org/obo/edam#formats>
}"""
query3 = """SELECT ?format ?label WHERE {
?format rdfs:label ?label.
?format oboInOwl:inSubset ?subset.
FILTER (?subset = <http://purl.obolibrary.org/obo/edam#formats> ||
?subset = <http://purl.obolibrary.org/obo/edam#data>)}"""
# Property = {"oboInOwl": "http://www.geneontology.org/formats/oboInOwl#"}
format_with_formats = {}
format_with_data = {}
term_labels = {}
for row in g.query(query1):
format_with_data[http_to_edamform(row[0])] = http_to_edamform(row[1])
for row in g.query(query2):
child_format = http_to_edamform(row[0])
parent_format = http_to_edamform(row[1])
if child_format in format_with_formats:
format_with_formats[child_format].append(parent_format)
else:
format_with_formats[child_format] = [parent_format]
for row in g.query(query3):
        term_labels[http_to_edamform(row[0].toPython())] = str(row[1].toPython())
return format_with_formats, format_with_data, term_labels
def add_data(formats, relation_formats, relation_data, list_edam_data):
"""
:param formats:
:param relation_formats:
:param relation_data:
:param list_edam_data:
:return:
"""
if len(formats) != 0:
for format_tool in formats:
if format_tool in relation_data:
list_edam_data.append(relation_data[format_tool])
formats.remove(format_tool)
return add_data(formats, relation_formats, relation_data, list_edam_data)
elif format_tool in relation_formats:
formats.remove(format_tool)
formats = formats + relation_formats[format_tool]
return add_data(formats, relation_formats, relation_data, list_edam_data)
else:
sys.stdout.write("NO FORMAT AND NO DATA FOR {0}\n".format(format_tool))
formats.remove(format_tool)
if format_tool in ("Not Mapped Yet", "NONE Known"):
return add_data(formats, relation_formats, relation_data, list_edam_data)
else:
list_edam_data.append("EDAM_data:0006")
return add_data(formats, relation_formats, relation_data, list_edam_data)
else:
return list_edam_data
def add_datas(dict_map, rel_format_formats, rel_format_data, term_labels):
"""
:param dict_map:
:param rel_format_formats:
:param rel_format_data:
:return:
"""
import copy
for key, value in dict_map.iteritems():
formats = copy.copy(value)
datas = add_data(formats, rel_format_formats, rel_format_data, list_edam_data=[])
datas_v = [{'uri':data_item,'term':term_labels.get(data_item,'')} for data_item in datas]
formats_v = [{'uri':format_item,'term':term_labels.get(format_item,'')} for format_item in value]
dict_map[key] = {'formats':formats_v, 'data':datas_v}
return dict_map
def dict_to_yaml(mapping_dict, yamlfile):
"""
:param mapping_dict:
:param yamlfile:
:return:
"""
stream = file(yamlfile, 'w')
ruamel.yaml.dump(mapping_dict, stream, default_flow_style=False)
def galaxy_to_edamdict(url, key):
"""
:param url:
:param key:
:return:
"""
gi = GalaxyInstance(url, key=key)
datatypeclient = EdamDatatypesClient(gi)
try:
dict_map = datatypeclient.get_edam_formats()
except ConnectionError, e:
raise ConnectionError(
'{0}, The Galaxy data can\'t be used, It\'s possible that Galaxy is too old, please update it\n'.format(e))
dictmapping = {}
for key, value in dict_map.iteritems():
form_edam = return_formatted_edam(value)
dictmapping[str(key)] = [form_edam]
return dictmapping
def run():
parser = argparse.ArgumentParser(description="Galaxy instance tool\
parsing, for integration in biotools/bioregistry")
parser.add_argument("--config_file", help="config.ini file for regate or remag")
parser.add_argument("--templateconfig", action='store_true', help="generate a config_file template")
if len(sys.argv) == 1:
parser.print_help()
sys.exit(1)
args = parser.parse_args()
if not args.templateconfig:
if not os.path.exists(args.config_file):
raise IOError("{0} doesn't exist".format(args.config_file))
config = regate.Config(args.config_file, "remag")
dict_mapping = galaxy_to_edamdict(config.galaxy_url_api, config.api_key)
relation_format_formats, relation_format_data, term_labels = edam_to_dict(config.edam_file)
yaml_file = config.output_yaml
dict_mapping = add_datas(dict_mapping, relation_format_formats, relation_format_data, term_labels)
dict_to_yaml(dict_mapping, yaml_file)
elif args.templateconfig:
regate.generate_template()
else:
parser.print_help()
| gpl-2.0 |
dcclogin/TextGenerator | TitleCrawler/ccf_conference/categories/network/lcn2015.py | 1 | 3390 |
# -*- coding: utf-8 -*-
import re
import copy
import random
import os, sys
import MySQLdb
import requests
from time import sleep
from threading import Thread
from bs4 import BeautifulSoup
reload(sys)
sys.setdefaultencoding('utf-8')
clade = 'http://dblp.uni-trier.de/db/conf/lcn/'
months = {
'January': '01',
'February': '02',
'March': '03',
'April': '04',
'May': '05',
'June': '06',
'July': '07',
'August': '08',
'September': '09',
'October': '10',
'November': '11',
'December': '12'
}
# regex to match months in <h2> tags
re_mons=r'(January|February|March|April|May|June|July|August|September|October|November|December)'
repeato_mons=r'([ /-]*'+re_mons+r'*)*'
pattern_mons=re_mons+repeato_mons
# regex to match years in <h2> tags
re_year=r'((19|20)\d+)'
repeato_year=r'([ /-]*'+re_year+r'*)*'
pattern_year=re_year+repeato_year
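# e.g. re.search(pattern_mons, "October/November 2015").group().strip()
# yields "October/November", which sub_months() rewrites to "10/11"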
def get_leaves(clade):
r = requests.get(clade)
if r.status_code == 200:
soup = BeautifulSoup(r.text, 'lxml')
leaves = []
late = soup.find('ul', class_='publ-list')
tags = late.find_all('div', class_='data', itemprop='headline')
for tag in tags:
leaves.append(tag.find_all('a')[-1]['href'])
return leaves
def sub_months(match_obj):
""" transfer months to digital form (in-place change)
"""
for m in months:
match_obj = re.sub(m, months[m], match_obj)
return match_obj
def get_yymm(leaf):
r = requests.get(leaf)
if r.status_code == 200:
soup = BeautifulSoup(r.text, 'lxml')
lat = soup.find('div', class_='data', itemprop='headline')
tag = lat.find('span', class_='title', itemprop='name')
txt = tag.get_text()
try:
match_obj_mons = re.search(pattern_mons, txt)
match_obj_mons = match_obj_mons.group().strip()
match_obj_mons = sub_months(match_obj_mons)
month = match_obj_mons
except Exception, error_mons:
print '[-]', error_mons
month = None
try:
match_obj_year = re.search(pattern_year, txt)
match_obj_year = match_obj_year.group().strip()
year = match_obj_year
except Exception, error_year:
print '[-]', error_year
year = None
return year, month
def get_titles(leaf):
r = requests.get(leaf)
if r.status_code == 200:
soup = BeautifulSoup(r.text, 'lxml')
title_lst = []
tags = soup.find_all('span', class_='title', itemprop='name')
for tag in tags:
title_lst.append(tag.get_text())
return title_lst
def incert_mysql(year, month, title_lst):
try:
tablename = 'papertitle'
conn = MySQLdb.connect(host='127.0.0.1', user='root', passwd='13917331612', db='conference')
c = conn.cursor()
conn.set_character_set('utf8')
c.execute('SET NAMES utf8;')
c.execute('SET CHARACTER SET utf8;')
c.execute('SET character_set_connection=utf8;')
for p in title_lst:
try:
sql = "insert into " + tablename + "(year, month, name, title, class, category) \
values(%s, %s, %s, %s, %s, %s)"
param = (year, month, 'LCN', p, 'C', 'network')
c.execute(sql, param)
print ">>>> [+] Insert paper <%s> : done." %(p)
except MySQLdb.Error, e:
print "[-] Mysql Error %d: %s" % (e.args[0], e.args[1])
continue
conn.commit()
c.close()
except MySQLdb.Error, e:
print "[-] Mysql Error %d: %s" % (e.args[0], e.args[1])
return None
def build():
leaves = get_leaves(clade)
for leaf in leaves:
title_lst = get_titles(leaf)
year, month = get_yymm(leaf)
incert_mysql(year, month, title_lst)
return None
build() | mit |
cmelange/ansible | lib/ansible/modules/cloud/misc/virt.py | 44 | 15378 | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Virt management features
Copyright 2007, 2012 Red Hat, Inc
Michael DeHaan <[email protected]>
Seth Vidal <[email protected]>
This software may be freely redistributed under the terms of the GNU
general public license.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: virt
short_description: Manages virtual machines supported by libvirt
description:
- Manages virtual machines supported by I(libvirt).
version_added: "0.2"
options:
name:
description:
- name of the guest VM being managed. Note that VM must be previously
defined with xml.
required: true
default: null
aliases: []
state:
description:
- Note that there may be some lag for state requests like C(shutdown)
since these refer only to VM states. After starting a guest, it may not
be immediately accessible.
required: false
choices: [ "running", "shutdown", "destroyed", "paused" ]
default: "no"
command:
description:
- in addition to state management, various non-idempotent commands are available. See examples
required: false
choices: ["create","status", "start", "stop", "pause", "unpause",
"shutdown", "undefine", "destroy", "get_xml",
"freemem", "list_vms", "info", "nodeinfo", "virttype", "define"]
autostart:
description:
- start VM at host startup
choices: [True, False]
version_added: "2.3"
default: null
uri:
description:
- libvirt connection uri
required: false
default: qemu:///system
xml:
description:
- XML document used with the define command
required: false
default: null
requirements:
- "python >= 2.6"
- "libvirt-python"
author:
- "Ansible Core Team"
- "Michael DeHaan"
- "Seth Vidal"
'''
EXAMPLES = '''
# a playbook task line:
- virt:
name: alpha
state: running
# /usr/bin/ansible invocations
# ansible host -m virt -a "name=alpha command=status"
# ansible host -m virt -a "name=alpha command=get_xml"
# ansible host -m virt -a "name=alpha command=create uri=lxc:///"
---
# a playbook example of defining and launching an LXC guest
tasks:
- name: define vm
virt:
name: foo
command: define
xml: "{{ lookup('template', 'container-template.xml.j2') }}"
uri: 'lxc:///'
- name: start vm
virt:
name: foo
state: running
uri: 'lxc:///'
'''
RETURN = '''
# for list_vms command
list_vms:
description: The list of vms defined on the remote system
type: dictionary
returned: success
sample: [
"build.example.org",
"dev.example.org"
]
# for status command
status:
description: The status of the VM, among running, crashed, paused and shutdown
type: string
sample: "success"
returned: success
'''
VIRT_FAILED = 1
VIRT_SUCCESS = 0
VIRT_UNAVAILABLE = 2
import sys
try:
import libvirt
except ImportError:
HAS_VIRT = False
else:
HAS_VIRT = True
ALL_COMMANDS = []
VM_COMMANDS = ['create','status', 'start', 'stop', 'pause', 'unpause',
'shutdown', 'undefine', 'destroy', 'get_xml', 'define']
HOST_COMMANDS = ['freemem', 'list_vms', 'info', 'nodeinfo', 'virttype']
ALL_COMMANDS.extend(VM_COMMANDS)
ALL_COMMANDS.extend(HOST_COMMANDS)
VIRT_STATE_NAME_MAP = {
0 : "running",
1 : "running",
2 : "running",
3 : "paused",
4 : "shutdown",
5 : "shutdown",
6 : "crashed"
}
class VMNotFound(Exception):
pass
class LibvirtConnection(object):
def __init__(self, uri, module):
self.module = module
cmd = "uname -r"
rc, stdout, stderr = self.module.run_command(cmd)
if "xen" in stdout:
conn = libvirt.open(None)
elif "esx" in uri:
auth = [[libvirt.VIR_CRED_AUTHNAME, libvirt.VIR_CRED_NOECHOPROMPT], [], None]
conn = libvirt.openAuth(uri, auth)
else:
conn = libvirt.open(uri)
if not conn:
raise Exception("hypervisor connection failure")
self.conn = conn
def find_vm(self, vmid):
"""
Extra bonus feature: vmid = -1 returns a list of everything
"""
conn = self.conn
vms = []
# this block of code borrowed from virt-manager:
# get working domain's name
ids = conn.listDomainsID()
for id in ids:
vm = conn.lookupByID(id)
vms.append(vm)
# get defined domain
names = conn.listDefinedDomains()
for name in names:
vm = conn.lookupByName(name)
vms.append(vm)
if vmid == -1:
return vms
for vm in vms:
if vm.name() == vmid:
return vm
raise VMNotFound("virtual machine %s not found" % vmid)
def shutdown(self, vmid):
return self.find_vm(vmid).shutdown()
    def pause(self, vmid):
        return self.suspend(vmid)
    def unpause(self, vmid):
        return self.resume(vmid)
def suspend(self, vmid):
return self.find_vm(vmid).suspend()
def resume(self, vmid):
return self.find_vm(vmid).resume()
def create(self, vmid):
return self.find_vm(vmid).create()
def destroy(self, vmid):
return self.find_vm(vmid).destroy()
def undefine(self, vmid):
return self.find_vm(vmid).undefine()
def get_status2(self, vm):
state = vm.info()[0]
return VIRT_STATE_NAME_MAP.get(state,"unknown")
def get_status(self, vmid):
state = self.find_vm(vmid).info()[0]
return VIRT_STATE_NAME_MAP.get(state,"unknown")
def nodeinfo(self):
return self.conn.getInfo()
def get_type(self):
return self.conn.getType()
def get_xml(self, vmid):
vm = self.conn.lookupByName(vmid)
return vm.XMLDesc(0)
def get_maxVcpus(self, vmid):
vm = self.conn.lookupByName(vmid)
return vm.maxVcpus()
def get_maxMemory(self, vmid):
vm = self.conn.lookupByName(vmid)
return vm.maxMemory()
def getFreeMemory(self):
return self.conn.getFreeMemory()
def get_autostart(self, vmid):
vm = self.conn.lookupByName(vmid)
return vm.autostart()
def set_autostart(self, vmid, val):
vm = self.conn.lookupByName(vmid)
return vm.setAutostart(val)
def define_from_xml(self, xml):
return self.conn.defineXML(xml)
class Virt(object):
def __init__(self, uri, module):
self.module = module
self.uri = uri
def __get_conn(self):
self.conn = LibvirtConnection(self.uri, self.module)
return self.conn
def get_vm(self, vmid):
self.__get_conn()
return self.conn.find_vm(vmid)
def state(self):
vms = self.list_vms()
state = []
for vm in vms:
state_blurb = self.conn.get_status(vm)
state.append("%s %s" % (vm,state_blurb))
return state
def info(self):
vms = self.list_vms()
info = dict()
for vm in vms:
data = self.conn.find_vm(vm).info()
# libvirt returns maxMem, memory, and cpuTime as long()'s, which
# xmlrpclib tries to convert to regular int's during serialization.
# This throws exceptions, so convert them to strings here and
# assume the other end of the xmlrpc connection can figure things
# out or doesn't care.
info[vm] = {
"state" : VIRT_STATE_NAME_MAP.get(data[0],"unknown"),
"maxMem" : str(data[1]),
"memory" : str(data[2]),
"nrVirtCpu" : data[3],
"cpuTime" : str(data[4]),
}
info[vm]["autostart"] = self.conn.get_autostart(vm)
return info
def nodeinfo(self):
self.__get_conn()
info = dict()
data = self.conn.nodeinfo()
info = {
"cpumodel" : str(data[0]),
"phymemory" : str(data[1]),
"cpus" : str(data[2]),
"cpumhz" : str(data[3]),
"numanodes" : str(data[4]),
"sockets" : str(data[5]),
"cpucores" : str(data[6]),
"cputhreads" : str(data[7])
}
return info
def list_vms(self, state=None):
self.conn = self.__get_conn()
vms = self.conn.find_vm(-1)
results = []
for x in vms:
try:
if state:
vmstate = self.conn.get_status2(x)
if vmstate == state:
results.append(x.name())
else:
results.append(x.name())
            except Exception:
pass
return results
def virttype(self):
return self.__get_conn().get_type()
def autostart(self, vmid, as_flag):
self.conn = self.__get_conn()
# Change autostart flag only if needed
if self.conn.get_autostart(vmid) != as_flag:
self.conn.set_autostart(vmid, as_flag)
return True
return False
def freemem(self):
self.conn = self.__get_conn()
return self.conn.getFreeMemory()
def shutdown(self, vmid):
""" Make the machine with the given vmid stop running. Whatever that takes. """
self.__get_conn()
self.conn.shutdown(vmid)
return 0
def pause(self, vmid):
""" Pause the machine with the given vmid. """
self.__get_conn()
return self.conn.suspend(vmid)
def unpause(self, vmid):
""" Unpause the machine with the given vmid. """
self.__get_conn()
return self.conn.resume(vmid)
def create(self, vmid):
""" Start the machine via the given vmid """
self.__get_conn()
return self.conn.create(vmid)
def start(self, vmid):
""" Start the machine via the given id/name """
self.__get_conn()
return self.conn.create(vmid)
def destroy(self, vmid):
""" Pull the virtual power from the virtual domain, giving it virtually no time to virtually shut down. """
self.__get_conn()
return self.conn.destroy(vmid)
def undefine(self, vmid):
""" Stop a domain, and then wipe it from the face of the earth. (delete disk/config file) """
self.__get_conn()
return self.conn.undefine(vmid)
def status(self, vmid):
"""
Return a state suitable for server consumption. Aka, codes.py values, not XM output.
"""
self.__get_conn()
return self.conn.get_status(vmid)
def get_xml(self, vmid):
"""
Receive a Vm id as input
Return an xml describing vm config returned by a libvirt call
"""
self.__get_conn()
return self.conn.get_xml(vmid)
def get_maxVcpus(self, vmid):
"""
Gets the max number of VCPUs on a guest
"""
self.__get_conn()
return self.conn.get_maxVcpus(vmid)
def get_max_memory(self, vmid):
"""
Gets the max memory on a guest
"""
self.__get_conn()
return self.conn.get_MaxMemory(vmid)
def define(self, xml):
"""
Define a guest with the given xml
"""
self.__get_conn()
return self.conn.define_from_xml(xml)
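# Illustrative sketch (not part of the module): the Virt wrapper can also be
# driven directly from Python, provided an AnsibleModule-like object exposing
# run_command() is passed in for the hypervisor probe.
#
#   v = Virt("qemu:///system", module)
#   running = v.list_vms(state="running")
#   details = v.info()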
def core(module):
state = module.params.get('state', None)
autostart = module.params.get('autostart', None)
guest = module.params.get('name', None)
command = module.params.get('command', None)
uri = module.params.get('uri', None)
xml = module.params.get('xml', None)
v = Virt(uri, module)
res = {}
if state and command=='list_vms':
res = v.list_vms(state=state)
if not isinstance(res, dict):
res = { command: res }
return VIRT_SUCCESS, res
if state:
if not guest:
module.fail_json(msg = "state change requires a guest specified")
if state == 'running':
            if v.status(guest) == 'paused':
res['changed'] = True
res['msg'] = v.unpause(guest)
            elif v.status(guest) != 'running':
res['changed'] = True
res['msg'] = v.start(guest)
elif state == 'shutdown':
            if v.status(guest) != 'shutdown':
res['changed'] = True
res['msg'] = v.shutdown(guest)
elif state == 'destroyed':
            if v.status(guest) != 'shutdown':
res['changed'] = True
res['msg'] = v.destroy(guest)
elif state == 'paused':
            if v.status(guest) == 'running':
res['changed'] = True
res['msg'] = v.pause(guest)
else:
module.fail_json(msg="unexpected state")
return VIRT_SUCCESS, res
if autostart is not None and v.autostart(guest, autostart):
res['changed'] = True
if command:
if command in VM_COMMANDS:
if not guest:
module.fail_json(msg = "%s requires 1 argument: guest" % command)
if command == 'define':
if not xml:
module.fail_json(msg = "define requires xml argument")
try:
v.get_vm(guest)
except VMNotFound:
v.define(xml)
res = {'changed': True, 'created': guest}
return VIRT_SUCCESS, res
res = getattr(v, command)(guest)
if not isinstance(res, dict):
res = { command: res }
return VIRT_SUCCESS, res
elif hasattr(v, command):
res = getattr(v, command)()
if not isinstance(res, dict):
res = { command: res }
return VIRT_SUCCESS, res
else:
module.fail_json(msg="Command %s not recognized" % basecmd)
module.fail_json(msg="expected state or command parameter to be specified")
def main():
module = AnsibleModule(argument_spec=dict(
name = dict(aliases=['guest']),
state = dict(choices=['running', 'shutdown', 'destroyed', 'paused']),
autostart = dict(type='bool'),
command = dict(choices=ALL_COMMANDS),
uri = dict(default='qemu:///system'),
xml = dict(),
))
if not HAS_VIRT:
module.fail_json(
msg='The `libvirt` module is not importable. Check the requirements.'
)
rc = VIRT_SUCCESS
try:
rc, result = core(module)
except Exception:
e = get_exception()
module.fail_json(msg=str(e))
if rc != 0: # something went wrong emit the msg
module.fail_json(rc=rc, msg=result)
else:
module.exit_json(**result)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.pycompat24 import get_exception
if __name__ == '__main__':
main()
| gpl-3.0 |
atopuzov/nitro-python | nssrc/com/citrix/netscaler/nitro/resource/config/tm/tmsessionpolicy_tmglobal_binding.py | 3 | 5241 | #
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class tmsessionpolicy_tmglobal_binding(base_resource) :
""" Binding class showing the tmglobal that can be bound to tmsessionpolicy.
"""
def __init__(self) :
self._boundto = ""
self._priority = 0
self._activepolicy = 0
self._name = ""
self.___count = 0
@property
def boundto(self) :
ur"""The entity name to which policy is bound.
"""
try :
return self._boundto
except Exception as e:
raise e
@boundto.setter
def boundto(self, boundto) :
ur"""The entity name to which policy is bound.
"""
try :
self._boundto = boundto
except Exception as e:
raise e
@property
def name(self) :
ur"""Name of the session policy for which to display detailed information.<br/>Minimum length = 1.
"""
try :
return self._name
except Exception as e:
raise e
@name.setter
def name(self, name) :
ur"""Name of the session policy for which to display detailed information.<br/>Minimum length = 1
"""
try :
self._name = name
except Exception as e:
raise e
@property
def priority(self) :
try :
return self._priority
except Exception as e:
raise e
@property
def activepolicy(self) :
try :
return self._activepolicy
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
ur""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(tmsessionpolicy_tmglobal_binding_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.tmsessionpolicy_tmglobal_binding
except Exception as e :
raise e
def _get_object_name(self) :
ur""" Returns the value of object identifier argument
"""
try :
if self.name is not None :
return str(self.name)
return None
except Exception as e :
raise e
@classmethod
def get(cls, service, name) :
ur""" Use this API to fetch tmsessionpolicy_tmglobal_binding resources.
"""
try :
obj = tmsessionpolicy_tmglobal_binding()
obj.name = name
response = obj.get_resources(service)
return response
except Exception as e:
raise e
@classmethod
def get_filtered(cls, service, name, filter_) :
ur""" Use this API to fetch filtered set of tmsessionpolicy_tmglobal_binding resources.
Filter string should be in JSON format, eg: "port:80,servicetype:HTTP".
"""
try :
obj = tmsessionpolicy_tmglobal_binding()
obj.name = name
option_ = options()
option_.filter = filter_
response = obj.getfiltered(service, option_)
return response
except Exception as e:
raise e
@classmethod
def count(cls, service, name) :
ur""" Use this API to count tmsessionpolicy_tmglobal_binding resources configued on NetScaler.
"""
try :
obj = tmsessionpolicy_tmglobal_binding()
obj.name = name
option_ = options()
option_.count = True
response = obj.get_resources(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
@classmethod
def count_filtered(cls, service, name, filter_) :
ur""" Use this API to count the filtered set of tmsessionpolicy_tmglobal_binding resources.
Filter string should be in JSON format, eg: "port:80,servicetype:HTTP".
"""
try :
obj = tmsessionpolicy_tmglobal_binding()
obj.name = name
option_ = options()
option_.count = True
option_.filter = filter_
response = obj.getfiltered(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
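# Illustrative usage of the binding class above (``client`` is a hypothetical
# authenticated nitro_service session; the policy name is made up):
#
#   bindings = tmsessionpolicy_tmglobal_binding.get(client, "pol1")
#   total = tmsessionpolicy_tmglobal_binding.count(client, "pol1")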
class tmsessionpolicy_tmglobal_binding_response(base_response) :
def __init__(self, length=1) :
self.tmsessionpolicy_tmglobal_binding = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.tmsessionpolicy_tmglobal_binding = [tmsessionpolicy_tmglobal_binding() for _ in range(length)]
| apache-2.0 |
seungjin/app5-seungjin-net.appspot.com | dbindexer/backends.py | 74 | 18859 | from django.db import models
from django.db.models.fields import FieldDoesNotExist
from django.db.models.sql.constants import JOIN_TYPE, LHS_ALIAS, LHS_JOIN_COL, \
TABLE_NAME, RHS_JOIN_COL
from django.utils.tree import Node
from djangotoolbox.fields import ListField
from .lookups import StandardLookup
OR = 'OR'
# TODO: optimize code
class BaseResolver(object):
def __init__(self):
# mapping from lookups to indexes
self.index_map = {}
# mapping from column names to field names
self.column_to_name = {}
''' API called by resolver'''
def create_index(self, lookup):
field_to_index = self.get_field_to_index(lookup.model, lookup.field_name)
# backend doesn't know how to handle this index definition
if not field_to_index:
return
index_field = lookup.get_field_to_add(field_to_index)
config_field = index_field.item_field if \
isinstance(index_field, ListField) else index_field
if hasattr(field_to_index, 'max_length') and \
isinstance(config_field, models.CharField):
config_field.max_length = field_to_index.max_length
# don't install a field if it already exists
try:
lookup.model._meta.get_field(self.index_name(lookup))
except:
lookup.model.add_to_class(self.index_name(lookup), index_field)
self.index_map[lookup] = index_field
self.add_column_to_name(lookup.model, lookup.field_name)
else:
# makes dbindexer unit test compatible
if lookup not in self.index_map:
self.index_map[lookup] = lookup.model._meta.get_field(
self.index_name(lookup))
self.add_column_to_name(lookup.model, lookup.field_name)
def convert_insert_query(self, query):
'''Converts a database saving query.'''
for lookup in self.index_map.keys():
self._convert_insert_query(query, lookup)
def _convert_insert_query(self, query, lookup):
if not lookup.model == query.model:
return
position = self.get_query_position(query, lookup)
if position is None:
return
value = self.get_value(lookup.model, lookup.field_name, query)
value = lookup.convert_value(value)
query.values[position] = (self.get_index(lookup), value)
def convert_filters(self, query):
self._convert_filters(query, query.where)
''' helper methods '''
def _convert_filters(self, query, filters):
for index, child in enumerate(filters.children[:]):
if isinstance(child, Node):
self._convert_filters(query, child)
continue
self.convert_filter(query, filters, child, index)
def convert_filter(self, query, filters, child, index):
constraint, lookup_type, annotation, value = child
if constraint.field is None:
return
field_name = self.column_to_name.get(constraint.field.column)
if field_name and constraint.alias == \
query.table_map[query.model._meta.db_table][0]:
for lookup in self.index_map.keys():
if lookup.matches_filter(query.model, field_name, lookup_type,
value):
new_lookup_type, new_value = lookup.convert_lookup(value,
lookup_type)
index_name = self.index_name(lookup)
self._convert_filter(query, filters, child, index,
new_lookup_type, new_value, index_name)
def _convert_filter(self, query, filters, child, index, new_lookup_type,
new_value, index_name):
constraint, lookup_type, annotation, value = child
lookup_type, value = new_lookup_type, new_value
constraint.field = query.get_meta().get_field(index_name)
constraint.col = constraint.field.column
child = constraint, lookup_type, annotation, value
filters.children[index] = child
def index_name(self, lookup):
return lookup.index_name
def get_field_to_index(self, model, field_name):
try:
return model._meta.get_field(field_name)
except:
return None
def get_value(self, model, field_name, query):
field_to_index = self.get_field_to_index(model, field_name)
for query_field, value in query.values[:]:
if field_to_index == query_field:
return value
raise FieldDoesNotExist('Cannot find field in query.')
def add_column_to_name(self, model, field_name):
column_name = model._meta.get_field(field_name).column
self.column_to_name[column_name] = field_name
def get_index(self, lookup):
return self.index_map[lookup]
def get_query_position(self, query, lookup):
for index, (field, query_value) in enumerate(query.values[:]):
if field is self.get_index(lookup):
return index
return None
def unref_alias(query, alias):
table_name = query.alias_map[alias][TABLE_NAME]
query.alias_refcount[alias] -= 1
if query.alias_refcount[alias] < 1:
# Remove all information about the join
del query.alias_refcount[alias]
del query.join_map[query.rev_join_map[alias]]
del query.rev_join_map[alias]
del query.alias_map[alias]
query.table_map[table_name].remove(alias)
if len(query.table_map[table_name]) == 0:
del query.table_map[table_name]
query.used_aliases.discard(alias)
class FKNullFix(BaseResolver):
'''
Django doesn't generate correct code for ForeignKey__isnull.
It becomes a JOIN with pk__isnull which won't work on nonrel DBs,
so we rewrite the JOIN here.
'''
def create_index(self, lookup):
pass
def convert_insert_query(self, query):
pass
def convert_filter(self, query, filters, child, index):
constraint, lookup_type, annotation, value = child
if constraint.field is not None and lookup_type == 'isnull' and \
isinstance(constraint.field, models.ForeignKey):
self.fix_fk_null_filter(query, constraint)
def unref_alias(self, query, alias):
unref_alias(query, alias)
def fix_fk_null_filter(self, query, constraint):
alias = constraint.alias
table_name = query.alias_map[alias][TABLE_NAME]
lhs_join_col = query.alias_map[alias][LHS_JOIN_COL]
rhs_join_col = query.alias_map[alias][RHS_JOIN_COL]
if table_name != constraint.field.rel.to._meta.db_table or \
rhs_join_col != constraint.field.rel.to._meta.pk.column or \
lhs_join_col != constraint.field.column:
return
next_alias = query.alias_map[alias][LHS_ALIAS]
if not next_alias:
return
self.unref_alias(query, alias)
alias = next_alias
constraint.col = constraint.field.column
constraint.alias = alias
class ConstantFieldJOINResolver(BaseResolver):
def create_index(self, lookup):
if '__' in lookup.field_name:
super(ConstantFieldJOINResolver, self).create_index(lookup)
def convert_insert_query(self, query):
'''Converts a database saving query.'''
for lookup in self.index_map.keys():
if '__' in lookup.field_name:
self._convert_insert_query(query, lookup)
def convert_filter(self, query, filters, child, index):
constraint, lookup_type, annotation, value = child
field_chain = self.get_field_chain(query, constraint)
if field_chain is None:
return
for lookup in self.index_map.keys():
if lookup.matches_filter(query.model, field_chain, lookup_type,
value):
self.resolve_join(query, child)
new_lookup_type, new_value = lookup.convert_lookup(value,
lookup_type)
index_name = self.index_name(lookup)
self._convert_filter(query, filters, child, index,
new_lookup_type, new_value, index_name)
def get_field_to_index(self, model, field_name):
model = self.get_model_chain(model, field_name)[-1]
field_name = field_name.split('__')[-1]
return super(ConstantFieldJOINResolver, self).get_field_to_index(model,
field_name)
def get_value(self, model, field_name, query):
value = super(ConstantFieldJOINResolver, self).get_value(model,
field_name.split('__')[0],
query)
if value is not None:
value = self.get_target_value(model, field_name, value)
return value
def get_field_chain(self, query, constraint):
if constraint.field is None:
return
column_index = self.get_column_index(query, constraint)
return self.column_to_name.get(column_index)
def get_model_chain(self, model, field_chain):
model_chain = [model, ]
for value in field_chain.split('__')[:-1]:
model = model._meta.get_field(value).rel.to
model_chain.append(model)
return model_chain
def get_target_value(self, start_model, field_chain, pk):
fields = field_chain.split('__')
foreign_key = start_model._meta.get_field(fields[0])
if not foreign_key.rel:
# field isn't a related one, so return the value itself
return pk
target_model = foreign_key.rel.to
foreignkey = target_model.objects.all().get(pk=pk)
for value in fields[1:-1]:
foreignkey = getattr(foreignkey, value)
if isinstance(foreignkey._meta.get_field(fields[-1]), models.ForeignKey):
return getattr(foreignkey, '%s_id' % fields[-1])
else:
return getattr(foreignkey, fields[-1])
def add_column_to_name(self, model, field_name):
model_chain = self.get_model_chain(model, field_name)
column_chain = ''
field_names = field_name.split('__')
for model, name in zip(model_chain, field_names):
column_chain += model._meta.get_field(name).column + '__'
self.column_to_name[column_chain[:-2]] = field_name
def unref_alias(self, query, alias):
unref_alias(query, alias)
def get_column_index(self, query, constraint):
if constraint.field:
column_chain = constraint.field.column
alias = constraint.alias
while alias:
join = query.alias_map.get(alias)
if join and join[JOIN_TYPE] == 'INNER JOIN':
column_chain += '__' + join[LHS_JOIN_COL]
alias = query.alias_map[alias][LHS_ALIAS]
else:
alias = None
return '__'.join(reversed(column_chain.split('__')))
def resolve_join(self, query, child):
constraint, lookup_type, annotation, value = child
if not constraint.field:
return
alias = constraint.alias
while True:
next_alias = query.alias_map[alias][LHS_ALIAS]
if not next_alias:
break
self.unref_alias(query, alias)
alias = next_alias
constraint.alias = alias
# TODO: distinguish in memory joins from standard joins somehow
class InMemoryJOINResolver(ConstantFieldJOINResolver):
def __init__(self):
self.field_chains = []
super(InMemoryJOINResolver, self).__init__()
def create_index(self, lookup):
if '__' in lookup.field_name:
field_to_index = self.get_field_to_index(lookup.model, lookup.field_name)
if not field_to_index:
return
# save old column_to_name so we can make in memory queries later on
self.add_column_to_name(lookup.model, lookup.field_name)
# don't add an extra field for standard lookups!
if isinstance(lookup, StandardLookup):
return
# install lookup on target model
model = self.get_model_chain(lookup.model, lookup.field_name)[-1]
lookup.model = model
lookup.field_name = lookup.field_name.split('__')[-1]
super(ConstantFieldJOINResolver, self).create_index(lookup)
def convert_insert_query(self, query):
super(ConstantFieldJOINResolver, self).convert_insert_query(query)
def _convert_filters(self, query, filters):
# or queries are not supported for in-memory-JOINs
if self.contains_OR(query.where, OR):
return
# start with the deepest JOIN level filter!
all_filters = self.get_all_filters(filters)
all_filters.sort(key=lambda item: self.get_field_chain(query, item[1][0]) and \
-len(self.get_field_chain(query, item[1][0])) or 0)
for filters, child, index in all_filters:
# check if convert_filter removed a given child from the where-tree
if not self.contains_child(query.where, child):
continue
self.convert_filter(query, filters, child, index)
def convert_filter(self, query, filters, child, index):
constraint, lookup_type, annotation, value = child
field_chain = self.get_field_chain(query, constraint)
if field_chain is None:
return
if '__' not in field_chain:
return super(ConstantFieldJOINResolver, self).convert_filter(query,
filters, child, index)
pks = self.get_pks(query, field_chain, lookup_type, value)
self.resolve_join(query, child)
self._convert_filter(query, filters, child, index, 'in',
(pk for pk in pks), field_chain.split('__')[0])
def tree_contains(self, filters, to_find, func):
result = False
for child in filters.children[:]:
if func(child, to_find):
result = True
break
if isinstance(child, Node):
result = self.tree_contains(child, to_find, func)
if result:
break
return result
def contains_OR(self, filters, or_):
return self.tree_contains(filters, or_,
lambda c, f: isinstance(c, Node) and c.connector == f)
def contains_child(self, filters, to_find):
return self.tree_contains(filters, to_find, lambda c, f: c is f)
def get_all_filters(self, filters):
all_filters = []
for index, child in enumerate(filters.children[:]):
if isinstance(child, Node):
all_filters.extend(self.get_all_filters(child))
continue
all_filters.append((filters, child, index))
return all_filters
def index_name(self, lookup):
# use another index_name to avoid conflicts with lookups defined on the
# target model which are handled by the BaseBackend
return lookup.index_name + '_in_memory_join'
def get_pks(self, query, field_chain, lookup_type, value):
model_chain = self.get_model_chain(query.model, field_chain)
first_lookup = {'%s__%s' %(field_chain.rsplit('__', 1)[-1],
lookup_type): value}
self.combine_with_same_level_filter(first_lookup, query, field_chain)
pks = model_chain[-1].objects.all().filter(**first_lookup).values_list(
'id', flat=True)
chains = [field_chain.rsplit('__', i+1)[0]
for i in range(field_chain.count('__'))]
lookup = {}
for model, chain in reversed(zip(model_chain[1:-1], chains[:-1])):
lookup.update({'%s__%s' %(chain.rsplit('__', 1)[-1], 'in'):
(pk for pk in pks)})
self.combine_with_same_level_filter(lookup, query, chain)
pks = model.objects.all().filter(**lookup).values_list('id', flat=True)
return pks
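# Illustrative walk performed by get_pks() for a hypothetical chain
# 'author__country__name': Country is filtered by name to get pks, Author is
# then filtered by country__in=pks, and the outer query finally receives an
# 'author__in' filter with the resulting pks.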
def combine_with_same_level_filter(self, lookup, query, field_chain):
lookup_updates = {}
field_chains = self.get_all_field_chains(query, query.where)
for chain, child in field_chains.items():
if chain == field_chain:
continue
if field_chain.rsplit('__', 1)[0] == chain.rsplit('__', 1)[0]:
lookup_updates['%s__%s' % (chain.rsplit('__', 1)[1], child[1])] = child[3]
self.remove_child(query.where, child)
self.resolve_join(query, child)
# TODO: update query.alias_refcount correctly!
lookup.update(lookup_updates)
def remove_child(self, filters, to_remove):
''' Removes a child object from filters. If filters doesn't contain
children afterwards, filters will be removed from its parent. '''
for child in filters.children[:]:
if child is to_remove:
self._remove_child(filters, to_remove)
return
elif isinstance(child, Node):
self.remove_child(child, to_remove)
if hasattr(child, 'children') and not child.children:
self.remove_child(filters, child)
def _remove_child(self, filters, to_remove):
result = []
for child in filters.children[:]:
if child is to_remove:
continue
result.append(child)
filters.children = result
def get_all_field_chains(self, query, filters):
''' Returns a dict mapping from field_chains to the corresponding child.'''
field_chains = {}
all_filters = self.get_all_filters(filters)
for filters, child, index in all_filters:
field_chain = self.get_field_chain(query, child[0])
# field_chain can be None if the user didn't specify an index for it
if field_chain:
field_chains[field_chain] = child
return field_chains | bsd-3-clause |
KaranToor/MA450 | google-cloud-sdk/platform/gsutil/third_party/boto/boto/ec2/instancestatus.py | 181 | 6854 | # Copyright (c) 2012 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2012 Amazon.com, Inc. or its affiliates.
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
class Details(dict):
"""
A dict object that contains name/value pairs which provide
more detailed information about the status of the system
or the instance.
"""
def startElement(self, name, attrs, connection):
return None
def endElement(self, name, value, connection):
if name == 'name':
self._name = value
elif name == 'status':
self[self._name] = value
else:
setattr(self, name, value)
class Event(object):
"""
A status event for an instance.
:ivar code: A string indicating the event type.
:ivar description: A string describing the reason for the event.
:ivar not_before: A datestring describing the earliest time for
the event.
:ivar not_after: A datestring describing the latest time for
the event.
"""
def __init__(self, code=None, description=None,
not_before=None, not_after=None):
self.code = code
self.description = description
self.not_before = not_before
self.not_after = not_after
def __repr__(self):
return 'Event:%s' % self.code
def startElement(self, name, attrs, connection):
return None
def endElement(self, name, value, connection):
if name == 'code':
self.code = value
elif name == 'description':
self.description = value
elif name == 'notBefore':
self.not_before = value
elif name == 'notAfter':
self.not_after = value
else:
setattr(self, name, value)
class Status(object):
"""
A generic Status object used for system status and instance status.
:ivar status: A string indicating overall status.
:ivar details: A dict containing name-value pairs which provide
more details about the current status.
"""
def __init__(self, status=None, details=None):
self.status = status
if not details:
details = Details()
self.details = details
def __repr__(self):
return 'Status:%s' % self.status
def startElement(self, name, attrs, connection):
if name == 'details':
return self.details
return None
def endElement(self, name, value, connection):
if name == 'status':
self.status = value
else:
setattr(self, name, value)
class EventSet(list):
def startElement(self, name, attrs, connection):
if name == 'item':
event = Event()
self.append(event)
return event
else:
return None
def endElement(self, name, value, connection):
setattr(self, name, value)
class InstanceStatus(object):
"""
Represents an EC2 Instance status as reported by
DescribeInstanceStatus request.
:ivar id: The instance identifier.
:ivar zone: The availability zone of the instance.
:ivar events: A list of events relevant to the instance.
:ivar state_code: An integer representing the current state
of the instance.
:ivar state_name: A string describing the current state
of the instance.
:ivar system_status: A Status object that reports impaired
functionality that stems from issues related to the systems
that support an instance, such as hardware failures
and network connectivity problems.
:ivar instance_status: A Status object that reports impaired
functionality that arises from problems internal to the instance.
"""
def __init__(self, id=None, zone=None, events=None,
state_code=None, state_name=None):
self.id = id
self.zone = zone
self.events = events
self.state_code = state_code
self.state_name = state_name
self.system_status = Status()
self.instance_status = Status()
def __repr__(self):
return 'InstanceStatus:%s' % self.id
def startElement(self, name, attrs, connection):
if name == 'eventsSet':
self.events = EventSet()
return self.events
elif name == 'systemStatus':
return self.system_status
elif name == 'instanceStatus':
return self.instance_status
else:
return None
def endElement(self, name, value, connection):
if name == 'instanceId':
self.id = value
elif name == 'availabilityZone':
self.zone = value
elif name == 'code':
self.state_code = int(value)
elif name == 'name':
self.state_name = value
else:
setattr(self, name, value)
class InstanceStatusSet(list):
"""
A list object that contains the results of a call to
DescribeInstanceStatus request. Each element of the
list will be an InstanceStatus object.
:ivar next_token: If the response was truncated by
the EC2 service, the next_token attribute of the
object will contain the string that needs to be
passed in to the next request to retrieve the next
set of results.
"""
def __init__(self, connection=None):
list.__init__(self)
self.connection = connection
self.next_token = None
def startElement(self, name, attrs, connection):
if name == 'item':
status = InstanceStatus()
self.append(status)
return status
else:
return None
def endElement(self, name, value, connection):
if name == 'nextToken':
self.next_token = value
setattr(self, name, value)
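# Illustrative pagination loop (``conn`` is a hypothetical boto EC2Connection;
# the keyword name assumes get_all_instance_status() accepts next_token):
#
#   statuses = conn.get_all_instance_status()
#   while statuses.next_token:
#       statuses = conn.get_all_instance_status(next_token=statuses.next_token)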
| apache-2.0 |
mach6/selenium | py/test/unit/selenium/webdriver/remote/test_new_session.py | 9 | 2867 | # Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from copy import deepcopy
from importlib import import_module
import pytest
from selenium.webdriver import DesiredCapabilities
from selenium.webdriver.remote.command import Command
from selenium.webdriver.remote.webdriver import WebDriver
def test_converts_oss_capabilities_to_w3c(mocker):
mock = mocker.patch('selenium.webdriver.remote.webdriver.WebDriver.execute')
oss_caps = {'platform': 'WINDOWS', 'version': '11', 'acceptSslCerts': True}
w3c_caps = {'platformName': 'windows', 'browserVersion': '11', 'acceptInsecureCerts': True}
WebDriver(desired_capabilities=deepcopy(oss_caps))
expected_params = {'capabilities': {'firstMatch': [{}], 'alwaysMatch': w3c_caps},
'desiredCapabilities': oss_caps}
mock.assert_called_with(Command.NEW_SESSION, expected_params)
def test_converts_proxy_type_value_to_lowercase_for_w3c(mocker):
mock = mocker.patch('selenium.webdriver.remote.webdriver.WebDriver.execute')
oss_caps = {'proxy': {'proxyType': 'MANUAL', 'httpProxy': 'foo'}}
w3c_caps = {'proxy': {'proxyType': 'manual', 'httpProxy': 'foo'}}
WebDriver(desired_capabilities=deepcopy(oss_caps))
expected_params = {'capabilities': {'firstMatch': [{}], 'alwaysMatch': w3c_caps},
'desiredCapabilities': oss_caps}
mock.assert_called_with(Command.NEW_SESSION, expected_params)
@pytest.mark.parametrize('browser_name', ['firefox', 'chrome', 'ie', 'opera'])
def test_accepts_firefox_options_to_remote_driver(mocker, browser_name):
options = import_module('selenium.webdriver.{}.options'.format(browser_name))
caps_name = browser_name.upper() if browser_name != 'ie' else 'INTERNETEXPLORER'
mock = mocker.patch('selenium.webdriver.remote.webdriver.WebDriver.start_session')
opts = options.Options()
opts.add_argument('foo')
expected_caps = getattr(DesiredCapabilities, caps_name)
caps = expected_caps.copy()
expected_caps.update(opts.to_capabilities())
WebDriver(desired_capabilities=caps, options=opts)
mock.assert_called_with(expected_caps, None)
| apache-2.0 |
SaganBolliger/nupic | src/nupic/swarming/hypersearch/experimentutils.py | 40 | 9312 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
# This file contains utility functions that are used
# internally by the prediction framework. It should not be
# imported by description files. (see helpers.py)
from nupic.support.enum import Enum
# TODO: This file contains duplicates of 'InferenceElement', 'InferenceType',
# and 'ModelResult' copied from nupic.frameworks.opf
# Will want to change this in the future!
class InferenceElement(Enum(
prediction="prediction",
encodings="encodings",
classification="classification",
anomalyScore="anomalyScore",
anomalyLabel="anomalyLabel",
classConfidences="classConfidences",
multiStepPredictions="multiStepPredictions",
multiStepBestPredictions="multiStepBestPredictions",
multiStepBucketLikelihoods="multiStepBucketLikelihoods",
multiStepBucketValues="multiStepBucketValues",
)):
__inferenceInputMap = {
"prediction": "dataRow",
"encodings": "dataEncodings",
"classification": "category",
"classConfidences": "category",
"multiStepPredictions": "dataDict",
"multiStepBestPredictions": "dataDict",
}
__temporalInferenceElements = None
@staticmethod
def getInputElement(inferenceElement):
""" Get the sensor input element that corresponds to the given inference
element. This is mainly used for metrics and prediction logging
"""
return InferenceElement.__inferenceInputMap.get(inferenceElement, None)
@staticmethod
def isTemporal(inferenceElement):
""" Returns True if the inference from this timestep is predicted the input
for the NEXT timestep.
NOTE: This should only be checked IF THE MODEL'S INFERENCE TYPE IS ALSO
TEMPORAL. That is, a temporal model CAN have non-temporal inference elements,
but a non-temporal model CANNOT have temporal inference elements
"""
if InferenceElement.__temporalInferenceElements is None:
InferenceElement.__temporalInferenceElements = \
set([InferenceElement.prediction])
return inferenceElement in InferenceElement.__temporalInferenceElements
@staticmethod
def getTemporalDelay(inferenceElement, key=None):
""" Returns the number of records that elapse between when an inference is
made and when the corresponding input record will appear. For example, a
multistep prediction for 3 timesteps out will have a delay of 3
Parameters:
-----------------------------------------------------------------------
inferenceElement: The InferenceElement value being delayed
key: If the inference is a dictionary type, this specifies
key for the sub-inference that is being delayed
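Example (illustrative): a 3-steps-ahead multistep prediction is delayed
by 3 records:
InferenceElement.getTemporalDelay(InferenceElement.multiStepPredictions, key='3') # -> 3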
"""
# -----------------------------------------------------------------------
# For next step prediction, we shift by 1
if inferenceElement in (InferenceElement.prediction,
InferenceElement.encodings):
return 1
# -----------------------------------------------------------------------
# For classification, anomaly scores, the inferences immediately succeed the
# inputs
if inferenceElement in (InferenceElement.anomalyScore,
InferenceElement.anomalyLabel,
InferenceElement.classification,
InferenceElement.classConfidences):
return 0
# -----------------------------------------------------------------------
# For multistep prediction, the delay is based on the key in the inference
# dictionary
if inferenceElement in (InferenceElement.multiStepPredictions,
InferenceElement.multiStepBestPredictions):
return int(key)
# -----------------------------------------------------------------------
# default: return 0
return 0
@staticmethod
def getMaxDelay(inferences):
"""
Returns the maximum delay for the InferenceElements in the inference
dictionary
Parameters:
-----------------------------------------------------------------------
inferences: A dictionary where the keys are InferenceElements
"""
maxDelay = 0
for inferenceElement, inference in inferences.iteritems():
if isinstance(inference, dict):
for key in inference.iterkeys():
maxDelay = max(InferenceElement.getTemporalDelay(inferenceElement,
key),
maxDelay)
else:
maxDelay = max(InferenceElement.getTemporalDelay(inferenceElement),
maxDelay)
return maxDelay
class InferenceType(Enum("TemporalNextStep",
"TemporalClassification",
"NontemporalClassification",
"TemporalAnomaly",
"NontemporalAnomaly",
"TemporalMultiStep",
"NontemporalMultiStep")):
__temporalInferenceTypes = None
@staticmethod
def isTemporal(inferenceType):
""" Returns True if the inference type is 'temporal', i.e. requires a
temporal pooler in the network.
"""
if InferenceType.__temporalInferenceTypes is None:
InferenceType.__temporalInferenceTypes = \
set([InferenceType.TemporalNextStep,
InferenceType.TemporalClassification,
InferenceType.TemporalAnomaly,
InferenceType.TemporalMultiStep,
InferenceType.NontemporalMultiStep])
return inferenceType in InferenceType.__temporalInferenceTypes
# ModelResult - A structure that contains the input to a model and the resulting
# predictions as well as any related information related to the predictions.
#
# predictionNumber: The prediction number. This should start at 0 and increase
# with each new ModelResult.
#
# rawInput: The input record, as input by the user. This is a dictionary-like
# object which has attributes whose names are the same as the input
# field names
#
# sensorInput: A SensorInput object that represents the input record, as it
# appears right before it is encoded. This may differ from the raw
# input in that certain input fields (such as DateTime fields) may
# be split into multiple encoded fields
#
# inferences: A dictionary of inferences. Each key is a InferenceType constant
# which corresponds to the type of prediction being made. Each value
# is a ___ element that corresponds to the actual prediction by the
# model, including auxiliary information; TODO: fix description.
#
# metrics: The metrics corresponding to the most-recent prediction/ground
# truth pair
class ModelResult(object):
__slots__= ("predictionNumber", "rawInput", "sensorInput", "inferences",
"metrics", "predictedFieldIdx", "predictedFieldName")
def __init__(self,
predictionNumber=None,
rawInput=None,
sensorInput=None,
inferences=None,
metrics=None,
predictedFieldIdx=None,
predictedFieldName=None):
self.predictionNumber = predictionNumber
self.rawInput = rawInput
self.sensorInput = sensorInput
self.inferences = inferences
self.metrics = metrics
self.predictedFieldIdx = predictedFieldIdx
self.predictedFieldName = predictedFieldName
def __repr__(self):
return ("ModelResult("
"\tpredictionNumber={0}\n"
"\trawInput={1}\n"
"\tsensorInput={2}\n"
"\tinferences={3}\n"
"\tmetrics={4}\n"
"\tpredictedFieldIdx={5}\n"
"\tpredictedFieldName={6}\n"
")").format(self.predictionNumber,
self.rawInput,
self.sensorInput,
self.inferences,
self.metrics,
self.predictedFieldIdx,
self.predictedFieldName)
| agpl-3.0 |
ak2703/edx-platform | lms/djangoapps/certificates/tests/test_create_fake_cert.py | 107 | 2003 | """Tests for the create_fake_certs management command. """
from django.test import TestCase
from django.core.management.base import CommandError
from nose.plugins.attrib import attr
from opaque_keys.edx.locator import CourseLocator
from student.tests.factories import UserFactory
from certificates.management.commands import create_fake_cert
from certificates.models import GeneratedCertificate
@attr('shard_1')
class CreateFakeCertTest(TestCase):
"""Tests for the create_fake_certs management command. """
USERNAME = "test"
COURSE_KEY = CourseLocator(org='edX', course='DemoX', run='Demo_Course')
def setUp(self):
super(CreateFakeCertTest, self).setUp()
self.user = UserFactory.create(username=self.USERNAME)
def test_create_fake_cert(self):
# No existing cert, so create it
self._run_command(
self.USERNAME,
unicode(self.COURSE_KEY),
cert_mode='verified',
grade='0.89'
)
cert = GeneratedCertificate.objects.get(user=self.user, course_id=self.COURSE_KEY)
self.assertEqual(cert.status, 'downloadable')
self.assertEqual(cert.mode, 'verified')
self.assertEqual(cert.grade, '0.89')
self.assertEqual(cert.download_uuid, 'test')
self.assertEqual(cert.download_url, 'http://www.example.com')
# Cert already exists; modify it
self._run_command(
self.USERNAME,
unicode(self.COURSE_KEY),
cert_mode='honor'
)
cert = GeneratedCertificate.objects.get(user=self.user, course_id=self.COURSE_KEY)
self.assertEqual(cert.mode, 'honor')
def test_too_few_args(self):
with self.assertRaisesRegexp(CommandError, 'Usage'):
self._run_command(self.USERNAME)
def _run_command(self, *args, **kwargs):
"""Run the management command to generate a fake cert. """
command = create_fake_cert.Command()
return command.handle(*args, **kwargs)
| agpl-3.0 |
SohKai/ChronoLogger | web/flask/lib/python2.7/site-packages/whoosh/spelling.py | 39 | 12750 | # Copyright 2007 Matt Chaput. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY MATT CHAPUT ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
# EVENT SHALL MATT CHAPUT OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation are
# those of the authors and should not be interpreted as representing official
# policies, either expressed or implied, of Matt Chaput.
"""This module contains helper functions for correcting typos in user queries.
"""
from collections import defaultdict
from heapq import heappush, heapreplace
from whoosh import analysis, fields, highlight, query, scoring
from whoosh.automata import fst
from whoosh.compat import xrange, string_type
from whoosh.support.levenshtein import distance
from whoosh.util.text import utf8encode
# Corrector objects
class Corrector(object):
"""Base class for spelling correction objects. Concrete sub-classes should
implement the ``_suggestions`` method.
"""
def suggest(self, text, limit=5, maxdist=2, prefix=0):
"""
:param text: the text to check. This word will **not** be added to the
suggestions, even if it appears in the word graph.
:param limit: only return up to this many suggestions. If there are not
enough terms in the field within ``maxdist`` of the given word, the
returned list will be shorter than this number.
:param maxdist: the largest edit distance from the given word to look
at. Values higher than 2 are not very effective or efficient.
:param prefix: require suggestions to share a prefix of this length
with the given word. This is often justifiable since most
misspellings do not involve the first letter of the word. Using a
prefix dramatically decreases the time it takes to generate the
list of words.
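Example (illustrative; ``corrector`` stands for any concrete
sub-class instance, e.g. a :class:`ReaderCorrector`):
>>> corrector.suggest("speling", limit=3, maxdist=2)
["spelling", ...]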
"""
_suggestions = self._suggestions
heap = []
seen = set([text])
for k in xrange(1, maxdist + 1):
for item in _suggestions(text, k, prefix):
if item[1] in seen:
continue
seen.add(item[1])
# Note that the *higher* scores (item[0]) are better!
if len(heap) < limit:
heappush(heap, item)
elif item > heap[0]:
heapreplace(heap, item)
# If the heap is already at the required length, don't bother going
# to a higher edit distance
if len(heap) >= limit:
break
sugs = sorted(heap, key=lambda item: (0 - item[0], item[1]))
return [sug for _, sug in sugs]
def _suggestions(self, text, maxdist, prefix):
"""Low-level method that yields a series of (score, "suggestion")
tuples.
:param text: the text to check.
:param maxdist: the maximum edit distance.
:param prefix: require suggestions to share a prefix of this length
with the given word.
"""
raise NotImplementedError
class ReaderCorrector(Corrector):
"""Suggests corrections based on the content of a field in a reader.
Ranks suggestions by the edit distance, then by highest to lowest
frequency.
"""
def __init__(self, reader, fieldname):
self.reader = reader
self.fieldname = fieldname
def _suggestions(self, text, maxdist, prefix):
fieldname = self.fieldname
freq = self.reader.frequency
for sug in self.reader.terms_within(fieldname, text, maxdist,
prefix=prefix):
# Higher scores are better, so negate the distance and frequency
# TODO: store spelling frequencies in the graph
f = freq(fieldname, sug) or 1
score = 0 - (maxdist + (1.0 / f * 0.5))
yield (score, sug)
class GraphCorrector(Corrector):
"""Suggests corrections based on the content of a raw
:class:`whoosh.automata.fst.GraphReader` object.
By default ranks suggestions based on the edit distance.
"""
def __init__(self, graph):
self.graph = graph
def _suggestions(self, text, maxdist, prefix):
for sug in fst.within(self.graph, text, k=maxdist, prefix=prefix):
# Higher scores are better, so negate the edit distance
yield (0 - maxdist, sug)
class MultiCorrector(Corrector):
"""Merges suggestions from a list of sub-correctors.
"""
def __init__(self, correctors):
self.correctors = correctors
def _suggestions(self, text, maxdist, prefix):
for corr in self.correctors:
for item in corr._suggestions(text, maxdist, prefix):
yield item
def wordlist_to_graph_file(wordlist, dbfile, fieldname="_", strip=True):
"""Writes a word graph file from a list of words.
>>> # Open a word list file with one word on each line, and write the
>>> # word graph to a graph file
>>> wordlist_to_graph_file("mywords.txt", "mywords.dawg")
:param wordlist: an iterable containing the words for the graph. The words
must be in sorted order.
:param dbfile: a filename string or file-like object to write the word
graph to. This function will close the file.
"""
from whoosh.filedb.structfile import StructFile
if isinstance(dbfile, string_type):
dbfile = open(dbfile, "wb")
if not isinstance(dbfile, StructFile):
dbfile = StructFile(dbfile)
gw = fst.GraphWriter(dbfile)
gw.start_field(fieldname)
for word in wordlist:
if strip:
word = word.strip()
gw.insert(word)
gw.finish_field()
gw.close()
# Query correction
class Correction(object):
"""Represents the corrected version of a user query string. Has the
following attributes:
``query``
The corrected :class:`whoosh.query.Query` object.
``string``
The corrected user query string.
``original_query``
The original :class:`whoosh.query.Query` object that was corrected.
``original_string``
The original user query string.
``tokens``
A list of token objects representing the corrected words.
You can also use the :meth:`Correction.format_string` method to reformat the
corrected query string using a :class:`whoosh.highlight.Formatter` class.
For example, to display the corrected query string as HTML with the
changed words emphasized::
from whoosh import highlight
correction = mysearcher.correct_query(q, qstring)
hf = highlight.HtmlFormatter(classname="change")
html = correction.format_string(hf)
"""
def __init__(self, q, qstring, corr_q, tokens):
self.original_query = q
self.query = corr_q
self.original_string = qstring
self.tokens = tokens
if self.original_string:
self.string = self.format_string(highlight.NullFormatter())
else:
self.string = ''
def __repr__(self):
return "%s(%r, %r)" % (self.__class__.__name__, self.query,
self.string)
def format_string(self, formatter):
"""
Highlights the corrected words in the original query string using the
given :class:`~whoosh.highlight.Formatter`.
:param formatter: A :class:`whoosh.highlight.Formatter` instance.
:return: the output of the formatter (usually a string).
"""
if not self.original_string:
return ''
if isinstance(formatter, type):
formatter = formatter()
fragment = highlight.Fragment(self.original_string, self.tokens)
return formatter.format_fragment(fragment, replace=True)
# QueryCorrector objects
class QueryCorrector(object):
"""Base class for objects that correct words in a user query.
"""
def correct_query(self, q, qstring):
"""Returns a :class:`Correction` object representing the corrected
form of the given query.
:param q: the original :class:`whoosh.query.Query` tree to be
corrected.
:param qstring: the original user query. This may be None if the
original query string is not available, in which case the
``Correction.string`` attribute will also be None.
:rtype: :class:`Correction`
"""
raise NotImplementedError
class SimpleQueryCorrector(QueryCorrector):
"""A simple query corrector based on a mapping of field names to
:class:`Corrector` objects, and a list of ``("fieldname", "text")`` tuples
to correct. Any terms in the query that appear in the list of term tuples are
corrected using the appropriate corrector.
"""
def __init__(self, correctors, terms, prefix=0, maxdist=2):
"""
:param correctors: a dictionary mapping field names to
:class:`Corrector` objects.
:param terms: a sequence of ``("fieldname", "text")`` tuples
representing terms to be corrected.
:param prefix: suggested replacement words must share this number of
initial characters with the original word. Increasing this even to
just ``1`` can dramatically speed up suggestions, and may be
justifiable since spelling mistakes rarely involve the first
letter of a word.
:param maxdist: the maximum number of "edits" (insertions, deletions,
subsitutions, or transpositions of letters) allowed between the
original word and any suggestion. Values higher than ``2`` may be
slow.
"""
self.correctors = correctors
self.termset = frozenset(terms)
self.prefix = prefix
self.maxdist = maxdist
def correct_query(self, q, qstring):
correctors = self.correctors
termset = self.termset
prefix = self.prefix
maxdist = self.maxdist
# A list of tokens that were changed by a corrector
corrected_tokens = []
# The corrected query tree. We don't need to deepcopy the original
# because we use Query.replace() to find-and-replace the corrected
# words and it returns a copy of the query tree.
corrected_q = q
# For every word in the original query...
# Note we can't put these in a set, because we must preserve WHERE
# in the query each token occurred so we can format them later
for token in q.all_tokens():
fname = token.fieldname
# If this is one of the words we're supposed to correct...
if (fname, token.text) in termset:
sugs = correctors[fname].suggest(token.text, prefix=prefix,
maxdist=maxdist)
if sugs:
# This is a "simple" corrector, so we just pick the first
# suggestion :/
sug = sugs[0]
# Return a new copy of the original query with this word
# replaced by the correction
corrected_q = corrected_q.replace(token.fieldname,
token.text, sug)
# Add the token to the list of corrected tokens (for the
# formatter to use later)
token.original = token.text
token.text = sug
corrected_tokens.append(token)
return Correction(q, qstring, corrected_q, corrected_tokens)
| mit |
TeamExodus/external_chromium_org | third_party/tlslite/tlslite/utils/openssl_rsakey.py | 200 | 4670 | # Author: Trevor Perrin
# See the LICENSE file for legal information regarding use of this file.
"""OpenSSL/M2Crypto RSA implementation."""
from .cryptomath import *
from .rsakey import *
from .python_rsakey import Python_RSAKey
#copied from M2Crypto.util.py, so when we load the local copy of m2
#we can still use it
def password_callback(v, prompt1='Enter private key passphrase:',
prompt2='Verify passphrase:'):
from getpass import getpass
while 1:
try:
p1=getpass(prompt1)
if v:
p2=getpass(prompt2)
if p1==p2:
break
else:
break
except KeyboardInterrupt:
return None
return p1
if m2cryptoLoaded:
class OpenSSL_RSAKey(RSAKey):
def __init__(self, n=0, e=0):
self.rsa = None
self._hasPrivateKey = False
if (n and not e) or (e and not n):
raise AssertionError()
if n and e:
self.rsa = m2.rsa_new()
m2.rsa_set_n(self.rsa, numberToMPI(n))
m2.rsa_set_e(self.rsa, numberToMPI(e))
def __del__(self):
if self.rsa:
m2.rsa_free(self.rsa)
def __getattr__(self, name):
if name == 'e':
if not self.rsa:
return 0
return mpiToNumber(m2.rsa_get_e(self.rsa))
elif name == 'n':
if not self.rsa:
return 0
return mpiToNumber(m2.rsa_get_n(self.rsa))
else:
raise AttributeError
def hasPrivateKey(self):
return self._hasPrivateKey
def _rawPrivateKeyOp(self, m):
b = numberToByteArray(m, numBytes(self.n))
s = m2.rsa_private_encrypt(self.rsa, bytes(b), m2.no_padding)
c = bytesToNumber(bytearray(s))
return c
def _rawPublicKeyOp(self, c):
b = numberToByteArray(c, numBytes(self.n))
s = m2.rsa_public_decrypt(self.rsa, bytes(b), m2.no_padding)
m = bytesToNumber(bytearray(s))
return m
def acceptsPassword(self): return True
def write(self, password=None):
bio = m2.bio_new(m2.bio_s_mem())
if self._hasPrivateKey:
if password:
def f(v): return password
m2.rsa_write_key(self.rsa, bio, m2.des_ede_cbc(), f)
else:
def f(): pass
m2.rsa_write_key_no_cipher(self.rsa, bio, f)
else:
if password:
raise AssertionError()
m2.rsa_write_pub_key(self.rsa, bio)
s = m2.bio_read(bio, m2.bio_ctrl_pending(bio))
m2.bio_free(bio)
return s
def generate(bits):
key = OpenSSL_RSAKey()
def f():pass
key.rsa = m2.rsa_generate_key(bits, 3, f)
key._hasPrivateKey = True
return key
generate = staticmethod(generate)
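# Illustrative (requires M2Crypto to be loaded): generate a 2048-bit
# key and serialize it as unencrypted PEM:
#
#   key = OpenSSL_RSAKey.generate(2048)
#   pem = key.write()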
def parse(s, passwordCallback=None):
# Skip forward to the first PEM header
start = s.find("-----BEGIN ")
if start == -1:
raise SyntaxError()
s = s[start:]
if s.startswith("-----BEGIN "):
if passwordCallback==None:
callback = password_callback
else:
def f(v, prompt1=None, prompt2=None):
return passwordCallback()
callback = f
bio = m2.bio_new(m2.bio_s_mem())
try:
m2.bio_write(bio, s)
key = OpenSSL_RSAKey()
if s.startswith("-----BEGIN RSA PRIVATE KEY-----"):
def f():pass
key.rsa = m2.rsa_read_key(bio, callback)
if key.rsa == None:
raise SyntaxError()
key._hasPrivateKey = True
elif s.startswith("-----BEGIN PUBLIC KEY-----"):
key.rsa = m2.rsa_read_pub_key(bio)
if key.rsa == None:
raise SyntaxError()
key._hasPrivateKey = False
else:
raise SyntaxError()
return key
finally:
m2.bio_free(bio)
else:
raise SyntaxError()
parse = staticmethod(parse)
| bsd-3-clause |
rldhont/Quantum-GIS | tests/src/python/test_qgsserver_projectutils.py | 27 | 2853 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsServerProject.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Paul Blottiere'
__date__ = '26/12/2016'
__copyright__ = 'Copyright 2016, The QGIS Project'
import os
from qgis.server import QgsServerProjectUtils
from qgis.core import QgsProject
from qgis.testing import unittest
from utilities import unitTestDataPath
class TestQgsServerProjectUtils(unittest.TestCase):
def setUp(self):
self.testdata_path = unitTestDataPath('qgis_server_project') + '/'
self.prj = QgsProject()
self.prjPath = os.path.join(self.testdata_path, "project.qgs")
self.prj.read(self.prjPath)
self.prj2 = QgsProject()
self.prj2Path = os.path.join(self.testdata_path, "project2.qgs")
self.prj2.read(self.prj2Path)
def tearDown(self):
pass
def test_size(self):
self.assertEqual(QgsServerProjectUtils.wmsMaxWidth(self.prj), 400)
self.assertEqual(QgsServerProjectUtils.wmsMaxHeight(self.prj), 500)
def test_url(self):
self.assertEqual(QgsServerProjectUtils.wmsServiceUrl(self.prj), "my_wms_advertised_url")
self.assertEqual(QgsServerProjectUtils.wcsServiceUrl(self.prj), "my_wcs_advertised_url")
self.assertEqual(QgsServerProjectUtils.wfsServiceUrl(self.prj), "my_wfs_advertised_url")
def test_wmsuselayerids(self):
self.assertEqual(QgsServerProjectUtils.wmsUseLayerIds(self.prj), False)
self.assertEqual(QgsServerProjectUtils.wmsUseLayerIds(self.prj2), True)
def test_wmsrestrictedlayers(self):
# retrieve entry from project
result = QgsServerProjectUtils.wmsRestrictedLayers(self.prj)
expected = []
expected.append('points') # layer
expected.append('group1') # local group
expected.append('groupEmbedded') # embedded group
self.assertListEqual(sorted(expected), sorted(result))
def test_wfslayersids(self):
# retrieve entry from project
result = QgsServerProjectUtils.wfsLayerIds(self.prj)
expected = []
expected.append('multipoint20170309173637804') # from embedded group
expected.append('points20170309173738552') # local layer
expected.append('polys20170309173913723') # from local group
self.assertEqual(expected, result)
def test_wcslayersids(self):
# retrieve entry from project
result = QgsServerProjectUtils.wcsLayerIds(self.prj)
expected = []
expected.append('landsat20170313142548073')
self.assertEqual(expected, result)
if __name__ == '__main__':
unittest.main()
| gpl-2.0 |
DBrianKimmel/PyHouse | Project/src/Modules/House/Family/Hue/hue_hub.py | 1 | 25036 | """
@name: Modules/House/Family/hue/hue_hub.py
@author: D. Brian Kimmel
@contact: [email protected]
@copyright: (c) 2017-2020 by D. Brian Kimmel
@note: Created on Dec 19, 2017
@license: MIT License
@summary:
/config
/lights
/groups
/schedules
/scenes
/sensors
/rules
Read the hub info and populate parts of pyhouse_obj.
Send hub commands to do things like turn on/off/dim of lights.
The Hue Hub is a network device so we need to know which PyHouse instance is going to be in control.
http://192.168.1.131/debug/clip.html
"""
__updated__ = '2020-02-09'
# Import system type stuff
from zope.interface import implementer
import datetime
import json
import jsonpickle
from queue import Queue
import time
from twisted.web.client import Agent
from twisted.web.http_headers import Headers
from twisted.internet.defer import Deferred, succeed
from twisted.internet.protocol import Protocol
from twisted.web.iweb import IBodyProducer
# Import PyMh files
from Modules.Core.Utilities.convert import long_to_str
from Modules.Core.Utilities.json_tools import encode_json
from Modules.Core.Utilities.debug_tools import PrettyFormatAny
from Modules.House.Family.Hue.hue_data import HueLightData
from Modules.House.Lighting.utility import lightingUtility as lightingUtility
from Modules.Core import logging_pyh as Logger
LOG = Logger.getLogger('PyHouse.Hue_Hub ')
SEND_TIMEOUT = 0.8
mac = [ '00', '17', '88', '10', '22', '01' ]
uid = '2f402f80-da50-11e1-9b23-%s' % ''.join(mac)
icon = 'hue.png'
description_xml = 'description.xml'
lights = []
username = "9nR8rIGRYNKBlROabMWuAlhGfAgSjBS2EWHoFYy3"
devicetype = "something"
portalservices = False
def generate_timestamp():
return time.strftime('%Y-%m-%dT%H:%M:%S')
def put_config_json(p_json):
l_entry = jsonpickle.decode(p_json)
if 'devicetype' in l_entry:
global devicetype
devicetype = l_entry['devicetype']
elif 'portalservices' in l_entry:
global portalservices
portalservices = l_entry['portalservices']
def json_dumps(what):
return json.dumps(what, sort_keys=True, separators=(',', ':'))
def gen_config_json(full):
pass
# return json_dumps(gen_config(full))
def gen_sensors_json():
return json_dumps(dict())
def set_light_state(_nr, state):
_entry = jsonpickle.encode(state)
# return json_dumps(json_obj)
def set_group_state(_nr, state):
# only 1 group in the current version
for i in range(0, len(lights)):
set_light_state(i, state)
def get_light_state(nr):
pass
def gen_ind_light_json(_nr):
return
def gen_lights(which):
global lights
if which == None:
json_obj = dict()
t = []
n = 0
for _l in lights:
th = 9875 # gilj(n) -- FIXME: 'gilj' (a per-light worker thread) is undefined in this module, so this threaded branch is a non-functional placeholder
n += 1
th.start()
t.append(th)
for nr in range(0, n):
t[nr].join()
json_obj['%d' % (nr + 1)] = t[nr].get_result()
return json_obj
return gen_ind_light_json(which)
def gen_groups(which):
#### a light group
action = {
'on' : True,
'bri' : 254,
'hue' : 10000,
'sat' : 254,
'effect' : 'none',
'xy' : [],
'ct' : 250,
'alert' : 'select',
'colormode' : 'ct'
}
action['xy'].append(0.5)
action['xy'].append(0.5)
g_lights = []
nOn = 0
for i in range(0, len(lights)):
g_lights.append('%d' % (i + 1))
if lights[i]['state'] == True:
nOn += 1
state = {
'all_on' : nOn == len(lights),
'any_on' : nOn > 0
}
g = {
'action' : action,
'lights' : g_lights,
'state' : state,
'type' : 'Room',
'class' : 'Living room',
'name' : 'Group 1'
}
    if which is None:
answer = { '1': g }
return answer
return g
def gen_groups_json(which):
return json_dumps(gen_groups(which))
def gen_scenes():
scene = {
'name': 'Kathy on 1449133269486',
'lights': [],
'owner': 'ffffffffe0341b1b376a2389376a2389',
'recycle': True,
'locked': False,
'appdata': dict(),
'picture': '',
'lastupdated': '2015-12-03T08:57:13',
'version': 1
}
for i in range(0, len(lights)):
scene['lights'].append('%d' % (i + 1))
answer = { '123123123-on-0': scene }
return answer
def gen_scenes_json():
return json_dumps(gen_scenes())
def gen_light_json(which):
return json_dumps(gen_lights(which))
def gen_dump_json():
answer = {
'lights': gen_lights(None),
'groups': gen_groups(None),
# 'config': gen_config(True),
'sensors': {},
'swupdate2': {},
'schedules': {},
'scenes': {}
}
return json_dumps(answer)
def gen_description_xml(addr):
reply = [
'<root xmlns="urn:schemas-upnp-org:device-1-0">',
' <specVersion>',
' <major>1</major>',
' <minor>0</minor>',
' </specVersion>',
' <URLBase>http://%s/</URLBase>' % addr,
' <device>',
' <deviceType>urn:schemas-upnp-org:device:Basic:1</deviceType>',
' <friendlyName>Virtual hue</friendlyName>',
' <manufacturer>vanheusden.com</manufacturer>',
' <manufacturerURL>http://www.vanheusden.com</manufacturerURL>',
' <modelDescription>Virtual Philips hue bridge</modelDescription>',
' <modelName>Virtual hue</modelName>',
' <modelNumber>1</modelNumber>',
' <modelURL>https://github.com/flok99/virtual-hue</modelURL>',
' <serialNumber>%s</serialNumber>' % ''.join(mac),
        ' <UDN>uuid:%s</UDN>' % uid,
' <presentationURL>index.html</presentationURL>',
' <iconList>',
' <icon>',
' <mimetype>image/png</mimetype>',
' <height>48</height>',
' <width>48</width>',
' <depth>24</depth>',
' <url>%s</url>' % icon,
' </icon>',
' </iconList>',
' </device>',
'</root>'
]
return '\r\n'.join(reply)
def generate_light_body_json(p_light_control):
""" Convert internal data to hue control data and format
@param p_light_control: ==> Light Data() in Housing.Lighting.lighting_lights
@returns: json body to control lights
{
"on": true,
"bri": 254
}
"""
if p_light_control.BrightnessPct == 0:
l_body = {
'on' : 'false'
}
else:
l_bright = int(p_light_control.BrightnessPct * 254 / 100)
l_body = {
'on' : 'true',
'bri' : '{}'.format(l_bright)
}
return encode_json(l_body)
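# Hedged usage sketch for generate_light_body_json(): the _DemoLightControl
# class below is an illustration-only stand-in (not part of PyHouse) -- any
# object carrying a BrightnessPct attribute would do.
class _DemoLightControl:
    """ Minimal stand-in for a Light Data() object. """

    def __init__(self, p_brightness_pct):
        self.BrightnessPct = p_brightness_pct

def _demo_light_body():
    """ 0% maps to {"on": "false"}; 50% maps to {"on": "true", "bri": "127"}. """
    return (generate_light_body_json(_DemoLightControl(0)),
            generate_light_body_json(_DemoLightControl(50)))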
@implementer(IBodyProducer)
class BytesProducer(object):
"""
Generate the messages to send in the web requests.
"""
def __init__(self, body):
self.m_body = body
self.length = len(body)
def startProducing(self, consumer):
consumer.write(self.m_body)
return succeed(None)
def pauseProducing(self):
pass
def stopProducing(self):
pass
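# Hedged usage sketch: an Agent POST takes a BytesProducer as its body
# argument.  The URI below is illustrative only.
def _demo_post(p_agent):
    """ Return the deferred from a sample authorization POST. """
    l_body = BytesProducer(b'{"devicetype": "pyhouse#demo"}')
    return p_agent.request(b'POST', b'http://192.168.1.131/api',
                           Headers({'User-Agent': ['Hue Hub Web Client']}),
                           l_body)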
# class server(BaseHTTPRequestHandler):
class Server:
"""
"""
m_client_address = None
m_path = '/'
def _set_headers(self, mime_type):
self.send_response(200)
self.send_header('Content-type', mime_type)
self.end_headers()
def do_GET(self):
        LOG.debug('GET {} {}'.format(self.m_client_address, self.m_path))
parts = self.m_path.split('/')
if self.m_path == '/{}'.format(description_xml):
self._set_headers("text/xml")
LOG.debug('get {}'.format(description_xml))
            h = self.server.server_address[0]  # attribute name carried over from the BaseHTTPRequestHandler original
if 'Host' in self.headers:
h = self.headers['Host']
self.wfile.write(gen_description_xml(h))
elif self.m_path == '/%s' % icon:
self._set_headers("image/png")
LOG.debug('get %s' % parts[1])
try:
                fh = open(icon, 'rb')  # the icon is binary PNG data
self.wfile.write(fh.read())
fh.close()
except Exception as e:
LOG.warning('Cannot access %s' % icon, e)
elif self.m_path == '/api/' or self.m_path == '/api/%s' % username or self.m_path == '/api/%s/' % username:
self._set_headers("application/json")
LOG.debug('get all state')
self.wfile.write(gen_dump_json())
elif self.m_path == '/api/config' or self.m_path == '/api/config/':
self._set_headers("application/json")
LOG.debug('get basic configuration short (2)')
self.wfile.write(gen_config_json(False))
elif len(parts) >= 4 and parts[1] == 'api' and parts[3] == 'lights':
self._set_headers("application/json")
LOG.debug('enumerate list of lights')
if len(parts) == 4 or parts[4] == '':
LOG.debug(' ...all')
self.wfile.write(gen_light_json(None))
else:
LOG.debug(' ...single (%s)' % parts[4])
self.wfile.write(gen_light_json(int(parts[4]) - 1))
elif len(parts) >= 4 and parts[1] == 'api' and parts[3] == 'groups':
self._set_headers("application/json")
LOG.debug('enumerate list of groups')
if len(parts) == 4 or parts[4] == '':
LOG.debug(' ...all')
self.wfile.write(gen_groups_json(None))
else:
LOG.debug(' ...single (%s)' % parts[4])
self.wfile.write(gen_groups_json(int(parts[4]) - 1))
elif len(parts) >= 4 and parts[1] == 'api' and parts[3] == 'scenes':
self._set_headers("application/json")
LOG.debug('enumerate list of scenes')
self.wfile.write(gen_scenes_json())
elif len(parts) >= 4 and parts[1] == 'api' and parts[3] == 'sensors':
self._set_headers("application/json")
LOG.debug('enumerate list of sensors')
self.wfile.write(gen_sensors_json())
elif len(parts) >= 4 and parts[1] == 'api' and parts[3] == 'light':
self._set_headers("application/json")
LOG.debug('get individual light state')
self.wfile.write(gen_ind_light_json(int(parts[4]) - 1))
elif len(parts) >= 4 and parts[1] == 'api' and parts[3] == 'config':
self._set_headers("application/json")
if parts[2] == username:
LOG.debug('get basic configuration full')
self.wfile.write(gen_config_json(True))
else:
LOG.debug('get basic configuration short (1)')
self.wfile.write(gen_config_json(False))
else:
self._set_headers("application/json")
            LOG.debug('[G] unknown get request {} {}'.format(self.m_path, self.headers))
self.wfile.write('unreg()')
# self.wfile.write('[{"error":{"type":1,"address":"/","description":"unauthorized user"}}]')
def do_HEAD(self):
LOG.debug('HEAD')
self._set_headers("text/html")
def do_POST(self):
        LOG.debug('POST {}'.format(self.m_path))
parts = self.m_path.split('/')
# simpler registration; always return the same key
# should keep track in e.g. an sqlite3 database and then do whitelisting etc
if len(parts) >= 2 and parts[1] == 'api':
self._set_headers("application/json")
data_len = int(self.headers['Content-Length'])
LOG.debug(self.rfile.read(data_len))
self.wfile.write('[{"success":{"username": "%s"}}]' % username)
        elif len(parts) >= 4 and parts[1] == 'api' and parts[3] == 'groups':  # parts['3'] was a string-index bug
self._set_headers("application/json")
self.wfile.write('[{"success":{"id": "1"}}]')
else:
            LOG.debug('unknown post request {}'.format(self.m_path))
def do_PUT(self):
        LOG.debug('PUT {}'.format(self.m_path))
data_len = int(self.headers['Content-Length'])
content = self.rfile.read(data_len)
parts = self.m_path.split('/')
if len(parts) >= 6 and parts[1] == 'api' and parts[3] == 'lights' and parts[5] == 'state':
self._set_headers("application/json")
LOG.debug('set individual light state')
self.wfile.write(set_light_state(int(parts[4]) - 1, content))
elif len(parts) >= 6 and parts[1] == 'api' and parts[3] == 'groups' and parts[5] == 'action':
self._set_headers("application/json")
LOG.debug('set individual group state')
self.wfile.write(set_group_state(int(parts[4]) - 1, content))
elif len(parts) >= 4 and parts[1] == 'api' and parts[3] == 'config':
self._set_headers("application/json")
LOG.debug('put config')
put_config_json(content)
self.wfile.write('[{"success":"Updated."}]')
elif len(parts) >= 3 and parts[1] == 'api' and parts[2] == 'config':
self._set_headers("application/json")
LOG.debug('put config (2)')
LOG.debug(content)
else:
self._set_headers("text/html")
            LOG.debug('unknown put request {} {}'.format(self.m_path, content))
def add_light(name, id_, command, command_get):
global lights
row = {
'name': name,
'id': id_,
'cmd': command,
'cmd_get': command_get,
'state': False
}
lights.append(row)
class HueProtocol(Protocol):
""" A minimal protocol for the Hue Hub.
"""
m_finished = None
m_remaining = 0
def __init__(self, p_pyhouse_obj, p_finished, p_command, p_response_code):
"""
        @param p_finished: is a Deferred that is fired once the complete response body has been received.
"""
self.m_finished = p_finished
self.m_command = p_command
self.m_code = p_response_code
self.m_pyhouse_obj = p_pyhouse_obj
self.m_body = ''
self.m_remaining = 1024 * 10 # Allow for 10kb response
LOG.debug('Hue Protocol Init')
def dataReceived(self, p_bytes):
if self.m_remaining > 0:
l_display = p_bytes[:self.m_remaining].decode("utf8") # Get the string
# l_json = jsonpickle.decode(l_display)
# LOG.debug('\n\tCommand: {}\n===== Body =====\n{}\n'.format(self.m_command, l_json))
            self.m_body += l_display  # accumulate; '=' would drop earlier chunks of a multi-chunk body
self.m_remaining -= len(l_display)
def connectionLost(self, p_reason):
""" This gets called when the web page has all been received in its entirety.
GET
Now we have the page (and the command we used to get the page) we can deal with the servers reply.
POST
?
"""
def cb_log(self, p_command, p_code, p_body, p_finished, p_pyhouse_obj):
""" Log the response to our command and dispatch the message
"""
# LOG.debug('\n\tCommand: {}\n\tCode: {}\n\tBody: {}'.format(p_command, p_code, p_body))
if p_command == '/config':
HueDispatch(p_pyhouse_obj, p_finished, p_command, p_code).get_config(p_body)
elif p_command == '/lights':
HueDispatch(p_pyhouse_obj, p_finished, p_command, p_code).get_lights(p_body)
elif p_command == '/rules':
HueDispatch(p_pyhouse_obj, p_finished, p_command, p_code).get_rules(p_body)
elif p_command == '/scenes':
HueDispatch(p_pyhouse_obj, p_finished, p_command, p_code).get_scenes(p_body)
elif p_command == '/schedules':
HueDispatch(p_pyhouse_obj, p_finished, p_command, p_code).get_schedules(p_body)
elif p_command == '/sensors':
HueDispatch(p_pyhouse_obj, p_finished, p_command, p_code).get_sensors(p_body)
def eb_failed(fail_reason):
LOG.warning("initial Hue Hub connection failed: {}".format(fail_reason))
# l_ReconnectingService.stopService()
l_msg = p_reason.getErrorMessage() # this gives a tuple of messages (I think)
if l_msg == '':
self.m_finished.addCallback(cb_log, self.m_command, self.m_code, self.m_body, self.m_finished, self.m_pyhouse_obj)
self.m_finished.addErrback(eb_failed, p_reason)
self.m_finished.callback(None)
return
LOG.debug('Finished receiving body: {}'.format(PrettyFormatAny.form(l_msg, 'Reason', 190)))
LOG.debug('Finished receiving body: {}'.format("\t".join(str(x) for x in l_msg)))
self.m_finished.callback(None)
return
class HueDecode(object):
"""
"""
def decode_get(self):
"""
"""
LOG.info('Decode_Get')
def decode_post(self):
"""
"""
LOG.info('Decode_Post')
class HueDispatch(HueProtocol):
"""
"""
def _add_light(self, p_light_obj):
l_objs = self.m_pyhouse_obj.House.Lighting.Lights
_l_light_obj = lightingUtility().get_object_type_by_id(l_objs, name=p_light_obj.Name)
pass
def get_config(self, p_body):
# l_msg = jsonpickle.decode(p_body)
# LOG.debug('Got Config {}'.format(PrettyFormatAny.form(l_msg, 'Config', 190)))
pass
def get_lights(self, p_body):
"""
See Docs/Design.md for the JSON returned.
"""
LOG.debug('{}'.format(p_body))
        return  # XXX debugging early return -- the parsing below is currently disabled
try:
# l_json = jsonpickle.decode(p_body)
l_json = p_body
        except Exception as e_err:
            LOG.error('Error - {}\n{}'.format(e_err, PrettyFormatAny.form(p_body, "HUE ERROR", 190)))
# LOG.debug('Got Lights {}'.format(PrettyFormatAny.form(l_json, 'Lights', 190)))
for l_light_obj in l_json.items():
l_light = HueLightData()
LOG.debug('Light: {}'.format(PrettyFormatAny.form(l_light_obj, 'Light', 190)))
for l_key, l_value in l_light_obj[1].items():
l_light.HueLightIndex = l_light_obj[0]
l_light.Key = l_light_obj[0]
# l_light.Active = True
l_light.Family.Name = 'Hue'
l_light.DeviceType = 'Lighting' # Lighting
l_light.DeviceSubType = 'Light'
l_light.ControllerName = 'Hue Hub'
l_light.LastUpdate = datetime.datetime.now()
l_light.IsDimmable = True
# LOG.debug('Add Light: {} {}'.format(l_key, PrettyFormatAny.form(l_value, 'Light', 190)))
if l_key == 'name':
l_light.Name = l_value
# LOG.debug('Add Light {}'.format(PrettyFormatAny.form(l_light, 'Light', 190)))
if l_key == 'type':
l_light.Comment = l_value
if l_key == 'uniqueid':
l_light.HueUniqueId = l_value
if l_key == 'state':
                    l_state = False
                    l_bri = 0  # default so BrightnessPct is computed even if 'bri' is absent
for l_st_key, l_st_val in l_value.items():
if l_st_key == 'on':
l_state = l_st_val
if l_st_key == 'bri':
l_bri = l_st_val
if l_state == True:
l_light.BrightnessPct = int(l_bri / 2.54)
else:
l_light.BrightnessPct = 0
LOG.debug('Add Light {}'.format(PrettyFormatAny.form(l_light, 'Light', 190)))
self._add_light(l_light)
def get_rules(self, p_body):
l_msg = jsonpickle.decode(p_body)
LOG.debug('Got Rules {}'.format(PrettyFormatAny.form(l_msg, 'Rules', 190)))
def get_scenes(self, p_body):
l_msg = jsonpickle.decode(p_body)
LOG.debug('Got Scenes {}'.format(PrettyFormatAny.form(l_msg, 'Scenes', 190)))
def get_schedules(self, p_body):
l_msg = jsonpickle.decode(p_body)
LOG.debug('Got Schedules {}'.format(PrettyFormatAny.form(l_msg, 'Schedules', 190)))
def get_sensors(self, p_body):
l_msg = jsonpickle.decode(p_body)
LOG.debug('Got Sensors {}'.format(PrettyFormatAny.form(l_msg, 'Sensors', 190)))
class HueHub:
"""
"""
m_bridge_obj = None
m_command = b'/config'
m_headers = None
m_hue_agent = None
m_pyhouse_obj = None
def __init__(self, p_pyhouse_obj):
"""
Agent is a very basic HTTP client. It supports I{HTTP} and I{HTTPS} scheme URIs.
"""
self.m_pyhouse_obj = p_pyhouse_obj
self.m_headers = Headers({'User-Agent': ['Hue Hub Web Client']})
self.m_hue_agent = Agent(p_pyhouse_obj._Twisted.Reactor)
LOG.info('Initialized')
def _build_uri(self, p_command=b'/config'):
"""
URI: b'http://192.168.1.131/api/MBFBC-agf6rq5bsWcxLngYZoClGr2pw2oKEMLZgs/config'
"""
l_uri = b'http://'
try:
l_uri += self.m_bridge_obj.IPv4Address
except TypeError:
l_uri += long_to_str(self.m_bridge_obj.IPv4Address).encode("utf8")
l_uri += b'/api/'
try:
l_uri += self.m_bridge_obj.ApiKey
except TypeError:
l_uri += self.m_bridge_obj.ApiKey.encode("utf8")
try:
l_uri += p_command.encode("utf8")
        except AttributeError:  # already bytes
l_uri += p_command
LOG.info('URI: {}'.format(l_uri))
return l_uri
def _build_command(self, p_command):
try:
l_command = p_command.encode("utf8")
        except AttributeError:  # already bytes
l_command = p_command
return l_command
def _get_all_config(self):
"""
/config
/lights
/groups
/schedules
/scenes
/sensors
/rules
"""
        return  # XXX debugging early return -- the HubGet calls below are disabled
_l_agent_d = self.HubGet('/config')
_l_agent_d = self.HubGet('/lights')
# _l_agent_d = self.HubGet('/groups')
# _l_agent_d = self.HubGet('/schedules')
# _l_agent_d = self.HubGet('/scenes')
# _l_agent_d = self.HubGet('/sensors')
# _l_agent_d = self.HubGet('/rules')
# Server().do_GET()
LOG.info('Scheduled All config')
def HubGet(self, p_command):
""" Issue a request for information. It will arrive later via a deferred.
"""
def cb_Response(p_response, p_command):
"""
"""
# LOG.debug('Command: {}'.format(p_command))
# LOG.debug('Response Code: {} {}'.format(p_response.code, p_response.phrase))
d_finished = Deferred()
p_response.deliverBody(HueProtocol(self.m_pyhouse_obj, d_finished, p_command, p_response.code))
return d_finished
d_agent = self.m_hue_agent.request(
b'GET',
self._build_uri(p_command),
self.m_headers,
None)
d_agent.addCallback(cb_Response, p_command)
HueDecode().decode_get()
return d_agent
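    def DemoGetLights(self, p_bridge_obj):
        """ Hedged usage sketch (illustrative only, not part of PyHouse):
        attach a bridge object carrying IPv4Address and ApiKey, then fetch
        one endpoint; the parsed reply arrives later via the deferred.
        """
        self.m_bridge_obj = p_bridge_obj
        return self.HubGet('/lights')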
def HubPostCommand(self, p_command, p_body):
"""
@param p_command: is the Hue command we will be using
@param p_body: is the body producer function.
"""
def cb_response(p_response):
LOG.debug('Response Code: {} {}'.format(p_response.code, p_response.phrase))
            LOG.debug('Response Headers: {}'.format(p_response.headers))  # Headers has no decode()
l_finished = Deferred()
            p_response.deliverBody(HueProtocol(self.m_pyhouse_obj, l_finished, p_command, p_response.code))  # pass all four ctor args
return l_finished
l_agent_d = self.m_hue_agent.request(b'POST',
self._build_uri(p_command),
self.m_headers,
p_body)
l_agent_d.addCallback(cb_response)
HueDecode().decode_post()
return l_agent_d
def HubStart(self, p_bridge_obj):
""" Start the hub(bridge) and then get the hub data
@param p_bridge_obj: is PyHouse_Obj.Computers.Bridges.xxx with xxx being a HueHub
"""
p_bridge_obj._Queue = Queue(32)
self.m_bridge_obj = p_bridge_obj
self._get_all_config()
LOG.info('Started')
def Start(self):
""" Start the hub(bridge) and then get the hub data
@param p_bridge_obj: is PyHouse_Obj.Computers.Bridges.xxx with xxx being a HueHub
"""
# LOG.debug(PrettyFormatAny.form(self.m_pyhouse_obj, 'PyHouse'))
# LOG.debug(PrettyFormatAny.form(self.m_pyhouse_obj.Computer, 'Computer'))
# LOG.debug(PrettyFormatAny.form(self.m_pyhouse_obj.House, 'House'))
for l_bridge_obj in self.m_pyhouse_obj.Computer.Bridges.values():
LOG.debug(PrettyFormatAny.form(l_bridge_obj, 'Bridge'))
l_bridge_obj._Queue = Queue(32)
self.m_bridge_obj = l_bridge_obj
self._get_all_config()
LOG.debug('Started')
# ## END DBK
| mit |
lgscofield/odoo | addons/l10n_be_hr_payroll/__init__.py | 438 | 1072 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2011 OpenERP SA (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
import l10n_be_hr_payroll
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
mapclient-plugins/trcsourcestep | mapclientplugins/trcsourcestep/ui_configuredialog.py | 1 | 3575 | # -*- coding: utf-8 -*-
################################################################################
## Form generated from reading UI file 'configuredialog.ui'
##
## Created by: Qt User Interface Compiler version 5.15.2
##
## WARNING! All changes made in this file will be lost when recompiling UI file!
################################################################################
from PySide2.QtCore import *
from PySide2.QtGui import *
from PySide2.QtWidgets import *
class Ui_ConfigureDialog(object):
def setupUi(self, ConfigureDialog):
if not ConfigureDialog.objectName():
ConfigureDialog.setObjectName(u"ConfigureDialog")
ConfigureDialog.resize(562, 238)
self.gridLayout = QGridLayout(ConfigureDialog)
self.gridLayout.setObjectName(u"gridLayout")
self.configGroupBox = QGroupBox(ConfigureDialog)
self.configGroupBox.setObjectName(u"configGroupBox")
self.gridLayout_2 = QGridLayout(self.configGroupBox)
self.gridLayout_2.setObjectName(u"gridLayout_2")
self.horizontalLayout = QHBoxLayout()
self.horizontalLayout.setObjectName(u"horizontalLayout")
self.locLineEdit = QLineEdit(self.configGroupBox)
self.locLineEdit.setObjectName(u"locLineEdit")
self.horizontalLayout.addWidget(self.locLineEdit)
self.locButton = QPushButton(self.configGroupBox)
self.locButton.setObjectName(u"locButton")
self.horizontalLayout.addWidget(self.locButton)
self.gridLayout_2.addLayout(self.horizontalLayout, 1, 1, 1, 1)
self.idLineEdit = QLineEdit(self.configGroupBox)
self.idLineEdit.setObjectName(u"idLineEdit")
self.gridLayout_2.addWidget(self.idLineEdit, 0, 1, 1, 1)
self.locLabel = QLabel(self.configGroupBox)
self.locLabel.setObjectName(u"locLabel")
self.gridLayout_2.addWidget(self.locLabel, 1, 0, 1, 1)
self.idLabel = QLabel(self.configGroupBox)
self.idLabel.setObjectName(u"idLabel")
self.gridLayout_2.addWidget(self.idLabel, 0, 0, 1, 1)
self.verticalSpacer = QSpacerItem(20, 40, QSizePolicy.Minimum, QSizePolicy.Expanding)
self.gridLayout_2.addItem(self.verticalSpacer, 2, 1, 1, 1)
self.gridLayout.addWidget(self.configGroupBox, 0, 0, 1, 1)
self.buttonBox = QDialogButtonBox(ConfigureDialog)
self.buttonBox.setObjectName(u"buttonBox")
self.buttonBox.setOrientation(Qt.Horizontal)
self.buttonBox.setStandardButtons(QDialogButtonBox.Cancel|QDialogButtonBox.Ok)
self.gridLayout.addWidget(self.buttonBox, 1, 0, 1, 1)
QWidget.setTabOrder(self.idLineEdit, self.locLineEdit)
QWidget.setTabOrder(self.locLineEdit, self.locButton)
QWidget.setTabOrder(self.locButton, self.buttonBox)
self.retranslateUi(ConfigureDialog)
self.buttonBox.accepted.connect(ConfigureDialog.accept)
self.buttonBox.rejected.connect(ConfigureDialog.reject)
QMetaObject.connectSlotsByName(ConfigureDialog)
# setupUi
def retranslateUi(self, ConfigureDialog):
ConfigureDialog.setWindowTitle(QCoreApplication.translate("ConfigureDialog", u"Configure TRC Source Step", None))
self.configGroupBox.setTitle("")
self.locButton.setText(QCoreApplication.translate("ConfigureDialog", u"...", None))
self.locLabel.setText(QCoreApplication.translate("ConfigureDialog", u"Location: ", None))
self.idLabel.setText(QCoreApplication.translate("ConfigureDialog", u"identifier: ", None))
# retranslateUi
| apache-2.0 |
landscapeio/astroid | brain/py2stdlib.py | 1 | 5939 | """Astroid hooks for the Python 2 standard library.
Currently help understanding of :
* hashlib.md5 and hashlib.sha1
"""
from astroid import MANAGER, AsStringRegexpPredicate, UseInferenceDefault, inference_tip
from astroid import nodes
from astroid.builder import AstroidBuilder
MODULE_TRANSFORMS = {}
# generic module transformation hook ###########################################
def transform(module):
try:
tr = MODULE_TRANSFORMS[module.name]
except KeyError:
pass
else:
tr(module)
MANAGER.register_transform(nodes.Module, transform)
# module specific transformation functions #####################################
def hashlib_transform(module):
template = '''
class %s(object):
def __init__(self, value=''): pass
def digest(self):
return u''
def update(self, value): pass
def hexdigest(self):
return u''
'''
algorithms = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512')
classes = "".join(template % hashfunc for hashfunc in algorithms)
fake = AstroidBuilder(MANAGER).string_build(classes)
for hashfunc in algorithms:
module.locals[hashfunc] = fake.locals[hashfunc]
def collections_transform(module):
fake = AstroidBuilder(MANAGER).string_build('''
class defaultdict(dict):
default_factory = None
def __missing__(self, key): pass
class deque(object):
maxlen = 0
def __init__(iterable=None, maxlen=None): pass
def append(self, x): pass
def appendleft(self, x): pass
def clear(self): pass
def count(self, x): return 0
def extend(self, iterable): pass
def extendleft(self, iterable): pass
def pop(self): pass
def popleft(self): pass
def remove(self, value): pass
def reverse(self): pass
def rotate(self, n): pass
''')
for klass in ('deque', 'defaultdict'):
module.locals[klass] = fake.locals[klass]
def pkg_resources_transform(module):
fake = AstroidBuilder(MANAGER).string_build('''
def resource_exists(package_or_requirement, resource_name):
pass
def resource_isdir(package_or_requirement, resource_name):
pass
def resource_filename(package_or_requirement, resource_name):
pass
def resource_stream(package_or_requirement, resource_name):
pass
def resource_string(package_or_requirement, resource_name):
pass
def resource_listdir(package_or_requirement, resource_name):
pass
def extraction_error():
pass
def get_cache_path(archive_name, names=()):
pass
def postprocess(tempname, filename):
pass
def set_extraction_path(path):
pass
def cleanup_resources(force=False):
pass
''')
for func_name, func in fake.locals.items():
module.locals[func_name] = func
def urlparse_transform(module):
fake = AstroidBuilder(MANAGER).string_build('''
def urlparse(url, scheme='', allow_fragments=True):
return ParseResult()
class ParseResult(object):
def __init__(self):
self.scheme = ''
self.netloc = ''
self.path = ''
self.params = ''
self.query = ''
self.fragment = ''
self.username = None
self.password = None
self.hostname = None
self.port = None
def geturl(self):
return ''
''')
for func_name, func in fake.locals.items():
module.locals[func_name] = func
def subprocess_transform(module):
fake = AstroidBuilder(MANAGER).string_build('''
class Popen(object):
returncode = pid = 0
stdin = stdout = stderr = file()
def __init__(self, args, bufsize=0, executable=None,
stdin=None, stdout=None, stderr=None,
preexec_fn=None, close_fds=False, shell=False,
cwd=None, env=None, universal_newlines=False,
startupinfo=None, creationflags=0):
pass
def communicate(self, input=None):
return ('string', 'string')
def wait(self):
return self.returncode
def poll(self):
return self.returncode
def send_signal(self, signal):
pass
def terminate(self):
pass
def kill(self):
pass
''')
for func_name, func in fake.locals.items():
module.locals[func_name] = func
MODULE_TRANSFORMS['hashlib'] = hashlib_transform
MODULE_TRANSFORMS['collections'] = collections_transform
MODULE_TRANSFORMS['pkg_resources'] = pkg_resources_transform
MODULE_TRANSFORMS['urlparse'] = urlparse_transform
MODULE_TRANSFORMS['subprocess'] = subprocess_transform
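# A hedged illustration of how a project-specific transform would plug in;
# 'mymodule' and the CONSTANT attribute below are assumptions, not part of
# astroid itself.
def _example_transform(module):
    fake = AstroidBuilder(MANAGER).string_build('''
CONSTANT = 42
''')
    module.locals['CONSTANT'] = fake.locals['CONSTANT']
# MODULE_TRANSFORMS['mymodule'] = _example_transform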
# namedtuple support ###########################################################
def infer_named_tuple(node, context=None):
"""Specific inference function for namedtuple CallFunc node"""
# node is a CallFunc node, class name as first argument and generated class
# attributes as second argument
if len(node.args) != 2:
# something weird here, go back to class implementation
raise UseInferenceDefault()
# namedtuple list of attributes can be a list of strings or a
# whitespace-separate string
try:
name = node.args[0].value
try:
attributes = node.args[1].value.split()
except AttributeError:
attributes = [const.value for const in node.args[1].elts]
except AttributeError:
raise UseInferenceDefault()
# we want to return a Class node instance with proper attributes set
class_node = nodes.Class(name, 'docstring')
# set base class=tuple
class_node.bases.append(nodes.Tuple._proxied)
# XXX add __init__(*attributes) method
for attr in attributes:
fake_node = nodes.EmptyNode()
fake_node.parent = class_node
class_node.instance_attrs[attr] = [fake_node]
# we use UseInferenceDefault, we can't be a generator so return an iterator
return iter([class_node])
MANAGER.register_transform(nodes.CallFunc, inference_tip(infer_named_tuple),
AsStringRegexpPredicate('namedtuple', 'func'))
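# Hedged sketch of exercising the inference tip above; the demo module text is
# illustrative.  string_build() yields a Module whose second statement is the
# namedtuple assignment, and inferring its call should produce the synthetic
# Class node built by infer_named_tuple().
def _demo_namedtuple_inference():
    call = AstroidBuilder(MANAGER).string_build('''
from collections import namedtuple
Coord = namedtuple('Coord', 'x y')
''').body[1].value
    return next(call.infer())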
| gpl-2.0 |
nikolas/lettuce | tests/integration/lib/Django-1.3/django/core/serializers/base.py | 202 | 5487 | """
Module for abstract serializer/unserializer base classes.
"""
from StringIO import StringIO
from django.db import models
from django.utils.encoding import smart_str, smart_unicode
from django.utils import datetime_safe
class SerializationError(Exception):
"""Something bad happened during serialization."""
pass
class DeserializationError(Exception):
"""Something bad happened during deserialization."""
pass
class Serializer(object):
"""
Abstract serializer base class.
"""
# Indicates if the implemented serializer is only available for
# internal Django use.
internal_use_only = False
def serialize(self, queryset, **options):
"""
Serialize a queryset.
"""
self.options = options
self.stream = options.pop("stream", StringIO())
self.selected_fields = options.pop("fields", None)
self.use_natural_keys = options.pop("use_natural_keys", False)
self.start_serialization()
for obj in queryset:
self.start_object(obj)
for field in obj._meta.local_fields:
if field.serialize:
if field.rel is None:
if self.selected_fields is None or field.attname in self.selected_fields:
self.handle_field(obj, field)
else:
if self.selected_fields is None or field.attname[:-3] in self.selected_fields:
self.handle_fk_field(obj, field)
for field in obj._meta.many_to_many:
if field.serialize:
if self.selected_fields is None or field.attname in self.selected_fields:
self.handle_m2m_field(obj, field)
self.end_object(obj)
self.end_serialization()
return self.getvalue()
def get_string_value(self, obj, field):
"""
Convert a field's value to a string.
"""
return smart_unicode(field.value_to_string(obj))
def start_serialization(self):
"""
Called when serializing of the queryset starts.
"""
raise NotImplementedError
def end_serialization(self):
"""
Called when serializing of the queryset ends.
"""
pass
def start_object(self, obj):
"""
Called when serializing of an object starts.
"""
raise NotImplementedError
def end_object(self, obj):
"""
Called when serializing of an object ends.
"""
pass
def handle_field(self, obj, field):
"""
Called to handle each individual (non-relational) field on an object.
"""
raise NotImplementedError
def handle_fk_field(self, obj, field):
"""
Called to handle a ForeignKey field.
"""
raise NotImplementedError
def handle_m2m_field(self, obj, field):
"""
Called to handle a ManyToManyField.
"""
raise NotImplementedError
def getvalue(self):
"""
Return the fully serialized queryset (or None if the output stream is
not seekable).
"""
if callable(getattr(self.stream, 'getvalue', None)):
return self.stream.getvalue()
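# A minimal sketch of a concrete serializer, using only the abstract hooks
# defined above; it is illustrative, not one of Django's shipped serializers.
class _LineSerializer(Serializer):
    """
    Emits one "app.Model(pk=..)" line per object and ignores field values.
    """
    internal_use_only = True

    def start_serialization(self):
        self._lines = []

    def start_object(self, obj):
        self._lines.append("%s.%s(pk=%s)" % (
            obj._meta.app_label, obj._meta.object_name, obj.pk))

    def handle_field(self, obj, field):
        pass

    def handle_fk_field(self, obj, field):
        pass

    def handle_m2m_field(self, obj, field):
        pass

    def end_serialization(self):
        self.stream.write("\n".join(self._lines))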
class Deserializer(object):
"""
Abstract base deserializer class.
"""
def __init__(self, stream_or_string, **options):
"""
Init this serializer given a stream or a string
"""
self.options = options
if isinstance(stream_or_string, basestring):
self.stream = StringIO(stream_or_string)
else:
self.stream = stream_or_string
# hack to make sure that the models have all been loaded before
# deserialization starts (otherwise subclass calls to get_model()
# and friends might fail...)
models.get_apps()
def __iter__(self):
return self
def next(self):
"""Iteration iterface -- return the next item in the stream"""
raise NotImplementedError
class DeserializedObject(object):
"""
A deserialized model.
Basically a container for holding the pre-saved deserialized data along
with the many-to-many data saved with the object.
Call ``save()`` to save the object (with the many-to-many data) to the
database; call ``save(save_m2m=False)`` to save just the object fields
(and not touch the many-to-many stuff.)
"""
def __init__(self, obj, m2m_data=None):
self.object = obj
self.m2m_data = m2m_data
def __repr__(self):
return "<DeserializedObject: %s.%s(pk=%s)>" % (
self.object._meta.app_label, self.object._meta.object_name, self.object.pk)
def save(self, save_m2m=True, using=None):
# Call save on the Model baseclass directly. This bypasses any
# model-defined save. The save is also forced to be raw.
# This ensures that the data that is deserialized is literally
# what came from the file, not post-processed by pre_save/save
# methods.
models.Model.save_base(self.object, using=using, raw=True)
if self.m2m_data and save_m2m:
for accessor_name, object_list in self.m2m_data.items():
setattr(self.object, accessor_name, object_list)
# prevent a second (possibly accidental) call to save() from saving
# the m2m data twice.
self.m2m_data = None
| gpl-3.0 |
arcivanov/unittest-xml-reporting | setup.py | 4 | 1728 | #!/usr/bin/env python
from setuptools import setup, find_packages
from distutils.util import convert_path
import codecs
# Load version information
main_ns = {}
ver_path = convert_path('xmlrunner/version.py')
with codecs.open(ver_path, 'rb', 'utf8') as ver_file:
exec(ver_file.read(), main_ns)
install_requires = ['six>=1.4.0']
# this is for sdist to work.
import sys
if sys.version_info < (2, 7):
install_requires += ['unittest2']
setup(
name = 'unittest-xml-reporting',
version = main_ns['__version__'],
author = 'Daniel Fernandes Martins',
author_email = '[email protected]',
description = 'unittest-based test runner with Ant/JUnit like XML reporting.',
license = 'BSD',
platforms = ['Any'],
keywords = [
'pyunit', 'unittest', 'junit xml', 'report', 'testrunner', 'xmlrunner'
],
url = 'http://github.com/xmlrunner/unittest-xml-reporting/tree/master/',
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Software Development :: Testing'
],
packages = ['xmlrunner', 'xmlrunner.extra'],
zip_safe = False,
include_package_data = True,
install_requires = install_requires,
extras_require={
# this is for wheels to work
':python_version=="2.6"': ['unittest2'],
},
test_suite = 'tests'
)
| bsd-2-clause |
adambrenecki/django | django/contrib/gis/tests/geoapp/tests.py | 4 | 37417 | from __future__ import unicode_literals
import re
import unittest
from unittest import skipUnless
from django.db import connection
from django.contrib.gis import gdal
from django.contrib.gis.geos import HAS_GEOS
from django.contrib.gis.tests.utils import (
HAS_SPATIAL_DB, no_mysql, no_oracle, no_spatialite,
mysql, oracle, postgis, spatialite)
from django.test import TestCase
from django.utils import six
if HAS_GEOS:
from django.contrib.gis.geos import (fromstr, GEOSGeometry,
Point, LineString, LinearRing, Polygon, GeometryCollection)
from .models import Country, City, PennsylvaniaCity, State, Track
if HAS_GEOS and not spatialite:
from .models import Feature, MinusOneSRID
def postgis_bug_version():
spatial_version = getattr(connection.ops, "spatial_version", (0,0,0))
return spatial_version and (2, 0, 0) <= spatial_version <= (2, 0, 1)
@skipUnless(HAS_GEOS and HAS_SPATIAL_DB, "Geos and spatial db are required.")
class GeoModelTest(TestCase):
def test_fixtures(self):
"Testing geographic model initialization from fixtures."
# Ensuring that data was loaded from initial data fixtures.
self.assertEqual(2, Country.objects.count())
self.assertEqual(8, City.objects.count())
self.assertEqual(2, State.objects.count())
def test_proxy(self):
"Testing Lazy-Geometry support (using the GeometryProxy)."
## Testing on a Point
pnt = Point(0, 0)
nullcity = City(name='NullCity', point=pnt)
nullcity.save()
# Making sure TypeError is thrown when trying to set with an
# incompatible type.
for bad in [5, 2.0, LineString((0, 0), (1, 1))]:
try:
nullcity.point = bad
except TypeError:
pass
else:
self.fail('Should throw a TypeError')
# Now setting with a compatible GEOS Geometry, saving, and ensuring
# the save took, notice no SRID is explicitly set.
new = Point(5, 23)
nullcity.point = new
# Ensuring that the SRID is automatically set to that of the
# field after assignment, but before saving.
self.assertEqual(4326, nullcity.point.srid)
nullcity.save()
# Ensuring the point was saved correctly after saving
self.assertEqual(new, City.objects.get(name='NullCity').point)
# Setting the X and Y of the Point
nullcity.point.x = 23
nullcity.point.y = 5
# Checking assignments pre & post-save.
self.assertNotEqual(Point(23, 5), City.objects.get(name='NullCity').point)
nullcity.save()
self.assertEqual(Point(23, 5), City.objects.get(name='NullCity').point)
nullcity.delete()
## Testing on a Polygon
shell = LinearRing((0, 0), (0, 100), (100, 100), (100, 0), (0, 0))
inner = LinearRing((40, 40), (40, 60), (60, 60), (60, 40), (40, 40))
# Creating a State object using a built Polygon
ply = Polygon(shell, inner)
nullstate = State(name='NullState', poly=ply)
self.assertEqual(4326, nullstate.poly.srid) # SRID auto-set from None
nullstate.save()
ns = State.objects.get(name='NullState')
self.assertEqual(ply, ns.poly)
# Testing the `ogr` and `srs` lazy-geometry properties.
if gdal.HAS_GDAL:
self.assertEqual(True, isinstance(ns.poly.ogr, gdal.OGRGeometry))
self.assertEqual(ns.poly.wkb, ns.poly.ogr.wkb)
self.assertEqual(True, isinstance(ns.poly.srs, gdal.SpatialReference))
self.assertEqual('WGS 84', ns.poly.srs.name)
# Changing the interior ring on the poly attribute.
new_inner = LinearRing((30, 30), (30, 70), (70, 70), (70, 30), (30, 30))
ns.poly[1] = new_inner
ply[1] = new_inner
self.assertEqual(4326, ns.poly.srid)
ns.save()
self.assertEqual(ply, State.objects.get(name='NullState').poly)
ns.delete()
@no_mysql
def test_lookup_insert_transform(self):
"Testing automatic transform for lookups and inserts."
# San Antonio in 'WGS84' (SRID 4326)
sa_4326 = 'POINT (-98.493183 29.424170)'
wgs_pnt = fromstr(sa_4326, srid=4326) # Our reference point in WGS84
# Oracle doesn't have SRID 3084, using 41157.
if oracle:
# San Antonio in 'Texas 4205, Southern Zone (1983, meters)' (SRID 41157)
# Used the following Oracle SQL to get this value:
# SELECT SDO_UTIL.TO_WKTGEOMETRY(SDO_CS.TRANSFORM(SDO_GEOMETRY('POINT (-98.493183 29.424170)', 4326), 41157)) FROM DUAL;
nad_wkt = 'POINT (300662.034646583 5416427.45974934)'
nad_srid = 41157
else:
# San Antonio in 'NAD83(HARN) / Texas Centric Lambert Conformal' (SRID 3084)
nad_wkt = 'POINT (1645978.362408288754523 6276356.025927528738976)' # Used ogr.py in gdal 1.4.1 for this transform
nad_srid = 3084
# Constructing & querying with a point from a different SRID. Oracle
# `SDO_OVERLAPBDYINTERSECT` operates differently from
# `ST_Intersects`, so contains is used instead.
nad_pnt = fromstr(nad_wkt, srid=nad_srid)
if oracle:
tx = Country.objects.get(mpoly__contains=nad_pnt)
else:
tx = Country.objects.get(mpoly__intersects=nad_pnt)
self.assertEqual('Texas', tx.name)
# Creating San Antonio. Remember the Alamo.
sa = City.objects.create(name='San Antonio', point=nad_pnt)
# Now verifying that San Antonio was transformed correctly
sa = City.objects.get(name='San Antonio')
self.assertAlmostEqual(wgs_pnt.x, sa.point.x, 6)
self.assertAlmostEqual(wgs_pnt.y, sa.point.y, 6)
# If the GeometryField SRID is -1, then we shouldn't perform any
# transformation if the SRID of the input geometry is different.
# SpatiaLite does not support missing SRID values.
if not spatialite:
m1 = MinusOneSRID(geom=Point(17, 23, srid=4326))
m1.save()
self.assertEqual(-1, m1.geom.srid)
def test_createnull(self):
"Testing creating a model instance and the geometry being None"
c = City()
self.assertEqual(c.point, None)
@no_spatialite # SpatiaLite does not support abstract geometry columns
def test_geometryfield(self):
"Testing the general GeometryField."
Feature(name='Point', geom=Point(1, 1)).save()
Feature(name='LineString', geom=LineString((0, 0), (1, 1), (5, 5))).save()
Feature(name='Polygon', geom=Polygon(LinearRing((0, 0), (0, 5), (5, 5), (5, 0), (0, 0)))).save()
Feature(name='GeometryCollection',
geom=GeometryCollection(Point(2, 2), LineString((0, 0), (2, 2)),
Polygon(LinearRing((0, 0), (0, 5), (5, 5), (5, 0), (0, 0))))).save()
f_1 = Feature.objects.get(name='Point')
self.assertEqual(True, isinstance(f_1.geom, Point))
self.assertEqual((1.0, 1.0), f_1.geom.tuple)
f_2 = Feature.objects.get(name='LineString')
self.assertEqual(True, isinstance(f_2.geom, LineString))
self.assertEqual(((0.0, 0.0), (1.0, 1.0), (5.0, 5.0)), f_2.geom.tuple)
f_3 = Feature.objects.get(name='Polygon')
self.assertEqual(True, isinstance(f_3.geom, Polygon))
f_4 = Feature.objects.get(name='GeometryCollection')
self.assertEqual(True, isinstance(f_4.geom, GeometryCollection))
self.assertEqual(f_3.geom, f_4.geom[2])
@no_mysql
def test_inherited_geofields(self):
"Test GeoQuerySet methods on inherited Geometry fields."
# Creating a Pennsylvanian city.
PennsylvaniaCity.objects.create(name='Mansfield', county='Tioga', point='POINT(-77.071445 41.823881)')
# All transformation SQL will need to be performed on the
# _parent_ table.
qs = PennsylvaniaCity.objects.transform(32128)
self.assertEqual(1, qs.count())
for pc in qs: self.assertEqual(32128, pc.point.srid)
def test_raw_sql_query(self):
"Testing raw SQL query."
cities1 = City.objects.all()
# Only PostGIS would support a 'select *' query because of its recognized
# HEXEWKB format for geometry fields
as_text = 'ST_AsText' if postgis else 'asText'
cities2 = City.objects.raw('select id, name, %s(point) from geoapp_city' % as_text)
self.assertEqual(len(cities1), len(list(cities2)))
self.assertTrue(isinstance(cities2[0].point, Point))
@skipUnless(HAS_GEOS and HAS_SPATIAL_DB, "Geos and spatial db are required.")
class GeoLookupTest(TestCase):
@no_mysql
def test_disjoint_lookup(self):
"Testing the `disjoint` lookup type."
ptown = City.objects.get(name='Pueblo')
qs1 = City.objects.filter(point__disjoint=ptown.point)
self.assertEqual(7, qs1.count())
qs2 = State.objects.filter(poly__disjoint=ptown.point)
self.assertEqual(1, qs2.count())
self.assertEqual('Kansas', qs2[0].name)
def test_contains_contained_lookups(self):
"Testing the 'contained', 'contains', and 'bbcontains' lookup types."
# Getting Texas, yes we were a country -- once ;)
texas = Country.objects.get(name='Texas')
# Seeing what cities are in Texas, should get Houston and Dallas,
# and Oklahoma City because 'contained' only checks on the
# _bounding box_ of the Geometries.
if not oracle:
qs = City.objects.filter(point__contained=texas.mpoly)
self.assertEqual(3, qs.count())
cities = ['Houston', 'Dallas', 'Oklahoma City']
for c in qs: self.assertEqual(True, c.name in cities)
# Pulling out some cities.
houston = City.objects.get(name='Houston')
wellington = City.objects.get(name='Wellington')
pueblo = City.objects.get(name='Pueblo')
okcity = City.objects.get(name='Oklahoma City')
lawrence = City.objects.get(name='Lawrence')
# Now testing contains on the countries using the points for
# Houston and Wellington.
tx = Country.objects.get(mpoly__contains=houston.point) # Query w/GEOSGeometry
nz = Country.objects.get(mpoly__contains=wellington.point.hex) # Query w/EWKBHEX
self.assertEqual('Texas', tx.name)
self.assertEqual('New Zealand', nz.name)
# Spatialite 2.3 thinks that Lawrence is in Puerto Rico (a NULL geometry).
if not spatialite:
ks = State.objects.get(poly__contains=lawrence.point)
self.assertEqual('Kansas', ks.name)
# Pueblo and Oklahoma City (even though OK City is within the bounding box of Texas)
# are not contained in Texas or New Zealand.
self.assertEqual(0, len(Country.objects.filter(mpoly__contains=pueblo.point))) # Query w/GEOSGeometry object
self.assertEqual((mysql and 1) or 0,
                         len(Country.objects.filter(mpoly__contains=okcity.point.wkt))) # Query w/WKT
# OK City is contained w/in bounding box of Texas.
if not oracle:
qs = Country.objects.filter(mpoly__bbcontains=okcity.point)
self.assertEqual(1, len(qs))
self.assertEqual('Texas', qs[0].name)
# Only PostGIS has `left` and `right` lookup types.
@no_mysql
@no_oracle
@no_spatialite
def test_left_right_lookups(self):
"Testing the 'left' and 'right' lookup types."
# Left: A << B => true if xmax(A) < xmin(B)
# Right: A >> B => true if xmin(A) > xmax(B)
# See: BOX2D_left() and BOX2D_right() in lwgeom_box2dfloat4.c in PostGIS source.
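        # Worked example: a geometry whose bounding box spans x in [0, 1] is
        # 'left' of one spanning x in [2, 3] since 1 < 2; conversely the
        # second is 'right' of the first since 2 > 1.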
# Getting the borders for Colorado & Kansas
co_border = State.objects.get(name='Colorado').poly
ks_border = State.objects.get(name='Kansas').poly
# Note: Wellington has an 'X' value of 174, so it will not be considered
# to the left of CO.
# These cities should be strictly to the right of the CO border.
cities = ['Houston', 'Dallas', 'Oklahoma City',
'Lawrence', 'Chicago', 'Wellington']
qs = City.objects.filter(point__right=co_border)
self.assertEqual(6, len(qs))
for c in qs: self.assertEqual(True, c.name in cities)
# These cities should be strictly to the right of the KS border.
cities = ['Chicago', 'Wellington']
qs = City.objects.filter(point__right=ks_border)
self.assertEqual(2, len(qs))
for c in qs: self.assertEqual(True, c.name in cities)
# Note: Wellington has an 'X' value of 174, so it will not be considered
# to the left of CO.
vic = City.objects.get(point__left=co_border)
self.assertEqual('Victoria', vic.name)
cities = ['Pueblo', 'Victoria']
qs = City.objects.filter(point__left=ks_border)
self.assertEqual(2, len(qs))
for c in qs: self.assertEqual(True, c.name in cities)
# The left/right lookup tests are known failures on PostGIS 2.0/2.0.1
# http://trac.osgeo.org/postgis/ticket/2035
if postgis_bug_version():
test_left_right_lookups = unittest.expectedFailure(test_left_right_lookups)
def test_equals_lookups(self):
"Testing the 'same_as' and 'equals' lookup types."
pnt = fromstr('POINT (-95.363151 29.763374)', srid=4326)
c1 = City.objects.get(point=pnt)
c2 = City.objects.get(point__same_as=pnt)
c3 = City.objects.get(point__equals=pnt)
for c in [c1, c2, c3]: self.assertEqual('Houston', c.name)
@no_mysql
def test_null_geometries(self):
"Testing NULL geometry support, and the `isnull` lookup type."
# Creating a state with a NULL boundary.
State.objects.create(name='Puerto Rico')
# Querying for both NULL and Non-NULL values.
nullqs = State.objects.filter(poly__isnull=True)
validqs = State.objects.filter(poly__isnull=False)
# Puerto Rico should be NULL (it's a commonwealth unincorporated territory)
self.assertEqual(1, len(nullqs))
self.assertEqual('Puerto Rico', nullqs[0].name)
# The valid states should be Colorado & Kansas
self.assertEqual(2, len(validqs))
state_names = [s.name for s in validqs]
self.assertEqual(True, 'Colorado' in state_names)
self.assertEqual(True, 'Kansas' in state_names)
# Saving another commonwealth w/a NULL geometry.
nmi = State.objects.create(name='Northern Mariana Islands', poly=None)
self.assertEqual(nmi.poly, None)
        # Assigning a geometry and saving -- then UPDATE back to NULL.
nmi.poly = 'POLYGON((0 0,1 0,1 1,1 0,0 0))'
nmi.save()
State.objects.filter(name='Northern Mariana Islands').update(poly=None)
self.assertEqual(None, State.objects.get(name='Northern Mariana Islands').poly)
@no_mysql
def test_relate_lookup(self):
"Testing the 'relate' lookup type."
# To make things more interesting, we will have our Texas reference point in
# different SRIDs.
pnt1 = fromstr('POINT (649287.0363174 4177429.4494686)', srid=2847)
pnt2 = fromstr('POINT(-98.4919715741052 29.4333344025053)', srid=4326)
        # Not passing in a geometry as first param should
# raise a type error when initializing the GeoQuerySet
self.assertRaises(ValueError, Country.objects.filter, mpoly__relate=(23, 'foo'))
# Making sure the right exception is raised for the given
# bad arguments.
for bad_args, e in [((pnt1, 0), ValueError), ((pnt2, 'T*T***FF*', 0), ValueError)]:
qs = Country.objects.filter(mpoly__relate=bad_args)
self.assertRaises(e, qs.count)
# Relate works differently for the different backends.
if postgis or spatialite:
contains_mask = 'T*T***FF*'
within_mask = 'T*F**F***'
intersects_mask = 'T********'
elif oracle:
contains_mask = 'contains'
within_mask = 'inside'
# TODO: This is not quite the same as the PostGIS mask above
intersects_mask = 'overlapbdyintersect'
# Testing contains relation mask.
self.assertEqual('Texas', Country.objects.get(mpoly__relate=(pnt1, contains_mask)).name)
self.assertEqual('Texas', Country.objects.get(mpoly__relate=(pnt2, contains_mask)).name)
# Testing within relation mask.
ks = State.objects.get(name='Kansas')
self.assertEqual('Lawrence', City.objects.get(point__relate=(ks.poly, within_mask)).name)
# Testing intersection relation mask.
if not oracle:
self.assertEqual('Texas', Country.objects.get(mpoly__relate=(pnt1, intersects_mask)).name)
self.assertEqual('Texas', Country.objects.get(mpoly__relate=(pnt2, intersects_mask)).name)
self.assertEqual('Lawrence', City.objects.get(point__relate=(ks.poly, intersects_mask)).name)
@skipUnless(HAS_GEOS and HAS_SPATIAL_DB, "Geos and spatial db are required.")
class GeoQuerySetTest(TestCase):
# Please keep the tests in GeoQuerySet method's alphabetic order
@no_mysql
def test_centroid(self):
"Testing the `centroid` GeoQuerySet method."
qs = State.objects.exclude(poly__isnull=True).centroid()
if oracle:
tol = 0.1
elif spatialite:
tol = 0.000001
else:
tol = 0.000000001
for s in qs:
self.assertEqual(True, s.poly.centroid.equals_exact(s.centroid, tol))
@no_mysql
def test_diff_intersection_union(self):
"Testing the `difference`, `intersection`, `sym_difference`, and `union` GeoQuerySet methods."
geom = Point(5, 23)
qs = Country.objects.all().difference(geom).sym_difference(geom).union(geom)
        # XXX For some reason SpatiaLite does something screwy with the Texas geometry here. Also,
# XXX it doesn't like the null intersection.
if spatialite:
qs = qs.exclude(name='Texas')
else:
qs = qs.intersection(geom)
for c in qs:
if oracle:
# Should be able to execute the queries; however, they won't be the same
# as GEOS (because Oracle doesn't use GEOS internally like PostGIS or
# SpatiaLite).
pass
else:
self.assertEqual(c.mpoly.difference(geom), c.difference)
if not spatialite:
self.assertEqual(c.mpoly.intersection(geom), c.intersection)
self.assertEqual(c.mpoly.sym_difference(geom), c.sym_difference)
self.assertEqual(c.mpoly.union(geom), c.union)
@skipUnless(getattr(connection.ops, 'envelope', False), 'Database does not support envelope operation')
def test_envelope(self):
"Testing the `envelope` GeoQuerySet method."
countries = Country.objects.all().envelope()
for country in countries:
self.assertIsInstance(country.envelope, Polygon)
@no_mysql
@no_spatialite # SpatiaLite does not have an Extent function
def test_extent(self):
"Testing the `extent` GeoQuerySet method."
# Reference query:
# `SELECT ST_extent(point) FROM geoapp_city WHERE (name='Houston' or name='Dallas');`
# => BOX(-96.8016128540039 29.7633724212646,-95.3631439208984 32.7820587158203)
expected = (-96.8016128540039, 29.7633724212646, -95.3631439208984, 32.782058715820)
qs = City.objects.filter(name__in=('Houston', 'Dallas'))
extent = qs.extent()
for val, exp in zip(extent, expected):
self.assertAlmostEqual(exp, val, 4)
@no_mysql
@no_oracle
@no_spatialite
def test_force_rhr(self):
"Testing GeoQuerySet.force_rhr()."
rings = ( ( (0, 0), (5, 0), (0, 5), (0, 0) ),
( (1, 1), (1, 3), (3, 1), (1, 1) ),
)
rhr_rings = ( ( (0, 0), (0, 5), (5, 0), (0, 0) ),
( (1, 1), (3, 1), (1, 3), (1, 1) ),
)
State.objects.create(name='Foo', poly=Polygon(*rings))
s = State.objects.force_rhr().get(name='Foo')
self.assertEqual(rhr_rings, s.force_rhr.coords)
@no_mysql
@no_oracle
@no_spatialite
def test_geohash(self):
"Testing GeoQuerySet.geohash()."
if not connection.ops.geohash: return
# Reference query:
# SELECT ST_GeoHash(point) FROM geoapp_city WHERE name='Houston';
# SELECT ST_GeoHash(point, 5) FROM geoapp_city WHERE name='Houston';
ref_hash = '9vk1mfq8jx0c8e0386z6'
h1 = City.objects.geohash().get(name='Houston')
h2 = City.objects.geohash(precision=5).get(name='Houston')
self.assertEqual(ref_hash, h1.geohash)
self.assertEqual(ref_hash[:5], h2.geohash)
def test_geojson(self):
"Testing GeoJSON output from the database using GeoQuerySet.geojson()."
# Only PostGIS 1.3.4+ and SpatiaLite 3.0+ support GeoJSON.
if not connection.ops.geojson:
self.assertRaises(NotImplementedError, Country.objects.all().geojson, field_name='mpoly')
return
pueblo_json = '{"type":"Point","coordinates":[-104.609252,38.255001]}'
houston_json = '{"type":"Point","crs":{"type":"name","properties":{"name":"EPSG:4326"}},"coordinates":[-95.363151,29.763374]}'
victoria_json = '{"type":"Point","bbox":[-123.30519600,48.46261100,-123.30519600,48.46261100],"coordinates":[-123.305196,48.462611]}'
chicago_json = '{"type":"Point","crs":{"type":"name","properties":{"name":"EPSG:4326"}},"bbox":[-87.65018,41.85039,-87.65018,41.85039],"coordinates":[-87.65018,41.85039]}'
if postgis and connection.ops.spatial_version < (1, 4, 0):
pueblo_json = '{"type":"Point","coordinates":[-104.60925200,38.25500100]}'
houston_json = '{"type":"Point","crs":{"type":"EPSG","properties":{"EPSG":4326}},"coordinates":[-95.36315100,29.76337400]}'
victoria_json = '{"type":"Point","bbox":[-123.30519600,48.46261100,-123.30519600,48.46261100],"coordinates":[-123.30519600,48.46261100]}'
elif spatialite:
victoria_json = '{"type":"Point","bbox":[-123.305196,48.462611,-123.305196,48.462611],"coordinates":[-123.305196,48.462611]}'
# Precision argument should only be an integer
self.assertRaises(TypeError, City.objects.geojson, precision='foo')
# Reference queries and values.
# SELECT ST_AsGeoJson("geoapp_city"."point", 8, 0) FROM "geoapp_city" WHERE "geoapp_city"."name" = 'Pueblo';
self.assertEqual(pueblo_json, City.objects.geojson().get(name='Pueblo').geojson)
# 1.3.x: SELECT ST_AsGeoJson("geoapp_city"."point", 8, 1) FROM "geoapp_city" WHERE "geoapp_city"."name" = 'Houston';
# 1.4.x: SELECT ST_AsGeoJson("geoapp_city"."point", 8, 2) FROM "geoapp_city" WHERE "geoapp_city"."name" = 'Houston';
# This time we want to include the CRS by using the `crs` keyword.
self.assertEqual(houston_json, City.objects.geojson(crs=True, model_att='json').get(name='Houston').json)
# 1.3.x: SELECT ST_AsGeoJson("geoapp_city"."point", 8, 2) FROM "geoapp_city" WHERE "geoapp_city"."name" = 'Victoria';
# 1.4.x: SELECT ST_AsGeoJson("geoapp_city"."point", 8, 1) FROM "geoapp_city" WHERE "geoapp_city"."name" = 'Houston';
# This time we include the bounding box by using the `bbox` keyword.
self.assertEqual(victoria_json, City.objects.geojson(bbox=True).get(name='Victoria').geojson)
# 1.(3|4).x: SELECT ST_AsGeoJson("geoapp_city"."point", 5, 3) FROM "geoapp_city" WHERE "geoapp_city"."name" = 'Chicago';
# Finally, we set every available keyword.
self.assertEqual(chicago_json, City.objects.geojson(bbox=True, crs=True, precision=5).get(name='Chicago').geojson)
def test_gml(self):
"Testing GML output from the database using GeoQuerySet.gml()."
if mysql or (spatialite and not connection.ops.gml) :
self.assertRaises(NotImplementedError, Country.objects.all().gml, field_name='mpoly')
return
        # Should throw a TypeError when trying to obtain GML from a
# non-geometry field.
qs = City.objects.all()
self.assertRaises(TypeError, qs.gml, field_name='name')
ptown1 = City.objects.gml(field_name='point', precision=9).get(name='Pueblo')
ptown2 = City.objects.gml(precision=9).get(name='Pueblo')
if oracle:
# No precision parameter for Oracle :-/
gml_regex = re.compile(r'^<gml:Point srsName="SDO:4326" xmlns:gml="http://www.opengis.net/gml"><gml:coordinates decimal="\." cs="," ts=" ">-104.60925\d+,38.25500\d+ </gml:coordinates></gml:Point>')
elif spatialite and connection.ops.spatial_version < (3, 0, 0):
# Spatialite before 3.0 has extra colon in SrsName
gml_regex = re.compile(r'^<gml:Point SrsName="EPSG::4326"><gml:coordinates decimal="\." cs="," ts=" ">-104.609251\d+,38.255001</gml:coordinates></gml:Point>')
else:
gml_regex = re.compile(r'^<gml:Point srsName="EPSG:4326"><gml:coordinates>-104\.60925\d+,38\.255001</gml:coordinates></gml:Point>')
for ptown in [ptown1, ptown2]:
self.assertTrue(gml_regex.match(ptown.gml))
        # PostGIS < 1.5 doesn't include dimension in GMLv3 output.
if postgis and connection.ops.spatial_version >= (1, 5, 0):
self.assertIn('<gml:pos srsDimension="2">',
City.objects.gml(version=3).get(name='Pueblo').gml)
def test_kml(self):
"Testing KML output from the database using GeoQuerySet.kml()."
# Only PostGIS and Spatialite (>=2.4.0-RC4) support KML serialization
if not (postgis or (spatialite and connection.ops.kml)):
self.assertRaises(NotImplementedError, State.objects.all().kml, field_name='poly')
return
# Should throw a TypeError when trying to obtain KML from a
# non-geometry field.
qs = City.objects.all()
self.assertRaises(TypeError, qs.kml, 'name')
# The reference KML depends on the version of PostGIS used
# (the output stopped including altitude in 1.3.3).
if connection.ops.spatial_version >= (1, 3, 3):
ref_kml = '<Point><coordinates>-104.609252,38.255001</coordinates></Point>'
else:
ref_kml = '<Point><coordinates>-104.609252,38.255001,0</coordinates></Point>'
# Ensuring the KML is as expected.
ptown1 = City.objects.kml(field_name='point', precision=9).get(name='Pueblo')
ptown2 = City.objects.kml(precision=9).get(name='Pueblo')
for ptown in [ptown1, ptown2]:
self.assertEqual(ref_kml, ptown.kml)
# Only PostGIS has support for the MakeLine aggregate.
@no_mysql
@no_oracle
@no_spatialite
def test_make_line(self):
"Testing the `make_line` GeoQuerySet method."
# Ensuring that a `TypeError` is raised on models without PointFields.
self.assertRaises(TypeError, State.objects.make_line)
self.assertRaises(TypeError, Country.objects.make_line)
# Reference query:
# SELECT AsText(ST_MakeLine(geoapp_city.point)) FROM geoapp_city;
ref_line = GEOSGeometry('LINESTRING(-95.363151 29.763374,-96.801611 32.782057,-97.521157 34.464642,174.783117 -41.315268,-104.609252 38.255001,-95.23506 38.971823,-87.650175 41.850385,-123.305196 48.462611)', srid=4326)
self.assertEqual(ref_line, City.objects.make_line())
@no_mysql
def test_num_geom(self):
"Testing the `num_geom` GeoQuerySet method."
# Both 'countries' only have two geometries.
for c in Country.objects.num_geom():
self.assertEqual(2, c.num_geom)
for c in City.objects.filter(point__isnull=False).num_geom():
# Oracle and PostGIS 2.0+ will return 1 for the number of
# geometries on non-collections, whereas PostGIS < 2.0.0
# will return None.
if postgis and connection.ops.spatial_version < (2, 0, 0):
self.assertIsNone(c.num_geom)
else:
self.assertEqual(1, c.num_geom)
@no_mysql
@no_spatialite # SpatiaLite can only count vertices in LineStrings
def test_num_points(self):
"Testing the `num_points` GeoQuerySet method."
for c in Country.objects.num_points():
self.assertEqual(c.mpoly.num_points, c.num_points)
if not oracle:
# Oracle cannot count vertices in Point geometries.
for c in City.objects.num_points(): self.assertEqual(1, c.num_points)
@no_mysql
def test_point_on_surface(self):
"Testing the `point_on_surface` GeoQuerySet method."
# Reference values.
if oracle:
# SELECT SDO_UTIL.TO_WKTGEOMETRY(SDO_GEOM.SDO_POINTONSURFACE(GEOAPP_COUNTRY.MPOLY, 0.05)) FROM GEOAPP_COUNTRY;
ref = {'New Zealand' : fromstr('POINT (174.616364 -36.100861)', srid=4326),
'Texas' : fromstr('POINT (-103.002434 36.500397)', srid=4326),
}
elif postgis or spatialite:
# Using GEOSGeometry to compute the reference point on surface values
# -- since PostGIS also uses GEOS these should be the same.
ref = {'New Zealand' : Country.objects.get(name='New Zealand').mpoly.point_on_surface,
'Texas' : Country.objects.get(name='Texas').mpoly.point_on_surface
}
for c in Country.objects.point_on_surface():
if spatialite:
# XXX This seems to be a WKT-translation-related precision issue?
tol = 0.00001
else:
tol = 0.000000001
self.assertEqual(True, ref[c.name].equals_exact(c.point_on_surface, tol))
@no_mysql
@no_spatialite
def test_reverse_geom(self):
"Testing GeoQuerySet.reverse_geom()."
coords = [ (-95.363151, 29.763374), (-95.448601, 29.713803) ]
Track.objects.create(name='Foo', line=LineString(coords))
t = Track.objects.reverse_geom().get(name='Foo')
coords.reverse()
self.assertEqual(tuple(coords), t.reverse_geom.coords)
if oracle:
self.assertRaises(TypeError, State.objects.reverse_geom)
@no_mysql
@no_oracle
def test_scale(self):
"Testing the `scale` GeoQuerySet method."
xfac, yfac = 2, 3
tol = 5 # XXX The low precision tolerance is for SpatiaLite
qs = Country.objects.scale(xfac, yfac, model_att='scaled')
for c in qs:
for p1, p2 in zip(c.mpoly, c.scaled):
for r1, r2 in zip(p1, p2):
for c1, c2 in zip(r1.coords, r2.coords):
self.assertAlmostEqual(c1[0] * xfac, c2[0], tol)
self.assertAlmostEqual(c1[1] * yfac, c2[1], tol)
@no_mysql
@no_oracle
@no_spatialite
def test_snap_to_grid(self):
"Testing GeoQuerySet.snap_to_grid()."
# Let's try and break snap_to_grid() with bad combinations of arguments.
for bad_args in ((), range(3), range(5)):
self.assertRaises(ValueError, Country.objects.snap_to_grid, *bad_args)
for bad_args in (('1.0',), (1.0, None), tuple(map(six.text_type, range(4)))):
self.assertRaises(TypeError, Country.objects.snap_to_grid, *bad_args)
# Boundary for San Marino, courtesy of Bjorn Sandvik of thematicmapping.org
# from the world borders dataset he provides.
wkt = ('MULTIPOLYGON(((12.41580 43.95795,12.45055 43.97972,12.45389 43.98167,'
'12.46250 43.98472,12.47167 43.98694,12.49278 43.98917,'
'12.50555 43.98861,12.51000 43.98694,12.51028 43.98277,'
'12.51167 43.94333,12.51056 43.93916,12.49639 43.92333,'
'12.49500 43.91472,12.48778 43.90583,12.47444 43.89722,'
'12.46472 43.89555,12.45917 43.89611,12.41639 43.90472,'
'12.41222 43.90610,12.40782 43.91366,12.40389 43.92667,'
'12.40500 43.94833,12.40889 43.95499,12.41580 43.95795)))')
Country.objects.create(name='San Marino', mpoly=fromstr(wkt))
# Because floating-point arithmetic isn't exact, we set a tolerance
# to pass into GEOS `equals_exact`.
tol = 0.000000001
# SELECT AsText(ST_SnapToGrid("geoapp_country"."mpoly", 0.1)) FROM "geoapp_country" WHERE "geoapp_country"."name" = 'San Marino';
ref = fromstr('MULTIPOLYGON(((12.4 44,12.5 44,12.5 43.9,12.4 43.9,12.4 44)))')
self.assertTrue(ref.equals_exact(Country.objects.snap_to_grid(0.1).get(name='San Marino').snap_to_grid, tol))
# SELECT AsText(ST_SnapToGrid("geoapp_country"."mpoly", 0.05, 0.23)) FROM "geoapp_country" WHERE "geoapp_country"."name" = 'San Marino';
ref = fromstr('MULTIPOLYGON(((12.4 43.93,12.45 43.93,12.5 43.93,12.45 43.93,12.4 43.93)))')
self.assertTrue(ref.equals_exact(Country.objects.snap_to_grid(0.05, 0.23).get(name='San Marino').snap_to_grid, tol))
# SELECT AsText(ST_SnapToGrid("geoapp_country"."mpoly", 0.5, 0.17, 0.05, 0.23)) FROM "geoapp_country" WHERE "geoapp_country"."name" = 'San Marino';
ref = fromstr('MULTIPOLYGON(((12.4 43.87,12.45 43.87,12.45 44.1,12.5 44.1,12.5 43.87,12.45 43.87,12.4 43.87)))')
self.assertTrue(ref.equals_exact(Country.objects.snap_to_grid(0.05, 0.23, 0.5, 0.17).get(name='San Marino').snap_to_grid, tol))
def test_svg(self):
"Testing SVG output using GeoQuerySet.svg()."
if mysql or oracle:
self.assertRaises(NotImplementedError, City.objects.svg)
return
self.assertRaises(TypeError, City.objects.svg, precision='foo')
# SELECT AsSVG(geoapp_city.point, 0, 8) FROM geoapp_city WHERE name = 'Pueblo';
svg1 = 'cx="-104.609252" cy="-38.255001"'
# Even though relative output is requested, there is only one point, so the
# result is practically the same except for the 'c' letter prefix on the x,y values.
svg2 = svg1.replace('c', '')
self.assertEqual(svg1, City.objects.svg().get(name='Pueblo').svg)
self.assertEqual(svg2, City.objects.svg(relative=5).get(name='Pueblo').svg)
@no_mysql
def test_transform(self):
"Testing the transform() GeoQuerySet method."
# Pre-transformed points for Houston and Pueblo.
htown = fromstr('POINT(1947516.83115183 6322297.06040572)', srid=3084)
ptown = fromstr('POINT(992363.390841912 481455.395105533)', srid=2774)
prec = 3 # Precision is low due to version variations in PROJ and GDAL.
# Asserting the result of the transform operation with the values in
# the pre-transformed points. Oracle does not have the 3084 SRID.
if not oracle:
h = City.objects.transform(htown.srid).get(name='Houston')
self.assertEqual(3084, h.point.srid)
self.assertAlmostEqual(htown.x, h.point.x, prec)
self.assertAlmostEqual(htown.y, h.point.y, prec)
p1 = City.objects.transform(ptown.srid, field_name='point').get(name='Pueblo')
p2 = City.objects.transform(srid=ptown.srid).get(name='Pueblo')
for p in [p1, p2]:
self.assertEqual(2774, p.point.srid)
self.assertAlmostEqual(ptown.x, p.point.x, prec)
self.assertAlmostEqual(ptown.y, p.point.y, prec)
@no_mysql
@no_oracle
def test_translate(self):
"Testing the `translate` GeoQuerySet method."
xfac, yfac = 5, -23
qs = Country.objects.translate(xfac, yfac, model_att='translated')
for c in qs:
for p1, p2 in zip(c.mpoly, c.translated):
for r1, r2 in zip(p1, p2):
for c1, c2 in zip(r1.coords, r2.coords):
# XXX The low precision is for SpatiaLite
self.assertAlmostEqual(c1[0] + xfac, c2[0], 5)
self.assertAlmostEqual(c1[1] + yfac, c2[1], 5)
@no_mysql
def test_unionagg(self):
"Testing the `unionagg` (aggregate union) GeoQuerySet method."
tx = Country.objects.get(name='Texas').mpoly
# Houston, Dallas -- Oracle has different order.
union1 = fromstr('MULTIPOINT(-96.801611 32.782057,-95.363151 29.763374)')
union2 = fromstr('MULTIPOINT(-95.363151 29.763374,-96.801611 32.782057)')
qs = City.objects.filter(point__within=tx)
self.assertRaises(TypeError, qs.unionagg, 'name')
# Using `field_name` keyword argument in one query and specifying an
# order in the other (which should not be used because this is
# an aggregate method on a spatial column)
u1 = qs.unionagg(field_name='point')
u2 = qs.order_by('name').unionagg()
tol = 0.00001
if oracle:
union = union2
else:
union = union1
self.assertEqual(True, union.equals_exact(u1, tol))
self.assertEqual(True, union.equals_exact(u2, tol))
qs = City.objects.filter(name='NotACity')
self.assertEqual(None, qs.unionagg(field_name='point'))
| bsd-3-clause |
BrandonY/python-docs-samples | tasks/pull_queue_snippets_test.py | 1 | 1231 | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import pull_queue_snippets
TEST_PROJECT_ID = os.getenv('GCLOUD_PROJECT')
TEST_LOCATION = os.getenv('TEST_QUEUE_LOCATION', 'us-central1')
TEST_QUEUE_NAME = os.getenv('TEST_QUEUE_NAME', 'my-pull-queue')
def test_create_task():
result = pull_queue_snippets.create_task(
TEST_PROJECT_ID, TEST_QUEUE_NAME, TEST_LOCATION)
assert TEST_QUEUE_NAME in result['name']
def test_pull_and_ack_task():
pull_queue_snippets.create_task(
TEST_PROJECT_ID, TEST_QUEUE_NAME, TEST_LOCATION)
task = pull_queue_snippets.pull_task(
TEST_PROJECT_ID, TEST_QUEUE_NAME, TEST_LOCATION)
pull_queue_snippets.acknowledge_task(task)
| apache-2.0 |
shashank971/edx-platform | common/djangoapps/terrain/stubs/http.py | 139 | 8556 | """
Stub implementation of an HTTP service.
"""
from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
import urlparse
import threading
import json
from functools import wraps
from lazy import lazy
from logging import getLogger
LOGGER = getLogger(__name__)
def require_params(method, *required_keys):
"""
Decorator to ensure that the method has all the required parameters.
Example:
@require_params('GET', 'id', 'state')
def handle_request(self):
# ....
would send a 400 response if no GET parameters were specified
for 'id' or 'state' (or if those parameters had empty values).
The wrapped function should be a method of a `StubHttpRequestHandler`
subclass.
Currently, "GET" and "POST" are the only supported methods.
"""
def decorator(func):
@wraps(func)
def wrapper(self, *args, **kwargs):
# Read either GET querystring params or POST dict params
if method == "GET":
params = self.get_params
elif method == "POST":
params = self.post_dict
else:
raise ValueError("Unsupported method '{method}'".format(method=method))
# Check for required values
missing = []
for key in required_keys:
if params.get(key) is None:
missing.append(key)
if len(missing) > 0:
msg = "Missing required key(s) {keys}".format(keys=",".join(missing))
self.send_response(400, content=msg, headers={'Content-type': 'text/plain'})
# If nothing is missing, execute the function as usual
else:
return func(self, *args, **kwargs)
return wrapper
return decorator
class StubHttpRequestHandler(BaseHTTPRequestHandler, object):
"""
Handler for the stub HTTP service.
"""
protocol = "HTTP/1.0"
def log_message(self, format_str, *args):
"""
Redirect messages to keep the test console clean.
"""
LOGGER.debug(self._format_msg(format_str, *args))
def log_error(self, format_str, *args):
"""
Helper to log a server error.
"""
LOGGER.error(self._format_msg(format_str, *args))
@lazy
def request_content(self):
"""
Retrieve the content of the request.
"""
try:
length = int(self.headers.getheader('content-length'))
except (TypeError, ValueError):
return ""
else:
return self.rfile.read(length)
@lazy
def post_dict(self):
"""
Retrieve the request POST parameters from the client as a dictionary.
If no POST parameters can be interpreted, return an empty dict.
"""
contents = self.request_content
# The POST dict will contain a list of values for each key.
# None of our parameters are lists, however, so we map [val] --> val
# If the list contains multiple entries, we pick the first one
try:
post_dict = urlparse.parse_qs(contents, keep_blank_values=True)
return {
key: list_val[0]
for key, list_val in post_dict.items()
}
except:
return dict()
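# Example (illustrative): for request content 'a=1&b=2&b=3',
# urlparse.parse_qs returns {'a': ['1'], 'b': ['2', '3']}, so post_dict
# yields {'a': '1', 'b': '2'} -- the first value of each list wins.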
@lazy
def get_params(self):
"""
Return the GET parameters (querystring in the URL).
"""
query = urlparse.urlparse(self.path).query
# By default, `parse_qs` returns a list of values for each param
# For convenience, we replace lists of 1 element with just the element
return {
key: value[0] if len(value) == 1 else value
for key, value in urlparse.parse_qs(query).items()
}
@lazy
def path_only(self):
"""
Return the URL path without GET parameters.
Removes the trailing slash if there is one.
"""
path = urlparse.urlparse(self.path).path
if path.endswith('/'):
return path[:-1]
else:
return path
def do_PUT(self):
"""
Allow callers to configure the stub server using the /set_config URL.
The request should have POST data, such that:
Each POST parameter is the configuration key.
Each POST value is a JSON-encoded string value for the configuration.
"""
if self.path == "/set_config" or self.path == "/set_config/":
if len(self.post_dict) > 0:
for key, value in self.post_dict.iteritems():
# Decode the params as UTF-8
try:
key = unicode(key, 'utf-8')
value = unicode(value, 'utf-8')
except UnicodeDecodeError:
self.log_message("Could not decode request params as UTF-8")
self.log_message(u"Set config '{0}' to '{1}'".format(key, value))
try:
value = json.loads(value)
except ValueError:
self.log_message(u"Could not parse JSON: {0}".format(value))
self.send_response(400)
else:
self.server.config[key] = value
self.send_response(200)
# No parameters sent to configure, so return success by default
else:
self.send_response(200)
else:
self.send_response(404)
def send_response(self, status_code, content=None, headers=None):
"""
Send a response back to the client with the HTTP `status_code` (int),
`content` (str) and `headers` (dict).
"""
self.log_message(
"Sent HTTP response: {0} with content '{1}' and headers {2}".format(status_code, content, headers)
)
if headers is None:
headers = {
'Access-Control-Allow-Origin': "*",
}
BaseHTTPRequestHandler.send_response(self, status_code)
for (key, value) in headers.items():
self.send_header(key, value)
if len(headers) > 0:
self.end_headers()
if content is not None:
self.wfile.write(content)
def send_json_response(self, content):
"""
Send a response with status code 200, the given content serialized as
JSON, and the Content-Type header set appropriately
"""
self.send_response(200, json.dumps(content), {"Content-Type": "application/json"})
def _format_msg(self, format_str, *args):
"""
Format message for logging.
`format_str` is a string with old-style Python format escaping;
`args` is an array of values to fill into the string.
"""
return u"{0} - - [{1}] {2}\n".format(
self.client_address[0],
self.log_date_time_string(),
format_str % args
)
def do_HEAD(self):
"""
Respond to an HTTP HEAD request
"""
self.send_response(200)
class StubHttpService(HTTPServer, object):
"""
Stub HTTP service implementation.
"""
# Subclasses override this to provide the handler class to use.
# Should be a subclass of `StubHttpRequestHandler`
HANDLER_CLASS = StubHttpRequestHandler
def __init__(self, port_num=0):
"""
Configure the server to listen on localhost.
Default is to choose an arbitrary open port.
"""
address = ('0.0.0.0', port_num)
HTTPServer.__init__(self, address, self.HANDLER_CLASS)
# Create a dict to store configuration values set by the client
self.config = dict()
# Start the server in a separate thread
server_thread = threading.Thread(target=self.serve_forever)
server_thread.daemon = True
server_thread.start()
# Log the port we're using to help identify port conflict errors
LOGGER.debug('Starting service on port {0}'.format(self.port))
def shutdown(self):
"""
Stop the server and free up the port
"""
# First call superclass shutdown()
HTTPServer.shutdown(self)
# We also need to manually close the socket
self.socket.close()
@property
def port(self):
"""
Return the port that the service is listening on.
"""
_, port = self.server_address
return port
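# --- Usage sketch (illustrative; not part of the original module) ---
# The handler subclass and the '/ping' endpoint below are hypothetical. They
# show how HANDLER_CLASS is meant to be overridden and how a client can push
# configuration through the /set_config endpoint handled by do_PUT.
if __name__ == '__main__':
    import httplib
    import urllib

    class PingHandler(StubHttpRequestHandler):
        def do_GET(self):
            if self.path_only == '/ping':
                # Echo back a value previously stored via /set_config
                self.send_json_response({'mode': self.server.config.get('mode')})
            else:
                self.send_response(404)

    class PingService(StubHttpService):
        HANDLER_CLASS = PingHandler

    service = PingService()
    # Each /set_config value must be JSON-encoded (see do_PUT above).
    conn = httplib.HTTPConnection('127.0.0.1', service.port)
    conn.request('PUT', '/set_config', urllib.urlencode({'mode': json.dumps('test')}))
    assert conn.getresponse().status == 200
    # The stub speaks HTTP/1.0, so use a fresh connection per request.
    conn = httplib.HTTPConnection('127.0.0.1', service.port)
    conn.request('GET', '/ping')
    print conn.getresponse().read()  # expected: {"mode": "test"}
    service.shutdown()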
| agpl-3.0 |
espadrine/opera | chromium/src/v8/tools/testrunner/local/utils.py | 21 | 3476 | # Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
from os.path import exists
from os.path import isdir
from os.path import join
import platform
import re
def GetSuitePaths(test_root):
def IsSuite(path):
return isdir(path) and exists(join(path, 'testcfg.py'))
return [ f for f in os.listdir(test_root) if IsSuite(join(test_root, f)) ]
# Reads a file into an array of strings
def ReadLinesFrom(name):
lines = []
with open(name) as f:
for line in f:
if line.startswith('#'): continue
if '#' in line:
line = line[:line.find('#')]
line = line.strip()
if not line: continue
lines.append(line)
return lines
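# Example (illustrative): given a file containing
#   # a full-line comment
#   mjsunit/foo  # trailing comment
# ReadLinesFrom() returns ['mjsunit/foo'].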
def GuessOS():
system = platform.system()
if system == 'Linux':
return 'linux'
elif system == 'Darwin':
return 'macos'
elif system.find('CYGWIN') >= 0:
return 'cygwin'
elif system == 'Windows' or system == 'Microsoft':
# On Windows Vista platform.system() can return 'Microsoft' with some
# versions of Python, see http://bugs.python.org/issue1082
return 'win32'
elif system == 'FreeBSD':
return 'freebsd'
elif system == 'OpenBSD':
return 'openbsd'
elif system == 'SunOS':
return 'solaris'
elif system == 'NetBSD':
return 'netbsd'
else:
return None
# This will default to building the 32 bit VM even on machines that are
# capable of running the 64 bit VM.
def DefaultArch():
machine = platform.machine()
machine = machine.lower() # Windows 7 capitalizes 'AMD64'.
if machine.startswith('arm'):
return 'arm'
elif (not machine) or (re.match('(x|i[3-6])86$', machine) is not None):
return 'ia32'
elif machine == 'i86pc':
return 'ia32'
elif machine == 'x86_64':
return 'ia32'
elif machine == 'amd64':
return 'ia32'
else:
return None
def GuessWordsize():
if '64' in platform.machine():
return '64'
else:
return '32'
def IsWindows():
return GuessOS() == 'win32'
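if __name__ == '__main__':
  # Illustrative only; not part of the original test-runner module. Prints
  # the platform information the helpers above detect.
  print '%s/%s (wordsize %s)' % (GuessOS(), DefaultArch(), GuessWordsize())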
| bsd-3-clause |
klonage/nlt-gcs | packages/IronPython.StdLib.2.7.4/content/Lib/encodings/hex_codec.py | 88 | 2388 | """ Python 'hex_codec' Codec - 2-digit hex content transfer encoding
Unlike most of the other codecs which target Unicode, this codec
will return Python string objects for both encode and decode.
Written by Marc-Andre Lemburg ([email protected]).
"""
import codecs, binascii
### Codec APIs
def hex_encode(input,errors='strict'):
""" Encodes the object input and returns a tuple (output
object, length consumed).
errors defines the error handling to apply. It defaults to
'strict' handling which is the only currently supported
error handling for this codec.
"""
assert errors == 'strict'
output = binascii.b2a_hex(input)
return (output, len(input))
def hex_decode(input,errors='strict'):
""" Decodes the object input and returns a tuple (output
object, length consumed).
input must be an object which provides the bf_getreadbuf
buffer slot. Python strings, buffer objects and memory
mapped files are examples of objects providing this slot.
errors defines the error handling to apply. It defaults to
'strict' handling which is the only currently supported
error handling for this codec.
"""
assert errors == 'strict'
output = binascii.a2b_hex(input)
return (output, len(input))
class Codec(codecs.Codec):
def encode(self, input,errors='strict'):
return hex_encode(input,errors)
def decode(self, input,errors='strict'):
return hex_decode(input,errors)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
assert self.errors == 'strict'
return binascii.b2a_hex(input)
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
assert self.errors == 'strict'
return binascii.a2b_hex(input)
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='hex',
encode=hex_encode,
decode=hex_decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamwriter=StreamWriter,
streamreader=StreamReader,
)
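### Usage sketch (illustrative; not part of the codec module itself)
if __name__ == '__main__':
    # Round-trip a byte string through the 2-digit hex representation.
    encoded, consumed = hex_encode('hello')
    assert (encoded, consumed) == ('68656c6c6f', 5)
    decoded, consumed = hex_decode(encoded)
    assert (decoded, consumed) == ('hello', 10)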
| gpl-3.0 |
zaina/nova | tools/install_venv_common.py | 333 | 5959 | # Copyright 2013 OpenStack Foundation
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Provides methods needed by installation script for OpenStack development
virtual environments.
Since this script is used to bootstrap a virtualenv from the system's Python
environment, it should be kept strictly compatible with Python 2.6.
Synced in from openstack-common
"""
from __future__ import print_function
import optparse
import os
import subprocess
import sys
class InstallVenv(object):
def __init__(self, root, venv, requirements,
test_requirements, py_version,
project):
self.root = root
self.venv = venv
self.requirements = requirements
self.test_requirements = test_requirements
self.py_version = py_version
self.project = project
def die(self, message, *args):
print(message % args, file=sys.stderr)
sys.exit(1)
def check_python_version(self):
if sys.version_info < (2, 6):
self.die("Need Python Version >= 2.6")
def run_command_with_code(self, cmd, redirect_output=True,
check_exit_code=True):
"""Runs a command in an out-of-process shell.
Returns the output of that command. Working directory is self.root.
"""
if redirect_output:
stdout = subprocess.PIPE
else:
stdout = None
proc = subprocess.Popen(cmd, cwd=self.root, stdout=stdout)
output = proc.communicate()[0]
if check_exit_code and proc.returncode != 0:
self.die('Command "%s" failed.\n%s', ' '.join(cmd), output)
return (output, proc.returncode)
def run_command(self, cmd, redirect_output=True, check_exit_code=True):
return self.run_command_with_code(cmd, redirect_output,
check_exit_code)[0]
def get_distro(self):
if (os.path.exists('/etc/fedora-release') or
os.path.exists('/etc/redhat-release')):
return Fedora(
self.root, self.venv, self.requirements,
self.test_requirements, self.py_version, self.project)
else:
return Distro(
self.root, self.venv, self.requirements,
self.test_requirements, self.py_version, self.project)
def check_dependencies(self):
self.get_distro().install_virtualenv()
def create_virtualenv(self, no_site_packages=True):
"""Creates the virtual environment and installs PIP.
Creates the virtual environment and installs PIP only into the
virtual environment.
"""
if not os.path.isdir(self.venv):
print('Creating venv...', end=' ')
if no_site_packages:
self.run_command(['virtualenv', '-q', '--no-site-packages',
self.venv])
else:
self.run_command(['virtualenv', '-q', self.venv])
print('done.')
else:
print("venv already exists...")
pass
def pip_install(self, *args):
self.run_command(['tools/with_venv.sh',
'pip', 'install', '--upgrade'] + list(args),
redirect_output=False)
def install_dependencies(self):
print('Installing dependencies with pip (this can take a while)...')
# First things first, make sure our venv has the latest pip and
# setuptools and pbr
self.pip_install('pip>=1.4')
self.pip_install('setuptools')
self.pip_install('pbr')
self.pip_install('-r', self.requirements, '-r', self.test_requirements)
def parse_args(self, argv):
"""Parses command-line arguments."""
parser = optparse.OptionParser()
parser.add_option('-n', '--no-site-packages',
action='store_true',
help="Do not inherit packages from global Python "
"install.")
return parser.parse_args(argv[1:])[0]
class Distro(InstallVenv):
def check_cmd(self, cmd):
return bool(self.run_command(['which', cmd],
check_exit_code=False).strip())
def install_virtualenv(self):
if self.check_cmd('virtualenv'):
return
if self.check_cmd('easy_install'):
print('Installing virtualenv via easy_install...', end=' ')
if self.run_command(['easy_install', 'virtualenv']):
print('Succeeded')
return
else:
print('Failed')
self.die('ERROR: virtualenv not found.\n\n%s development'
' requires virtualenv, please install it using your'
' favorite package management tool' % self.project)
class Fedora(Distro):
"""This covers all Fedora-based distributions.
Includes: Fedora, RHEL, CentOS, Scientific Linux
"""
def check_pkg(self, pkg):
return self.run_command_with_code(['rpm', '-q', pkg],
check_exit_code=False)[1] == 0
def install_virtualenv(self):
if self.check_cmd('virtualenv'):
return
if not self.check_pkg('python-virtualenv'):
self.die("Please install 'python-virtualenv'.")
super(Fedora, self).install_virtualenv()
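if __name__ == '__main__':
    # Illustrative wiring only (a hedged sketch): real projects drive this
    # module from a tools/install_venv.py entry point; the paths and project
    # name below are assumptions.
    root = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
    install = InstallVenv(root, os.path.join(root, '.venv'),
                          'requirements.txt', 'test-requirements.txt',
                          sys.version_info[:2], 'example-project')
    options = install.parse_args(sys.argv)
    install.check_python_version()
    install.check_dependencies()
    install.create_virtualenv(no_site_packages=options.no_site_packages)
    install.install_dependencies()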
| apache-2.0 |
charbeljc/OCB | openerp/tools/convert.py | 205 | 41282 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import cStringIO
import csv
import logging
import os.path
import pickle
import re
import sys
# for eval context:
import time
import openerp
import openerp.release
import openerp.workflow
from yaml_import import convert_yaml_import
import assertion_report
_logger = logging.getLogger(__name__)
try:
import pytz
except:
_logger.warning('could not find pytz library, please install it')
class pytzclass(object):
all_timezones=[]
pytz=pytzclass()
from datetime import datetime, timedelta
from dateutil.relativedelta import relativedelta
from lxml import etree, builder
import misc
from config import config
from translate import _
# List of etree._Element subclasses that we choose to ignore when parsing XML.
from misc import SKIPPED_ELEMENT_TYPES
from misc import unquote
from openerp import SUPERUSER_ID
# Import of XML records requires the unsafe eval as well,
# almost everywhere, which is ok because it supposedly comes
# from trusted data, but at least we make it obvious now.
unsafe_eval = eval
from safe_eval import safe_eval as eval
class ParseError(Exception):
def __init__(self, msg, text, filename, lineno):
self.msg = msg
self.text = text
self.filename = filename
self.lineno = lineno
def __str__(self):
return '"%s" while parsing %s:%s, near\n%s' \
% (self.msg, self.filename, self.lineno, self.text)
def _ref(self, cr):
return lambda x: self.id_get(cr, x)
def _obj(pool, cr, uid, model_str, context=None):
model = pool[model_str]
return lambda x: model.browse(cr, uid, x, context=context)
def _get_idref(self, cr, uid, model_str, context, idref):
idref2 = dict(idref,
time=time,
DateTime=datetime,
datetime=datetime,
timedelta=timedelta,
relativedelta=relativedelta,
version=openerp.release.major_version,
ref=_ref(self, cr),
pytz=pytz)
if len(model_str):
idref2['obj'] = _obj(self.pool, cr, uid, model_str, context=context)
return idref2
def _fix_multiple_roots(node):
"""
Surround the children of the ``node`` element of an XML field with a
single root "data" element, to prevent having a document with multiple
roots once parsed separately.
XML nodes should have one root only, but we'd like to support
direct multiple roots in our partial documents (like inherited view architectures).
As a convention we'll surround multiple root with a container "data" element, to be
ignored later when parsing.
"""
real_nodes = [x for x in node if not isinstance(x, SKIPPED_ELEMENT_TYPES)]
if len(real_nodes) > 1:
data_node = etree.Element("data")
for child in node:
data_node.append(child)
node.append(data_node)
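# Example (illustrative): _fix_multiple_roots() rewrites a field such as
#   <field name="arch" type="xml"><p>one</p><p>two</p></field>
# into
#   <field name="arch" type="xml"><data><p>one</p><p>two</p></data></field>
# so the architecture parses as a single-rooted document.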
def _eval_xml(self, node, pool, cr, uid, idref, context=None):
if context is None:
context = {}
if node.tag in ('field','value'):
t = node.get('type','char')
f_model = node.get('model', '').encode('utf-8')
if node.get('search'):
f_search = node.get("search",'').encode('utf-8')
f_use = node.get("use",'id').encode('utf-8')
f_name = node.get("name",'').encode('utf-8')
idref2 = {}
if f_search:
idref2 = _get_idref(self, cr, uid, f_model, context, idref)
q = unsafe_eval(f_search, idref2)
ids = pool[f_model].search(cr, uid, q)
if f_use != 'id':
ids = map(lambda x: x[f_use], pool[f_model].read(cr, uid, ids, [f_use]))
_cols = pool[f_model]._columns
if (f_name in _cols) and _cols[f_name]._type=='many2many':
return ids
f_val = False
if len(ids):
f_val = ids[0]
if isinstance(f_val, tuple):
f_val = f_val[0]
return f_val
a_eval = node.get('eval','')
if a_eval:
idref2 = _get_idref(self, cr, uid, f_model, context, idref)
try:
return unsafe_eval(a_eval, idref2)
except Exception:
logging.getLogger('openerp.tools.convert.init').error(
'Could not eval(%s) for %s in %s', a_eval, node.get('name'), context)
raise
def _process(s, idref):
matches = re.finditer('[^%]%\((.*?)\)[ds]', s)
done = []
for m in matches:
found = m.group()[1:]
if found in done:
continue
done.append(found)
id = m.groups()[0]
if not id in idref:
idref[id] = self.id_get(cr, id)
s = s.replace(found, str(idref[id]))
s = s.replace('%%', '%') # Quite weird, but it's for (somewhat) backward compatibility's sake
return s
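# Example (illustrative): with idref mapping 'base.main_company' to database
# id 1, the string ' %(base.main_company)d' becomes ' 1', and a literal '%%'
# collapses to '%'.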
if t == 'xml':
_fix_multiple_roots(node)
return '<?xml version="1.0"?>\n'\
+_process("".join([etree.tostring(n, encoding='utf-8')
for n in node]), idref)
if t == 'html':
return _process("".join([etree.tostring(n, encoding='utf-8')
for n in node]), idref)
data = node.text
if node.get('file'):
with openerp.tools.file_open(node.get('file'), 'rb') as f:
data = f.read()
if t == 'file':
from ..modules import module
path = data.strip()
if not module.get_module_resource(self.module, path):
raise IOError("No such file or directory: '%s' in %s" % (
path, self.module))
return '%s,%s' % (self.module, path)
if t == 'char':
return data
if t == 'base64':
return data.encode('base64')
if t == 'int':
d = data.strip()
if d == 'None':
return None
return int(d)
if t == 'float':
return float(data.strip())
if t in ('list','tuple'):
res=[]
for n in node.iterchildren(tag='value'):
res.append(_eval_xml(self,n,pool,cr,uid,idref))
if t=='tuple':
return tuple(res)
return res
elif node.tag == "function":
args = []
a_eval = node.get('eval','')
# FIXME: should probably be exclusive
if a_eval:
idref['ref'] = lambda x: self.id_get(cr, x)
args = unsafe_eval(a_eval, idref)
for n in node:
return_val = _eval_xml(self,n, pool, cr, uid, idref, context)
if return_val is not None:
args.append(return_val)
model = pool[node.get('model', '')]
method = node.get('name')
res = getattr(model, method)(cr, uid, *args)
return res
elif node.tag == "test":
return node.text
escape_re = re.compile(r'(?<!\\)/')
def escape(x):
return x.replace('\\/', '/')
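# Example (illustrative): escape_re splits a menu path on unescaped slashes,
# and escape() restores backslash-escaped ones:
#   escape_re.split('Sales/Products\\/Services') -> ['Sales', 'Products\\/Services']
#   map(escape, ...) -> ['Sales', 'Products/Services']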
class xml_import(object):
@staticmethod
def nodeattr2bool(node, attr, default=False):
if not node.get(attr):
return default
val = node.get(attr).strip()
if not val:
return default
return val.lower() not in ('0', 'false', 'off')
def isnoupdate(self, data_node=None):
return self.noupdate or (len(data_node) and self.nodeattr2bool(data_node, 'noupdate', False))
def get_context(self, data_node, node, eval_dict):
data_node_context = (len(data_node) and data_node.get('context','').encode('utf8'))
node_context = node.get("context",'').encode('utf8')
context = {}
for ctx in (data_node_context, node_context):
if ctx:
try:
ctx_res = unsafe_eval(ctx, eval_dict)
if isinstance(context, dict):
context.update(ctx_res)
else:
context = ctx_res
except NameError:
# Some contexts contain references that are only valid at runtime at
# client-side, so in that case we keep the original context string
# as it is. We also log it, just in case.
context = ctx
_logger.debug('Context value (%s) for element with id "%s" or its data node does not parse '\
'at server-side, keeping original string, in case it\'s meant for client side only',
ctx, node.get('id','n/a'), exc_info=True)
return context
def get_uid(self, cr, uid, data_node, node):
node_uid = node.get('uid','') or (len(data_node) and data_node.get('uid',''))
if node_uid:
return self.id_get(cr, node_uid)
return uid
def _test_xml_id(self, xml_id):
id = xml_id
if '.' in xml_id:
module, id = xml_id.split('.', 1)
assert '.' not in id, """The ID reference "%s" must contain
maximum one dot. They are used to refer to other modules ID, in the
form: module.record_id""" % (xml_id,)
if module != self.module:
modcnt = self.pool['ir.module.module'].search_count(self.cr, self.uid, ['&', ('name', '=', module), ('state', 'in', ['installed'])])
assert modcnt == 1, """The ID "%s" refers to an uninstalled module""" % (xml_id,)
if len(id) > 64:
_logger.error('id: %s is too long (max: 64)', id)
def _tag_delete(self, cr, rec, data_node=None, mode=None):
d_model = rec.get("model")
d_search = rec.get("search",'').encode('utf-8')
d_id = rec.get("id")
ids = []
if d_search:
idref = _get_idref(self, cr, self.uid, d_model, context={}, idref={})
try:
ids = self.pool[d_model].search(cr, self.uid, unsafe_eval(d_search, idref))
except ValueError:
_logger.warning('Skipping deletion for failed search `%r`', d_search, exc_info=True)
pass
if d_id:
try:
ids.append(self.id_get(cr, d_id))
except ValueError:
# d_id cannot be found. doesn't matter in this case
_logger.warning('Skipping deletion for missing XML ID `%r`', d_id, exc_info=True)
pass
if ids:
self.pool[d_model].unlink(cr, self.uid, ids)
def _remove_ir_values(self, cr, name, value, model):
ir_values_obj = self.pool['ir.values']
ir_value_ids = ir_values_obj.search(cr, self.uid, [('name','=',name),('value','=',value),('model','=',model)])
if ir_value_ids:
ir_values_obj.unlink(cr, self.uid, ir_value_ids)
return True
def _tag_report(self, cr, rec, data_node=None, mode=None):
res = {}
for dest,f in (('name','string'),('model','model'),('report_name','name')):
res[dest] = rec.get(f,'').encode('utf8')
assert res[dest], "Attribute %s of report is empty !" % (f,)
for field,dest in (('rml','report_rml'),('file','report_rml'),('xml','report_xml'),('xsl','report_xsl'),
('attachment','attachment'),('attachment_use','attachment_use'), ('usage','usage'),
('report_type', 'report_type'), ('parser', 'parser')):
if rec.get(field):
res[dest] = rec.get(field).encode('utf8')
if rec.get('auto'):
res['auto'] = eval(rec.get('auto','False'))
if rec.get('sxw'):
sxw_content = misc.file_open(rec.get('sxw')).read()
res['report_sxw_content'] = sxw_content
if rec.get('header'):
res['header'] = eval(rec.get('header','False'))
res['multi'] = rec.get('multi') and eval(rec.get('multi','False'))
xml_id = rec.get('id','').encode('utf8')
self._test_xml_id(xml_id)
if rec.get('groups'):
g_names = rec.get('groups','').split(',')
groups_value = []
for group in g_names:
if group.startswith('-'):
group_id = self.id_get(cr, group[1:])
groups_value.append((3, group_id))
else:
group_id = self.id_get(cr, group)
groups_value.append((4, group_id))
res['groups_id'] = groups_value
id = self.pool['ir.model.data']._update(cr, self.uid, "ir.actions.report.xml", self.module, res, xml_id, noupdate=self.isnoupdate(data_node), mode=self.mode)
self.idref[xml_id] = int(id)
if not rec.get('menu') or eval(rec.get('menu','False')):
keyword = str(rec.get('keyword', 'client_print_multi'))
value = 'ir.actions.report.xml,'+str(id)
replace = rec.get('replace', True)
self.pool['ir.model.data'].ir_set(cr, self.uid, 'action', keyword, res['name'], [res['model']], value, replace=replace, isobject=True, xml_id=xml_id)
elif self.mode=='update' and eval(rec.get('menu','False'))==False:
# Special check for report having attribute menu=False on update
value = 'ir.actions.report.xml,'+str(id)
self._remove_ir_values(cr, res['name'], value, res['model'])
return id
def _tag_function(self, cr, rec, data_node=None, mode=None):
if self.isnoupdate(data_node) and self.mode != 'init':
return
context = self.get_context(data_node, rec, {'ref': _ref(self, cr)})
uid = self.get_uid(cr, self.uid, data_node, rec)
_eval_xml(self,rec, self.pool, cr, uid, self.idref, context=context)
return
def _tag_url(self, cr, rec, data_node=None, mode=None):
url = rec.get("url",'').encode('utf8')
target = rec.get("target",'').encode('utf8')
name = rec.get("name",'').encode('utf8')
xml_id = rec.get('id','').encode('utf8')
self._test_xml_id(xml_id)
res = {'name': name, 'url': url, 'target':target}
id = self.pool['ir.model.data']._update(cr, self.uid, "ir.actions.act_url", self.module, res, xml_id, noupdate=self.isnoupdate(data_node), mode=self.mode)
self.idref[xml_id] = int(id)
def _tag_act_window(self, cr, rec, data_node=None, mode=None):
name = rec.get('name','').encode('utf-8')
xml_id = rec.get('id','').encode('utf8')
self._test_xml_id(xml_id)
type = rec.get('type','').encode('utf-8') or 'ir.actions.act_window'
view_id = False
if rec.get('view_id'):
view_id = self.id_get(cr, rec.get('view_id','').encode('utf-8'))
domain = rec.get('domain','').encode('utf-8') or '[]'
res_model = rec.get('res_model','').encode('utf-8')
src_model = rec.get('src_model','').encode('utf-8')
view_type = rec.get('view_type','').encode('utf-8') or 'form'
view_mode = rec.get('view_mode','').encode('utf-8') or 'tree,form'
usage = rec.get('usage','').encode('utf-8')
limit = rec.get('limit','').encode('utf-8')
auto_refresh = rec.get('auto_refresh','').encode('utf-8')
uid = self.uid
# Act_window's 'domain' and 'context' contain mostly literals
# but they can also refer to the variables provided below
# in eval_context, so we need to eval() them before storing.
# Among the context variables, 'active_id' refers to
# the currently selected items in a list view, and only
# takes meaning at runtime on the client side. For this
# reason it must remain a bare variable in domain and context,
# even after eval() at server-side. We use the special 'unquote'
# class to achieve this effect: a string which has itself, unquoted,
# as representation.
active_id = unquote("active_id")
active_ids = unquote("active_ids")
active_model = unquote("active_model")
def ref(str_id):
return self.id_get(cr, str_id)
# Include all locals() in eval_context, for backwards compatibility
eval_context = {
'name': name,
'xml_id': xml_id,
'type': type,
'view_id': view_id,
'domain': domain,
'res_model': res_model,
'src_model': src_model,
'view_type': view_type,
'view_mode': view_mode,
'usage': usage,
'limit': limit,
'auto_refresh': auto_refresh,
'uid' : uid,
'active_id': active_id,
'active_ids': active_ids,
'active_model': active_model,
'ref' : ref,
}
context = self.get_context(data_node, rec, eval_context)
try:
domain = unsafe_eval(domain, eval_context)
except NameError:
# Some domains contain references that are only valid at runtime at
# client-side, so in that case we keep the original domain string
# as it is. We also log it, just in case.
_logger.debug('Domain value (%s) for element with id "%s" does not parse '\
'at server-side, keeping original string, in case it\'s meant for client side only',
domain, xml_id or 'n/a', exc_info=True)
res = {
'name': name,
'type': type,
'view_id': view_id,
'domain': domain,
'context': context,
'res_model': res_model,
'src_model': src_model,
'view_type': view_type,
'view_mode': view_mode,
'usage': usage,
'limit': limit,
'auto_refresh': auto_refresh,
}
if rec.get('groups'):
g_names = rec.get('groups','').split(',')
groups_value = []
for group in g_names:
if group.startswith('-'):
group_id = self.id_get(cr, group[1:])
groups_value.append((3, group_id))
else:
group_id = self.id_get(cr, group)
groups_value.append((4, group_id))
res['groups_id'] = groups_value
if rec.get('target'):
res['target'] = rec.get('target','')
if rec.get('multi'):
res['multi'] = eval(rec.get('multi', 'False'))
id = self.pool['ir.model.data']._update(cr, self.uid, 'ir.actions.act_window', self.module, res, xml_id, noupdate=self.isnoupdate(data_node), mode=self.mode)
self.idref[xml_id] = int(id)
if src_model:
#keyword = 'client_action_relate'
keyword = rec.get('key2','').encode('utf-8') or 'client_action_relate'
value = 'ir.actions.act_window,'+str(id)
replace = rec.get('replace','') or True
self.pool['ir.model.data'].ir_set(cr, self.uid, 'action', keyword, xml_id, [src_model], value, replace=replace, isobject=True, xml_id=xml_id)
# TODO add remove ir.model.data
def _tag_ir_set(self, cr, rec, data_node=None, mode=None):
if self.mode != 'init':
return
res = {}
for field in rec.findall('./field'):
f_name = field.get("name",'').encode('utf-8')
f_val = _eval_xml(self,field,self.pool, cr, self.uid, self.idref)
res[f_name] = f_val
self.pool['ir.model.data'].ir_set(cr, self.uid, res['key'], res['key2'], res['name'], res['models'], res['value'], replace=res.get('replace',True), isobject=res.get('isobject', False), meta=res.get('meta',None))
def _tag_workflow(self, cr, rec, data_node=None, mode=None):
if self.isnoupdate(data_node) and self.mode != 'init':
return
model = rec.get('model').encode('ascii')
w_ref = rec.get('ref')
if w_ref:
id = self.id_get(cr, w_ref)
else:
number_children = len(rec)
assert number_children > 0,\
"You must define a child node if you don't give a ref"
assert number_children == 1,\
'Only one child node is accepted (%d given)' % number_children
id = _eval_xml(self, rec[0], self.pool, cr, self.uid, self.idref)
uid = self.get_uid(cr, self.uid, data_node, rec)
openerp.workflow.trg_validate(
uid, model, id, rec.get('action').encode('ascii'), cr)
#
# Support two types of notation:
# name="Inventory Control/Sending Goods"
# or
# action="action_id"
# parent="parent_id"
#
def _tag_menuitem(self, cr, rec, data_node=None, mode=None):
rec_id = rec.get("id",'').encode('ascii')
self._test_xml_id(rec_id)
m_l = map(escape, escape_re.split(rec.get("name",'').encode('utf8')))
values = {'parent_id': False}
if rec.get('parent', False) is False and len(m_l) > 1:
# No parent attribute specified and the menu name has several menu components,
# try to determine the ID of the parent according to menu path
pid = False
res = None
values['name'] = m_l[-1]
m_l = m_l[:-1] # last part is our name, not a parent
for idx, menu_elem in enumerate(m_l):
if pid:
cr.execute('select id from ir_ui_menu where parent_id=%s and name=%s', (pid, menu_elem))
else:
cr.execute('select id from ir_ui_menu where parent_id is null and name=%s', (menu_elem,))
res = cr.fetchone()
if res:
pid = res[0]
else:
# the menuitem doesn't exist but we are on a branch (not a leaf)
_logger.warning('Warning no ID for submenu %s of menu %s !', menu_elem, str(m_l))
pid = self.pool['ir.ui.menu'].create(cr, self.uid, {'parent_id' : pid, 'name' : menu_elem})
values['parent_id'] = pid
else:
# The parent attribute was specified, if non-empty determine its ID, otherwise
# explicitly make a top-level menu
if rec.get('parent'):
menu_parent_id = self.id_get(cr, rec.get('parent',''))
else:
# we get here with <menuitem parent="">, explicit clear of parent, or
# if no parent attribute at all but menu name is not a menu path
menu_parent_id = False
values = {'parent_id': menu_parent_id}
if rec.get('name'):
values['name'] = rec.get('name')
try:
res = [ self.id_get(cr, rec.get('id','')) ]
except:
res = None
if rec.get('action'):
a_action = rec.get('action','').encode('utf8')
# determine the type of action
action_type, action_id = self.model_id_get(cr, a_action)
action_type = action_type.split('.')[-1] # keep only type part
if not values.get('name') and action_type in ('act_window', 'wizard', 'url', 'client', 'server'):
a_table = 'ir_act_%s' % action_type.replace('act_', '')
cr.execute('select name from "%s" where id=%%s' % a_table, (int(action_id),))
resw = cr.fetchone()
if resw:
values['name'] = resw[0]
if not values.get('name'):
# ensure menu has a name
values['name'] = rec_id or '?'
if rec.get('sequence'):
values['sequence'] = int(rec.get('sequence'))
if rec.get('groups'):
g_names = rec.get('groups','').split(',')
groups_value = []
for group in g_names:
if group.startswith('-'):
group_id = self.id_get(cr, group[1:])
groups_value.append((3, group_id))
else:
group_id = self.id_get(cr, group)
groups_value.append((4, group_id))
values['groups_id'] = groups_value
pid = self.pool['ir.model.data']._update(cr, self.uid, 'ir.ui.menu', self.module, values, rec_id, noupdate=self.isnoupdate(data_node), mode=self.mode, res_id=res and res[0] or False)
if rec_id and pid:
self.idref[rec_id] = int(pid)
if rec.get('action') and pid:
action = "ir.actions.%s,%d" % (action_type, action_id)
self.pool['ir.model.data'].ir_set(cr, self.uid, 'action', 'tree_but_open', 'Menuitem', [('ir.ui.menu', int(pid))], action, True, True, xml_id=rec_id)
return 'ir.ui.menu', pid
def _assert_equals(self, f1, f2, prec=4):
return not round(f1 - f2, prec)
def _tag_assert(self, cr, rec, data_node=None, mode=None):
if self.isnoupdate(data_node) and self.mode != 'init':
return
rec_model = rec.get("model",'').encode('ascii')
model = self.pool[rec_model]
rec_id = rec.get("id",'').encode('ascii')
self._test_xml_id(rec_id)
rec_src = rec.get("search",'').encode('utf8')
rec_src_count = rec.get("count")
rec_string = rec.get("string",'').encode('utf8') or 'unknown'
ids = None
eval_dict = {'ref': _ref(self, cr)}
context = self.get_context(data_node, rec, eval_dict)
uid = self.get_uid(cr, self.uid, data_node, rec)
if rec_id:
ids = [self.id_get(cr, rec_id)]
elif rec_src:
q = unsafe_eval(rec_src, eval_dict)
ids = self.pool[rec_model].search(cr, uid, q, context=context)
if rec_src_count:
count = int(rec_src_count)
if len(ids) != count:
self.assertion_report.record_failure()
msg = 'assertion "%s" failed!\n' \
' Incorrect search count:\n' \
' expected count: %d\n' \
' obtained count: %d\n' \
% (rec_string, count, len(ids))
_logger.error(msg)
return
assert ids is not None,\
'You must give either an id or a search criteria'
ref = _ref(self, cr)
for id in ids:
brrec = model.browse(cr, uid, id, context)
class d(dict):
def __getitem__(self2, key):
if key in brrec:
return brrec[key]
return dict.__getitem__(self2, key)
globals_dict = d()
globals_dict['floatEqual'] = self._assert_equals
globals_dict['ref'] = ref
globals_dict['_ref'] = ref
for test in rec.findall('./test'):
f_expr = test.get("expr",'').encode('utf-8')
expected_value = _eval_xml(self, test, self.pool, cr, uid, self.idref, context=context) or True
expression_value = unsafe_eval(f_expr, globals_dict)
if expression_value != expected_value: # assertion failed
self.assertion_report.record_failure()
msg = 'assertion "%s" failed!\n' \
' xmltag: %s\n' \
' expected value: %r\n' \
' obtained value: %r\n' \
% (rec_string, etree.tostring(test), expected_value, expression_value)
_logger.error(msg)
return
else: # all tests were successful for this assertion tag (no break)
self.assertion_report.record_success()
def _tag_record(self, cr, rec, data_node=None, mode=None):
rec_model = rec.get("model").encode('ascii')
model = self.pool[rec_model]
rec_id = rec.get("id",'').encode('ascii')
rec_context = rec.get("context", None)
if rec_context:
rec_context = unsafe_eval(rec_context)
self._test_xml_id(rec_id)
# in update mode, the record won't be updated if the data node explicitly
# opts out using @noupdate="1". A second check will be performed in
# ir.model.data#_update() using the record's ir.model.data `noupdate` field.
if self.isnoupdate(data_node) and self.mode != 'init':
# check if the xml record has no id, skip
if not rec_id:
return None
if '.' in rec_id:
module,rec_id2 = rec_id.split('.')
else:
module = self.module
rec_id2 = rec_id
id = self.pool['ir.model.data']._update_dummy(cr, self.uid, rec_model, module, rec_id2)
if id:
# if the resource already exists, don't update it but store
# its database id (can be useful)
self.idref[rec_id] = int(id)
return None
elif not self.nodeattr2bool(rec, 'forcecreate', True):
# if it doesn't exist and we shouldn't create it, skip it
return None
# else create it normally
res = {}
for field in rec.findall('./field'):
#TODO: most of this code is duplicated above (in _eval_xml)...
f_name = field.get("name").encode('utf-8')
f_ref = field.get("ref",'').encode('utf-8')
f_search = field.get("search",'').encode('utf-8')
f_model = field.get("model",'').encode('utf-8')
if not f_model and f_name in model._fields:
f_model = model._fields[f_name].comodel_name
f_use = field.get("use",'').encode('utf-8') or 'id'
f_val = False
if f_search:
q = unsafe_eval(f_search, self.idref)
assert f_model, 'Define an attribute model="..." in your .XML file !'
f_obj = self.pool[f_model]
# browse the objects searched
s = f_obj.browse(cr, self.uid, f_obj.search(cr, self.uid, q))
# column definitions of the "local" object
_fields = self.pool[rec_model]._fields
# if the current field is many2many
if (f_name in _fields) and _fields[f_name].type == 'many2many':
f_val = [(6, 0, map(lambda x: x[f_use], s))]
elif len(s):
# otherwise (we are probably in a many2one field),
# take the first element of the search
f_val = s[0][f_use]
elif f_ref:
if f_name in model._fields and model._fields[f_name].type == 'reference':
val = self.model_id_get(cr, f_ref)
f_val = val[0] + ',' + str(val[1])
else:
f_val = self.id_get(cr, f_ref)
else:
f_val = _eval_xml(self,field, self.pool, cr, self.uid, self.idref)
if f_name in model._fields:
if model._fields[f_name].type == 'integer':
f_val = int(f_val)
res[f_name] = f_val
id = self.pool['ir.model.data']._update(cr, self.uid, rec_model, self.module, res, rec_id or False, not self.isnoupdate(data_node), noupdate=self.isnoupdate(data_node), mode=self.mode, context=rec_context )
if rec_id:
self.idref[rec_id] = int(id)
if config.get('import_partial'):
cr.commit()
return rec_model, id
def _tag_template(self, cr, el, data_node=None, mode=None):
# This helper transforms a <template> element into a <record> and forwards it
tpl_id = el.get('id', el.get('t-name', '')).encode('ascii')
full_tpl_id = tpl_id
if '.' not in full_tpl_id:
full_tpl_id = '%s.%s' % (self.module, tpl_id)
# set the full template name for qweb <module>.<id>
if not el.get('inherit_id'):
el.set('t-name', full_tpl_id)
el.tag = 't'
else:
el.tag = 'data'
el.attrib.pop('id', None)
record_attrs = {
'id': tpl_id,
'model': 'ir.ui.view',
}
for att in ['forcecreate', 'context']:
if att in el.keys():
record_attrs[att] = el.attrib.pop(att)
Field = builder.E.field
name = el.get('name', tpl_id)
record = etree.Element('record', attrib=record_attrs)
record.append(Field(name, name='name'))
record.append(Field("qweb", name='type'))
record.append(Field(el.get('priority', "16"), name='priority'))
if 'inherit_id' in el.attrib:
record.append(Field(name='inherit_id', ref=el.get('inherit_id')))
if el.get('active') in ("True", "False"):
view_id = self.id_get(cr, tpl_id, raise_if_not_found=False)
if mode != "update" or not view_id:
record.append(Field(name='active', eval=el.get('active')))
if el.get('customize_show') in ("True", "False"):
record.append(Field(name='customize_show', eval=el.get('customize_show')))
groups = el.attrib.pop('groups', None)
if groups:
grp_lst = map(lambda x: "ref('%s')" % x, groups.split(','))
record.append(Field(name="groups_id", eval="[(6, 0, ["+', '.join(grp_lst)+"])]"))
if el.attrib.pop('page', None) == 'True':
record.append(Field(name="page", eval="True"))
if el.get('primary') == 'True':
# Pseudo clone mode, we'll set the t-name to the full canonical xmlid
el.append(
builder.E.xpath(
builder.E.attribute(full_tpl_id, name='t-name'),
expr=".",
position="attributes",
)
)
record.append(Field('primary', name='mode'))
# inject complete <template> element (after changing node name) into
# the ``arch`` field
record.append(Field(el, name="arch", type="xml"))
return self._tag_record(cr, record, data_node)
def id_get(self, cr, id_str, raise_if_not_found=True):
if id_str in self.idref:
return self.idref[id_str]
res = self.model_id_get(cr, id_str, raise_if_not_found)
if res and len(res)>1: res = res[1]
return res
def model_id_get(self, cr, id_str, raise_if_not_found=True):
model_data_obj = self.pool['ir.model.data']
mod = self.module
if '.' not in id_str:
id_str = '%s.%s' % (mod, id_str)
return model_data_obj.xmlid_to_res_model_res_id(
cr, self.uid, id_str,
raise_if_not_found=raise_if_not_found)
def parse(self, de, mode=None):
if de.tag != 'openerp':
raise Exception("Mismatch xml format: root tag must be `openerp`.")
for n in de.findall('./data'):
for rec in n:
if rec.tag in self._tags:
try:
self._tags[rec.tag](self.cr, rec, n, mode=mode)
except Exception, e:
self.cr.rollback()
exc_info = sys.exc_info()
raise ParseError, (misc.ustr(e), etree.tostring(rec).rstrip(), rec.getroottree().docinfo.URL, rec.sourceline), exc_info[2]
return True
def __init__(self, cr, module, idref, mode, report=None, noupdate=False):
self.mode = mode
self.module = module
self.cr = cr
self.idref = idref
self.pool = openerp.registry(cr.dbname)
self.uid = 1
if report is None:
report = assertion_report.assertion_report()
self.assertion_report = report
self.noupdate = noupdate
self._tags = {
'record': self._tag_record,
'delete': self._tag_delete,
'function': self._tag_function,
'menuitem': self._tag_menuitem,
'template': self._tag_template,
'workflow': self._tag_workflow,
'report': self._tag_report,
'ir_set': self._tag_ir_set,
'act_window': self._tag_act_window,
'url': self._tag_url,
'assert': self._tag_assert,
}
def convert_file(cr, module, filename, idref, mode='update', noupdate=False, kind=None, report=None, pathname=None):
if pathname is None:
pathname = os.path.join(module, filename)
fp = misc.file_open(pathname)
ext = os.path.splitext(filename)[1].lower()
try:
if ext == '.csv':
convert_csv_import(cr, module, pathname, fp.read(), idref, mode, noupdate)
elif ext == '.sql':
convert_sql_import(cr, fp)
elif ext == '.yml':
convert_yaml_import(cr, module, fp, kind, idref, mode, noupdate, report)
elif ext == '.xml':
convert_xml_import(cr, module, fp, idref, mode, noupdate, report)
elif ext == '.js':
pass # .js files are valid but ignored here.
else:
_logger.warning("Can't load unknown file type %s.", filename)
finally:
fp.close()
def convert_sql_import(cr, fp):
queries = fp.read().split(';')
for query in queries:
new_query = ' '.join(query.split())
if new_query:
cr.execute(new_query)
def convert_csv_import(cr, module, fname, csvcontent, idref=None, mode='init',
noupdate=False):
'''Import csv file :
quote: "
delimiter: ,
encoding: utf-8'''
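# The target model is derived from the file name (illustrative examples):
# 'res.partner.csv' and 'res.partner-demo.csv' both import into 'res.partner'.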
if not idref:
idref={}
model = ('.'.join(fname.split('.')[:-1]).split('-'))[0]
#remove folder path from model
head, model = os.path.split(model)
input = cStringIO.StringIO(csvcontent) #FIXME
reader = csv.reader(input, quotechar='"', delimiter=',')
fields = reader.next()
fname_partial = ""
if config.get('import_partial'):
fname_partial = module + '/'+ fname
if not os.path.isfile(config.get('import_partial')):
pickle.dump({}, file(config.get('import_partial'),'w+'))
else:
data = pickle.load(file(config.get('import_partial')))
if fname_partial in data:
if not data[fname_partial]:
return
else:
for i in range(data[fname_partial]):
reader.next()
if not (mode == 'init' or 'id' in fields):
_logger.error("Import specification does not contain 'id' and we are in init mode, Cannot continue.")
return
uid = 1
datas = []
for line in reader:
if not (line and any(line)):
continue
try:
datas.append(map(misc.ustr, line))
except:
_logger.error("Cannot import the line: %s", line)
registry = openerp.registry(cr.dbname)
result, rows, warning_msg, dummy = registry[model].import_data(cr, uid, fields, datas,mode, module, noupdate, filename=fname_partial)
if result < 0:
# Report failed import and abort module install
raise Exception(_('Module loading %s failed: file %s could not be processed:\n %s') % (module, fname, warning_msg))
if config.get('import_partial'):
data = pickle.load(file(config.get('import_partial')))
data[fname_partial] = 0
pickle.dump(data, file(config.get('import_partial'),'wb'))
cr.commit()
#
# xml import/export
#
def convert_xml_import(cr, module, xmlfile, idref=None, mode='init', noupdate=False, report=None):
doc = etree.parse(xmlfile)
relaxng = etree.RelaxNG(
etree.parse(os.path.join(config['root_path'],'import_xml.rng' )))
try:
relaxng.assert_(doc)
except Exception:
_logger.error('The XML file does not fit the required schema !')
_logger.error(misc.ustr(relaxng.error_log.last_error))
raise
if idref is None:
idref={}
obj = xml_import(cr, module, idref, mode, report=report, noupdate=noupdate)
obj.parse(doc.getroot(), mode=mode)
return True
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
nagyistoce/kaggle-galaxies | try_convnet_cc_multirotflip_3x69r45_maxout2048_extradense_pysexgen1_dup.py | 7 | 17744 | import numpy as np
# import pandas as pd
import theano
import theano.tensor as T
import layers
import cc_layers
import custom
import load_data
import realtime_augmentation as ra
import time
import csv
import os
import cPickle as pickle
from datetime import datetime, timedelta
# import matplotlib.pyplot as plt
# plt.ion()
# import utils
BATCH_SIZE = 16
NUM_INPUT_FEATURES = 3
LEARNING_RATE_SCHEDULE = {
0: 0.04,
1800: 0.004,
2300: 0.0004,
}
MOMENTUM = 0.9
WEIGHT_DECAY = 0.0
CHUNK_SIZE = 10000 # 30000 # this should be a multiple of the batch size, ideally.
NUM_CHUNKS = 2500 # 3000 # 1500 # 600 # 600 # 600 # 500
VALIDATE_EVERY = 20 # 12 # 6 # 6 # 6 # 5 # validate only every VALIDATE_EVERY chunks. MUST BE A DIVISOR OF NUM_CHUNKS!!!
# else computing the analysis data does not work correctly, since it assumes that the validation set is still loaded.
NUM_CHUNKS_NONORM = 1 # train without normalisation for this many chunks, to get the weights in the right 'zone'.
# this should be only a few, just 1 hopefully suffices.
GEN_BUFFER_SIZE = 1
# # need to load the full training data anyway to extract the validation set from it.
# # alternatively we could create separate validation set files.
# DATA_TRAIN_PATH = "data/images_train_color_cropped33_singletf.npy.gz"
# DATA2_TRAIN_PATH = "data/images_train_color_8x_singletf.npy.gz"
# DATA_VALIDONLY_PATH = "data/images_validonly_color_cropped33_singletf.npy.gz"
# DATA2_VALIDONLY_PATH = "data/images_validonly_color_8x_singletf.npy.gz"
# DATA_TEST_PATH = "data/images_test_color_cropped33_singletf.npy.gz"
# DATA2_TEST_PATH = "data/images_test_color_8x_singletf.npy.gz"
TARGET_PATH = "predictions/final/try_convnet_cc_multirotflip_3x69r45_maxout2048_extradense_pysexgen1_dup.csv"
ANALYSIS_PATH = "analysis/final/try_convnet_cc_multirotflip_3x69r45_maxout2048_extradense_pysexgen1_dup.pkl"
# FEATURES_PATTERN = "features/try_convnet_chunked_ra_b3sched.%s.npy"
print "Set up data loading"
# TODO: adapt this so it loads the validation data from JPEGs and does the processing realtime
input_sizes = [(69, 69), (69, 69)]
ds_transforms = [
ra.build_ds_transform(3.0, target_size=input_sizes[0]),
ra.build_ds_transform(3.0, target_size=input_sizes[1]) + ra.build_augmentation_transform(rotation=45)
]
num_input_representations = len(ds_transforms)
augmentation_params = {
'zoom_range': (1.0 / 1.3, 1.3),
'rotation_range': (0, 360),
'shear_range': (0, 0),
'translation_range': (-4, 4),
'do_flip': True,
}
augmented_data_gen = ra.realtime_augmented_data_gen(num_chunks=NUM_CHUNKS, chunk_size=CHUNK_SIZE,
augmentation_params=augmentation_params, ds_transforms=ds_transforms,
target_sizes=input_sizes, processor_class=ra.LoadAndProcessPysexGen1CenteringRescaling)
post_augmented_data_gen = ra.post_augment_brightness_gen(augmented_data_gen, std=0.5)
train_gen = load_data.buffered_gen_mp(post_augmented_data_gen, buffer_size=GEN_BUFFER_SIZE)
y_train = np.load("data/solutions_train.npy")
train_ids = load_data.train_ids
test_ids = load_data.test_ids
# split training data into training + a small validation set
num_train = len(train_ids)
num_test = len(test_ids)
num_valid = num_train // 10 # integer division
num_train -= num_valid
y_valid = y_train[num_train:]
y_train = y_train[:num_train]
valid_ids = train_ids[num_train:]
train_ids = train_ids[:num_train]
train_indices = np.arange(num_train)
valid_indices = np.arange(num_train, num_train + num_valid)
test_indices = np.arange(num_test)
def create_train_gen():
"""
    This generates the training data in order, for postprocessing. Do not use it for actual training.
"""
data_gen_train = ra.realtime_fixed_augmented_data_gen(train_indices, 'train',
ds_transforms=ds_transforms, chunk_size=CHUNK_SIZE, target_sizes=input_sizes,
processor_class=ra.LoadAndProcessFixedPysexGen1CenteringRescaling)
return load_data.buffered_gen_mp(data_gen_train, buffer_size=GEN_BUFFER_SIZE)
def create_valid_gen():
data_gen_valid = ra.realtime_fixed_augmented_data_gen(valid_indices, 'train',
ds_transforms=ds_transforms, chunk_size=CHUNK_SIZE, target_sizes=input_sizes,
processor_class=ra.LoadAndProcessFixedPysexGen1CenteringRescaling)
return load_data.buffered_gen_mp(data_gen_valid, buffer_size=GEN_BUFFER_SIZE)
def create_test_gen():
data_gen_test = ra.realtime_fixed_augmented_data_gen(test_indices, 'test',
ds_transforms=ds_transforms, chunk_size=CHUNK_SIZE, target_sizes=input_sizes,
processor_class=ra.LoadAndProcessFixedPysexGen1CenteringRescaling)
return load_data.buffered_gen_mp(data_gen_test, buffer_size=GEN_BUFFER_SIZE)
print "Preprocess validation data upfront"
start_time = time.time()
xs_valid = [[] for _ in xrange(num_input_representations)]
for data, length in create_valid_gen():
for x_valid_list, x_chunk in zip(xs_valid, data):
x_valid_list.append(x_chunk[:length])
xs_valid = [np.vstack(x_valid) for x_valid in xs_valid]
xs_valid = [x_valid.transpose(0, 3, 1, 2) for x_valid in xs_valid] # move the colour dimension up
print " took %.2f seconds" % (time.time() - start_time)
print "Build model"
l0 = layers.Input2DLayer(BATCH_SIZE, NUM_INPUT_FEATURES, input_sizes[0][0], input_sizes[0][1])
l0_45 = layers.Input2DLayer(BATCH_SIZE, NUM_INPUT_FEATURES, input_sizes[1][0], input_sizes[1][1])
l0r = layers.MultiRotSliceLayer([l0, l0_45], part_size=45, include_flip=True)
l0s = cc_layers.ShuffleBC01ToC01BLayer(l0r)
l1a = cc_layers.CudaConvnetConv2DLayer(l0s, n_filters=32, filter_size=6, weights_std=0.01, init_bias_value=0.1, dropout=0.0, partial_sum=1, untie_biases=True)
l1 = cc_layers.CudaConvnetPooling2DLayer(l1a, pool_size=2)
l2a = cc_layers.CudaConvnetConv2DLayer(l1, n_filters=64, filter_size=5, weights_std=0.01, init_bias_value=0.1, dropout=0.0, partial_sum=1, untie_biases=True)
l2 = cc_layers.CudaConvnetPooling2DLayer(l2a, pool_size=2)
l3a = cc_layers.CudaConvnetConv2DLayer(l2, n_filters=128, filter_size=3, weights_std=0.01, init_bias_value=0.1, dropout=0.0, partial_sum=1, untie_biases=True)
l3b = cc_layers.CudaConvnetConv2DLayer(l3a, n_filters=128, filter_size=3, pad=0, weights_std=0.1, init_bias_value=0.1, dropout=0.0, partial_sum=1, untie_biases=True)
l3 = cc_layers.CudaConvnetPooling2DLayer(l3b, pool_size=2)
l3s = cc_layers.ShuffleC01BToBC01Layer(l3)
j3 = layers.MultiRotMergeLayer(l3s, num_views=4) # 2) # merge convolutional parts
l4a = layers.DenseLayer(j3, n_outputs=4096, weights_std=0.001, init_bias_value=0.01, dropout=0.5, nonlinearity=layers.identity)
l4b = layers.FeatureMaxPoolingLayer(l4a, pool_size=2, feature_dim=1, implementation='reshape')
l4c = layers.DenseLayer(l4b, n_outputs=4096, weights_std=0.001, init_bias_value=0.01, dropout=0.5, nonlinearity=layers.identity)
l4 = layers.FeatureMaxPoolingLayer(l4c, pool_size=2, feature_dim=1, implementation='reshape')
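# l4a/l4c use an identity nonlinearity and are each followed by feature
# max-pooling with pool_size=2, i.e. maxout units: 4096 linear outputs per
# layer become 2048 maxout activations (the 'maxout2048' in the filename).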
# l5 = layers.DenseLayer(l4, n_outputs=37, weights_std=0.01, init_bias_value=0.0, dropout=0.5, nonlinearity=custom.clip_01) # nonlinearity=layers.identity)
l5 = layers.DenseLayer(l4, n_outputs=37, weights_std=0.01, init_bias_value=0.1, dropout=0.5, nonlinearity=layers.identity)
# l6 = layers.OutputLayer(l5, error_measure='mse')
l6 = custom.OptimisedDivGalaxyOutputLayer(l5) # this incorporates the constraints on the output (probabilities sum to one, weighting, etc.)
train_loss_nonorm = l6.error(normalisation=False)
train_loss = l6.error() # but compute and print this!
valid_loss = l6.error(dropout_active=False)
all_parameters = layers.all_parameters(l6)
all_bias_parameters = layers.all_bias_parameters(l6)
xs_shared = [theano.shared(np.zeros((1,1,1,1), dtype=theano.config.floatX)) for _ in xrange(num_input_representations)]
y_shared = theano.shared(np.zeros((1,1), dtype=theano.config.floatX))
learning_rate = theano.shared(np.array(LEARNING_RATE_SCHEDULE[0], dtype=theano.config.floatX))
idx = T.lscalar('idx')
givens = {
l0.input_var: xs_shared[0][idx*BATCH_SIZE:(idx+1)*BATCH_SIZE],
l0_45.input_var: xs_shared[1][idx*BATCH_SIZE:(idx+1)*BATCH_SIZE],
l6.target_var: y_shared[idx*BATCH_SIZE:(idx+1)*BATCH_SIZE],
}
# updates = layers.gen_updates(train_loss, all_parameters, learning_rate=LEARNING_RATE, momentum=MOMENTUM, weight_decay=WEIGHT_DECAY)
updates_nonorm = layers.gen_updates_nesterov_momentum_no_bias_decay(train_loss_nonorm, all_parameters, all_bias_parameters, learning_rate=learning_rate, momentum=MOMENTUM, weight_decay=WEIGHT_DECAY)
updates = layers.gen_updates_nesterov_momentum_no_bias_decay(train_loss, all_parameters, all_bias_parameters, learning_rate=learning_rate, momentum=MOMENTUM, weight_decay=WEIGHT_DECAY)
train_nonorm = theano.function([idx], train_loss_nonorm, givens=givens, updates=updates_nonorm)
train_norm = theano.function([idx], train_loss, givens=givens, updates=updates)
compute_loss = theano.function([idx], valid_loss, givens=givens) # dropout_active=False
compute_output = theano.function([idx], l6.predictions(dropout_active=False), givens=givens, on_unused_input='ignore') # not using the labels, so theano complains
compute_features = theano.function([idx], l4.output(dropout_active=False), givens=givens, on_unused_input='ignore')
print "Train model"
start_time = time.time()
prev_time = start_time
num_batches_valid = x_valid.shape[0] // BATCH_SIZE
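# (x_valid here is the loop variable leaked from the list comprehension above;
# this relies on Python 2 comprehension scoping and would fail in Python 3)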
losses_train = []
losses_valid = []
param_stds = []
for e in xrange(NUM_CHUNKS):
print "Chunk %d/%d" % (e + 1, NUM_CHUNKS)
chunk_data, chunk_length = train_gen.next()
y_chunk = chunk_data.pop() # last element is labels.
xs_chunk = chunk_data
# need to transpose the chunks to move the 'channels' dimension up
xs_chunk = [x_chunk.transpose(0, 3, 1, 2) for x_chunk in xs_chunk]
if e in LEARNING_RATE_SCHEDULE:
current_lr = LEARNING_RATE_SCHEDULE[e]
learning_rate.set_value(LEARNING_RATE_SCHEDULE[e])
print " setting learning rate to %.6f" % current_lr
    # train without normalisation for the first NUM_CHUNKS_NONORM chunks.
if e >= NUM_CHUNKS_NONORM:
train = train_norm
else:
train = train_nonorm
print " load training data onto GPU"
for x_shared, x_chunk in zip(xs_shared, xs_chunk):
x_shared.set_value(x_chunk)
y_shared.set_value(y_chunk)
num_batches_chunk = x_chunk.shape[0] // BATCH_SIZE
# import pdb; pdb.set_trace()
print " batch SGD"
losses = []
for b in xrange(num_batches_chunk):
# if b % 1000 == 0:
# print " batch %d/%d" % (b + 1, num_batches_chunk)
loss = train(b)
losses.append(loss)
# print " loss: %.6f" % loss
mean_train_loss = np.sqrt(np.mean(losses))
print " mean training loss (RMSE):\t\t%.6f" % mean_train_loss
losses_train.append(mean_train_loss)
# store param stds during training
param_stds.append([p.std() for p in layers.get_param_values(l6)])
if ((e + 1) % VALIDATE_EVERY) == 0:
print
print "VALIDATING"
print " load validation data onto GPU"
for x_shared, x_valid in zip(xs_shared, xs_valid):
x_shared.set_value(x_valid)
y_shared.set_value(y_valid)
print " compute losses"
losses = []
for b in xrange(num_batches_valid):
# if b % 1000 == 0:
# print " batch %d/%d" % (b + 1, num_batches_valid)
loss = compute_loss(b)
losses.append(loss)
mean_valid_loss = np.sqrt(np.mean(losses))
print " mean validation loss (RMSE):\t\t%.6f" % mean_valid_loss
losses_valid.append(mean_valid_loss)
layers.dump_params(l6, e=e)
now = time.time()
time_since_start = now - start_time
time_since_prev = now - prev_time
prev_time = now
est_time_left = time_since_start * (float(NUM_CHUNKS - (e + 1)) / float(e + 1))
eta = datetime.now() + timedelta(seconds=est_time_left)
eta_str = eta.strftime("%c")
print " %s since start (%.2f s)" % (load_data.hms(time_since_start), time_since_prev)
print " estimated %s to go (ETA: %s)" % (load_data.hms(est_time_left), eta_str)
print
del chunk_data, xs_chunk, x_chunk, y_chunk, xs_valid, x_valid # memory cleanup
print "Compute predictions on validation set for analysis in batches"
predictions_list = []
for b in xrange(num_batches_valid):
# if b % 1000 == 0:
# print " batch %d/%d" % (b + 1, num_batches_valid)
predictions = compute_output(b)
predictions_list.append(predictions)
all_predictions = np.vstack(predictions_list)
# postprocessing: clip all predictions to 0-1
all_predictions[all_predictions > 1] = 1.0
all_predictions[all_predictions < 0] = 0.0
print "Write validation set predictions to %s" % ANALYSIS_PATH
with open(ANALYSIS_PATH, 'w') as f:
pickle.dump({
'ids': valid_ids[:num_batches_valid * BATCH_SIZE], # note that we need to truncate the ids to a multiple of the batch size.
'predictions': all_predictions,
'targets': y_valid,
'mean_train_loss': mean_train_loss,
'mean_valid_loss': mean_valid_loss,
'time_since_start': time_since_start,
'losses_train': losses_train,
'losses_valid': losses_valid,
'param_values': layers.get_param_values(l6),
'param_stds': param_stds,
}, f, pickle.HIGHEST_PROTOCOL)
del predictions_list, all_predictions # memory cleanup
# print "Loading test data"
# x_test = load_data.load_gz(DATA_TEST_PATH)
# x2_test = load_data.load_gz(DATA2_TEST_PATH)
# test_ids = np.load("data/test_ids.npy")
# num_test = x_test.shape[0]
# x_test = x_test.transpose(0, 3, 1, 2) # move the colour dimension up.
# x2_test = x2_test.transpose(0, 3, 1, 2)
# create_test_gen = lambda: load_data.array_chunker_gen([x_test, x2_test], chunk_size=CHUNK_SIZE, loop=False, truncate=False, shuffle=False)
print "Computing predictions on test data"
predictions_list = []
for e, (xs_chunk, chunk_length) in enumerate(create_test_gen()):
print "Chunk %d" % (e + 1)
xs_chunk = [x_chunk.transpose(0, 3, 1, 2) for x_chunk in xs_chunk] # move the colour dimension up.
for x_shared, x_chunk in zip(xs_shared, xs_chunk):
x_shared.set_value(x_chunk)
num_batches_chunk = int(np.ceil(chunk_length / float(BATCH_SIZE))) # need to round UP this time to account for all data
    # make predictions for the test set; don't forget to cut off the zeros at the end
for b in xrange(num_batches_chunk):
# if b % 1000 == 0:
# print " batch %d/%d" % (b + 1, num_batches_chunk)
predictions = compute_output(b)
predictions_list.append(predictions)
all_predictions = np.vstack(predictions_list)
all_predictions = all_predictions[:num_test] # truncate back to the correct length
# postprocessing: clip all predictions to 0-1
all_predictions[all_predictions > 1] = 1.0
all_predictions[all_predictions < 0] = 0.0
print "Write predictions to %s" % TARGET_PATH
# test_ids = np.load("data/test_ids.npy")
with open(TARGET_PATH, 'wb') as csvfile:
writer = csv.writer(csvfile) # , delimiter=',', quoting=csv.QUOTE_MINIMAL)
# write header
writer.writerow(['GalaxyID', 'Class1.1', 'Class1.2', 'Class1.3', 'Class2.1', 'Class2.2', 'Class3.1', 'Class3.2', 'Class4.1', 'Class4.2', 'Class5.1', 'Class5.2', 'Class5.3', 'Class5.4', 'Class6.1', 'Class6.2', 'Class7.1', 'Class7.2', 'Class7.3', 'Class8.1', 'Class8.2', 'Class8.3', 'Class8.4', 'Class8.5', 'Class8.6', 'Class8.7', 'Class9.1', 'Class9.2', 'Class9.3', 'Class10.1', 'Class10.2', 'Class10.3', 'Class11.1', 'Class11.2', 'Class11.3', 'Class11.4', 'Class11.5', 'Class11.6'])
# write data
for k in xrange(test_ids.shape[0]):
row = [test_ids[k]] + all_predictions[k].tolist()
writer.writerow(row)
print "Gzipping..."
os.system("gzip -c %s > %s.gz" % (TARGET_PATH, TARGET_PATH))
del all_predictions, predictions_list, xs_chunk, x_chunk # memory cleanup
# # need to reload training data because it has been split and shuffled.
# # don't need to reload test data
# x_train = load_data.load_gz(DATA_TRAIN_PATH)
# x2_train = load_data.load_gz(DATA2_TRAIN_PATH)
# x_train = x_train.transpose(0, 3, 1, 2) # move the colour dimension up
# x2_train = x2_train.transpose(0, 3, 1, 2)
# train_gen_features = load_data.array_chunker_gen([x_train, x2_train], chunk_size=CHUNK_SIZE, loop=False, truncate=False, shuffle=False)
# test_gen_features = load_data.array_chunker_gen([x_test, x2_test], chunk_size=CHUNK_SIZE, loop=False, truncate=False, shuffle=False)
# for name, gen, num in zip(['train', 'test'], [train_gen_features, test_gen_features], [x_train.shape[0], x_test.shape[0]]):
# print "Extracting feature representations for all galaxies: %s" % name
# features_list = []
# for e, (xs_chunk, chunk_length) in enumerate(gen):
# print "Chunk %d" % (e + 1)
# x_chunk, x2_chunk = xs_chunk
# x_shared.set_value(x_chunk)
# x2_shared.set_value(x2_chunk)
# num_batches_chunk = int(np.ceil(chunk_length / float(BATCH_SIZE))) # need to round UP this time to account for all data
# # compute features for the set, don't forget to cut off the zeros at the end
# for b in xrange(num_batches_chunk):
# if b % 1000 == 0:
# print " batch %d/%d" % (b + 1, num_batches_chunk)
# features = compute_features(b)
# features_list.append(features)
# all_features = np.vstack(features_list)
# all_features = all_features[:num] # truncate back to the correct length
# features_path = FEATURES_PATTERN % name
# print " write features to %s" % features_path
# np.save(features_path, all_features)
print "Done!"
| bsd-3-clause |
jspan/Open-Knesset | apis/tests.py | 14 | 6111 | import datetime, json, csv
from django.test import TestCase
from django.core.urlresolvers import reverse
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth.models import User, Group, Permission
from tagging.models import Tag, TaggedItem
from laws.models import Vote, VoteAction, Bill, Law
from mks.models import Member, Party, Knesset
from agendas.models import Agenda
from committees.models import Committee
from events.models import Event
from django.core import cache
from voting.models import Vote as UserVote
import apis
class TestAPIV2(TestCase):
"""
General tests for the API V2, not specific to any app (app-specific tests
are located in the app directories).
"""
def setUp(self):
pass
def test_empty_cache_bug(self):
""" Tastypie has a bug when the cache returns None. this test verifies
that our fork of Tastypie doesn't have it. This test should be run with
DummyCache settings"""
res = self.client.get('/api/v2/vote/?format=json')
self.assertEqual(res.status_code, 200)
class MeetingApiTest(TestCase):
def setUp(self):
self.knesset = Knesset.objects.create(number=1,
start_date=datetime.date.today()-datetime.timedelta(days=1))
self.committee_1 = Committee.objects.create(name='c1')
self.committee_2 = Committee.objects.create(name='c2')
self.meeting_1 = self.committee_1.meetings.create(date=datetime.datetime.now(),
protocol_text='''jacob:
I am a perfectionist
adrian:
I have a deadline''')
self.meeting_1.create_protocol_parts()
self.meeting_2 = self.committee_1.meetings.create(date=datetime.datetime.now(),
protocol_text='m2')
self.meeting_2.create_protocol_parts()
self.jacob = User.objects.create_user('jacob', '[email protected]',
'JKM')
self.adrian = User.objects.create_user('adrian', '[email protected]',
'ADRIAN')
(self.group, created) = Group.objects.get_or_create(name='Valid Email')
if created:
self.group.save()
self.group.permissions.add(Permission.objects.get(name='Can add annotation'))
self.jacob.groups.add(self.group)
ct = ContentType.objects.get_for_model(Tag)
self.adrian.user_permissions.add(Permission.objects.get(codename='add_tag', content_type=ct))
self.bill_1 = Bill.objects.create(stage='1', title='bill 1')
self.mk_1 = Member.objects.create(name='mk 1')
self.topic = self.committee_1.topic_set.create(creator=self.jacob,
title="hello", description="hello world")
self.tag_1 = Tag.objects.create(name='tag1')
def testCommitteeMeetingV2(self):
url = reverse('api_dispatch_list', kwargs={'resource_name': 'committeemeeting', 'api_name': 'v2'})
url = url + str(self.meeting_1.id) + '/?format=json'
res = self.client.get(url)
self.assertEqual(res.status_code, 200)
res_json = json.loads(res.content)
committee_url = reverse('api_dispatch_list', kwargs={'resource_name': 'committee', 'api_name': 'v2'})
committee_url = committee_url + str(self.committee_1.id) + '/'
self.assertEqual(res_json['committee'], committee_url)
self.assertEqual(res_json['absolute_url'], self.meeting_1.get_absolute_url())
def testCommitteeMeetingListV2(self):
url = reverse('api_dispatch_list', kwargs={'resource_name': 'committeemeeting', 'api_name': 'v2'})
url = url + '?format=json'
res = self.client.get(url)
self.assertEqual(res.status_code, 200)
res_json = json.loads(res.content)
self.assertEqual(len(res_json['objects']), 2)
self.assertTrue(
res_json['objects'][0]['absolute_url'] == self.meeting_1.get_absolute_url()
or
res_json['objects'][0]['absolute_url'] == self.meeting_2.get_absolute_url()
)
def testCommitteeMeetingV2CSV(self):
url = reverse('api_dispatch_list', kwargs={'resource_name': 'committeemeeting', 'api_name': 'v2'})
url = url + '?format=csv'
res = self.client.get(url)
self.assertEqual(res.status_code, 200)
for row in csv.DictReader(res.content.split('\n'), delimiter=','):
            if 'absolute_url' in row:
absurl = row['absolute_url']
else:
# \ufeff is the BOM - which is required for excel compatibility
absurl = row[u'\ufeff'.encode('utf8')+'absolute_url']
self.assertTrue(
absurl == self.meeting_1.get_absolute_url()
or
absurl == self.meeting_2.get_absolute_url()
)
class SwaggerTest(TestCase):
def testSwaggerUI(self):
"Swagger UI static resources should be properly mounted and served"
res = self.client.get(reverse('tastypie_swagger:index'))
self.assertEqual(res.status_code, 200)
self.assertIn("<title>Swagger UI</title>", res.content)
def testSwaggerResources(self):
"Swagger should find all the apis and list them as resources"
res = self.client.get(reverse('tastypie_swagger:resources'))
self.assertEqual(res.status_code, 200)
res_json = json.loads(res.content)
self.assertEqual(res_json["swaggerVersion"], "1.1")
rendered_apis = [api_obj_path['path'].lstrip('/') for api_obj_path in res_json["apis"]]
for api in apis.resources.v2_api._canonicals:
self.assertIn(api, rendered_apis)
def testSwaggerSchema(self):
"The schema for swagger should be generated properly for at least one controller"
res = self.client.get('/api/v2/doc/schema/agenda/')
self.assertEqual(res.status_code, 200)
res_json = json.loads(res.content)
self.assertEqual(res_json["swaggerVersion"], "1.1")
| bsd-3-clause |
liyu1990/sklearn | examples/ensemble/plot_gradient_boosting_oob.py | 50 | 4764 | """
======================================
Gradient Boosting Out-of-Bag estimates
======================================
Out-of-bag (OOB) estimates can be a useful heuristic to estimate
the "optimal" number of boosting iterations.
OOB estimates are almost identical to cross-validation estimates but
they can be computed on-the-fly without the need for repeated model
fitting.
OOB estimates are only available for Stochastic Gradient Boosting
(i.e. ``subsample < 1.0``), the estimates are derived from the improvement
in loss based on the examples not included in the bootstrap sample
(the so-called out-of-bag examples).
The OOB estimator is a pessimistic estimator of the true
test loss, but remains a fairly good approximation for a small number of trees.
The figure shows the cumulative sum of the negative OOB improvements
as a function of the boosting iteration. As you can see, it tracks the test
loss for the first hundred iterations but then diverges in a
pessimistic way.
The figure also shows the performance of 3-fold cross-validation, which
usually gives a better estimate of the test loss but is computationally
more demanding.
"""
print(__doc__)
# Author: Peter Prettenhofer <[email protected]>
#
# License: BSD 3 clause
import numpy as np
import matplotlib.pyplot as plt
from sklearn import ensemble
from sklearn.model_selection import KFold
from sklearn.model_selection import train_test_split
# Generate data (adapted from G. Ridgeway's gbm example)
n_samples = 1000
random_state = np.random.RandomState(13)
x1 = random_state.uniform(size=n_samples)
x2 = random_state.uniform(size=n_samples)
x3 = random_state.randint(0, 4, size=n_samples)
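# p below is the logistic sigmoid of a nonlinear score in the features:
#   p = 1 / (1 + exp(-(sin(3*x1) - 4*x2 + x3)))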
p = 1 / (1.0 + np.exp(-(np.sin(3 * x1) - 4 * x2 + x3)))
y = random_state.binomial(1, p, size=n_samples)
X = np.c_[x1, x2, x3]
X = X.astype(np.float32)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.5,
random_state=9)
# Fit classifier with out-of-bag estimates
params = {'n_estimators': 1200, 'max_depth': 3, 'subsample': 0.5,
'learning_rate': 0.01, 'min_samples_leaf': 1, 'random_state': 3}
clf = ensemble.GradientBoostingClassifier(**params)
clf.fit(X_train, y_train)
acc = clf.score(X_test, y_test)
print("Accuracy: {:.4f}".format(acc))
n_estimators = params['n_estimators']
x = np.arange(n_estimators) + 1
def heldout_score(clf, X_test, y_test):
"""compute deviance scores on ``X_test`` and ``y_test``. """
score = np.zeros((n_estimators,), dtype=np.float64)
for i, y_pred in enumerate(clf.staged_decision_function(X_test)):
score[i] = clf.loss_(y_test, y_pred)
return score
def cv_estimate(n_folds=3):
cv = KFold(n_folds=n_folds)
cv_clf = ensemble.GradientBoostingClassifier(**params)
val_scores = np.zeros((n_estimators,), dtype=np.float64)
for train, test in cv.split(X_train, y_train):
cv_clf.fit(X_train[train], y_train[train])
val_scores += heldout_score(cv_clf, X_train[test], y_train[test])
val_scores /= n_folds
return val_scores
# Estimate best n_estimator using cross-validation
cv_score = cv_estimate(3)
# Compute best n_estimator for test data
test_score = heldout_score(clf, X_test, y_test)
# negative cumulative sum of oob improvements
cumsum = -np.cumsum(clf.oob_improvement_)
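# (oob_improvement_[i] is the improvement in OOB loss at iteration i relative
# to the previous one, so this negated cumulative sum is a loss curve -- up to
# a constant offset -- comparable with the test and CV curves below)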
# min loss according to OOB
oob_best_iter = x[np.argmin(cumsum)]
# min loss according to test (normalize such that first loss is 0)
test_score -= test_score[0]
test_best_iter = x[np.argmin(test_score)]
# min loss according to cv (normalize such that first loss is 0)
cv_score -= cv_score[0]
cv_best_iter = x[np.argmin(cv_score)]
# color brew for the three curves
oob_color = list(map(lambda x: x / 256.0, (190, 174, 212)))
test_color = list(map(lambda x: x / 256.0, (127, 201, 127)))
cv_color = list(map(lambda x: x / 256.0, (253, 192, 134)))
# plot curves and vertical lines for best iterations
plt.plot(x, cumsum, label='OOB loss', color=oob_color)
plt.plot(x, test_score, label='Test loss', color=test_color)
plt.plot(x, cv_score, label='CV loss', color=cv_color)
plt.axvline(x=oob_best_iter, color=oob_color)
plt.axvline(x=test_best_iter, color=test_color)
plt.axvline(x=cv_best_iter, color=cv_color)
# add three vertical lines to xticks
xticks = plt.xticks()
xticks_pos = np.array(xticks[0].tolist() +
[oob_best_iter, cv_best_iter, test_best_iter])
xticks_label = np.array(list(map(lambda t: int(t), xticks[0])) +
['OOB', 'CV', 'Test'])
ind = np.argsort(xticks_pos)
xticks_pos = xticks_pos[ind]
xticks_label = xticks_label[ind]
plt.xticks(xticks_pos, xticks_label)
plt.legend(loc='upper right')
plt.ylabel('normalized loss')
plt.xlabel('number of iterations')
plt.show()
| bsd-3-clause |
DennisDenuto/puppet-commonscripts | files/aws_cli/AWS-ElasticBeanstalk-CLI-2.6.3/eb/macosx/python3/lib/aws/exception.py | 8 | 3871 | #!/usr/bin/env python
#==============================================================================
# Copyright 2012 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Amazon Software License (the "License"). You may not use
# this file except in compliance with the License. A copy of the License is
# located at
#
# http://aws.amazon.com/asl/
#
# or in the "license" file accompanying this file. This file is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, express or
# implied. See the License for the specific language governing permissions
# and limitations under the License.
#==============================================================================
class AwsErrorCode(object):
'''AWS common error code'''
AccessDenied = 'AccessDenied'
InsufficientPrivileges = 'InsufficientPrivileges'
InvalidClientTokenId = 'InvalidClientTokenId'
InvalidParameterCombination = 'InvalidParameterCombination'
InvalidParameterValue = 'InvalidParameterValue'
InvalidQueryParameter = 'InvalidQueryParameter'
MalformedQueryString = 'MalformedQueryString'
MissingParameter = 'MissingParameter'
OptInRequired = 'OptInRequired'
RequestExpired = 'RequestExpired'
Throttling = 'Throttling'
class AwsServiceException(Exception):
def __init__(self, msg, code, http_code):
self._msg = msg
self._code = code
self._http_code = http_code
@property
def message(self):
return self._msg
@property
def code(self):
return self._code
@property
def http_code(self):
return self._http_code
def __str__(self):
return '{0}. {1}'.format(self._code, self._msg)
def __repr__(self):
return 'HTTP {0}:{1}. {2}'.format(self._http_code, self._code, self._msg)
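        # e.g. repr(AwsServiceException('Rate exceeded', 'Throttling', 400))
        # -> 'HTTP 400:Throttling. Rate exceeded'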
class UnknownHttpCodeException(AwsServiceException):
''' Exception of receiving http code other than 200'''
def __init__(self, message, code, http_code):
super(UnknownHttpCodeException, self).__init__(message, code, http_code)
class MissingParameterException(AwsServiceException):
def __init__(self, ex):
if not issubclass(ex.__class__, AwsServiceException):
raise AttributeError('Must initialize from instance of AwsServiceException subclass.')
super(MissingParameterException, self).__init__(ex.message, ex.code, ex.http_code)
class InsufficientPrivilegesException(AwsServiceException):
def __init__(self, ex):
if not issubclass(ex.__class__, AwsServiceException):
raise AttributeError('Must initialize from instance of AwsServiceException subclass.')
super(InsufficientPrivilegesException, self).__init__(ex.message, ex.code, ex.http_code)
class InvalidParameterValueException(AwsServiceException):
def __init__(self, ex):
if not issubclass(ex.__class__, AwsServiceException):
raise AttributeError('Must initialize from instance of AwsServiceException subclass.')
super(InvalidParameterValueException, self).__init__(ex.message, ex.code, ex.http_code)
class OptInRequiredException(AwsServiceException):
def __init__(self, ex):
if not issubclass(ex.__class__, AwsServiceException):
raise AttributeError('Must initialize from instance of AwsServiceException subclass.')
super(OptInRequiredException, self).__init__(ex.message, ex.code, ex.http_code)
class AccessDeniedException(AwsServiceException):
def __init__(self, ex):
if not issubclass(ex.__class__, AwsServiceException):
raise AttributeError('Must initialize from instance of AwsServiceException subclass.')
super(AccessDeniedException, self).__init__(ex.message, ex.code, ex.http_code)
| mit |
les69/calvin-base | calvin/actorstore/systemactors/std/Alternate3.py | 2 | 1901 | # -*- coding: utf-8 -*-
# Copyright (c) 2015 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from calvin.actor.actor import Actor, ActionResult, condition, guard, manage
class Alternate3(Actor):
"""
Alternating between three streams of tokens
Inputs:
token_1 : first token stream
token_2 : second token stream
token_3 : third token stream
Outputs:
token : resulting token stream
"""
@manage(['next_port'])
def init(self):
self.next_port = 1
@condition(['token_1'], ['token'])
@guard(lambda self, _: self.next_port == 1)
def port_1(self, data):
self.next_port = 2
return ActionResult(production=(data, ))
@condition(['token_2'], ['token'])
@guard(lambda self, _: self.next_port == 2)
def port_2(self, data):
self.next_port = 3
return ActionResult(production=(data, ))
@condition(['token_3'], ['token'])
@guard(lambda self, _: self.next_port == 3)
def port_3(self, data):
self.next_port = 1
return ActionResult(production=(data, ))
action_priority = (port_1, port_2, port_3)
test_set = [
{
'in': {'token_1': [1], 'token_2': ['a'], 'token_3': ['alpha']},
'out': {'token': [1, 'a', 'alpha']}
},
{
'in': {'token_1': [1]},
'out': {'token': [1]}
}
]
| apache-2.0 |
Srogozins/aiohttp | tests/test_py35/test_resp.py | 3 | 1443 | import pytest
import aiohttp
from aiohttp import web
from aiohttp.client import _RequestContextManager
from collections.abc import Coroutine
@pytest.mark.run_loop
async def test_await(create_server, loop):
async def handler(request):
return web.HTTPOk()
app, url = await create_server()
app.router.add_route('GET', '/', handler)
resp = await aiohttp.get(url+'/', loop=loop)
assert resp.status == 200
assert resp.connection is not None
await resp.release()
assert resp.connection is None
@pytest.mark.run_loop
async def test_response_context_manager(create_server, loop):
async def handler(request):
return web.HTTPOk()
app, url = await create_server()
app.router.add_route('GET', '/', handler)
resp = await aiohttp.get(url+'/', loop=loop)
async with resp:
assert resp.status == 200
assert resp.connection is not None
assert resp.connection is None
@pytest.mark.run_loop
async def test_client_api_context_manager(create_server, loop):
async def handler(request):
return web.HTTPOk()
app, url = await create_server()
app.router.add_route('GET', '/', handler)
async with aiohttp.get(url+'/', loop=loop) as resp:
assert resp.status == 200
assert resp.connection is not None
assert resp.connection is None
def test_ctx_manager_is_coroutine():
assert issubclass(_RequestContextManager, Coroutine)
| apache-2.0 |
smurfix/p2pool | p2pool/bitcoin/script.py | 2 | 1585 | from p2pool.bitcoin import data as bitcoin_data
from p2pool.util import bases
def reads_nothing(f):
return '', f
def protoPUSH(length):
return lambda f: bitcoin_data.read(f, length)
def protoPUSHDATA(size_len):
def _(f):
length_str, f = bitcoin_data.read(f, size_len)
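        # the length prefix is little-endian: reverse the bytes and strip the
        # (now leading) NUL padding to get a big-endian natural number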
length = bases.string_to_natural(length_str[::-1].lstrip(chr(0)))
data, f = bitcoin_data.read(f, length)
return data, f
return _
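# e.g. OP_PUSHDATA1 (0x4c) is followed by a 1-byte length and then that many
# data bytes; OP_PUSHDATA2/OP_PUSHDATA4 use 2- and 4-byte little-endian lengths.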
opcodes = {}
for i in xrange(256):
opcodes[i] = 'UNK_' + str(i), reads_nothing
opcodes[0] = '0', reads_nothing
for i in xrange(1, 76):
opcodes[i] = 'PUSH%i' % i, protoPUSH(i)
opcodes[76] = 'PUSHDATA1', protoPUSHDATA(1)
opcodes[77] = 'PUSHDATA2', protoPUSHDATA(2)
opcodes[78] = 'PUSHDATA4', protoPUSHDATA(4)
opcodes[79] = '-1', reads_nothing
for i in xrange(81, 97):
opcodes[i] = str(i - 80), reads_nothing
opcodes[172] = 'CHECKSIG', reads_nothing
opcodes[173] = 'CHECKSIGVERIFY', reads_nothing
opcodes[174] = 'CHECKMULTISIG', reads_nothing
opcodes[175] = 'CHECKMULTISIGVERIFY', reads_nothing
def parse(script):
f = script, 0
while bitcoin_data.size(f):
opcode_str, f = bitcoin_data.read(f, 1)
opcode = ord(opcode_str)
opcode_name, read_func = opcodes[opcode]
opcode_arg, f = read_func(f)
yield opcode_name, opcode_arg
def get_sigop_count(script):
weights = {
'CHECKSIG': 1,
'CHECKSIGVERIFY': 1,
'CHECKMULTISIG': 20,
'CHECKMULTISIGVERIFY': 20,
}
return sum(weights.get(opcode_name, 0) for opcode_name, opcode_arg in parse(script))
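# Hypothetical example: a standard pay-to-pubkey-hash output script ends in a
# single CHECKSIG, so get_sigop_count(p2pkh_script) == 1; a bare CHECKMULTISIG
# counts as 20 sigops under this (legacy-style) accounting.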
| gpl-3.0 |
lgarren/spack | var/spack/repos/builtin/packages/libelf/package.py | 3 | 2051 | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Libelf(AutotoolsPackage):
"""libelf lets you read, modify or create ELF object files in an
architecture-independent way. The library takes care of size
and endian issues, e.g. you can process a file for SPARC
processors on an Intel-based system."""
homepage = "http://www.mr511.de/software/english.html"
url = "http://www.mr511.de/software/libelf-0.8.13.tar.gz"
version('0.8.13', '4136d7b4c04df68b686570afa26988ac')
version('0.8.12', 'e21f8273d9f5f6d43a59878dc274fec7')
provides('elf@0')
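    # libelf satisfies the virtual 'elf' dependency (API version 0), so
    # dependents can simply depend on 'elf'.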
def configure_args(self):
args = ["--enable-shared",
"--disable-dependency-tracking",
"--disable-debug"]
return args
def install(self, spec, prefix):
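        # libelf's install target is not parallel-safe, so run make serially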
make('install', parallel=False)
| lgpl-2.1 |
domob1812/bitcoin | test/functional/feature_rbf.py | 2 | 25198 | #!/usr/bin/env python3
# Copyright (c) 2014-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the RBF code."""
from decimal import Decimal
from test_framework.blocktools import COINBASE_MATURITY
from test_framework.messages import COIN, COutPoint, CTransaction, CTxIn, CTxOut, BIP125_SEQUENCE_NUMBER
from test_framework.script import CScript, OP_DROP
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, assert_raises_rpc_error, satoshi_round
from test_framework.script_util import DUMMY_P2WPKH_SCRIPT, DUMMY_2_P2WPKH_SCRIPT
from test_framework.wallet import MiniWallet
MAX_REPLACEMENT_LIMIT = 100
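# BIP125 rule 5: a replacement may not evict more than 100 transactions in
# total (the conflicting originals plus all of their descendants).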
def txToHex(tx):
return tx.serialize().hex()
def make_utxo(node, amount, confirmed=True, scriptPubKey=DUMMY_P2WPKH_SCRIPT):
"""Create a txout with a given amount and scriptPubKey
Mines coins as needed.
confirmed - txouts created will be confirmed in the blockchain;
unconfirmed otherwise.
"""
fee = 1 * COIN
while node.getbalance() < satoshi_round((amount + fee) / COIN):
node.generate(COINBASE_MATURITY)
new_addr = node.getnewaddress()
txid = node.sendtoaddress(new_addr, satoshi_round((amount + fee) / COIN))
tx1 = node.getrawtransaction(txid, 1)
txid = int(txid, 16)
i, _ = next(filter(lambda vout: new_addr == vout[1]['scriptPubKey']['address'], enumerate(tx1['vout'])))
tx2 = CTransaction()
tx2.vin = [CTxIn(COutPoint(txid, i))]
tx2.vout = [CTxOut(amount, scriptPubKey)]
tx2.rehash()
signed_tx = node.signrawtransactionwithwallet(txToHex(tx2))
txid = node.sendrawtransaction(signed_tx['hex'], 0)
# If requested, ensure txouts are confirmed.
if confirmed:
mempool_size = len(node.getrawmempool())
while mempool_size > 0:
node.generate(1)
new_size = len(node.getrawmempool())
# Error out if we have something stuck in the mempool, as this
# would likely be a bug.
assert new_size < mempool_size
mempool_size = new_size
return COutPoint(int(txid, 16), 0)
class ReplaceByFeeTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.extra_args = [
[
"-acceptnonstdtxn=1",
"-maxorphantx=1000",
"-limitancestorcount=50",
"-limitancestorsize=101",
"-limitdescendantcount=200",
"-limitdescendantsize=101",
],
]
self.supports_cli = False
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
make_utxo(self.nodes[0], 1 * COIN)
# Ensure nodes are synced
self.sync_all()
self.log.info("Running test simple doublespend...")
self.test_simple_doublespend()
self.log.info("Running test doublespend chain...")
self.test_doublespend_chain()
self.log.info("Running test doublespend tree...")
self.test_doublespend_tree()
self.log.info("Running test replacement feeperkb...")
self.test_replacement_feeperkb()
self.log.info("Running test spends of conflicting outputs...")
self.test_spends_of_conflicting_outputs()
self.log.info("Running test new unconfirmed inputs...")
self.test_new_unconfirmed_inputs()
self.log.info("Running test too many replacements...")
self.test_too_many_replacements()
self.log.info("Running test opt-in...")
self.test_opt_in()
self.log.info("Running test RPC...")
self.test_rpc()
self.log.info("Running test prioritised transactions...")
self.test_prioritised_transactions()
self.log.info("Running test no inherited signaling...")
self.test_no_inherited_signaling()
self.log.info("Passed")
def test_simple_doublespend(self):
"""Simple doublespend"""
tx0_outpoint = make_utxo(self.nodes[0], int(1.1 * COIN))
# make_utxo may have generated a bunch of blocks, so we need to sync
# before we can spend the coins generated, or else the resulting
# transactions might not be accepted by our peers.
self.sync_all()
tx1a = CTransaction()
tx1a.vin = [CTxIn(tx0_outpoint, nSequence=0)]
tx1a.vout = [CTxOut(1 * COIN, DUMMY_P2WPKH_SCRIPT)]
tx1a_hex = txToHex(tx1a)
tx1a_txid = self.nodes[0].sendrawtransaction(tx1a_hex, 0)
self.sync_all()
# Should fail because we haven't changed the fee
tx1b = CTransaction()
tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)]
tx1b.vout = [CTxOut(1 * COIN, DUMMY_2_P2WPKH_SCRIPT)]
tx1b_hex = txToHex(tx1b)
# This will raise an exception due to insufficient fee
assert_raises_rpc_error(-26, "insufficient fee", self.nodes[0].sendrawtransaction, tx1b_hex, 0)
# Extra 0.1 BTC fee
tx1b = CTransaction()
tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)]
tx1b.vout = [CTxOut(int(0.9 * COIN), DUMMY_P2WPKH_SCRIPT)]
tx1b_hex = txToHex(tx1b)
# Works when enabled
tx1b_txid = self.nodes[0].sendrawtransaction(tx1b_hex, 0)
mempool = self.nodes[0].getrawmempool()
assert tx1a_txid not in mempool
assert tx1b_txid in mempool
assert_equal(tx1b_hex, self.nodes[0].getrawtransaction(tx1b_txid))
def test_doublespend_chain(self):
"""Doublespend of a long chain"""
initial_nValue = 50 * COIN
tx0_outpoint = make_utxo(self.nodes[0], initial_nValue)
prevout = tx0_outpoint
remaining_value = initial_nValue
chain_txids = []
while remaining_value > 10 * COIN:
remaining_value -= 1 * COIN
tx = CTransaction()
tx.vin = [CTxIn(prevout, nSequence=0)]
tx.vout = [CTxOut(remaining_value, CScript([1, OP_DROP] * 15 + [1]))]
tx_hex = txToHex(tx)
txid = self.nodes[0].sendrawtransaction(tx_hex, 0)
chain_txids.append(txid)
prevout = COutPoint(int(txid, 16), 0)
# Whether the double-spend is allowed is evaluated by including all
# child fees - 40 BTC - so this attempt is rejected.
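        # (Worked numbers: the loop above created 40 transactions, each paying
        # 1 BTC in fees, so a replacement must pay more than 40 BTC; this one
        # pays only 50 - 30 = 20 BTC.)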
dbl_tx = CTransaction()
dbl_tx.vin = [CTxIn(tx0_outpoint, nSequence=0)]
dbl_tx.vout = [CTxOut(initial_nValue - 30 * COIN, DUMMY_P2WPKH_SCRIPT)]
dbl_tx_hex = txToHex(dbl_tx)
# This will raise an exception due to insufficient fee
assert_raises_rpc_error(-26, "insufficient fee", self.nodes[0].sendrawtransaction, dbl_tx_hex, 0)
# Accepted with sufficient fee
dbl_tx = CTransaction()
dbl_tx.vin = [CTxIn(tx0_outpoint, nSequence=0)]
dbl_tx.vout = [CTxOut(1 * COIN, DUMMY_P2WPKH_SCRIPT)]
dbl_tx_hex = txToHex(dbl_tx)
self.nodes[0].sendrawtransaction(dbl_tx_hex, 0)
mempool = self.nodes[0].getrawmempool()
for doublespent_txid in chain_txids:
assert doublespent_txid not in mempool
def test_doublespend_tree(self):
"""Doublespend of a big tree of transactions"""
initial_nValue = 50 * COIN
tx0_outpoint = make_utxo(self.nodes[0], initial_nValue)
def branch(prevout, initial_value, max_txs, tree_width=5, fee=0.0001 * COIN, _total_txs=None):
if _total_txs is None:
_total_txs = [0]
if _total_txs[0] >= max_txs:
return
txout_value = (initial_value - fee) // tree_width
if txout_value < fee:
return
vout = [CTxOut(txout_value, CScript([i+1]))
for i in range(tree_width)]
tx = CTransaction()
tx.vin = [CTxIn(prevout, nSequence=0)]
tx.vout = vout
tx_hex = txToHex(tx)
assert len(tx.serialize()) < 100000
txid = self.nodes[0].sendrawtransaction(tx_hex, 0)
yield tx
_total_txs[0] += 1
txid = int(txid, 16)
for i, txout in enumerate(tx.vout):
for x in branch(COutPoint(txid, i), txout_value,
max_txs,
tree_width=tree_width, fee=fee,
_total_txs=_total_txs):
yield x
fee = int(0.0001 * COIN)
n = MAX_REPLACEMENT_LIMIT
tree_txs = list(branch(tx0_outpoint, initial_nValue, n, fee=fee))
assert_equal(len(tree_txs), n)
# Attempt double-spend, will fail because too little fee paid
dbl_tx = CTransaction()
dbl_tx.vin = [CTxIn(tx0_outpoint, nSequence=0)]
dbl_tx.vout = [CTxOut(initial_nValue - fee * n, DUMMY_P2WPKH_SCRIPT)]
dbl_tx_hex = txToHex(dbl_tx)
# This will raise an exception due to insufficient fee
assert_raises_rpc_error(-26, "insufficient fee", self.nodes[0].sendrawtransaction, dbl_tx_hex, 0)
# 1 BTC fee is enough
dbl_tx = CTransaction()
dbl_tx.vin = [CTxIn(tx0_outpoint, nSequence=0)]
dbl_tx.vout = [CTxOut(initial_nValue - fee * n - 1 * COIN, DUMMY_P2WPKH_SCRIPT)]
dbl_tx_hex = txToHex(dbl_tx)
self.nodes[0].sendrawtransaction(dbl_tx_hex, 0)
mempool = self.nodes[0].getrawmempool()
for tx in tree_txs:
tx.rehash()
assert tx.hash not in mempool
# Try again, but with more total transactions than the "max txs
# double-spent at once" anti-DoS limit.
for n in (MAX_REPLACEMENT_LIMIT + 1, MAX_REPLACEMENT_LIMIT * 2):
fee = int(0.0001 * COIN)
tx0_outpoint = make_utxo(self.nodes[0], initial_nValue)
tree_txs = list(branch(tx0_outpoint, initial_nValue, n, fee=fee))
assert_equal(len(tree_txs), n)
dbl_tx = CTransaction()
dbl_tx.vin = [CTxIn(tx0_outpoint, nSequence=0)]
dbl_tx.vout = [CTxOut(initial_nValue - 2 * fee * n, DUMMY_P2WPKH_SCRIPT)]
dbl_tx_hex = txToHex(dbl_tx)
# This will raise an exception
assert_raises_rpc_error(-26, "too many potential replacements", self.nodes[0].sendrawtransaction, dbl_tx_hex, 0)
for tx in tree_txs:
tx.rehash()
self.nodes[0].getrawtransaction(tx.hash)
def test_replacement_feeperkb(self):
"""Replacement requires fee-per-KB to be higher"""
tx0_outpoint = make_utxo(self.nodes[0], int(1.1 * COIN))
tx1a = CTransaction()
tx1a.vin = [CTxIn(tx0_outpoint, nSequence=0)]
tx1a.vout = [CTxOut(1 * COIN, DUMMY_P2WPKH_SCRIPT)]
tx1a_hex = txToHex(tx1a)
self.nodes[0].sendrawtransaction(tx1a_hex, 0)
# Higher fee, but the fee per KB is much lower, so the replacement is
# rejected.
tx1b = CTransaction()
tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)]
tx1b.vout = [CTxOut(int(0.001 * COIN), CScript([b'a' * 999000]))]
tx1b_hex = txToHex(tx1b)
# This will raise an exception due to insufficient fee
assert_raises_rpc_error(-26, "insufficient fee", self.nodes[0].sendrawtransaction, tx1b_hex, 0)
def test_spends_of_conflicting_outputs(self):
"""Replacements that spend conflicting tx outputs are rejected"""
utxo1 = make_utxo(self.nodes[0], int(1.2 * COIN))
utxo2 = make_utxo(self.nodes[0], 3 * COIN)
tx1a = CTransaction()
tx1a.vin = [CTxIn(utxo1, nSequence=0)]
tx1a.vout = [CTxOut(int(1.1 * COIN), DUMMY_P2WPKH_SCRIPT)]
tx1a_hex = txToHex(tx1a)
tx1a_txid = self.nodes[0].sendrawtransaction(tx1a_hex, 0)
tx1a_txid = int(tx1a_txid, 16)
# Direct spend an output of the transaction we're replacing.
tx2 = CTransaction()
tx2.vin = [CTxIn(utxo1, nSequence=0), CTxIn(utxo2, nSequence=0)]
tx2.vin.append(CTxIn(COutPoint(tx1a_txid, 0), nSequence=0))
tx2.vout = tx1a.vout
tx2_hex = txToHex(tx2)
# This will raise an exception
assert_raises_rpc_error(-26, "bad-txns-spends-conflicting-tx", self.nodes[0].sendrawtransaction, tx2_hex, 0)
# Spend tx1a's output to test the indirect case.
tx1b = CTransaction()
tx1b.vin = [CTxIn(COutPoint(tx1a_txid, 0), nSequence=0)]
tx1b.vout = [CTxOut(1 * COIN, DUMMY_P2WPKH_SCRIPT)]
tx1b_hex = txToHex(tx1b)
tx1b_txid = self.nodes[0].sendrawtransaction(tx1b_hex, 0)
tx1b_txid = int(tx1b_txid, 16)
tx2 = CTransaction()
tx2.vin = [CTxIn(utxo1, nSequence=0), CTxIn(utxo2, nSequence=0),
CTxIn(COutPoint(tx1b_txid, 0))]
tx2.vout = tx1a.vout
tx2_hex = txToHex(tx2)
# This will raise an exception
assert_raises_rpc_error(-26, "bad-txns-spends-conflicting-tx", self.nodes[0].sendrawtransaction, tx2_hex, 0)
def test_new_unconfirmed_inputs(self):
"""Replacements that add new unconfirmed inputs are rejected"""
confirmed_utxo = make_utxo(self.nodes[0], int(1.1 * COIN))
unconfirmed_utxo = make_utxo(self.nodes[0], int(0.1 * COIN), False)
tx1 = CTransaction()
tx1.vin = [CTxIn(confirmed_utxo)]
tx1.vout = [CTxOut(1 * COIN, DUMMY_P2WPKH_SCRIPT)]
tx1_hex = txToHex(tx1)
self.nodes[0].sendrawtransaction(tx1_hex, 0)
tx2 = CTransaction()
tx2.vin = [CTxIn(confirmed_utxo), CTxIn(unconfirmed_utxo)]
tx2.vout = tx1.vout
tx2_hex = txToHex(tx2)
# This will raise an exception
assert_raises_rpc_error(-26, "replacement-adds-unconfirmed", self.nodes[0].sendrawtransaction, tx2_hex, 0)
def test_too_many_replacements(self):
"""Replacements that evict too many transactions are rejected"""
# Try directly replacing more than MAX_REPLACEMENT_LIMIT
# transactions
# Start by creating a single transaction with many outputs
initial_nValue = 10 * COIN
utxo = make_utxo(self.nodes[0], initial_nValue)
fee = int(0.0001 * COIN)
split_value = int((initial_nValue - fee) / (MAX_REPLACEMENT_LIMIT + 1))
outputs = []
for _ in range(MAX_REPLACEMENT_LIMIT + 1):
outputs.append(CTxOut(split_value, CScript([1])))
splitting_tx = CTransaction()
splitting_tx.vin = [CTxIn(utxo, nSequence=0)]
splitting_tx.vout = outputs
splitting_tx_hex = txToHex(splitting_tx)
txid = self.nodes[0].sendrawtransaction(splitting_tx_hex, 0)
txid = int(txid, 16)
# Now spend each of those outputs individually
for i in range(MAX_REPLACEMENT_LIMIT + 1):
tx_i = CTransaction()
tx_i.vin = [CTxIn(COutPoint(txid, i), nSequence=0)]
tx_i.vout = [CTxOut(split_value - fee, DUMMY_P2WPKH_SCRIPT)]
tx_i_hex = txToHex(tx_i)
self.nodes[0].sendrawtransaction(tx_i_hex, 0)
# Now create doublespend of the whole lot; should fail.
# Need a big enough fee to cover all spending transactions and have
# a higher fee rate
double_spend_value = (split_value - 100 * fee) * (MAX_REPLACEMENT_LIMIT + 1)
inputs = []
for i in range(MAX_REPLACEMENT_LIMIT + 1):
inputs.append(CTxIn(COutPoint(txid, i), nSequence=0))
double_tx = CTransaction()
double_tx.vin = inputs
double_tx.vout = [CTxOut(double_spend_value, CScript([b'a']))]
double_tx_hex = txToHex(double_tx)
# This will raise an exception
assert_raises_rpc_error(-26, "too many potential replacements", self.nodes[0].sendrawtransaction, double_tx_hex, 0)
# If we remove an input, it should pass
double_tx = CTransaction()
double_tx.vin = inputs[0:-1]
double_tx.vout = [CTxOut(double_spend_value, CScript([b'a']))]
double_tx_hex = txToHex(double_tx)
self.nodes[0].sendrawtransaction(double_tx_hex, 0)
def test_opt_in(self):
"""Replacing should only work if orig tx opted in"""
tx0_outpoint = make_utxo(self.nodes[0], int(1.1 * COIN))
# Create a non-opting in transaction
tx1a = CTransaction()
tx1a.vin = [CTxIn(tx0_outpoint, nSequence=0xffffffff)]
tx1a.vout = [CTxOut(1 * COIN, DUMMY_P2WPKH_SCRIPT)]
tx1a_hex = txToHex(tx1a)
tx1a_txid = self.nodes[0].sendrawtransaction(tx1a_hex, 0)
# This transaction isn't shown as replaceable
assert_equal(self.nodes[0].getmempoolentry(tx1a_txid)['bip125-replaceable'], False)
# Shouldn't be able to double-spend
tx1b = CTransaction()
tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)]
tx1b.vout = [CTxOut(int(0.9 * COIN), DUMMY_P2WPKH_SCRIPT)]
tx1b_hex = txToHex(tx1b)
# This will raise an exception
assert_raises_rpc_error(-26, "txn-mempool-conflict", self.nodes[0].sendrawtransaction, tx1b_hex, 0)
tx1_outpoint = make_utxo(self.nodes[0], int(1.1 * COIN))
# Create a different non-opting in transaction
tx2a = CTransaction()
tx2a.vin = [CTxIn(tx1_outpoint, nSequence=0xfffffffe)]
tx2a.vout = [CTxOut(1 * COIN, DUMMY_P2WPKH_SCRIPT)]
tx2a_hex = txToHex(tx2a)
tx2a_txid = self.nodes[0].sendrawtransaction(tx2a_hex, 0)
# Still shouldn't be able to double-spend
tx2b = CTransaction()
tx2b.vin = [CTxIn(tx1_outpoint, nSequence=0)]
tx2b.vout = [CTxOut(int(0.9 * COIN), DUMMY_P2WPKH_SCRIPT)]
tx2b_hex = txToHex(tx2b)
# This will raise an exception
assert_raises_rpc_error(-26, "txn-mempool-conflict", self.nodes[0].sendrawtransaction, tx2b_hex, 0)
# Now create a new transaction that spends from tx1a and tx2a
# opt-in on one of the inputs
# Transaction should be replaceable on either input
tx1a_txid = int(tx1a_txid, 16)
tx2a_txid = int(tx2a_txid, 16)
tx3a = CTransaction()
tx3a.vin = [CTxIn(COutPoint(tx1a_txid, 0), nSequence=0xffffffff),
CTxIn(COutPoint(tx2a_txid, 0), nSequence=0xfffffffd)]
tx3a.vout = [CTxOut(int(0.9 * COIN), CScript([b'c'])), CTxOut(int(0.9 * COIN), CScript([b'd']))]
tx3a_hex = txToHex(tx3a)
tx3a_txid = self.nodes[0].sendrawtransaction(tx3a_hex, 0)
# This transaction is shown as replaceable
assert_equal(self.nodes[0].getmempoolentry(tx3a_txid)['bip125-replaceable'], True)
tx3b = CTransaction()
tx3b.vin = [CTxIn(COutPoint(tx1a_txid, 0), nSequence=0)]
tx3b.vout = [CTxOut(int(0.5 * COIN), DUMMY_P2WPKH_SCRIPT)]
tx3b_hex = txToHex(tx3b)
tx3c = CTransaction()
tx3c.vin = [CTxIn(COutPoint(tx2a_txid, 0), nSequence=0)]
tx3c.vout = [CTxOut(int(0.5 * COIN), DUMMY_P2WPKH_SCRIPT)]
tx3c_hex = txToHex(tx3c)
self.nodes[0].sendrawtransaction(tx3b_hex, 0)
# If tx3b was accepted, tx3c won't look like a replacement,
# but make sure it is accepted anyway
self.nodes[0].sendrawtransaction(tx3c_hex, 0)
def test_prioritised_transactions(self):
# Ensure that fee deltas used via prioritisetransaction are
# correctly used by replacement logic
# 1. Check that feeperkb uses modified fees
tx0_outpoint = make_utxo(self.nodes[0], int(1.1 * COIN))
tx1a = CTransaction()
tx1a.vin = [CTxIn(tx0_outpoint, nSequence=0)]
tx1a.vout = [CTxOut(1 * COIN, DUMMY_P2WPKH_SCRIPT)]
tx1a_hex = txToHex(tx1a)
tx1a_txid = self.nodes[0].sendrawtransaction(tx1a_hex, 0)
# Higher fee, but the actual fee per KB is much lower.
tx1b = CTransaction()
tx1b.vin = [CTxIn(tx0_outpoint, nSequence=0)]
tx1b.vout = [CTxOut(int(0.001 * COIN), CScript([b'a' * 740000]))]
tx1b_hex = txToHex(tx1b)
# Verify tx1b cannot replace tx1a.
assert_raises_rpc_error(-26, "insufficient fee", self.nodes[0].sendrawtransaction, tx1b_hex, 0)
# Use prioritisetransaction to set tx1a's fee to 0.
self.nodes[0].prioritisetransaction(txid=tx1a_txid, fee_delta=int(-0.1 * COIN))
# Now tx1b should be able to replace tx1a
tx1b_txid = self.nodes[0].sendrawtransaction(tx1b_hex, 0)
assert tx1b_txid in self.nodes[0].getrawmempool()
# 2. Check that absolute fee checks use modified fee.
tx1_outpoint = make_utxo(self.nodes[0], int(1.1 * COIN))
tx2a = CTransaction()
tx2a.vin = [CTxIn(tx1_outpoint, nSequence=0)]
tx2a.vout = [CTxOut(1 * COIN, DUMMY_P2WPKH_SCRIPT)]
tx2a_hex = txToHex(tx2a)
self.nodes[0].sendrawtransaction(tx2a_hex, 0)
# Lower fee, but we'll prioritise it
tx2b = CTransaction()
tx2b.vin = [CTxIn(tx1_outpoint, nSequence=0)]
tx2b.vout = [CTxOut(int(1.01 * COIN), DUMMY_P2WPKH_SCRIPT)]
tx2b.rehash()
tx2b_hex = txToHex(tx2b)
# Verify tx2b cannot replace tx2a.
assert_raises_rpc_error(-26, "insufficient fee", self.nodes[0].sendrawtransaction, tx2b_hex, 0)
# Now prioritise tx2b to have a higher modified fee
self.nodes[0].prioritisetransaction(txid=tx2b.hash, fee_delta=int(0.1 * COIN))
# tx2b should now be accepted
tx2b_txid = self.nodes[0].sendrawtransaction(tx2b_hex, 0)
assert tx2b_txid in self.nodes[0].getrawmempool()
def test_rpc(self):
us0 = self.nodes[0].listunspent()[0]
ins = [us0]
outs = {self.nodes[0].getnewaddress(): Decimal(1.0000000)}
rawtx0 = self.nodes[0].createrawtransaction(ins, outs, 0, True)
rawtx1 = self.nodes[0].createrawtransaction(ins, outs, 0, False)
json0 = self.nodes[0].decoderawtransaction(rawtx0)
json1 = self.nodes[0].decoderawtransaction(rawtx1)
assert_equal(json0["vin"][0]["sequence"], 4294967293)
assert_equal(json1["vin"][0]["sequence"], 4294967295)
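        # 4294967293 == 0xfffffffd signals BIP125 replaceability (any input
        # nSequence < 0xfffffffe opts in); 4294967295 == 0xffffffff does not.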
rawtx2 = self.nodes[0].createrawtransaction([], outs)
frawtx2a = self.nodes[0].fundrawtransaction(rawtx2, {"replaceable": True})
frawtx2b = self.nodes[0].fundrawtransaction(rawtx2, {"replaceable": False})
json0 = self.nodes[0].decoderawtransaction(frawtx2a['hex'])
json1 = self.nodes[0].decoderawtransaction(frawtx2b['hex'])
assert_equal(json0["vin"][0]["sequence"], 4294967293)
assert_equal(json1["vin"][0]["sequence"], 4294967294)
def test_no_inherited_signaling(self):
wallet = MiniWallet(self.nodes[0])
wallet.scan_blocks(start=76, num=1)
confirmed_utxo = wallet.get_utxo()
# Create an explicitly opt-in parent transaction
optin_parent_tx = wallet.send_self_transfer(
from_node=self.nodes[0],
utxo_to_spend=confirmed_utxo,
sequence=BIP125_SEQUENCE_NUMBER,
fee_rate=Decimal('0.01'),
)
assert_equal(True, self.nodes[0].getmempoolentry(optin_parent_tx['txid'])['bip125-replaceable'])
replacement_parent_tx = wallet.create_self_transfer(
from_node=self.nodes[0],
utxo_to_spend=confirmed_utxo,
sequence=BIP125_SEQUENCE_NUMBER,
fee_rate=Decimal('0.02'),
)
# Test if parent tx can be replaced.
res = self.nodes[0].testmempoolaccept(rawtxs=[replacement_parent_tx['hex']])[0]
# Parent can be replaced.
assert_equal(res['allowed'], True)
# Create an opt-out child tx spending the opt-in parent
parent_utxo = wallet.get_utxo(txid=optin_parent_tx['txid'])
optout_child_tx = wallet.send_self_transfer(
from_node=self.nodes[0],
utxo_to_spend=parent_utxo,
sequence=0xffffffff,
fee_rate=Decimal('0.01'),
)
# Reports true due to inheritance
assert_equal(True, self.nodes[0].getmempoolentry(optout_child_tx['txid'])['bip125-replaceable'])
replacement_child_tx = wallet.create_self_transfer(
from_node=self.nodes[0],
utxo_to_spend=parent_utxo,
sequence=0xffffffff,
fee_rate=Decimal('0.02'),
mempool_valid=False,
)
# Broadcast replacement child tx
# BIP 125 :
# 1. The original transactions signal replaceability explicitly or through inheritance as described in the above
# Summary section.
# The original transaction (`optout_child_tx`) doesn't signal RBF but its parent (`optin_parent_tx`) does.
# The replacement transaction (`replacement_child_tx`) should be able to replace the original transaction.
# See CVE-2021-31876 for further explanations.
assert_equal(True, self.nodes[0].getmempoolentry(optin_parent_tx['txid'])['bip125-replaceable'])
assert_raises_rpc_error(-26, 'txn-mempool-conflict', self.nodes[0].sendrawtransaction, replacement_child_tx["hex"], 0)
if __name__ == '__main__':
ReplaceByFeeTest().main()
| mit |
bob-the-hamster/commandergenius | project/jni/python/src/Lib/plat-mac/lib-scriptpackages/Finder/Files.py | 80 | 6439 | """Suite Files: Classes representing files
Level 1, version 1
Generated from /System/Library/CoreServices/Finder.app
AETE/AEUT resource version 0/144, language 0, script 0
"""
import aetools
import MacOS
_code = 'fndr'
class Files_Events:
pass
class alias_file(aetools.ComponentItem):
"""alias file - An alias file (created with \xd2Make Alias\xd3) """
want = 'alia'
class _Prop__3c_Inheritance_3e_(aetools.NProperty):
"""<Inheritance> - inherits some of its properties from the file class """
which = 'c@#^'
want = 'file'
class _Prop_original_item(aetools.NProperty):
"""original item - the original item pointed to by the alias """
which = 'orig'
want = 'obj '
alias_files = alias_file
class application_file(aetools.ComponentItem):
"""application file - An application's file on disk """
want = 'appf'
class _Prop_accepts_high_level_events(aetools.NProperty):
"""accepts high level events - Is the application high-level event aware? (OBSOLETE: always returns true) """
which = 'isab'
want = 'bool'
class _Prop_has_scripting_terminology(aetools.NProperty):
"""has scripting terminology - Does the process have a scripting terminology, i.e., can it be scripted? """
which = 'hscr'
want = 'bool'
class _Prop_minimum_size(aetools.NProperty):
"""minimum size - the smallest memory size with which the application can be launched """
which = 'mprt'
want = 'long'
class _Prop_opens_in_Classic(aetools.NProperty):
"""opens in Classic - Should the application launch in the Classic environment? """
which = 'Clsc'
want = 'bool'
class _Prop_preferred_size(aetools.NProperty):
"""preferred size - the memory size with which the application will be launched """
which = 'appt'
want = 'long'
class _Prop_suggested_size(aetools.NProperty):
"""suggested size - the memory size with which the developer recommends the application be launched """
which = 'sprt'
want = 'long'
application_files = application_file
class clipping(aetools.ComponentItem):
"""clipping - A clipping """
want = 'clpf'
class _Prop_clipping_window(aetools.NProperty):
"""clipping window - (NOT AVAILABLE YET) the clipping window for this clipping """
which = 'lwnd'
want = 'obj '
clippings = clipping
class document_file(aetools.ComponentItem):
"""document file - A document file """
want = 'docf'
document_files = document_file
class file(aetools.ComponentItem):
"""file - A file """
want = 'file'
class _Prop_creator_type(aetools.NProperty):
"""creator type - the OSType identifying the application that created the item """
which = 'fcrt'
want = 'type'
class _Prop_file_type(aetools.NProperty):
"""file type - the OSType identifying the type of data contained in the item """
which = 'asty'
want = 'type'
class _Prop_product_version(aetools.NProperty):
"""product version - the version of the product (visible at the top of the \xd2Get Info\xd3 window) """
which = 'ver2'
want = 'utxt'
class _Prop_stationery(aetools.NProperty):
"""stationery - Is the file a stationery pad? """
which = 'pspd'
want = 'bool'
class _Prop_version(aetools.NProperty):
"""version - the version of the file (visible at the bottom of the \xd2Get Info\xd3 window) """
which = 'vers'
want = 'utxt'
files = file
class internet_location_file(aetools.ComponentItem):
"""internet location file - An file containing an internet location """
want = 'inlf'
class _Prop_location(aetools.NProperty):
"""location - the internet location """
which = 'iloc'
want = 'utxt'
internet_location_files = internet_location_file
class package(aetools.ComponentItem):
"""package - A package """
want = 'pack'
packages = package
alias_file._superclassnames = ['file']
alias_file._privpropdict = {
'_3c_Inheritance_3e_' : _Prop__3c_Inheritance_3e_,
'original_item' : _Prop_original_item,
}
alias_file._privelemdict = {
}
application_file._superclassnames = ['file']
application_file._privpropdict = {
'_3c_Inheritance_3e_' : _Prop__3c_Inheritance_3e_,
'accepts_high_level_events' : _Prop_accepts_high_level_events,
'has_scripting_terminology' : _Prop_has_scripting_terminology,
'minimum_size' : _Prop_minimum_size,
'opens_in_Classic' : _Prop_opens_in_Classic,
'preferred_size' : _Prop_preferred_size,
'suggested_size' : _Prop_suggested_size,
}
application_file._privelemdict = {
}
clipping._superclassnames = ['file']
clipping._privpropdict = {
'_3c_Inheritance_3e_' : _Prop__3c_Inheritance_3e_,
'clipping_window' : _Prop_clipping_window,
}
clipping._privelemdict = {
}
document_file._superclassnames = ['file']
document_file._privpropdict = {
'_3c_Inheritance_3e_' : _Prop__3c_Inheritance_3e_,
}
document_file._privelemdict = {
}
import Finder_items
file._superclassnames = ['item']
file._privpropdict = {
'_3c_Inheritance_3e_' : _Prop__3c_Inheritance_3e_,
'creator_type' : _Prop_creator_type,
'file_type' : _Prop_file_type,
'product_version' : _Prop_product_version,
'stationery' : _Prop_stationery,
'version' : _Prop_version,
}
file._privelemdict = {
}
internet_location_file._superclassnames = ['file']
internet_location_file._privpropdict = {
'_3c_Inheritance_3e_' : _Prop__3c_Inheritance_3e_,
'location' : _Prop_location,
}
internet_location_file._privelemdict = {
}
package._superclassnames = ['item']
package._privpropdict = {
'_3c_Inheritance_3e_' : _Prop__3c_Inheritance_3e_,
}
package._privelemdict = {
}
#
# Indices of types declared in this module
#
_classdeclarations = {
'alia' : alias_file,
'appf' : application_file,
'clpf' : clipping,
'docf' : document_file,
'file' : file,
'inlf' : internet_location_file,
'pack' : package,
}
_propdeclarations = {
'Clsc' : _Prop_opens_in_Classic,
'appt' : _Prop_preferred_size,
'asty' : _Prop_file_type,
'c@#^' : _Prop__3c_Inheritance_3e_,
'fcrt' : _Prop_creator_type,
'hscr' : _Prop_has_scripting_terminology,
'iloc' : _Prop_location,
'isab' : _Prop_accepts_high_level_events,
'lwnd' : _Prop_clipping_window,
'mprt' : _Prop_minimum_size,
'orig' : _Prop_original_item,
'pspd' : _Prop_stationery,
'sprt' : _Prop_suggested_size,
'ver2' : _Prop_product_version,
'vers' : _Prop_version,
}
_compdeclarations = {
}
_enumdeclarations = {
}
| lgpl-2.1 |
imply/chuu | tools/vim/chromium.ycm_extra_conf.py | 47 | 6223 | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Autocompletion config for YouCompleteMe in Chromium.
#
# USAGE:
#
# 1. Install YCM [https://github.com/Valloric/YouCompleteMe]
# (Googlers should check out [go/ycm])
#
# 2. Point to this config file in your .vimrc:
# let g:ycm_global_ycm_extra_conf =
# '<chrome_depot>/src/tools/vim/chromium.ycm_extra_conf.py'
#
# 3. Profit
#
#
# Usage notes:
#
# * You must use ninja & clang to build Chromium.
#
# * You must have run gyp_chromium and built Chromium recently.
#
#
# Hacking notes:
#
# * The purpose of this script is to construct an accurate enough command line
# for YCM to pass to clang so it can build and extract the symbols.
#
# * Right now, we only pull the -I and -D flags. That seems to be sufficient
# for everything I've used it for.
#
# * That whole ninja & clang thing? We could support other configs if someone
# were willing to write the correct commands and a parser.
#
# * This has only been tested on gPrecise.
import os
import subprocess
# Flags from YCM's default config.
flags = [
'-DUSE_CLANG_COMPLETER',
'-std=c++11',
'-x',
'c++',
]
def PathExists(*args):
return os.path.exists(os.path.join(*args))
def FindChromeSrcFromFilename(filename):
"""Searches for the root of the Chromium checkout.
Simply checks parent directories until it finds .gclient and src/.
Args:
filename: (String) Path to source file being edited.
Returns:
(String) Path of 'src/', or None if unable to find.
"""
curdir = os.path.normpath(os.path.dirname(filename))
while not (PathExists(curdir, 'src') and PathExists(curdir, 'src', 'DEPS')
and (PathExists(curdir, '.gclient')
or PathExists(curdir, 'src', '.git'))):
nextdir = os.path.normpath(os.path.join(curdir, '..'))
if nextdir == curdir:
return None
curdir = nextdir
return os.path.join(curdir, 'src')
# Largely copied from ninja-build.vim (guess_configuration)
def GetNinjaOutputDirectory(chrome_root):
"""Returns either <chrome_root>/out/Release or <chrome_root>/out/Debug.
The configuration chosen is the one most recently generated/built."""
root = os.path.join(chrome_root, 'out')
debug_path = os.path.join(root, 'Debug')
release_path = os.path.join(root, 'Release')
def is_release_15s_newer(test_path):
try:
debug_mtime = os.path.getmtime(os.path.join(debug_path, test_path))
except os.error:
debug_mtime = 0
try:
rel_mtime = os.path.getmtime(os.path.join(release_path, test_path))
except os.error:
rel_mtime = 0
return rel_mtime - debug_mtime >= 15
if is_release_15s_newer('build.ninja') or is_release_15s_newer('protoc'):
return release_path
return debug_path
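# Example (an illustrative note): if out/Release/build.ninja is at least 15
# seconds newer than out/Debug/build.ninja, this returns <chrome_root>/out/Release;
# otherwise it falls back to <chrome_root>/out/Debug.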
def GetClangCommandFromNinjaForFilename(chrome_root, filename):
"""Returns the command line to build |filename|.
Asks ninja how it would build the source file. If the specified file is a
header, tries to find its companion source file first.
Args:
chrome_root: (String) Path to src/.
filename: (String) Path to source file being edited.
Returns:
(List of Strings) Command line arguments for clang.
"""
if not chrome_root:
return []
# Generally, everyone benefits from including Chromium's src/, because all of
# Chromium's includes are relative to that.
chrome_flags = ['-I' + os.path.join(chrome_root)]
# Header files can't be built. Instead, try to match a header file to its
# corresponding source file.
if filename.endswith('.h'):
alternates = ['.cc', '.cpp']
for alt_extension in alternates:
alt_name = filename[:-2] + alt_extension
if os.path.exists(alt_name):
filename = alt_name
break
else:
# If this is a standalone .h file with no source, the best we can do is
# try to use the default flags.
return chrome_flags
# Ninja needs the path to the source file from the output build directory.
# Cut off the common part and /.
subdir_filename = filename[len(chrome_root)+1:]
rel_filename = os.path.join('..', '..', subdir_filename)
out_dir = GetNinjaOutputDirectory(chrome_root)
# Ask ninja how it would build our source file.
p = subprocess.Popen(['ninja', '-v', '-C', out_dir, '-t',
'commands', rel_filename + '^'],
stdout=subprocess.PIPE)
stdout, stderr = p.communicate()
if p.returncode:
return chrome_flags
# Ninja might execute several commands to build something. We want the last
# clang command.
clang_line = None
for line in reversed(stdout.split('\n')):
if 'clang' in line:
clang_line = line
break
else:
return chrome_flags
# Parse out the -I and -D flags. These seem to be the only ones that are
# important for YCM's purposes.
for flag in clang_line.split(' '):
if flag.startswith('-I'):
# Relative paths need to be resolved, because they're relative to the
# output dir, not the source.
if flag[2] == '/':
chrome_flags.append(flag)
else:
abs_path = os.path.normpath(os.path.join(out_dir, flag[2:]))
chrome_flags.append('-I' + abs_path)
elif flag.startswith('-') and flag[1] in 'DWFfmO':
if flag == '-Wno-deprecated-register' or flag == '-Wno-header-guard':
# These flags causes libclang (3.3) to crash. Remove it until things
# are fixed.
continue
chrome_flags.append(flag)
return chrome_flags
def FlagsForFile(filename):
"""This is the main entry point for YCM. Its interface is fixed.
Args:
filename: (String) Path to source file being edited.
Returns:
(Dictionary)
'flags': (List of Strings) Command line flags.
'do_cache': (Boolean) True if the result should be cached.
"""
chrome_root = FindChromeSrcFromFilename(filename)
chrome_flags = GetClangCommandFromNinjaForFilename(chrome_root,
filename)
final_flags = flags + chrome_flags
return {
'flags': final_flags,
'do_cache': True
}
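# Example call (a sketch; the path is hypothetical):
#   FlagsForFile('/work/chromium/src/base/logging.cc')
# would return something like:
#   {'flags': ['-DUSE_CLANG_COMPLETER', '-std=c++11', '-x', 'c++',
#              '-I/work/chromium/src', ...], 'do_cache': True}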
| bsd-3-clause |
matbra/bokeh | examples/interactions/interactive_bubble/data.py | 49 | 1265 | import numpy as np
from bokeh.palettes import Spectral6
def process_data():
from bokeh.sampledata.gapminder import fertility, life_expectancy, population, regions
# Make the column names ints not strings for handling
columns = list(fertility.columns)
years = list(range(int(columns[0]), int(columns[-1])))
rename_dict = dict(zip(columns, years))
fertility = fertility.rename(columns=rename_dict)
life_expectancy = life_expectancy.rename(columns=rename_dict)
population = population.rename(columns=rename_dict)
regions = regions.rename(columns=rename_dict)
# Turn population into bubble sizes. Use min_size and factor to tweak.
scale_factor = 200
population_size = np.sqrt(population / np.pi) / scale_factor
min_size = 3
population_size = population_size.where(population_size >= min_size).fillna(min_size)
# Use pandas categories and categorize & color the regions
regions.Group = regions.Group.astype('category')
regions_list = list(regions.Group.cat.categories)
def get_color(r):
return Spectral6[regions_list.index(r.Group)]
regions['region_color'] = regions.apply(get_color, axis=1)
return fertility, life_expectancy, population_size, regions, years, regions_list
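# Example usage (a sketch; requires the bokeh gapminder sample data to be available):
#   fertility, life_expectancy, population_size, regions, years, regions_list = process_data()
#   population_size.loc['United States', 2010]  # scaled bubble size for one country/year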
| bsd-3-clause |
tehpug/TehPUG-flask | wsgi/app/forms.py | 1 | 1502 | from flask.ext.wtf import Form
from wtforms import TextField, BooleanField, PasswordField, TextAreaField, SelectField
from wtforms.validators import Required, Length, email, url, Optional
import os
class RegisterForm(Form):
username = TextField('username', validators = [Required(), Length(min = 4, max = 50)])
password = PasswordField('password', validators = [Required(), Length(min = 4, max = 50)])
email = TextField('email', validators = [Required(), Length(min = 6, max = 50), email()])
admin = SelectField('admin', choices = [('No','No'), ('Yes','Yes')])
class LoginForm(Form):
username = TextField('username', validators = [Required(), Length(min = 4, max = 50)])
password = PasswordField('password', validators = [Required()])
remember_me = BooleanField('remember_me', default = False)
class AddSessionForm(Form):
title = TextField('title', validators = [Required(), Length(min = 5, max = 100)])
description = TextAreaField('description', validators = [Length(min = 0, max = 4000)])
sound = SelectField('sound', validators = [Optional()])
class AddNewsForm(Form):
title = TextField('title', validators = [Required(), Length(min = 5, max= 100)])
description = TextAreaField('description', validators = [Length(min = 0, max = 4000)])
class EditProfileForm(Form):
email = TextField('email', validators = [Required(), Length(min = 6, max = 50), email()])
website = TextField('website', validators = [url()])
bio = TextAreaField('bio', validators = [Length(max = 256)]) | gpl-2.0 |
ckirby/django | django/contrib/gis/db/backends/mysql/introspection.py | 700 | 1771 | from MySQLdb.constants import FIELD_TYPE
from django.contrib.gis.gdal import OGRGeomType
from django.db.backends.mysql.introspection import DatabaseIntrospection
class MySQLIntrospection(DatabaseIntrospection):
# Updating the data_types_reverse dictionary with the appropriate
# type for Geometry fields.
data_types_reverse = DatabaseIntrospection.data_types_reverse.copy()
data_types_reverse[FIELD_TYPE.GEOMETRY] = 'GeometryField'
def get_geometry_type(self, table_name, geo_col):
cursor = self.connection.cursor()
try:
# In order to get the specific geometry type of the field,
# we introspect on the table definition using `DESCRIBE`.
cursor.execute('DESCRIBE %s' %
self.connection.ops.quote_name(table_name))
# Increment over description info until we get to the geometry
# column.
for column, typ, null, key, default, extra in cursor.fetchall():
if column == geo_col:
# Using OGRGeomType to convert from OGC name to Django field.
# MySQL does not support 3D or SRIDs, so the field params
# are empty.
field_type = OGRGeomType(typ).django
field_params = {}
break
finally:
cursor.close()
return field_type, field_params
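# Example (a sketch): for a table created with a column `geom POINT`,
# get_geometry_type('my_table', 'geom') would return ('PointField', {}).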
def supports_spatial_index(self, cursor, table_name):
# Supported with MyISAM, or InnoDB on MySQL 5.7.5+
storage_engine = self.get_storage_engine(cursor, table_name)
return (
(storage_engine == 'InnoDB' and self.connection.mysql_version >= (5, 7, 5)) or
storage_engine == 'MyISAM'
)
| bsd-3-clause |
shawnadelic/shuup | shuup/core/utils/vat.py | 2 | 8413 | # -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals
import re
import six
from django.core.exceptions import ValidationError
# Patterns from
# http://www.vero.fi/fi-FI/Syventavat_veroohjeet/Arvonlisaverotus/Kansainvalinen_kauppa/EUmaiden_arvonlisaverotunnisteet(14426)
PATTERNS = {
"AT": {
"country": "Austria",
"iso3166": "AT",
"pattern": "U99999999", # Initial always U, then 8 numbers
},
"BE": {
"country": "Belgium",
"iso3166": "BE",
"pattern": "9999999999", # 1 block of 10 digits
},
"BG": {
"country": "Bulgaria",
"iso3166": "BG",
"pattern": [
"999999999", # 1 block of 9 digits
"9999999999", # 1 block of 10 digits
]
},
"CY": {
"country": "Cyprus",
"iso3166": "CY",
"pattern": "99999999L", # 1 block of 9 characters
},
"CZ": {
"country": "Czech Republic",
"iso3166": "CZ",
"pattern": [
"99999999",
"999999999",
"9999999999"
]
},
"DE": {
"country": "Germany",
"iso3166": "DE",
"pattern": "999999999", # 1 block of 9 digits
},
"DK": {
"country": "Denmark",
"iso3166": "DK",
"pattern": "99999999", # 4 blocks of 2 digits
},
"EE": {
"country": "Estonia",
"iso3166": "EE",
"pattern": "999999999", # 1 block of 9 digits
},
"EL": {
"iso3166": "GR",
"country": "Greece",
"pattern": "999999999", # 1 block of 9 digits
},
"ES": {
"country": "Spain",
"iso3166": "ES",
"pattern": [
"X9999999X4", # 1 block of 9 characters
"X99999999",
"99999999X",
"X9999999X"
]
# CIF (Certificado de Identificación Fiscal): This is the tax ID number for all companies.
# It consists of a letter followed by 8 digits. The letter represents the type of company,
# the most common being an 'A' for Sociedad Anónima or a 'B' for Sociedad Limitada.
# For companies nonresident in Spain, the letter is 'N'.
# VAT number (Número IVA): This is 'ES' followed by the CIF.
# From vero.fi: 9 characters where the first or last may be a letter or a digit,
# but the first and last cannot both be digits.
},
"FI": {
"country": "Finland",
"iso3166": "FI",
"pattern": "99999999", # 1 block of 8 digits
},
"FR": {
"country": "France",
"iso3166": "FR",
"pattern": "XX999999999", # 1 block of 2 characters, 1 block of 9 digits
},
"GB": {
"country": "United Kingdom",
"iso3166": "GB",
"pattern": [
"999999999", # 1 block of 9 or 12 digits
"999999999999",
"GD999",
"HA999"
]
},
"HU": {
"iso3166": "HU",
"country": "Hungary",
"pattern": "99999999", # 1 block of 8 digits
},
"HR": {
"iso3166": "HR",
"country": "Croatia",
"pattern": "99999999999", # 1 block of 11 digits
},
"IE": {
"iso3166": "IE",
"country": "Ireland",
"pattern": [
"9S99999L", # 1 block of 8 or 9 characters
"9999999LL"
]
},
"IT": {
"iso3166": "IT",
"country": "Italy",
"pattern": "99999999999", # 1 block of 11 digits
},
"LT": {
"iso3166": "LT",
"country": "Lithuania",
"pattern": [
"999999999",
"999999999999", # 1 block of 9 digits, or 1 block of 12 digits
]
},
"LU": {
"iso3166": "LU",
"country": "Luxembourg",
"pattern": "99999999", # 1 block of 8 digits
},
"LV": {
"country": "Latvia",
"iso3166": "LV",
"pattern": "99999999999", # 1 block of 11 digits
},
"MT": {
"country": "Malta",
"iso3166": "MT",
"pattern": "99999999", # 1 block of 8 digits
},
"NL": {
"country": "The Netherlands",
"iso3166": "NL",
"pattern": "999999999B99", # 1 block of 12 characters. From vero.fi tenth char after country code is allways B
},
"PL": {
"country": "Poland",
"iso3166": "PL",
"pattern": "9999999999", # 1 block of 10 digits
},
"PT": {
"country": "Portugal",
"iso3166": "PT",
"pattern": "999999999", # 1 block of 9 digits
},
"RO": {
"country": "Romania",
"iso3166": "RO",
"pattern": "99R", # 1 block of minimum 2 digits and maximum 10 digits
},
"SE": {
"country": "Sweden",
"iso3166": "SE",
"pattern": "999999999901", # 1 block of 12 digits. From vero.fi 2 last digits is allways 01
},
"SI": {
"country": "Slovenia",
"iso3166": "SI",
"pattern": "99999999", # 1 block of 8 digits
},
"SK": {
"country": "Slovakia",
"iso3166": "SK",
"pattern": "9999999999", # 1 block of 10 digits
},
}
# *: Format excludes 2 letter alpha prefix
# 9: A digit
# X: A letter or a digit
# S: A letter; a digit; "+" or "*"
# L: A letter
def compile_pattern(prefix, pattern):
r = pattern.replace(" ", "")
for gf, gt in (
("9", "[0-9]"),
("R", "[0-9]*"),
("X", "[a-z0-9]"),
("S", "[a-z0-9+*]"),
("L", "[a-z]"),
):
regex_frag = "(%s{%%d})" % gt
def gt(m):
return (regex_frag % len(m.group(0)))
r = re.sub(gf + "+", gt, r)
return re.compile("^" + prefix + r + "$", re.I)
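# For example (an illustrative note): compile_pattern("FI", "99999999") builds a
# regex equivalent to ^FI([0-9]{8})$ (case-insensitive), which matches "FI01234567".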
class VatValidationError(ValidationError):
code = None
def __init__(self, *args, **kwargs):
code = kwargs.pop("code", self.code)
super(VatValidationError, self).__init__(*args, code=code, **kwargs)
class VatCannotIdentifyValidationError(VatValidationError):
code = "vat_cannot_identify"
class VatInvalidValidationError(VatValidationError):
code = "vat_invalid"
def verify_vat(vat_id, default_prefix=""):
""" Verify an EU VAT ID.
Returns a tuple (prefix, code_parts) -- if both are truthy, the validation succeeded.
If the prefix part is falsy, then the prefix was unknown and no validation was even attempted.
If the prefix part is truthy, then it will contain the country prefix used for validation.
The code_parts part can still be falsy, if the validation for the country's VAT number pattern failed.
:param vat_id: The VAT ID string to validate.
:type vat_id: str
:param default_prefix: The default prefix to assume if none can be parsed.
:type default_prefix: str
:return: Tuple of (prefix, code_parts)
"""
# Normalize the VAT ID a little bit...
vat_id = re.sub(r"\s+", "", vat_id.upper())
vat_id = vat_id.replace("-", "") # TODO: Not sure if this is a good idea
prefix = vat_id[:2]
if prefix not in PATTERNS: # Okay, it's unknown thus far, so try again with the default prefix if any
prefix = default_prefix
# Then see if we know about this prefix.
spec = PATTERNS.get(prefix)
if not spec or not prefix: # Sorry, no dice. :/
raise VatCannotIdentifyValidationError("VAT ID could not be identified")
if not vat_id.startswith(prefix): # Add the prefix back into the VAT if required
vat_id = prefix + vat_id
# Get the relevant PATTERNS (one or more) from the spec
patterns = (spec.get("pattern") or [])
if isinstance(patterns, six.string_types):
patterns = [patterns]
for pat in patterns:
regexp = compile_pattern(prefix, pat) # Prefix will be added to the resulting spec.
match = regexp.match(vat_id)
if match:
return (prefix, match.groups())
raise VatInvalidValidationError(
"VAT ID for %(country)s could not be validated" % spec)
def get_vat_prefix_for_country(iso3166):
iso3166 = six.text_type(iso3166).upper()
for prefix, data in six.iteritems(PATTERNS): # pragma: no branch
if data.get("iso3166") == iso3166:
return prefix
| agpl-3.0 |
sekikn/incubator-airflow | airflow/providers/jenkins/example_dags/example_jenkins_job_trigger.py | 8 | 2916 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from datetime import datetime, timedelta
from six.moves.urllib.request import Request
from airflow import DAG
from airflow.operators.python import PythonOperator
from airflow.providers.jenkins.hooks.jenkins import JenkinsHook
from airflow.providers.jenkins.operators.jenkins_job_trigger import JenkinsJobTriggerOperator
default_args = {
"owner": "airflow",
"retries": 1,
"retry_delay": timedelta(minutes=5),
"depends_on_past": False,
"concurrency": 8,
"max_active_runs": 8,
}
with DAG(
"test_jenkins", default_args=default_args, start_date=datetime(2017, 6, 1), schedule_interval=None
) as dag:
job_trigger = JenkinsJobTriggerOperator(
task_id="trigger_job",
job_name="generate-merlin-config",
parameters={"first_parameter": "a_value", "second_parameter": "18"},
# parameters="resources/parameter.json", You can also pass a path to a json file containing your param
jenkins_connection_id="your_jenkins_connection", # T he connection must be configured first
)
def grab_artifact_from_jenkins(**context):
"""
Grab an artifact from the previous job
The python-jenkins library doesn't expose a method for that
But it's totally possible to build manually the request for that
"""
hook = JenkinsHook("your_jenkins_connection")
jenkins_server = hook.get_jenkins_server()
url = context['task_instance'].xcom_pull(task_ids='trigger_job')
# The JenkinsJobTriggerOperator store the job url in the xcom variable corresponding to the task
# You can then use it to access things or to get the job number
# This url looks like : http://jenkins_url/job/job_name/job_number/
url += "artifact/myartifact.xml" # Or any other artifact name
request = Request(url)
response = jenkins_server.jenkins_open(request)
return response # We store the artifact content in a xcom variable for later use
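# Example (a sketch; host and job number are hypothetical): if the trigger task
# pushed 'http://jenkins:8080/job/generate-merlin-config/42/' to XCom, the code
# above fetches http://jenkins:8080/job/generate-merlin-config/42/artifact/myartifact.xml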
artifact_grabber = PythonOperator(task_id='artifact_grabber', python_callable=grab_artifact_from_jenkins)
job_trigger >> artifact_grabber
| apache-2.0 |
samdowd/drumm-farm | drumm_env/lib/python2.7/distutils/__init__.py | 1211 | 3983 | import os
import sys
import warnings
import imp
import opcode # opcode is not a virtualenv module, so we can use it to find the stdlib
# Important! To work on pypy, this must be a module that resides in the
# lib-python/modified-x.y.z directory
dirname = os.path.dirname
distutils_path = os.path.join(os.path.dirname(opcode.__file__), 'distutils')
if os.path.normpath(distutils_path) == os.path.dirname(os.path.normpath(__file__)):
warnings.warn(
"The virtualenv distutils package at %s appears to be in the same location as the system distutils?"
% distutils_path)
else:
__path__.insert(0, distutils_path)
real_distutils = imp.load_module("_virtualenv_distutils", None, distutils_path, ('', '', imp.PKG_DIRECTORY))
# Copy the relevant attributes
try:
__revision__ = real_distutils.__revision__
except AttributeError:
pass
__version__ = real_distutils.__version__
from distutils import dist, sysconfig
try:
basestring
except NameError:
basestring = str
## patch build_ext (distutils doesn't know how to get the libs directory
## path on windows - it hardcodes the paths around the patched sys.prefix)
if sys.platform == 'win32':
from distutils.command.build_ext import build_ext as old_build_ext
class build_ext(old_build_ext):
def finalize_options (self):
if self.library_dirs is None:
self.library_dirs = []
elif isinstance(self.library_dirs, basestring):
self.library_dirs = self.library_dirs.split(os.pathsep)
self.library_dirs.insert(0, os.path.join(sys.real_prefix, "Libs"))
old_build_ext.finalize_options(self)
from distutils.command import build_ext as build_ext_module
build_ext_module.build_ext = build_ext
## distutils.dist patches:
old_find_config_files = dist.Distribution.find_config_files
def find_config_files(self):
found = old_find_config_files(self)
system_distutils = os.path.join(distutils_path, 'distutils.cfg')
#if os.path.exists(system_distutils):
# found.insert(0, system_distutils)
# What to call the per-user config file
if os.name == 'posix':
user_filename = ".pydistutils.cfg"
else:
user_filename = "pydistutils.cfg"
user_filename = os.path.join(sys.prefix, user_filename)
if os.path.isfile(user_filename):
for item in list(found):
if item.endswith('pydistutils.cfg'):
found.remove(item)
found.append(user_filename)
return found
dist.Distribution.find_config_files = find_config_files
## distutils.sysconfig patches:
old_get_python_inc = sysconfig.get_python_inc
def sysconfig_get_python_inc(plat_specific=0, prefix=None):
if prefix is None:
prefix = sys.real_prefix
return old_get_python_inc(plat_specific, prefix)
sysconfig_get_python_inc.__doc__ = old_get_python_inc.__doc__
sysconfig.get_python_inc = sysconfig_get_python_inc
old_get_python_lib = sysconfig.get_python_lib
def sysconfig_get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
if standard_lib and prefix is None:
prefix = sys.real_prefix
return old_get_python_lib(plat_specific, standard_lib, prefix)
sysconfig_get_python_lib.__doc__ = old_get_python_lib.__doc__
sysconfig.get_python_lib = sysconfig_get_python_lib
old_get_config_vars = sysconfig.get_config_vars
def sysconfig_get_config_vars(*args):
real_vars = old_get_config_vars(*args)
if sys.platform == 'win32':
lib_dir = os.path.join(sys.real_prefix, "libs")
if isinstance(real_vars, dict) and 'LIBDIR' not in real_vars:
real_vars['LIBDIR'] = lib_dir # asked for all
elif isinstance(real_vars, list) and 'LIBDIR' in args:
real_vars = real_vars + [lib_dir] # asked for list
return real_vars
sysconfig_get_config_vars.__doc__ = old_get_config_vars.__doc__
sysconfig.get_config_vars = sysconfig_get_config_vars
| mit |
tersmitten/ansible | lib/ansible/parsing/quoting.py | 241 | 1141 | # (c) 2014 James Cammarata, <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
def is_quoted(data):
return len(data) > 1 and data[0] == data[-1] and data[0] in ('"', "'") and data[-2] != '\\'
def unquote(data):
''' removes first and last quotes from a string, if the string starts and ends with the same quotes '''
if is_quoted(data):
return data[1:-1]
return data
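# Examples (an illustrative note):
#   unquote("'hello'")  -> 'hello'
#   unquote('"hi"')     -> 'hi'
#   unquote('plain')    -> 'plain'   (unchanged: not quoted)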
| gpl-3.0 |
sebastic/QGIS | python/plugins/processing/algs/gdal/ogr2ogrclipextent.py | 6 | 3700 | # -*- coding: utf-8 -*-
"""
***************************************************************************
ogr2ogrclipextent.py
---------------------
Date : November 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'November 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from processing.core.parameters import ParameterVector
from processing.core.parameters import ParameterString
from processing.core.parameters import ParameterExtent
from processing.core.outputs import OutputVector
from processing.algs.gdal.GdalAlgorithm import GdalAlgorithm
from processing.algs.gdal.GdalUtils import GdalUtils
from processing.tools.system import isWindows
from processing.tools.vector import ogrConnectionString, ogrLayerName
class Ogr2OgrClipExtent(GdalAlgorithm):
OUTPUT_LAYER = 'OUTPUT_LAYER'
INPUT_LAYER = 'INPUT_LAYER'
CLIP_EXTENT = 'CLIP_EXTENT'
OPTIONS = 'OPTIONS'
def defineCharacteristics(self):
self.name, self.i18n_name = self.trAlgorithm('Clip vectors by extent')
self.group, self.i18n_group = self.trAlgorithm('[OGR] Geoprocessing')
self.addParameter(ParameterVector(self.INPUT_LAYER,
self.tr('Input layer'), [ParameterVector.VECTOR_TYPE_ANY], False))
self.addParameter(ParameterExtent(self.CLIP_EXTENT,
self.tr('Clip extent')))
self.addParameter(ParameterString(self.OPTIONS,
self.tr('Additional creation options'), '', optional=True))
self.addOutput(OutputVector(self.OUTPUT_LAYER, self.tr('Clipped (extent)')))
def getConsoleCommands(self):
inLayer = self.getParameterValue(self.INPUT_LAYER)
ogrLayer = ogrConnectionString(inLayer)[1:-1]
clipExtent = self.getParameterValue(self.CLIP_EXTENT)
output = self.getOutputFromName(self.OUTPUT_LAYER)
outFile = output.value
output = ogrConnectionString(outFile)
options = unicode(self.getParameterValue(self.OPTIONS))
arguments = []
regionCoords = clipExtent.split(',')
arguments.append('-spat')
arguments.append(regionCoords[0])
arguments.append(regionCoords[2])
arguments.append(regionCoords[1])
arguments.append(regionCoords[3])
arguments.append('-clipsrc spat_extent')
if len(options) > 0:
arguments.append(options)
arguments.append(output)
arguments.append(ogrLayer)
arguments.append(ogrLayerName(inLayer))
commands = []
if isWindows():
commands = ['cmd.exe', '/C ', 'ogr2ogr.exe',
GdalUtils.escapeAndJoin(arguments)]
else:
commands = ['ogr2ogr', GdalUtils.escapeAndJoin(arguments)]
return commands
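# Example resulting command (a sketch; paths are hypothetical) for the extent
# '10,20,40,50' given as xmin,xmax,ymin,ymax:
#   ogr2ogr -spat 10 40 20 50 -clipsrc spat_extent /tmp/clipped.shp /data/input.shp input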
def commandName(self):
return "ogr2ogr"
| gpl-2.0 |
Audacity-Team/Audacity | lib-src/lv2/lv2/plugins/eg02-midigate.lv2/waflib/Tools/msvc.py | 70 | 27831 | #! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
import os,sys,re,tempfile
from waflib import Utils,Task,Logs,Options,Errors
from waflib.Logs import debug,warn
from waflib.TaskGen import after_method,feature
from waflib.Configure import conf
from waflib.Tools import ccroot,c,cxx,ar,winres
g_msvc_systemlibs='''
aclui activeds ad1 adptif adsiid advapi32 asycfilt authz bhsupp bits bufferoverflowu cabinet
cap certadm certidl ciuuid clusapi comctl32 comdlg32 comsupp comsuppd comsuppw comsuppwd comsvcs
credui crypt32 cryptnet cryptui d3d8thk daouuid dbgeng dbghelp dciman32 ddao35 ddao35d
ddao35u ddao35ud delayimp dhcpcsvc dhcpsapi dlcapi dnsapi dsprop dsuiext dtchelp
faultrep fcachdll fci fdi framedyd framedyn gdi32 gdiplus glauxglu32 gpedit gpmuuid
gtrts32w gtrtst32hlink htmlhelp httpapi icm32 icmui imagehlp imm32 iphlpapi iprop
kernel32 ksguid ksproxy ksuser libcmt libcmtd libcpmt libcpmtd loadperf lz32 mapi
mapi32 mgmtapi minidump mmc mobsync mpr mprapi mqoa mqrt msacm32 mscms mscoree
msdasc msimg32 msrating mstask msvcmrt msvcurt msvcurtd mswsock msxml2 mtx mtxdm
netapi32 nmapinmsupp npptools ntdsapi ntdsbcli ntmsapi ntquery odbc32 odbcbcp
odbccp32 oldnames ole32 oleacc oleaut32 oledb oledlgolepro32 opends60 opengl32
osptk parser pdh penter pgobootrun pgort powrprof psapi ptrustm ptrustmd ptrustu
ptrustud qosname rasapi32 rasdlg rassapi resutils riched20 rpcndr rpcns4 rpcrt4 rtm
rtutils runtmchk scarddlg scrnsave scrnsavw secur32 sensapi setupapi sfc shell32
shfolder shlwapi sisbkup snmpapi sporder srclient sti strsafe svcguid tapi32 thunk32
traffic unicows url urlmon user32 userenv usp10 uuid uxtheme vcomp vcompd vdmdbg
version vfw32 wbemuuid webpost wiaguid wininet winmm winscard winspool winstrm
wintrust wldap32 wmiutils wow32 ws2_32 wsnmp32 wsock32 wst wtsapi32 xaswitch xolehlp
'''.split()
all_msvc_platforms=[('x64','amd64'),('x86','x86'),('ia64','ia64'),('x86_amd64','amd64'),('x86_ia64','ia64'),('x86_arm','arm')]
all_wince_platforms=[('armv4','arm'),('armv4i','arm'),('mipsii','mips'),('mipsii_fp','mips'),('mipsiv','mips'),('mipsiv_fp','mips'),('sh4','sh'),('x86','cex86')]
all_icl_platforms=[('intel64','amd64'),('em64t','amd64'),('ia32','x86'),('Itanium','ia64')]
def options(opt):
opt.add_option('--msvc_version',type='string',help='msvc version, eg: "msvc 10.0,msvc 9.0"',default='')
opt.add_option('--msvc_targets',type='string',help='msvc targets, eg: "x64,arm"',default='')
def setup_msvc(conf,versions,arch=False):
platforms=getattr(Options.options,'msvc_targets','').split(',')
if platforms==['']:
platforms=Utils.to_list(conf.env['MSVC_TARGETS'])or[i for i,j in all_msvc_platforms+all_icl_platforms+all_wince_platforms]
desired_versions=getattr(Options.options,'msvc_version','').split(',')
if desired_versions==['']:
desired_versions=conf.env['MSVC_VERSIONS']or[v for v,_ in versions][::-1]
versiondict=dict(versions)
for version in desired_versions:
try:
targets=dict(versiondict[version])
for target in platforms:
try:
arch,(p1,p2,p3)=targets[target]
compiler,revision=version.rsplit(' ',1)
if arch:
return compiler,revision,p1,p2,p3,arch
else:
return compiler,revision,p1,p2,p3
except KeyError:continue
except KeyError:continue
conf.fatal('msvc: Impossible to find a valid architecture for building (in setup_msvc)')
@conf
def get_msvc_version(conf,compiler,version,target,vcvars):
debug('msvc: get_msvc_version: %r %r %r',compiler,version,target)
batfile=conf.bldnode.make_node('waf-print-msvc.bat')
batfile.write("""@echo off
set INCLUDE=
set LIB=
call "%s" %s
echo PATH=%%PATH%%
echo INCLUDE=%%INCLUDE%%
echo LIB=%%LIB%%;%%LIBPATH%%
"""%(vcvars,target))
sout=conf.cmd_and_log(['cmd','/E:on','/V:on','/C',batfile.abspath()])
lines=sout.splitlines()
if not lines[0]:
lines.pop(0)
MSVC_PATH=MSVC_INCDIR=MSVC_LIBDIR=None
for line in lines:
if line.startswith('PATH='):
path=line[5:]
MSVC_PATH=path.split(';')
elif line.startswith('INCLUDE='):
MSVC_INCDIR=[i for i in line[8:].split(';')if i]
elif line.startswith('LIB='):
MSVC_LIBDIR=[i for i in line[4:].split(';')if i]
if None in(MSVC_PATH,MSVC_INCDIR,MSVC_LIBDIR):
conf.fatal('msvc: Could not find a valid architecture for building (get_msvc_version_3)')
env=dict(os.environ)
env.update(PATH=path)
compiler_name,linker_name,lib_name=_get_prog_names(conf,compiler)
cxx=conf.find_program(compiler_name,path_list=MSVC_PATH)
cxx=conf.cmd_to_list(cxx)
if'CL'in env:
del(env['CL'])
try:
try:
conf.cmd_and_log(cxx+['/help'],env=env)
except Exception ,e:
debug('msvc: get_msvc_version: %r %r %r -> failure'%(compiler,version,target))
debug(str(e))
conf.fatal('msvc: cannot run the compiler (in get_msvc_version)')
else:
debug('msvc: get_msvc_version: %r %r %r -> OK',compiler,version,target)
finally:
conf.env[compiler_name]=''
return(MSVC_PATH,MSVC_INCDIR,MSVC_LIBDIR)
@conf
def gather_wsdk_versions(conf,versions):
version_pattern=re.compile('^v..?.?\...?.?')
try:
all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Microsoft\\Microsoft SDKs\\Windows')
except WindowsError:
try:
all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Microsoft\\Microsoft SDKs\\Windows')
except WindowsError:
return
index=0
while 1:
try:
version=Utils.winreg.EnumKey(all_versions,index)
except WindowsError:
break
index=index+1
if not version_pattern.match(version):
continue
try:
msvc_version=Utils.winreg.OpenKey(all_versions,version)
path,type=Utils.winreg.QueryValueEx(msvc_version,'InstallationFolder')
except WindowsError:
continue
if os.path.isfile(os.path.join(path,'bin','SetEnv.cmd')):
targets=[]
for target,arch in all_msvc_platforms:
try:
targets.append((target,(arch,conf.get_msvc_version('wsdk',version,'/'+target,os.path.join(path,'bin','SetEnv.cmd')))))
except conf.errors.ConfigurationError:
pass
versions.append(('wsdk '+version[1:],targets))
def gather_wince_supported_platforms():
supported_wince_platforms=[]
try:
ce_sdk=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Microsoft\\Windows CE Tools\\SDKs')
except WindowsError:
try:
ce_sdk=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Microsoft\\Windows CE Tools\\SDKs')
except WindowsError:
ce_sdk=''
if not ce_sdk:
return supported_wince_platforms
ce_index=0
while 1:
try:
sdk_device=Utils.winreg.EnumKey(ce_sdk,ce_index)
except WindowsError:
break
ce_index=ce_index+1
sdk=Utils.winreg.OpenKey(ce_sdk,sdk_device)
try:
path,type=Utils.winreg.QueryValueEx(sdk,'SDKRootDir')
except WindowsError:
try:
path,type=Utils.winreg.QueryValueEx(sdk,'SDKInformation')
path,xml=os.path.split(path)
except WindowsError:
continue
path=str(path)
path,device=os.path.split(path)
if not device:
path,device=os.path.split(path)
for arch,compiler in all_wince_platforms:
platforms=[]
if os.path.isdir(os.path.join(path,device,'Lib',arch)):
platforms.append((arch,compiler,os.path.join(path,device,'Include',arch),os.path.join(path,device,'Lib',arch)))
if platforms:
supported_wince_platforms.append((device,platforms))
return supported_wince_platforms
def gather_msvc_detected_versions():
version_pattern=re.compile('^(\d\d?\.\d\d?)(Exp)?$')
detected_versions=[]
for vcver,vcvar in[('VCExpress','Exp'),('VisualStudio','')]:
try:
prefix='SOFTWARE\\Wow6432node\\Microsoft\\'+vcver
all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,prefix)
except WindowsError:
try:
prefix='SOFTWARE\\Microsoft\\'+vcver
all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,prefix)
except WindowsError:
continue
index=0
while 1:
try:
version=Utils.winreg.EnumKey(all_versions,index)
except WindowsError:
break
index=index+1
match=version_pattern.match(version)
if not match:
continue
else:
versionnumber=float(match.group(1))
detected_versions.append((versionnumber,version+vcvar,prefix+"\\"+version))
def fun(tup):
return tup[0]
detected_versions.sort(key=fun)
return detected_versions
@conf
def gather_msvc_targets(conf,versions,version,vc_path):
targets=[]
if os.path.isfile(os.path.join(vc_path,'vcvarsall.bat')):
for target,realtarget in all_msvc_platforms[::-1]:
try:
targets.append((target,(realtarget,conf.get_msvc_version('msvc',version,target,os.path.join(vc_path,'vcvarsall.bat')))))
except conf.errors.ConfigurationError:
pass
elif os.path.isfile(os.path.join(vc_path,'Common7','Tools','vsvars32.bat')):
try:
targets.append(('x86',('x86',conf.get_msvc_version('msvc',version,'x86',os.path.join(vc_path,'Common7','Tools','vsvars32.bat')))))
except conf.errors.ConfigurationError:
pass
elif os.path.isfile(os.path.join(vc_path,'Bin','vcvars32.bat')):
try:
targets.append(('x86',('x86',conf.get_msvc_version('msvc',version,'',os.path.join(vc_path,'Bin','vcvars32.bat')))))
except conf.errors.ConfigurationError:
pass
if targets:
versions.append(('msvc '+version,targets))
@conf
def gather_wince_targets(conf,versions,version,vc_path,vsvars,supported_platforms):
for device,platforms in supported_platforms:
cetargets=[]
for platform,compiler,include,lib in platforms:
winCEpath=os.path.join(vc_path,'ce')
if not os.path.isdir(winCEpath):
continue
try:
common_bindirs,_1,_2=conf.get_msvc_version('msvc',version,'x86',vsvars)
except conf.errors.ConfigurationError:
continue
if os.path.isdir(os.path.join(winCEpath,'lib',platform)):
bindirs=[os.path.join(winCEpath,'bin',compiler),os.path.join(winCEpath,'bin','x86_'+compiler)]+common_bindirs
incdirs=[os.path.join(winCEpath,'include'),os.path.join(winCEpath,'atlmfc','include'),include]
libdirs=[os.path.join(winCEpath,'lib',platform),os.path.join(winCEpath,'atlmfc','lib',platform),lib]
cetargets.append((platform,(platform,(bindirs,incdirs,libdirs))))
if cetargets:
versions.append((device+' '+version,cetargets))
@conf
def gather_winphone_targets(conf,versions,version,vc_path,vsvars):
targets=[]
for target,realtarget in all_msvc_platforms[::-1]:
try:
targets.append((target,(realtarget,conf.get_msvc_version('winphone',version,target,vsvars))))
except conf.errors.ConfigurationError ,e:
pass
if targets:
versions.append(('winphone '+version,targets))
@conf
def gather_msvc_versions(conf,versions):
vc_paths=[]
for(v,version,reg)in gather_msvc_detected_versions():
try:
try:
msvc_version=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,reg+"\\Setup\\VC")
except WindowsError:
msvc_version=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,reg+"\\Setup\\Microsoft Visual C++")
path,type=Utils.winreg.QueryValueEx(msvc_version,'ProductDir')
vc_paths.append((version,os.path.abspath(str(path))))
except WindowsError:
continue
wince_supported_platforms=gather_wince_supported_platforms()
for version,vc_path in vc_paths:
vs_path=os.path.dirname(vc_path)
vsvars=os.path.join(vs_path,'Common7','Tools','vsvars32.bat')
if wince_supported_platforms and os.path.isfile(vsvars):
conf.gather_wince_targets(versions,version,vc_path,vsvars,wince_supported_platforms)
vsvars=os.path.join(vs_path,'VC','WPSDK','WP80','vcvarsphoneall.bat')
if os.path.isfile(vsvars):
conf.gather_winphone_targets(versions,'8.0',vc_path,vsvars)
for version,vc_path in vc_paths:
vs_path=os.path.dirname(vc_path)
conf.gather_msvc_targets(versions,version,vc_path)
@conf
def gather_icl_versions(conf,versions):
version_pattern=re.compile('^...?.?\....?.?')
try:
all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Intel\\Compilers\\C++')
except WindowsError:
try:
all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Intel\\Compilers\\C++')
except WindowsError:
return
index=0
while 1:
try:
version=Utils.winreg.EnumKey(all_versions,index)
except WindowsError:
break
index=index+1
if not version_pattern.match(version):
continue
targets=[]
for target,arch in all_icl_platforms:
try:
if target=='intel64':targetDir='EM64T_NATIVE'
else:targetDir=target
Utils.winreg.OpenKey(all_versions,version+'\\'+targetDir)
icl_version=Utils.winreg.OpenKey(all_versions,version)
path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir')
batch_file=os.path.join(path,'bin','iclvars.bat')
if os.path.isfile(batch_file):
try:
targets.append((target,(arch,conf.get_msvc_version('intel',version,target,batch_file))))
except conf.errors.ConfigurationError:
pass
except WindowsError:
pass
for target,arch in all_icl_platforms:
try:
icl_version=Utils.winreg.OpenKey(all_versions,version+'\\'+target)
path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir')
batch_file=os.path.join(path,'bin','iclvars.bat')
if os.path.isfile(batch_file):
try:
targets.append((target,(arch,conf.get_msvc_version('intel',version,target,batch_file))))
except conf.errors.ConfigurationError:
pass
except WindowsError:
continue
major=version[0:2]
versions.append(('intel '+major,targets))
@conf
def gather_intel_composer_versions(conf,versions):
version_pattern=re.compile('^...?.?\...?.?.?')
try:
all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Intel\\Suites')
except WindowsError:
try:
all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Intel\\Suites')
except WindowsError:
return
index=0
while 1:
try:
version=Utils.winreg.EnumKey(all_versions,index)
except WindowsError:
break
index=index+1
if not version_pattern.match(version):
continue
targets=[]
for target,arch in all_icl_platforms:
try:
if target=='intel64':targetDir='EM64T_NATIVE'
else:targetDir=target
try:
defaults=Utils.winreg.OpenKey(all_versions,version+'\\Defaults\\C++\\'+targetDir)
except WindowsError:
if targetDir=='EM64T_NATIVE':
defaults=Utils.winreg.OpenKey(all_versions,version+'\\Defaults\\C++\\EM64T')
else:
raise WindowsError
uid,type=Utils.winreg.QueryValueEx(defaults,'SubKey')
Utils.winreg.OpenKey(all_versions,version+'\\'+uid+'\\C++\\'+targetDir)
icl_version=Utils.winreg.OpenKey(all_versions,version+'\\'+uid+'\\C++')
path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir')
batch_file=os.path.join(path,'bin','iclvars.bat')
if os.path.isfile(batch_file):
try:
targets.append((target,(arch,conf.get_msvc_version('intel',version,target,batch_file))))
except conf.errors.ConfigurationError ,e:
pass
compilervars_warning_attr='_compilervars_warning_key'
if version[0:2]=='13'and getattr(conf,compilervars_warning_attr,True):
setattr(conf,compilervars_warning_attr,False)
patch_url='http://software.intel.com/en-us/forums/topic/328487'
compilervars_arch=os.path.join(path,'bin','compilervars_arch.bat')
for vscomntools in['VS110COMNTOOLS','VS100COMNTOOLS']:
if os.environ.has_key(vscomntools):
vs_express_path=os.environ[vscomntools]+r'..\IDE\VSWinExpress.exe'
dev_env_path=os.environ[vscomntools]+r'..\IDE\devenv.exe'
if(r'if exist "%VS110COMNTOOLS%..\IDE\VSWinExpress.exe"'in Utils.readf(compilervars_arch)and not os.path.exists(vs_express_path)and not os.path.exists(dev_env_path)):
Logs.warn(('The Intel compilervar_arch.bat only checks for one Visual Studio SKU ''(VSWinExpress.exe) but it does not seem to be installed at %r. ''The intel command line set up will fail to configure unless the file %r ''is patched. See: %s')%(vs_express_path,compilervars_arch,patch_url))
except WindowsError:
pass
major=version[0:2]
versions.append(('intel '+major,targets))
@conf
def get_msvc_versions(conf):
if not conf.env['MSVC_INSTALLED_VERSIONS']:
lst=[]
conf.gather_icl_versions(lst)
conf.gather_intel_composer_versions(lst)
conf.gather_wsdk_versions(lst)
conf.gather_msvc_versions(lst)
conf.env['MSVC_INSTALLED_VERSIONS']=lst
return conf.env['MSVC_INSTALLED_VERSIONS']
@conf
def print_all_msvc_detected(conf):
for version,targets in conf.env['MSVC_INSTALLED_VERSIONS']:
Logs.info(version)
for target,l in targets:
Logs.info("\t"+target)
@conf
def detect_msvc(conf,arch=False):
versions=get_msvc_versions(conf)
return setup_msvc(conf,versions,arch)
@conf
def find_lt_names_msvc(self,libname,is_static=False):
lt_names=['lib%s.la'%libname,'%s.la'%libname,]
for path in self.env['LIBPATH']:
for la in lt_names:
laf=os.path.join(path,la)
dll=None
if os.path.exists(laf):
ltdict=Utils.read_la_file(laf)
lt_libdir=None
if ltdict.get('libdir',''):
lt_libdir=ltdict['libdir']
if not is_static and ltdict.get('library_names',''):
dllnames=ltdict['library_names'].split()
dll=dllnames[0].lower()
dll=re.sub('\.dll$','',dll)
return(lt_libdir,dll,False)
elif ltdict.get('old_library',''):
olib=ltdict['old_library']
if os.path.exists(os.path.join(path,olib)):
return(path,olib,True)
elif lt_libdir!=''and os.path.exists(os.path.join(lt_libdir,olib)):
return(lt_libdir,olib,True)
else:
return(None,olib,True)
else:
raise self.errors.WafError('invalid libtool object file: %s'%laf)
return(None,None,None)
@conf
def libname_msvc(self,libname,is_static=False):
lib=libname.lower()
lib=re.sub('\.lib$','',lib)
if lib in g_msvc_systemlibs:
return lib
lib=re.sub('^lib','',lib)
if lib=='m':
return None
(lt_path,lt_libname,lt_static)=self.find_lt_names_msvc(lib,is_static)
if lt_path!=None and lt_libname!=None:
if lt_static==True:
return os.path.join(lt_path,lt_libname)
if lt_path!=None:
_libpaths=[lt_path]+self.env['LIBPATH']
else:
_libpaths=self.env['LIBPATH']
static_libs=['lib%ss.lib'%lib,'lib%s.lib'%lib,'%ss.lib'%lib,'%s.lib'%lib,]
dynamic_libs=['lib%s.dll.lib'%lib,'lib%s.dll.a'%lib,'%s.dll.lib'%lib,'%s.dll.a'%lib,'lib%s_d.lib'%lib,'%s_d.lib'%lib,'%s.lib'%lib,]
libnames=static_libs
if not is_static:
libnames=dynamic_libs+static_libs
for path in _libpaths:
for libn in libnames:
if os.path.exists(os.path.join(path,libn)):
debug('msvc: lib found: %s'%os.path.join(path,libn))
return re.sub('\.lib$','',libn)
self.fatal("The library %r could not be found"%libname)
return re.sub('\.lib$','',libname)
@conf
def check_lib_msvc(self,libname,is_static=False,uselib_store=None):
libn=self.libname_msvc(libname,is_static)
if not uselib_store:
uselib_store=libname.upper()
if False and is_static:
self.env['STLIB_'+uselib_store]=[libn]
else:
self.env['LIB_'+uselib_store]=[libn]
@conf
def check_libs_msvc(self,libnames,is_static=False):
for libname in Utils.to_list(libnames):
self.check_lib_msvc(libname,is_static)
def configure(conf):
conf.autodetect(True)
conf.find_msvc()
conf.msvc_common_flags()
conf.cc_load_tools()
conf.cxx_load_tools()
conf.cc_add_flags()
conf.cxx_add_flags()
conf.link_add_flags()
conf.visual_studio_add_flags()
@conf
def no_autodetect(conf):
conf.env.NO_MSVC_DETECT=1
configure(conf)
@conf
def autodetect(conf,arch=False):
v=conf.env
if v.NO_MSVC_DETECT:
return
if arch:
compiler,version,path,includes,libdirs,arch=conf.detect_msvc(True)
v['DEST_CPU']=arch
else:
compiler,version,path,includes,libdirs=conf.detect_msvc()
v['PATH']=path
v['INCLUDES']=includes
v['LIBPATH']=libdirs
v['MSVC_COMPILER']=compiler
try:
v['MSVC_VERSION']=float(version)
except Exception:
v['MSVC_VERSION']=float(version[:-3])
def _get_prog_names(conf,compiler):
if compiler=='intel':
compiler_name='ICL'
linker_name='XILINK'
lib_name='XILIB'
else:
compiler_name='CL'
linker_name='LINK'
lib_name='LIB'
return compiler_name,linker_name,lib_name
@conf
def find_msvc(conf):
if sys.platform=='cygwin':
conf.fatal('MSVC module does not work under cygwin Python!')
v=conf.env
path=v['PATH']
compiler=v['MSVC_COMPILER']
version=v['MSVC_VERSION']
compiler_name,linker_name,lib_name=_get_prog_names(conf,compiler)
v.MSVC_MANIFEST=(compiler=='msvc'and version>=8)or(compiler=='wsdk'and version>=6)or(compiler=='intel'and version>=11)
cxx=None
if v['CXX']:cxx=v['CXX']
elif'CXX'in conf.environ:cxx=conf.environ['CXX']
cxx=conf.find_program(compiler_name,var='CXX',path_list=path)
cxx=conf.cmd_to_list(cxx)
env=dict(conf.environ)
if path:env.update(PATH=';'.join(path))
if not conf.cmd_and_log(cxx+['/nologo','/help'],env=env):
conf.fatal('the msvc compiler could not be identified')
v['CC']=v['CXX']=cxx
v['CC_NAME']=v['CXX_NAME']='msvc'
if not v['LINK_CXX']:
link=conf.find_program(linker_name,path_list=path)
if link:v['LINK_CXX']=link
else:conf.fatal('%s was not found (linker)'%linker_name)
v['LINK']=link
if not v['LINK_CC']:
v['LINK_CC']=v['LINK_CXX']
if not v['AR']:
stliblink=conf.find_program(lib_name,path_list=path,var='AR')
if not stliblink:return
v['ARFLAGS']=['/NOLOGO']
if v.MSVC_MANIFEST:
conf.find_program('MT',path_list=path,var='MT')
v['MTFLAGS']=['/NOLOGO']
try:
conf.load('winres')
except Errors.WafError:
warn('Resource compiler not found. Compiling resource file is disabled')
@conf
def visual_studio_add_flags(self):
v=self.env
try:v.prepend_value('INCLUDES',[x for x in self.environ['INCLUDE'].split(';')if x])
except Exception:pass
try:v.prepend_value('LIBPATH',[x for x in self.environ['LIB'].split(';')if x])
except Exception:pass
@conf
def msvc_common_flags(conf):
v=conf.env
v['DEST_BINFMT']='pe'
v.append_value('CFLAGS',['/nologo'])
v.append_value('CXXFLAGS',['/nologo'])
v['DEFINES_ST']='/D%s'
v['CC_SRC_F']=''
v['CC_TGT_F']=['/c','/Fo']
if v['MSVC_VERSION']>=8:
v['CC_TGT_F']=['/FC']+v['CC_TGT_F']
v['CXX_SRC_F']=''
v['CXX_TGT_F']=['/c','/Fo']
if v['MSVC_VERSION']>=8:
v['CXX_TGT_F']=['/FC']+v['CXX_TGT_F']
v['CPPPATH_ST']='/I%s'
v['AR_TGT_F']=v['CCLNK_TGT_F']=v['CXXLNK_TGT_F']='/OUT:'
v['CFLAGS_CONSOLE']=v['CXXFLAGS_CONSOLE']=['/SUBSYSTEM:CONSOLE']
v['CFLAGS_NATIVE']=v['CXXFLAGS_NATIVE']=['/SUBSYSTEM:NATIVE']
v['CFLAGS_POSIX']=v['CXXFLAGS_POSIX']=['/SUBSYSTEM:POSIX']
v['CFLAGS_WINDOWS']=v['CXXFLAGS_WINDOWS']=['/SUBSYSTEM:WINDOWS']
v['CFLAGS_WINDOWSCE']=v['CXXFLAGS_WINDOWSCE']=['/SUBSYSTEM:WINDOWSCE']
v['CFLAGS_CRT_MULTITHREADED']=v['CXXFLAGS_CRT_MULTITHREADED']=['/MT']
v['CFLAGS_CRT_MULTITHREADED_DLL']=v['CXXFLAGS_CRT_MULTITHREADED_DLL']=['/MD']
v['CFLAGS_CRT_MULTITHREADED_DBG']=v['CXXFLAGS_CRT_MULTITHREADED_DBG']=['/MTd']
v['CFLAGS_CRT_MULTITHREADED_DLL_DBG']=v['CXXFLAGS_CRT_MULTITHREADED_DLL_DBG']=['/MDd']
v['LIB_ST']='%s.lib'
v['LIBPATH_ST']='/LIBPATH:%s'
v['STLIB_ST']='%s.lib'
v['STLIBPATH_ST']='/LIBPATH:%s'
v.append_value('LINKFLAGS',['/NOLOGO'])
if v['MSVC_MANIFEST']:
v.append_value('LINKFLAGS',['/MANIFEST'])
v['CFLAGS_cshlib']=[]
v['CXXFLAGS_cxxshlib']=[]
v['LINKFLAGS_cshlib']=v['LINKFLAGS_cxxshlib']=['/DLL']
v['cshlib_PATTERN']=v['cxxshlib_PATTERN']='%s.dll'
v['implib_PATTERN']='%s.lib'
v['IMPLIB_ST']='/IMPLIB:%s'
v['LINKFLAGS_cstlib']=[]
v['cstlib_PATTERN']=v['cxxstlib_PATTERN']='%s.lib'
v['cprogram_PATTERN']=v['cxxprogram_PATTERN']='%s.exe'
@after_method('apply_link')
@feature('c','cxx')
def apply_flags_msvc(self):
if self.env.CC_NAME!='msvc'or not getattr(self,'link_task',None):
return
is_static=isinstance(self.link_task,ccroot.stlink_task)
subsystem=getattr(self,'subsystem','')
if subsystem:
subsystem='/subsystem:%s'%subsystem
flags=is_static and'ARFLAGS'or'LINKFLAGS'
self.env.append_value(flags,subsystem)
if not is_static:
for f in self.env.LINKFLAGS:
d=f.lower()
if d[1:]=='debug':
pdbnode=self.link_task.outputs[0].change_ext('.pdb')
self.link_task.outputs.append(pdbnode)
try:
self.install_task.source.append(pdbnode)
except AttributeError:
pass
break
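# When manifests are in play, declare <output>.manifest as an additional link
# output and mark the task so exec_mf() embeds it right after linking.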
@feature('cprogram','cshlib','cxxprogram','cxxshlib')
@after_method('apply_link')
def apply_manifest(self):
if self.env.CC_NAME=='msvc'and self.env.MSVC_MANIFEST and getattr(self,'link_task',None):
out_node=self.link_task.outputs[0]
man_node=out_node.parent.find_or_declare(out_node.name+'.manifest')
self.link_task.outputs.append(man_node)
self.link_task.do_manifest=True
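# Embed the manifest with mt.exe; the trailing ';1' / ';2' in -outputresource
# selects the conventional resource ID (1 for executables, 2 for DLLs).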
def exec_mf(self):
env=self.env
mtool=env['MT']
if not mtool:
return 0
self.do_manifest=False
outfile=self.outputs[0].abspath()
manifest=None
for out_node in self.outputs:
if out_node.name.endswith('.manifest'):
manifest=out_node.abspath()
break
if manifest is None:
return 0
mode=''
if'cprogram'in self.generator.features or'cxxprogram'in self.generator.features:
mode='1'
elif'cshlib'in self.generator.features or'cxxshlib'in self.generator.features:
mode='2'
debug('msvc: embedding manifest in mode %r'%mode)
lst=[]
lst.append(env['MT'])
lst.extend(Utils.to_list(env['MTFLAGS']))
lst.extend(['-manifest',manifest])
lst.append('-outputresource:%s;%s'%(outfile,mode))
lst=[lst]
return self.exec_command(*lst)
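# Windows command lines max out around 8k characters, so overly long link/lib
# commands are rewritten to use an @response file that the Microsoft tools read
# back. Flags containing spaces must be quoted first, e.g. (illustrative):
#   quote_response_command('/LIBPATH:C:\\Program Files\\x')
#   -> '/LIBPATH:"C:\\Program Files\\x"'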
def quote_response_command(self,flag):
if flag.find(' ')>-1:
for x in('/LIBPATH:','/IMPLIB:','/OUT:','/I'):
if flag.startswith(x):
flag='%s"%s"'%(x,flag[len(x):])
break
else:
flag='"%s"'%flag
return flag
def exec_response_command(self,cmd,**kw):
try:
tmp=None
if sys.platform.startswith('win')and isinstance(cmd,list)and len(' '.join(cmd))>=8192:
program=cmd[0]
cmd=[self.quote_response_command(x)for x in cmd]
(fd,tmp)=tempfile.mkstemp()
os.write(fd,'\r\n'.join(i.replace('\\','\\\\')for i in cmd[1:]))
os.close(fd)
cmd=[program,'@'+tmp]
ret=self.generator.bld.exec_command(cmd,**kw)
finally:
if tmp:
try:
os.remove(tmp)
except OSError:
pass
return ret
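# cl.exe expects some flags fused with their argument; the loop below merges a
# dangling '/Fo', '/doc' or any flag ending in ':' with the following list
# item, e.g. ['/Fo', 'build\\a.obj'] becomes ['/Fobuild\\a.obj'].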
def exec_command_msvc(self,*k,**kw):
if isinstance(k[0],list):
lst=[]
carry=''
for a in k[0]:
if a=='/Fo'or a=='/doc'or a[-1]==':':
carry=a
else:
lst.append(carry+a)
carry=''
k=[lst]
if self.env['PATH']:
env=dict(self.env.env or os.environ)
env.update(PATH=';'.join(self.env['PATH']))
kw['env']=env
bld=self.generator.bld
try:
if not kw.get('cwd',None):
kw['cwd']=bld.cwd
except AttributeError:
bld.cwd=kw['cwd']=bld.variant_dir
ret=self.exec_response_command(k[0],**kw)
if not ret and getattr(self,'do_manifest',None):
ret=self.exec_mf()
return ret
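# Subclass each c/cxx task class so exec_command is routed through the msvc
# wrappers above only when CC_NAME is 'msvc'; waf's task metaclass re-registers
# the derived class under the original name, so other compilers are unaffected.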
def wrap_class(class_name):
cls=Task.classes.get(class_name,None)
if not cls:
return None
derived_class=type(class_name,(cls,),{})
def exec_command(self,*k,**kw):
if self.env['CC_NAME']=='msvc':
return self.exec_command_msvc(*k,**kw)
else:
return super(derived_class,self).exec_command(*k,**kw)
derived_class.exec_command=exec_command
derived_class.exec_response_command=exec_response_command
derived_class.quote_response_command=quote_response_command
derived_class.exec_command_msvc=exec_command_msvc
derived_class.exec_mf=exec_mf
return derived_class
for k in'c cxx cprogram cxxprogram cshlib cxxshlib cstlib cxxstlib'.split():
wrap_class(k)
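# Windows Store / Windows Phone support: the WINAPI_FAMILY define selects the
# API partition, /ZW enables the C++/CX extensions (which imply C++, hence
# /TP), and /AI points the compiler at directories holding .winmd metadata.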
def make_winapp(self,family):
append=self.env.append_unique
append('DEFINES','WINAPI_FAMILY=%s'%family)
append('CXXFLAGS','/ZW')
append('CXXFLAGS','/TP')
for lib_path in self.env.LIBPATH:
append('CXXFLAGS','/AI%s'%lib_path)
@feature('winphoneapp')
@after_method('process_use')
@after_method('propagate_uselib_vars')
def make_winphone_app(self):
make_winapp(self,'WINAPI_FAMILY_PHONE_APP')
	self.env.append_unique('LINKFLAGS','/NODEFAULTLIB:ole32.lib')
	self.env.append_unique('LINKFLAGS','PhoneAppModelHost.lib')
@feature('winapp')
@after_method('process_use')
@after_method('propagate_uselib_vars')
def make_windows_app(self):
make_winapp(self,'WINAPI_FAMILY_DESKTOP_APP')
| mit |
logicus4078/vertx-web | src/test/sockjs-protocol/venv/lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/mbcharsetprober.py | 2924 | 3268 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
# Proofpoint, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
import sys
from . import constants
from .charsetprober import CharSetProber
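# Base class for the multi-byte probers (Big5, EUC-JP, GB2312, ...): each
# subclass plugs in a coding state machine that validates byte sequences and a
# distribution analyzer that scores character frequencies; the analyzer's
# score becomes the prober's confidence.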
class MultiByteCharSetProber(CharSetProber):
def __init__(self):
CharSetProber.__init__(self)
self._mDistributionAnalyzer = None
self._mCodingSM = None
self._mLastChar = [0, 0]
def reset(self):
CharSetProber.reset(self)
if self._mCodingSM:
self._mCodingSM.reset()
if self._mDistributionAnalyzer:
self._mDistributionAnalyzer.reset()
self._mLastChar = [0, 0]
def get_charset_name(self):
pass
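    # get_charset_name() is supplied by subclasses. feed() walks the buffer
    # byte by byte: the state machine flags an outright error (eNotMe) or a
    # definitive match (eItsMe), while every completed character (eStart) is
    # handed to the distribution analyzer. _mLastChar keeps the final byte of
    # the previous buffer so characters split across feed() calls still pair up.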
def feed(self, aBuf):
aLen = len(aBuf)
for i in range(0, aLen):
codingState = self._mCodingSM.next_state(aBuf[i])
if codingState == constants.eError:
if constants._debug:
sys.stderr.write(self.get_charset_name()
+ ' prober hit error at byte ' + str(i)
+ '\n')
self._mState = constants.eNotMe
break
elif codingState == constants.eItsMe:
self._mState = constants.eFoundIt
break
elif codingState == constants.eStart:
charLen = self._mCodingSM.get_current_charlen()
if i == 0:
self._mLastChar[1] = aBuf[0]
self._mDistributionAnalyzer.feed(self._mLastChar, charLen)
else:
self._mDistributionAnalyzer.feed(aBuf[i - 1:i + 1],
charLen)
self._mLastChar[0] = aBuf[aLen - 1]
if self.get_state() == constants.eDetecting:
if (self._mDistributionAnalyzer.got_enough_data() and
(self.get_confidence() > constants.SHORTCUT_THRESHOLD)):
self._mState = constants.eFoundIt
return self.get_state()
def get_confidence(self):
return self._mDistributionAnalyzer.get_confidence()
| apache-2.0 |
longmen21/edx-platform | lms/djangoapps/verify_student/tests/test_views.py | 3 | 106566 | # encoding: utf-8
"""
Tests of verify_student views.
"""
import json
import urllib
from datetime import timedelta, datetime
from uuid import uuid4
import ddt
import httpretty
import mock
from nose.plugins.attrib import attr
import boto
import moto
import pytz
from bs4 import BeautifulSoup
from mock import patch, Mock, ANY
import requests
from django.conf import settings
from django.core.urlresolvers import reverse
from django.core import mail
from django.test import TestCase
from django.test.client import Client, RequestFactory
from django.test.utils import override_settings
from django.utils import timezone
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from opaque_keys.edx.locator import CourseLocator
from opaque_keys.edx.keys import UsageKey
from course_modes.models import CourseMode
from course_modes.tests.factories import CourseModeFactory
from courseware.url_helpers import get_redirect_url
from common.test.utils import XssTestMixin
from commerce.models import CommerceConfiguration
from commerce.tests import TEST_PAYMENT_DATA, TEST_API_URL, TEST_API_SIGNING_KEY, TEST_PUBLIC_URL_ROOT
from embargo.test_utils import restrict_course
from openedx.core.djangoapps.user_api.accounts.api import get_account_settings
from openedx.core.djangoapps.theming.tests.test_util import with_comprehensive_theme
from shoppingcart.models import Order, CertificateItem
from student.tests.factories import UserFactory, CourseEnrollmentFactory
from student.models import CourseEnrollment
from util.date_utils import get_default_time_display
from util.testing import UrlResetMixin
from lms.djangoapps.verify_student.views import (
checkout_with_ecommerce_service, render_to_response, PayAndVerifyView,
_compose_message_reverification_email
)
from lms.djangoapps.verify_student.models import (
VerificationDeadline, SoftwareSecurePhotoVerification,
VerificationCheckpoint, VerificationStatus,
IcrvStatusEmailsConfiguration,
)
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from xmodule.modulestore.django import modulestore
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.tests.factories import check_mongo_calls
def mock_render_to_response(*args, **kwargs):
return render_to_response(*args, **kwargs)
render_mock = Mock(side_effect=mock_render_to_response)
PAYMENT_DATA_KEYS = {'payment_processor_name', 'payment_page_url', 'payment_form_data'}
@attr(shard=2)
class StartView(TestCase):
"""
This view is for the first time student is
attempting a Photo Verification.
"""
def start_url(self, course_id=""):
return "/verify_student/{0}".format(urllib.quote(course_id))
def test_start_new_verification(self):
"""
Test the case where the user has no pending `PhotoVerificationAttempts`,
but is just starting their first.
"""
UserFactory.create(username="rusty", password="test")
self.client.login(username="rusty", password="test")
def must_be_logged_in(self):
self.assertHttpForbidden(self.client.get(self.start_url()))
@attr(shard=2)
@ddt.ddt
class TestPayAndVerifyView(UrlResetMixin, ModuleStoreTestCase, XssTestMixin):
"""
Tests for the payment and verification flow views.
"""
MIN_PRICE = 12
USERNAME = "test_user"
PASSWORD = "test_password"
NOW = datetime.now(pytz.UTC)
YESTERDAY = NOW - timedelta(days=1)
TOMORROW = NOW + timedelta(days=1)
URLCONF_MODULES = ['embargo']
@mock.patch.dict(settings.FEATURES, {'EMBARGO': True})
def setUp(self):
super(TestPayAndVerifyView, self).setUp()
self.user = UserFactory.create(username=self.USERNAME, password=self.PASSWORD)
result = self.client.login(username=self.USERNAME, password=self.PASSWORD)
self.assertTrue(result, msg="Could not log in")
@ddt.data(
("verified", "verify_student_start_flow"),
("professional", "verify_student_start_flow"),
("verified", "verify_student_begin_flow"),
("professional", "verify_student_begin_flow")
)
@ddt.unpack
def test_start_flow_not_verified(self, course_mode, payment_flow):
course = self._create_course(course_mode)
self._enroll(course.id)
response = self._get_page(payment_flow, course.id)
self._assert_displayed_mode(response, course_mode)
self._assert_steps_displayed(
response,
PayAndVerifyView.PAYMENT_STEPS + PayAndVerifyView.VERIFICATION_STEPS,
PayAndVerifyView.MAKE_PAYMENT_STEP
)
self._assert_messaging(response, PayAndVerifyView.FIRST_TIME_VERIFY_MSG)
self._assert_requirements_displayed(response, [
PayAndVerifyView.PHOTO_ID_REQ,
PayAndVerifyView.WEBCAM_REQ,
])
self._assert_upgrade_session_flag(False)
@httpretty.activate
@override_settings(
ECOMMERCE_API_URL=TEST_API_URL,
ECOMMERCE_API_SIGNING_KEY=TEST_API_SIGNING_KEY,
ECOMMERCE_PUBLIC_URL_ROOT=TEST_PUBLIC_URL_ROOT
)
def test_start_flow_with_ecommerce(self):
"""Verify user gets redirected to ecommerce checkout when ecommerce checkout is enabled."""
checkout_page = '/test_basket/'
sku = 'TESTSKU'
# When passing a SKU ecommerce api gets called.
httpretty.register_uri(
httpretty.GET,
"{}/payment/processors/".format(TEST_API_URL),
body=json.dumps(['foo', 'bar']),
content_type="application/json",
)
httpretty.register_uri(httpretty.GET, "{}{}".format(TEST_PUBLIC_URL_ROOT, checkout_page))
CommerceConfiguration.objects.create(
checkout_on_ecommerce_service=True,
single_course_checkout_page=checkout_page
)
course = self._create_course('verified', sku=sku)
self._enroll(course.id)
response = self._get_page('verify_student_start_flow', course.id, expected_status_code=302)
expected_page = '{}{}?sku={}'.format(TEST_PUBLIC_URL_ROOT, checkout_page, sku)
self.assertRedirects(response, expected_page, fetch_redirect_response=False)
@ddt.data(
("no-id-professional", "verify_student_start_flow"),
("no-id-professional", "verify_student_begin_flow")
)
@ddt.unpack
def test_start_flow_with_no_id_professional(self, course_mode, payment_flow):
course = self._create_course(course_mode)
self._enroll(course.id)
response = self._get_page(payment_flow, course.id)
self._assert_displayed_mode(response, course_mode)
self._assert_steps_displayed(
response,
PayAndVerifyView.PAYMENT_STEPS,
PayAndVerifyView.MAKE_PAYMENT_STEP
)
self._assert_messaging(response, PayAndVerifyView.FIRST_TIME_VERIFY_MSG)
self._assert_requirements_displayed(response, [])
def test_ab_testing_page(self):
course = self._create_course("verified")
self._enroll(course.id, "verified")
response = self._get_page("verify_student_begin_flow", course.id)
self._assert_displayed_mode(response, "verified")
self.assertContains(response, "Upgrade to a Verified Certificate")
self.assertContains(response, "Before you upgrade to a certificate track,")
self.assertContains(response, "To receive a certificate, you must also verify your identity")
self.assertContains(response, "You will use your webcam to take a picture of")
@ddt.data(
("expired", "verify_student_start_flow"),
("denied", "verify_student_begin_flow")
)
@ddt.unpack
def test_start_flow_expired_or_denied_verification(self, verification_status, payment_flow):
course = self._create_course("verified")
self._enroll(course.id, "verified")
self._set_verification_status(verification_status)
response = self._get_page(payment_flow, course.id)
# Expect the same content as when the user has not verified
self._assert_steps_displayed(
response,
[PayAndVerifyView.INTRO_STEP] + PayAndVerifyView.VERIFICATION_STEPS,
PayAndVerifyView.INTRO_STEP
)
self._assert_messaging(response, PayAndVerifyView.FIRST_TIME_VERIFY_MSG)
self._assert_requirements_displayed(response, [
PayAndVerifyView.PHOTO_ID_REQ,
PayAndVerifyView.WEBCAM_REQ,
])
@ddt.data(
("verified", "submitted", "verify_student_start_flow"),
("verified", "approved", "verify_student_start_flow"),
("verified", "error", "verify_student_start_flow"),
("professional", "submitted", "verify_student_start_flow"),
("no-id-professional", None, "verify_student_start_flow"),
("verified", "submitted", "verify_student_begin_flow"),
("verified", "approved", "verify_student_begin_flow"),
("verified", "error", "verify_student_begin_flow"),
("professional", "submitted", "verify_student_begin_flow"),
("no-id-professional", None, "verify_student_begin_flow"),
)
@ddt.unpack
def test_start_flow_already_verified(self, course_mode, verification_status, payment_flow):
course = self._create_course(course_mode)
self._enroll(course.id)
self._set_verification_status(verification_status)
response = self._get_page(payment_flow, course.id)
self._assert_displayed_mode(response, course_mode)
self._assert_steps_displayed(
response,
PayAndVerifyView.PAYMENT_STEPS,
PayAndVerifyView.MAKE_PAYMENT_STEP
)
self._assert_messaging(response, PayAndVerifyView.FIRST_TIME_VERIFY_MSG)
self._assert_requirements_displayed(response, [])
@ddt.data(
("verified", "verify_student_start_flow"),
("professional", "verify_student_start_flow"),
("verified", "verify_student_begin_flow"),
("professional", "verify_student_begin_flow")
)
@ddt.unpack
def test_start_flow_already_paid(self, course_mode, payment_flow):
course = self._create_course(course_mode)
self._enroll(course.id, course_mode)
response = self._get_page(payment_flow, course.id)
self._assert_displayed_mode(response, course_mode)
self._assert_steps_displayed(
response,
[PayAndVerifyView.INTRO_STEP] + PayAndVerifyView.VERIFICATION_STEPS,
PayAndVerifyView.INTRO_STEP
)
self._assert_messaging(response, PayAndVerifyView.FIRST_TIME_VERIFY_MSG)
self._assert_requirements_displayed(response, [
PayAndVerifyView.PHOTO_ID_REQ,
PayAndVerifyView.WEBCAM_REQ,
])
@ddt.data("verify_student_start_flow", "verify_student_begin_flow")
def test_start_flow_not_enrolled(self, payment_flow):
course = self._create_course("verified")
self._set_verification_status("submitted")
response = self._get_page(payment_flow, course.id)
# This shouldn't happen if the student has been auto-enrolled,
# but if they somehow end up on this page without enrolling,
# treat them as if they need to pay
response = self._get_page(payment_flow, course.id)
self._assert_steps_displayed(
response,
PayAndVerifyView.PAYMENT_STEPS,
PayAndVerifyView.MAKE_PAYMENT_STEP
)
self._assert_requirements_displayed(response, [])
@ddt.data("verify_student_start_flow", "verify_student_begin_flow")
def test_start_flow_unenrolled(self, payment_flow):
course = self._create_course("verified")
self._set_verification_status("submitted")
self._enroll(course.id, "verified")
self._unenroll(course.id)
# If unenrolled, treat them like they haven't paid at all
# (we assume that they've gotten a refund or didn't pay initially)
response = self._get_page(payment_flow, course.id)
self._assert_steps_displayed(
response,
PayAndVerifyView.PAYMENT_STEPS,
PayAndVerifyView.MAKE_PAYMENT_STEP
)
self._assert_requirements_displayed(response, [])
@ddt.data(
("verified", "submitted", "verify_student_start_flow"),
("verified", "approved", "verify_student_start_flow"),
("professional", "submitted", "verify_student_start_flow"),
("verified", "submitted", "verify_student_begin_flow"),
("verified", "approved", "verify_student_begin_flow"),
("professional", "submitted", "verify_student_begin_flow")
)
@ddt.unpack
def test_start_flow_already_verified_and_paid(self, course_mode, verification_status, payment_flow):
course = self._create_course(course_mode)
self._enroll(course.id, course_mode)
self._set_verification_status(verification_status)
response = self._get_page(
payment_flow,
course.id,
expected_status_code=302
)
self._assert_redirects_to_dashboard(response)
@with_comprehensive_theme("edx.org")
@ddt.data("verify_student_start_flow", "verify_student_begin_flow")
def test_pay_and_verify_hides_header_nav(self, payment_flow):
course = self._create_course("verified")
self._enroll(course.id, "verified")
response = self._get_page(payment_flow, course.id)
# Verify that the header navigation links are hidden for the edx.org version
self.assertNotContains(response, "How it Works")
self.assertNotContains(response, "Find courses")
self.assertNotContains(response, "Schools & Partners")
def test_verify_now(self):
# We've already paid, and now we're trying to verify
course = self._create_course("verified")
self._enroll(course.id, "verified")
response = self._get_page('verify_student_verify_now', course.id)
self._assert_messaging(response, PayAndVerifyView.VERIFY_NOW_MSG)
self.assert_no_xss(response, '<script>alert("XSS")</script>')
# Expect that *all* steps are displayed,
# but we start after the payment step (because it's already completed).
self._assert_steps_displayed(
response,
PayAndVerifyView.PAYMENT_STEPS + PayAndVerifyView.VERIFICATION_STEPS,
PayAndVerifyView.FACE_PHOTO_STEP
)
# These will be hidden from the user anyway since they're starting
# after the payment step.
self._assert_requirements_displayed(response, [
PayAndVerifyView.PHOTO_ID_REQ,
PayAndVerifyView.WEBCAM_REQ,
])
def test_verify_now_already_verified(self):
course = self._create_course("verified")
self._enroll(course.id, "verified")
self._set_verification_status("submitted")
# Already verified, so if we somehow end up here,
# redirect immediately to the dashboard
response = self._get_page(
'verify_student_verify_now',
course.id,
expected_status_code=302
)
self._assert_redirects_to_dashboard(response)
def test_verify_now_user_details(self):
course = self._create_course("verified")
self._enroll(course.id, "verified")
response = self._get_page('verify_student_verify_now', course.id)
self._assert_user_details(response, self.user.profile.name)
@ddt.data(
"verify_student_verify_now",
"verify_student_payment_confirmation"
)
def test_verify_now_not_enrolled(self, page_name):
course = self._create_course("verified")
response = self._get_page(page_name, course.id, expected_status_code=302)
self._assert_redirects_to_start_flow(response, course.id)
@ddt.data(
"verify_student_verify_now",
"verify_student_payment_confirmation"
)
def test_verify_now_unenrolled(self, page_name):
course = self._create_course("verified")
self._enroll(course.id, "verified")
self._unenroll(course.id)
response = self._get_page(page_name, course.id, expected_status_code=302)
self._assert_redirects_to_start_flow(response, course.id)
@ddt.data(
"verify_student_verify_now",
"verify_student_payment_confirmation"
)
def test_verify_now_not_paid(self, page_name):
course = self._create_course("verified")
self._enroll(course.id)
response = self._get_page(page_name, course.id, expected_status_code=302)
self._assert_redirects_to_upgrade(response, course.id)
def test_payment_confirmation(self):
course = self._create_course("verified")
self._enroll(course.id, "verified")
response = self._get_page('verify_student_payment_confirmation', course.id)
self._assert_messaging(response, PayAndVerifyView.PAYMENT_CONFIRMATION_MSG)
self.assert_no_xss(response, '<script>alert("XSS")</script>')
# Expect that *all* steps are displayed,
# but we start at the payment confirmation step
self._assert_steps_displayed(
response,
PayAndVerifyView.PAYMENT_STEPS + PayAndVerifyView.VERIFICATION_STEPS,
PayAndVerifyView.PAYMENT_CONFIRMATION_STEP,
)
# These will be hidden from the user anyway since they're starting
# after the payment step. We're already including the payment
# steps, so it's easier to include these as well.
self._assert_requirements_displayed(response, [
PayAndVerifyView.PHOTO_ID_REQ,
PayAndVerifyView.WEBCAM_REQ,
])
@ddt.data("verify_student_start_flow", "verify_student_begin_flow")
def test_payment_cannot_skip(self, payment_flow):
"""
Simple test to verify that certain steps cannot be skipped. This test sets up
a scenario where the user should be on the MAKE_PAYMENT_STEP, but is trying to
skip it. Despite setting the parameter, the current step should still be
MAKE_PAYMENT_STEP.
"""
course = self._create_course("verified")
response = self._get_page(
payment_flow,
course.id,
skip_first_step=True
)
self._assert_messaging(response, PayAndVerifyView.FIRST_TIME_VERIFY_MSG)
self.assert_no_xss(response, '<script>alert("XSS")</script>')
        # Expect that *all* steps are displayed,
        # and that the current step is still the payment step,
        # since it cannot be skipped
self._assert_steps_displayed(
response,
PayAndVerifyView.PAYMENT_STEPS + PayAndVerifyView.VERIFICATION_STEPS,
PayAndVerifyView.MAKE_PAYMENT_STEP,
)
def test_payment_confirmation_already_verified(self):
course = self._create_course("verified")
self._enroll(course.id, "verified")
self._set_verification_status("submitted")
response = self._get_page('verify_student_payment_confirmation', course.id)
# Other pages would redirect to the dashboard at this point,
# because the user has paid and verified. However, we want
# the user to see the confirmation page even if there
# isn't anything for them to do here except return
# to the dashboard.
self._assert_steps_displayed(
response,
PayAndVerifyView.PAYMENT_STEPS,
PayAndVerifyView.PAYMENT_CONFIRMATION_STEP,
)
def test_payment_confirmation_already_verified_skip_first_step(self):
course = self._create_course("verified")
self._enroll(course.id, "verified")
self._set_verification_status("submitted")
response = self._get_page(
'verify_student_payment_confirmation',
course.id,
skip_first_step=True
)
# There are no other steps, so stay on the
# payment confirmation step
self._assert_steps_displayed(
response,
PayAndVerifyView.PAYMENT_STEPS,
PayAndVerifyView.PAYMENT_CONFIRMATION_STEP,
)
@ddt.data(
(YESTERDAY, True),
(TOMORROW, False)
)
@ddt.unpack
def test_payment_confirmation_course_details(self, course_start, show_courseware_url):
course = self._create_course("verified", course_start=course_start)
self._enroll(course.id, "verified")
response = self._get_page('verify_student_payment_confirmation', course.id)
courseware_url = (
reverse("course_root", kwargs={'course_id': unicode(course.id)})
if show_courseware_url else ""
)
self._assert_course_details(
response,
unicode(course.id),
course.display_name,
course.start_datetime_text(),
courseware_url
)
@ddt.data("verified", "professional")
def test_upgrade(self, course_mode):
course = self._create_course(course_mode)
self._enroll(course.id)
response = self._get_page('verify_student_upgrade_and_verify', course.id)
self._assert_displayed_mode(response, course_mode)
self._assert_steps_displayed(
response,
PayAndVerifyView.PAYMENT_STEPS + PayAndVerifyView.VERIFICATION_STEPS,
PayAndVerifyView.MAKE_PAYMENT_STEP
)
self._assert_messaging(response, PayAndVerifyView.UPGRADE_MSG)
self._assert_requirements_displayed(response, [
PayAndVerifyView.PHOTO_ID_REQ,
PayAndVerifyView.WEBCAM_REQ,
])
self._assert_upgrade_session_flag(True)
self.assert_no_xss(response, '<script>alert("XSS")</script>')
def test_upgrade_already_verified(self):
course = self._create_course("verified")
self._enroll(course.id)
self._set_verification_status("submitted")
response = self._get_page('verify_student_upgrade_and_verify', course.id)
self._assert_steps_displayed(
response,
PayAndVerifyView.PAYMENT_STEPS,
PayAndVerifyView.MAKE_PAYMENT_STEP
)
self._assert_messaging(response, PayAndVerifyView.UPGRADE_MSG)
self._assert_requirements_displayed(response, [])
def test_upgrade_already_paid(self):
course = self._create_course("verified")
self._enroll(course.id, "verified")
# If we've already paid, then the upgrade messaging
# won't make much sense. Redirect them to the
# "verify later" page instead.
response = self._get_page(
'verify_student_upgrade_and_verify',
course.id,
expected_status_code=302
)
self._assert_redirects_to_verify_start(response, course.id)
def test_upgrade_already_verified_and_paid(self):
course = self._create_course("verified")
self._enroll(course.id, "verified")
self._set_verification_status("submitted")
# Already verified and paid, so redirect to the dashboard
response = self._get_page(
'verify_student_upgrade_and_verify',
course.id,
expected_status_code=302
)
self._assert_redirects_to_dashboard(response)
def test_upgrade_not_enrolled(self):
course = self._create_course("verified")
response = self._get_page(
'verify_student_upgrade_and_verify',
course.id,
expected_status_code=302
)
self._assert_redirects_to_start_flow(response, course.id)
def test_upgrade_unenrolled(self):
course = self._create_course("verified")
self._enroll(course.id, "verified")
self._unenroll(course.id)
response = self._get_page(
'verify_student_upgrade_and_verify',
course.id,
expected_status_code=302
)
self._assert_redirects_to_start_flow(response, course.id)
@ddt.data([], ["honor"], ["honor", "audit"])
def test_no_verified_mode_for_course(self, modes_available):
course = self._create_course(*modes_available)
pages = [
'verify_student_start_flow',
'verify_student_begin_flow',
'verify_student_verify_now',
'verify_student_upgrade_and_verify',
]
for page_name in pages:
self._get_page(
page_name,
course.id,
expected_status_code=404
)
@ddt.data(
([], "verify_student_start_flow"),
(["no-id-professional", "professional"], "verify_student_start_flow"),
(["honor", "audit"], "verify_student_start_flow"),
([], "verify_student_begin_flow"),
(["no-id-professional", "professional"], "verify_student_begin_flow"),
(["honor", "audit"], "verify_student_begin_flow"),
)
@ddt.unpack
def test_no_id_professional_entry_point(self, modes_available, payment_flow):
course = self._create_course(*modes_available)
if "no-id-professional" in modes_available or "professional" in modes_available:
self._get_page(payment_flow, course.id, expected_status_code=200)
else:
self._get_page(payment_flow, course.id, expected_status_code=404)
@ddt.data(
"verify_student_start_flow",
"verify_student_begin_flow",
"verify_student_verify_now",
"verify_student_upgrade_and_verify",
)
def test_require_login(self, url_name):
self.client.logout()
course = self._create_course("verified")
response = self._get_page(url_name, course.id, expected_status_code=302)
original_url = reverse(url_name, kwargs={'course_id': unicode(course.id)})
login_url = u"{login_url}?next={original_url}".format(
login_url=reverse('signin_user'),
original_url=original_url
)
self.assertRedirects(response, login_url)
@ddt.data(
"verify_student_start_flow",
"verify_student_begin_flow",
"verify_student_verify_now",
"verify_student_upgrade_and_verify",
)
def test_no_such_course(self, url_name):
non_existent_course = CourseLocator(course="test", org="test", run="test")
self._get_page(
url_name,
non_existent_course,
expected_status_code=404
)
@ddt.data("verify_student_start_flow", "verify_student_begin_flow")
def test_account_not_active(self, payment_flow):
self.user.is_active = False
self.user.save()
course = self._create_course("verified")
response = self._get_page(payment_flow, course.id)
self._assert_steps_displayed(
response,
PayAndVerifyView.PAYMENT_STEPS + PayAndVerifyView.VERIFICATION_STEPS,
PayAndVerifyView.MAKE_PAYMENT_STEP
)
self._assert_requirements_displayed(response, [
PayAndVerifyView.ACCOUNT_ACTIVATION_REQ,
PayAndVerifyView.PHOTO_ID_REQ,
PayAndVerifyView.WEBCAM_REQ,
])
@ddt.data("verify_student_start_flow", "verify_student_begin_flow")
def test_no_contribution(self, payment_flow):
# Do NOT specify a contribution for the course in a session var.
course = self._create_course("verified")
response = self._get_page(payment_flow, course.id)
self._assert_contribution_amount(response, "")
@ddt.data("verify_student_start_flow", "verify_student_begin_flow")
def test_contribution_other_course(self, payment_flow):
# Specify a contribution amount for another course in the session
course = self._create_course("verified")
other_course_id = CourseLocator(org="other", run="test", course="test")
self._set_contribution("12.34", other_course_id)
# Expect that the contribution amount is NOT pre-filled,
response = self._get_page(payment_flow, course.id)
self._assert_contribution_amount(response, "")
@ddt.data("verify_student_start_flow", "verify_student_begin_flow")
def test_contribution(self, payment_flow):
# Specify a contribution amount for this course in the session
course = self._create_course("verified")
self._set_contribution("12.34", course.id)
# Expect that the contribution amount is pre-filled,
response = self._get_page(payment_flow, course.id)
self._assert_contribution_amount(response, "12.34")
@ddt.data("verify_student_start_flow", "verify_student_begin_flow")
def test_verification_deadline(self, payment_flow):
deadline = datetime.now(tz=pytz.UTC) + timedelta(days=360)
course = self._create_course("verified")
# Set a deadline on the course mode AND on the verification deadline model.
# This simulates the common case in which the upgrade deadline (course mode expiration)
# and the verification deadline are the same.
# NOTE: we used to use the course mode expiration datetime for BOTH of these deadlines,
# before the VerificationDeadline model was introduced.
self._set_deadlines(course.id, upgrade_deadline=deadline, verification_deadline=deadline)
# Expect that the expiration date is set
response = self._get_page(payment_flow, course.id)
data = self._get_page_data(response)
self.assertEqual(data['verification_deadline'], deadline.strftime("%b %d, %Y at %H:%M UTC"))
def test_course_mode_expired(self):
deadline = datetime.now(tz=pytz.UTC) + timedelta(days=-360)
course = self._create_course("verified")
# Set the upgrade deadline (course mode expiration) and verification deadline
# to the same value. This used to be the default when we used the expiration datetime
# for BOTH values.
self._set_deadlines(course.id, upgrade_deadline=deadline, verification_deadline=deadline)
# Need to be enrolled
self._enroll(course.id, "verified")
# The course mode has expired, so expect an explanation
# to the student that the deadline has passed
response = self._get_page("verify_student_verify_now", course.id)
self.assertContains(response, "verification deadline")
self.assertContains(response, deadline.strftime("%b %d, %Y at %H:%M UTC"))
@ddt.data(datetime.now(tz=pytz.UTC) + timedelta(days=360), None)
def test_course_mode_expired_verification_deadline_in_future(self, verification_deadline):
"""Verify that student can not upgrade in expired course mode."""
course_modes = ("verified", "credit")
course = self._create_course(*course_modes)
# Set the upgrade deadline of verified mode in the past, but the verification
# deadline in the future.
self._set_deadlines(
course.id,
upgrade_deadline=datetime.now(tz=pytz.UTC) + timedelta(days=-360),
verification_deadline=verification_deadline,
)
# Set the upgrade deadline for credit mode in future.
self._set_deadlines(
course.id,
upgrade_deadline=datetime.now(tz=pytz.UTC) + timedelta(days=360),
verification_deadline=verification_deadline,
mode_slug="credit"
)
# Try to pay or upgrade.
# We should get an error message since the deadline has passed and did not allow
# directly sale of credit mode.
for page_name in ["verify_student_start_flow",
"verify_student_begin_flow",
"verify_student_upgrade_and_verify"]:
response = self._get_page(page_name, course.id)
self.assertContains(response, "Upgrade Deadline Has Passed")
# Simulate paying for the course and enrolling
self._enroll(course.id, "verified")
# Enter the verification part of the flow
# Expect that we are able to verify
response = self._get_page("verify_student_verify_now", course.id)
self.assertNotContains(response, "Verification is no longer available")
data = self._get_page_data(response)
self.assertEqual(data['message_key'], PayAndVerifyView.VERIFY_NOW_MSG)
# Check that the mode selected is expired verified mode not the credit mode
# because the direct enrollment to the credit mode is not allowed.
self.assertEqual(data['course_mode_slug'], "verified")
# Check that the verification deadline (rather than the upgrade deadline) is displayed
if verification_deadline is not None:
self.assertEqual(data["verification_deadline"], verification_deadline.strftime("%b %d, %Y at %H:%M UTC"))
else:
self.assertEqual(data["verification_deadline"], "")
def test_course_mode_not_expired_verification_deadline_passed(self):
course = self._create_course("verified")
# Set the upgrade deadline in the future
# and the verification deadline in the past
# We try not to discourage this with validation rules,
# since it's a bad user experience
# to purchase a verified track and then not be able to verify,
# but if it happens we need to handle it gracefully.
upgrade_deadline_in_future = datetime.now(tz=pytz.UTC) + timedelta(days=360)
verification_deadline_in_past = datetime.now(tz=pytz.UTC) + timedelta(days=-360)
self._set_deadlines(
course.id,
upgrade_deadline=upgrade_deadline_in_future,
verification_deadline=verification_deadline_in_past,
)
# Enroll as verified (simulate purchasing the verified enrollment)
self._enroll(course.id, "verified")
# Even though the upgrade deadline is in the future,
# the verification deadline has passed, so we should see an error
# message when we go to verify.
response = self._get_page("verify_student_verify_now", course.id)
self.assertContains(response, "verification deadline")
self.assertContains(response, verification_deadline_in_past.strftime("%b %d, %Y at %H:%M UTC"))
@mock.patch.dict(settings.FEATURES, {'EMBARGO': True})
@ddt.data("verify_student_start_flow", "verify_student_begin_flow")
def test_embargo_restrict(self, payment_flow):
course = self._create_course("verified")
with restrict_course(course.id) as redirect_url:
# Simulate that we're embargoed from accessing this
# course based on our IP address.
response = self._get_page(payment_flow, course.id, expected_status_code=302)
self.assertRedirects(response, redirect_url)
@mock.patch.dict(settings.FEATURES, {'EMBARGO': True})
@ddt.data("verify_student_start_flow", "verify_student_begin_flow")
def test_embargo_allow(self, payment_flow):
course = self._create_course("verified")
self._get_page(payment_flow, course.id)
def _create_course(self, *course_modes, **kwargs):
"""Create a new course with the specified course modes. """
course = CourseFactory.create(display_name='<script>alert("XSS")</script>')
if kwargs.get('course_start'):
course.start = kwargs.get('course_start')
modulestore().update_item(course, ModuleStoreEnum.UserID.test)
mode_kwargs = {}
if kwargs.get('sku'):
mode_kwargs['sku'] = kwargs['sku']
for course_mode in course_modes:
min_price = (0 if course_mode in ["honor", "audit"] else self.MIN_PRICE)
CourseModeFactory.create(
course_id=course.id,
mode_slug=course_mode,
mode_display_name=course_mode,
min_price=min_price,
**mode_kwargs
)
return course
def _enroll(self, course_key, mode=CourseMode.DEFAULT_MODE_SLUG):
"""Enroll the user in a course. """
CourseEnrollmentFactory.create(
user=self.user,
course_id=course_key,
mode=mode
)
def _unenroll(self, course_key):
"""Unenroll the user from a course. """
CourseEnrollment.unenroll(self.user, course_key)
def _set_verification_status(self, status):
"""Set the user's photo verification status. """
attempt = SoftwareSecurePhotoVerification.objects.create(user=self.user)
if status in ["submitted", "approved", "expired", "denied", "error"]:
attempt.mark_ready()
attempt.submit()
if status in ["approved", "expired"]:
attempt.approve()
elif status == "denied":
attempt.deny("Denied!")
elif status == "error":
attempt.system_error("Error!")
if status == "expired":
days_good_for = settings.VERIFY_STUDENT["DAYS_GOOD_FOR"]
attempt.created_at = datetime.now(pytz.UTC) - timedelta(days=(days_good_for + 1))
attempt.save()
def _set_deadlines(self, course_key, upgrade_deadline=None, verification_deadline=None, mode_slug="verified"):
"""
Set the upgrade and verification deadlines.
Arguments:
course_key (CourseKey): Identifier for the course.
Keyword Arguments:
upgrade_deadline (datetime): Datetime after which a user cannot
upgrade to a verified mode.
verification_deadline (datetime): Datetime after which a user cannot
submit an initial verification attempt.
"""
# Set the course mode expiration (same as the "upgrade" deadline)
mode = CourseMode.objects.get(course_id=course_key, mode_slug=mode_slug)
mode.expiration_datetime = upgrade_deadline
mode.save()
# Set the verification deadline
VerificationDeadline.set_deadline(course_key, verification_deadline)
def _set_contribution(self, amount, course_id):
"""Set the contribution amount pre-filled in a session var. """
session = self.client.session
session["donation_for_course"] = {
unicode(course_id): amount
}
session.save()
def _get_page(self, url_name, course_key, expected_status_code=200, skip_first_step=False):
"""Retrieve one of the verification pages. """
url = reverse(url_name, kwargs={"course_id": unicode(course_key)})
if skip_first_step:
url += "?skip-first-step=1"
response = self.client.get(url)
self.assertEqual(response.status_code, expected_status_code)
return response
def _assert_displayed_mode(self, response, expected_mode):
"""Check whether a course mode is displayed. """
response_dict = self._get_page_data(response)
self.assertEqual(response_dict['course_mode_slug'], expected_mode)
def _assert_steps_displayed(self, response, expected_steps, expected_current_step):
"""Check whether steps in the flow are displayed to the user. """
response_dict = self._get_page_data(response)
self.assertEqual(response_dict['current_step'], expected_current_step)
self.assertEqual(expected_steps, [
step['name'] for step in
response_dict['display_steps']
])
def _assert_messaging(self, response, expected_message):
"""Check the messaging on the page. """
response_dict = self._get_page_data(response)
self.assertEqual(response_dict['message_key'], expected_message)
def _assert_requirements_displayed(self, response, requirements):
"""Check that requirements are displayed on the page. """
response_dict = self._get_page_data(response)
for req, displayed in response_dict['requirements'].iteritems():
if req in requirements:
self.assertTrue(displayed, msg="Expected '{req}' requirement to be displayed".format(req=req))
else:
self.assertFalse(displayed, msg="Expected '{req}' requirement to be hidden".format(req=req))
def _assert_course_details(self, response, course_key, display_name, start_text, url):
"""Check the course information on the page. """
response_dict = self._get_page_data(response)
self.assertEqual(response_dict['course_key'], course_key)
self.assertEqual(response_dict['course_name'], display_name)
self.assertEqual(response_dict['course_start_date'], start_text)
self.assertEqual(response_dict['courseware_url'], url)
def _assert_user_details(self, response, full_name):
"""Check the user detail information on the page. """
response_dict = self._get_page_data(response)
self.assertEqual(response_dict['full_name'], full_name)
def _assert_contribution_amount(self, response, expected_amount):
"""Check the pre-filled contribution amount. """
response_dict = self._get_page_data(response)
self.assertEqual(response_dict['contribution_amount'], expected_amount)
def _get_page_data(self, response):
"""Retrieve the data attributes rendered on the page. """
        soup = BeautifulSoup(response.content, 'html.parser')
pay_and_verify_div = soup.find(id="pay-and-verify-container")
self.assertIsNot(
pay_and_verify_div, None,
msg=(
"Could not load pay and verify flow data. "
"Maybe this isn't the pay and verify page?"
)
)
return {
'full_name': pay_and_verify_div['data-full-name'],
'course_key': pay_and_verify_div['data-course-key'],
'course_name': pay_and_verify_div['data-course-name'],
'course_start_date': pay_and_verify_div['data-course-start-date'],
'courseware_url': pay_and_verify_div['data-courseware-url'],
'course_mode_name': pay_and_verify_div['data-course-mode-name'],
'course_mode_slug': pay_and_verify_div['data-course-mode-slug'],
'display_steps': json.loads(pay_and_verify_div['data-display-steps']),
'current_step': pay_and_verify_div['data-current-step'],
'requirements': json.loads(pay_and_verify_div['data-requirements']),
'message_key': pay_and_verify_div['data-msg-key'],
'contribution_amount': pay_and_verify_div['data-contribution-amount'],
'verification_deadline': pay_and_verify_div['data-verification-deadline']
}
def _assert_upgrade_session_flag(self, is_upgrade):
"""Check that the session flag for attempting an upgrade is set. """
self.assertEqual(self.client.session.get('attempting_upgrade'), is_upgrade)
def _assert_redirects_to_dashboard(self, response):
"""Check that the page redirects to the student dashboard. """
self.assertRedirects(response, reverse('dashboard'))
def _assert_redirects_to_start_flow(self, response, course_id):
"""Check that the page redirects to the start of the payment/verification flow. """
url = reverse('verify_student_start_flow', kwargs={'course_id': unicode(course_id)})
self.assertRedirects(response, url)
def _assert_redirects_to_verify_start(self, response, course_id, status_code=302):
"""Check that the page redirects to the "verify later" part of the flow. """
url = reverse('verify_student_verify_now', kwargs={'course_id': unicode(course_id)})
self.assertRedirects(response, url, status_code)
def _assert_redirects_to_upgrade(self, response, course_id):
"""Check that the page redirects to the "upgrade" part of the flow. """
url = reverse('verify_student_upgrade_and_verify', kwargs={'course_id': unicode(course_id)})
self.assertRedirects(response, url)
@ddt.data("verify_student_start_flow", "verify_student_begin_flow")
def test_course_upgrade_page_with_unicode_and_special_values_in_display_name(self, payment_flow):
"""Check the course information on the page. """
mode_display_name = u"Introduction à l'astrophysique"
course = CourseFactory.create(display_name=mode_display_name)
for course_mode in [CourseMode.DEFAULT_MODE_SLUG, "verified"]:
min_price = (self.MIN_PRICE if course_mode != CourseMode.DEFAULT_MODE_SLUG else 0)
CourseModeFactory.create(
course_id=course.id,
mode_slug=course_mode,
mode_display_name=mode_display_name,
min_price=min_price
)
self._enroll(course.id)
response_dict = self._get_page_data(self._get_page(payment_flow, course.id))
self.assertEqual(response_dict['course_name'], mode_display_name)
@httpretty.activate
@override_settings(ECOMMERCE_API_URL=TEST_API_URL, ECOMMERCE_API_SIGNING_KEY=TEST_API_SIGNING_KEY)
@ddt.data("verify_student_start_flow", "verify_student_begin_flow")
def test_processors_api(self, payment_flow):
"""
Check that when working with a product being processed by the
ecommerce api, we correctly call to that api for the list of
available payment processors.
"""
        # setting a nonempty sku on the course will trigger calls to
        # the ecommerce api to get payment processors.
course = self._create_course("verified", sku='nonempty-sku')
self._enroll(course.id)
# mock out the payment processors endpoint
httpretty.register_uri(
httpretty.GET,
"{}/payment/processors/".format(TEST_API_URL),
body=json.dumps(['foo', 'bar']),
content_type="application/json",
)
# make the server request
response = self._get_page(payment_flow, course.id)
self.assertEqual(response.status_code, 200)
# ensure the mock api call was made. NOTE: the following line
# approximates the check - if the headers were empty it means
# there was no last request.
self.assertNotEqual(httpretty.last_request().headers, {})
class CheckoutTestMixin(object):
"""
Mixin implementing test methods that should behave identically regardless
of which backend is used (shoppingcart or ecommerce service). Subclasses
immediately follow for each backend, which inherit from TestCase and
define methods needed to customize test parameters, and patch the
appropriate checkout method.
Though the view endpoint under test is named 'create_order' for backward-
compatibility, the effect of using this endpoint is to choose a specific product
(i.e. course mode) and trigger immediate checkout.
"""
def setUp(self):
""" Create a user and course. """
super(CheckoutTestMixin, self).setUp()
self.user = UserFactory.create(username="test", password="test")
self.course = CourseFactory.create()
for mode, min_price in (('audit', 0), ('honor', 0), ('verified', 100)):
CourseModeFactory.create(mode_slug=mode, course_id=self.course.id, min_price=min_price, sku=self.make_sku())
self.client.login(username="test", password="test")
def _assert_checked_out(
self,
post_params,
patched_create_order,
expected_course_key,
expected_mode_slug,
expected_status_code=200
):
"""
DRY helper.
Ensures that checkout functions were invoked as
expected during execution of the create_order endpoint.
"""
post_params.setdefault('processor', None)
response = self.client.post(reverse('verify_student_create_order'), post_params)
self.assertEqual(response.status_code, expected_status_code)
if expected_status_code == 200:
# ensure we called checkout at all
self.assertTrue(patched_create_order.called)
# ensure checkout args were correct
args = self._get_checkout_args(patched_create_order)
self.assertEqual(args['user'], self.user)
self.assertEqual(args['course_key'], expected_course_key)
self.assertEqual(args['course_mode'].slug, expected_mode_slug)
# ensure response data was correct
data = json.loads(response.content)
self.assertEqual(set(data.keys()), PAYMENT_DATA_KEYS)
else:
self.assertFalse(patched_create_order.called)
def test_create_order(self, patched_create_order):
# Create an order
params = {
'course_id': unicode(self.course.id),
'contribution': 100,
}
self._assert_checked_out(params, patched_create_order, self.course.id, 'verified')
def test_create_order_prof_ed(self, patched_create_order):
# Create a prof ed course
course = CourseFactory.create()
CourseModeFactory.create(mode_slug="professional", course_id=course.id, min_price=10, sku=self.make_sku())
# Create an order for a prof ed course
params = {'course_id': unicode(course.id)}
self._assert_checked_out(params, patched_create_order, course.id, 'professional')
def test_create_order_no_id_professional(self, patched_create_order):
        # Create a no-id-professional course
course = CourseFactory.create()
CourseModeFactory.create(mode_slug="no-id-professional", course_id=course.id, min_price=10, sku=self.make_sku())
        # Create an order for the no-id-professional course
params = {'course_id': unicode(course.id)}
self._assert_checked_out(params, patched_create_order, course.id, 'no-id-professional')
def test_create_order_for_multiple_paid_modes(self, patched_create_order):
        # Create a course with two paid modes: no-id-professional and professional
course = CourseFactory.create()
CourseModeFactory.create(mode_slug="no-id-professional", course_id=course.id, min_price=10, sku=self.make_sku())
CourseModeFactory.create(mode_slug="professional", course_id=course.id, min_price=10, sku=self.make_sku())
        # Create an order for a course with multiple paid modes
params = {'course_id': unicode(course.id)}
# TODO jsa - is this the intended behavior?
self._assert_checked_out(params, patched_create_order, course.id, 'no-id-professional')
def test_create_order_bad_donation_amount(self, patched_create_order):
# Create an order
params = {
'course_id': unicode(self.course.id),
'contribution': '99.9'
}
self._assert_checked_out(params, patched_create_order, None, None, expected_status_code=400)
def test_create_order_good_donation_amount(self, patched_create_order):
# Create an order
params = {
'course_id': unicode(self.course.id),
'contribution': '100.0'
}
self._assert_checked_out(params, patched_create_order, self.course.id, 'verified')
def test_old_clients(self, patched_create_order):
# ensure the response to a request from a stale js client is modified so as
# not to break behavior in the browser.
# (XCOM-214) remove after release.
expected_payment_data = TEST_PAYMENT_DATA.copy()
expected_payment_data['payment_form_data'].update({'foo': 'bar'})
patched_create_order.return_value = expected_payment_data
# there is no 'processor' parameter in the post payload, so the response should only contain payment form data.
params = {'course_id': unicode(self.course.id), 'contribution': 100}
response = self.client.post(reverse('verify_student_create_order'), params)
self.assertEqual(response.status_code, 200)
self.assertTrue(patched_create_order.called)
# ensure checkout args were correct
args = self._get_checkout_args(patched_create_order)
self.assertEqual(args['user'], self.user)
self.assertEqual(args['course_key'], self.course.id)
self.assertEqual(args['course_mode'].slug, 'verified')
# ensure response data was correct
data = json.loads(response.content)
self.assertEqual(data, {'foo': 'bar'})
@attr(shard=2)
@patch('lms.djangoapps.verify_student.views.checkout_with_shoppingcart', return_value=TEST_PAYMENT_DATA, autospec=True)
class TestCreateOrderShoppingCart(CheckoutTestMixin, ModuleStoreTestCase):
""" Test view behavior when the shoppingcart is used. """
def make_sku(self):
""" Checkout is handled by shoppingcart when the course mode's sku is empty. """
return ''
def _get_checkout_args(self, patched_create_order):
""" Assuming patched_create_order was called, return a mapping containing the call arguments."""
return dict(zip(('request', 'user', 'course_key', 'course_mode', 'amount'), patched_create_order.call_args[0]))
@attr(shard=2)
@override_settings(ECOMMERCE_API_URL=TEST_API_URL, ECOMMERCE_API_SIGNING_KEY=TEST_API_SIGNING_KEY)
@patch(
'lms.djangoapps.verify_student.views.checkout_with_ecommerce_service',
return_value=TEST_PAYMENT_DATA,
autospec=True,
)
class TestCreateOrderEcommerceService(CheckoutTestMixin, ModuleStoreTestCase):
""" Test view behavior when the ecommerce service is used. """
def make_sku(self):
""" Checkout is handled by the ecommerce service when the course mode's sku is nonempty. """
return uuid4().hex.decode('ascii')
def _get_checkout_args(self, patched_create_order):
""" Assuming patched_create_order was called, return a mapping containing the call arguments."""
return dict(zip(('user', 'course_key', 'course_mode', 'processor'), patched_create_order.call_args[0]))
@attr(shard=2)
class TestCheckoutWithEcommerceService(ModuleStoreTestCase):
"""
Ensures correct behavior in the function `checkout_with_ecommerce_service`.
"""
@httpretty.activate
@override_settings(ECOMMERCE_API_URL=TEST_API_URL, ECOMMERCE_API_SIGNING_KEY=TEST_API_SIGNING_KEY)
def test_create_basket(self):
"""
Check that when working with a product being processed by the
ecommerce api, we correctly call to that api to create a basket.
"""
user = UserFactory.create(username="test-username")
course_mode = CourseModeFactory.create(sku="test-sku").to_tuple() # pylint: disable=no-member
expected_payment_data = {'foo': 'bar'}
# mock out the payment processors endpoint
httpretty.register_uri(
httpretty.POST,
"{}/baskets/".format(TEST_API_URL),
body=json.dumps({'payment_data': expected_payment_data}),
content_type="application/json",
)
with mock.patch('lms.djangoapps.verify_student.views.audit_log') as mock_audit_log:
# Call the function
actual_payment_data = checkout_with_ecommerce_service(
user,
'dummy-course-key',
course_mode,
'test-processor'
)
# Verify that an audit message was logged
self.assertTrue(mock_audit_log.called)
# Check the api call
self.assertEqual(json.loads(httpretty.last_request().body), {
'products': [{'sku': 'test-sku'}],
'checkout': True,
'payment_processor_name': 'test-processor',
})
# Check the response
self.assertEqual(actual_payment_data, expected_payment_data)
@attr(shard=2)
class TestCreateOrderView(ModuleStoreTestCase):
"""
Tests for the create_order view of verified course enrollment process.
"""
def setUp(self):
super(TestCreateOrderView, self).setUp()
self.user = UserFactory.create(username="rusty", password="test")
self.client.login(username="rusty", password="test")
self.course_id = 'Robot/999/Test_Course'
self.course = CourseFactory.create(org='Robot', number='999', display_name='Test Course')
verified_mode = CourseMode(
course_id=SlashSeparatedCourseKey("Robot", "999", 'Test_Course'),
mode_slug="verified",
mode_display_name="Verified Certificate",
min_price=50
)
verified_mode.save()
course_mode_post_data = {
'certificate_mode': 'Select Certificate',
'contribution': 50,
'contribution-other-amt': '',
'explain': ''
}
self.client.post(
reverse("course_modes_choose", kwargs={'course_id': self.course_id}),
course_mode_post_data
)
@patch.dict(settings.FEATURES, {'AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING': True})
def test_invalid_amount(self):
response = self._create_order('1.a', self.course_id, expect_status_code=400)
self.assertIn('Selected price is not valid number.', response.content)
@patch.dict(settings.FEATURES, {'AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING': True})
def test_invalid_mode(self):
# Create a course that does not have a verified mode
course_id = 'Fake/999/Test_Course'
CourseFactory.create(org='Fake', number='999', display_name='Test Course')
response = self._create_order('50', course_id, expect_status_code=400)
self.assertIn('This course doesn\'t support paid certificates', response.content)
@patch.dict(settings.FEATURES, {'AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING': True})
def test_create_order_fail_with_get(self):
create_order_post_data = {
'contribution': 50,
'course_id': self.course_id,
}
# Use the wrong HTTP method
response = self.client.get(reverse('verify_student_create_order'), create_order_post_data)
self.assertEqual(response.status_code, 405)
@patch.dict(settings.FEATURES, {'AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING': True})
def test_create_order_success(self):
response = self._create_order(50, self.course_id)
json_response = json.loads(response.content)
self.assertIsNotNone(json_response['payment_form_data'].get('orderNumber')) # TODO not canonical
# Verify that the order exists and is configured correctly
order = Order.objects.get(user=self.user)
self.assertEqual(order.status, 'paying')
item = CertificateItem.objects.get(order=order)
self.assertEqual(item.status, 'paying')
self.assertEqual(item.course_id, self.course.id)
self.assertEqual(item.mode, 'verified')
def _create_order(self, contribution, course_id, expect_success=True, expect_status_code=200):
"""Create a new order.
Arguments:
contribution (int): The contribution amount.
course_id (CourseKey): The course to purchase.
Keyword Arguments:
expect_success (bool): If True, verify that the response was successful.
expect_status_code (int): The expected HTTP status code
Returns:
HttpResponse
"""
url = reverse('verify_student_create_order')
data = {
'contribution': contribution,
'course_id': course_id,
'processor': None,
}
response = self.client.post(url, data)
self.assertEqual(response.status_code, expect_status_code)
if expect_status_code == 200:
json_response = json.loads(response.content)
if expect_success:
self.assertEqual(set(json_response.keys()), PAYMENT_DATA_KEYS)
else:
self.assertFalse(json_response['success'])
return response
@attr(shard=2)
@ddt.ddt
@patch.dict(settings.FEATURES, {'AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING': True})
class TestSubmitPhotosForVerification(TestCase):
"""
Tests for submitting photos for verification.
"""
USERNAME = "test_user"
PASSWORD = "test_password"
IMAGE_DATA = "abcd,1234"
FULL_NAME = u"Ḟüḷḷ Ṅäṁë"
def setUp(self):
super(TestSubmitPhotosForVerification, self).setUp()
self.user = UserFactory.create(username=self.USERNAME, password=self.PASSWORD)
result = self.client.login(username=self.USERNAME, password=self.PASSWORD)
self.assertTrue(result, msg="Could not log in")
def test_submit_photos(self):
# Submit the photos
self._submit_photos(
face_image=self.IMAGE_DATA,
photo_id_image=self.IMAGE_DATA
)
# Verify that the attempt is created in the database
attempt = SoftwareSecurePhotoVerification.objects.get(user=self.user)
self.assertEqual(attempt.status, "submitted")
# Verify that the user's name wasn't changed
self._assert_user_name(self.user.profile.name)
def test_submit_photos_and_change_name(self):
# Submit the photos, along with a name change
self._submit_photos(
face_image=self.IMAGE_DATA,
photo_id_image=self.IMAGE_DATA,
full_name=self.FULL_NAME
)
# Check that the user's name was changed in the database
self._assert_user_name(self.FULL_NAME)
def test_submit_photos_sends_confirmation_email(self):
self._submit_photos(
face_image=self.IMAGE_DATA,
photo_id_image=self.IMAGE_DATA
)
self._assert_confirmation_email(True)
def test_submit_photos_error_does_not_send_email(self):
# The request fails because of invalid parameters, so no confirmation
# email should be sent.
self._submit_photos(expected_status_code=400)
self._assert_confirmation_email(False)
# Disable auto-auth since we will be intercepting POST requests
# to the verification service ourselves in this test.
@patch.dict(settings.FEATURES, {'AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING': False})
@override_settings(VERIFY_STUDENT={
"SOFTWARE_SECURE": {
"API_URL": "https://verify.example.com/submit/",
"API_ACCESS_KEY": "dcf291b5572942f99adaab4c2090c006",
"API_SECRET_KEY": "c392efdcc0354c5f922dc39844ec0dc7",
"FACE_IMAGE_AES_KEY": "f82400259e3b4f88821cd89838758292",
"RSA_PUBLIC_KEY": (
"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDkgtz3fQdiXshy/RfOHkoHlhx/"
"SSPZ+nNyE9JZXtwhlzsXjnu+e9GOuJzgh4kUqo73ePIG5FxVU+mnacvufq2cu1SOx"
"lRYGyBK7qDf9Ym67I5gmmcNhbzdKcluAuDCPmQ4ecKpICQQldrDQ9HWDxwjbbcqpVB"
"PYWkE1KrtypGThmcehLmabf6SPq1CTAGlXsHgUtbWCwV6mqR8yScV0nRLln0djLDm9d"
"L8tIVFFVpAfBaYYh2Cm5EExQZjxyfjWd8P5H+8/l0pmK2jP7Hc0wuXJemIZbsdm+DSD"
"FhCGY3AILGkMwr068dGRxfBtBy/U9U5W+nStvkDdMrSgQezS5+V [email protected]"
),
"AWS_ACCESS_KEY": "c987c7efe35c403caa821f7328febfa1",
"AWS_SECRET_KEY": "fc595fc657c04437bb23495d8fe64881",
"S3_BUCKET": "test.example.com",
},
"DAYS_GOOD_FOR": 10,
})
@httpretty.activate
@moto.mock_s3
def test_submit_photos_for_reverification(self):
# Create the S3 bucket for photo upload
conn = boto.connect_s3()
conn.create_bucket("test.example.com")
# Mock the POST to Software Secure
httpretty.register_uri(httpretty.POST, "https://verify.example.com/submit/")
# Submit an initial verification attempt
self._submit_photos(
face_image=self.IMAGE_DATA + "4567",
photo_id_image=self.IMAGE_DATA + "8910",
)
initial_data = self._get_post_data()
# Submit a face photo for re-verification
self._submit_photos(face_image=self.IMAGE_DATA + "1112")
reverification_data = self._get_post_data()
# Verify that the initial attempt sent the same ID photo as the reverification attempt
self.assertEqual(initial_data["PhotoIDKey"], reverification_data["PhotoIDKey"])
initial_photo_response = requests.get(initial_data["PhotoID"])
self.assertEqual(initial_photo_response.status_code, 200)
reverification_photo_response = requests.get(reverification_data["PhotoID"])
self.assertEqual(reverification_photo_response.status_code, 200)
self.assertEqual(initial_photo_response.content, reverification_photo_response.content)
# Verify that the second attempt sent the updated face photo
initial_photo_response = requests.get(initial_data["UserPhoto"])
self.assertEqual(initial_photo_response.status_code, 200)
reverification_photo_response = requests.get(reverification_data["UserPhoto"])
self.assertEqual(reverification_photo_response.status_code, 200)
self.assertNotEqual(initial_photo_response.content, reverification_photo_response.content)
# Submit a new face photo and photo id for verification
self._submit_photos(
face_image=self.IMAGE_DATA + "9999",
photo_id_image=self.IMAGE_DATA + "1111",
)
two_photo_reverification_data = self._get_post_data()
# Verify that the two-photo reverification attempt sent a new ID photo
self.assertNotEqual(initial_data["PhotoIDKey"], two_photo_reverification_data["PhotoIDKey"])
@ddt.data('face_image', 'photo_id_image')
def test_invalid_image_data(self, invalid_param):
params = {
'face_image': self.IMAGE_DATA,
'photo_id_image': self.IMAGE_DATA
}
params[invalid_param] = ""
response = self._submit_photos(expected_status_code=400, **params)
self.assertEqual(response.content, "Image data is not valid.")
def test_invalid_name(self):
response = self._submit_photos(
face_image=self.IMAGE_DATA,
photo_id_image=self.IMAGE_DATA,
full_name="a",
expected_status_code=400
)
self.assertEqual(response.content, "Name must be at least 2 characters long.")
def test_missing_required_param(self):
# Missing face image parameter
params = {
'photo_id_image': self.IMAGE_DATA
}
response = self._submit_photos(expected_status_code=400, **params)
self.assertEqual(response.content, "Missing required parameter face_image")
def test_no_photo_id_and_no_initial_verification(self):
# Submit face image data, but not photo ID data.
# Since the user doesn't have an initial verification attempt, this should fail
response = self._submit_photos(expected_status_code=400, face_image=self.IMAGE_DATA)
self.assertEqual(
response.content,
"Photo ID image is required if the user does not have an initial verification attempt."
)
# Create the initial verification attempt with a dummy value
# for the 'photo_id_key' field
self._submit_photos(
face_image=self.IMAGE_DATA,
photo_id_image=self.IMAGE_DATA,
)
attempt = SoftwareSecurePhotoVerification.objects.get(user=self.user)
attempt.photo_id_key = "dummy_photo_id_key"
attempt.save()
# Now the request should succeed
self._submit_photos(face_image=self.IMAGE_DATA)
def _submit_photos(self, face_image=None, photo_id_image=None, full_name=None, expected_status_code=200):
"""Submit photos for verification.
Keyword Arguments:
face_image (str): The base-64 encoded face image data.
photo_id_image (str): The base-64 encoded ID image data.
full_name (unicode): The full name of the user, if the user is changing it.
expected_status_code (int): The expected response status code.
Returns:
HttpResponse
"""
url = reverse("verify_student_submit_photos")
params = {}
if face_image is not None:
params['face_image'] = face_image
if photo_id_image is not None:
params['photo_id_image'] = photo_id_image
if full_name is not None:
params['full_name'] = full_name
response = self.client.post(url, params)
self.assertEqual(response.status_code, expected_status_code)
return response
def _assert_confirmation_email(self, expect_email):
"""
Check that a confirmation email was or was not sent.
"""
if expect_email:
# Verify that photo submission confirmation email was sent
self.assertEqual(len(mail.outbox), 1)
self.assertEqual("Verification photos received", mail.outbox[0].subject)
else:
# Verify that photo submission confirmation email was not sent
self.assertEqual(len(mail.outbox), 0)
def _assert_user_name(self, full_name):
"""Check the user's name.
Arguments:
full_name (unicode): The user's full name.
Raises:
AssertionError
"""
request = RequestFactory().get('/url')
request.user = self.user
account_settings = get_account_settings(request)[0]
self.assertEqual(account_settings['name'], full_name)
def _get_post_data(self):
"""Retrieve POST data from the last request. """
last_request = httpretty.last_request()
return json.loads(last_request.body)
@attr(shard=2)
class TestPhotoVerificationResultsCallback(ModuleStoreTestCase):
"""
Tests for the results_callback view.
"""
def setUp(self):
super(TestPhotoVerificationResultsCallback, self).setUp()
self.course = CourseFactory.create(org='Robot', number='999', display_name='Test Course')
self.course_id = self.course.id
self.user = UserFactory.create()
self.attempt = SoftwareSecurePhotoVerification(
status="submitted",
user=self.user
)
self.attempt.save()
self.receipt_id = self.attempt.receipt_id
self.client = Client()
def mocked_has_valid_signature(method, headers_dict, body_dict, access_key, secret_key): # pylint: disable=no-self-argument, unused-argument
"""
Used as a side effect when mocking `verify_student.ssencrypt.has_valid_signature`.
"""
return True
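# Wiring this function in as a Mock side_effect (see the @mock.patch
# decorators below) makes every signature check pass, so these tests can
# focus on payload handling rather than HMAC validation.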
def test_invalid_json(self):
"""
Test for invalid JSON being posted by Software Secure.
"""
data = {"Testing invalid"}
response = self.client.post(
reverse('verify_student_results_callback'),
data=data,
content_type='application/json',
HTTP_AUTHORIZATION='test BBBBBBBBBBBBBBBBBBBB: testing',
HTTP_DATE='testdate'
)
self.assertIn('Invalid JSON', response.content)
self.assertEqual(response.status_code, 400)
def test_invalid_dict(self):
"""
Test for an invalid dictionary being posted by Software Secure.
"""
data = '"\\"Test\\tTesting"'
response = self.client.post(
reverse('verify_student_results_callback'),
data=data,
content_type='application/json',
HTTP_AUTHORIZATION='test BBBBBBBBBBBBBBBBBBBB:testing',
HTTP_DATE='testdate'
)
self.assertIn('JSON should be dict', response.content)
self.assertEqual(response.status_code, 400)
@mock.patch(
'lms.djangoapps.verify_student.ssencrypt.has_valid_signature',
mock.Mock(side_effect=mocked_has_valid_signature)
)
def test_invalid_access_key(self):
"""
Test for invalid access key.
"""
data = {
"EdX-ID": self.receipt_id,
"Result": "Testing",
"Reason": "Testing",
"MessageType": "Testing"
}
json_data = json.dumps(data)
response = self.client.post(
reverse('verify_student_results_callback'),
data=json_data,
content_type='application/json',
HTTP_AUTHORIZATION='test testing:testing',
HTTP_DATE='testdate'
)
self.assertIn('Access key invalid', response.content)
self.assertEqual(response.status_code, 400)
@mock.patch(
'lms.djangoapps.verify_student.ssencrypt.has_valid_signature',
mock.Mock(side_effect=mocked_has_valid_signature)
)
def test_wrong_edx_id(self):
"""
Test for a wrong ID on a Software Secure verification attempt.
"""
data = {
"EdX-ID": "Invalid-Id",
"Result": "Testing",
"Reason": "Testing",
"MessageType": "Testing"
}
json_data = json.dumps(data)
response = self.client.post(
reverse('verify_student_results_callback'),
data=json_data,
content_type='application/json',
HTTP_AUTHORIZATION='test BBBBBBBBBBBBBBBBBBBB:testing',
HTTP_DATE='testdate'
)
self.assertIn('edX ID Invalid-Id not found', response.content)
self.assertEqual(response.status_code, 400)
@mock.patch(
'lms.djangoapps.verify_student.ssencrypt.has_valid_signature',
mock.Mock(side_effect=mocked_has_valid_signature)
)
def test_pass_result(self):
"""
Test for verification passed.
"""
data = {
"EdX-ID": self.receipt_id,
"Result": "PASS",
"Reason": "",
"MessageType": "You have been verified."
}
json_data = json.dumps(data)
response = self.client.post(
reverse('verify_student_results_callback'), data=json_data,
content_type='application/json',
HTTP_AUTHORIZATION='test BBBBBBBBBBBBBBBBBBBB:testing',
HTTP_DATE='testdate'
)
attempt = SoftwareSecurePhotoVerification.objects.get(receipt_id=self.receipt_id)
self.assertEqual(attempt.status, u'approved')
self.assertEquals(response.content, 'OK!')
@mock.patch(
'lms.djangoapps.verify_student.ssencrypt.has_valid_signature',
mock.Mock(side_effect=mocked_has_valid_signature)
)
def test_fail_result(self):
"""
Test for failed verification.
"""
data = {
"EdX-ID": self.receipt_id,
"Result": 'FAIL',
"Reason": 'Invalid photo',
"MessageType": 'Your photo doesn\'t meet standards.'
}
json_data = json.dumps(data)
response = self.client.post(
reverse('verify_student_results_callback'),
data=json_data,
content_type='application/json',
HTTP_AUTHORIZATION='test BBBBBBBBBBBBBBBBBBBB:testing',
HTTP_DATE='testdate'
)
attempt = SoftwareSecurePhotoVerification.objects.get(receipt_id=self.receipt_id)
self.assertEqual(attempt.status, u'denied')
self.assertEqual(attempt.error_code, u'Your photo doesn\'t meet standards.')
self.assertEqual(attempt.error_msg, u'"Invalid photo"')
self.assertEquals(response.content, 'OK!')
@mock.patch(
'lms.djangoapps.verify_student.ssencrypt.has_valid_signature',
mock.Mock(side_effect=mocked_has_valid_signature)
)
def test_system_fail_result(self):
"""
Test for a Software Secure system failure result.
"""
data = {"EdX-ID": self.receipt_id,
"Result": 'SYSTEM FAIL',
"Reason": 'Memory overflow',
"MessageType": 'You must retry the verification.'}
json_data = json.dumps(data)
response = self.client.post(
reverse('verify_student_results_callback'),
data=json_data,
content_type='application/json',
HTTP_AUTHORIZATION='test BBBBBBBBBBBBBBBBBBBB:testing',
HTTP_DATE='testdate'
)
attempt = SoftwareSecurePhotoVerification.objects.get(receipt_id=self.receipt_id)
self.assertEqual(attempt.status, u'must_retry')
self.assertEqual(attempt.error_code, u'You must retry the verification.')
self.assertEqual(attempt.error_msg, u'"Memory overflow"')
self.assertEquals(response.content, 'OK!')
@mock.patch(
'lms.djangoapps.verify_student.ssencrypt.has_valid_signature',
mock.Mock(side_effect=mocked_has_valid_signature)
)
def test_unknown_result(self):
"""
Test for an unknown Software Secure result.
"""
data = {
"EdX-ID": self.receipt_id,
"Result": 'Unknown',
"Reason": 'Unknown reason',
"MessageType": 'Unknown message'
}
json_data = json.dumps(data)
response = self.client.post(
reverse('verify_student_results_callback'),
data=json_data,
content_type='application/json',
HTTP_AUTHORIZATION='test BBBBBBBBBBBBBBBBBBBB:testing',
HTTP_DATE='testdate'
)
self.assertIn('Result Unknown not understood', response.content)
@mock.patch(
'lms.djangoapps.verify_student.ssencrypt.has_valid_signature',
mock.Mock(side_effect=mocked_has_valid_signature)
)
def test_in_course_reverify_disabled(self):
"""
Test that a passing result triggers no reverification status or email when in-course reverification is not configured.
"""
data = {
"EdX-ID": self.receipt_id,
"Result": "PASS",
"Reason": "",
"MessageType": "You have been verified."
}
json_data = json.dumps(data)
response = self.client.post(
reverse('verify_student_results_callback'), data=json_data,
content_type='application/json',
HTTP_AUTHORIZATION='test BBBBBBBBBBBBBBBBBBBB:testing',
HTTP_DATE='testdate'
)
attempt = SoftwareSecurePhotoVerification.objects.get(receipt_id=self.receipt_id)
self.assertEqual(attempt.status, u'approved')
self.assertEquals(response.content, 'OK!')
# Verify that the photo submission confirmation email was not sent
self.assertEqual(len(mail.outbox), 0)
user_status = VerificationStatus.objects.filter(user=self.user).count()
self.assertEqual(user_status, 0)
@mock.patch(
'lms.djangoapps.verify_student.ssencrypt.has_valid_signature',
mock.Mock(side_effect=mocked_has_valid_signature)
)
def test_pass_in_course_reverify_result(self):
"""
Test that a passing in-course reverification result sends the status email.
"""
# Verify that ICRV status email was sent when config is enabled
IcrvStatusEmailsConfiguration.objects.create(enabled=True)
self.create_reverification_xblock()
data = {
"EdX-ID": self.receipt_id,
"Result": "PASS",
"Reason": "",
"MessageType": "You have been verified."
}
json_data = json.dumps(data)
response = self.client.post(
reverse('verify_student_results_callback'), data=json_data,
content_type='application/json',
HTTP_AUTHORIZATION='test BBBBBBBBBBBBBBBBBBBB:testing',
HTTP_DATE='testdate'
)
attempt = SoftwareSecurePhotoVerification.objects.get(receipt_id=self.receipt_id)
self.assertEqual(attempt.status, u'approved')
self.assertEquals(response.content, 'OK!')
self.assertEqual(len(mail.outbox), 1)
self.assertEqual("Re-verification Status", mail.outbox[0].subject)
@mock.patch('verify_student.ssencrypt.has_valid_signature', mock.Mock(side_effect=mocked_has_valid_signature))
def test_icrv_status_email_with_disable_config(self):
"""
Verify that the photo re-verification status email is not sent when the config is disabled
"""
IcrvStatusEmailsConfiguration.objects.create(enabled=False)
self.create_reverification_xblock()
data = {
"EdX-ID": self.receipt_id,
"Result": "PASS",
"Reason": "",
"MessageType": "You have been verified."
}
json_data = json.dumps(data)
response = self.client.post(
reverse('verify_student_results_callback'), data=json_data,
content_type='application/json',
HTTP_AUTHORIZATION='test BBBBBBBBBBBBBBBBBBBB:testing',
HTTP_DATE='testdate'
)
attempt = SoftwareSecurePhotoVerification.objects.get(receipt_id=self.receipt_id)
self.assertEqual(attempt.status, u'approved')
self.assertEquals(response.content, 'OK!')
self.assertEqual(len(mail.outbox), 0)
@mock.patch('lms.djangoapps.verify_student.views._send_email')
@mock.patch(
'lms.djangoapps.verify_student.ssencrypt.has_valid_signature',
mock.Mock(side_effect=mocked_has_valid_signature)
)
def test_reverification_on_callback(self, mock_send_email):
"""
Test software secure callback flow for re-verification.
"""
IcrvStatusEmailsConfiguration.objects.create(enabled=True)
# Create the 'edx-reverification-block' in course tree
self.create_reverification_xblock()
# create dummy data for software secure photo verification result callback
data = {
"EdX-ID": self.receipt_id,
"Result": "PASS",
"Reason": "",
"MessageType": "You have been verified."
}
json_data = json.dumps(data)
response = self.client.post(
reverse('verify_student_results_callback'),
data=json_data,
content_type='application/json',
HTTP_AUTHORIZATION='test BBBBBBBBBBBBBBBBBBBB:testing',
HTTP_DATE='testdate'
)
self.assertEqual(response.content, 'OK!')
# now check that '_send_email' method is called on result callback
# with required parameters
subject = "Re-verification Status"
mock_send_email.assert_called_once_with(self.user.id, subject, ANY)
def create_reverification_xblock(self):
"""
Create the reverification XBlock.
"""
# Create the 'edx-reverification-block' in course tree
section = ItemFactory.create(parent=self.course, category='chapter', display_name='Test Section')
subsection = ItemFactory.create(parent=section, category='sequential', display_name='Test Subsection')
vertical = ItemFactory.create(parent=subsection, category='vertical', display_name='Test Unit')
reverification = ItemFactory.create(
parent=vertical,
category='edx-reverification-block',
display_name='Test Verification Block'
)
# Create checkpoint
checkpoint = VerificationCheckpoint(course_id=self.course_id, checkpoint_location=reverification.location)
checkpoint.save()
# Add a re-verification attempt
checkpoint.add_verification_attempt(self.attempt)
# Add a re-verification attempt status for the user
VerificationStatus.add_verification_status(checkpoint, self.user, "submitted")
@attr(shard=2)
class TestReverifyView(TestCase):
"""
Tests for the reverification view.
Reverification occurs when a verification attempt is denied or expired,
and the student is given the option to resubmit.
"""
USERNAME = "shaftoe"
PASSWORD = "detachment-2702"
def setUp(self):
super(TestReverifyView, self).setUp()
self.user = UserFactory.create(username=self.USERNAME, password=self.PASSWORD)
success = self.client.login(username=self.USERNAME, password=self.PASSWORD)
self.assertTrue(success, msg="Could not log in")
def test_reverify_view_can_do_initial_verification(self):
"""
Test that a user can use the reverify link for initial verification.
"""
self._assert_can_reverify()
def test_reverify_view_can_reverify_denied(self):
# User has a denied attempt, so can reverify
attempt = SoftwareSecurePhotoVerification.objects.create(user=self.user)
attempt.mark_ready()
attempt.submit()
attempt.deny("error")
self._assert_can_reverify()
def test_reverify_view_can_reverify_expired(self):
# User has a verification attempt, but it's expired
attempt = SoftwareSecurePhotoVerification.objects.create(user=self.user)
attempt.mark_ready()
attempt.submit()
attempt.approve()
days_good_for = settings.VERIFY_STUDENT["DAYS_GOOD_FOR"]
attempt.created_at = datetime.now(pytz.UTC) - timedelta(days=(days_good_for + 1))
attempt.save()
# Allow the student to reverify
self._assert_can_reverify()
def test_reverify_view_can_reverify_pending(self):
""" Test that the user can still re-verify even if the previous photo
verification is in pending state.
A photo verification is considered in pending state when the user has
either submitted the photo verification (status in database: 'submitted')
or photo verification submission failed (status in database: 'must_retry').
"""
# User has submitted a verification attempt, but Software Secure has not yet responded
attempt = SoftwareSecurePhotoVerification.objects.create(user=self.user)
attempt.mark_ready()
attempt.submit()
# Can re-verify because an attempt has already been submitted.
self._assert_can_reverify()
def test_reverify_view_cannot_reverify_approved(self):
# Submitted attempt has been approved
attempt = SoftwareSecurePhotoVerification.objects.create(user=self.user)
attempt.mark_ready()
attempt.submit()
attempt.approve()
# Cannot reverify because the user is already verified.
self._assert_cannot_reverify()
def _get_reverify_page(self):
"""
Retrieve the reverification page and return the response.
"""
url = reverse("verify_student_reverify")
return self.client.get(url)
def _assert_can_reverify(self):
"""
Check that the reverification flow is rendered.
"""
response = self._get_reverify_page()
self.assertContains(response, "reverify-container")
def _assert_cannot_reverify(self):
"""
Check that the user is blocked from reverifying.
"""
response = self._get_reverify_page()
self.assertContains(response, "reverify-blocked")
@attr(shard=2)
class TestInCourseReverifyView(ModuleStoreTestCase):
"""
Tests for the incourse reverification views.
"""
IMAGE_DATA = "abcd,1234"
def build_course(self):
"""
Build up a course tree with a Reverification XBlock.
"""
self.course_key = SlashSeparatedCourseKey("Robot", "999", "Test_Course")
self.course = CourseFactory.create(org='Robot', number='999', display_name='Test Course')
# Create the course modes
for mode in ('audit', 'honor', 'verified'):
min_price = 0 if mode in ["honor", "audit"] else 1
CourseModeFactory.create(mode_slug=mode, course_id=self.course_key, min_price=min_price)
# Create the 'edx-reverification-block' in course tree
section = ItemFactory.create(parent=self.course, category='chapter', display_name='Test Section')
subsection = ItemFactory.create(parent=section, category='sequential', display_name='Test Subsection')
vertical = ItemFactory.create(parent=subsection, category='vertical', display_name='Test Unit')
self.reverification = ItemFactory.create(
parent=vertical,
category='edx-reverification-block',
display_name='Test Verification Block'
)
self.section_location = section.location
self.subsection_location = subsection.location
self.vertical_location = vertical.location
self.reverification_location = unicode(self.reverification.location)
self.reverification_assessment = self.reverification.related_assessment
def setUp(self):
super(TestInCourseReverifyView, self).setUp()
self.build_course()
self.user = UserFactory.create(username="rusty", password="test")
self.client.login(username="rusty", password="test")
# Enroll the user in the course in the verified mode
CourseEnrollment.enroll(self.user, self.course_key, mode="verified")
# Mocking and patching for BI (business intelligence) tracking events
analytics_patcher = patch('lms.djangoapps.verify_student.views.analytics')
self.mock_tracker = analytics_patcher.start()
self.addCleanup(analytics_patcher.stop)
@patch.dict(settings.FEATURES, {'AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING': True})
def test_incourse_reverify_invalid_checkpoint_get(self):
# Retrieve a checkpoint that doesn't yet exist
response = self.client.get(self._get_url(self.course_key, "invalid_checkpoint"))
self.assertEqual(response.status_code, 404)
@patch.dict(settings.FEATURES, {'AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING': True})
def test_incourse_reverify_initial_redirect_get(self):
self._create_checkpoint()
response = self.client.get(self._get_url(self.course_key, self.reverification_location))
url = reverse('verify_student_verify_now', kwargs={"course_id": unicode(self.course_key)})
url += u"?{params}".format(params=urllib.urlencode({"checkpoint": self.reverification_location}))
self.assertRedirects(response, url)
@override_settings(LMS_SEGMENT_KEY="foobar")
@patch.dict(settings.FEATURES, {'AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING': True})
def test_incourse_reverify_get(self):
"""
Test incourse reverification.
"""
self._create_checkpoint()
self._create_initial_verification()
response = self.client.get(self._get_url(self.course_key, self.reverification_location))
self.assertEquals(response.status_code, 200)
# Verify that the Google Analytics event fires when the in-course
# reverification page is loaded
self.mock_tracker.track.assert_called_once_with( # pylint: disable=no-member
self.user.id,
'edx.bi.reverify.started',
{
'category': "verification",
'label': unicode(self.course_key),
'checkpoint': self.reverification_assessment
},
context={
'ip': '127.0.0.1',
'Google Analytics':
{'clientId': None}
}
)
self.mock_tracker.reset_mock()
@patch.dict(settings.FEATURES, {'AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING': True})
def test_checkpoint_post(self):
"""Verify that POST requests including an invalid checkpoint location
results in a 400 response.
"""
response = self._submit_photos(self.course_key, self.reverification_location, self.IMAGE_DATA)
self.assertEquals(response.status_code, 400)
@patch.dict(settings.FEATURES, {'AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING': True})
def test_incourse_reverify_id_required_if_no_initial_verification(self):
self._create_checkpoint()
# Since the user has no initial verification and we're not sending the ID photo,
# we should expect a 400 bad request
response = self._submit_photos(self.course_key, self.reverification_location, self.IMAGE_DATA)
self.assertEqual(response.status_code, 400)
@patch.dict(settings.FEATURES, {'AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING': True})
def test_incourse_reverify_index_error_post(self):
self._create_checkpoint()
self._create_initial_verification()
response = self._submit_photos(self.course_key, self.reverification_location, "")
self.assertEqual(response.status_code, 400)
@override_settings(LMS_SEGMENT_KEY="foobar")
@patch.dict(settings.FEATURES, {'AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING': True})
def test_incourse_reverify_post(self):
self._create_checkpoint()
self._create_initial_verification()
response = self._submit_photos(self.course_key, self.reverification_location, self.IMAGE_DATA)
self.assertEqual(response.status_code, 200)
# Check that the checkpoint status has been updated
status = VerificationStatus.get_user_status_at_checkpoint(
self.user, self.course_key, self.reverification_location
)
self.assertEqual(status, "submitted")
# Test that the Google Analytics event fires after successfully
# submitting the photo verification
self.mock_tracker.track.assert_called_once_with( # pylint: disable=no-member
self.user.id,
'edx.bi.reverify.submitted',
{
'category': "verification",
'label': unicode(self.course_key),
'checkpoint': self.reverification_assessment
},
context={
'ip': '127.0.0.1',
'Google Analytics':
{'clientId': None}
}
)
self.mock_tracker.reset_mock()
def _create_checkpoint(self):
"""
Helper method for creating a reverification checkpoint.
"""
checkpoint = VerificationCheckpoint(course_id=self.course_key, checkpoint_location=self.reverification_location)
checkpoint.save()
def _create_initial_verification(self):
"""
Helper method for creating an initial verification attempt.
"""
attempt = SoftwareSecurePhotoVerification(user=self.user, photo_id_key="dummy_photo_id_key")
attempt.mark_ready()
attempt.save()
attempt.submit()
def _get_url(self, course_key, checkpoint_location):
"""
Construct the reverification url.
Arguments:
course_key (unicode): The ID of the course
checkpoint_location (str): Location of verification checkpoint
Returns:
url
"""
return reverse(
'verify_student_incourse_reverify',
kwargs={
"course_id": unicode(course_key),
"usage_id": checkpoint_location
}
)
def _submit_photos(self, course_key, checkpoint_location, face_image_data):
""" Submit photos for verification. """
url = reverse("verify_student_submit_photos")
data = {
"course_key": unicode(course_key),
"checkpoint": checkpoint_location,
"face_image": face_image_data,
}
return self.client.post(url, data)
@attr(shard=2)
class TestEmailMessageWithCustomICRVBlock(ModuleStoreTestCase):
"""
Test email sending on re-verification
"""
def build_course(self):
"""
Build up a course tree with a Reverification XBlock.
"""
self.course_key = SlashSeparatedCourseKey("Robot", "999", "Test_Course")
self.course = CourseFactory.create(org='Robot', number='999', display_name='Test Course')
self.due_date = datetime.now(pytz.UTC) + timedelta(days=20)
self.allowed_attempts = 1
# Create the course modes
for mode in ('audit', 'honor', 'verified'):
min_price = 0 if mode in ["honor", "audit"] else 1
CourseModeFactory.create(mode_slug=mode, course_id=self.course_key, min_price=min_price)
# Create the 'edx-reverification-block' in course tree
section = ItemFactory.create(parent=self.course, category='chapter', display_name='Test Section')
subsection = ItemFactory.create(parent=section, category='sequential', display_name='Test Subsection')
vertical = ItemFactory.create(parent=subsection, category='vertical', display_name='Test Unit')
self.reverification = ItemFactory.create(
parent=vertical,
category='edx-reverification-block',
display_name='Test Verification Block',
metadata={'attempts': self.allowed_attempts, 'due': self.due_date}
)
self.section_location = section.location
self.subsection_location = subsection.location
self.vertical_location = vertical.location
self.reverification_location = unicode(self.reverification.location)
self.assessment = self.reverification.related_assessment
self.re_verification_link = reverse(
'verify_student_incourse_reverify',
args=(
unicode(self.course_key),
self.reverification_location
)
)
def setUp(self):
"""
Setup method for testing photo verification email messages.
"""
super(TestEmailMessageWithCustomICRVBlock, self).setUp()
self.build_course()
self.check_point = VerificationCheckpoint.objects.create(
course_id=self.course.id, checkpoint_location=self.reverification_location
)
self.check_point.add_verification_attempt(SoftwareSecurePhotoVerification.objects.create(user=self.user))
VerificationStatus.add_verification_status(
checkpoint=self.check_point,
user=self.user,
status='submitted'
)
self.attempt = SoftwareSecurePhotoVerification.objects.filter(user=self.user)
location_id = VerificationStatus.get_location_id(self.attempt)
usage_key = UsageKey.from_string(location_id)
redirect_url = get_redirect_url(self.course_key, usage_key.replace(course_key=self.course_key))
self.request = RequestFactory().get('/url')
self.course_link = self.request.build_absolute_uri(redirect_url)
def test_approved_email_message(self):
subject, body = _compose_message_reverification_email(
self.course.id, self.user.id, self.reverification_location, "approved", self.request
)
self.assertIn(
"We have successfully verified your identity for the {assessment} "
"assessment in the {course_name} course.".format(
assessment=self.assessment,
course_name=self.course.display_name_with_default_escaped
),
body
)
self.check_courseware_link_exists(body)
self.assertIn("Re-verification Status", subject)
def test_denied_email_message_with_valid_due_date_and_attempts_allowed(self):
subject, body = _compose_message_reverification_email(
self.course.id, self.user.id, self.reverification_location, "denied", self.request
)
self.assertIn(
"We could not verify your identity for the {assessment} assessment "
"in the {course_name} course. You have used "
"{used_attempts} out of {allowed_attempts} attempts to "
"verify your identity".format(
course_name=self.course.display_name_with_default_escaped,
assessment=self.assessment,
used_attempts=1,
allowed_attempts=self.allowed_attempts + 1
),
body
)
self.assertIn(
"You must verify your identity before the assessment "
"closes on {due_date}".format(
due_date=get_default_time_display(self.due_date)
),
body
)
reverify_link = self.request.build_absolute_uri(self.re_verification_link)
self.assertIn(
"To try to verify your identity again, select the following link:",
body
)
self.assertIn(reverify_link, body)
self.assertIn("Re-verification Status", subject)
def test_denied_email_message_with_due_date_and_no_attempts(self):
""" Denied email message if due date is still open but user has no
attempts available.
"""
VerificationStatus.add_verification_status(
checkpoint=self.check_point,
user=self.user,
status='submitted'
)
__, body = _compose_message_reverification_email(
self.course.id, self.user.id, self.reverification_location, "denied", self.request
)
self.assertIn(
"We could not verify your identity for the {assessment} assessment "
"in the {course_name} course. You have used "
"{used_attempts} out of {allowed_attempts} attempts to "
"verify your identity, and verification is no longer "
"possible".format(
course_name=self.course.display_name_with_default_escaped,
assessment=self.assessment,
used_attempts=2,
allowed_attempts=self.allowed_attempts + 1
),
body
)
self.check_courseware_link_exists(body)
def test_denied_email_message_with_close_verification_dates(self):
# Due date given and expired
return_value = datetime.now(tz=pytz.UTC) + timedelta(days=22)
with patch.object(timezone, 'now', return_value=return_value):
__, body = _compose_message_reverification_email(
self.course.id, self.user.id, self.reverification_location, "denied", self.request
)
self.assertIn(
"We could not verify your identity for the {assessment} assessment "
"in the {course_name} course. You have used "
"{used_attempts} out of {allowed_attempts} attempts to "
"verify your identity, and verification is no longer "
"possible".format(
course_name=self.course.display_name_with_default_escaped,
assessment=self.assessment,
used_attempts=1,
allowed_attempts=self.allowed_attempts + 1
),
body
)
def test_check_num_queries(self):
# Get the re-verification block to check the call made
with check_mongo_calls(1):
ver_block = modulestore().get_item(self.reverification.location)
# Expect that the verification block is fetched
self.assertIsNotNone(ver_block)
def check_courseware_link_exists(self, body):
"""Checking courseware url and signature information of EDX"""
self.assertIn(
"To go to the courseware, select the following link:",
body
)
self.assertIn(
"{course_link}".format(
course_link=self.course_link
),
body
)
self.assertIn("Thanks,", body)
self.assertIn(
"The {platform_name} team".format(
platform_name=settings.PLATFORM_NAME
),
body
)
@attr(shard=2)
class TestEmailMessageWithDefaultICRVBlock(ModuleStoreTestCase):
"""
Test for In-course Re-verification
"""
def build_course(self):
"""
Build up a course tree with a Reverification XBlock.
"""
self.course_key = SlashSeparatedCourseKey("Robot", "999", "Test_Course")
self.course = CourseFactory.create(org='Robot', number='999', display_name='Test Course')
# Create the course modes
for mode in ('audit', 'honor', 'verified'):
min_price = 0 if mode in ["honor", "audit"] else 1
CourseModeFactory.create(mode_slug=mode, course_id=self.course_key, min_price=min_price)
# Create the 'edx-reverification-block' in course tree
section = ItemFactory.create(parent=self.course, category='chapter', display_name='Test Section')
subsection = ItemFactory.create(parent=section, category='sequential', display_name='Test Subsection')
vertical = ItemFactory.create(parent=subsection, category='vertical', display_name='Test Unit')
self.reverification = ItemFactory.create(
parent=vertical,
category='edx-reverification-block',
display_name='Test Verification Block'
)
self.section_location = section.location
self.subsection_location = subsection.location
self.vertical_location = vertical.location
self.reverification_location = unicode(self.reverification.location)
self.assessment = self.reverification.related_assessment
self.re_verification_link = reverse(
'verify_student_incourse_reverify',
args=(
unicode(self.course_key),
self.reverification_location
)
)
def setUp(self):
super(TestEmailMessageWithDefaultICRVBlock, self).setUp()
self.build_course()
self.check_point = VerificationCheckpoint.objects.create(
course_id=self.course.id, checkpoint_location=self.reverification_location
)
self.check_point.add_verification_attempt(SoftwareSecurePhotoVerification.objects.create(user=self.user))
self.attempt = SoftwareSecurePhotoVerification.objects.filter(user=self.user)
self.request = RequestFactory().get('/url')
def test_denied_email_message_with_no_attempt_allowed(self):
VerificationStatus.add_verification_status(
checkpoint=self.check_point,
user=self.user,
status='submitted'
)
__, body = _compose_message_reverification_email(
self.course.id, self.user.id, self.reverification_location, "denied", self.request
)
self.assertIn(
"We could not verify your identity for the {assessment} assessment "
"in the {course_name} course. You have used "
"{used_attempts} out of {allowed_attempts} attempts to "
"verify your identity, and verification is no longer "
"possible".format(
course_name=self.course.display_name_with_default_escaped,
assessment=self.assessment,
used_attempts=1,
allowed_attempts=1
),
body
)
def test_error_on_compose_email(self):
resp = _compose_message_reverification_email(
self.course.id, self.user.id, self.reverification_location, "denied", True
)
self.assertIsNone(resp)
| agpl-3.0 |
MIPS/external-chromium_org-tools-gyp | test/copies/gyptest-slash.py | 249 | 1433 | #!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies file copies with a trailing slash in the destination directory.
"""
import TestGyp
test = TestGyp.TestGyp()
test.run_gyp('copies-slash.gyp', chdir='src')
test.relocate('src', 'relocate/src')
test.build('copies-slash.gyp', chdir='relocate/src')
test.built_file_must_match('copies-out-slash/directory/file3',
'file3 contents\n',
chdir='relocate/src')
test.built_file_must_match('copies-out-slash/directory/file4',
'file4 contents\n',
chdir='relocate/src')
test.built_file_must_match('copies-out-slash/directory/subdir/file5',
'file5 contents\n',
chdir='relocate/src')
test.built_file_must_match('copies-out-slash-2/directory/file3',
'file3 contents\n',
chdir='relocate/src')
test.built_file_must_match('copies-out-slash-2/directory/file4',
'file4 contents\n',
chdir='relocate/src')
test.built_file_must_match('copies-out-slash-2/directory/subdir/file5',
'file5 contents\n',
chdir='relocate/src')
test.pass_test()
| bsd-3-clause |
dolphin-emu/dolphin | docs/DSP/prefix_replace.py | 164 | 1049 | # this can be used to upgrade disassemblies that aren't too annotated.
# won't do very well on the current zelda disasm.
import os
import sys
def GetPrefixLine(l, a):
for s in a:
if s[0:len(l)] == l:
return s
return ""
def GetComment(l):
comment_start = l.find("//")
if comment_start < 0:
comment_start = l.find("->")
if comment_start < 0:
return ""
while (l[comment_start-1] == ' ') or (l[comment_start-1] == '\t'):
comment_start -= 1
return l[comment_start:]
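# Hand-traced sketch of GetComment (added note, not from the original file):
#   GetComment("0x00 nop // stack") -> " // stack"
# i.e. the returned slice keeps the whitespace run that precedes the marker.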
def main():
old_lines = open("DSP_UC_Zelda.txt", "r").readlines()
# for l in old_lines:
# print l
new_lines = open("zeldanew.txt", "r").readlines()
for i in range(0, len(old_lines)):
prefix = old_lines[i][0:14]
comment = GetComment(old_lines[i])
new_line = GetPrefixLine(prefix, new_lines)
if new_line:
old_lines[i] = new_line[:-1] + comment[:-1] + "\n"
for i in range(0, len(old_lines)):
print old_lines[i],
new_file = open("output.txt", "w")
new_file.writelines(old_lines)
main() | gpl-2.0 |
nash-x/hws | neutron/db/migration/alembic_migrations/versions/16c8803e1cf_trunk_port.py | 1 | 1549 | # Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""trunk_port
Revision ID: 16c8803e1cf
Revises: 544673ac99ab
Create Date: 2014-09-01 18:06:15.722787
"""
# revision identifiers, used by Alembic.
revision = '16c8803e1cf'
down_revision = '42f49dd148cd'
from alembic import op
import sqlalchemy as sa
def upgrade(active_plugins=None, options=None):
op.create_table(
'trunkports',
sa.Column('port_id', sa.String(length=36), nullable=False),
sa.Column('trunk_type', sa.String(length=16), nullable=True),
sa.Column('parent_id', sa.String(length=36), nullable=True),
sa.Column('vid', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['port_id'], ['ports.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('port_id'))
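# The explicit engine switch below matters on MySQL: the foreign key to
# ports.id is only enforced by InnoDB, while MyISAM (a possible server
# default) silently ignores foreign key constraints.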
if op.get_bind().dialect.name == 'mysql':
op.execute("ALTER TABLE %s ENGINE=InnoDB" % 'trunkports')
def downgrade(active_plugins=None, options=None):
op.drop_table('trunkports')
| apache-2.0 |
shingonoide/odoo | addons/procurement_jit_stock/procurement_jit_stock.py | 64 | 2047 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2013 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv
class procurement_order(osv.osv):
_inherit = "procurement.order"
def run(self, cr, uid, ids, autocommit=False, context=None):
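# procurement_autorun_defer suppresses the automatic run that creating the
# chained moves would otherwise trigger; the follow-on procurements are
# instead collected below and run explicitly via the recursive call.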
context = dict(context or {}, procurement_autorun_defer=True)
res = super(procurement_order, self).run(cr, uid, ids, autocommit=autocommit, context=context)
procurement_ids = self.search(cr, uid, [('move_dest_id.procurement_id', 'in', ids), ('state', 'not in', ['exception', 'cancel'])], order='id', context=context)
if procurement_ids:
return self.run(cr, uid, procurement_ids, autocommit=autocommit, context=context)
return res
class stock_move(osv.osv):
_inherit = "stock.move"
def _create_procurements(self, cr, uid, moves, context=None):
res = super(stock_move, self)._create_procurements(cr, uid, moves, context=dict(context or {}, procurement_autorun_defer=True))
self.pool['procurement.order'].run(cr, uid, res, context=context)
return res
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
jmcanterafonseca/fiware-orion | test/acceptance/behave/components/common_steps/entities/delete_steps.py | 5 | 3670 | # -*- coding: utf-8 -*-
"""
Copyright 2015 Telefonica Investigacion y Desarrollo, S.A.U
This file is part of Orion Context Broker.
Orion Context Broker is free software: you can redistribute it and/or
modify it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
Orion Context Broker is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero
General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with Orion Context Broker. If not, see http://www.gnu.org/licenses/.
For those usages not covered by this license please contact with
iot_support at tid dot es
"""
__author__ = 'Iván Arias León (ivan dot ariasleon at telefonica dot com)'
import behave
from behave import step
from iotqatools.helpers_utils import *
from iotqatools.mongo_utils import Mongo
from tools.NGSI_v2 import NGSI
from tools.properties_config import Properties
# constants
CONTEXT_BROKER_ENV = u'context_broker_env'
MONGO_ENV = u'mongo_env'
properties_class = Properties()
behave.use_step_matcher("re")
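# With the "re" matcher, the step decorators below are regular expressions,
# so groups such as ([^"]*) capture the quoted arguments of each step.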
__logger__ = logging.getLogger("steps")
# ------------------------- delete steps ----------------------------
@step(u'delete an entity with id "([^"]*)"')
def delete_an_entity_by_id(context, entity_id):
"""
delete an entity by id
:param context: It’s a clever place where you and behave can store information to share around. It runs at three levels, automatically managed by behave.
:param entity_id: entity id name
"""
__logger__.debug("Deleting entity: %s..." % entity_id)
context.resp = context.cb.delete_entities_by_id(context, entity_id)
__logger__.info("...\"%s\" entity has been deleted" % entity_id)
@step(u'delete an attribute "([^"]*)" in the entity with id "([^"]*)"')
def delete_an_attribute_in_entity_with_id(context, attribute_name, entity_id):
"""
delete an attribute in an entity
:param context: It’s a clever place where you and behave can store information to share around. It runs at three levels, automatically managed by behave.
:param entity_id: entity id name
:param attribute_name: attribute name to delete
"""
__logger__.debug("Deleting an attribute in an entity defined by ID...")
# if delete a single attribute in several entities a response list is returned, else only one response is returned.
context.resp = context.cb.delete_entities_by_id(context, entity_id, attribute_name)
__logger__.info("... an attribute is deleted in an entity defined by ID")
# ------------------------- verification steps ----------------------------
@step(u'verify that the attribute is deleted into mongo in the defined entity')
@step(u'verify that the attribute is deleted into mongo')
def verify_that_the_attribute_is_deleted_into_mongo(context):
"""
verify that the attribute is deleted into mongo
"""
props_mongo = properties_class.read_properties()[MONGO_ENV] # mongo properties dict
__logger__.debug("Verifying if the atribute is deleted...")
mongo = Mongo(host=props_mongo["MONGO_HOST"], port=props_mongo["MONGO_PORT"], user=props_mongo["MONGO_USER"],
password=props_mongo["MONGO_PASS"])
ngsi = NGSI()
ngsi.verify_attribute_is_deleted(mongo, context.cb.get_entity_context(), context.cb.get_headers(), context.cb.get_entities_parameters())
__logger__.info("...verified that the attribute is deleted")
| agpl-3.0 |
jaimahajan1997/sympy | sympy/series/tests/test_limitseq.py | 62 | 3139 | from sympy import symbols, oo, Sum, harmonic, Add, S, binomial, factorial
from sympy.series.limitseq import limit_seq
from sympy.series.limitseq import difference_delta as dd
from sympy.utilities.pytest import raises, XFAIL
n, m, k = symbols('n m k', integer=True)
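# Contract exercised below (a sketch inferred from these tests, not a quoted
# docstring): difference_delta is the forward difference operator,
#   dd(f, n, step) == f.subs(n, n + step) - f, with step defaulting to 1.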
def test_difference_delta():
e = n*(n + 1)
e2 = e * k
assert dd(e) == 2*n + 2
assert dd(e2, n, 2) == k*(4*n + 6)
raises(ValueError, lambda: dd(e2))
raises(ValueError, lambda: dd(e2, n, oo))
def test_difference_delta__Sum():
e = Sum(1/k, (k, 1, n))
assert dd(e, n) == 1/(n + 1)
assert dd(e, n, 5) == Add(*[1/(i + n + 1) for i in range(5)])
e = Sum(1/k, (k, 1, 3*n))
assert dd(e, n) == Add(*[1/(i + 3*n + 1) for i in range(3)])
e = n * Sum(1/k, (k, 1, n))
assert dd(e, n) == 1 + Sum(1/k, (k, 1, n))
e = Sum(1/k, (k, 1, n), (m, 1, n))
assert dd(e, n) == harmonic(n)
def test_difference_delta__Add():
e = n + n*(n + 1)
assert dd(e, n) == 2*n + 3
assert dd(e, n, 2) == 4*n + 8
e = n + Sum(1/k, (k, 1, n))
assert dd(e, n) == 1 + 1/(n + 1)
assert dd(e, n, 5) == 5 + Add(*[1/(i + n + 1) for i in range(5)])
def test_difference_delta__Pow():
e = 4**n
assert dd(e, n) == 3*4**n
assert dd(e, n, 2) == 15*4**n
e = 4**(2*n)
assert dd(e, n) == 15*4**(2*n)
assert dd(e, n, 2) == 255*4**(2*n)
e = n**4
assert dd(e, n) == (n + 1)**4 - n**4
e = n**n
assert dd(e, n) == (n + 1)**(n + 1) - n**n
def test_limit_seq():
e = binomial(2*n, n) / Sum(binomial(2*k, k), (k, 1, n))
assert limit_seq(e) == S(3) / 4
assert limit_seq(e, m) == e
e = (5*n**3 + 3*n**2 + 4) / (3*n**3 + 4*n - 5)
assert limit_seq(e, n) == S(5) / 3
e = (harmonic(n) * Sum(harmonic(k), (k, 1, n))) / (n * harmonic(2*n)**2)
assert limit_seq(e, n) == 1
e = Sum(k**2 * Sum(2**m/m, (m, 1, k)), (k, 1, n)) / (2**n*n)
assert limit_seq(e, n) == 4
e = (Sum(binomial(3*k, k) * binomial(5*k, k), (k, 1, n)) /
(binomial(3*n, n) * binomial(5*n, n)))
assert limit_seq(e, n) == S(84375) / 83351
e = Sum(harmonic(k)**2/k, (k, 1, 2*n)) / harmonic(n)**3
assert limit_seq(e, n) == S(1) / 3
raises(ValueError, lambda: limit_seq(e * m))
@XFAIL
def test_limit_seq_fail():
# improve Summation algorithm or add ad-hoc criteria
e = (harmonic(n)**3 * Sum(1/harmonic(k), (k, 1, n)) /
(n * Sum(harmonic(k)/k, (k, 1, n))))
assert limit_seq(e, n) == 2
# No unique dominant term
e = (Sum(2**k * binomial(2*k, k) / k**2, (k, 1, n)) /
(Sum(2**k/k*2, (k, 1, n)) * Sum(binomial(2*k, k), (k, 1, n))))
assert limit_seq(e, n) == S(3) / 7
# Simplifications of summations needs to be improved.
e = n**3*Sum(2**k/k**2, (k, 1, n))**2 / (2**n * Sum(2**k/k, (k, 1, n)))
assert limit_seq(e, n) == 2
e = (harmonic(n) * Sum(2**k/k, (k, 1, n)) /
(n * Sum(2**k*harmonic(k)/k**2, (k, 1, n))))
assert limit_seq(e, n) == 1
e = (Sum(2**k*factorial(k) / k**2, (k, 1, 2*n)) /
(Sum(4**k/k**2, (k, 1, n)) * Sum(factorial(k), (k, 1, 2*n))))
assert limit_seq(e, n) == S(3) / 16
| bsd-3-clause |
b-jesch/service.fritzbox.callmonitor | resources/lib/PhoneBooks/pyicloud/vendorlibs/requests/packages/urllib3/exceptions.py | 515 | 5599 | from __future__ import absolute_import
# Base Exceptions
class HTTPError(Exception):
"Base exception used by this module."
pass
class HTTPWarning(Warning):
"Base warning used by this module."
pass
class PoolError(HTTPError):
"Base exception for errors caused within a pool."
def __init__(self, pool, message):
self.pool = pool
HTTPError.__init__(self, "%s: %s" % (pool, message))
def __reduce__(self):
# For pickling purposes.
return self.__class__, (None, None)
class RequestError(PoolError):
"Base exception for PoolErrors that have associated URLs."
def __init__(self, pool, url, message):
self.url = url
PoolError.__init__(self, pool, message)
def __reduce__(self):
# For pickling purposes.
return self.__class__, (None, self.url, None)
class SSLError(HTTPError):
"Raised when SSL certificate fails in an HTTPS connection."
pass
class ProxyError(HTTPError):
"Raised when the connection to a proxy fails."
pass
class DecodeError(HTTPError):
"Raised when automatic decoding based on Content-Type fails."
pass
class ProtocolError(HTTPError):
"Raised when something unexpected happens mid-request/response."
pass
#: Renamed to ProtocolError but aliased for backwards compatibility.
ConnectionError = ProtocolError
# Leaf Exceptions
class MaxRetryError(RequestError):
"""Raised when the maximum number of retries is exceeded.
:param pool: The connection pool
:type pool: :class:`~urllib3.connectionpool.HTTPConnectionPool`
:param string url: The requested Url
:param exceptions.Exception reason: The underlying error
"""
def __init__(self, pool, url, reason=None):
self.reason = reason
message = "Max retries exceeded with url: %s (Caused by %r)" % (
url, reason)
RequestError.__init__(self, pool, url, message)
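# Usage sketch (illustrative only; `pool` and `log` are placeholders): this
# is the error a caller typically sees once retries are exhausted, with the
# underlying cause preserved:
#
#   try:
#       pool.urlopen('GET', '/')
#   except MaxRetryError as err:
#       log.warning("gave up on %s: %r", err.url, err.reason)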
class HostChangedError(RequestError):
"Raised when an existing pool gets a request for a foreign host."
def __init__(self, pool, url, retries=3):
message = "Tried to open a foreign host with url: %s" % url
RequestError.__init__(self, pool, url, message)
self.retries = retries
class TimeoutStateError(HTTPError):
""" Raised when passing an invalid state to a timeout """
pass
class TimeoutError(HTTPError):
""" Raised when a socket timeout error occurs.
Catching this error will catch both :exc:`ReadTimeoutErrors
<ReadTimeoutError>` and :exc:`ConnectTimeoutErrors <ConnectTimeoutError>`.
"""
pass
class ReadTimeoutError(TimeoutError, RequestError):
"Raised when a socket timeout occurs while receiving data from a server"
pass
# This timeout error does not have a URL attached and needs to inherit from the
# base HTTPError
class ConnectTimeoutError(TimeoutError):
"Raised when a socket timeout occurs while connecting to a server"
pass
class NewConnectionError(ConnectTimeoutError, PoolError):
"Raised when we fail to establish a new connection. Usually ECONNREFUSED."
pass
class EmptyPoolError(PoolError):
"Raised when a pool runs out of connections and no more are allowed."
pass
class ClosedPoolError(PoolError):
"Raised when a request enters a pool after the pool has been closed."
pass
class LocationValueError(ValueError, HTTPError):
"Raised when there is something wrong with a given URL input."
pass
class LocationParseError(LocationValueError):
"Raised when get_host or similar fails to parse the URL input."
def __init__(self, location):
message = "Failed to parse: %s" % location
HTTPError.__init__(self, message)
self.location = location
class ResponseError(HTTPError):
"Used as a container for an error reason supplied in a MaxRetryError."
GENERIC_ERROR = 'too many error responses'
SPECIFIC_ERROR = 'too many {status_code} error responses'
class SecurityWarning(HTTPWarning):
"Warned when perfoming security reducing actions"
pass
class SubjectAltNameWarning(SecurityWarning):
"Warned when connecting to a host with a certificate missing a SAN."
pass
class InsecureRequestWarning(SecurityWarning):
"Warned when making an unverified HTTPS request."
pass
class SystemTimeWarning(SecurityWarning):
"Warned when system time is suspected to be wrong"
pass
class InsecurePlatformWarning(SecurityWarning):
"Warned when certain SSL configuration is not available on a platform."
pass
class SNIMissingWarning(HTTPWarning):
"Warned when making a HTTPS request without SNI available."
pass
class DependencyWarning(HTTPWarning):
"""
Warned when an attempt is made to import a module with missing optional
dependencies.
"""
pass
class ResponseNotChunked(ProtocolError, ValueError):
"Response needs to be chunked in order to read it as chunks."
pass
class ProxySchemeUnknown(AssertionError, ValueError):
"ProxyManager does not support the supplied scheme"
# TODO(t-8ch): Stop inheriting from AssertionError in v2.0.
def __init__(self, scheme):
message = "Not supported proxy scheme %s" % scheme
super(ProxySchemeUnknown, self).__init__(message)
class HeaderParsingError(HTTPError):
"Raised by assert_header_parsing, but we convert it to a log.warning statement."
def __init__(self, defects, unparsed_data):
message = '%s, unparsed data: %r' % (defects or 'Unknown', unparsed_data)
super(HeaderParsingError, self).__init__(message)
| gpl-2.0 |