code | repo_name | path | language | license | size
stringlengths 2-1.05M | stringlengths 5-104 | stringlengths 4-251 | stringclasses 1 value | stringclasses 15 values | int32 2-1.05M
---|---|---|---|---|---|
from client import DjangoClient
from django.db import connection
import time
class BatianAPMMiddleware(object):
def __init__(self, **kwargs):
self.client = DjangoClient()
def process_request(self, request):
request.start_time = time.time()
def process_view(self, request, view_func, view_args, view_kwargs):
request._batian_view_func = view_func
def process_response(self, request, response):
self.client.harvest((request, response, connection.queries))
return response
def process_exception(self, request, exception):
self.client.harvest((request, exception), category="exception")
| ishuah/python-batian | batian/contrib/django/middleware.py | Python | gpl-2.0 | 658 |
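A minimal sketch of how the middleware above might be wired into a Django project; the dotted path follows the repository layout shown in the record, and old-style MIDDLEWARE_CLASSES is assumed because the class uses the pre-1.10 process_* hook API. The surrounding setting values are illustrative only.
# settings.py (illustrative): register the middleware so Django invokes its
# process_request/process_view/process_response/process_exception hooks.
MIDDLEWARE_CLASSES = (
    'batian.contrib.django.middleware.BatianAPMMiddleware',
    'django.middleware.common.CommonMiddleware',
)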
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from twisted.internet import defer
from buildbot.mq import base
from buildbot.test.util import validation
from buildbot.util import deferwaiter
from buildbot.util import service
from buildbot.util import tuplematch
class FakeMQConnector(service.AsyncMultiService, base.MQBase):
# a fake connector that doesn't actually bridge messages from production to
# consumption, and thus doesn't do any topic handling or persistence
# note that this *does* verify all messages sent and received, unless this
# is set to false:
verifyMessages = True
def __init__(self, testcase):
super().__init__()
self.testcase = testcase
self.setup_called = False
self.productions = []
self.qrefs = []
self._deferwaiter = deferwaiter.DeferWaiter()
@defer.inlineCallbacks
def stopService(self):
yield self._deferwaiter.wait()
yield super().stopService()
def setup(self):
self.setup_called = True
return defer.succeed(None)
def produce(self, routingKey, data):
self.testcase.assertIsInstance(routingKey, tuple)
# XXX this is incompatible with the new scheme of sending multiple messages,
# since the message type is no longer encoded by the first element of the
# routing key
# if self.verifyMessages:
# validation.verifyMessage(self.testcase, routingKey, data)
if any(not isinstance(k, str) for k in routingKey):
raise AssertionError(f"{routingKey} is not all str")
self.productions.append((routingKey, data))
# note - no consumers are called: IT'S A FAKE
def callConsumer(self, routingKey, msg):
if self.verifyMessages:
validation.verifyMessage(self.testcase, routingKey, msg)
matched = False
for q in self.qrefs:
if tuplematch.matchTuple(routingKey, q.filter):
matched = True
self._deferwaiter.add(q.callback(routingKey, msg))
if not matched:
raise AssertionError("no consumer found")
def startConsuming(self, callback, filter, persistent_name=None):
if any(not isinstance(k, str) and
k is not None for k in filter):
raise AssertionError(f"{filter} is not a filter")
qref = FakeQueueRef()
qref.qrefs = self.qrefs
qref.callback = callback
qref.filter = filter
qref.persistent_name = persistent_name
self.qrefs.append(qref)
return defer.succeed(qref)
def clearProductions(self):
"Clear out the cached productions"
self.productions = []
def assertProductions(self, exp, orderMatters=True):
"""Assert that the given messages have been produced, then flush the
list of produced messages.
If C{orderMatters} is false, then the messages are sorted first; use
this in cases where the messages must all be produced, but the order is
not specified.
"""
if orderMatters:
self.testcase.assertEqual(self.productions, exp)
else:
self.testcase.assertEqual(sorted(self.productions), sorted(exp))
self.productions = []
class FakeQueueRef:
def stopConsuming(self):
if self in self.qrefs:
self.qrefs.remove(self)
| pmisik/buildbot | master/buildbot/test/fake/fakemq.py | Python | gpl-2.0 | 4,021 |
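A minimal usage sketch of the fake connector above inside a unittest-style test; the routing key and message body are made up, and FakeMQConnector is assumed to be importable from this module. produce() records the message, and assertProductions() compares against the recorded list and then flushes it.
import unittest

class FakeMQExampleTest(unittest.TestCase):
    def test_produce_and_assert(self):
        mq = FakeMQConnector(testcase=self)
        mq.produce(('builds', '1', 'new'), {'state': 'pending'})
        # assertProductions checks the recorded messages and clears the list
        mq.assertProductions([(('builds', '1', 'new'), {'state': 'pending'})])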
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2014-2018 Florian Bruhin (The Compiler) <[email protected]>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Launcher for an external editor."""
import os
import tempfile
from PyQt5.QtCore import (pyqtSignal, pyqtSlot, QObject, QProcess,
QFileSystemWatcher)
from qutebrowser.config import config
from qutebrowser.utils import message, log
from qutebrowser.misc import guiprocess
class ExternalEditor(QObject):
"""Class to simplify editing a text in an external editor.
Attributes:
_text: The current text before the editor is opened.
_filename: The name of the file to be edited.
_remove_file: Whether the file should be removed when the editor is
closed.
_proc: The GUIProcess of the editor.
_watcher: A QFileSystemWatcher to watch the edited file for changes.
Only set if watch=True.
_content: The last-saved text of the editor.
Signals:
file_updated: The text in the edited file was updated.
arg: The new text.
editing_finished: The editor process was closed.
"""
file_updated = pyqtSignal(str)
editing_finished = pyqtSignal()
def __init__(self, parent=None, watch=False):
super().__init__(parent)
self._filename = None
self._proc = None
self._remove_file = None
self._watcher = QFileSystemWatcher(parent=self) if watch else None
self._content = None
def _cleanup(self):
"""Clean up temporary files after the editor closed."""
assert self._remove_file is not None
watched_files = self._watcher.files() if self._watcher else []
if watched_files:
failed = self._watcher.removePaths(watched_files)
if failed:
log.procs.error("Failed to unwatch paths: {}".format(failed))
if self._filename is None or not self._remove_file:
# Could not create initial file.
return
try:
if self._proc.exit_status() != QProcess.CrashExit:
os.remove(self._filename)
except OSError as e:
# NOTE: Do not replace this with "raise CommandError" as it's
# executed async.
message.error("Failed to delete tempfile... ({})".format(e))
@pyqtSlot(int, QProcess.ExitStatus)
def on_proc_closed(self, _exitcode, exitstatus):
"""Write the editor text into the form field and clean up tempfile.
Callback for QProcess when the editor was closed.
"""
log.procs.debug("Editor closed")
if exitstatus != QProcess.NormalExit:
# No error/cleanup here, since we already handle this in
# on_proc_error.
return
# do a final read to make sure we don't miss the last signal
self._on_file_changed(self._filename)
self.editing_finished.emit()
self._cleanup()
@pyqtSlot(QProcess.ProcessError)
def on_proc_error(self, _err):
self._cleanup()
def edit(self, text, caret_position=None):
"""Edit a given text.
Args:
text: The initial text to edit.
caret_position: The position of the caret in the text.
"""
if self._filename is not None:
raise ValueError("Already editing a file!")
try:
self._filename = self._create_tempfile(text, 'qutebrowser-editor-')
except OSError as e:
message.error("Failed to create initial file: {}".format(e))
return
self._remove_file = True
line, column = self._calc_line_and_column(text, caret_position)
self._start_editor(line=line, column=column)
def backup(self):
"""Create a backup if the content has changed from the original."""
if not self._content:
return
try:
fname = self._create_tempfile(self._content,
'qutebrowser-editor-backup-')
message.info('Editor backup at {}'.format(fname))
except OSError as e:
message.error('Failed to create editor backup: {}'.format(e))
def _create_tempfile(self, text, prefix):
# Close while the external process is running, as otherwise systems
# with exclusive write access (e.g. Windows) may fail to update
# the file from the external editor, see
# https://github.com/qutebrowser/qutebrowser/issues/1767
with tempfile.NamedTemporaryFile(
# pylint: disable=bad-continuation
mode='w', prefix=prefix,
encoding=config.val.editor.encoding,
delete=False) as fobj:
# pylint: enable=bad-continuation
if text:
fobj.write(text)
return fobj.name
@pyqtSlot(str)
def _on_file_changed(self, path):
try:
with open(path, 'r', encoding=config.val.editor.encoding) as f:
text = f.read()
except OSError as e:
# NOTE: Do not replace this with "raise CommandError" as it's
# executed async.
message.error("Failed to read back edited file: {}".format(e))
return
log.procs.debug("Read back: {}".format(text))
if self._content != text:
self._content = text
self.file_updated.emit(text)
def edit_file(self, filename):
"""Edit the file with the given filename."""
self._filename = filename
self._remove_file = False
self._start_editor()
def _start_editor(self, line=1, column=1):
"""Start the editor with the file opened as self._filename.
Args:
line: the line number to pass to the editor
column: the column number to pass to the editor
"""
self._proc = guiprocess.GUIProcess(what='editor', parent=self)
self._proc.finished.connect(self.on_proc_closed)
self._proc.error.connect(self.on_proc_error)
editor = config.val.editor.command
executable = editor[0]
if self._watcher:
ok = self._watcher.addPath(self._filename)
if not ok:
log.procs.error("Failed to watch path: {}"
.format(self._filename))
self._watcher.fileChanged.connect(self._on_file_changed)
args = [self._sub_placeholder(arg, line, column) for arg in editor[1:]]
log.procs.debug("Calling \"{}\" with args {}".format(executable, args))
self._proc.start(executable, args)
def _calc_line_and_column(self, text, caret_position):
r"""Calculate line and column numbers given a text and caret position.
Both line and column are 1-based indexes, because that's what most
editors use as line and column starting index. By "most" we mean at
least vim, nvim, gvim, emacs, atom, sublimetext, notepad++, brackets,
visual studio, QtCreator and so on.
To find the line we just count how many newlines there are before the
caret and add 1.
To find the column we calculate the difference between the caret and
the last newline before the caret.
For example in the text `aaa\nbb|bbb` (| represents the caret):
caret_position = 6
text[:caret_position] = `aaa\nbb`
text[:caret_position].count('\n') = 1
caret_position - text[:caret_position].rfind('\n') = 3
Thus line, column = 2, 3, and the caret is indeed in the second
line, third column
Args:
text: the text for which the numbers must be calculated
caret_position: the position of the caret in the text, or None
Return:
A (line, column) tuple of (int, int)
"""
if caret_position is None:
return 1, 1
line = text[:caret_position].count('\n') + 1
column = caret_position - text[:caret_position].rfind('\n')
return line, column
def _sub_placeholder(self, arg, line, column):
"""Substitute a single placeholder.
If the `arg` input to this function is a valid placeholder it will
be substituted with the appropriate value, otherwise it will be left
unchanged.
Args:
arg: an argument of editor.command.
line: the previously-calculated line number for the text caret.
column: the previously-calculated column number for the text caret.
Return:
The substituted placeholder or the original argument.
"""
replacements = {
'{}': self._filename,
'{file}': self._filename,
'{line}': str(line),
'{line0}': str(line-1),
'{column}': str(column),
'{column0}': str(column-1)
}
for old, new in replacements.items():
arg = arg.replace(old, new)
return arg
| toofar/qutebrowser | qutebrowser/misc/editor.py | Python | gpl-3.0 | 9,693 |
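The _calc_line_and_column docstring above works through the caret arithmetic; here is the same computation as a standalone snippet (no qutebrowser imports needed), reusing the docstring's example text with the caret at position 6.
text = 'aaa\nbbbbb'
caret_position = 6
line = text[:caret_position].count('\n') + 1                  # one newline before the caret -> line 2
column = caret_position - text[:caret_position].rfind('\n')   # 6 - 3 -> column 3
assert (line, column) == (2, 3)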
import os
import socket
if hasattr(socket, "AF_UNIX"):
from ._unix import FakeSnapd # noqa: F401
try:
from ._unittests import ( # noqa: F401
FakeElf,
FakeExtension,
FakeMetadataExtractor,
FakeMultipass,
FakePlugin,
FakeProjectOptions,
FakeSnapCommand,
FakeSnapcraftctl,
)
except ImportError as import_error:
if os.path.exists(os.path.join(os.path.dirname(__file__), "..", "snapcraft")):
raise import_error
from ._fixtures import ( # noqa: F401
BzrRepo,
CleanEnvironment,
FakeBaseEnvironment,
FakeParts,
FakePartsServerRunning,
FakePartsWiki,
FakePartsWikiOrigin,
FakePartsWikiOriginRunning,
FakePartsWikiRunning,
FakePartsWikiWithSlashes,
FakePartsWikiWithSlashesRunning,
FakeServerRunning,
FakeSnapcraftIsASnap,
FakeSSOServerRunning,
FakeStore,
FakeStoreAPIServerRunning,
FakeStoreSearchServerRunning,
FakeStoreUploadServerRunning,
FakeTerminal,
GitRepo,
HgRepo,
SharedCache,
SnapcraftYaml,
StagingStore,
SvnRepo,
TempCWD,
TempXDG,
TestStore,
WithoutSnapInstalled,
)
| chipaca/snapcraft | tests/fixture_setup/__init__.py | Python | gpl-3.0 | 1,177 |
#from gi.repository import Gtk
import cairo, os
from softwarecenter.enums import ViewPages
from softwarecenter.paths import datadir
from mkit import floats_from_string
class SectionPainter(object):
# specify background overlay image and color mappings for available and installed view ids
BACKGROUND_IMAGES = {ViewPages.AVAILABLE : cairo.ImageSurface.create_from_png(
os.path.join(datadir, 'images/clouds.png')),
ViewPages.INSTALLED : cairo.ImageSurface.create_from_png(
os.path.join(datadir, 'images/arrows.png')),
}
BACKGROUND_COLORS = {ViewPages.AVAILABLE : floats_from_string('#0769BC'),
ViewPages.INSTALLED : floats_from_string('#aea79f'),
}
def __init__(self):
self._view_id = None
return
def set_view_id(self, id):
self._view_id = id
return
def draw(self, widget, cr):
# sky
#r,g,b = self.get_background_color()
#lin = cairo.LinearGradient(0,a.y,0,a.y+150)
#lin.add_color_stop_rgba(0, r,g,b, 0.3)
#lin.add_color_stop_rgba(1, r,g,b,0)
#cr.set_source(lin)
#cr.rectangle(0,0,a.width, 150)
#cr.fill()
#s = self.get_background_image()
#if widget.get_direction() != Gtk.TextDirection.RTL:
# cr.set_source_surface(s, a.x+a.width-s.get_width(), 0)
#else:
# cr.set_source_surface(s, a.x, 0)
#cr.paint()
return
def get_background_color(self):
return self.BACKGROUND_COLORS[self._view_id]
def get_background_image(self):
return self.BACKGROUND_IMAGES[self._view_id]
| armikhael/software-center | softwarecenter/ui/gtk3/widgets/sections.py | Python | gpl-3.0 | 1,787 |
# -*- coding: utf-8 -*-
############################################################################
# LGPL License #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU Lesser General Public License as #
# published by the Free Software Foundation, either version 3 of the #
# License, or (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU Lesser General Public License for more details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
############################################################################
# Build environment configuration for Mac OS X. The libraries must be stored within
# the global system paths or must be set within the environment variables. The env
# variables are read here and are added to the environment object.
#
# Build is set to C++11 with Clang
Import("*")
import os
import Utilities
conf.env.Replace(CPPDEFINES = ["JIMDB_DARWIN"])
conf.env.Replace(CPPFLAGS = ["-std=c++1y"])
conf.env.Replace(LINKFLAGS = [])
if conf.env["buildtype"] == "release" :
conf.env.AppendUnique(CPPDEFINES = ["NDEBUG", "BOOST_UBLAS_NDEBUG", "JIMDB_NDEBUG"])
conf.env.AppendUnique(CPPFLAGS = ["-O2", "-finline-functions"])
elif conf.env["buildtype"] == "debug" :
conf.env.AppendUnique(LINKFLAGS = ["-g"])
conf.env.AppendUnique(CPPFLAGS = ["-g", "-Wall"])
# set pathes for compiler & linker
if not conf.env["withlocallibrary"] :
if conf.env["ENV"].has_key("DYLD_LIBRARY_PATH") :
conf.env.Replace(LIBPATH = conf.env["ENV"]["DYLD_LIBRARY_PATH"].split(os.pathsep))
print("Appending custom OSX dynamic library path (DYLD_LIBRARY_PATH)")
elif conf.env["ENV"].has_key("LD_LIBRARY_PATH") :
conf.env.Replace(LIBPATH = conf.env["ENV"]["LD_LIBRARY_PATH"].split(os.pathsep))
print("Appending custom posix dynamic library path (LD_LIBRARY_PATH)")
elif conf.env["ENV"].has_key("LIBRARY_PATH") :
conf.env.Replace(LIBPATH = conf.env["ENV"]["LIBRARY_PATH"].split(os.pathsep))
print("Appending custom posix dynamic library path (LIBRARY_PATH)")
if conf.env["ENV"].has_key("CPPPATH") :
conf.env.Replace(CPPPATH = conf.env["ENV"]["CPPPATH"].split(os.pathsep))
print("Appending custom include path (CPPPATH)")
| BennX/jim-db | buildenvironment/darwin.py | Python | gpl-3.0 | 2,969 |
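A standalone illustration of the path splitting the build script above applies to DYLD_LIBRARY_PATH / LD_LIBRARY_PATH before assigning LIBPATH; the path value is a made-up example.
import os

dyld_library_path = '/opt/local/lib:/usr/local/lib'   # made-up example value
print(dyld_library_path.split(os.pathsep))            # ['/opt/local/lib', '/usr/local/lib']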
#!/usr/bin/env python
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# import sys
import os
from setuptools import setup, find_packages
def package_files(directory):
paths = []
for (path, directories, filenames) in os.walk(directory):
for filename in filenames:
paths.append(os.path.join('.', path, filename))
return paths
dataset = package_files('share')
print("dataset {}".format(dataset))
setup(name='pschitt',
version=1.0,
description="DESCRIPTION",
# these should be minimum list of what is needed to run (note
# don't need to list the sub-dependencies like numpy, since
# astropy already depends on it)
install_requires=[
'numpy',
'scipy',
'matplotlib>=2.0',
'numba'
],
packages=find_packages(),
tests_require=['pytest'],
author='Thomas Vuillaume',
author_email='[email protected]',
license='BSD3',
url='https://github.com/vuillaut/pschitt',
long_description='',
classifiers=[
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 3',
'Topic :: Scientific/Engineering :: Astronomy',
'Development Status :: 3 - Alpha',
],
data_files=[('pschitt/', dataset)],
)
| vuillaut/ImArray | setup.py | Python | gpl-3.0 | 1,396 |
import re
def split_string(string, seperator=' '):
""" Split string on separator """
return string.split(seperator)
def split_regex(string, seperator_pattern):
""" Split string on regular expression """
return re.split(seperator_pattern, string)
# Dedicated filters
def keyboard_layouts(keyboards, f=None):
""" Return keyboards layout configuration as a string of comma separated
layouts and variants separated by colon or only as comma separated layouts
or variants if 'f' (filter) is set.
"""
layouts = []
variants = []
if keyboards:
for keyboard in keyboards:
layouts.append(keyboard['layout'])
try:
variants.append(keyboard['variant'])
except KeyError:
variants.append('')
if not f:
ret = (',').join(i for i in layouts)
ret += ":"
ret += (',').join(i for i in variants)
elif f == 'layouts':
ret = (',').join(i for i in layouts)
elif f == 'variants':
ret = (',').join(i for i in variants)
return ret
def postgresql_shm(mem):
""" Get recommended value of kernel shmmax configuration based on total
server RAM for running PostgreSQL db. System shmmax value which must be
something little bit higher than one fourth of system memory size.
"""
return int(round(mem * 1000000 / 3.5))
class FilterModule(object):
''' utility filters '''
def filters(self):
return {
'split_string': split_string,
'split_regex': split_regex,
'postgresql_shm': postgresql_shm,
'keyboard_layouts': keyboard_layouts
}
# vim: set ts=8 sts=4 sw=4 et:
| gislab-npo/gislab | system/filter_plugins/utils.py | Python | gpl-3.0 | 1,704 |
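A small illustration of what the filters above return; the keyboard dicts and the memory figure are made-up sample values rather than real inventory data.
keyboards = [{'layout': 'us'}, {'layout': 'de', 'variant': 'nodeadkeys'}]
print(keyboard_layouts(keyboards))              # 'us,de:,nodeadkeys' (layouts:variants)
print(keyboard_layouts(keyboards, 'layouts'))   # 'us,de'
print(postgresql_shm(8192))                     # shmmax suggestion for 8192 MB of RAM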
from hashlib import md5
from django import template
from django.utils.safestring import mark_safe
from django.core.urlresolvers import reverse
from django.template.defaultfilters import title
from social_auth.models import UserSocialAuth
register = template.Library()
@register.filter(is_safe=True)
def social_link(user_social):
username = user_social.extra_data.get('username')
extra_id = user_social.extra_data.get('id')
providers_links = {
'facebook': 'https://www.facebook.com/%s' % extra_id,
'twitter': 'https://www.twitter.com/%s' % username,
'google-oauth2': 'https://plus.google.com/%s/about' % extra_id,
'github': 'https://github.com/%s' % username, }
return mark_safe('<a href="%s">%s</a>'
% (providers_links[user_social.provider],
username)
)
@register.filter(is_safe=True)
def social_user_links(user):
profile = user.get_profile()
accounts = UserSocialAuth.objects.filter(user=user)
providers_links = {
'facebook': {'show': profile.show_facebook,
'link': 'https://www.facebook.com/[id]'},
'twitter': {'show': profile.show_twitter,
'link': 'https://www.twitter.com/[uname]'},
'google-oauth2': {'show': profile.show_google_plus,
'link': 'https://plus.google.com/[id]'},
'github': {'show': profile.show_github,
'link': 'https://github.com/[uname]'}, }
output = ''
for account in accounts:
if providers_links[account.provider]['show']:
extra = account.extra_data
if not 'id' in extra:
extra['id'] = ''
link = providers_links[account.provider]['link'] \
.replace('[uname]', extra['username']) \
.replace('[id]', str(extra['id']))
output += '<li class="%(provider)s-icon">' \
'<a data-toggle="tooltip" title="%(title)s" href="%(link)s"></a>' \
% {
'link': link,
'provider': account.provider,
'title': title(account.provider)
}
return mark_safe(output)
@register.filter(is_safe=True)
def social_sign_in_links(providers, request):
output = ""
for provider in providers:
provider = provider.replace('_', '-')
output += '<li class="%(provider)s-icon">'\
'<a data-toggle="tooltip" title="%(title)s" href="%(link)s%(next)s"></a>'\
% {
'link': reverse("socialauth_begin", args=[provider]),
'next': '?next=' + request.get_full_path(),
'provider': provider,
'title': title(provider)
}
return mark_safe(output)
@register.inclusion_tag("inc/dummy.haml")
def square_thumbnail(user, size=80, itemprop=False):
profile = user.get_profile()
if profile.picture:
pos, dim = profile.avatar.split(' ')
pos = pos.split('x')
dim = dim.split('x')
coeff = float(size) / float(dim[0])
x = -(int(int(pos[0]) * coeff))
y = -(int(int(pos[1]) * coeff))
w = (int(profile.picture.width * coeff))
h = (int(profile.picture.height * coeff))
microdata = 'image' if (bool(itemprop)) else ''
return {'template': 'users/templatetags/thumbnail.haml',
'needed_size': size,
'pos_x': x,
'pos_y': y,
'resize_w': w,
'resize_h': h,
'image_url': profile.picture.url,
'microdata': microdata,
'alt': 'user_picture',
}
return gravatar(user, size, itemprop)
@register.inclusion_tag("inc/dummy.haml")
def gravatar(user, size=80, itemprop=False):
return gravatar_from_email(user.email, user.username, size, itemprop)
@register.inclusion_tag("inc/dummy.haml")
def gravatar_from_email(email, alt="gravatar", size=80, itemprop=False):
g_hash = md5(email.lower()).hexdigest()
link = 'http://www.gravatar.com/avatar/'
microdata = 'image' if (bool(itemprop)) else ''
return {'template': 'users/templatetags/gravatar.haml',
'alt': alt,
'src': link + g_hash,
'microdata': microdata,
'size': size, }
| Nivl/www.melvin.re | nivls_website/user_profile/templatetags/user_tags.py | Python | gpl-3.0 | 4,342 |
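A standalone illustration of the Gravatar URL scheme that gravatar_from_email above relies on; the address is a made-up example, and .encode() is added only so the snippet also runs under Python 3 (the tag module itself is Python 2 era).
from hashlib import md5

email = '[email protected]'
g_hash = md5(email.lower().encode('utf-8')).hexdigest()
avatar_url = 'http://www.gravatar.com/avatar/' + g_hash
print(avatar_url)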
#!/usr/bin/env python3
# Copyright (c) 2008-11 Qtrac Ltd. All rights reserved.
# This program or module is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version. It is provided for educational
# purposes and is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
import sys
sites = {}
for filename in sys.argv[1:]:
with open(filename) as file:
for line in file:
i = 0
while True:
site = None
i = line.find("http://", i)
if i > -1:
i += len("http://")
for j in range(i, len(line)):
if not (line[j].isalnum() or line[j] in ".-"):
site = line[i:j].lower()
break
if site and "." in site:
sites.setdefault(site, set()).add(filename)
i = j
else:
break
for site in sorted(sites):
print("{0} is referred to in:".format(site))
for filename in sorted(sites[site], key=str.lower):
print(" {0}".format(filename))
| therealjumbo/python_summer | py31eg/external_sites.py | Python | gpl-3.0 | 1,475 |
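The URL-scanning loop above in miniature, on a single made-up line of text: it finds the host part after "http://" by stopping at the first character that is neither alphanumeric nor '.' / '-'.
line = 'see http://www.example.com/page and more'
i = line.find('http://') + len('http://')
for j in range(i, len(line)):
    if not (line[j].isalnum() or line[j] in '.-'):
        break
print(line[i:j])   # www.example.com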
import theano
import theano.tensor as T
import lasagne
class MLPEncoder(object):
def __init__(self, LAYER_UNITS, classifier = False):
self.layer1_units = LAYER_UNITS[0]
self.layer2_units = None
self.layer3_units = None
if len(LAYER_UNITS) > 1:
self.layer2_units = LAYER_UNITS[1]
if len(LAYER_UNITS) > 2:
self.layer3_units = LAYER_UNITS[2]
self.l_in = None
self.output = None
self.classifier = False
if classifier:
self.classifier = True
def build_model(self, input_dim):
# create symbolic representation of the inputs
# l_in input shape ==> (n_batch, n_features): one flat feature vector per example
self.l_in = lasagne.layers.InputLayer(shape=(None, input_dim))
l_hid1 = lasagne.layers.DenseLayer(self.l_in, num_units=self.layer1_units,
nonlinearity=lasagne.nonlinearities.tanh,
W=lasagne.init.GlorotUniform())
if self.layer3_units != None:
l_hid2 = lasagne.layers.DenseLayer(l_hid1, num_units=self.layer2_units,
nonlinearity=lasagne.nonlinearities.tanh,
W=lasagne.init.GlorotUniform())
l_hid3 = lasagne.layers.DenseLayer(l_hid2, num_units=self.layer3_units,
nonlinearity=lasagne.nonlinearities.tanh,
W=lasagne.init.GlorotUniform())
if self.classifier:
self.output = lasagne.layers.DenseLayer(l_hid3, num_units=2,
nonlinearity=lasagne.nonlinearities.softmax,
W=lasagne.init.GlorotUniform())
else:
self.output = l_hid3
elif self.layer3_units == None and self.layer2_units != None:
l_hid2 = lasagne.layers.DenseLayer(l_hid1, num_units=self.layer2_units,
nonlinearity=lasagne.nonlinearities.tanh,
W=lasagne.init.GlorotUniform())
if self.classifier:
self.output = lasagne.layers.DenseLayer(l_hid2, num_units=2,
nonlinearity=lasagne.nonlinearities.softmax,
W=lasagne.init.GlorotUniform())
else:
self.output = l_hid2
else:
if self.classifier:
self.output = lasagne.layers.DenseLayer(l_hid1, num_units=2,
nonlinearity=lasagne.nonlinearities.softmax,
W=lasagne.init.GlorotUniform())
else:
self.output = l_hid1
self.all_params = lasagne.layers.get_all_params(self.output)
| Jonbean/models | neural_reasoner/MLP_Encoder.py | Python | gpl-3.0 | 3,035 |
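A minimal usage sketch of the encoder above, assuming Theano and Lasagne are installed; the layer sizes and input dimension are arbitrary example values, not ones taken from the repository.
import theano
import theano.tensor as T
import lasagne

x = T.matrix('x')
encoder = MLPEncoder([256, 128], classifier=True)    # two hidden layers, softmax output
encoder.build_model(input_dim=300)
prediction = lasagne.layers.get_output(encoder.output, {encoder.l_in: x})
predict_fn = theano.function([x], prediction)        # compiled forward pass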
import sys
import os
import numpy as np
import copy
import pysam
import dinucshuffle
from Bio import SeqIO
import dubiotools as dbt
class Reader(object):
def __init__(self):
#self.type='BaseReader'
pass
def pull(self):
return ([],[])
def pull_as_onehot(self,batch_size):
#Calls the pull function to retrieve a list of nucleotide sequences
#Converts the nucleotides to a [batch_size,4,seq_len] onehot numpy array
nuc_list, _ = self.pull(batch_size)  # pull() returns (nuc_list, rec_ids)
dna_batch = np.zeros((batch_size,4,self.seq_len))
#labels_batch = np.ones((batch_size))*label
for bi,seq in enumerate(nuc_list):
dna_batch[bi,:,:]=dbt.seq_to_onehot(seq)
return dna_batch
def pull_dinuc_shuffled(self,batch_size):
nuc_list,rec_ids=self.pull(batch_size)
for bi,seq in enumerate(nuc_list):
#Randomize Ns in seq
seq = dinucshuffle.replaceN(seq.upper())
nuc_list[bi] = dinucshuffle.dinuclShuffle(seq)
rec_ids[bi] = rec_ids[bi]+"; dinucleotide_shuffled"
return nuc_list, rec_ids
def get_num_records(self):
#Very expensive call. Must call pull for entire dataset
self.num_records = 0
while True:
nuc_list,rec_ids = self.pull(1)
if nuc_list == [] or rec_ids == []:
break
else:
self.num_records += 1
#reset pull counter
self.num_pulled = 0
def close(self):
print "Closed reader for",self.name
self.parser.close()
class BedReader(Reader):
"""
Sequentially reads entries from a bed file of genomic coordinates and
outputs nucleotides on each pull(num_examples)
if start_window is defined, instead of reading coordinates
exactly, BedReader will read a window relative to the start coordinate
"""
def __init__(self,
coord_file,
genome_file,
chr_sizes_file,
seq_len,
skip_first=True,
start_window=None,
pull_limit = -1,
filter_by_len = False
):
"""Read a bed file with many options for preprocessing
:param coord_file: '.bed' file
:param genome_file: '.fa' file with whole genome
:param chr_sizes_file: chromosome sizes file corresponding to aforementioned genome
:param seq_len: The sequence length to extract from the start coord.
This also represents the maximum length that can be pulled.
bed file lines shorter than this value will not be pulled.
:param skip_first: Skip the first line of the file (the header line)
:param start_window: A tuple (ie: (-100,100). If specified, extract this
range from around the start window instead of using
both start and end
:param pull_limit: If specified, only extract the specified number of lines
(excluding header if skip_first is set to True).
Pull all if set to -1
:param filter_by_len: If True, only pull from lines where
end-start > self.seq_len
:returns: a BedReader object
:rtype: BedReader type object
"""
Reader.__init__(self)
self.coord_file = coord_file
self.name = self.coord_file
self.genome_file = genome_file
self.chr_sizes_file = chr_sizes_file
self.chr_sizes_dict = dbt.chr_sizes_dict(self.chr_sizes_file)
self.seq_len = seq_len
self.parser = None
self.num_pulled= 0
#self.num_records = dbt.check_bed_bounds(self.coord_file,self.chr_sizes_dict)
self.skip_first = skip_first
self.start_window = start_window
self.pull_limit = pull_limit
self.filter_by_len = filter_by_len
def open(self):
print "Opening BedFile",self.coord_file
print "Opening genome file",self.genome_file
self.genome_idx = pysam.FastaFile(self.genome_file)
self.parser = open(self.coord_file,'r')
if self.skip_first==True:
self.parser.readline()
def close(self):
self.parser.close()
'''
def pull_batch_eval(self,num_examples):
#This interface method is used to make this class
#a compatible drop in for InputCollection in certain cases
return pull(num_examples)
'''
def pull(self,num_examples):
if self.pull_limit > -1 and self.num_pulled > self.pull_limit:
return [],[]
#Returns empty lists on failure
nuc_list=[]
rec_ids=[]
#BedReader
"""Pull sequence from genome in sequential order"""
for i in range(num_examples):
line= self.parser.readline().strip().split()
if line != []:
contig,start_str,end_str = line[:3]
contig = str(contig)
real_start = int(start_str)
start = real_start
end= int(end_str)
real_len = end-real_start
if self.filter_by_len and real_len < self.seq_len:
#Recursively call this method until an acceptably long sequence is
#pulled.
#self.num_pulled will only get pulled if successful.
return self.pull(num_examples)
if self.start_window:
#Use a window around the start position instead
start = real_start+self.start_window[0]
end = real_start+self.start_window[1]
#Check start and end bounds
if (start >= 0) and (end <= int(self.chr_sizes_dict[contig])):
#Check specified seq_len
if (end-start)==self.seq_len:
seq= self.genome_idx.fetch(contig,start,end)
#Check pulled sequence
if len(seq) == self.seq_len:
nuc_list.append(seq)
rec_id = [contig,':',start_str,'-',end_str]
rec_ids.append(''.join(rec_id))
self.num_pulled += 1
else:
print "Error! record {}:{}-{} did not yield correct sequence length {}".\
format(contig,start_str,end_str,self.seq_len)
print "on pysam pull."
else:
print "Record",(contig,start,end),"does not have seq_len",self.seq_len
else:
print (contig,start,end),"out of bounds."
actual_num_examples = len(nuc_list)
if actual_num_examples != num_examples:
print "Reached end of file and only pulling {} from file {}".\
format(actual_num_examples,self.coord_file)
print "Pulled {} records from file {}".\
format(self.num_pulled,self.coord_file)
return nuc_list,rec_ids
class FastaReader(Reader):
"""
Sequentially reads records from a fasta file and outputs nucleotides on each pull
"""
def __init__(self,fasta_file,seq_len,pull_limit=-1):
Reader.__init__(self)
if os.path.splitext(fasta_file)[-1] not in ['.fa','.fasta']:
print "File",fasta_file,"should have \'.fa\' or \'.fasta\' extension."
self.fasta_file = fasta_file
self.name = self.fasta_file
self.seq_len = seq_len
#self.num_records = len(SeqIO.index(self.fasta_file,"fasta"))
self.num_pulled = 0 #Determine number of records by pulls
self.pull_limit = pull_limit
#self.open()
def open(self):
#self.num_records = len(SeqIO.to_dict(SeqIO.parse(self.fasta_file,"fasta")))
print "Opening FastaReader {}".format(self.fasta_file)
self.parser = SeqIO.parse(self.fasta_file,"fasta")
def pull_batch_eval(self,num_examples):
#Used to maintain commonality with InputCollection
return self.pull(num_examples)
def pull(self,num_examples):
if self.pull_limit > -1 and self.num_pulled > self.pull_limit:
print "Reached pull limit after {} pulls".format(self.num_pulled)
return [],[]
#FastaReader
#Returns empty lists on failure
nuc_list = []
rec_ids =[]
"""Pull fasta records in sequential order"""
for i in range(num_examples):
try:
seq_obj = self.parser.next()
nuc_seq = str(seq_obj.seq)
rec_id = seq_obj.id
if len(nuc_seq) == self.seq_len:
nuc_list.append(nuc_seq)
rec_ids.append(rec_id)
self.num_pulled += 1
else:
print "Error. {} sequence length does not match {}."\
.format(rec_id,self.seq_len)
except StopIteration:
print "Failure in FastaReader pull at", self.num_pulled
actual_num_examples = len(nuc_list)
if actual_num_examples != num_examples:
print "Reached end of file and only pulling {} from file {}".\
format(actual_num_examples,self.fasta_file)
print "Pulled {} records from fasta file {}".\
format(self.num_pulled,self.fasta_file)
return nuc_list,rec_ids
class DinucShuffleReader(Reader):
"""
Dinucleotide shuffles the entries of a list of readers
Takes another reader object as sole input
Note: every pull will perform a unique shuffling operation.
To cache pulls, save the output of this reader as a Fasta file
"""
def __init__(self,reader_list):
Reader.__init__(self)
#Copy all readers in reader_list
#Note: copy.copy copies will have refs to objects in the copied
# object, whereas copy.deepcopy will straight copy these objects
self.reader_list = [copy.copy(reader) for reader in reader_list]
self.reader_index = 0 #Index for reader currently being pulled from
self.seq_len = self.reader_list[0].seq_len
self.name = "dinuc_shuffled"
self.num_pulled = 0
def save_as_fasta(self,output_file):
print "Saving dinucleotide shuffled entries in file",output_file
self.reader_index=0
with open(output_file,'w') as of:
while(True):
dinuc_list,rec_ids = self.pull(1)
if not (dinuc_list == [] or dinuc_list == None or
rec_ids == [] or rec_ids == None):
of.write(">{}\n".format(rec_ids[0]))
of.write(dinuc_list[0]+"\n")
else:
break
self.reader_index=0
def pull(self,batch_size):
#Returns empty lists on final pull
cur_reader = self.reader_list[self.reader_index]
dinuc_list,rec_ids = cur_reader.pull_dinuc_shuffled(batch_size)
#Go to next reader if pull fails
if dinuc_list == [] or rec_ids == []:
self.reader_index += 1
else:
self.num_pulled += len(dinuc_list)
return dinuc_list,rec_ids
def open(self):
print "Opening DinucReader"
for reader in self.reader_list:
reader.open()
def close(self):
for reader in self.reader_list:
reader.close()
| LarsDu/DeepNucDecomp | deepnuc/readers.py | Python | gpl-3.0 | 11,974 |
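A minimal usage sketch of BedReader as documented in its docstring above; every file name is hypothetical, and start_window is chosen so that end - start matches seq_len.
reader = BedReader('peaks.bed',             # bed coordinates
                   'hg19.fa',               # whole-genome fasta
                   'hg19.chrom.sizes',      # chromosome sizes used for bounds checks
                   seq_len=200,
                   start_window=(-100, 100))
reader.open()
nuc_list, rec_ids = reader.pull(32)         # up to 32 sequences plus "chr:start-end" ids
reader.close()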
from pytank.Applications import *
from pytank.Buttons import *
from pytank.Fuctions import *
from pytank.SystemFunctions import Ping
import traceback
def Main():
ConnectToConfigurator()
AddDevices(Configurator_security,15,Configurator_securitypanels)
ResetToFactory()
try:
CurrentOpenedWindows = GetListOfAllOpenedWindows()
Main()
except Exception,err:
error = traceback.format_exc()
ScriptError(error)
CloseAllWindowsNotInList(CurrentOpenedWindows)
| kenshay/ImageScript | ProgramData/SystemFiles/Python/Lib/site-packages/elan/__OLD_SCRIPTS/runnner_AddAllSecurityDevices.py | Python | gpl-3.0 | 484 |
# -*- coding: utf-8 -*-
#
# main.py
#
# Copyright (C) 2010 - 2015 Wei-Ning Huang (AZ) <[email protected]>
# All Rights reserved.
#
# This file is part of cppman.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
import gzip
import importlib
import os
import re
import shutil
import sqlite3
import subprocess
import sys
import urllib.request
from cppman import environ
from cppman import util
from cppman.crawler import Crawler
class Cppman(Crawler):
"""Manage cpp man pages, indexes"""
def __init__(self, forced=False):
Crawler.__init__(self)
self.results = set()
self.forced = forced
self.success_count = None
self.failure_count = None
self.blacklist = [
]
self.name_exceptions = [
'http://www.cplusplus.com/reference/string/swap/'
]
def extract_name(self, data):
"""Extract man page name from web page."""
name = re.search('<h1[^>]*>(.+?)</h1>', data).group(1)
name = re.sub(r'<([^>]+)>', r'', name)
name = re.sub(r'&gt;', r'>', name)
name = re.sub(r'&lt;', r'<', name)
return name
def rebuild_index(self):
"""Rebuild index database from cplusplus.com."""
try:
os.remove(environ.index_db_re)
except:
pass
self.db_conn = sqlite3.connect(environ.index_db_re)
self.db_cursor = self.db_conn.cursor()
self.db_cursor.execute('CREATE TABLE "cplusplus.com" '
'(name VARCHAR(255), url VARCHAR(255))')
self.db_cursor.execute('CREATE TABLE "cppreference.com" '
'(name VARCHAR(255), url VARCHAR(255))')
try:
self.add_url_filter('\.(jpg|jpeg|gif|png|js|css|swf|svg)$')
self.set_follow_mode(Crawler.F_SAME_PATH)
# cplusplus.com
self.crawl('http://www.cplusplus.com/reference/')
for name, url in self.results:
self.insert_index('cplusplus.com', name, url)
self.db_conn.commit()
# Rename duplicate entries
duplicates = self.db_cursor.execute('SELECT name, COUNT(name) '
'AS NON '
'FROM "cplusplus.com" '
'GROUP BY NAME '
'HAVING (NON > 1)').fetchall()
for name, num in duplicates:
dump = self.db_cursor.execute('SELECT name, url FROM '
'"cplusplus.com" WHERE name="%s"'
% name).fetchall()
for n, u in dump:
if u not in self.name_exceptions:
n2 = n[5:] if n.startswith('std::') else n
try:
group = re.search('/([^/]+)/%s/$' % n2, u).group(1)
except Exception:
group = re.search('/([^/]+)/[^/]+/$', u).group(1)
new_name = '%s (%s)' % (n, group)
self.db_cursor.execute('UPDATE "cplusplus.com" '
'SET name="%s", url="%s" '
'WHERE url="%s"' %
(new_name, u, u))
self.db_conn.commit()
# cppreference.com
self.results = set()
self.crawl('http://en.cppreference.com/w/cpp', '/w/cpp')
for name, url in self.results:
self.insert_index('cppreference.com', name, url)
self.db_conn.commit()
except KeyboardInterrupt:
os.remove(environ.index_db_re)
raise KeyboardInterrupt
finally:
self.db_conn.close()
def process_document(self, doc):
"""callback to insert index"""
if doc.url not in self.blacklist:
print("Indexing '%s' ..." % doc.url)
name = self.extract_name(doc.text)
self.results.add((name, doc.url))
else:
print("Skipping blacklisted page '%s' ..." % doc.url)
return None
def insert_index(self, table, name, url):
"""callback to insert index"""
names = name.split(',')
if len(names) > 1:
m = re.match(r'^\s*(.*?::(?:operator)?)([^:]*)\s*$', names[0])
if m:
prefix = m.group(1)
names[0] = m.group(2)
names = [prefix + n for n in names]
for n in names:
self.db_cursor.execute(
'INSERT INTO "%s" (name, url) VALUES ("%s", "%s")' %
(table, n.strip(), url))
def cache_all(self):
"""Cache all available man pages"""
print('By default, cppman fetches pages on-the-fly if corresponding '
'page is not found in the cache. The "cache-all" option is only '
'useful if you want to view man pages offline. '
'Caching all contents will take several minutes, '
'do you want to continue [y/N]?')
respond = input()
if respond.lower() not in ['y', 'ye', 'yes']:
raise KeyboardInterrupt
try:
os.makedirs(environ.man_dir)
except:
pass
self.success_count = 0
self.failure_count = 0
if not os.path.exists(environ.index_db):
raise RuntimeError("can't find index.db")
conn = sqlite3.connect(environ.index_db)
cursor = conn.cursor()
source = environ.config.source
print('Caching manpages from %s ...' % source)
data = cursor.execute('SELECT * FROM "%s"' % source).fetchall()
for name, url in data:
retries = 3
print('Caching %s ...' % name)
while retries > 0:
try:
self.cache_man_page(source, url, name)
except Exception:
print('Retrying ...')
retries -= 1
else:
break
if retries == 0:
print('Error caching %s ...' % name)
self.failure_count += 1
else:
self.success_count += 1
conn.close()
print('\n%d manual pages cached successfully.' % self.success_count)
print('%d manual pages failed to cache.' % self.failure_count)
self.update_mandb(False)
def cache_man_page(self, source, url, name):
"""callback to cache new man page"""
# Skip if already exists, override if forced flag is true
outname = self.get_page_path(source, name)
if os.path.exists(outname) and not self.forced:
return
try:
os.makedirs(os.path.join(environ.man_dir, source))
except OSError:
pass
# There are often some errors in the HTML, for example: missing closing
# tag. We use fixupHTML to fix this.
data = util.fixupHTML(urllib.request.urlopen(url).read())
formatter = importlib.import_module('cppman.formatter.%s' % source[:-4])
groff_text = formatter.html2groff(data, name)
with gzip.open(outname, 'w') as f:
f.write(groff_text.encode('utf-8'))
def clear_cache(self):
"""Clear all cache in man3"""
shutil.rmtree(environ.man_dir)
def man(self, pattern):
"""Call viewer.sh to view man page"""
try:
avail = os.listdir(os.path.join(environ.man_dir, environ.source))
except OSError:
avail = []
if not os.path.exists(environ.index_db):
raise RuntimeError("can't find index.db")
conn = sqlite3.connect(environ.index_db)
cursor = conn.cursor()
# Try direct match
try:
page_name, url = cursor.execute(
'SELECT name,url FROM "%s" '
'WHERE name="%s" ORDER BY LENGTH(name)'
% (environ.source, pattern)).fetchone()
except TypeError:
# Try standard library
try:
page_name, url = cursor.execute(
'SELECT name,url FROM "%s" '
'WHERE name="std::%s" ORDER BY LENGTH(name)'
% (environ.source, pattern)).fetchone()
except TypeError:
try:
page_name, url = cursor.execute(
'SELECT name,url FROM "%s" '
'WHERE name LIKE "%%%s%%" ORDER BY LENGTH(name)'
% (environ.source, pattern)).fetchone()
except TypeError:
raise RuntimeError('No manual entry for ' + pattern)
finally:
conn.close()
page_name = page_name.replace('/', '_')
if self.forced or page_name + '.3.gz' not in avail:
self.cache_man_page(environ.source, url, page_name)
pager = environ.pager if sys.stdout.isatty() else environ.renderer
# Call viewer
pid = os.fork()
if pid == 0:
os.execl('/bin/sh', '/bin/sh', pager,
self.get_page_path(environ.source, page_name),
str(util.get_width()), environ.pager_config,
page_name)
return pid
def find(self, pattern):
"""Find pages in database."""
if not os.path.exists(environ.index_db):
raise RuntimeError("can't find index.db")
conn = sqlite3.connect(environ.index_db)
cursor = conn.cursor()
selected = cursor.execute(
'SELECT * FROM "%s" WHERE name '
'LIKE "%%%s%%" ORDER BY LENGTH(name)'
% (environ.source, pattern)).fetchall()
pat = re.compile('(%s)' % pattern, re.I)
if selected:
for name, url in selected:
if os.isatty(sys.stdout.fileno()):
print(pat.sub(r'\033[1;31m\1\033[0m', name))
else:
print(name)
else:
raise RuntimeError('%s: nothing appropriate.' % pattern)
def update_mandb(self, quiet=True):
"""Update mandb."""
if not environ.config.UpdateManPath:
return
print('\nrunning mandb...')
cmd = 'mandb %s' % (' -q' if quiet else '')
subprocess.Popen(cmd, shell=True).wait()
def get_page_path(self, source, name):
return os.path.join(environ.man_dir, source, name + '.3.gz')
| czchen/debian-cppman | cppman/main.py | Python | gpl-3.0 | 11,297 |
# (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
import pipes
import random
from ansible import constants as C
from ansible.compat.six import iteritems
from ansible.module_utils._text import to_text
from ansible.plugins.action import ActionBase
class ActionModule(ActionBase):
def run(self, tmp=None, task_vars=None):
''' transfer the given module name, plus the async module, then run it '''
if task_vars is None:
task_vars = dict()
result = super(ActionModule, self).run(tmp, task_vars)
if self._play_context.check_mode:
result['skipped'] = True
result['msg'] = 'check mode not supported for this module'
return result
remote_user = task_vars.get('ansible_ssh_user') or self._play_context.remote_user
if not tmp:
tmp = self._make_tmp_path(remote_user)
self._cleanup_remote_tmp=True
module_name = self._task.action
env_string = self._compute_environment_string()
module_args = self._task.args.copy()
if self._play_context.no_log or C.DEFAULT_NO_TARGET_SYSLOG:
module_args['_ansible_no_log'] = True
# configure, upload, and chmod the target module
(module_style, shebang, module_data, module_path) = self._configure_module(module_name=module_name, module_args=module_args, task_vars=task_vars)
remote_module_filename = self._connection._shell.get_remote_filename(module_path)
remote_module_path = self._connection._shell.join_path(tmp, remote_module_filename)
if module_style == 'binary':
self._transfer_file(module_path, remote_module_path)
else:
self._transfer_data(remote_module_path, module_data)
# configure, upload, and chmod the async_wrapper module
(async_module_style, shebang, async_module_data, async_module_path) = self._configure_module(module_name='async_wrapper', module_args=dict(), task_vars=task_vars)
async_module_remote_filename = self._connection._shell.get_remote_filename(async_module_path)
remote_async_module_path = self._connection._shell.join_path(tmp, async_module_remote_filename)
self._transfer_data(remote_async_module_path, async_module_data)
argsfile = None
if module_style in ('non_native_want_json', 'binary'):
argsfile = self._transfer_data(self._connection._shell.join_path(tmp, 'arguments'), json.dumps(module_args))
elif module_style == 'old':
args_data = ""
for k, v in iteritems(module_args):
args_data += '%s="%s" ' % (k, pipes.quote(to_text(v)))
argsfile = self._transfer_data(self._connection._shell.join_path(tmp, 'arguments'), args_data)
remote_paths = tmp, remote_module_path, remote_async_module_path
# argsfile doesn't need to be executable, but this saves an extra call to the remote host
if argsfile:
remote_paths += argsfile,
self._fixup_perms2(remote_paths, remote_user, execute=True)
async_limit = self._task.async
async_jid = str(random.randint(0, 999999999999))
# call the interpreter for async_wrapper directly
# this permits use of a script for an interpreter on non-Linux platforms
# TODO: re-implement async_wrapper as a regular module to avoid this special case
interpreter = shebang.replace('#!', '').strip()
async_cmd = [interpreter, remote_async_module_path, async_jid, async_limit, remote_module_path]
if env_string:
async_cmd.insert(0, env_string)
if argsfile:
async_cmd.append(argsfile)
else:
# maintain a fixed number of positional parameters for async_wrapper
async_cmd.append('_')
if not self._should_remove_tmp_path(tmp):
async_cmd.append("-preserve_tmp")
async_cmd = " ".join(to_text(x) for x in async_cmd)
result.update(self._low_level_execute_command(cmd=async_cmd))
result['changed'] = True
# the async_wrapper module returns dumped JSON via its stdout
# response, so we (attempt to) parse it here
parsed_result = self._parse_returned_data(result)
# Delete tmpdir from controller unless async_wrapper says something else will do it.
# Windows cannot request deletion of files/directories that are in use, so the async
# supervisory process has to be responsible for it.
if parsed_result.get("_suppress_tmpdir_delete", False) != True:
self._remove_tmp_path(tmp)
# just return the original result
if 'skipped' in result and result['skipped'] or 'failed' in result and result['failed']:
return result
return parsed_result
| eerorika/ansible | lib/ansible/plugins/action/async.py | Python | gpl-3.0 | 5,578 |
# This file is part of Lurklib.
# Copyright (C) 2011 LK-
#
# Lurklib is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Lurklib is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Lurklib. If not, see <http://www.gnu.org/licenses/>.
from __future__ import with_statement
class _Connection(object):
def _connect(self, server, port, tls=True, tls_verify=True, proxy=False,
proxy_type='SOCKS5', proxy_server=None,
proxy_port=None, proxy_username=None, proxy_password=None):
"""
Connects the socket to an IRC server.
Required arguments:
* server - Server to connect to.
* port - Port to use.
Optional arguments:
* tls=True - Should we use TLS/SSL?
* tls_verify=True - Verify the TLS certificate?
Only works with Python 3.
* proxy=False - Should we use a proxy?
* proxy_type='SOCKS5' - Proxy type: SOCKS5, SOCKS4 or HTTP
* proxy_server=None - Proxy server's address
* proxy_port=None - Proxy server's port
* proxy_username=None - If SOCKS5 is used,
a proxy username/password can be specified.
* proxy_password=None - If SOCKS5 is used,
a proxy username/password can be specified.
"""
with self.lock:
if proxy:
if proxy_type == 'SOCKS5':
proxy_type = self._m_proxy.PROXY_TYPE_SOCKS5
elif proxy_type == 'SOCKS4':
proxy_type = self._m_proxy.PROXY_TYPE_SOCKS4
elif proxy_type == 'HTTP':
proxy_type = self._m_proxy.PROXY_TYPE_HTTP
self._socket = self._m_proxy.socksocket()
self._socket.setproxy(proxytype=proxy_type, \
addr=proxy_server, \
port=proxy_port, \
username=proxy_username, \
password=proxy_password)
if tls:
if tls_verify:
ca_bundle = self._m_tempfile.NamedTemporaryFile().name
with open(ca_bundle, 'w') as bundle_file:
bundle_file.write(self._ca_bundle)
cert_required = self._m_tls.CERT_REQUIRED
self._socket = \
self._m_tls.wrap_socket(self._socket, \
cert_reqs=cert_required, \
ca_certs=ca_bundle)
self._socket.connect((server, port))
self._m_tls.match_hostname(self._socket.getpeercert(), \
server)
return None
else:
self._socket = self._m_tls.wrap_socket(self._socket)
if not tls_verify:
self._socket.connect((server, port))
self._socket.connect((server, port))
def _register(self, nick, user, real_name, password=None):
"""
Register the connection with the IRC server.
Required arguments:
* nick - Nick to use. If a tuple/list is specified -
it will try to use the first,
and if the first is already used -
it will try to use the second and so on.
* user - Username to use.
* real_name - Real name to use.
Optional arguments:
* password=None - IRC server password.
"""
with self.lock:
if password:
self._password(password)
self.nick(nick)
self._user(user, real_name)
def _init(self, server, nick, user, real_name, password, port=None,
tls=True, tls_verify=True,
proxy=False, proxy_type='SOCKS5', proxy_server=None,
proxy_port=None, proxy_username=None, proxy_password=None):
"""
Connect and register with the IRC server and -
set server-related information variables.
Required arguments:
* server - Server to connect to.
* nick - Nick to use.
If a tuple/list is specified it will try to use the first,
and if the first is already used -
it will try to use the second and so on.
* user - Username to use.
* real_name - Real name to use.
* password=None - IRC server password.
Optional arguments:
* port - Port to use.
* tls=True - Should we use TLS/SSL?
* tls_verify=True - Verify the TLS certificate?
Only works with Python 3.
* proxy=False - Should we use a proxy?
* proxy_type='SOCKS5' - Proxy type: SOCKS5, SOCKS4 or HTTP
* proxy_server=None - Proxy server's address
* proxy_port=None - Proxy server's port
* proxy_username=None - If SOCKS5 is used,
a proxy username/password can be specified.
* proxy_password=None - If SOCKS5 is used,
a proxy username/password can be specified.
"""
with self.lock:
self.current_nick = nick
if tls:
if not port:
port = 6697
self._connect(server, port, tls, tls_verify, proxy, \
proxy_type, proxy_server, proxy_port, \
proxy_username, proxy_password)
else:
if not port:
port = 6667
self._connect(server, port, tls, tls_verify, proxy, \
proxy_type, proxy_server, proxy_port, \
proxy_username, proxy_password)
while self.readable(2):
data = self.recv()
if data[0] == 'NOTICE':
self.server = data[1][0]
self.con_msg.append(data)
self._register(nick, user, real_name, password)
while self.readable(timeout=4):
rdata = self.recv()
if rdata[0] == 'UNKNOWN':
data = rdata[1][3].replace(':', '', 1)
ncode = rdata[1][1]
if ncode == '004':
info = data.split()
self.server = info[0]
self.ircd = info[1]
self.umodes = info[2]
self.cmodes = info[3]
elif ncode == '005':
version = rdata[1][3].replace(':are supported' + \
' by this server', '')
version = version.split()
for info in version:
try:
info = info.split('=')
name = info[0]
value = info[1]
self.version[name] = value
if name == 'CHARSET':
self.encoding = value
except IndexError:
self.version[info[0]] = True
elif ncode == '376':
self.con_msg.append(rdata)
break
elif ncode == '422':
self.con_msg.append(rdata)
break
else:
if rdata[0] == 'NOTICE':
self.server = rdata[1][0]
self.con_msg.append(rdata[1])
self.motd = tuple(self.motd)
self.con_msg = tuple(self.con_msg)
self.connected = True
self.keep_going = \
True
def _password(self, password):
"""
Authenticates with the IRC server.
NOTE: Method will not raise an exception,
if the password is wrong. It will just fail..
Required arguments:
* password - Password to send.
"""
with self.lock:
self.send('PASS :%s' % password, error_check=True)
def _nick(self, nick):
"""
Sets your nick.
Required arguments:
* nick - New nick.
"""
with self.lock:
self.send('NICK :%s' % nick)
if self.readable():
msg = self._recv(expected_replies='NICK')
if msg[0] == 'NICK':
if not self.hide_called_events:
self.stepback()
for channel in self.channels:
if 'USERS' in self.channels[channel]:
priv_level = \
self.channels[channel]['USERS'][self.current_nick]
del self.channels[channel]['USERS'][self.current_nick]
self.channels[channel]['USERS'][nick] = priv_level
self.current_nick = nick
def nick(self, nick):
"""
Sets your nick.
Required arguments:
* nick - New nick or a tuple of possible new nicks.
"""
nick_set_successfully = False
try:
self._nick(nick)
nick_set_successfully = True
except TypeError:
for nick_ in nick:
try:
self._nick(nick_)
nick_set_successfully = True
break
except self.NicknameInUse:
pass
if not nick_set_successfully:
self.exception('433')
def _user(self, user, real_name):
"""
Sends the USER message.
Required arguments:
* user - Username to send.
* real_name - Real name to send.
"""
with self.lock:
self.send('USER %s 0 * :%s' % (user, real_name))
if self.readable():
self._recv()
self.stepback()
def oper(self, name, password):
"""
Opers up.
Required arguments:
* name - Oper name.
* password - Oper password.
"""
with self.lock:
self.send('OPER %s %s' % (name, password))
snomasks = ''
new_umodes = ''
if self.readable():
msg = self._recv(expected_replies=( \
'MODE', '381', '008'))
if msg[0] == 'MODE':
new_umodes = msg[2].replace(':', '', 1)
elif msg[0] == '381':
return new_umodes, snomasks
elif msg[0] == '008':
snomasks = msg[2].split('(')[1].split(')')[0]
def umode(self, nick, modes=''):
"""
Sets/gets user modes.
Required arguments:
* nick - Nick to set/get user modes for.
Optional arguments:
* modes='' - Sets these user modes on a nick.
"""
with self.lock:
if not modes:
self.send('MODE %s' % nick)
if self.readable():
msg = self._recv(expected_replies=('221',))
if msg[0] == '221':
modes = msg[2].replace('+', '').replace(':', '', 1)
return modes
self.send('MODE %s %s' % (nick, modes))
if self.readable():
msg = self._recv(expected_replies=('MODE',))
if msg[0] == 'MODE':
if not self.hide_called_events:
self.stepback()
return msg[2].replace(':', '', 1)
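    # Illustrative example (`irc` is a placeholder instance name): calling
    # umode() without a mode string queries the current user modes (numeric
    # 221); passing one sets it and returns the acknowledged modes.
    #     current = irc.umode('mynick')      # query
    #     irc.umode('mynick', '+i')          # set invisible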
def service(self):
""" Not implemented. """
raise self.NotImplemented('LurklibError: NotImplemented')
def _quit(self, reason=''):
"""
Sends a QUIT message to the server.
Optional arguments:
* reason='' - Reason for quitting.
"""
with self.lock:
self.send('QUIT :%s' % reason)
def quit(self, reason=''):
"""
Sends a QUIT message, closes the connection and -
ends Lurklib's main loop.
Optional arguments:
* reason='' - Reason for quitting.
"""
with self.lock:
self.keep_going = False
self._quit(reason)
self._socket.shutdown(self._m_socket.SHUT_RDWR)
self._socket.close()
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
""" For use with the Python 'with' statement. """
with self.lock:
self.quit()
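    # Illustrative example: because __enter__/__exit__ are defined, an
    # already-constructed client (placeholder name `irc`) can be scoped with
    # a `with` statement; quit() is issued automatically on exit.
    #     with irc:
    #         irc.nick('mynick')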
def squit(self, server, reason=''):
"""
Quits a server.
Required arguments:
* server - Server to quit.
Optional arguments:
* reason='' - Reason for the server quitting.
"""
with self.lock:
self.send('SQUIT %s :%s' % (server, reason))
while self.readable():
msg = self._recv(expected_replies=('SQUIT',))
if msg[0] == 'SQUIT':
if not self.hide_called_events:
self.stepback()
def latency(self):
""" Checks the connection latency. """
with self.lock:
self.send('PING %s' % self.server)
ctime = self._m_time.time()
msg = self._recv(expected_replies=('PONG',))
if msg[0] == 'PONG':
latency = self._m_time.time() - ctime
return latency
| bbqsrc/lurklib | lurklib/connection.py | Python | gpl-3.0 | 14,032 |
# -*- encoding: utf-8 -*-
from abjad import *
def test_labeltools_label_leaves_in_expr_with_leaf_indices_01():
r'''Leaf indices start at 0.
'''
staff = Staff("c'8 d'8 e'8 f'8")
labeltools.label_leaves_in_expr_with_leaf_indices(staff)
assert systemtools.TestManager.compare(
staff,
r'''
\new Staff {
c'8
_ \markup {
\small
0
}
d'8
_ \markup {
\small
1
}
e'8
_ \markup {
\small
2
}
f'8
_ \markup {
\small
3
}
}
'''
)
assert inspect_(staff).is_well_formed() | mscuthbert/abjad | abjad/tools/labeltools/test/test_labeltools_label_leaves_in_expr_with_leaf_indices.py | Python | gpl-3.0 | 899 |
from Regions import *
from Classes import button
import Settings
GlobalCustomButtonThreshold = Settings.GlobalCustomButtonThreshold
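# Every UI element below follows the same pattern: instantiate button(), bind
# a reference screenshot with .Image("<name>.png"), and restrict matching to a
# screen region with .Region(...); an optional .similar(...) call tunes the
# match threshold for that particular image.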
v_home = button()
v_home.Image("v_home.png")
v_home.Region(VIEWER_REGION)
source1 = button()
source1.Image("source1.png")
source1.Region(CONFIGURATOR_REGION)
showunsupporteddevices = button()
showunsupporteddevices.Image("showunsupporteddevices.png")
showunsupporteddevices.Region(CONFIGURATOR_REGION)
elangm88ethernet = button()
elangm88ethernet.Image("elangm88ethernet.png")
elangm88ethernet.Region(CONFIGURATOR_REGION)
source5 = button()
source5.Image("source5.png")
source5.Region(CONFIGURATOR_REGION)
source6 = button()
source6.Image("source6.png")
source6.Region(CONFIGURATOR_REGION)
source7 = button()
source7.Image("source7.png")
source7.Region(CONFIGURATOR_REGION)
source8 = button()
source8.Image("source8.png")
source8.Region(CONFIGURATOR_REGION)
source9 = button()
source9.Image("source9.png")
source9.Region(CONFIGURATOR_REGION)
senseinput = button()
senseinput.Image('senseinput.png')
senseinput.Region(CONFIGURATOR_REGION)
factoryresettheultramatrix = button()
factoryresettheultramatrix.Image('factoryresettheultramatrix.png')
factoryresettheultramatrix.Region(CONFIGURATOR_REGION)
paging = button()
paging.Image('paging.png')
paging.Region(CONFIGURATOR_REGION)
device = button()
device.Image("device.png")
device.Region(CONFIGURATOR_REGION)
ok2 = button()
ok2.Image("ok2.png")
ok2.Region(CONFIGURATOR_REGION)
addnewirlibrarysource = button()
addnewirlibrarysource.Image("addnewirlibrarysource.png")
addnewirlibrarysource.Region(CONFIGURATOR_REGION)
comport = button()
comport.Image("comport.png")
comport.Region(CONFIGURATOR_REGION)
comport.similar(int = .29)
copylayoutfromtemplate = button()
copylayoutfromtemplate.Image("copylayoutfromtemplate.png")
copylayoutfromtemplate.Region(CONFIGURATOR_REGION)
hdmiinput = button()
hdmiinput.Image("hdmiinput.png")
hdmiinput.Region(CONFIGURATOR_REGION)
constant = button()
constant.Image("constant.png")
constant.Region(CONFIGURATOR_REGION)
audioinput = button()
audioinput.Image("audioinput.png")
audioinput.Region(CONFIGURATOR_REGION)
energyefficiencymode = button()
energyefficiencymode.Image("energyefficiencymode.png")
energyefficiencymode.Region(CONFIGURATOR_REGION)
videooutput = button()
videooutput.Image("videooutput.png")
videooutput.Region(CONFIGURATOR_REGION)
audiooutput = button()
audiooutput.Image("audiooutput.png")
audiooutput.Region(CONFIGURATOR_REGION)
analogtodigital = button()
analogtodigital.Image("analogtodigital.png")
analogtodigital.Region(CONFIGURATOR_REGION)
audiooutputtype = button()
audiooutputtype.Image("audiooutputtype.png")
audiooutputtype.Region(CONFIGURATOR_REGION)
zones = button()
zones.Image("zones.png")
zones.Region(CONFIGURATOR_REGION)
zone1 = button()
zone1.Image("zone1.png")
zone1.Region(CONFIGURATOR_REGION)
audioinputtype = button()
audioinputtype.Image("audioinputtype.png")
audioinputtype.Region(CONFIGURATOR_REGION)
automationcomp = button()
automationcomp.Image("automationcomp.png")
automationcomp.Region(CONFIGURATOR_REGION)
apple = button()
apple.Image("apple.png")
apple.Region(CONFIGURATOR_REGION)
lgbluray = button()
lgbluray.Image("lgbluray.png")
lgbluray.Region(CONFIGURATOR_REGION)
av1 = button()
av1.Image("av1.png")
av1.Region(CONFIGURATOR_REGION)
addnewlibrarysource = button()
addnewlibrarysource.Image("addnewlibrarysource.png")
addnewlibrarysource.Region(CONFIGURATOR_REGION)
appletv = button()
appletv.Image("appletv.png")
appletv.Region(CONFIGURATOR_REGION)
v_appletvdevice = button()
v_appletvdevice.Image("v_appletvdevice.png")
v_appletvdevice.Region(VIEWER_REGION)
v_sonysinglebluraydevice = button()
v_sonysinglebluraydevice.Image("v_sonysinglebluraydevice.png")
v_sonysinglebluraydevice.Region(VIEWER_REGION)
v_sonysinglebluraydevice.similar(int = 50)
v_farright = button()
v_farright.Image("v_farright.png")
v_farright.Region(VIEWER_REGION)
ultra6x4192168842 = button()
ultra6x4192168842.Image("ultra6x4192168842.png")
ultra6x4192168842.Region(CONFIGURATOR_REGION)
ultra6x4192168842.similar(int = .29)
ultra6x41921681026 = button()
ultra6x41921681026.Image("ultra6x41921681026.png")
ultra6x41921681026.Region(CONFIGURATOR_REGION)
ultra6x41921681026.similar(int = .95)
page = button()
page.Image("page.png")
page.Region(CONFIGURATOR_REGION)
cmax_edit = button()
cmax_edit.Image("cmax_edit.png")
cmax_edit.Region(CONFIGURATOR_REGION_MAX)
addeditzone_page = button()
addeditzone_page.Image("addeditzone_page.png")
addeditzone_page.Region(ADDEDITZONE)
pagelevel = button()
pagelevel.Image("pagelevel.png")
pagelevel.Region(CONFIGURATOR_REGION)
ultra6x419216810263 = button()
ultra6x419216810263.Image("ultra6x419216810263.png")
ultra6x419216810263.Region(CONFIGURATOR_REGION)
ultra6x419216810263.similar(int = .95)
ultra6x419216810262 = button()
ultra6x419216810262.Image("ultra6x419216810262.png")
ultra6x419216810262.Region(CONFIGURATOR_REGION)
ultra6x419216810262.similar(int = .95)
bah = button()
bah.Image('bah.png')
bah.Region(VIEWER_REGION)
bah.similar(int = .40)
v_sonysinglebluray = button()
v_sonysinglebluray.Image("v_sonysinglebluray.png")
v_sonysinglebluray.Region(VIEWER_REGION)
v_sonymultibluraydevice = button()
v_sonymultibluraydevice.Image("v_sonymultibluraydevice.png")
v_sonymultibluraydevice.Region(VIEWER_REGION)
v_sonymultibluraydevice.similar(int = GlobalCustomButtonThreshold)
v_appletv = button()
v_appletv.Image("v_appletv.png")
v_appletv.Region(VIEWER_REGION)
interfacelgbluray = button()
interfacelgbluray.Image("interfacelgbluray.png")
interfacelgbluray.Region(CONFIGURATOR_REGION)
interfaceappletv = button()
interfaceappletv.Image("interfaceappletv.png")
interfaceappletv.Region(CONFIGURATOR_REGION)
interfacesonysinglebluray = button()
interfacesonysinglebluray.Image("interfacesonysinglebluray.png")
interfacesonysinglebluray.Region(CONFIGURATOR_REGION)
interfacesonysinglebluray.similar(int = 90)
analogblueray = button()
analogblueray.Image("analogblueray.png")
analogblueray.Region(CONFIGURATOR_REGION)
coaxappletv = button()
coaxappletv.Image("coaxappletv.png")
coaxappletv.Region(CONFIGURATOR_REGION)
interfacesonymultibluray = button()
interfacesonymultibluray.Image("interfacesonymultibluray.png")
interfacesonymultibluray.Region(CONFIGURATOR_REGION)
interfacesonymultibluray.similar(int = 90)
v_play = button()
v_play.Image("v_play.png")
v_play.Region(VIEWER_REGION)
v_lgblueraydevice = button()
v_lgblueraydevice.Image("v_lgblueraydevice.png")
v_lgblueraydevice.Region(VIEWER_REGION)
sonymultibluray = button()
sonymultibluray.Image("sonymultibluray.png")
sonymultibluray.Region(CONFIGURATOR_REGION)
sonysinglebluray = button()
sonysinglebluray.Image("sonysinglebluray.png")
sonysinglebluray.Region(CONFIGURATOR_REGION)
irsender = button()
irsender.Image("irsender.png")
irsender.Region(CONFIGURATOR_REGION)
audio1 = button()
audio1.Image("audio1.png")
audio1.Region(CONFIGURATOR_REGION)
sourcedevice = button()
sourcedevice.Image("sourcedevice.png")
sourcedevice.Region(CONFIGURATOR_REGION)
sources2 = button()
sources2.Image("sources2.png")
sources2.Region(CONFIGURATOR_REGION)
v_disc = button()
v_disc.Image("v_disc.png")
v_disc.Region(VIEWER_REGION)
v_disc.similar(int = .30)
v_musicone = button()
v_musicone.Image("v_musicone.png")
v_musicone.Region(VIEWER_REGION)
v_off = button()
v_off.Image("v_off.png")
v_off.Region(VIEWER_REGION)
v_musictwo = button()
v_musictwo.Image("v_musictwo.png")
v_musictwo.Region(VIEWER_REGION)
v_musicthree = button()
v_musicthree.Image("v_musicthree.png")
v_musicthree.Region(VIEWER_REGION)
v_allzonesoff = button()
v_allzonesoff.Image("v_allzonesoff.png")
v_allzonesoff.Region(VIEWER_REGION)
v_musicfour = button()
v_musicfour.Image("v_musicfour.png")
v_musicfour.Region(VIEWER_REGION)
v_zone4off = button()
v_zone4off.Image("v_zone4off.png")
v_zone4off.Region(VIEWER_REGION)
v_zone3off = button()
v_zone3off.Image("v_zone3off.png")
v_zone3off.Region(VIEWER_REGION)
v_zone2off = button()
v_zone2off.Image("v_zone2off.png")
v_zone2off.Region(VIEWER_REGION)
v_downsource = button()
v_downsource.Image("v_downsource.png")
v_downsource.Region(VIEWER_REGION)
v_downsource.similar(int = GlobalCustomButtonThreshold)
v_g = button()
v_g.Image("v_g.png")
v_g.Region(VIEWER_REGION)
v_zone1off = button()
v_zone1off.Image("v_zone1off.png")
v_zone1off.Region(VIEWER_REGION)
v_zone1 = button()
v_zone1.Image("v_zone1.png")
v_zone1.Region(VIEWER_REGION)
v_zone1.similar(int = GlobalCustomButtonThreshold)
v_back = button()
v_back.Image("v_back.png")
v_back.Region(VIEWER_REGION)
v_next = button()
v_next.Image("v_next.png")
v_next.Region(VIEWER_REGION)
v_zone2 = button()
v_zone2.Image("v_zone2.png")
v_zone2.Region(VIEWER_REGION)
v_zone3 = button()
v_zone3.Image("v_zone3.png")
v_zone3.Region(VIEWER_REGION)
v_zone4 = button()
v_zone4.Image("v_zone4.png")
v_zone4.Region(VIEWER_REGION)
v_shuffle = button()
v_shuffle.Image("v_shuffle.png")
v_shuffle.Region(VIEWER_REGION)
v_musicsymbol = button()
v_musicsymbol.Image("v_musicsymbol.png")
v_musicsymbol.Region(VIEWER_REGION)
v_musicsymbol.similar(int = .20)
v_mute = button()
v_mute.Image("v_mute.png")
v_mute.Region(VIEWER_REGION)
v_pandora = button()
v_pandora.Image("v_pandora.png")
v_pandora.Region(VIEWER_REGION)
discovergmvdevices = button()
discovergmvdevices.Image("discovergmvdevices.png")
discovergmvdevices.Region(CONFIGURATOR_REGION)
minusone = button()
minusone.Image("minusone.png")
minusone.Region(CONFIGURATOR_REGION)
v_mainzone = button()
v_mainzone.Image("v_mainzone.png")
v_mainzone.Region(VIEWER_REGION)
cancel = button()
cancel.Image("cancel.png")
cancel.Region(CONFIGURATOR_REGION)
cancel.similar(int = GlobalCustomButtonThreshold)
minustwo = button()
minustwo.Image("minustwo.png")
minustwo.Region(CONFIGURATOR_REGION)
minusthree = button()
minusthree.Image("minusthree.png")
minusthree.Region(CONFIGURATOR_REGION)
gmvdevice = button()
gmvdevice.Image("gmvdevice.png")
gmvdevice.Region(CONFIGURATOR_REGION)
ultra6x4 = button()
ultra6x4.Image("ultra6x4.png")
ultra6x4.Region(CONFIGURATOR_REGION)
n_scrollbar = button()
n_scrollbar.Image("n_scrollbar.png")
n_scrollbar.Region(NILES_REGION)
n_showfavorites = button()
n_showfavorites.Image("n_showfavorites.png")
n_showfavorites.Region(NILES_REGION)
g_configurator = button()
g_configurator.Image("g_configurator.png")
g_configurator.Region(GTOOLS_REGION)
g_configurator.similar(int = GlobalCustomButtonThreshold)
activeclientcon = button()
activeclientcon.Image("activeclientcon.png")
activeclientcon.Region(CONFIGURATOR_REGION)
minus = button()
minus.Image("minus.png")
minus.Region(CONFIGURATOR_REGION)
n_bluraydisc = button()
n_bluraydisc.Image("n_bluraydisc.png")
n_bluraydisc.Region(NILES_REGION)
n_bluraydisc.similar(int = GlobalCustomButtonThreshold)
n_configurerlesinterfacesutilisateur = button()
n_configurerlesinterfacesutilisateur.Image("n_configurerlesinterfacesutilisateur.png")
n_configurerlesinterfacesutilisateur.Region(NILES_REGION)
n_appliquer = button()
n_appliquer.Image("n_appliquer.png")
n_appliquer.Region(NILES_REGION)
n_sourceconfiguration = button()
n_sourceconfiguration.Image("n_sourceconfiguration.png")
n_sourceconfiguration.Region(NILES_REGION)
activeclientconnections = button()
activeclientconnections.Image("activeclientconnections.png")
activeclientconnections.Region(CONFIGURATOR_REGION)
connections = button()
connections.Image("connections.png")
connections.Region(CONFIGURATOR_REGION)
n_yes = button()
n_yes.Image("n_yes.png")
n_yes.Region(NILES_REGION)
n_allmedia = button()
n_allmedia.Image('n_allmedia.png')
n_allmedia.Region(NILES_REGION)
addnew = button()
addnew.Image("addnew.png")
addnew.Region(CONFIGURATOR_REGION)
blank = button()
blank.Image("blank.png")
blank.Region(CONFIGURATOR_REGION)
addnewcommunicationdevice = button()
addnewcommunicationdevice.Image("addnewcommunicationdevice.png")
addnewcommunicationdevice.Region(CONFIGURATOR_REGION)
addnewdevice = button()
addnewdevice.Image("addnewdevice.png")
addnewdevice.Region(CONFIGURATOR_REGION)
admin = button()
admin.Image("admin.png")
admin.Region(CONFIGURATOR_REGION)
apply = button()
apply.Image("apply.png")
apply.Region(CONFIGURATOR_REGION)
audiosharefolders = button()
audiosharefolders.Image("audiosharefolders.png")
audiosharefolders.Region(CONFIGURATOR_REGION)
audiolibraryscanoptions = button()
audiolibraryscanoptions.Image("audiolibraryscanoptions.png")
audiolibraryscanoptions.Region(CONFIGURATOR_REGION)
audiolibraryoptions = button()
audiolibraryoptions.Image("audiolibraryoptions.png")
audiolibraryoptions.Region(CONFIGURATOR_REGION)
backupconfigurationtofile = button()
backupconfigurationtofile.Image("backupconfigurationtofile.png")
backupconfigurationtofile.Region(CONFIGURATOR_REGION)
configureipinfo = button()
configureipinfo.Image("configureipinfo.png")
configureipinfo.Region(CONFIGURATOR_REGION)
closefive = button()
closefive.Image("closefive.png")
closefive.Region(CONFIGURATOR_REGION)
BackupCoreMo = button()
BackupCoreMo.Image("BackupCoreMo.png")
BackupCoreMo.Region(CONFIGURATOR_REGION)
backupcoremodueversionandconfigurationtofile = button()
backupcoremodueversionandconfigurationtofile.Image("backupcoremodueversionandconfigurationtofile.png")
backupcoremodueversionandconfigurationtofile.Region(CONFIGURATOR_REGION)
backupfile = button()
backupfile.Image("backupfile.png")
backupfile.Region(CONFIGURATOR_REGION)
restorebackupfile = button()
restorebackupfile.Image("restorebackupfile.png")
restorebackupfile.Region(CONFIGURATOR_REGION)
backupobjects = button()
backupobjects.Image("backupobjects.png")
backupobjects.Region(CONFIGURATOR_REGION)
booleans = button()
booleans.Image("booleans.png")
booleans.Region(CONFIGURATOR_REGION)
calendarsystemmodes = button()
calendarsystemmodes.Image("calendarsystemmodes.png")
calendarsystemmodes.Region(CONFIGURATOR_REGION)
channelgroups = button()
channelgroups.Image("channelgroups.png")
channelgroups.Region(CONFIGURATOR_REGION)
channels = button()
channels.Image("channels.png")
channels.Region(CONFIGURATOR_REGION)
clientconnections = button()
clientconnections.Image("clientconnections.png")
clientconnections.Region(CONFIGURATOR_REGION)
clienthomelogicsystem = button()
clienthomelogicsystem.Image("clienthomelogicsystem.png")
clienthomelogicsystem.Region(CONFIGURATOR_REGION)
climate = button()
climate.Image("climate.png")
climate.Region(CONFIGURATOR_REGION)
communciationdevices = button()
communciationdevices.Image("communciationdevices.png")
communciationdevices.Region(CONFIGURATOR_REGION)
Communicatio = button()
Communicatio.Image("Communicatio.png")
Communicatio.Region(CONFIGURATOR_REGION)
communicationdevices = button()
communicationdevices.Image("communicationdevices.png")
communicationdevices.Region(CONFIGURATOR_REGION)
configuration = button()
configuration.Image("configuration.png")
configuration.Region(CONFIGURATOR_REGION)
configurator = button()
configurator.Image("configurator.png")
configurator.Region(CONFIGURATOR_REGION)
configuripinfo = button()
configuripinfo.Image("configuripinfo.png")
configuripinfo.Region(CONFIGURATOR_REGION)
content = button()
content.Image("content.png")
content.Region(CONFIGURATOR_REGION)
controller = button()
controller.Image("controller.png")
controller.Region(CONFIGURATOR_REGION)
cpuusage = button()
cpuusage.Image("cpuusage.png")
cpuusage.Region(CONFIGURATOR_REGION)
customimages = button()
customimages.Image("customimages.png")
customimages.Region(CONFIGURATOR_REGION)
customizablescenes = button()
customizablescenes.Image("customizablescenes.png")
customizablescenes.Region(CONFIGURATOR_REGION)
custompages = button()
custompages.Image("custompages.png")
custompages.Region(CONFIGURATOR_REGION)
default = button()
default.Image("default.png")
default.Region(CONFIGURATOR_REGION)
DefaultRescJ = button()
DefaultRescJ.Image("DefaultRescJ.png")
DefaultRescJ.Region(CONFIGURATOR_REGION)
delete = button()
delete.Image("delete.png")
delete.Region(CONFIGURATOR_REGION)
devicename = button()
devicename.Image("devicename.png")
devicename.Region(CONFIGURATOR_REGION)
devices = button()
devices.Image("devices.png")
devices.Region(CONFIGURATOR_REGION)
discoverdevices = button()
discoverdevices.Image("discoverdevices.png")
discoverdevices.Region(CONFIGURATOR_REGION)
dispalysettings = button()
dispalysettings.Image("dispalysettings.png")
dispalysettings.Region(CONFIGURATOR_REGION)
displaysettings = button()
displaysettings.Image("displaysettings.png")
displaysettings.Region(CONFIGURATOR_REGION)
black = button()
black.Image("black.png")
black.Region(CONFIGURATOR_REGION)
blackhivis = button()
blackhivis.Image("blackhivis.png")
blackhivis.Region(CONFIGURATOR_REGION)
classic = button()
classic.Image("classic.png")
classic.Region(CONFIGURATOR_REGION)
documents = button()
documents.Image("documents.png")
documents.Region(CONFIGURATOR_REGION)
doorlocks = button()
doorlocks.Image("doorlocks.png")
doorlocks.Region(CONFIGURATOR_REGION)
emailaccounts = button()
emailaccounts.Image("emailaccounts.png")
emailaccounts.Region(CONFIGURATOR_REGION)
EmailMessage = button()
EmailMessage.Image("EmailMessage.png")
EmailMessage.Region(CONFIGURATOR_REGION)
emailmessagesoutbound = button()
emailmessagesoutbound.Image("emailmessagesoutbound.png")
emailmessagesoutbound.Region(CONFIGURATOR_REGION)
textspeechmessages = button()
textspeechmessages.Image("textspeechmessages.png")
textspeechmessages.Region(CONFIGURATOR_REGION)
ents = button()
ents.Image("ents.png")
ents.Region(CONFIGURATOR_REGION)
mainwindow = button()
mainwindow.Image("mainwindow.png")
mainwindow.Region(CONFIGURATOR_REGION)
two = button()
two.Image("two.png")
two.Region(CONFIGURATOR_REGION)
three = button()
three.Image("three.png")
three.Region(CONFIGURATOR_REGION)
mainwindow = button()
mainwindow.Image("mainwindow.png")
mainwindow.Region(CONFIGURATOR_REGION)
EventMapper = button()
EventMapper.Image("EventMapper.png")
EventMapper.Region(CONFIGURATOR_REGION)
eventmaps = button()
eventmaps.Image("eventmaps.png")
eventmaps.Region(CONFIGURATOR_REGION)
eventmapper = button()
eventmapper.Image("eventmapper.png")
eventmapper.Region(CONFIGURATOR_REGION)
exceptions = button()
exceptions.Image("exceptions.png")
exceptions.Region(CONFIGURATOR_REGION)
factoreconfiguration = button()
factoreconfiguration.Image("factoreconfiguration.png")
factoreconfiguration.Region(CONFIGURATOR_REGION)
family = button()
family.Image("family.png")
family.Region(CONFIGURATOR_REGION)
GenericSeria = button()
GenericSeria.Image("GenericSeria.png")
GenericSeria.Region(CONFIGURATOR_REGION)
genericserialdevices = button()
genericserialdevices.Image("genericserialdevices.png")
genericserialdevices.Region(CONFIGURATOR_REGION)
gernericserialdevices = button()
gernericserialdevices.Image("gernericserialdevices.png")
gernericserialdevices.Region(CONFIGURATOR_REGION)
globalirrigationperiods = button()
globalirrigationperiods.Image("globalirrigationperiods.png")
globalirrigationperiods.Region(CONFIGURATOR_REGION)
globaloptions = button()
globaloptions.Image("globaloptions.png")
globaloptions.Region(CONFIGURATOR_REGION)
graphobjects = button()
graphobjects.Image("graphobjects.png")
graphobjects.Region(CONFIGURATOR_REGION)
gTools = button()
gTools.Image("gTools.png")
gTools.Region(CONFIGURATOR_REGION)
heatingcoolingunits = button()
heatingcoolingunits.Image("heatingcoolingunits.png")
heatingcoolingunits.Region(CONFIGURATOR_REGION)
hhrzoneheaders = button()
hhrzoneheaders.Image("hhrzoneheaders.png")
hhrzoneheaders.Region(CONFIGURATOR_REGION)
homelogicsystems = button()
homelogicsystems.Image("homelogicsystems.png")
homelogicsystems.Region(CONFIGURATOR_REGION)
homepages = button()
homepages.Image("homepages.png")
homepages.Region(CONFIGURATOR_REGION)
hrrzoneheaders = button()
hrrzoneheaders.Image("hrrzoneheaders.png")
hrrzoneheaders.Region(CONFIGURATOR_REGION)
inputoutput = button()
inputoutput.Image("inputoutput.png")
inputoutput.Region(CONFIGURATOR_REGION)
interface = button()
interface.Image("interface.png")
interface.Region(CONFIGURATOR_REGION)
InterfaceDev = button()
InterfaceDev.Image("InterfaceDev.png")
InterfaceDev.Region(CONFIGURATOR_REGION)
interfacedevices = button()
interfacedevices.Image("interfacedevices.png")
interfacedevices.Region(CONFIGURATOR_REGION)
interfacedeviceshrr = button()
interfacedeviceshrr.Image("interfacedeviceshrr.png")
interfacedeviceshrr.Region(CONFIGURATOR_REGION)
hr2zoneheaders = button()
hr2zoneheaders.Image("hr2zoneheaders.png")
hr2zoneheaders.Region(CONFIGURATOR_REGION)
interfacedeviceskp7 = button()
interfacedeviceskp7.Image("interfacedeviceskp7.png")
interfacedeviceskp7.Region(CONFIGURATOR_REGION)
interfacedevicesosd = button()
interfacedevicesosd.Image("interfacedevicesosd.png")
interfacedevicesosd.Region(CONFIGURATOR_REGION)
interfacedevicestouchscreen = button()
interfacedevicestouchscreen.Image("interfacedevicestouchscreen.png")
interfacedevicestouchscreen.Region(CONFIGURATOR_REGION)
interfacegroups = button()
interfacegroups.Image("interfacegroups.png")
interfacegroups.Region(CONFIGURATOR_REGION)
zonecontrollers = button()
zonecontrollers.Image("zonecontrollers.png")
zonecontrollers.Region(CONFIGURATOR_REGION)
zonecontrollers.similar(int = GlobalCustomButtonThreshold)
tvchannelgroups = button()
tvchannelgroups.Image("tvchannelgroups.png")
tvchannelgroups.Region(CONFIGURATOR_REGION)
tvchannels = button()
tvchannels.Image("tvchannels.png")
tvchannels.Region(CONFIGURATOR_REGION)
interfacetemplates = button()
interfacetemplates.Image("interfacetemplates.png")
interfacetemplates.Region(CONFIGURATOR_REGION)
internetradiofavoritegenres = button()
internetradiofavoritegenres.Image("internetradiofavoritegenres.png")
internetradiofavoritegenres.Region(CONFIGURATOR_REGION)
ipaddress = button()
ipaddress.Image("ipaddress.png")
ipaddress.Region(CONFIGURATOR_REGION)
webpictures = button()
webpictures.Image("webpictures.png")
webpictures.Region(CONFIGURATOR_REGION)
local = button()
local.Image("local.png")
local.Region(CONFIGURATOR_REGION)
picturesharefolders = button()
picturesharefolders.Image("picturesharefolders.png")
picturesharefolders.Region(CONFIGURATOR_REGION)
picturelibraryscanoptions = button()
picturelibraryscanoptions.Image("picturelibraryscanoptions.png")
picturelibraryscanoptions.Region(CONFIGURATOR_REGION)
iroutputs = button()
iroutputs.Image("iroutputs.png")
iroutputs.Region(CONFIGURATOR_REGION)
irriagationcontrollers = button()
irriagationcontrollers.Image("irriagationcontrollers.png")
irriagationcontrollers.Region(CONFIGURATOR_REGION)
irreceivers = button()
irreceivers.Image("irreceivers.png")
irreceivers.Region(CONFIGURATOR_REGION)
irdevices = button()
irdevices.Image("irdevices.png")
irdevices.Region(CONFIGURATOR_REGION)
irrigation = button()
irrigation.Image("irrigation.png")
irrigation.Region(CONFIGURATOR_REGION)
irrigationgroups = button()
irrigationgroups.Image("irrigationgroups.png")
irrigationgroups.Region(CONFIGURATOR_REGION)
irrigationcontrollers = button()
irrigationcontrollers.Image("irrigationcontrollers.png")
irrigationcontrollers.Region(CONFIGURATOR_REGION)
irrigationzones = button()
irrigationzones.Image("irrigationzones.png")
irrigationzones.Region(CONFIGURATOR_REGION)
keypadtemplates = button()
keypadtemplates.Image("keypadtemplates.png")
keypadtemplates.Region(CONFIGURATOR_REGION)
kids = button()
kids.Image("kids.png")
kids.Region(CONFIGURATOR_REGION)
language = button()
language.Image("language.png")
language.Region(CONFIGURATOR_REGION)
libraryoptions = button()
libraryoptions.Image("libraryoptions.png")
libraryoptions.Region(CONFIGURATOR_REGION)
libraryscanoptions = button()
libraryscanoptions.Image("libraryscanoptions.png")
libraryscanoptions.Region(CONFIGURATOR_REGION)
lighting = button()
lighting.Image("lighting.png")
lighting.Region(CONFIGURATOR_REGION)
lightinginterfaces = button()
lightinginterfaces.Image("lightinginterfaces.png")
lightinginterfaces.Region(CONFIGURATOR_REGION)
lntemetRadio = button()
lntemetRadio.Image("lntemetRadio.png")
lntemetRadio.Region(CONFIGURATOR_REGION)
loadinterfaces = button()
loadinterfaces.Image("loadinterfaces.png")
loadinterfaces.Region(CONFIGURATOR_REGION)
locationandweather = button()
locationandweather.Image("locationandweather.png")
locationandweather.Region(CONFIGURATOR_REGION)
locationdevices = button()
locationdevices.Image("locationdevices.png")
locationdevices.Region(CONFIGURATOR_REGION)
lockgroups = button()
lockgroups.Image("lockgroups.png")
lockgroups.Region(CONFIGURATOR_REGION)
logitechmediaserver = button()
logitechmediaserver.Image("logitechmediaserver.png")
logitechmediaserver.Region(CONFIGURATOR_REGION)
lookuplatlonfromlocation = button()
lookuplatlonfromlocation.Image("lookuplatlonfromlocation.png")
lookuplatlonfromlocation.Region(CONFIGURATOR_REGION)
maintabs = button()
maintabs.Image("maintabs.png")
maintabs.Region(CONFIGURATOR_REGION)
media = button()
media.Image("media.png")
media.Region(CONFIGURATOR_REGION)
media.similar(GlobalCustomButtonThreshold)
g_cancel = button()
g_cancel.Image("g_cancel.png")
g_cancel.Region(GTOOLS_REGION)
g_viewer = button()
g_viewer.Image("g_viewer.png")
g_viewer.Region(GTOOLS_REGION)
g_viewer.similar(GlobalCustomButtonThreshold)
messaging = button()
messaging.Image("messaging.png")
messaging.Region(CONFIGURATOR_REGION)
min = button()
min.Image("min.png")
min.Region(CONFIGURATOR_REGION)
minus = button()
minus.Image("minus.png")
minus.Region(CONFIGURATOR_REGION)
minus.similar(int = .80)
moduleconfiguration = button()
moduleconfiguration.Image("moduleconfiguration.png")
moduleconfiguration.Region(CONFIGURATOR_REGION)
movies = button()
movies.Image("movies.png")
movies.Region(CONFIGURATOR_REGION)
music = button()
music.Image("music.png")
music.Region(CONFIGURATOR_REGION)
mysystems = button()
mysystems.Image("mysystems.png")
mysystems.Region(CONFIGURATOR_REGION)
name = button()
name.Image("name.png")
name.Region(CONFIGURATOR_REGION)
news = button()
news.Image("news.png")
news.Region(CONFIGURATOR_REGION)
numerictriggers = button()
numerictriggers.Image("numerictriggers.png")
numerictriggers.Region(CONFIGURATOR_REGION)
objects = button()
objects.Image("objects.png")
objects.Region(CONFIGURATOR_REGION)
ondevices = button()
ondevices.Image("ondevices.png")
ondevices.Region(CONFIGURATOR_REGION)
one = button()
one.Image("one.png")
one.Region(CONFIGURATOR_REGION)
n_sourcetwo = button()
n_sourcetwo.Image("n_sourcetwo.png")
n_sourcetwo.Region(NILES_REGION)
n_zone1 = button()
n_zone1.Image("n_zone1.png")
n_zone1.Region(NILES_REGION)
n_off = button()
n_off.Image("n_off.png")
n_off.Region(NILES_REGION)
n_mainzoneoff = button()
n_mainzoneoff.Image("n_mainzoneoff.png")
n_mainzoneoff.Region(NILES_REGION)
n_zone1off = button()
n_zone1off.Image("n_zone1off.png")
n_zone1off.Region(NILES_REGION)
n_notinstalled = button()
n_notinstalled.Image("n_notinstalled.png")
n_notinstalled.Region(NILES_REGION)
n_notinstalled.similar(GlobalCustomButtonThreshold)
n_none = button()
n_none.Image("n_none.png")
n_none.Region(NILES_REGION)
n_none.similar(GlobalCustomButtonThreshold)
n_configurekeypads = button()
n_configurekeypads.Image("n_configurekeypads.png")
n_configurekeypads.Region(NILES_REGION)
n_remove = button()
n_remove.Image("n_remove.png")
n_remove.Region(NILES_REGION)
n_remove.similar(GlobalCustomButtonThreshold)
n_poweron = button()
n_poweron.Image("n_poweron.png")
n_poweron.Region(NILES_REGION)
n_poweroff = button()
n_poweroff.Image("n_poweroff.png")
n_poweroff.Region(NILES_REGION)
n_home = button()
n_home.Image("n_home.png")
n_home.Region(NILES_REGION)
n_mainzone = button()
n_mainzone.Image("n_mainzone.png")
n_mainzone.Region(NILES_REGION)
n_mainzone.similar(int = GlobalCustomButtonThreshold)
n_scrollright = button()
n_scrollright.Image("n_scrollright.png")
n_scrollright.Region(NILES_REGION)
n_scrollright.similar(int = GlobalCustomButtonThreshold)
outcontroller = button()
outcontroller.Image("outcontroller.png")
outcontroller.Region(CONFIGURATOR_REGION)
elangmv64ethernet = button()
elangmv64ethernet.Image("elangmv64ethernet.png")
elangmv64ethernet.Region(CONFIGURATOR_REGION)
v_forward = button()
v_forward.Image("v_forward.png")
v_forward.Region(VIEWER_REGION)
outputs = button()
outputs.Image("outputs.png")
outputs.Region(CONFIGURATOR_REGION)
pictures = button()
pictures.Image("pictures.png")
pictures.Region(CONFIGURATOR_REGION)
picuturesharefolders = button()
picuturesharefolders.Image("picuturesharefolders.png")
picuturesharefolders.Region(CONFIGURATOR_REGION)
plus = button()
plus.Image("plus.png")
plus.Region(CONFIGURATOR_REGION)
poolcontrol = button()
poolcontrol.Image("poolcontrol.png")
poolcontrol.Region(CONFIGURATOR_REGION)
virtualsecuritycontroller = button()
virtualsecuritycontroller.Image("virtualsecuritycontroller.png")
virtualsecuritycontroller.Region(CONFIGURATOR_REGION)
virtualdoorlock = button()
virtualdoorlock.Image("virtualdoorlock.png")
virtualdoorlock.Region(CONFIGURATOR_REGION)
poolcontrollers = button()
poolcontrollers.Image("poolcontrollers.png")
poolcontrollers.Region(CONFIGURATOR_REGION)
yamahayncaethernet = button()
yamahayncaethernet.Image("yamahayncaethernet.png")
yamahayncaethernet.Region(CONFIGURATOR_REGION)
yamahayncaethernet.similar(int = GlobalCustomButtonThreshold)
ipzero = button()
ipzero.Image("ipzero.png")
ipzero.Region(CONFIGURATOR_REGION)
ipzero.similar(int = GlobalCustomButtonThreshold)
v_settings = button()
v_settings.Image("v_settings.png")
v_settings.Region(VIEWER_REGION)
v_three = button()
v_three.Image("v_three.png")
v_three.Region(VIEWER_REGION)
v_six = button()
v_six.Image("v_six.png")
v_six.Region(VIEWER_REGION)
v_six.similar(int = .50)
v_five = button()
v_five.Image("v_five.png")
v_five.Region(VIEWER_REGION)
v_one = button()
v_one.Image("v_one.png")
v_one.Region(VIEWER_REGION)
v_four = button()
v_four.Image("v_four.png")
v_four.Region(VIEWER_REGION)
v_two = button()
v_two.Image("v_two.png")
v_two.Region(VIEWER_REGION)
v_media = button()
v_media.Image("v_media.png")
v_media.Region(VIEWER_REGION)
morning = button()
morning.Image("morning.png")
morning.Region(CONFIGURATOR_REGION)
reboottargetsystemhardware = button()
reboottargetsystemhardware.Image("reboottargetsystemhardware.png")
reboottargetsystemhardware.Region(CONFIGURATOR_REGION)
hardwaretype = button()
hardwaretype.Image("hardwaretype.png")
hardwaretype.Region(CONFIGURATOR_REGION)
hardwaretype.similar(GlobalCustomButtonThreshold)
receivers = button()
receivers.Image("receivers.png")
receivers.Region(CONFIGURATOR_REGION)
RecordResolu = button()
RecordResolu.Image("RecordResolu.png")
RecordResolu.Region(CONFIGURATOR_REGION)
RegOne = button()
RegOne.Image("RegOne.png")
RegOne.Region(CONFIGURATOR_REGION)
RegThree = button()
RegThree.Image("RegThree.png")
RegThree.Region(CONFIGURATOR_REGION)
RegTwo = button()
RegTwo.Image("RegTwo.png")
RegTwo.Region(CONFIGURATOR_REGION)
relayoutputs = button()
relayoutputs.Image("relayoutputs.png")
relayoutputs.Region(CONFIGURATOR_REGION)
remoteusers = button()
remoteusers.Image("remoteusers.png")
remoteusers.Region(CONFIGURATOR_REGION)
repeatingsystemtimers = button()
repeatingsystemtimers.Image("repeatingsystemtimers.png")
repeatingsystemtimers.Region(CONFIGURATOR_REGION)
repeatingsystemtimes = button()
repeatingsystemtimes.Image("repeatingsystemtimes.png")
repeatingsystemtimes.Region(CONFIGURATOR_REGION)
resettofactoryconfiguration = button()
resettofactoryconfiguration.Image("resettofactoryconfiguration.png")
resettofactoryconfiguration.Region(CONFIGURATOR_REGION)
restarttargersystemsoftwareonly = button()
restarttargersystemsoftwareonly.Image("restarttargersystemsoftwareonly.png")
restarttargersystemsoftwareonly.Region(CONFIGURATOR_REGION)
RunOnceSyste = button()
RunOnceSyste.Image("RunOnceSyste.png")
RunOnceSyste.Region(CONFIGURATOR_REGION)
runoncesystemtimers = button()
runoncesystemtimers.Image("runoncesystemtimers.png")
runoncesystemtimers.Region(CONFIGURATOR_REGION)
reapeatingsystemtimers = button()
reapeatingsystemtimers.Image("reapeatingsystemtimers.png")
reapeatingsystemtimers.Region(CONFIGURATOR_REGION)
schedule = button()
schedule.Image("schedule.png")
schedule.Region(CONFIGURATOR_REGION)
security = button()
security.Image("security.png")
security.Region(CONFIGURATOR_REGION)
securitypanels = button()
securitypanels.Image("securitypanels.png")
securitypanels.Region(CONFIGURATOR_REGION)
select = button()
select.Image("select.png")
select.Region(CONFIGURATOR_REGION)
senders = button()
senders.Image("senders.png")
senders.Region(CONFIGURATOR_REGION)
senseinputs = button()
senseinputs.Image("senseinputs.png")
senseinputs.Region(CONFIGURATOR_REGION)
hc6controller = button()
hc6controller.Image("hc6controller.png")
hc6controller.Region(CONFIGURATOR_REGION)
serialdevices = button()
serialdevices.Image("serialdevices.png")
serialdevices.Region(CONFIGURATOR_REGION)
sharing = button()
sharing.Image("sharing.png")
sharing.Region(CONFIGURATOR_REGION)
sources = button()
sources.Image("sources.png")
sources.Region(CONFIGURATOR_REGION)
connecttoexistingdevice = button()
connecttoexistingdevice.Image("connecttoexistingdevice.png")
connecttoexistingdevice.Region(CONFIGURATOR_REGION)
connecttoexistingdevice.similar(int = .30)
none = button()
none.Image("none.png")
none.Region(CONFIGURATOR_REGION)
sources3 = button()
sources3.Image("sources3.png")
sources3.Region(CONFIGURATOR_REGION)
sports = button()
sports.Image("sports.png")
sports.Region(CONFIGURATOR_REGION)
g_ok = button()
g_ok.Image("g_ok.png")
g_ok.Region(GTOOLS_REGION)
system = button()
system.Image("system.png")
system.Region(CONFIGURATOR_REGION)
system.similar(int = GlobalCustomButtonThreshold)
systemimages = button()
systemimages.Image("systemimages.png")
systemimages.Region(CONFIGURATOR_REGION)
systemlogs = button()
systemlogs.Image("systemlogs.png")
systemlogs.Region(CONFIGURATOR_REGION)
systemmodes = button()
systemmodes.Image("systemmodes.png")
systemmodes.Region(CONFIGURATOR_REGION)
systemsounds = button()
systemsounds.Image("systemsounds.png")
systemsounds.Region(CONFIGURATOR_REGION)
onscreendisplay = button()
onscreendisplay.Image("onscreendisplay.png")
onscreendisplay.Region(CONFIGURATOR_REGION)
systemtimers = button()
systemtimers.Image("systemtimers.png")
systemtimers.Region(CONFIGURATOR_REGION)
systemtime = button()
systemtime.Image("systemtime.png")
systemtime.Region(CONFIGURATOR_REGION)
timedevents = button()
timedevents.Image("timedevents.png")
timedevents.Region(CONFIGURATOR_REGION)
ftpfoldertriggers = button()
ftpfoldertriggers.Image("ftpfoldertriggers.png")
ftpfoldertriggers.Region(CONFIGURATOR_REGION)
vianet = button()
vianet.Image("vianet.png")
vianet.Region(CONFIGURATOR_REGION)
tabs = button()
tabs.Image("tabs.png")
tabs.Region(CONFIGURATOR_REGION)
telephonesystems = button()
telephonesystems.Image("telephonesystems.png")
telephonesystems.Region(CONFIGURATOR_REGION)
thermostats = button()
thermostats.Image("thermostats.png")
thermostats.Region(CONFIGURATOR_REGION)
three = button()
three.Image("three.png")
three.Region(CONFIGURATOR_REGION)
timesevents = button()
timesevents.Image("timesevents.png")
timesevents.Region(CONFIGURATOR_REGION)
tpdevices = button()
tpdevices.Image("tpdevices.png")
tpdevices.Region(CONFIGURATOR_REGION)
triggers = button()
triggers.Image("triggers.png")
triggers.Region(CONFIGURATOR_REGION)
two = button()
two.Image("two.png")
two.Region(CONFIGURATOR_REGION)
ups = button()
ups.Image("ups.png")
ups.Region(CONFIGURATOR_REGION)
UPSPowerSupp = button()
UPSPowerSupp.Image("UPSPowerSupp.png")
UPSPowerSupp.Region(CONFIGURATOR_REGION)
upspowersupplies = button()
upspowersupplies.Image("upspowersupplies.png")
upspowersupplies.Region(CONFIGURATOR_REGION)
variables = button()
variables.Image("variables.png")
variables.Region(CONFIGURATOR_REGION)
version = button()
version.Image("version.png")
version.Region(CONFIGURATOR_REGION)
version.similar(GlobalCustomButtonThreshold)
video = button()
video.Image("video.png")
video.Region(CONFIGURATOR_REGION)
videocamerasources = button()
videocamerasources.Image("videocamerasources.png")
videocamerasources.Region(CONFIGURATOR_REGION)
audiobreakout = button()
audiobreakout.Image("audiobreakout.png")
audiobreakout.Region(CONFIGURATOR_REGION)
source2 = button()
source2.Image("source2.png")
source2.Region(CONFIGURATOR_REGION)
source3 = button()
source3.Image("source3.png")
source3.Region(CONFIGURATOR_REGION)
source4 = button()
source4.Image("source4.png")
source4.Region(CONFIGURATOR_REGION)
videodisplays = button()
videodisplays.Image("videodisplays.png")
videodisplays.Region(CONFIGURATOR_REGION)
displayname = button()
displayname.Image("displayname.png")
displayname.Region(CONFIGURATOR_REGION)
viewer = button()
viewer.Image("viewer.png")
viewer.Region(CONFIGURATOR_REGION)
voicemailboxes = button()
voicemailboxes.Image("voicemailboxes.png")
voicemailboxes.Region(CONFIGURATOR_REGION)
house = button()
house.Image("house.png")
house.Region(CONFIGURATOR_REGION)
WeatherCalen = button()
WeatherCalen.Image("WeatherCalen.png")
WeatherCalen.Region(CONFIGURATOR_REGION)
weathercalendar = button()
weathercalendar.Image("weathercalendar.png")
weathercalendar.Region(CONFIGURATOR_REGION)
weathercalendarsystemmo = button()
weathercalendarsystemmo.Image("weathercalendarsystemmo.png")
weathercalendarsystemmo.Region(CONFIGURATOR_REGION)
weathersystemmodes = button()
weathersystemmodes.Image("weathersystemmodes.png")
weathersystemmodes.Region(CONFIGURATOR_REGION)
windows = button()
windows.Image("windows.png")
windows.Region(CONFIGURATOR_REGION)
works = button()
works.Image("works.png")
works.Region(CONFIGURATOR_REGION)
x = button()
x.Image("x.png")
x.Region(CONFIGURATOR_REGION)
############################################################
addnewaudiosharefolder = button()
addnewaudiosharefolder.Image("addnewaudiosharefolder.png")
addnewaudiosharefolder.Region(CONFIGURATOR_REGION)
addnewbackupprocedure = button()
addnewbackupprocedure.Image("addnewbackupprocedure.png")
addnewbackupprocedure.Region(CONFIGURATOR_REGION)
addnewcommunicationdevice = button()
addnewcommunicationdevice.Image("addnewcommunicationdevice.png")
addnewcommunicationdevice.Region(CONFIGURATOR_REGION)
addnewcustompage = button()
addnewcustompage.Image("addnewcustompage.png")
addnewcustompage.Region(CONFIGURATOR_REGION)
addnewdevice = button()
addnewdevice.Image("addnewdevice.png")
addnewdevice.Region(CONFIGURATOR_REGION)
addnewdisplaysettings = button()
addnewdisplaysettings.Image("addnewdisplaysettings.png")
addnewdisplaysettings.Region(CONFIGURATOR_REGION)
addnewdvdplayer = button()
addnewdvdplayer.Image("addnewdvdplayer.png")
addnewdvdplayer.Region(CONFIGURATOR_REGION)
addnewemailaccount = button()
addnewemailaccount.Image("addnewemailaccount.png")
addnewemailaccount.Region(CONFIGURATOR_REGION)
addnewemailmessage = button()
addnewemailmessage.Image("addnewemailmessage.png")
addnewemailmessage.Region(CONFIGURATOR_REGION)
addneweventmap = button()
addneweventmap.Image("addneweventmap.png")
addneweventmap.Region(CONFIGURATOR_REGION)
addneweventmapboolean = button()
addneweventmapboolean.Image("addneweventmapboolean.png")
addneweventmapboolean.Region(CONFIGURATOR_REGION)
addneweventmapvariable = button()
addneweventmapvariable.Image("addneweventmapvariable.png")
addneweventmapvariable.Region(CONFIGURATOR_REGION)
addnewftpfoldertrigger = button()
addnewftpfoldertrigger.Image("addnewftpfoldertrigger.png")
addnewftpfoldertrigger.Region(CONFIGURATOR_REGION)
addnewgenericserialdevice = button()
addnewgenericserialdevice.Image("addnewgenericserialdevice.png")
addnewgenericserialdevice.Region(CONFIGURATOR_REGION)
addnewgraphobject = button()
addnewgraphobject.Image("addnewgraphobject.png")
addnewgraphobject.Region(CONFIGURATOR_REGION)
addnewgroup = button()
addnewgroup.Image("addnewgroup.png")
addnewgroup.Region(CONFIGURATOR_REGION)
addnewinputcontroller = button()
addnewinputcontroller.Image("addnewinputcontroller.png")
addnewinputcontroller.Region(CONFIGURATOR_REGION)
addnewinternetradiogenre = button()
addnewinternetradiogenre.Image("addnewinternetradiogenre.png")
addnewinternetradiogenre.Region(CONFIGURATOR_REGION)
addnewirdevice = button()
addnewirdevice.Image("addnewirdevice.png")
addnewirdevice.Region(CONFIGURATOR_REGION)
addnewirreceiver = button()
addnewirreceiver.Image("addnewirreceiver.png")
addnewirreceiver.Region(CONFIGURATOR_REGION)
addnewirrigationcontroller = button()
addnewirrigationcontroller.Image("addnewirrigationcontroller.png")
addnewirrigationcontroller.Region(CONFIGURATOR_REGION)
addnewirrigationgroup = button()
addnewirrigationgroup.Image("addnewirrigationgroup.png")
addnewirrigationgroup.Region(CONFIGURATOR_REGION)
addnewirrigationperiod = button()
addnewirrigationperiod.Image("addnewirrigationperiod.png")
addnewirrigationperiod.Region(CONFIGURATOR_REGION)
addnewirsender = button()
addnewirsender.Image("addnewirsender.png")
addnewirsender.Region(CONFIGURATOR_REGION)
addnewkeypad = button()
addnewkeypad.Image("addnewkeypad.png")
addnewkeypad.Region(CONFIGURATOR_REGION)
addnewkeypadcontroller = button()
addnewkeypadcontroller.Image("addnewkeypadcontroller.png")
addnewkeypadcontroller.Region(CONFIGURATOR_REGION)
addnewkeypadtemplate = button()
addnewkeypadtemplate.Image("addnewkeypadtemplate.png")
addnewkeypadtemplate.Region(CONFIGURATOR_REGION)
addnewlightinginterface = button()
addnewlightinginterface.Image("addnewlightinginterface.png")
addnewlightinginterface.Region(CONFIGURATOR_REGION)
addnewlockgroup = button()
addnewlockgroup.Image("addnewlockgroup.png")
addnewlockgroup.Region(CONFIGURATOR_REGION)
addnewmediaplayer = button()
addnewmediaplayer.Image("addnewmediaplayer.png")
addnewmediaplayer.Region(CONFIGURATOR_REGION)
addnewmp3player = button()
addnewmp3player.Image("addnewmp3player.png")
addnewmp3player.Region(CONFIGURATOR_REGION)
addnewnumerictrigger = button()
addnewnumerictrigger.Image("addnewnumerictrigger.png")
addnewnumerictrigger.Region(CONFIGURATOR_REGION)
addnewoutputcontroller = button()
addnewoutputcontroller.Image("addnewoutputcontroller.png")
addnewoutputcontroller.Region(CONFIGURATOR_REGION)
addnewpicuturesharefolder = button()
addnewpicuturesharefolder.Image("addnewpicuturesharefolder.png")
addnewpicuturesharefolder.Region(CONFIGURATOR_REGION)
addnewpoolcontroller = button()
addnewpoolcontroller.Image("addnewpoolcontroller.png")
addnewpoolcontroller.Region(CONFIGURATOR_REGION)
addnewpowersupply = button()
addnewpowersupply.Image("addnewpowersupply.png")
addnewpowersupply.Region(CONFIGURATOR_REGION)
addnewrepeatingsystemtimer = button()
addnewrepeatingsystemtimer.Image("addnewrepeatingsystemtimer.png")
addnewrepeatingsystemtimer.Region(CONFIGURATOR_REGION)
addnewrunoncesystemtimer = button()
addnewrunoncesystemtimer.Image("addnewrunoncesystemtimer.png")
addnewrunoncesystemtimer.Region(CONFIGURATOR_REGION)
addnewscene = button()
addnewscene.Image("addnewscene.png")
addnewscene.Region(CONFIGURATOR_REGION)
addnewsource = button()
addnewsource.Image("addnewsource.png")
addnewsource.Region(CONFIGURATOR_REGION)
addnewtextspeechmessage = button()
addnewtextspeechmessage.Image("addnewtextspeechmessage.png")
addnewtextspeechmessage.Region(CONFIGURATOR_REGION)
addnewthermostat = button()
addnewthermostat.Image("addnewthermostat.png")
addnewthermostat.Region(CONFIGURATOR_REGION)
addnewtimedevent = button()
addnewtimedevent.Image("addnewtimedevent.png")
addnewtimedevent.Region(CONFIGURATOR_REGION)
addnewtouchscreen = button()
addnewtouchscreen.Image("addnewtouchscreen.png")
addnewtouchscreen.Region(CONFIGURATOR_REGION)
addnewtouchscreenhomepage = button()
addnewtouchscreenhomepage.Image("addnewtouchscreenhomepage.png")
addnewtouchscreenhomepage.Region(CONFIGURATOR_REGION)
addnewtuner = button()
addnewtuner.Image("addnewtuner.png")
addnewtuner.Region(CONFIGURATOR_REGION)
addnewtvchannel = button()
addnewtvchannel.Image("addnewtvchannel.png")
addnewtvchannel.Region(CONFIGURATOR_REGION)
addnewtvchannelgroup = button()
addnewtvchannelgroup.Image("addnewtvchannelgroup.png")
addnewtvchannelgroup.Region(CONFIGURATOR_REGION)
addnewvideodisplay = button()
addnewvideodisplay.Image("addnewvideodisplay.png")
addnewvideodisplay.Region(CONFIGURATOR_REGION)
addnewvideosourcecamera = button()
addnewvideosourcecamera.Image("addnewvideosourcecamera.png")
addnewvideosourcecamera.Region(CONFIGURATOR_REGION)
addnewvoicemailbox = button()
addnewvoicemailbox.Image("addnewvoicemailbox.png")
addnewvoicemailbox.Region(CONFIGURATOR_REGION)
addnewwebpicture = button()
addnewwebpicture.Image("addnewwebpicture.png")
addnewwebpicture.Region(CONFIGURATOR_REGION)
n_tvpower = button()
n_tvpower.Image("n_tvpower.png")
n_tvpower.Region(NILES_REGION)
n_turnon = button()
n_turnon.Image("n_turnon.png")
n_turnon.Region(NILES_REGION)
n_turnon.similar(int = GlobalCustomButtonThreshold)
n_settopbox = button()
n_settopbox.Image("n_settopbox.png")
n_settopbox.Region(NILES_REGION)
n_xfinity = button()
n_xfinity.Image("n_xfinity.png")
n_xfinity.Region(NILES_REGION)
n_scrollbar2 = button()
n_scrollbar2.Image("n_scrollbar2.png")
n_scrollbar2.Region(NILES_REGION)
n_mx011anmcablebox = button()
n_mx011anmcablebox.Image("n_mx011anmcablebox.png")
n_mx011anmcablebox.Region(NILES_REGION)
n_tvsource = button()
n_tvsource.Image("n_tvsource.png")
n_tvsource.Region(NILES_REGION)
addnewzonecontroller = button()
addnewzonecontroller.Image("addnewzonecontroller.png")
addnewzonecontroller.Region(CONFIGURATOR_REGION)
addnewzoneheaderinterface = button()
addnewzoneheaderinterface.Image("addnewzoneheaderinterface.png")
addnewzoneheaderinterface.Region(CONFIGURATOR_REGION)
browseimagefolder = button()
browseimagefolder.Image("browseimagefolder.png")
browseimagefolder.Region(CONFIGURATOR_REGION)
browseshare = button()
browseshare.Image("browseshare.png")
browseshare.Region(CONFIGURATOR_REGION)
browsesoundsfolder = button()
browsesoundsfolder.Image("browsesoundsfolder.png")
browsesoundsfolder.Region(CONFIGURATOR_REGION)
configurecircuits = button()
configurecircuits.Image("configurecircuits.png")
configurecircuits.Region(CONFIGURATOR_REGION)
copycolors = button()
copycolors.Image("copycolors.png")
copycolors.Region(CONFIGURATOR_REGION)
copysettings = button()
copysettings.Image("copysettings.png")
copysettings.Region(CONFIGURATOR_REGION)
createeventmapfor = button()
createeventmapfor.Image("createeventmapfor.png")
createeventmapfor.Region(CONFIGURATOR_REGION)
deletehcseriesosd = button()
deletehcseriesosd.Image("deletehcseriesosd.png")
deletehcseriesosd.Region(CONFIGURATOR_REGION)
deletelocal = button()
deletelocal.Image("deletelocal.png")
deletelocal.Region(CONFIGURATOR_REGION)
deletemorning = button()
deletemorning.Image("deletemorning.png")
deletemorning.Region(CONFIGURATOR_REGION)
deletenewsettings = button()
deletenewsettings.Image("deletenewsettings.png")
deletenewsettings.Region(CONFIGURATOR_REGION)
exportchannelsandgroups = button()
exportchannelsandgroups.Image("exportchannelsandgroups.png")
exportchannelsandgroups.Region(CONFIGURATOR_REGION)
exporttofile = button()
exporttofile.Image("exporttofile.png")
exporttofile.Region(CONFIGURATOR_REGION)
importchannelsandgroups = button()
importchannelsandgroups.Image("importchannelsandgroups.png")
importchannelsandgroups.Region(CONFIGURATOR_REGION)
importnewirdevicefromfile = button()
importnewirdevicefromfile.Image("importnewirdevicefromfile.png")
importnewirdevicefromfile.Region(CONFIGURATOR_REGION)
importnewirinterfacefromfile = button()
importnewirinterfacefromfile.Image("importnewirinterfacefromfile.png")
importnewirinterfacefromfile.Region(CONFIGURATOR_REGION)
importtofile = button()
importtofile.Image("importtofile.png")
importtofile.Region(CONFIGURATOR_REGION)
moveup = button()
moveup.Image("moveup.png")
moveup.Region(CONFIGURATOR_REGION)
scansharenow = button()
scansharenow.Image("scansharenow.png")
scansharenow.Region(CONFIGURATOR_REGION)
showcommunicationstatus = button()
showcommunicationstatus.Image("showcommunicationstatus.png")
showcommunicationstatus.Region(CONFIGURATOR_REGION)
showeventmaptraceinfo = button()
showeventmaptraceinfo.Image("showeventmaptraceinfo.png")
showeventmaptraceinfo.Region(CONFIGURATOR_REGION)
#####################NILES##########################################
n_add = button()
n_add.Image("n_add.png")
n_add.Region(NILES_REGION)
n_allmodels = button()
n_allmodels.Image("n_allmodels.png")
n_allmodels.Region(NILES_REGION)
n_apple = button()
n_apple.Image("n_apple.png")
n_apple.Region(NILES_REGION)
n_appletv = button()
n_appletv.Image("n_appletv.png")
n_appletv.Region(NILES_REGION)
n_av1 = button()
n_av1.Image("n_av1.png")
n_av1.Region(NILES_REGION)
n_av2 = button()
n_av2.Image("n_av2.png")
n_av2.Region(NILES_REGION)
n_bluray = button()
n_bluray.Image("n_bluray.png")
n_bluray.Region(NILES_REGION)
n_bosa58e001 = button()
n_bosa58e001.Image("n_bosa58e001.png")
n_bosa58e001.Region(NILES_REGION)
n_configurechassis = button()
n_configurechassis.Image("n_configurechassis.png")
n_configurechassis.Region(NILES_REGION)
n_configurehometheatertv = button()
n_configurehometheatertv.Image("n_configurehometheatertv.png")
n_configurehometheatertv.Region(NILES_REGION)
n_configuresources = button()
n_configuresources.Image("n_configuresources.png")
n_configuresources.Region(NILES_REGION)
n_finish = button()
n_finish.Image("n_finish.png")
n_finish.Region(NILES_REGION)
n_five = button()
n_five.Image("n_five.png")
n_five.Region(NILES_REGION)
n_forward = button()
n_forward.Image("n_forward.png")
n_forward.Region(NILES_REGION)
n_forward.similar(GlobalCustomButtonThreshold)
n_gointo = button()
n_gointo.Image("n_gointo.png")
n_gointo.Region(NILES_REGION)
n_gointo.similar(int = .30)
n_ipcontrolledsource = button()
n_ipcontrolledsource.Image("n_ipcontrolledsource.png")
n_ipcontrolledsource.Region(NILES_REGION)
n_ircontrolledsource = button()
n_ircontrolledsource.Image("n_ircontrolledsource.png")
n_ircontrolledsource.Region(NILES_REGION)
n_iroutput01 = button()
n_iroutput01.Image("n_iroutput01.png")
n_iroutput01.Region(NILES_REGION)
n_iroutput02 = button()
n_iroutput02.Image("n_iroutput02.png")
n_iroutput02.Region(NILES_REGION)
n_iroutput03 = button()
n_iroutput03.Image("n_iroutput03.png")
n_iroutput03.Region(NILES_REGION)
n_logitechmediaserver = button()
n_logitechmediaserver.Image("n_logitechmediaserver.png")
n_logitechmediaserver.Region(NILES_REGION)
n_mediamanager = button()
n_mediamanager.Image("n_mediamanager.png")
n_mediamanager.Region(NILES_REGION)
n_mediaserver = button()
n_mediaserver.Image("n_mediaserver.png")
n_mediaserver.Region(NILES_REGION)
n_next = button()
n_next.Image("n_next.png")
n_next.Region(NILES_REGION)
n_next.similar(int = .50)
n_nileslogo = button()
n_nileslogo.Image("n_nileslogo.png")
n_nileslogo.Region(NILES_REGION)
n_ok = button()
n_ok.Image("n_ok.png")
n_ok.Region(NILES_REGION)
n_one = button()
n_one.Image("n_one.png")
n_one.Region(NILES_REGION)
ok = button()
ok.Image("ok.png")
ok.Region(CONFIGURATOR_REGION)
scrollwhitespace = button()
scrollwhitespace.Image("scrollwhitespace.png")
scrollwhitespace.Region(CONFIGURATOR_REGION)
n_removeall = button()
n_removeall.Image("n_removeall.png")
n_removeall.Region(NILES_REGION)
n_abc = button()
n_abc.Image("n_abc.png")
n_abc.Region(NILES_REGION)
n_abcfamily = button()
n_abcfamily.Image("n_abcfamily.png")
n_abcfamily.Region(NILES_REGION)
n_aetv = button()
n_aetv.Image("n_aetv.png")
n_aetv.Region(NILES_REGION)
n_cancel = button()
n_cancel.Image("n_cancel.png")
n_cancel.Region(NILES_REGION)
n_test = button()
n_test.Image("n_test.png")
n_test.Region(NILES_REGION)
n_cursordown = button()
n_cursordown.Image("n_cursordown.png")
n_cursordown.Region(NILES_REGION)
n_cursorenter = button()
n_cursorenter.Image("n_cursorenter.png")
n_cursorenter.Region(NILES_REGION)
gmvdevice = button()
gmvdevice.Image("gmvdevice.png")
gmvdevice.Region(CONFIGURATOR_REGION)
n_cursorleft = button()
n_cursorleft.Image("n_cursorleft.png")
n_cursorleft.Region(NILES_REGION)
n_cursorright = button()
n_cursorright.Image("n_cursorright.png")
n_cursorright.Region(NILES_REGION)
n_cursorup = button()
n_cursorup.Image("n_cursorup.png")
n_cursorup.Region(NILES_REGION)
n_irrepeat = button()
n_irrepeat.Image("n_irrepeat.png")
n_irrepeat.Region(NILES_REGION)
n_iroutput04 = button()
n_iroutput04.Image("n_iroutput04.png")
n_iroutput04.Region(NILES_REGION)
n_sonyallmodels = button()
n_sonyallmodels.Image("n_sonyallmodels.png")
n_sonyallmodels.Region(NILES_REGION)
n_sonyallmodels.similar(int = .60)
n_sonyallmodels2 = button()
n_sonyallmodels2.Image("n_sonyallmodels2.png")
n_sonyallmodels2.Region(NILES_REGION)
n_sonyallmodels2.similar(int = .60)
n_back = button()
n_back.Image("n_back.png")
n_back.Region(NILES_REGION)
n_english = button()
n_english.Image("n_english.png")
n_english.Region(NILES_REGION)
n_vuegenerale = button()
n_vuegenerale.Image("n_vuegenerale.png")
n_vuegenerale.Region(NILES_REGION)
n_french = button()
n_french.Image("n_french.png")
n_french.Region(NILES_REGION)
n_apply = button()
n_apply.Image("n_apply.png")
n_apply.Region(NILES_REGION)
n_squeezeboxtouch = button()
n_squeezeboxtouch.Image("n_squeezeboxtouch.png")
n_squeezeboxtouch.Region(NILES_REGION)
n_configureuserinterfaces = button()
n_configureuserinterfaces.Image("n_configureuserinterfaces.png")
n_configureuserinterfaces.Region(NILES_REGION)
n_at = button()
n_at.Image("n_at.png")
n_at.Region(NILES_REGION)
n_at.similar(GlobalCustomButtonThreshold)
n_configuresources2 = button()
n_configuresources2.Image("n_configuresources2.png")
n_configuresources2.Region(NILES_REGION)
n_systemconfiguration = button()
n_systemconfiguration.Image("n_systemconfiguration.png")
n_systemconfiguration.Region(NILES_REGION)
n_amc = button()
n_amc.Image("n_amc.png")
n_amc.Region(NILES_REGION)
n_four = button()
n_four.Image("n_four.png")
n_four.Region(NILES_REGION)
n_loadunitedstates = button()
n_loadunitedstates.Image("n_loadunitedstates.png")
n_loadunitedstates.Region(NILES_REGION)
n_unitedstates = button()
n_unitedstates.Image("n_unitedstates.png")
n_unitedstates.Region(NILES_REGION)
n_panasonic = button()
n_panasonic.Image("n_panasonic.png")
n_panasonic.Region(NILES_REGION)
n_scrolldown = button()
n_scrolldown.Image("n_scrolldown.png")
n_scrolldown.Region(NILES_REGION)
n_six = button()
n_six.Image("n_six.png")
n_six.Region(NILES_REGION)
n_softsqueeze = button()
n_softsqueeze.Image("n_softsqueeze.png")
n_softsqueeze.Region(NILES_REGION)
n_sony = button()
n_sony.Image("n_sony.png")
n_sony.Region(NILES_REGION)
n_sourceone = button()
n_sourceone.Image("n_sourceone.png")
n_sourceone.Region(NILES_REGION)
n_three = button()
n_three.Image("n_three.png")
n_three.Region(NILES_REGION)
n_two = button()
n_two.Image("n_two.png")
n_two.Region(NILES_REGION)
n_yamahrxa2040ynca = button()
n_yamahrxa2040ynca.Image("n_yamahrxa2040ynca.png")
n_yamahrxa2040ynca.Region(NILES_REGION)
n_zeroip = button()
n_zeroip.Image("n_zeroip.png")
n_zeroip.Region(NILES_REGION)
adasuite1616videozones = button()
adasuite1616videozones.Image("adasuite1616videozones.png")
adasuite1616videozones.Region(CONFIGURATOR_REGION)
adasuite1616zone = button()
adasuite1616zone.Image("adasuite1616zone.png")
adasuite1616zone.Region(CONFIGURATOR_REGION)
adasuite1632videozones = button()
adasuite1632videozones.Image("adasuite1632videozones.png")
adasuite1632videozones.Region(CONFIGURATOR_REGION)
adasuite1632zone = button()
adasuite1632zone.Image("adasuite1632zone.png")
adasuite1632zone.Region(CONFIGURATOR_REGION)
adasuite1648zone = button()
adasuite1648zone.Image("adasuite1648zone.png")
adasuite1648zone.Region(CONFIGURATOR_REGION)
adasuite1664zone = button()
adasuite1664zone.Image("adasuite1664zone.png")
adasuite1664zone.Region(CONFIGURATOR_REGION)
adasuite1696zone = button()
adasuite1696zone.Image("adasuite1696zone.png")
adasuite1696zone.Region(CONFIGURATOR_REGION)
adasuite3232zone = button()
adasuite3232zone.Image("adasuite3232zone.png")
adasuite3232zone.Region(CONFIGURATOR_REGION)
ziporpostalcode = button()
ziporpostalcode.Image("ziporpostalcode.png")
ziporpostalcode.Region(CONFIGURATOR_REGION)
keypadinterfaces = button()
keypadinterfaces.Image("keypadinterfaces.png")
keypadinterfaces.Region(CONFIGURATOR_REGION)
locationtype = button()
locationtype.Image("locationtype.png")
locationtype.Region(CONFIGURATOR_REGION)
adasuite3264zone = button()
adasuite3264zone.Image("adasuite3264zone.png")
adasuite3264zone.Region(CONFIGURATOR_REGION)
adasuite3296zone = button()
adasuite3296zone.Image("adasuite3296zone.png")
adasuite3296zone.Region(CONFIGURATOR_REGION)
adasuite71 = button()
adasuite71.Image("adasuite71.png")
adasuite71.Region(CONFIGURATOR_REGION)
n_address = button()
n_address.Image("n_address.png")
n_address.Region(NILES_REGION)
adasuite8100 = button()
adasuite8100.Image("adasuite8100.png")
adasuite8100.Region(CONFIGURATOR_REGION)
adasuite8200 = button()
adasuite8200.Image("adasuite8200.png")
adasuite8200.Region(CONFIGURATOR_REGION)
ah66tsinglechassis = button()
ah66tsinglechassis.Image("ah66tsinglechassis.png")
ah66tsinglechassis.Region(CONFIGURATOR_REGION)
atlonaath2h44m4x4hdmi = button()
atlonaath2h44m4x4hdmi.Image("atlonaath2h44m4x4hdmi.png")
atlonaath2h44m4x4hdmi.Region(CONFIGURATOR_REGION)
atlonaath2h88m8x8hdmi = button()
atlonaath2h88m8x8hdmi.Image("atlonaath2h88m8x8hdmi.png")
atlonaath2h88m8x8hdmi.Region(CONFIGURATOR_REGION)
atlonaathdv1616m16x16hdmi = button()
atlonaathdv1616m16x16hdmi.Image("atlonaathdv1616m16x16hdmi.png")
atlonaathdv1616m16x16hdmi.Region(CONFIGURATOR_REGION)
atlonaathdv44m4x4hdmi = button()
atlonaathdv44m4x4hdmi.Image("atlonaathdv44m4x4hdmi.png")
atlonaathdv44m4x4hdmi.Region(CONFIGURATOR_REGION)
atlonaatpro2hd1616m16x16hdbaset = button()
atlonaatpro2hd1616m16x16hdbaset.Image("atlonaatpro2hd1616m16x16hdbaset.png")
atlonaatpro2hd1616m16x16hdbaset.Region(CONFIGURATOR_REGION)
atlonaatpro2hd44m4x4hdbaset = button()
atlonaatpro2hd44m4x4hdbaset.Image("atlonaatpro2hd44m4x4hdbaset.png")
atlonaatpro2hd44m4x4hdbaset.Region(CONFIGURATOR_REGION)
atlonaatpro2hd88m8x8hdbaset = button()
atlonaatpro2hd88m8x8hdbaset.Image("atlonaatpro2hd88m8x8hdbaset.png")
atlonaatpro2hd88m8x8hdbaset.Region(CONFIGURATOR_REGION)
atlonaatpro3hd44m4x4hdbaset = button()
atlonaatpro3hd44m4x4hdbaset.Image("atlonaatpro3hd44m4x4hdbaset.png")
atlonaatpro3hd44m4x4hdbaset.Region(CONFIGURATOR_REGION)
atlonaatpro3hd66m6x6hdbaset = button()
atlonaatpro3hd66m6x6hdbaset.Image("atlonaatpro3hd66m6x6hdbaset.png")
atlonaatpro3hd66m6x6hdbaset.Region(CONFIGURATOR_REGION)
denon28053805 = button()
denon28053805.Image("denon28053805.png")
denon28053805.Region(CONFIGURATOR_REGION)
denonavpa1hdci = button()
denonavpa1hdci.Image("denonavpa1hdci.png")
denonavpa1hdci.Region(CONFIGURATOR_REGION)
denonavr1613 = button()
denonavr1613.Image("denonavr1613.png")
denonavr1613.Region(CONFIGURATOR_REGION)
denonavr1713 = button()
denonavr1713.Image("denonavr1713.png")
denonavr1713.Region(CONFIGURATOR_REGION)
denonavr1912ci = button()
denonavr1912ci.Image("denonavr1912ci.png")
denonavr1912ci.Region(CONFIGURATOR_REGION)
denonavr1913 = button()
denonavr1913.Image("denonavr1913.png")
denonavr1913.Region(CONFIGURATOR_REGION)
denonavr2112ci = button()
denonavr2112ci.Image("denonavr2112ci.png")
denonavr2112ci.Region(CONFIGURATOR_REGION)
denonavr2113ci = button()
denonavr2113ci.Image("denonavr2113ci.png")
denonavr2113ci.Region(CONFIGURATOR_REGION)
denonavr2310ci = button()
denonavr2310ci.Image("denonavr2310ci.png")
denonavr2310ci.Region(CONFIGURATOR_REGION)
denonavr2311ci = button()
denonavr2311ci.Image("denonavr2311ci.png")
denonavr2311ci.Region(CONFIGURATOR_REGION)
denonavr2312ci = button()
denonavr2312ci.Image("denonavr2312ci.png")
denonavr2312ci.Region(CONFIGURATOR_REGION)
denonavr2313ci = button()
denonavr2313ci.Image("denonavr2313ci.png")
denonavr2313ci.Region(CONFIGURATOR_REGION)
denonavr2808ci = button()
denonavr2808ci.Image("denonavr2808ci.png")
denonavr2808ci.Region(CONFIGURATOR_REGION)
denonavr3310ci = button()
denonavr3310ci.Image("denonavr3310ci.png")
denonavr3310ci.Region(CONFIGURATOR_REGION)
denonavr3311ci = button()
denonavr3311ci.Image("denonavr3311ci.png")
denonavr3311ci.Region(CONFIGURATOR_REGION)
denonavr3312ci = button()
denonavr3312ci.Image("denonavr3312ci.png")
denonavr3312ci.Region(CONFIGURATOR_REGION)
denonavr3313ci = button()
denonavr3313ci.Image("denonavr3313ci.png")
denonavr3313ci.Region(CONFIGURATOR_REGION)
denonavr3806 = button()
denonavr3806.Image("denonavr3806.png")
denonavr3806.Region(CONFIGURATOR_REGION)
denonavr3808ci = button()
denonavr3808ci.Image("denonavr3808ci.png")
denonavr3808ci.Region(CONFIGURATOR_REGION)
denonavr4308ci = button()
denonavr4308ci.Image("denonavr4308ci.png")
denonavr4308ci.Region(CONFIGURATOR_REGION)
denonavr4310ci = button()
denonavr4310ci.Image("denonavr4310ci.png")
denonavr4310ci.Region(CONFIGURATOR_REGION)
denonavr4311ci = button()
denonavr4311ci.Image("denonavr4311ci.png")
denonavr4311ci.Region(CONFIGURATOR_REGION)
denonavr45204520ci = button()
denonavr45204520ci.Image("denonavr45204520ci.png")
denonavr45204520ci.Region(CONFIGURATOR_REGION)
denonavr4806 = button()
denonavr4806.Image("denonavr4806.png")
denonavr4806.Region(CONFIGURATOR_REGION)
denonavr4810ci = button()
denonavr4810ci.Image("denonavr4810ci.png")
denonavr4810ci.Region(CONFIGURATOR_REGION)
denonavr5308ci = button()
denonavr5308ci.Image("denonavr5308ci.png")
denonavr5308ci.Region(CONFIGURATOR_REGION)
denonavr5805 = button()
denonavr5805.Image("denonavr5805.png")
denonavr5805.Region(CONFIGURATOR_REGION)
denonavrx1000international = button()
denonavrx1000international.Image("denonavrx1000international.png")
denonavrx1000international.Region(CONFIGURATOR_REGION)
denonavrx1000us = button()
denonavrx1000us.Image("denonavrx1000us.png")
denonavrx1000us.Region(CONFIGURATOR_REGION)
denonavrx1100 = button()
denonavrx1100.Image("denonavrx1100.png")
denonavrx1100.Region(CONFIGURATOR_REGION)
denonavrx2000 = button()
denonavrx2000.Image("denonavrx2000.png")
denonavrx2000.Region(CONFIGURATOR_REGION)
denonavrx2100 = button()
denonavrx2100.Image("denonavrx2100.png")
denonavrx2100.Region(CONFIGURATOR_REGION)
denonavrx3000 = button()
denonavrx3000.Image("denonavrx3000.png")
denonavrx3000.Region(CONFIGURATOR_REGION)
denonavrx3100 = button()
denonavrx3100.Image("denonavrx3100.png")
denonavrx3100.Region(CONFIGURATOR_REGION)
denonavrx4000 = button()
denonavrx4000.Image("denonavrx4000.png")
denonavrx4000.Region(CONFIGURATOR_REGION)
denonavrx4100 = button()
denonavrx4100.Image("denonavrx4100.png")
denonavrx4100.Region(CONFIGURATOR_REGION)
denonavrx5200us = button()
denonavrx5200us.Image("denonavrx5200us.png")
denonavrx5200us.Region(CONFIGURATOR_REGION)
elangm64ethernet = button()
elangm64ethernet.Image("elangm64ethernet.png")
elangm64ethernet.Region(CONFIGURATOR_REGION)
elangm64ethernet.similar(int = .50)
addnewcustomsource = button()
addnewcustomsource.Image("addnewcustomsource.png")
addnewcustomsource.Region(CONFIGURATOR_REGION)
elangmv64rs232 = button()
elangmv64rs232.Image("elangmv64rs232.png")
elangmv64rs232.Region(CONFIGURATOR_REGION)
elanm86a12zones = button()
elanm86a12zones.Image("elanm86a12zones.png")
elanm86a12zones.Region(CONFIGURATOR_REGION)
elanm86a18zones = button()
elanm86a18zones.Image("elanm86a18zones.png")
elanm86a18zones.Region(CONFIGURATOR_REGION)
selectall = button()
selectall.Image("selectall.png")
selectall.Region(CONFIGURATOR_REGION)
selectall.similar(int = .18)
elanm86a24zones = button()
elanm86a24zones.Image("elanm86a24zones.png")
elanm86a24zones.Region(CONFIGURATOR_REGION)
elanm86a6zone = button()
elanm86a6zone.Image("elanm86a6zone.png")
elanm86a6zone.Region(CONFIGURATOR_REGION)
elans1616adualchassismode = button()
elans1616adualchassismode.Image("elans1616adualchassismode.png")
elans1616adualchassismode.Region(CONFIGURATOR_REGION)
elans1616asinglechassismode = button()
elans1616asinglechassismode.Image("elans1616asinglechassismode.png")
elans1616asinglechassismode.Region(CONFIGURATOR_REGION)
elans86ap = button()
elans86ap.Image("elans86ap.png")
elans86ap.Region(CONFIGURATOR_REGION)
elans86ap12zones = button()
elans86ap12zones.Image("elans86ap12zones.png")
elans86ap12zones.Region(CONFIGURATOR_REGION)
elans86ap18zones = button()
elans86ap18zones.Image("elans86ap18zones.png")
elans86ap18zones.Region(CONFIGURATOR_REGION)
elans86ap24zones = button()
elans86ap24zones.Image("elans86ap24zones.png")
elans86ap24zones.Region(CONFIGURATOR_REGION)
elansystem1208zones = button()
elansystem1208zones.Image("elansystem1208zones.png")
elansystem1208zones.Region(CONFIGURATOR_REGION)
elansystem1216zones = button()
elansystem1216zones.Image("elansystem1216zones.png")
elansystem1216zones.Region(CONFIGURATOR_REGION)
elansystem1224zones = button()
elansystem1224zones.Image("elansystem1224zones.png")
elansystem1224zones.Region(CONFIGURATOR_REGION)
elansystem1232zones = button()
elansystem1232zones.Image("elansystem1232zones.png")
elansystem1232zones.Region(CONFIGURATOR_REGION)
elanv8 = button()
elanv8.Image("elanv8.png")
elanv8.Region(CONFIGURATOR_REGION)
elanv85 = button()
elanv85.Image("elanv85.png")
elanv85.Region(CONFIGURATOR_REGION)
elanv883 = button()
elanv883.Image("elanv883.png")
elanv883.Region(CONFIGURATOR_REGION)
gefenhdfst4444elr = button()
gefenhdfst4444elr.Image("gefenhdfst4444elr.png")
gefenhdfst4444elr.Region(CONFIGURATOR_REGION)
gefenhdfst848 = button()
gefenhdfst848.Image("gefenhdfst848.png")
gefenhdfst848.Region(CONFIGURATOR_REGION)
genericsinglezonecontroller = button()
genericsinglezonecontroller.Image("genericsinglezonecontroller.png")
genericsinglezonecontroller.Region(CONFIGURATOR_REGION)
integradhc806 = button()
integradhc806.Image("integradhc806.png")
integradhc806.Region(CONFIGURATOR_REGION)
integradtr203 = button()
integradtr203.Image("integradtr203.png")
integradtr203.Region(CONFIGURATOR_REGION)
integradtr303 = button()
integradtr303.Image("integradtr303.png")
integradtr303.Region(CONFIGURATOR_REGION)
integradtr306 = button()
integradtr306.Image("integradtr306.png")
integradtr306.Region(CONFIGURATOR_REGION)
integradtr401onkyotxnr1007 = button()
integradtr401onkyotxnr1007.Image("integradtr401onkyotxnr1007.png")
integradtr401onkyotxnr1007.Region(CONFIGURATOR_REGION)
integradtr403 = button()
integradtr403.Image("integradtr403.png")
integradtr403.Region(CONFIGURATOR_REGION)
integradtr404 = button()
integradtr404.Image("integradtr404.png")
integradtr404.Region(CONFIGURATOR_REGION)
integradtr405 = button()
integradtr405.Image("integradtr405.png")
integradtr405.Region(CONFIGURATOR_REGION)
integradtr406 = button()
integradtr406.Image("integradtr406.png")
integradtr406.Region(CONFIGURATOR_REGION)
integradtr46dtr56dtr66dtr76 = button()
integradtr46dtr56dtr66dtr76.Image("integradtr46dtr56dtr66dtr76.png")
integradtr46dtr56dtr66dtr76.Region(CONFIGURATOR_REGION)
integradtr49 = button()
integradtr49.Image("integradtr49.png")
integradtr49.Region(CONFIGURATOR_REGION)
integradtr501 = button()
integradtr501.Image("integradtr501.png")
integradtr501.Region(CONFIGURATOR_REGION)
integradtr503 = button()
integradtr503.Image("integradtr503.png")
integradtr503.Region(CONFIGURATOR_REGION)
integradtr504 = button()
integradtr504.Image("integradtr504.png")
integradtr504.Region(CONFIGURATOR_REGION)
integradtr505 = button()
integradtr505.Image("integradtr505.png")
integradtr505.Region(CONFIGURATOR_REGION)
integradtr506 = button()
integradtr506.Image("integradtr506.png")
integradtr506.Region(CONFIGURATOR_REGION)
integradtr59 = button()
integradtr59.Image("integradtr59.png")
integradtr59.Region(CONFIGURATOR_REGION)
integradtr605 = button()
integradtr605.Image("integradtr605.png")
integradtr605.Region(CONFIGURATOR_REGION)
integradtr606 = button()
integradtr606.Image("integradtr606.png")
integradtr606.Region(CONFIGURATOR_REGION)
integradtr701onkyotxnr3007 = button()
integradtr701onkyotxnr3007.Image("integradtr701onkyotxnr3007.png")
integradtr701onkyotxnr3007.Region(CONFIGURATOR_REGION)
integradtr703dtr803 = button()
integradtr703dtr803.Image("integradtr703dtr803.png")
integradtr703dtr803.Region(CONFIGURATOR_REGION)
integradtr704 = button()
integradtr704.Image("integradtr704.png")
integradtr704.Region(CONFIGURATOR_REGION)
integradtr706 = button()
integradtr706.Image("integradtr706.png")
integradtr706.Region(CONFIGURATOR_REGION)
integradtr74dtr54 = button()
integradtr74dtr54.Image("integradtr74dtr54.png")
integradtr74dtr54.Region(CONFIGURATOR_REGION)
integradtr79dtr69onkyotxsr806txsr706 = button()
integradtr79dtr69onkyotxsr806txsr706.Image("integradtr79dtr69onkyotxsr806txsr706.png")
integradtr79dtr69onkyotxsr806txsr706.Region(CONFIGURATOR_REGION)
integradtr801onkyotxnr5007 = button()
integradtr801onkyotxnr5007.Image("integradtr801onkyotxnr5007.png")
integradtr801onkyotxnr5007.Region(CONFIGURATOR_REGION)
integradtr89onkyotxsr876prsc886 = button()
integradtr89onkyotxsr876prsc886.Image("integradtr89onkyotxsr876prsc886.png")
integradtr89onkyotxsr876prsc886.Region(CONFIGURATOR_REGION)
integradtr99onkyotxnr906 = button()
integradtr99onkyotxnr906.Image("integradtr99onkyotxnr906.png")
integradtr99onkyotxnr906.Region(CONFIGURATOR_REGION)
japhdoveripswitch = button()
japhdoveripswitch.Image("japhdoveripswitch.png")
japhdoveripswitch.Region(CONFIGURATOR_REGION)
marantzav7701dtype = button()
marantzav7701dtype.Image("marantzav7701dtype.png")
marantzav7701dtype.Region(CONFIGURATOR_REGION)
marantzav7702dtype = button()
marantzav7702dtype.Image("marantzav7702dtype.png")
marantzav7702dtype.Region(CONFIGURATOR_REGION)
marantzav8801dtype = button()
marantzav8801dtype.Image("marantzav8801dtype.png")
marantzav8801dtype.Region(CONFIGURATOR_REGION)
marantznr1504dtype = button()
marantznr1504dtype.Image("marantznr1504dtype.png")
marantznr1504dtype.Region(CONFIGURATOR_REGION)
marantznr1602dtype = button()
marantznr1602dtype.Image("marantznr1602dtype.png")
marantznr1602dtype.Region(CONFIGURATOR_REGION)
marantznr1603dtype = button()
marantznr1603dtype.Image("marantznr1603dtype.png")
marantznr1603dtype.Region(CONFIGURATOR_REGION)
marantznr1604dtype = button()
marantznr1604dtype.Image("marantznr1604dtype.png")
marantznr1604dtype.Region(CONFIGURATOR_REGION)
marantznr1605dtype = button()
marantznr1605dtype.Image("marantznr1605dtype.png")
marantznr1605dtype.Region(CONFIGURATOR_REGION)
marantzsr5004 = button()
marantzsr5004.Image("marantzsr5004.png")
marantzsr5004.Region(CONFIGURATOR_REGION)
marantzsr5005 = button()
marantzsr5005.Image("marantzsr5005.png")
marantzsr5005.Region(CONFIGURATOR_REGION)
marantzsr5006dtype = button()
marantzsr5006dtype.Image("marantzsr5006dtype.png")
marantzsr5006dtype.Region(CONFIGURATOR_REGION)
marantzsr5007dtype = button()
marantzsr5007dtype.Image("marantzsr5007dtype.png")
marantzsr5007dtype.Region(CONFIGURATOR_REGION)
marantzsr5008dtype = button()
marantzsr5008dtype.Image("marantzsr5008dtype.png")
marantzsr5008dtype.Region(CONFIGURATOR_REGION)
marantzsr5009dtype = button()
marantzsr5009dtype.Image("marantzsr5009dtype.png")
marantzsr5009dtype.Region(CONFIGURATOR_REGION)
marantzsr5500560075008500 = button()
marantzsr5500560075008500.Image("marantzsr5500560075008500.png")
marantzsr5500560075008500.Region(CONFIGURATOR_REGION)
marantzsr6004 = button()
marantzsr6004.Image("marantzsr6004.png")
marantzsr6004.Region(CONFIGURATOR_REGION)
marantzsr6005dtype = button()
marantzsr6005dtype.Image("marantzsr6005dtype.png")
marantzsr6005dtype.Region(CONFIGURATOR_REGION)
marantzsr6006dtype = button()
marantzsr6006dtype.Image("marantzsr6006dtype.png")
marantzsr6006dtype.Region(CONFIGURATOR_REGION)
marantzsr6007dtype = button()
marantzsr6007dtype.Image("marantzsr6007dtype.png")
marantzsr6007dtype.Region(CONFIGURATOR_REGION)
marantzsr6008dtype = button()
marantzsr6008dtype.Image("marantzsr6008dtype.png")
marantzsr6008dtype.Region(CONFIGURATOR_REGION)
marantzsr6009dtype = button()
marantzsr6009dtype.Image("marantzsr6009dtype.png")
marantzsr6009dtype.Region(CONFIGURATOR_REGION)
marantzsr7002 = button()
marantzsr7002.Image("marantzsr7002.png")
marantzsr7002.Region(CONFIGURATOR_REGION)
marantzsr7005av7005dtype = button()
marantzsr7005av7005dtype.Image("marantzsr7005av7005dtype.png")
marantzsr7005av7005dtype.Region(CONFIGURATOR_REGION)
marantzsr7007dtype = button()
marantzsr7007dtype.Image("marantzsr7007dtype.png")
marantzsr7007dtype.Region(CONFIGURATOR_REGION)
marantzsr7008dtype = button()
marantzsr7008dtype.Image("marantzsr7008dtype.png")
marantzsr7008dtype.Region(CONFIGURATOR_REGION)
marantzsr7009dtype = button()
marantzsr7009dtype.Image("marantzsr7009dtype.png")
marantzsr7009dtype.Region(CONFIGURATOR_REGION)
marantzsr8001 = button()
marantzsr8001.Image("marantzsr8001.png")
marantzsr8001.Region(CONFIGURATOR_REGION)
marantzsr8002 = button()
marantzsr8002.Image("marantzsr8002.png")
marantzsr8002.Region(CONFIGURATOR_REGION)
marantzsr9600 = button()
marantzsr9600.Image("marantzsr9600.png")
marantzsr9600.Region(CONFIGURATOR_REGION)
nilesgxr2ethernet = button()
nilesgxr2ethernet.Image("nilesgxr2ethernet.png")
nilesgxr2ethernet.Region(CONFIGURATOR_REGION)
nilesmrc6430 = button()
nilesmrc6430.Image("nilesmrc6430.png")
nilesmrc6430.Region(CONFIGURATOR_REGION)
nuvoconcerto = button()
nuvoconcerto.Image("nuvoconcerto.png")
nuvoconcerto.Region(CONFIGURATOR_REGION)
nuvoessentia = button()
nuvoessentia.Image("nuvoessentia.png")
nuvoessentia.Region(CONFIGURATOR_REGION)
nuvoessentianve6g12zone = button()
nuvoessentianve6g12zone.Image("nuvoessentianve6g12zone.png")
nuvoessentianve6g12zone.Region(CONFIGURATOR_REGION)
nuvoessentianve6g6zone = button()
nuvoessentianve6g6zone.Image("nuvoessentianve6g6zone.png")
nuvoessentianve6g6zone.Region(CONFIGURATOR_REGION)
nuvograndconcerto16zone = button()
nuvograndconcerto16zone.Image("nuvograndconcerto16zone.png")
nuvograndconcerto16zone.Region(CONFIGURATOR_REGION)
nuvograndconcerto8zone = button()
nuvograndconcerto8zone.Image("nuvograndconcerto8zone.png")
nuvograndconcerto8zone.Region(CONFIGURATOR_REGION)
onkyoprsc5530 = button()
onkyoprsc5530.Image("onkyoprsc5530.png")
onkyoprsc5530.Region(CONFIGURATOR_REGION)
onkyotxnr1009txnr3009txnr5009 = button()
onkyotxnr1009txnr3009txnr5009.Image("onkyotxnr1009txnr3009txnr5009.png")
onkyotxnr1009txnr3009txnr5009.Region(CONFIGURATOR_REGION)
onkyotxnr1010 = button()
onkyotxnr1010.Image("onkyotxnr1010.png")
onkyotxnr1010.Region(CONFIGURATOR_REGION)
onkyotxnr1030 = button()
onkyotxnr1030.Image("onkyotxnr1030.png")
onkyotxnr1030.Region(CONFIGURATOR_REGION)
onkyotxnr3010 = button()
onkyotxnr3010.Image("onkyotxnr3010.png")
onkyotxnr3010.Region(CONFIGURATOR_REGION)
onkyotxnr3030 = button()
onkyotxnr3030.Image("onkyotxnr3030.png")
onkyotxnr3030.Region(CONFIGURATOR_REGION)
onkyotxnr5010 = button()
onkyotxnr5010.Image("onkyotxnr5010.png")
onkyotxnr5010.Region(CONFIGURATOR_REGION)
onkyotxnr515 = button()
onkyotxnr515.Image("onkyotxnr515.png")
onkyotxnr515.Region(CONFIGURATOR_REGION)
onkyotxnr525 = button()
onkyotxnr525.Image("onkyotxnr525.png")
onkyotxnr525.Region(CONFIGURATOR_REGION)
onkyotxnr535 = button()
onkyotxnr535.Image("onkyotxnr535.png")
onkyotxnr535.Region(CONFIGURATOR_REGION)
onkyotxnr609 = button()
onkyotxnr609.Image("onkyotxnr609.png")
onkyotxnr609.Region(CONFIGURATOR_REGION)
onkyotxnr616 = button()
onkyotxnr616.Image("onkyotxnr616.png")
onkyotxnr616.Region(CONFIGURATOR_REGION)
onkyotxnr626 = button()
onkyotxnr626.Image("onkyotxnr626.png")
onkyotxnr626.Region(CONFIGURATOR_REGION)
onkyotxnr636htrc660 = button()
onkyotxnr636htrc660.Image("onkyotxnr636htrc660.png")
onkyotxnr636htrc660.Region(CONFIGURATOR_REGION)
onkyotxnr709 = button()
onkyotxnr709.Image("onkyotxnr709.png")
onkyotxnr709.Region(CONFIGURATOR_REGION)
onkyotxnr717 = button()
onkyotxnr717.Image("onkyotxnr717.png")
onkyotxnr717.Region(CONFIGURATOR_REGION)
onkyotxnr727 = button()
onkyotxnr727.Image("onkyotxnr727.png")
onkyotxnr727.Region(CONFIGURATOR_REGION)
onkyotxnr727.similar(.50)
onkyotxnr737 = button()
onkyotxnr737.Image("onkyotxnr737.png")
onkyotxnr737.Region(CONFIGURATOR_REGION)
onkyotxnr809 = button()
onkyotxnr809.Image("onkyotxnr809.png")
onkyotxnr809.Region(CONFIGURATOR_REGION)
onkyotxnr818 = button()
onkyotxnr818.Image("onkyotxnr818.png")
onkyotxnr818.Region(CONFIGURATOR_REGION)
onkyotxnr818.similar(.50)
onkyotxnr828 = button()
onkyotxnr828.Image("onkyotxnr828.png")
onkyotxnr828.Region(CONFIGURATOR_REGION)
onkyotxnr828.similar(.50)
onkyotxnr838 = button()
onkyotxnr838.Image("onkyotxnr838.png")
onkyotxnr838.Region(CONFIGURATOR_REGION)
onkyotxnr838.similar(.50)
onkyotxnr929 = button()
onkyotxnr929.Image("onkyotxnr929.png")
onkyotxnr929.Region(CONFIGURATOR_REGION)
onkyotxnr929.similar(.50)
pioneersc1223k = button()
pioneersc1223k.Image("pioneersc1223k.png")
pioneersc1223k.Region(CONFIGURATOR_REGION)
pioneersc1323k = button()
pioneersc1323k.Image("pioneersc1323k.png")
pioneersc1323k.Region(CONFIGURATOR_REGION)
pioneersc1523k = button()
pioneersc1523k.Image("pioneersc1523k.png")
pioneersc1523k.Region(CONFIGURATOR_REGION)
pioneersc2023k = button()
pioneersc2023k.Image("pioneersc2023k.png")
pioneersc2023k.Region(CONFIGURATOR_REGION)
pioneersc55 = button()
pioneersc55.Image("pioneersc55.png")
pioneersc55.Region(CONFIGURATOR_REGION)
pioneersc57 = button()
pioneersc57.Image("pioneersc57.png")
pioneersc57.Region(CONFIGURATOR_REGION)
pioneersc61 = button()
pioneersc61.Image("pioneersc61.png")
pioneersc61.Region(CONFIGURATOR_REGION)
pioneersc63 = button()
pioneersc63.Image("pioneersc63.png")
pioneersc63.Region(CONFIGURATOR_REGION)
pioneersc65 = button()
pioneersc65.Image("pioneersc65.png")
pioneersc65.Region(CONFIGURATOR_REGION)
pioneersc67 = button()
pioneersc67.Image("pioneersc67.png")
pioneersc67.Region(CONFIGURATOR_REGION)
pioneersc68 = button()
pioneersc68.Image("pioneersc68.png")
pioneersc68.Region(CONFIGURATOR_REGION)
pioneersc71 = button()
pioneersc71.Image("pioneersc71.png")
pioneersc71.Region(CONFIGURATOR_REGION)
pioneersc72 = button()
pioneersc72.Image("pioneersc72.png")
pioneersc72.Region(CONFIGURATOR_REGION)
pioneersc75 = button()
pioneersc75.Image("pioneersc75.png")
pioneersc75.Region(CONFIGURATOR_REGION)
pioneersc77 = button()
pioneersc77.Image("pioneersc77.png")
pioneersc77.Region(CONFIGURATOR_REGION)
pioneersc79 = button()
pioneersc79.Image("pioneersc79.png")
pioneersc79.Region(CONFIGURATOR_REGION)
pioneersclx57k = button()
pioneersclx57k.Image("pioneersclx57k.png")
pioneersclx57k.Region(CONFIGURATOR_REGION)
pioneersclx77k = button()
pioneersclx77k.Image("pioneersclx77k.png")
pioneersclx77k.Region(CONFIGURATOR_REGION)
pioneersclx87k = button()
pioneersclx87k.Image("pioneersclx87k.png")
pioneersclx87k.Region(CONFIGURATOR_REGION)
pioneervsx1123k = button()
pioneervsx1123k.Image("pioneervsx1123k.png")
pioneervsx1123k.Region(CONFIGURATOR_REGION)
pioneervsx50 = button()
pioneervsx50.Image("pioneervsx50.png")
pioneervsx50.Region(CONFIGURATOR_REGION)
pioneervsx51 = button()
pioneervsx51.Image("pioneervsx51.png")
pioneervsx51.Region(CONFIGURATOR_REGION)
pioneervsx52 = button()
pioneervsx52.Image("pioneervsx52.png")
pioneervsx52.Region(CONFIGURATOR_REGION)
pioneervsx53 = button()
pioneervsx53.Image("pioneervsx53.png")
pioneervsx53.Region(CONFIGURATOR_REGION)
pioneervsx60 = button()
pioneervsx60.Image("pioneervsx60.png")
pioneervsx60.Region(CONFIGURATOR_REGION)
pioneervsx70k = button()
pioneervsx70k.Image("pioneervsx70k.png")
pioneervsx70k.Region(CONFIGURATOR_REGION)
pioneervsx923k = button()
pioneervsx923k.Image("pioneervsx923k.png")
pioneervsx923k.Region(CONFIGURATOR_REGION)
snapavb100b3004x4or8x8 = button()
snapavb100b3004x4or8x8.Image("snapavb100b3004x4or8x8.png")
snapavb100b3004x4or8x8.Region(CONFIGURATOR_REGION)
speakercraftmra664 = button()
speakercraftmra664.Image("speakercraftmra664.png")
speakercraftmra664.Region(CONFIGURATOR_REGION)
speakercraftmzc64 = button()
speakercraftmzc64.Image("speakercraftmzc64.png")
speakercraftmzc64.Region(CONFIGURATOR_REGION)
speakercraftmzc648zone = button()
speakercraftmzc648zone.Image("speakercraftmzc648zone.png")
speakercraftmzc648zone.Region(CONFIGURATOR_REGION)
speakercraftmzc66 = button()
speakercraftmzc66.Image("speakercraftmzc66.png")
speakercraftmzc66.Region(CONFIGURATOR_REGION)
speakercraftmzc6612zone = button()
speakercraftmzc6612zone.Image("speakercraftmzc6612zone.png")
speakercraftmzc6612zone.Region(CONFIGURATOR_REGION)
speakercraftmzc6618zone = button()
speakercraftmzc6618zone.Image("speakercraftmzc6618zone.png")
speakercraftmzc6618zone.Region(CONFIGURATOR_REGION)
speakercraftmzc6624zone = button()
speakercraftmzc6624zone.Image("speakercraftmzc6624zone.png")
speakercraftmzc6624zone.Region(CONFIGURATOR_REGION)
speakercraftmzc88 = button()
speakercraftmzc88.Image("speakercraftmzc88.png")
speakercraftmzc88.Region(CONFIGURATOR_REGION)
speakercraftmzc8816zone = button()
speakercraftmzc8816zone.Image("speakercraftmzc8816zone.png")
speakercraftmzc8816zone.Region(CONFIGURATOR_REGION)
speakercraftmzc8824zone = button()
speakercraftmzc8824zone.Image("speakercraftmzc8824zone.png")
speakercraftmzc8824zone.Region(CONFIGURATOR_REGION)
speakercraftmzc8832zone = button()
speakercraftmzc8832zone.Image("speakercraftmzc8832zone.png")
speakercraftmzc8832zone.Region(CONFIGURATOR_REGION)
sunfiretgr3tgp5 = button()
sunfiretgr3tgp5.Image("sunfiretgr3tgp5.png")
sunfiretgr3tgp5.Region(CONFIGURATOR_REGION)
sunfiretgr401tgp401 = button()
sunfiretgr401tgp401.Image("sunfiretgr401tgp401.png")
sunfiretgr401tgp401.Region(CONFIGURATOR_REGION)
wyrestormmx0404 = button()
wyrestormmx0404.Image("wyrestormmx0404.png")
wyrestormmx0404.Region(CONFIGURATOR_REGION)
wyrestormmx0606 = button()
wyrestormmx0606.Image("wyrestormmx0606.png")
wyrestormmx0606.Region(CONFIGURATOR_REGION)
wyrestormmx0804 = button()
wyrestormmx0804.Image("wyrestormmx0804.png")
wyrestormmx0804.Region(CONFIGURATOR_REGION)
wyrestormmx0808 = button()
wyrestormmx0808.Image("wyrestormmx0808.png")
wyrestormmx0808.Region(CONFIGURATOR_REGION)
wyrestormmx0808310 = button()
wyrestormmx0808310.Image("wyrestormmx0808310.png")
wyrestormmx0808310.Region(CONFIGURATOR_REGION)
wyrestormmx0816310 = button()
wyrestormmx0816310.Image("wyrestormmx0816310.png")
wyrestormmx0816310.Region(CONFIGURATOR_REGION)
wyrestormmx1616310 = button()
wyrestormmx1616310.Image("wyrestormmx1616310.png")
wyrestormmx1616310.Region(CONFIGURATOR_REGION)
xantechhd44cc514units = button()
xantechhd44cc514units.Image("xantechhd44cc514units.png")
xantechhd44cc514units.Region(CONFIGURATOR_REGION)
xantechhd88cc514units = button()
xantechhd88cc514units.Image("xantechhd88cc514units.png")
xantechhd88cc514units.Region(CONFIGURATOR_REGION)
yamaharxa1000ynca = button()
yamaharxa1000ynca.Image("yamaharxa1000ynca.png")
yamaharxa1000ynca.Region(CONFIGURATOR_REGION)
yamaharxa1010ynca = button()
yamaharxa1010ynca.Image("yamaharxa1010ynca.png")
yamaharxa1010ynca.Region(CONFIGURATOR_REGION)
yamaharxa1020ynca = button()
yamaharxa1020ynca.Image("yamaharxa1020ynca.png")
yamaharxa1020ynca.Region(CONFIGURATOR_REGION)
yamaharxa1030ynca = button()
yamaharxa1030ynca.Image("yamaharxa1030ynca.png")
yamaharxa1030ynca.Region(CONFIGURATOR_REGION)
yamaharxa1040ynca = button()
yamaharxa1040ynca.Image("yamaharxa1040ynca.png")
yamaharxa1040ynca.Region(CONFIGURATOR_REGION)
yamaharxa2000ynca = button()
yamaharxa2000ynca.Image("yamaharxa2000ynca.png")
yamaharxa2000ynca.Region(CONFIGURATOR_REGION)
yamaharxa2010ynca = button()
yamaharxa2010ynca.Image("yamaharxa2010ynca.png")
yamaharxa2010ynca.Region(CONFIGURATOR_REGION)
yamaharxa2020ynca = button()
yamaharxa2020ynca.Image("yamaharxa2020ynca.png")
yamaharxa2020ynca.Region(CONFIGURATOR_REGION)
yamaharxa2030ynca = button()
yamaharxa2030ynca.Image("yamaharxa2030ynca.png")
yamaharxa2030ynca.Region(CONFIGURATOR_REGION)
yamaharxa2040ynca = button()
yamaharxa2040ynca.Image("yamaharxa2040ynca.png")
yamaharxa2040ynca.Region(CONFIGURATOR_REGION)
yamaharxa3000ynca = button()
yamaharxa3000ynca.Image("yamaharxa3000ynca.png")
yamaharxa3000ynca.Region(CONFIGURATOR_REGION)
yamaharxa3010ynca = button()
yamaharxa3010ynca.Image("yamaharxa3010ynca.png")
yamaharxa3010ynca.Region(CONFIGURATOR_REGION)
yamaharxa3020ynca = button()
yamaharxa3020ynca.Image("yamaharxa3020ynca.png")
yamaharxa3020ynca.Region(CONFIGURATOR_REGION)
yamaharxa3030ynca = button()
yamaharxa3030ynca.Image("yamaharxa3030ynca.png")
yamaharxa3030ynca.Region(CONFIGURATOR_REGION)
yamaharxa3040ynca = button()
yamaharxa3040ynca.Image("yamaharxa3040ynca.png")
yamaharxa3040ynca.Region(CONFIGURATOR_REGION)
yamaharxa710ynca = button()
yamaharxa710ynca.Image("yamaharxa710ynca.png")
yamaharxa710ynca.Region(CONFIGURATOR_REGION)
yamaharxa720ynca = button()
yamaharxa720ynca.Image("yamaharxa720ynca.png")
yamaharxa720ynca.Region(CONFIGURATOR_REGION)
yamaharxa730ynca = button()
yamaharxa730ynca.Image("yamaharxa730ynca.png")
yamaharxa730ynca.Region(CONFIGURATOR_REGION)
yamaharxa740ynca = button()
yamaharxa740ynca.Image("yamaharxa740ynca.png")
yamaharxa740ynca.Region(CONFIGURATOR_REGION)
yamaharxa800ynca = button()
yamaharxa800ynca.Image("yamaharxa800ynca.png")
yamaharxa800ynca.Region(CONFIGURATOR_REGION)
yamaharxa810ynca = button()
yamaharxa810ynca.Image("yamaharxa810ynca.png")
yamaharxa810ynca.Region(CONFIGURATOR_REGION)
yamaharxa820ynca = button()
yamaharxa820ynca.Image("yamaharxa820ynca.png")
yamaharxa820ynca.Region(CONFIGURATOR_REGION)
yamaharxa830ynca = button()
yamaharxa830ynca.Image("yamaharxa830ynca.png")
yamaharxa830ynca.Region(CONFIGURATOR_REGION)
yamaharxa840ynca = button()
yamaharxa840ynca.Image("yamaharxa840ynca.png")
yamaharxa840ynca.Region(CONFIGURATOR_REGION)
yamaharxv1600v2600 = button()
yamaharxv1600v2600.Image("yamaharxv1600v2600.png")
yamaharxv1600v2600.Region(CONFIGURATOR_REGION)
yamaharxv1700v2700 = button()
yamaharxv1700v2700.Image("yamaharxv1700v2700.png")
yamaharxv1700v2700.Region(CONFIGURATOR_REGION)
yamaharxv2065ethernet = button()
yamaharxv2065ethernet.Image("yamaharxv2065ethernet.png")
yamaharxv2065ethernet.Region(CONFIGURATOR_REGION)
yamaharxv2065rs232 = button()
yamaharxv2065rs232.Image("yamaharxv2065rs232.png")
yamaharxv2065rs232.Region(CONFIGURATOR_REGION)
yamaharxv3900ethernet = button()
yamaharxv3900ethernet.Image("yamaharxv3900ethernet.png")
yamaharxv3900ethernet.Region(CONFIGURATOR_REGION)
yamaharxz7ethernet = button()
yamaharxz7ethernet.Image("yamaharxz7ethernet.png")
yamaharxz7ethernet.Region(CONFIGURATOR_REGION)
yamaharxz9 = button()
yamaharxz9.Image("yamaharxz9.png")
yamaharxz9.Region(CONFIGURATOR_REGION)
| kenshay/ImageScript | ProgramData/SystemFiles/Python/Lib/site-packages/elan/__OLD_SCRIPTS/Buttons.py | Python | gpl-3.0 | 86,791 |
# Miro - an RSS based video player application
# Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011
# Participatory Culture Foundation
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
# In addition, as a special exception, the copyright holders give
# permission to link the code of portions of this program with the OpenSSL
# library.
#
# You must obey the GNU General Public License in all respects for all of
# the code used other than OpenSSL. If you modify file(s) with this
# exception, you may extend this exception to your version of the file(s),
# but you are not obligated to do so. If you do not wish to do so, delete
# this exception statement from your version. If you delete this exception
# statement from all source files in the program, then also delete it here.
"""tableviewcells.py - Cell renderers for TableView."""
import gobject
import gtk
import pango
from mvc import signals
from mvc.widgets import widgetconst
import drawing
import wrappermap
from .base import make_gdk_color
class CellRenderer(object):
"""Simple Cell Renderer
https://develop.participatoryculture.org/index.php/WidgetAPITableView"""
def __init__(self):
self._renderer = gtk.CellRendererText()
self.want_hover = False
def setup_attributes(self, column, attr_map):
column.add_attribute(self._renderer, 'text', attr_map['value'])
def set_align(self, align):
if align == 'left':
self._renderer.props.xalign = 0.0
elif align == 'center':
self._renderer.props.xalign = 0.5
elif align == 'right':
self._renderer.props.xalign = 1.0
else:
raise ValueError("unknown alignment: %s" % align)
def set_color(self, color):
self._renderer.props.foreground_gdk = make_gdk_color(color)
def set_bold(self, bold):
font_desc = self._renderer.props.font_desc
if bold:
font_desc.set_weight(pango.WEIGHT_BOLD)
else:
font_desc.set_weight(pango.WEIGHT_NORMAL)
self._renderer.props.font_desc = font_desc
def set_text_size(self, size):
if size == widgetconst.SIZE_NORMAL:
self._renderer.props.scale = 1.0
elif size == widgetconst.SIZE_SMALL:
# FIXME: on 3.5 we just ignored the call. Always setting scale to
# 1.0 basically replicates that behavior, but should we actually
# try to implement the semantics of SIZE_SMALL?
self._renderer.props.scale = 1.0
else:
raise ValueError("unknown size: %s" % size)
def set_font_scale(self, scale_factor):
self._renderer.props.scale = scale_factor
class ImageCellRenderer(object):
"""Cell Renderer for images
https://develop.participatoryculture.org/index.php/WidgetAPITableView"""
def __init__(self):
self._renderer = gtk.CellRendererPixbuf()
self.want_hover = False
def setup_attributes(self, column, attr_map):
column.add_attribute(self._renderer, 'pixbuf', attr_map['image'])
class GTKCheckboxCellRenderer(gtk.CellRendererToggle):
def do_activate(self, event, treeview, path, background_area, cell_area,
flags):
iter = treeview.get_model().get_iter(path)
self.set_active(not self.get_active())
wrappermap.wrapper(self).emit('clicked', iter)
gobject.type_register(GTKCheckboxCellRenderer)
class CheckboxCellRenderer(signals.SignalEmitter):
"""Cell Renderer for booleans
https://develop.participatoryculture.org/index.php/WidgetAPITableView"""
def __init__(self):
signals.SignalEmitter.__init__(self)
self.create_signal("clicked")
self._renderer = GTKCheckboxCellRenderer()
wrappermap.add(self._renderer, self)
self.want_hover = False
def set_control_size(self, size):
pass
def setup_attributes(self, column, attr_map):
column.add_attribute(self._renderer, 'active', attr_map['value'])
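# Illustrative wiring for the renderer above (assumes mvc's usual
# signals.SignalEmitter.connect API; toggle_row is a hypothetical handler):
#   checkbox_renderer = CheckboxCellRenderer()
#   checkbox_renderer.connect('clicked', lambda renderer, iter: toggle_row(iter))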
class GTKCustomCellRenderer(gtk.GenericCellRenderer):
"""Handles the GTK hide of CustomCellRenderer
https://develop.participatoryculture.org/index.php/WidgetAPITableView"""
def on_get_size(self, widget, cell_area=None):
wrapper = wrappermap.wrapper(self)
widget_wrapper = wrappermap.wrapper(widget)
style = drawing.DrawingStyle(widget_wrapper, use_base_color=True)
# NOTE: CustomCellRenderer.cell_data_func() sets up its attributes
# from the model itself, so we don't have to worry about setting them
# here.
width, height = wrapper.get_size(style, widget_wrapper.layout_manager)
x_offset = self.props.xpad
y_offset = self.props.ypad
width += self.props.xpad * 2
height += self.props.ypad * 2
if cell_area:
x_offset += cell_area.x
            y_offset += cell_area.y
extra_width = max(0, cell_area.width - width)
extra_height = max(0, cell_area.height - height)
x_offset += int(round(self.props.xalign * extra_width))
y_offset += int(round(self.props.yalign * extra_height))
return x_offset, y_offset, width, height
def on_render(self, window, widget, background_area, cell_area, expose_area,
flags):
widget_wrapper = wrappermap.wrapper(widget)
cell_wrapper = wrappermap.wrapper(self)
selected = (flags & gtk.CELL_RENDERER_SELECTED)
if selected:
if widget.flags() & gtk.HAS_FOCUS:
state = gtk.STATE_SELECTED
else:
state = gtk.STATE_ACTIVE
else:
state = gtk.STATE_NORMAL
        # Read the padding up front: the hover offset computation further down
        # needs xpad/ypad even when IGNORE_PADDING makes us use the full
        # background area.
        xpad = self.props.xpad
        ypad = self.props.ypad
        if cell_wrapper.IGNORE_PADDING:
            area = background_area
        else:
            area = gtk.gdk.Rectangle(cell_area.x + xpad, cell_area.y + ypad,
                cell_area.width - xpad * 2, cell_area.height - ypad * 2)
context = drawing.DrawingContext(window, area, expose_area)
if (selected and not widget_wrapper.draws_selection and
widget_wrapper.use_custom_style):
# Draw the base color as our background. This erases the gradient
# that GTK draws for selected items.
window.draw_rectangle(widget.style.base_gc[state], True,
background_area.x, background_area.y,
background_area.width, background_area.height)
context.style = drawing.DrawingStyle(widget_wrapper,
use_base_color=True, state=state)
widget_wrapper.layout_manager.update_cairo_context(context.context)
hotspot_tracker = widget_wrapper.hotspot_tracker
if (hotspot_tracker and hotspot_tracker.hit and
hotspot_tracker.column == self.column and
hotspot_tracker.path == self.path):
hotspot = hotspot_tracker.name
else:
hotspot = None
if (self.path, self.column) == widget_wrapper.hover_info:
hover = widget_wrapper.hover_pos
hover = (hover[0] - xpad, hover[1] - ypad)
else:
hover = None
# NOTE: CustomCellRenderer.cell_data_func() sets up its attributes
# from the model itself, so we don't have to worry about setting them
# here.
widget_wrapper.layout_manager.reset()
cell_wrapper.render(context, widget_wrapper.layout_manager, selected,
hotspot, hover)
def on_activate(self, event, widget, path, background_area, cell_area,
flags):
pass
def on_start_editing(self, event, widget, path, background_area,
cell_area, flags):
pass
gobject.type_register(GTKCustomCellRenderer)
class CustomCellRenderer(object):
"""Customizable Cell Renderer
https://develop.participatoryculture.org/index.php/WidgetAPITableView"""
IGNORE_PADDING = False
def __init__(self):
self._renderer = GTKCustomCellRenderer()
self.want_hover = False
wrappermap.add(self._renderer, self)
def setup_attributes(self, column, attr_map):
column.set_cell_data_func(self._renderer, self.cell_data_func,
attr_map)
def cell_data_func(self, column, cell, model, iter, attr_map):
cell.column = column
cell.path = model.get_path(iter)
row = model[iter]
        # Set attributes on self instead of cell.  This works because cell is
        # just going to turn around and call our methods to do the rendering.
for name, index in attr_map.items():
setattr(self, name, row[index])
def hotspot_test(self, style, layout, x, y, width, height):
return None
class InfoListRenderer(CustomCellRenderer):
"""Custom Renderer for InfoListModels
https://develop.participatoryculture.org/index.php/WidgetAPITableView"""
def cell_data_func(self, column, cell, model, iter, attr_map):
self.info, self.attrs, self.group_info = \
wrappermap.wrapper(model).row_for_iter(iter)
cell.column = column
cell.path = model.get_path(iter)
class InfoListRendererText(CellRenderer):
"""Renderer for InfoListModels that only display text
https://develop.participatoryculture.org/index.php/WidgetAPITableView"""
def setup_attributes(self, column, attr_map):
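        # NOTE: relies on an `infolist` module (infolist.gtk) that is not imported
        # in this file; this renderer only works where infolist is available on the
        # import path.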
infolist.gtk.setup_text_cell_data_func(column, self._renderer,
self.get_value)
| rwaldron/mirovideoconverter3 | mvc/widgets/gtk/tableviewcells.py | Python | gpl-3.0 | 10,124 |
# Copyright 2019 Virgil Dupras
#
# This software is licensed under the "GPLv3" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.gnu.org/licenses/gpl-3.0.html
import csv
import logging
from core.util import stripfalse
from core.trans import tr
from ..const import AccountType
from ..exception import FileFormatError, FileLoadError
from . import base
class CsvField:
Date = 'date'
Description = 'description'
Payee = 'payee'
Checkno = 'checkno'
Transfer = 'transfer'
Amount = 'amount'
Increase = 'increase'
Decrease = 'decrease'
Currency = 'currency'
Reference = 'reference'
MERGABLE_FIELDS = {CsvField.Description, CsvField.Payee}
class Loader(base.Loader):
FILE_OPEN_MODE = 'rb'
def __init__(self, default_currency, default_date_format=None):
base.Loader.__init__(self, default_currency, default_date_format)
self.columns = []
self.lines = []
self.dialect = None # last used dialect
self.readcontent = b''
# --- Private
@staticmethod
def _merge_columns(columns, lines):
        # For any column that appears more than once, merge the data that goes with it
for field in MERGABLE_FIELDS:
indexes = [i for i, f in enumerate(columns) if f == field]
if len(indexes) <= 1:
continue
for line_index, line in enumerate(lines):
elems = [line[i] for i in indexes]
merged_data = ' '.join(elems)
new_line = line[:] # We don't want to touch original lines
new_line[indexes[0]] = merged_data
for index_to_remove in reversed(indexes[1:]):
del new_line[index_to_remove]
lines[line_index] = new_line
for index_to_remove in reversed(indexes[1:]):
del columns[index_to_remove]
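    # Illustrative example of the merge above: with columns
    # ['date', 'description', 'description'] and the line ['1/1/19', 'foo', 'bar'],
    # the two description cells become 'foo bar' and the duplicate column is dropped.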
def _prepare(self, infile):
# Comment lines can confuse the sniffer. We remove them
readcontent = infile.read()
content = readcontent.replace(b'\0', b'').decode('latin-1')
lines = content.split('\n')
stripped_lines = [line.strip() for line in lines]
stripped_lines = [line for line in stripped_lines if line and not line.startswith('#')]
try:
self.dialect = csv.Sniffer().sniff('\n'.join(stripped_lines))
except csv.Error:
            # The sniffer failed, let's manually try a couple of delimiters. We'll first
            # count how many times each delimiter occurs in the content and use the most
            # popular. Because we don't want to accept something obviously not-CSV as a
            # CSV, we'll have a minimal standard, that is 3 columns. To ensure that, we
            # could require the mean number of delimiters per line to be at least 2, but
            # headers and/or footers can have fewer, so to play on the safe side, we go
            # with 1.5.
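            # Illustrative: 4 lines containing 9 ';' in total give 9 / 4 = 2.25 >= 1.5,
            # so ';' would be accepted as the delimiter.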
DELIMITERS = set(';,\t|')
delim2count = {delim: content.count(delim) for delim in DELIMITERS}
delim, count = max(delim2count.items(), key=lambda x: x[1])
if count / len(lines) < 1.5:
raise FileFormatError()
class ManualDialect(csv.excel):
delimiter = delim
self.dialect = ManualDialect
self.readcontent = readcontent
def _scan_lines(self, encoding=None):
if not encoding:
encoding = 'latin-1'
content = self.readcontent.decode(encoding, 'ignore').replace('\0', '')
rawlines = content.splitlines()
try:
reader = csv.reader(iter(rawlines), self.dialect)
except TypeError:
logging.warning("Invalid Dialect (strangely...). Delimiter: %r", self.dialect.delimiter)
lines = stripfalse(reader)
# complete smaller lines and strip whitespaces
maxlen = max(len(line) for line in lines)
for line in (l for l in lines if len(l) < maxlen):
line += [''] * (maxlen - len(line))
self.lines = lines
def _parse_date_format(self, lines, ci):
date_index = ci[CsvField.Date]
lines_to_load = []
for line in lines:
line = line[:]
cleaned_str_date = base.clean_date(line[date_index])
if cleaned_str_date is None:
logging.warning('{0} is not a date. Ignoring line'.format(line[date_index]))
else:
line[date_index] = cleaned_str_date
lines_to_load.append(line)
del line
str_dates = [line[date_index] for line in lines_to_load]
date_format = self.guess_date_format(str_dates)
if date_format is None:
raise FileLoadError(tr("The Date column has been set on a column that doesn't contain dates."))
return date_format, lines_to_load
def _check_amount_values(self, lines, ci):
for line in lines:
for attr in [CsvField.Amount, CsvField.Increase, CsvField.Decrease]:
if attr not in ci:
continue
index = ci[attr]
value = line[index]
try:
base.parse_amount(value, self.default_currency)
except ValueError:
raise FileLoadError(tr("The Amount column has been set on a column that doesn't contain amounts."))
# --- Override
def _parse(self, infile):
self._prepare(infile)
self._scan_lines()
def _load(self):
lines = self.lines[:]
colcount = len(lines[0]) if lines else 0
columns = self.columns[:colcount]
self._merge_columns(columns, lines)
ci = {}
for index, field in enumerate(columns):
if field is not None:
ci[field] = index
hasdate = CsvField.Date in ci
hasamount = (CsvField.Amount in ci) or (CsvField.Increase in ci and CsvField.Decrease in ci)
if not (hasdate and hasamount):
raise FileLoadError(tr("The Date and Amount columns must be set."))
target_account = self.accounts.create(
'CSV Import', self.default_currency, AccountType.Asset)
self.parsing_date_format, lines_to_load = self._parse_date_format(lines, ci)
self._check_amount_values(lines_to_load, ci)
for line in lines_to_load:
info = base.TransactionInfo()
info.account = target_account.name
for attr, index in ci.items():
value = line[index]
if attr == CsvField.Date:
value = base.parse_date_str(value, self.parsing_date_format)
elif attr == CsvField.Increase:
attr = CsvField.Amount
elif attr == CsvField.Decrease:
attr = CsvField.Amount
if value.strip() and not value.startswith('-'):
value = '-' + value
if isinstance(value, str):
value = value.strip()
if value:
setattr(info, attr, value)
if info.is_valid():
txn = info.load(self.accounts)
self.transactions.add(txn)
# --- Public
def rescan(self, encoding=None):
self._scan_lines(encoding=encoding)
| brownnrl/moneyguru | core/loader/csv.py | Python | gpl-3.0 | 7,401 |
def activity01(num1):
'''Determine if an input number is Even or Odd'''
if (num1 % 2 == 0):
return 'Even'
else:
return 'Odd'
def activity02(iv_one, iv_two):
'''Return the sum of two input values'''
one_plus_two = iv_one + iv_two
return one_plus_two
def activity03(num_list):
'''Given a list of integers, count how many are even'''
even_count = 0
odd_count = 0
for number in num_list:
if (number % 2 == 0):
            # even_count = even_count + 1  (equivalent long form)
even_count += 1
else:
odd_count += 1
return even_count
def activity04(input_string):
'''Return the input string, backward'''
return input_string[::-1]
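# Illustrative usage (added for clarity; not part of the original activities):
#   activity01(7)             -> 'Odd'
#   activity02(2, 3)          -> 5
#   activity03([1, 2, 3, 4])  -> 2
#   activity04('python')      -> 'nohtyp'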
| Zekvan/Cybrary-PythonForSecurityProfessionals | Module 2 Apprentice Python/Solutions/Apprentice_Activities_Solution.py | Python | gpl-3.0 | 616 |
#!/usr/bin/env python
"""
Directed Graphs
===============
Default visualisation for a directed graph.
"""
import matplotlib.pyplot as plt
from netgraph import Graph
cube = [
(0, 1),
(1, 2),
(2, 3), # <- bidirectional edges
(3, 2), # <-
(3, 0),
(4, 5),
(5, 6), # <-
(6, 5), # <-
(6, 7),
(0, 4),
(7, 4),
(5, 1), # <-
(1, 5), # <-
(2, 6),
(3, 7)
]
Graph(cube, edge_width=2., arrows=True)
plt.show()
| paulbrodersen/netgraph | examples/plot_02_directed_network.py | Python | gpl-3.0 | 461 |
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import re
from django.conf import settings
from django.db.models import Q
from django.http import Http404, HttpResponseRedirect
from django.shortcuts import get_object_or_404
from funfactory.urlresolvers import reverse
from lib import l10n_utils
from rna.models import Release
from product_details import product_details
from bedrock.firefox.firefox_details import firefox_details
from bedrock.firefox.views import get_latest_version as firefox_get_latest_version
from bedrock.mozorg.decorators import cache_control_expires
from bedrock.mozorg.helpers.misc import releasenotes_url
from bedrock.mozorg.helpers.download_buttons import android_builds
from bedrock.thunderbird.utils import get_latest_version as thunderbird_get_latest_version
SUPPORT_URLS = {
'Firefox for Android': 'https://support.mozilla.org/products/mobile',
'Firefox OS': 'https://support.mozilla.org/products/firefox-os',
'Firefox': 'https://support.mozilla.org/products/firefox',
'Thunderbird': 'https://support.mozilla.org/products/thunderbird/',
}
def release_notes_template(channel, product, version=None):
if product == 'Firefox OS':
return 'firefox/releases/os-notes.html'
prefix = dict((c, c.lower()) for c in Release.CHANNELS)
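    # Illustrative mapping (assuming 'Beta' is one of Release.CHANNELS):
    # product='Firefox', channel='Beta' resolves below to
    # 'firefox/releases/beta-notes.html'.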
if product == 'Firefox' and channel == 'Aurora' and version >= 35:
return 'firefox/releases/dev-browser-notes.html'
dir = 'firefox'
if product == 'Thunderbird':
dir = 'thunderbird'
return ('{dir}/releases/{channel}-notes.html'
.format(dir=dir, channel=prefix.get(channel, 'release')))
def equivalent_release_url(release):
equivalent_release = (release.equivalent_android_release() or
release.equivalent_desktop_release())
if equivalent_release:
return releasenotes_url(equivalent_release)
def get_release_or_404(version, product):
if product == 'Firefox' and len(version.split('.')) == 3:
product_query = Q(product='Firefox') | Q(
product='Firefox Extended Support Release')
else:
product_query = Q(product=product)
release = get_object_or_404(Release, product_query, version=version)
if not release.is_public and not settings.DEV:
raise Http404
return release
def get_download_url(release):
if release.product == 'Thunderbird':
return 'https://www.mozilla.org/thunderbird/'
elif release.product == 'Firefox for Android':
return android_builds(release.channel)[0]['download_link']
else:
if release.channel == 'Aurora':
return reverse('firefox.channel') + '#aurora'
elif release.channel == 'Beta':
return reverse('firefox.channel') + '#beta'
else:
return reverse('firefox')
@cache_control_expires(1)
def release_notes(request, version, product='Firefox'):
if product == 'Firefox OS' and version in ('1.0.1', '1.1', '1.2'):
return l10n_utils.render(
request, 'firefox/os/notes-%s.html' % version)
try:
release = get_release_or_404(version, product)
except Http404:
release = get_release_or_404(version + 'beta', product)
return HttpResponseRedirect(releasenotes_url(release))
new_features, known_issues = release.notes(public_only=not settings.DEV)
return l10n_utils.render(
request, release_notes_template(release.channel, product,
int(release.major_version())), {
'version': version,
'download_url': get_download_url(release),
'support_url': SUPPORT_URLS.get(product, 'https://support.mozilla.org/'),
'release': release,
'equivalent_release_url': equivalent_release_url(release),
'new_features': new_features,
'known_issues': known_issues})
@cache_control_expires(1)
def system_requirements(request, version, product='Firefox'):
release = get_release_or_404(version, product)
dir = 'firefox'
if product == 'Thunderbird':
dir = 'thunderbird'
return l10n_utils.render(
request, '{dir}/releases/system_requirements.html'.format(dir=dir),
{'release': release, 'version': version})
def latest_notes(request, product='firefox', channel='release'):
if product == 'firefox' and channel == 'developer':
channel = 'aurora'
if product == 'thunderbird':
version = thunderbird_get_latest_version(product, channel)
else:
version = firefox_get_latest_version(product, channel)
if channel == 'beta':
version = re.sub(r'b\d+$', 'beta', version)
if channel == 'organizations':
version = re.sub(r'esr$', '', version)
dir = 'auroranotes' if channel == 'aurora' else 'releasenotes'
path = [product, version, dir]
locale = getattr(request, 'locale', None)
if locale:
path.insert(0, locale)
return HttpResponseRedirect('/' + '/'.join(path) + '/')
def latest_sysreq(request, channel, product):
if product == 'firefox' and channel == 'developer':
channel = 'aurora'
if product == 'thunderbird':
version = thunderbird_get_latest_version(product, channel)
else:
version = firefox_get_latest_version(product, channel)
if channel == 'beta':
version = re.sub(r'b\d+$', 'beta', version)
if channel == 'organizations':
version = re.sub(r'^(\d+).+', r'\1.0', version)
dir = 'system-requirements'
path = [product, version, dir]
locale = getattr(request, 'locale', None)
if locale:
path.insert(0, locale)
return HttpResponseRedirect('/' + '/'.join(path) + '/')
def releases_index(request, product):
releases = {}
if product == 'Firefox':
major_releases = firefox_details.firefox_history_major_releases
minor_releases = firefox_details.firefox_history_stability_releases
elif product == 'Thunderbird':
major_releases = product_details.thunderbird_history_major_releases
minor_releases = product_details.thunderbird_history_stability_releases
for release in major_releases:
major_version = float(re.findall(r'^\d+\.\d+', release)[0])
        # The version numbering scheme of Firefox changes sometimes. The second
        # number has not been used since Firefox 4, but was reintroduced with
        # Firefox ESR 24 (Bug 870540). On this index page, 24.1.x should fall
        # under 24.0. This pattern is the tricky part.
major_pattern = r'^' + \
re.escape(
('%s' if major_version < 4 else '%g') % round(major_version, 1))
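        # Illustrative: 24.0 yields r'^24' (so 24.1.x stability releases group
        # under the 24.0 entry), while 3.6 yields r'^3\.6'.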
releases[major_version] = {
'major': release,
'minor': sorted(filter(lambda x: re.findall(major_pattern, x),
minor_releases),
key=lambda x: int(re.findall(r'\d+$', x)[0]))
}
return l10n_utils.render(
request, '{product}/releases/index.html'.format(product=product.lower()),
{'releases': sorted(releases.items(), reverse=True)}
)
| ckprice/bedrock | bedrock/releasenotes/views.py | Python | mpl-2.0 | 7,250 |
# encoding: utf-8
#
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http:# mozilla.org/MPL/2.0/.
#
# Contact: Kyle Lahnakoski ([email protected])
#
from __future__ import absolute_import, division, unicode_literals
from jx_sqlite.sqlite import quote_list
from jx_base.expressions import InOp as InOp_
from jx_base.language import is_op
from jx_sqlite.expressions._utils import SQLang, check
from jx_sqlite.expressions.literal import Literal
from mo_dots import wrap
from mo_json import json2value
from mo_logs import Log
from mo_sql import SQL_FALSE, SQL_OR, sql_iso, ConcatSQL, SQL_IN
class InOp(InOp_):
@check
def to_sql(self, schema, not_null=False, boolean=False):
if not is_op(self.superset, Literal):
Log.error("Not supported")
j_value = json2value(self.superset.json)
if j_value:
var = SQLang[self.value].to_sql(schema)
sql = SQL_OR.join(
sql_iso(v, SQL_IN, quote_list(j_value))
for t, v in var[0].sql.items()
)
else:
sql = SQL_FALSE
return wrap([{"name": ".", "sql": {"b": sql}}])
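# Illustrative output (the column name is hypothetical): for a value column `a`
# and superset [1, 2, 3], the OR-joined fragment is roughly (a IN (1, 2, 3));
# an empty superset collapses to the SQL_FALSE branch above.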
| klahnakoski/ActiveData-ETL | vendor/jx_sqlite/expressions/in_op.py | Python | mpl-2.0 | 1,259 |
# coding=utf-8
from django.conf.urls import url
from feedback.views import redirect
urlpatterns = [
url(r'^redirect/$', redirect,
{'redirect_to': 'http://www.d120.de/'}),
url(r'^redirect/(?P<tail>.*)$', redirect,
{'redirect_to': 'http://www.d120.de/'}),
]
| d120/pyfeedback | src/feedback/tests/redirect_urls.py | Python | agpl-3.0 | 282 |
import json
import os
from flask import Flask, render_template, request, Response, redirect, url_for
from flask_mail import Mail, Message
from flask_bootstrap import Bootstrap
from flask_wtf import Form
from wtforms import StringField, SubmitField
from wtforms.widgets import TextInput
from wtforms.validators import Required
import logging
from logging.handlers import RotatingFileHandler
from logging import Formatter
from rq import Queue
from rq.job import Job
from worker import conn
try:
import configparser
except ImportError:
import ConfigParser as configparser
# from pyfaup.faup import Faup
from .proxied import ReverseProxied
from url_abuse_async import is_valid_url, url_list, dns_resolve, phish_query, psslcircl, \
vt_query_url, gsb_query, urlquery_query, sphinxsearch, whois, pdnscircl, bgpranking, \
cached, get_mail_sent, set_mail_sent, get_submissions, eupi
config_path = 'config.ini'
class AngularTextInput(TextInput):
def __call__(self, field, **kwargs):
kwargs['ng-model'] = 'input_url'
return super(AngularTextInput, self).__call__(field, **kwargs)
class URLForm(Form):
url = StringField('URL Field',
description='Enter the URL you want to lookup here.',
validators=[Required()], widget=AngularTextInput())
submit_button = SubmitField('Run lookup')
def make_dict(parser, section):
to_return = {}
entries = parser.items(section)
for k, v in entries:
to_return[k] = v.split(',')
return to_return
def prepare_auth():
if not os.path.exists('users.key'):
return None
to_return = {}
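    # users.key holds one "username=password" entry per line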
with open('users.key', 'r') as f:
for l in f:
l = l.strip()
user, password = l.split('=')
to_return[user] = password
return to_return
def create_app(configfile=None):
app = Flask(__name__)
handler = RotatingFileHandler('urlabuse.log', maxBytes=10000, backupCount=5)
handler.setFormatter(Formatter('%(asctime)s %(message)s'))
app.wsgi_app = ReverseProxied(app.wsgi_app)
app.logger.addHandler(handler)
app.logger.setLevel(logging.INFO)
Bootstrap(app)
q = Queue(connection=conn)
# Mail Config
app.config['MAIL_SERVER'] = 'localhost'
app.config['MAIL_PORT'] = 25
mail = Mail(app)
app.config['SECRET_KEY'] = 'devkey'
app.config['BOOTSTRAP_SERVE_LOCAL'] = True
app.config['configfile'] = config_path
parser = configparser.SafeConfigParser()
parser.read(app.config['configfile'])
replacelist = make_dict(parser, 'replacelist')
auth_users = prepare_auth()
ignorelist = [i.strip()
for i in parser.get('abuse', 'ignore').split('\n')
if len(i.strip()) > 0]
autosend_threshold = 5
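    # once a URL has been submitted this many times, the abuse report is mailed automatically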
def _get_user_ip(request):
ip = request.headers.get('X-Forwarded-For')
if ip is None:
ip = request.remote_addr
return ip
@app.route('/', methods=['GET', 'POST'])
def index():
form = URLForm()
return render_template('index.html', form=form)
@app.route('/urlreport', methods=['GET'])
def url_report():
return render_template('url-report.html')
@app.errorhandler(404)
def page_not_found(e):
ip = request.headers.get('X-Forwarded-For')
if ip is None:
ip = request.remote_addr
if request.path != '/_result/':
app.logger.info('404 of {} on {}'.format(ip, request.path))
return render_template('404.html'), 404
def authenticate():
"""Sends a 401 response that enables basic auth"""
return Response('Could not verify your access level for that URL.\n'
'You have to login with proper credentials', 401,
{'WWW-Authenticate': 'Basic realm="Login Required"'})
def check_auth(username, password):
"""This function is called to check if a username /
password combination is valid.
"""
if auth_users is None:
return False
else:
db_pass = auth_users.get(username)
return db_pass == password
@app.route('/login', methods=['GET', 'POST'])
def login():
auth = request.authorization
if not auth or not check_auth(auth.username, auth.password):
return authenticate()
return redirect(url_for('index'))
@app.route("/_result/<job_key>", methods=['GET'])
def check_valid(job_key):
if job_key is None:
return json.dumps(None), 200
job = Job.fetch(job_key, connection=conn)
if job.is_finished:
return json.dumps(job.result), 200
else:
return json.dumps("Nay!"), 202
@app.route('/start', methods=['POST'])
def run_query():
data = json.loads(request.data)
url = data["url"]
ip = _get_user_ip(request)
app.logger.info('{} {}'.format(ip, url))
if get_submissions(url) >= autosend_threshold:
send(url, '', True)
is_valid = q.enqueue_call(func=is_valid_url, args=(url,), result_ttl=500)
return is_valid.get_id()
@app.route('/urls', methods=['POST'])
def urls():
data = json.loads(request.data)
url = data["url"]
u = q.enqueue_call(func=url_list, args=(url,), result_ttl=500)
return u.get_id()
@app.route('/resolve', methods=['POST'])
def resolve():
data = json.loads(request.data)
url = data["url"]
u = q.enqueue_call(func=dns_resolve, args=(url,), result_ttl=500)
return u.get_id()
@app.route('/phishtank', methods=['POST'])
def phishtank():
data = json.loads(request.data)
if not os.path.exists('phishtank.key'):
return None
url = parser.get("PHISHTANK", "url")
key = open('phishtank.key', 'r').readline().strip()
query = data["query"]
u = q.enqueue_call(func=phish_query, args=(url, key, query,), result_ttl=500)
return u.get_id()
@app.route('/virustotal_report', methods=['POST'])
def vt():
data = json.loads(request.data)
if not os.path.exists('virustotal.key'):
return None
url = parser.get("VIRUSTOTAL", "url_report")
url_up = parser.get("VIRUSTOTAL", "url_upload")
key = open('virustotal.key', 'r').readline().strip()
query = data["query"]
u = q.enqueue_call(func=vt_query_url, args=(url, url_up, key, query,), result_ttl=500)
return u.get_id()
@app.route('/googlesafebrowsing', methods=['POST'])
def gsb():
data = json.loads(request.data)
if not os.path.exists('googlesafebrowsing.key'):
return None
url = parser.get("GOOGLESAFEBROWSING", "url")
key = open('googlesafebrowsing.key', 'r').readline().strip()
url = url.format(key)
query = data["query"]
u = q.enqueue_call(func=gsb_query, args=(url, query,), result_ttl=500)
return u.get_id()
@app.route('/urlquery', methods=['POST'])
def urlquery():
data = json.loads(request.data)
if not os.path.exists('urlquery.key'):
return None
url = parser.get("URLQUERY", "url")
key = open('urlquery.key', 'r').readline().strip()
query = data["query"]
u = q.enqueue_call(func=urlquery_query, args=(url, key, query,), result_ttl=500)
return u.get_id()
@app.route('/ticket', methods=['POST'])
def ticket():
if not request.authorization:
return ''
data = json.loads(request.data)
server = parser.get("SPHINX", "server")
port = int(parser.get("SPHINX", "port"))
url = parser.get("ITS", "url")
query = data["query"]
u = q.enqueue_call(func=sphinxsearch, args=(server, port, url, query,),
result_ttl=500)
return u.get_id()
@app.route('/whois', methods=['POST'])
def whoismail():
if not request.authorization:
return ''
server = parser.get("WHOIS", "server")
port = parser.getint("WHOIS", "port")
data = json.loads(request.data)
query = data["query"]
u = q.enqueue_call(func=whois, args=(server, port, query, ignorelist, replacelist),
result_ttl=500)
return u.get_id()
@app.route('/eupi', methods=['POST'])
def eu():
data = json.loads(request.data)
if not os.path.exists('eupi.key'):
return None
url = parser.get("EUPI", "url")
key = open('eupi.key', 'r').readline().strip()
query = data["query"]
u = q.enqueue_call(func=eupi, args=(url, key, query,), result_ttl=500)
return u.get_id()
@app.route('/pdnscircl', methods=['POST'])
def dnscircl():
url = parser.get("PDNS_CIRCL", "url")
user, password = open('pdnscircl.key', 'r').readlines()
data = json.loads(request.data)
query = data["query"]
u = q.enqueue_call(func=pdnscircl, args=(url, user.strip(), password.strip(),
query,), result_ttl=500)
return u.get_id()
@app.route('/bgpranking', methods=['POST'])
def bgpr():
data = json.loads(request.data)
query = data["query"]
u = q.enqueue_call(func=bgpranking, args=(query,), result_ttl=500)
return u.get_id()
@app.route('/psslcircl', methods=['POST'])
def sslcircl():
url = parser.get("PSSL_CIRCL", "url")
user, password = open('psslcircl.key', 'r').readlines()
data = json.loads(request.data)
query = data["query"]
u = q.enqueue_call(func=psslcircl, args=(url, user.strip(), password.strip(),
query,), result_ttl=500)
return u.get_id()
@app.route('/get_cache', methods=['POST'])
def get_cache():
data = json.loads(request.data)
url = data["query"]
data = cached(url)
dumped = json.dumps(data, sort_keys=True, indent=4, separators=(',', ': '))
return dumped
def digest(data):
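        # build a plain-text summary of the cached lookup results, collecting all abuse contact mails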
to_return = ''
all_mails = set()
for entry in data:
for url, info in list(entry.items()):
to_return += '\n{}\n'.format(url)
if info.get('whois'):
all_mails.update(info.get('whois'))
to_return += '\tContacts: {}\n'.format(', '.join(info.get('whois')))
if info.get('vt') and len(info.get('vt')) == 4:
vtstuff = info.get('vt')
to_return += '\t{} out of {} positive detections in VT - {}\n'.format(
vtstuff[2], vtstuff[3], vtstuff[1])
if info.get('gsb'):
to_return += '\tKnown as malicious on Google Safe Browsing: {}\n'.format(info.get('gsb'))
if info.get('phishtank'):
to_return += '\tKnown as malicious on PhishTank\n'
if info.get('dns'):
ipv4, ipv6 = info.get('dns')
if ipv4 is not None:
for ip in ipv4:
to_return += '\t' + ip + '\n'
data = info[ip]
if data.get('bgp'):
to_return += '\t\t(PTR: {}) is announced by {} ({}).\n'.format(*(data.get('bgp')[:3]))
if data.get('whois'):
all_mails.update(data.get('whois'))
to_return += '\t\tContacts: {}\n'.format(', '.join(data.get('whois')))
if ipv6 is not None:
for ip in ipv6:
to_return += '\t' + ip + '\n'
data = info[ip]
if data.get('whois'):
all_mails.update(data.get('whois'))
to_return += '\t\tContacts: {}\n'.format(', '.join(data.get('whois')))
to_return += '\tAll contacts: {}\n'.format(', '.join(all_mails))
return to_return
def send(url, ip='', autosend=False):
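        # mail the report only once per URL; autosend marks reports triggered by the submission threshold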
if not get_mail_sent(url):
set_mail_sent(url)
data = cached(url)
if not autosend:
subject = 'URL Abuse report from ' + ip
else:
subject = 'URL Abuse report sent automatically'
msg = Message(subject, sender='[email protected]', recipients=["[email protected]"])
msg.body = digest(data)
msg.body += '\n\n'
msg.body += json.dumps(data, sort_keys=True, indent=4, separators=(',', ': '))
mail.send(msg)
@app.route('/submit', methods=['POST'])
def send_mail():
data = json.loads(request.data)
url = data["url"]
if not get_mail_sent(url):
ip = _get_user_ip(request)
send(url, ip)
return redirect(url_for('index'))
return app
| xujun10110/url-abuse | web/__init__.py | Python | agpl-3.0 | 13,088 |
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2019 Compassion CH (http://www.compassion.ch)
# @author: Emanuel Cino <[email protected]>
#
# The licence is in the file __manifest__.py
#
##############################################################################
import os
from base64 import b64encode
from openupgradelib import openupgrade
@openupgrade.migrate(use_env=True)
def migrate(env, version):
if not version:
return
# Restore B2S correspondence layouts
cr = env.cr
cr.execute("""
UPDATE correspondence c
SET template_id = (
SELECT id FROM correspondence_template t
WHERE t.layout like CONCAT('%', c.b2s_layout, '%')
AND t.name like 'B2S - L%'
)
WHERE c.b2s_layout IS NOT NULL;
""")
# Restore template lang checkboxes
cr.execute("""
INSERT into correspondence_lang_checkbox_correspondence_template_rel
SELECT template_id, id
FROM correspondence_lang_checkbox
WHERE template_id is not null;
""")
# Restore template images and layout config
l1_1 = env.ref('sbc_compassion.s2b_l1_textbox_original').id
l1_2 = env.ref('sbc_compassion.s2b_l1_textbox_original2').id
l3_1_design = env.ref('sbc_compassion.s2b_l3_design').id
l3_2_text = env.ref('sbc_compassion.s2b_l3_textbox_original').id
header_box = env.ref('sbc_compassion.s2b_header_box').id
dir_path = os.path.dirname(os.path.realpath(__file__))
template_images = [
f for f in os.listdir(dir_path) if f.endswith('.jpeg')
]
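    # each bundled .jpeg is matched to the correspondence template whose name contains its basename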
for fname in template_images:
with open(os.path.join(dir_path, fname), 'r') as template_image:
data = b64encode(template_image.read())
template = env['correspondence.template'].search([
('name', 'like', fname.replace('.jpeg', ''))
])
p1_text_box_ids = [(4, l1_1)] if template.layout == 'CH-A-1S11-1'\
else False
p1_design = [(4, l3_1_design)] if template.layout == 'CH-A-3S01-1'\
else False
p2_text_box_ids = [(4, l1_2 if template.layout == 'CH-A-1S11-1'
else l3_2_text)]
template.write({
'page_ids': [
(0, 0, {
'background': data,
'text_box_ids': p1_text_box_ids,
'image_box_ids': p1_design,
'header_box_id': header_box
}),
(0, 0, {
'text_box_ids': p2_text_box_ids,
'page_index': 2
})
],
})
| eicher31/compassion-modules | sbc_compassion/migrations/10.0.1.5.0/post-migration.py | Python | agpl-3.0 | 2,776 |
from reversion import revisions
from judge.models.choices import TIMEZONE, ACE_THEMES, MATH_ENGINES_CHOICES, EFFECTIVE_MATH_ENGINES
from judge.models.comment import Comment, CommentVote
from judge.models.contest import Contest, ContestTag, ContestParticipation, ContestProblem, ContestSubmission, Rating
from judge.models.interface import MiscConfig, validate_regex, NavigationBar, BlogPost
from judge.models.message import PrivateMessage, PrivateMessageThread
from judge.models.problem import ProblemGroup, ProblemType, Problem, ProblemClarification, ProblemTranslation, \
TranslatedProblemQuerySet, TranslatedProblemForeignKeyQuerySet, License, LanguageLimit, Solution
from judge.models.problem_data import problem_data_storage, problem_directory_file, ProblemData, ProblemTestCase, \
CHECKERS
from judge.models.profile import Profile, Organization, OrganizationRequest
from judge.models.runtime import Language, RuntimeVersion, Judge
from judge.models.submission import SUBMISSION_RESULT, Submission, SubmissionTestCase
from judge.models.ticket import Ticket, TicketMessage
revisions.register(Profile, exclude=['points', 'last_access', 'ip', 'rating'])
revisions.register(Problem, follow=['language_limits'])
revisions.register(LanguageLimit)
revisions.register(Contest, follow=['contest_problems'])
revisions.register(ContestProblem)
revisions.register(Organization)
revisions.register(BlogPost)
revisions.register(Solution)
revisions.register(Judge, fields=['name', 'created', 'auth_key', 'description'])
revisions.register(Language)
revisions.register(Comment, fields=['author', 'time', 'page', 'score', 'title', 'body', 'hidden', 'parent'])
del revisions
| monouno/site | judge/models/__init__.py | Python | agpl-3.0 | 1,672 |
# -*- coding: utf-8 -*-
# (c) 2017 Alfredo de la Fuente - AvanzOSC
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from . import test_product_supplierinfo_import
| esthermm/odoo-addons | product_supplierinfo_import/tests/__init__.py | Python | agpl-3.0 | 179 |
# © 2016 Antiun Ingeniería S.L. - Jairo Llopis
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
from . import partner_merge
| OCA/partner-contact | partner_deduplicate_by_website/wizards/__init__.py | Python | agpl-3.0 | 148 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-03-29 12:54
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('wagtailcore', '0032_add_bulk_delete_page_permission'),
('wagtailimages', '0018_remove_rendition_filter'),
]
operations = [
migrations.CreateModel(
name='Logo',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('category', models.CharField(choices=[('committee', 'Committee'), ('section', 'Section')], max_length=20, verbose_name='category')),
('link', models.URLField(verbose_name='links to')),
('belongs_to', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='wagtailcore.Site', verbose_name='belongs to')),
('logo', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image', verbose_name='logo')),
('logo_black', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image', verbose_name='black logo')),
('logo_white', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image', verbose_name='white logo')),
],
options={
'verbose_name_plural': 'logos',
},
),
migrations.CreateModel(
name='SocialMediaSettings',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('facebook', models.URLField(blank=True, help_text='Your Facebook page URL')),
('instagram', models.CharField(blank=True, help_text='Your Instagram username, without the @', max_length=255)),
('twitter', models.CharField(blank=True, help_text='Your Twitter username, without the @', max_length=255)),
('site', models.OneToOneField(editable=False, on_delete=django.db.models.deletion.CASCADE, to='wagtailcore.Site')),
],
options={
'verbose_name': 'Social media accounts',
},
),
]
| UTNkar/moore | src/branding/migrations/0001_initial.py | Python | agpl-3.0 | 2,489 |
"""Tests for the principal package."""
from pyramid import testing
from pytest import fixture
from pytest import mark
from unittest.mock import Mock
import pytest
import unittest
@fixture
def integration(integration):
integration.include('pyramid_mailer.testing')
integration.include('pyramid_mako')
integration.include('adhocracy_core.changelog')
integration.include('adhocracy_core.messaging')
return integration
@fixture
def mock_is_older(monkeypatch):
from adhocracy_core.sheets.metadata import is_older_than
from . import principal
mock = Mock(spec=is_older_than)
monkeypatch.setattr(principal, 'is_older_than', mock)
return mock
@fixture
def principals(pool_with_catalogs, registry):
from adhocracy_core.resources.principal import IPrincipalsService
inst = registry.content.create(IPrincipalsService.__identifier__,
parent=pool_with_catalogs)
return inst
@fixture
def mock_is_anonymize(mocker):
return mocker.patch('adhocracy_core.resources.principal.is_marked_anonymize')
class TestPrincipalsService:
@fixture
def meta(self):
from .principal import principals_meta
return principals_meta
def test_meta(self, meta, context):
from adhocracy_core import sheets
from . import badge
from . import principal
assert meta.iresource is principal.IPrincipalsService
assert meta.permission_create == 'create_service'
assert meta.content_name == 'principals'
assert meta.extended_sheets == \
(sheets.badge.IHasBadgesPool,)
assert badge.add_badges_service in meta.after_creation
@mark.usefixtures('integration')
def test_create(self, meta, registry, pool):
resource = registry.content.create(meta.iresource.__identifier__,
parent=pool)
assert meta.iresource.providedBy(resource)
@mark.usefixtures('integration')
def test_register_services(self, meta, registry, pool):
from substanced.util import find_service
registry.content.create(meta.iresource.__identifier__, parent=pool)
assert find_service(pool, 'principals', 'users')
assert find_service(pool, 'principals', 'groups')
assert find_service(pool, 'principals', 'resets')
class TestUsers:
@fixture
def meta(self):
from .principal import users_meta
return users_meta
def test_meta(self, meta):
from . import badge
from . import asset
from . import principal
from adhocracy_core import sheets
assert meta.iresource is principal.IUsersService
assert meta.permission_create == 'create_service'
assert meta.content_name == 'users'
assert meta.sdi_column_mapper == principal.sdi_user_columns
assert sheets.asset.IHasAssetPool in meta.extended_sheets
assert badge.add_badge_assignments_service in meta.after_creation
assert asset.add_assets_service in meta.after_creation
assert principal.allow_create_asset_authenticated in meta.after_creation
@mark.usefixtures('integration')
def test_create(self, meta, registry):
resource = registry.content.create(meta.iresource.__identifier__)
assert meta.iresource.providedBy(resource)
def test_create_asset_permission(context, registry, mocker):
from . import principal
from .principal import allow_create_asset_authenticated
set_acl = mocker.spy(principal, 'set_acl')
allow_create_asset_authenticated(context, registry, {})
set_acl.assert_called_with(context,
[('Allow', 'system.Authenticated', 'create_asset')],
registry=registry)
class TestSdiUserColums:
@fixture
def request_(self, context, registry_with_content):
request = testing.DummyRequest(context=context)
request.registry = registry_with_content
return request
def call_fut(self, *args, **kwargs):
from .principal import sdi_user_columns
return sdi_user_columns(*args, **kwargs)
def test_sdi_user_columns_none_user(self, request_):
context = testing.DummyResource()
result = self.call_fut(None, context, request_, [])
assert result == [
{'name': 'User','value': ''},
{'name': 'Email','value': ''},
]
def test_sdi_user_columns_user(self, request_):
from .principal import IUser
context = testing.DummyResource(__provides__=IUser)
mock_get_sheet_field = Mock()
mock_get_sheet_field.side_effect = ['Admin', '[email protected]']
request_.registry.content.get_sheet_field = mock_get_sheet_field
result = self.call_fut(None, context, request_, [])
assert result == [
{'name': 'User','value': 'Admin'},
{'name': 'Email','value': '[email protected]'},
]
class TestUser:
@fixture
def meta(self):
from .principal import user_meta
return user_meta
def test_meta(self, meta):
from . import principal
import adhocracy_core.sheets
assert meta.iresource is principal.IUser
assert meta.content_class == principal.User # TODO do we really need this class?
assert meta.permission_create == 'create_user'
assert meta.is_implicit_addable is False
assert meta.basic_sheets == (adhocracy_core.sheets.principal.IUserBasic,
adhocracy_core.sheets.principal.IUserExtended,
adhocracy_core.sheets.description.IDescription,
adhocracy_core.sheets.principal.ICaptcha,
adhocracy_core.sheets.principal.IPermissions,
adhocracy_core.sheets.metadata.IMetadata,
adhocracy_core.sheets.pool.IPool,
adhocracy_core.sheets.principal.IEmailNew,
)
assert meta.extended_sheets == \
(adhocracy_core.sheets.principal.IPasswordAuthentication,
adhocracy_core.sheets.principal.IActivationConfiguration,
adhocracy_core.sheets.principal.IAnonymizeDefault,
adhocracy_core.sheets.rate.ICanRate,
adhocracy_core.sheets.badge.ICanBadge,
adhocracy_core.sheets.badge.IBadgeable,
adhocracy_core.sheets.image.IImageReference,
adhocracy_core.sheets.notification.INotification,
adhocracy_core.sheets.principal.IServiceKonto,
adhocracy_core.sheets.principal.IServiceKontoSettings,
)
assert meta.element_types == ()
assert meta.use_autonaming is True
@mark.usefixtures('integration')
def test_create(self, meta, registry, principals):
from pytz import timezone
from zope.interface.verify import verifyObject
from adhocracy_core import sheets
appstructs = {
sheets.principal.IUserBasic.__identifier__ : {
'name': 'Anna Müller',
},
sheets.principal.IPasswordAuthentication.__identifier__ : {
'password': 'fodThyd2'
},
}
user = registry.content.create(meta.iresource.__identifier__,
parent=principals['users'],
appstructs=appstructs)
assert principals['users']['0000000'] is user
assert meta.iresource.providedBy(user)
assert verifyObject(meta.iresource, user)
assert user.email == ''
assert user.password.startswith('$2')
assert user.tzname == 'UTC'
assert user.roles == []
assert user.timezone == timezone(user.tzname)
assert user.is_password_valid(registry, '123456') == False
assert user.is_password_valid(registry, 'fodThyd2') == True
assert user.has_new_email_pending() == False
@mark.usefixtures('integration')
def test_user_with_new_email(self, meta, registry, principals):
from pytz import timezone
from zope.interface.verify import verifyObject
from adhocracy_core import sheets
appstructs = {
sheets.principal.IUserBasic.__identifier__ : {
'name': 'Anna Müller',
},
sheets.principal.IUserExtended.__identifier__ : {
'email': '[email protected]',
},
sheets.principal.IEmailNew.__identifier__ : {
'email': '[email protected]',
},
}
user = registry.content.create(meta.iresource.__identifier__,
parent=principals['users'],
appstructs=appstructs)
assert user.has_new_email_pending() == True
user.activate_new_email()
assert user.has_new_email_pending() == False
        assert user.email == '[email protected]'
class TestSystemUser:
@fixture
def meta(self):
from .principal import system_user_meta
return system_user_meta
def test_meta(self, meta):
from . import principal
import adhocracy_core.sheets
assert meta.iresource is principal.ISystemUser
assert issubclass(principal.ISystemUser, principal.IUser)
assert meta.content_class == principal.User
assert meta.permission_create == 'create_system_user'
assert meta.is_implicit_addable is False
assert meta.basic_sheets == principal.user_meta.basic_sheets
assert meta.extended_sheets == \
(adhocracy_core.sheets.rate.ICanRate,
adhocracy_core.sheets.principal.IActivationConfiguration,
adhocracy_core.sheets.badge.ICanBadge,
adhocracy_core.sheets.badge.IBadgeable,
adhocracy_core.sheets.image.IImageReference,
adhocracy_core.sheets.notification.INotification,
)
assert meta.element_types == ()
assert meta.use_autonaming is True
assert meta.is_sdi_addable is False
@mark.usefixtures('integration')
def test_create(self, meta, registry, principals):
user = registry.content.create(meta.iresource.__identifier__,
parent=principals['users'])
assert principals['users']['0000000'] is user
assert meta.iresource.providedBy(user)
class TestGroups:
@fixture
def meta(self):
from .principal import groups_meta
return groups_meta
def test_meta(self, meta):
from . import principal
assert meta.iresource is principal.IGroupsService
assert meta.permission_create == 'create_service'
assert meta.content_name == 'groups'
@mark.usefixtures('integration')
def test_create(self, meta, registry):
resource = registry.content.create(meta.iresource.__identifier__)
assert meta.iresource.providedBy(resource)
class TestGroup:
@fixture
def meta(self):
from .principal import group_meta
return group_meta
def test_meta(self, meta):
from . import principal
assert meta.iresource is principal.IGroup
assert meta.content_class == principal.Group
assert meta.permission_create == 'create_group'
assert meta.is_implicit_addable is False
assert meta.element_types == ()
@mark.usefixtures('integration')
def test_create(self, meta, registry):
from zope.interface.verify import verifyObject
resource = registry.content.create(meta.iresource.__identifier__)
assert meta.iresource.providedBy(resource)
assert verifyObject(meta.iresource, resource)
assert resource.roles == []
@mark.usefixtures('integration')
def test_create_and_add_group(self, registry, principals):
from . import principal
from adhocracy_core import sheets
appstructs = {sheets.name.IName.__identifier__: {'name': 'Group1'},
sheets.principal.IGroup.__identifier__:
{'roles': ['reader']}}
group = registry.content.create(principal.IGroup.__identifier__,
parent=principals['groups'],
appstructs=appstructs)
appstructs = {sheets.principal.IPermissions.__identifier__:
{'groups': [group]}}
user = registry.content.create(principal.IUser.__identifier__,
parent=principals['users'],
appstructs=appstructs)
user.activate()
group_sheet = registry.content.get_sheet(group, sheets.principal.IGroup)
assert principals['groups']['Group1'] is group
assert group_sheet.get()['users'] == [user]
assert group_sheet.get()['roles'] == ['reader']
class TestPasswordResets:
@fixture
def meta(self):
from .principal import passwordresets_meta
return passwordresets_meta
def test_meta(self, meta):
from . import principal
assert meta.iresource is principal.IPasswordResetsService
assert meta.permission_create == 'create_service'
assert meta.content_name == 'resets'
@mark.usefixtures('integration')
def test_create(self, meta, registry):
resource = registry.content.create(meta.iresource.__identifier__)
assert meta.iresource.providedBy(resource)
@mark.usefixtures('integration')
def test_remove_view_permission(self, meta, registry):
from pyramid.authorization import Deny
from pyramid.authentication import Everyone
from adhocracy_core.authorization import get_acl
resource = registry.content.create(meta.iresource.__identifier__)
acl = get_acl(resource)
assert acl == [(Deny, Everyone, 'view')]
@mark.usefixtures('integration')
def test_hide(self, meta, registry):
"""Even if view permission is not checked, we don't want to expose
password resets to the client. So in addition we hide them."""
resource = registry.content.create(meta.iresource.__identifier__)
assert resource.hidden
class TestPasswordReset:
@fixture
def meta(self):
from .principal import passwordreset_meta
return passwordreset_meta
def test_meta(self, meta):
from . import principal
import adhocracy_core.sheets
assert meta.iresource is principal.IPasswordReset
assert meta.permission_create == 'create_password_reset'
assert meta.use_autonaming_random
assert meta.basic_sheets == (adhocracy_core.sheets.metadata.IMetadata,)
@mark.usefixtures('integration')
def test_create(self, meta, registry, pool):
from zope.interface.verify import verifyObject
from .principal import IPasswordReset
resource = registry.content.create(meta.iresource.__identifier__)
assert IPasswordReset.providedBy(resource)
assert verifyObject(IPasswordReset, resource)
@mark.usefixtures('integration')
def test_reset_password(self, registry, principals):
from . import principal
user = registry.content.create(principal.IUser.__identifier__,
parent=principals['users'],
appstructs={})
reset = registry.content.create(principal.IPasswordReset.__identifier__,
parent=principals['resets'],
creator=user)
old_password = user.password
reset.reset_password('new_password')
new_password = user.password
assert old_password != new_password
@mark.usefixtures('integration')
def test_suicide_after_reset_password(self, registry, principals):
from . import principal
user = registry.content.create(principal.IUser.__identifier__,
parent=principals['users'],
appstructs={})
reset = registry.content.create(principal.IPasswordReset.__identifier__,
parent=principals['resets'],
creator=user)
reset.reset_password('new_password')
assert reset.__parent__ is None
@mark.usefixtures('integration')
def test_activate_after_reset_password(self, registry, principals):
from . import principal
user = registry.content.create(principal.IUser.__identifier__,
parent=principals['users'],
appstructs={})
reset = registry.content.create(principal.IPasswordReset.__identifier__,
parent=principals['resets'],
creator=user)
reset.reset_password('new_password')
assert user.active
@mark.usefixtures('integration')
def test_hide(self, meta, registry):
"""Even if view permission is not checked, we don't want to expose
password resets to the client. So in addition we hide them."""
resource = registry.content.create(meta.iresource.__identifier__)
assert resource.hidden
class TestUserLocatorAdapter:
@fixture
def mock_catalogs(self, monkeypatch, mock_catalogs) -> Mock:
from . import principal
monkeypatch.setattr(principal, 'find_service',
lambda x, y: mock_catalogs)
return mock_catalogs
@fixture
def context(self, pool, service):
from copy import deepcopy
pool['principals'] = service
pool['principals']['users'] = deepcopy(service)
return pool
@fixture
def request_(self, context, registry_with_content):
request = testing.DummyRequest(context=context)
request.registry = registry_with_content
return request
@fixture
def inst(self, context, request_):
from .principal import UserLocatorAdapter
return UserLocatorAdapter(context, request_)
def test_create(self, inst):
from adhocracy_core.interfaces import IRolesUserLocator
from zope.interface.verify import verifyObject
assert IRolesUserLocator.providedBy(inst)
assert verifyObject(IRolesUserLocator, inst)
def test_get_users(self, inst, context):
from .principal import IUser
user = testing.DummyResource(__provides__=IUser)
context['principals']['users']['user'] = user
context['principals']['users']['other'] = testing.DummyResource()
result = inst.get_users()
assert list(result) == [user]
def test_get_user_by_email_user_exists(self, inst, mock_catalogs,
search_result, query):
user = testing.DummyResource()
mock_catalogs.search.return_value = search_result._replace(
elements=[user])
assert inst.get_user_by_email('[email protected]') is user
assert mock_catalogs.search.call_args[0][0] == query._replace(
indexes={'private_user_email': '[email protected]'},
resolve=True,
)
def test_get_user_by_email_two_identical_user_exists(self, inst, mock_catalogs,
search_result, query):
user = testing.DummyResource()
user2 = testing.DummyResource()
mock_catalogs.search.return_value = search_result._replace(
elements=[user, user2])
with pytest.raises(ValueError):
inst.get_user_by_email('[email protected]')
def test_get_user_by_email_user_not_exists(self, inst, mock_catalogs):
assert inst.get_user_by_email('[email protected]') is None
assert mock_catalogs.search.called
def test_get_user_by_login_user_exists(self, inst, mock_catalogs,
search_result, query):
user = testing.DummyResource()
mock_catalogs.search.return_value = search_result._replace(
elements=[user])
assert inst.get_user_by_login('login name') is user
assert mock_catalogs.search.call_args[0][0] == query._replace(
indexes={'user_name': 'login name'},
resolve=True,
)
def test_get_user_by_login_user_not_exists(self, inst, mock_catalogs):
assert inst.get_user_by_login('wrong login name') is None
assert mock_catalogs.search.called
def test_get_user_by_activation_path_user_exists(self, inst, mock_catalogs,
search_result, query):
user = testing.DummyResource()
mock_catalogs.search.return_value = search_result._replace(
elements=[user])
assert inst.get_user_by_activation_path('/activate/foo') is user
assert mock_catalogs.search.call_args[0][0] == query._replace(
indexes={'private_user_activation_path': '/activate/foo'},
resolve=True,
)
def test_get_user_by_activation_path_user_not_exists(self, inst,
mock_catalogs):
assert inst.get_user_by_activation_path('/activate/no_such_link') is None
assert mock_catalogs.search.called
def test_get_user_by_userid_user_exists(self, context, inst):
user = testing.DummyResource()
context['principals']['users']['User1'] = user
assert inst.get_user_by_userid('/principals/users/User1') is user
def test_get_user_by_userid_user_not_exists(self, inst):
assert inst.get_user_by_userid('/principals/users/User1') is None
def test_get_user_by_service_konto_userid_user_exists(self, inst,
mock_catalogs, search_result, query):
user = testing.DummyResource()
mock_catalogs.search.return_value = search_result._replace(
elements=[user])
assert inst.get_user_by_service_konto_userid('123') is user
assert mock_catalogs.search.call_args[0][0] == query._replace(
indexes={'private_service_konto_userid': '123'},
resolve=True,
)
def test_get_user_by_service_konto_userid_user_not_exists(self, inst,
mock_catalogs):
assert inst.get_user_by_service_konto_userid('312') is None
assert mock_catalogs.search.called
def test_get_groupids_user_exists(self, context, mock_sheet, request_, inst):
from adhocracy_core.sheets.principal import IPermissions
from adhocracy_core.testing import register_sheet
group = testing.DummyResource(__name__='group1')
mock_sheet.meta = mock_sheet.meta._replace(isheet=IPermissions)
mock_sheet.get.return_value = {'groups': [group]}
user = testing.DummyResource()
register_sheet(user, mock_sheet, request_.registry)
context['principals']['users']['User1'] = user
assert inst.get_groupids('/principals/users/User1') == ['group:group1']
def test_get_groupids_user_not_exists(self, inst):
assert inst.get_groupids('/principals/users/User1') is None
def test_get_role_and_group_role_ids_user_exists(self, context, inst):
inst.get_user_by_userid = Mock()
inst.get_user_by_userid.return_value = context
inst.get_roleids = Mock()
inst.get_roleids.return_value = ['role:admin']
inst.get_group_roleids = Mock()
inst.get_group_roleids.return_value = ['role:reader']
assert inst.get_role_and_group_roleids('/principals/users/User1') ==\
['role:admin', 'role:reader']
def test_get_role_and_group_roleids_user_not_exists(self, inst):
assert inst.get_role_and_group_roleids('/principals/users/User1') is None
def test_get_group_roleids_user_exists(self, inst, context, mock_sheet, request_,
):
from adhocracy_core.sheets.principal import IPermissions
from adhocracy_core.testing import register_sheet
group = testing.DummyResource(__name__='group1', roles=[])
user = testing.DummyResource()
mock_sheet.meta = mock_sheet.meta._replace(isheet=IPermissions)
mock_sheet.get.return_value = {'groups': [group]}
register_sheet(user, mock_sheet, request_.registry)
group.roles = ['role1']
context['principals']['users']['User1'] = user
assert inst.get_group_roleids('/principals/users/User1') == ['role:role1']
def test_get_group_roleids_user_not_exists(self, inst):
assert inst.get_group_roleids('/principals/users/User1') is None
def test_get_roleids_user_exists(self, context, mock_sheet, request_, inst):
from adhocracy_core.testing import register_sheet
user = testing.DummyResource(roles=['role1'])
register_sheet(user, mock_sheet, request_.registry)
context['principals']['users']['User1'] = user
assert inst.get_roleids('/principals/users/User1') == ['role:role1']
def test_get_roleids_user_not_exists(self, inst):
assert inst.get_roleids('/principals/users/User1') is None
class UserLocatorAdapterIntegrationTest(unittest.TestCase):
def setUp(self):
self.config = testing.setUp()
self.config.include('adhocracy_core.content')
self.config.include('adhocracy_core.resources.principal')
self.context = testing.DummyResource()
self.registry = self.config.registry
def tearDown(self):
testing.tearDown()
def test_create(self):
from adhocracy_core.interfaces import IRolesUserLocator
assert self.registry.getMultiAdapter(
(self.context, testing.DummyRequest), IRolesUserLocator)
class TestGroupsAndRolesFinder:
@fixture
def request(self, context, registry):
request = testing.DummyRequest(context=context)
request.registry = registry
return request
def call_fut(self, userid, request):
from adhocracy_core.resources.principal import groups_and_roles_finder
return groups_and_roles_finder(userid, request)
def test_userid_wrong(self, request, mock_user_locator):
assert self.call_fut('WRONG', request) == None
assert mock_user_locator.get_groupids.call_args[0] == ('WRONG',)
assert mock_user_locator.get_role_and_group_roleids.call_args[0] == ('WRONG',)
def test_userid_with_roles(self, request, mock_user_locator):
mock_user_locator.get_role_and_group_roleids.return_value = ['role:reader']
assert self.call_fut('userid', request) == ['role:reader']
def test_userid_with_groups_and_group_roles(self, request, mock_user_locator):
mock_user_locator.get_role_and_group_roleids.return_value = ['group:Readers']
assert self.call_fut('userid', request) == ['group:Readers']
class TestDeleteNotActiveUsers:
@fixture
def request_(self, context, registry):
request = testing.DummyRequest(context=context)
request.registry = registry
return request
@fixture
def user(self):
user = testing.DummyResource(active=False, email='', name='')
return user
@fixture
def users(self, service, user):
service['user'] = user
return service
def call_fut(self, *args):
from .principal import delete_not_activated_users
return delete_not_activated_users(*args)
def test_delete_not_active_users_older_then_days(
self, users, user, request_, mock_is_older, mock_user_locator):
mock_is_older.return_value = True
mock_user_locator.get_users.return_value = [user]
self.call_fut(request_, 7)
mock_is_older.assert_called_with(user, 7)
assert 'user' not in users
def test_ignore_not_active_user_younger_then_days(
self, users, user, request_, mock_is_older, mock_user_locator):
mock_is_older.return_value = False
mock_user_locator.get_users.return_value = [user]
self.call_fut(request_, 7)
assert 'user' in users
def test_ignore_active_user(
self, users, user, request_, mock_user_locator):
user.active = True
mock_user_locator.get_users.return_value = [user]
self.call_fut(request_, 7)
assert 'user' in users
class TestDeletePasswordResets:
@fixture
def reset(self):
reset = testing.DummyResource()
return reset
@fixture
def resets(self, reset, service, monkeypatch):
from . import principal
service['reset'] = reset
monkeypatch.setattr(principal, 'find_service', lambda x, y, z: service)
return service
def call_fut(self, *args):
from .principal import delete_password_resets
return delete_password_resets(*args)
def test_delete_resets_older_then_days(
self, resets, reset, request_, mock_is_older):
mock_is_older.return_value = True
self.call_fut(request_, 7)
mock_is_older.assert_called_with(reset, 7)
assert 'reset' not in resets
def test_ignore_resets_younger_then_days(
self, resets, reset, request_, mock_is_older):
mock_is_older.return_value = False
self.call_fut(request_, 7)
assert 'reset' in resets
class TestGetUserOrAnonymous:
def call_fut(self, *args):
from .principal import get_user_or_anonymous
return get_user_or_anonymous(*args)
def test_return_none_if_no_authenticated_user(self, request_):
request_.authenticated_userid = None
assert self.call_fut(request_) is None
def test_return_none_if_user_no_locator(self, request_):
request_.authenticated_userid = 'userid'
assert self.call_fut(request_) is None
def test_return_authenticated_user_if_no_anonymize(
self, request_, mock_user_locator, mock_is_anonymize):
user = testing.DummyResource()
mock_is_anonymize.return_value = False
request_.authenticated_userid = 'userid'
mock_user_locator.get_user_by_userid.return_value = user
assert self.call_fut(request_) == user
mock_is_anonymize.assert_called_with(request_)
def test_return_anonymous_user_if_anonymize(
self, request_, mock_user_locator, mock_is_anonymize):
user = testing.DummyResource()
mock_is_anonymize.return_value = True
request_.registry['config'].adhocracy.anonymous_user = 'anonymous_'
mock_user_locator.get_user_by_login.return_value = user
assert self.call_fut(request_) == user
class TestGetAnonymizedUser:
def call_fut(self, *args):
from .principal import get_anonymized_user
return get_anonymized_user(*args)
def test_return_none_if_no_anonymize_request(self, request_,
mock_is_anonymize):
request_.authenticated_userid = 'userid'
mock_is_anonymize.return_value = None
assert self.call_fut(request_) is None
mock_is_anonymize.assert_called_with(request_)
def test_return_none_if_anonymize_request_but_no_user(
self, request_, mock_is_anonymize):
request_.authenticated_userid = None
mock_is_anonymize.return_value = True
assert self.call_fut(request_) is None
def test_return_authenticated_user_if_anonymize_request(
self, request_, mock_user_locator, mock_is_anonymize):
user = testing.DummyResource()
request_.authenticated_userid = 'userid'
mock_is_anonymize.return_value = True
mock_user_locator.get_user_by_userid.return_value = user
assert self.call_fut(request_) == user
| liqd/adhocracy3.mercator | src/adhocracy_core/adhocracy_core/resources/test_principal.py | Python | agpl-3.0 | 31,954 |
# coding: utf-8
from .models import AppCoffees
from boites.views import AppCreateView, AppUpdateView, AppDeleteView
class AppCoffeesCreateView(AppCreateView):
model = AppCoffees
fields = ['url', 'uid']
class AppCoffeesUpdateView(AppUpdateView):
model = AppCoffees
fields = ['url', 'uid', 'enabled']
class AppCoffeesDeleteView(AppDeleteView):
model = AppCoffees
| bgaultier/laboitepro | laboite/apps/coffees/views.py | Python | agpl-3.0 | 387 |
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
import datetime
import pytest
from shuup.simple_cms.views import PageView
from shuup.testing.factories import get_default_shop
from shuup.testing.utils import apply_request_middleware
from shuup_tests.simple_cms.utils import create_page
def check_children_content(request, page, children_content, children_visibility):
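    # render the page view and assert whether the child page content is listed on it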
view_func = PageView.as_view()
response = view_func(request, url=page.url)
response.render()
assert page.get_html() in response.rendered_content
assert bool(children_content in response.rendered_content) == children_visibility
@pytest.mark.django_db
def test_visible_children(rf):
get_default_shop()
request = apply_request_middleware(rf.get("/"))
assert request.user.is_anonymous()
parent_content = "Parent content"
page = create_page(available_from=datetime.date(1988, 1, 1), content=parent_content)
children_content = "Children content"
create_page(available_from=datetime.date(2000, 1, 1), content=children_content, parent=page) # Visible child
assert page.list_children_on_page == False
check_children_content(request, page, children_content, False)
page.list_children_on_page = True
page.save()
check_children_content(request, page, children_content, True)
@pytest.mark.django_db
def test_invisible_children(rf):
get_default_shop()
request = apply_request_middleware(rf.get("/"))
parent_content = "Parent content"
page = create_page(available_from=datetime.date(1988, 1, 1), content=parent_content)
children_content = "Children content"
create_page(content=children_content, parent=page) # Create invisible children
assert page.list_children_on_page == False
check_children_content(request, page, children_content, False)
page.list_children_on_page = True
page.save()
check_children_content(request, page, children_content, False)
| hrayr-artunyan/shuup | shuup_tests/simple_cms/test_children.py | Python | agpl-3.0 | 2,105 |
import requests
from django.db import models
from django.conf import settings
from wagtail.wagtailcore.models import Page, Orderable
from wagtail.wagtailsearch import index
from wagtail.wagtailadmin.edit_handlers import FieldPanel
class PersonPage(Orderable, Page):
user = models.OneToOneField(settings.AUTH_USER_MODEL, null=True, blank=True,
related_name='person', on_delete=models.SET_NULL)
name = models.CharField(max_length=200, null=False)
contact = models.URLField(null=False)
github_user = models.CharField(max_length=200, blank=True, null=True)
job_title = models.CharField(max_length=20)
description = models.TextField(blank=True, null=True)
listed = models.BooleanField()
avatar_url = models.URLField(blank=True, null=True)
search_fields = Page.search_fields + (
index.SearchField('name'),
index.SearchField('job_title'),
index.SearchField('github_user'),
index.SearchField('description'),
)
content_panels = Page.content_panels + [
FieldPanel('name'),
FieldPanel('contact'),
FieldPanel('github_user'),
FieldPanel('user'),
FieldPanel('job_title'),
FieldPanel('description'),
FieldPanel('listed')
]
def get_github_link(self):
if not self.github_user:
return None
return 'https://github.com/%s' % self.github_user
def save(self, *args, **kwargs):
# update the avatar url
if self.github_user:
self.avatar_url\
= requests.get('https://api.github.com/users/' + self.github_user).json()['avatar_url']
super().save(*args, **kwargs)
print(self.avatar_url)
class AboutUsIndexPage(Page):
def people(self):
return PersonPage.objects.live().filter(listed=True)
| City-of-Helsinki/devheldev | aboutus/models.py | Python | agpl-3.0 | 1,845 |
# -*- coding: utf-8 -*-
"""
ZUGBRUECKE
Calling routines in Windows DLLs from Python scripts running on unixlike systems
https://github.com/pleiszenburg/zugbruecke
tests/test_error_missingroutine.py: Checks for proper error handling if routine does not exist
Required to run on platform / side: [UNIX, WINE]
Copyright (C) 2017-2019 Sebastian M. Ernst <[email protected]>
<LICENSE_BLOCK>
The contents of this file are subject to the GNU Lesser General Public License
Version 2.1 ("LGPL" or "License"). You may not use this file except in
compliance with the License. You may obtain a copy of the License at
https://www.gnu.org/licenses/old-licenses/lgpl-2.1.txt
https://github.com/pleiszenburg/zugbruecke/blob/master/LICENSE
Software distributed under the License is distributed on an "AS IS" basis,
WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License for the
specific language governing rights and limitations under the License.
</LICENSE_BLOCK>
"""
# +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
# IMPORT
# +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
import pytest
from sys import platform
if any([platform.startswith(os_name) for os_name in ['linux', 'darwin', 'freebsd']]):
import zugbruecke as ctypes
elif platform.startswith('win'):
import ctypes
# +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
# TEST(s)
# +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
def test_missingroutine():
dll = ctypes.windll.LoadLibrary('tests/demo_dll.dll')
with pytest.raises(AttributeError):
missing_routine = dll.missing_routine
| pleiszenburg/zugbruecke | tests/test_error_missingroutine.py | Python | lgpl-2.1 | 1,798 |
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyCekit(PythonPackage):
"""CEKit is a Container image creation tool.
CEKit helps to build container images from image definition files
with strong focus on modularity and code reuse."""
homepage = "https://github.com/cekit/cekit/"
url = "https://github.com/cekit/cekit/archive/3.7.0.tar.gz"
version('3.7.0', sha256='2a778b50427f1d7478d5cd54a5df97fb1b8d540892a1e70d7f9a9c7b878f89ca')
version('3.6.0', sha256='d046f25b533ffa1602e3c53e58cc90108bd8fb1f8d0c4fae92f28cf71f81add0')
version('3.5.0', sha256='696a90098cde8a59b8e2c06e1b031ee1fd86e696d1e9894e836da2a1432bfd20')
version('3.4.0', sha256='90817c5bf780235ce70b0228740511ecb9171540bffa4ca86721d3a6155d5901')
version('3.3.2', sha256='a17fcfb1c49d32846f78627b10b45a44d1cb7d99280edd873836c9a721bf30a8')
version('3.3.1', sha256='d31b7800417ec265131fc54df8a1cf275739fe29f3a3f96123dc996667d85368')
depends_on('py-setuptools', type='build')
depends_on('[email protected]:', type=('build', 'run'))
depends_on('[email protected]:', type=('build', 'run'))
depends_on('[email protected]:', type=('build', 'run'))
depends_on('[email protected]:', type=('build', 'run'))
depends_on('[email protected]:', type=('build', 'run'))
depends_on('[email protected]:', type=('build', 'run'))
| iulian787/spack | var/spack/repos/builtin/packages/py-cekit/package.py | Python | lgpl-2.1 | 1,533 |
# Orca
#
# Copyright (C) 2015 Igalia, S.L.
#
# Author: Joanmarie Diggs <[email protected]>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., Franklin Street, Fifth Floor,
# Boston MA 02110-1301 USA.
__id__ = "$Id$"
__version__ = "$Revision$"
__date__ = "$Date$"
__copyright__ = "Copyright (c) 2015 Igalia, S.L."
__license__ = "LGPL"
import orca.braille as braille
import orca.scripts.toolkits.WebKitGtk as WebKitGtk
class BrailleGenerator(WebKitGtk.BrailleGenerator):
def __init__(self, script):
super().__init__(script)
self._cache = {}
def _isMessageListToggleCell(self, obj):
cached = self._cache.get(hash(obj), {})
rv = cached.get("isMessageListToggleCell")
        if rv is None:
rv = self._script.utilities.isMessageListToggleCell(obj)
cached["isMessageListToggleCell"] = rv
self._cache[hash(obj)] = cached
return rv
def _generateRealActiveDescendantDisplayedText(self, obj, **args):
if self._isMessageListToggleCell(obj):
return []
return super()._generateRealActiveDescendantDisplayedText(obj, **args)
def generateBraille(self, obj, **args):
self._cache = {}
result, focusedRegion = super().generateBraille(obj, **args)
self._cache = {}
if not result or focusedRegion != result[0]:
return [result, focusedRegion]
hasObj = lambda x: isinstance(x, (braille.Component, braille.Text))
isObj = lambda x: self._script.utilities.isSameObject(obj, x.accessible)
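        # prefer the first rendered region that belongs to obj itself as the focused region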
matches = [r for r in result if hasObj(r) and isObj(r)]
if matches:
focusedRegion = matches[0]
return [result, focusedRegion]
| pvagner/orca | src/orca/scripts/apps/evolution/braille_generator.py | Python | lgpl-2.1 | 2,352 |
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PySpacy(PythonPackage):
"""spaCy is a library for advanced Natural Language Processing in
Python and Cython."""
homepage = "https://spacy.io/"
url = "https://pypi.io/packages/source/s/spacy/spacy-2.3.2.tar.gz"
version('2.3.2', sha256='818de26e0e383f64ccbe3db185574920de05923d8deac8bbb12113b9e33cee1f')
version('2.2.4', sha256='f0f3a67c5841e6e35d62c98f40ebb3d132587d3aba4f4dccac5056c4e90ff5b9')
depends_on('[email protected]:2.8,3.4:', type=('build', 'run'), when='@2.2.4:2.2.999')
depends_on('[email protected]:2.8,3.5:', type=('build', 'run'), when='@2.3.0:')
depends_on('py-wheel', type='build')
depends_on('[email protected]:', type='build')
depends_on('[email protected]:2.0.999', type=('build', 'run'))
depends_on('[email protected]:3.0.999', type=('build', 'run'))
depends_on('[email protected]:1.0', type=('build', 'run'))
depends_on('[email protected]', type=('build', 'run'), when='@2.2.4:2.2.999')
depends_on('[email protected]', type=('build', 'run'), when='@2.3.0:')
depends_on('[email protected]:0.4.999', type=('build', 'run'))
depends_on('[email protected]:1.0.999', type=('build', 'run'))
depends_on('[email protected]:1.0.999', type=('build', 'run'))
depends_on('[email protected]:1.0', type=('build', 'run'))
depends_on('[email protected]:4.999', type=('build', 'run'))
depends_on('py-setuptools', type=('build', 'run'))
depends_on('[email protected]:', type=('build', 'run'))
depends_on('[email protected]:1.1', type=('build', 'run'))
depends_on('[email protected]:2.999', type=('build', 'run'))
depends_on('[email protected]', when='^python@:3.3', type=('build', 'run'))
| iulian787/spack | var/spack/repos/builtin/packages/py-spacy/package.py | Python | lgpl-2.1 | 1,855 |
# -*- coding: utf-8 -*-
# ***************************************************************************
# * Copyright (c) 2017 sliptonic <[email protected]> *
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU Lesser General Public License (LGPL) *
# * as published by the Free Software Foundation; either version 2 of *
# * the License, or (at your option) any later version. *
# * for detail see the LICENCE text file. *
# * *
# * This program is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Library General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with this program; if not, write to the Free Software *
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
# * USA *
# * *
# ***************************************************************************
import FreeCAD
import FreeCADGui
import PathGui as PGui # ensure Path/Gui/Resources are loaded
import PathScripts.PathCircularHoleBaseGui as PathCircularHoleBaseGui
import PathScripts.PathDrilling as PathDrilling
import PathScripts.PathGui as PathGui
import PathScripts.PathLog as PathLog
import PathScripts.PathOpGui as PathOpGui
from PySide import QtCore
__title__ = "Path Drilling Operation UI."
__author__ = "sliptonic (Brad Collette)"
__url__ = "https://www.freecadweb.org"
__doc__ = "UI and Command for Path Drilling Operation."
__contributors__ = "IMBack!"
LOGLEVEL = False
if LOGLEVEL:
PathLog.setLevel(PathLog.Level.DEBUG, PathLog.thisModule())
PathLog.trackModule(PathLog.thisModule())
else:
PathLog.setLevel(PathLog.Level.NOTICE, PathLog.thisModule())
class TaskPanelOpPage(PathCircularHoleBaseGui.TaskPanelOpPage):
'''Controller for the drilling operation's page'''
def initPage(self, obj):
# pylint: disable=attribute-defined-outside-init
self.peckDepthSpinBox = PathGui.QuantitySpinBox(self.form.peckDepth, obj, 'PeckDepth')
self.peckRetractSpinBox = PathGui.QuantitySpinBox(self.form.peckRetractHeight, obj, 'RetractHeight')
self.dwellTimeSpinBox = PathGui.QuantitySpinBox(self.form.dwellTime, obj, 'DwellTime')
def registerSignalHandlers(self, obj):
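        # peck and dwell are mutually exclusive: enabling one disables the other's controls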
self.form.peckEnabled.toggled.connect(self.form.peckDepth.setEnabled)
self.form.peckEnabled.toggled.connect(self.form.dwellEnabled.setDisabled)
self.form.dwellEnabled.toggled.connect(self.form.dwellTime.setEnabled)
self.form.dwellEnabled.toggled.connect(self.form.dwellTimelabel.setEnabled)
self.form.dwellEnabled.toggled.connect(self.form.peckEnabled.setDisabled)
self.form.peckRetractHeight.setEnabled(True)
self.form.retractLabel.setEnabled(True)
if self.form.peckEnabled.isChecked():
self.form.dwellEnabled.setEnabled(False)
self.form.peckDepth.setEnabled(True)
self.form.peckDepthLabel.setEnabled(True)
elif self.form.dwellEnabled.isChecked():
self.form.peckEnabled.setEnabled(False)
self.form.dwellTime.setEnabled(True)
self.form.dwellTimelabel.setEnabled(True)
def getForm(self):
'''getForm() ... return UI'''
return FreeCADGui.PySideUic.loadUi(":/panels/PageOpDrillingEdit.ui")
def updateQuantitySpinBoxes(self, index = None):
# pylint: disable=unused-argument
self.peckDepthSpinBox.updateSpinBox()
self.peckRetractSpinBox.updateSpinBox()
self.dwellTimeSpinBox.updateSpinBox()
def getFields(self, obj):
'''setFields(obj) ... update obj's properties with values from the UI'''
PathLog.track()
self.peckDepthSpinBox.updateProperty()
self.peckRetractSpinBox.updateProperty()
self.dwellTimeSpinBox.updateProperty()
if obj.DwellEnabled != self.form.dwellEnabled.isChecked():
obj.DwellEnabled = self.form.dwellEnabled.isChecked()
if obj.PeckEnabled != self.form.peckEnabled.isChecked():
obj.PeckEnabled = self.form.peckEnabled.isChecked()
if obj.ExtraOffset != str(self.form.ExtraOffset.currentText()):
obj.ExtraOffset = str(self.form.ExtraOffset.currentText())
if obj.EnableRotation != str(self.form.enableRotation.currentText()):
obj.EnableRotation = str(self.form.enableRotation.currentText())
self.updateToolController(obj, self.form.toolController)
self.updateCoolant(obj, self.form.coolantController)
def setFields(self, obj):
'''setFields(obj) ... update UI with obj properties' values'''
PathLog.track()
self.updateQuantitySpinBoxes()
if obj.DwellEnabled:
self.form.dwellEnabled.setCheckState(QtCore.Qt.Checked)
else:
self.form.dwellEnabled.setCheckState(QtCore.Qt.Unchecked)
if obj.PeckEnabled:
self.form.peckEnabled.setCheckState(QtCore.Qt.Checked)
else:
self.form.peckEnabled.setCheckState(QtCore.Qt.Unchecked)
self.selectInComboBox(obj.ExtraOffset, self.form.ExtraOffset)
self.setupToolController(obj, self.form.toolController)
self.setupCoolant(obj, self.form.coolantController)
self.selectInComboBox(obj.EnableRotation, self.form.enableRotation)
def getSignalsForUpdate(self, obj):
'''getSignalsForUpdate(obj) ... return list of signals which cause the receiver to update the model'''
signals = []
signals.append(self.form.peckRetractHeight.editingFinished)
signals.append(self.form.peckDepth.editingFinished)
signals.append(self.form.dwellTime.editingFinished)
signals.append(self.form.dwellEnabled.stateChanged)
signals.append(self.form.peckEnabled.stateChanged)
signals.append(self.form.toolController.currentIndexChanged)
signals.append(self.form.coolantController.currentIndexChanged)
signals.append(self.form.ExtraOffset.currentIndexChanged)
signals.append(self.form.enableRotation.currentIndexChanged)
return signals
def updateData(self, obj, prop):
        if prop in ['PeckDepth', 'RetractHeight'] and prop not in ['Base', 'Disabled']:
self.updateQuantitySpinBoxes()
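# Register the Drilling operation with the Path workbench: SetupOperation wires
# PathDrilling.Create and the TaskPanelOpPage above to a new GUI command, using
# the 'Path_Drilling' resource name and the translated menu text/tooltip below.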
Command = PathOpGui.SetupOperation('Drilling',
PathDrilling.Create,
TaskPanelOpPage,
'Path_Drilling',
QtCore.QT_TRANSLATE_NOOP("Path_Drilling", "Drilling"),
QtCore.QT_TRANSLATE_NOOP("Path_Drilling", "Creates a Path Drilling object from a features of a base object"),
PathDrilling.SetupProperties)
FreeCAD.Console.PrintLog("Loading PathDrillingGui... done\n")
| Fat-Zer/FreeCAD_sf_master | src/Mod/Path/PathScripts/PathDrillingGui.py | Python | lgpl-2.1 | 7,330 |
import sys
def setup(core, object):
object.setStfFilename('static_item_n')
object.setStfName('item_jedi_robe_06_04')
object.setDetailFilename('static_item_d')
object.setDetailName('item_jedi_robe_06_04')
object.setStringAttribute('protection_level', 'Radiant')
object.setStringAttribute('class_required', 'Jedi')
object.setIntAttribute('required_combat_level', 90)
object.setIntAttribute('cat_stat_mod_bonus.@stat_n:constitution_modified', 135)
object.setIntAttribute('cat_stat_mod_bonus.@stat_n:precision_modified', 135)
object.setIntAttribute('cat_stat_mod_bonus.@stat_n:agility_modified', 135)
object.setIntAttribute('cat_stat_mod_bonus.@stat_n:luck_modified', 135)
object.setIntAttribute('cat_skill_mod_bonus.@stat_n:expertise_damage_line_fs_ae_dm_cc', 4)
object.setStringAttribute('@set_bonus:piece_bonus_count_2', '@set_bonus:set_bonus_jedi_robe_1')
object.setAttachment('type', 'jedi_cloak')
object.setAttachment('setBonus', 'set_bonus_jedi_robe')
object.setAttachment('radial_filename', 'equipment/jedi_master_robes');
return
| agry/NGECore2 | scripts/object/tangible/wearables/robe/robe_s32.py | Python | lgpl-3.0 | 1,064 |
#!/usr/bin/env python3
import os, sys
pandoraPath = os.getenv('PANDORAPATH', '/usr/local/pandora')
sys.path.append(pandoraPath+'/bin')
sys.path.append(pandoraPath+'/lib')
from pyPandora import Point2DInt
testPoint = Point2DInt(4,6)
print('point: ',testPoint)
testPoint._x += 1
testPoint._y = testPoint._y//2
print('modified point: ',testPoint)
| montanier/pandora | examples/python/testPoint.py | Python | lgpl-3.0 | 350 |
#! /usr/bin/env python
'''
this module runs pylint on all python scripts found in a directory tree
'''
import os
import re
import sys
from optparse import OptionParser
total = 0.0
count = 0
errors = 0
VERBOSE = False
BASE_DIRECTORY = os.getcwd()
SUMMARY = False
class WritableObject(object):
"dummy output stream for pylint"
def __init__(self):
self.content = []
def write(self, st):
"dummy write"
self.content.append(st)
def read(self):
"dummy read"
return self.content
def run_pylint(filename, options):
"run pylint on the given file"
ARGS = ["--rcfile=./.pylintrc"] # put your own here
if not options.show_all:
ARGS.append("-E")
pylint_output = WritableObject()
from pylint import lint
from pylint.reporters.text import TextReporter
lint.Run([filename]+ARGS, reporter=TextReporter(pylint_output), exit=False)
return pylint_output.read()
def print_line(line):
global VERBOSE
if VERBOSE:
print(line.rstrip())
def check(module, options):
'''
apply pylint to the file specified if it is a *.py file
'''
global total, count, errors
if module[-3:] == ".py":
args = ''
print_line("CHECKING %s" % (module))
pout = run_pylint(module, options)
count += 1
for line in pout:
if re.match("E\:.*", line):
errors += 1
if options.summary or options.verbose:
print "Module: %s - %s" % (module, line.rstrip())
if re.match("[RCWF]\:.*", line) and options.show_all:
print_line(line)
if re.match("E....:.", line):
print_line(line)
if "Your code has been rated at" in line:
print_line(line)
score = re.findall("\d.\d\d", line)[0]
total += float(score)
def parse_cmdline(argv):
"""Parses the command-line."""
global BASE_DIRECTORY, VERBOSE, SUMMARY
DEFAULT_BASE_DIR = os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])),
'collectors')
parser = OptionParser(description='Runs pylint recursively on a directory')
parser.add_option('-b', '--base-dir', dest='base_directory', metavar='DIR',
default=DEFAULT_BASE_DIR,
help='Directory to start linting')
parser.add_option('-v', '--verbose', dest='verbose', action='store_true', default=False,
help='Verbose mode (log debug messages).')
parser.add_option('-a', '--show-all', dest='show_all', action='store_true', default=False,
help='By default, we are only showing error lines')
parser.add_option('-s', '--summary', dest='summary', action='store_true', default=False,
help='Show summary report')
(options, args) = parser.parse_args(args=argv[1:])
VERBOSE = options.verbose
BASE_DIRECTORY = options.base_directory
return (options, args)
def check_version():
ver = sys.version_info
if ver[0] == 2 and ver[1] < 7:
sys.stderr.write("Requires Python >2.7 for pylint\n")
return False
return True
def main(argv):
global BASE_DIRECTORY, VERBOSE
if not check_version():
return 0
options, args = parse_cmdline(argv)
print_line("looking for *.py scripts in subdirectories of %s" % (BASE_DIRECTORY))
for root, dirs, files in os.walk(BASE_DIRECTORY):
for name in files:
filepath = os.path.join(root, name)
check(filepath, options)
if options.summary:
print "==" * 50
print "%d modules found" % count
print "%d errors found" % errors
if options.show_all and count > 0:
print "AVERAGE SCORE = %.02f" % (total / count)
return errors
if __name__ == '__main__':
sys.exit(main(sys.argv))
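# Example invocation (illustrative; adjust paths to your checkout):
#   ./pylint-runner.py --base-dir ./collectors --summary
# runs pylint on every *.py file under the directory and reports error counts.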
| wangy1931/tcollector | pylint-runner.py | Python | lgpl-3.0 | 3,903 |
#!/usr/bin/env python
#
# Copyright (c) 2013 Juniper Networks, Inc. All rights reserved.
#
import os
import socket
import netaddr
import logging
import netifaces
import tempfile
from contrail_vrouter_provisioning import local
from contrail_vrouter_provisioning.base import ContrailSetup
from contrail_vrouter_provisioning.network import ComputeNetworkSetup
log = logging.getLogger('contrail_vrouter_provisioning.common')
def insert_line_to_file(line, file_name, pattern=None):
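    # When 'pattern' is given, matching lines are first deleted with sed so the
    # helper stays idempotent for config files (e.g. /etc/sysctl.conf, /etc/fstab).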
if pattern:
local('sed -i \'/%s/d\' %s' % (pattern, file_name), warn_only=True)
local('printf "%s\n" >> %s' % (line, file_name))
class CommonComputeSetup(ContrailSetup, ComputeNetworkSetup):
def __init__(self, args):
super(CommonComputeSetup, self).__init__()
self._args = args
# Using keystone admin password for nova/neutron if not supplied
if not self._args.neutron_password:
self._args.neutron_password = self._args.keystone_admin_password
self.multi_net = False
if self._args.non_mgmt_ip:
self.multi_net = True
self.vhost_ip = self._args.non_mgmt_ip
else:
self.vhost_ip = self._args.self_ip
self.dev = None # Will be physical device
if self._args.physical_interface:
# During re-provision/upgrade vhost0 will be present
# so vhost0 should be treated as dev,
# which is used to get netmask/gateway
if 'vhost0' in netifaces.interfaces():
self.dev = 'vhost0'
# During intial provision actual interface should be treated as dev
# which is used to get netmask/gateway
elif self._args.physical_interface in netifaces.interfaces():
self.dev = self._args.physical_interface
else:
                raise KeyError('Interface %s not present' %
self._args.physical_interface)
else:
# Get the physical device and provision status
# if reprov is False, it means fresh install
# True, it means reprovision
(self.dev, self.reprov) = self.get_device_info(self.vhost_ip)
def fixup_config_files(self):
self.add_dev_tun_in_cgroup_device_acl()
self.fixup_contrail_vrouter_agent()
self.add_qos_config()
self.fixup_contrail_vrouter_nodemgr()
self.fixup_contrail_lbaas()
def setup_lbaas_prereq(self):
if self.pdist in ['centos', 'redhat']:
local('sudo groupadd -f nogroup')
cmd = "sudo sed -i s/'Defaults requiretty'/'#Defaults "
cmd += "requiretty'/g /etc/sudoers"
local(cmd)
def add_dev_tun_in_cgroup_device_acl(self):
# add /dev/net/tun in cgroup_device_acl needed
# for type=ethernet interfaces
fl = "/etc/libvirt/qemu.conf"
ret = local("sudo grep -q '^cgroup_device_acl' %s" % fl,
warn_only=True)
if ret.failed:
if self.pdist in ['centos', 'redhat']:
local('sudo echo "clear_emulator_capabilities = 1" >> %s' % fl,
warn_only=True)
local('sudo echo \'user = "root"\' >> %s' % fl, warn_only=True)
local('sudo echo \'group = "root"\' >> %s' % fl,
warn_only=True)
cmds = ['echo \'cgroup_device_acl = [\' >> %s' % fl,
'echo \' "/dev/null", "/dev/full", "/dev/zero",\''
+ ' >> %s' % fl,
'echo \' "/dev/random", "/dev/urandom",\''
+ ' >> %s' % fl,
'echo \' "/dev/ptmx", "/dev/kvm", "/dev/kqemu",\''
+ ' >> %s' % fl,
'echo \' "/dev/rtc", "/dev/hpet", "/dev/net/tun",\''
+ ' >> %s' % fl,
'echo \']\' >> %s' % fl]
for cmd in cmds:
local('sudo ' + cmd, warn_only=True)
self._fixed_qemu_conf = True
# add "alias bridge off" in /etc/modprobe.conf for Centos
if self.pdist in ['centos', 'redhat']:
local('sudo echo "alias bridge off" > /etc/modprobe.conf',
warn_only=True)
def fixup_contrail_vrouter_nodemgr(self):
# Workaround https://bugs.launchpad.net/juniperopenstack/+bug/1681172
cfgfile = '/etc/contrail/contrail-vrouter-nodemgr.conf'
if not os.path.isfile(cfgfile):
local('sudo touch %s' % cfgfile)
collector_list = ' '.join('%s:%s' % (server, '8086')
for server in self._args.collectors)
self.set_config(cfgfile, 'COLLECTOR', 'server_list', collector_list)
self.set_config(cfgfile, 'SANDESH', 'sandesh_ssl_enable',
self._args.sandesh_ssl_enable)
self.set_config(cfgfile, 'SANDESH', 'introspect_ssl_enable',
self._args.introspect_ssl_enable)
def setup_hugepages_node(self, dpdk_args):
"""Setup hugepages on one or list of nodes
"""
# How many times DPDK inits hugepages (rte_eal_init())
# See function map_all_hugepages() in DPDK
DPDK_HUGEPAGES_INIT_TIMES = 2
# get required size of hugetlbfs
factor = int(dpdk_args['huge_pages'])
print dpdk_args
if factor == 0:
factor = 1
# set number of huge pages
memsize = local("sudo grep MemTotal /proc/meminfo |"
" tr -s ' ' | cut -d' ' -f 2 | tr -d '\n'",
capture=True, warn_only=True)
pagesize = local("sudo grep Hugepagesize /proc/meminfo"
" | tr -s ' 'i | cut -d' ' -f 2 | tr -d '\n'",
capture=True, warn_only=True)
reserved = local("sudo grep HugePages_Total /proc/meminfo"
" | tr -s ' 'i | cut -d' ' -f 2 | tr -d '\n'",
capture=True, warn_only=True)
if (reserved == ""):
reserved = "0"
requested = ((int(memsize) * factor) / 100) / int(pagesize)
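        # Illustrative arithmetic (hypothetical numbers): with MemTotal of
        # 65536000 kB, huge_pages=10 and a 2048 kB hugepage size this requests
        # ((65536000 * 10) / 100) / 2048 = 3200 pages via vm.nr_hugepages.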
if (requested > int(reserved)):
pattern = "^vm.nr_hugepages ="
line = "vm.nr_hugepages = %d" % requested
insert_line_to_file(pattern=pattern, line=line,
file_name='/etc/sysctl.conf')
current_max_map_count = local("sudo sysctl -n "
"vm.max_map_count")
if current_max_map_count == "":
current_max_map_count = 0
current_huge_pages = max(int(requested), int(reserved))
requested_max_map_count = (DPDK_HUGEPAGES_INIT_TIMES
* int(current_huge_pages))
if int(requested_max_map_count) > int(current_max_map_count):
pattern = "^vm.max_map_count ="
line = "vm.max_map_count = %d" % requested_max_map_count
insert_line_to_file(pattern=pattern, line=line,
file_name='/etc/sysctl.conf')
local('sudo sysctl -p', warn_only=True)
mounted = local("sudo mount | grep hugetlbfs | cut -d' ' -f 3",
capture=True, warn_only=False)
if (mounted != ""):
print "hugepages already mounted on %s" % mounted
else:
local("sudo mkdir -p /hugepages", warn_only=False)
pattern = "^hugetlbfs"
line = "hugetlbfs "\
"/hugepages hugetlbfs defaults 0 0"
insert_line_to_file(pattern=pattern, line=line,
file_name='/etc/fstab')
local("sudo mount -t hugetlbfs hugetlbfs /hugepages",
warn_only=False)
def search_and_replace(self, lvalue, rvalue, position, vrouter_file):
"""Search and replace strings in the format <key>=<filepath> <args>
- 'position' determines where the <rvalue> needs to be inserted
- If it is "Begin", the string becomes:
<key>=<rvalue> <filepath> <args>
- If it is "End", the string becomes:
<key>=<filepath> <args> <rvalue>
- If <rvalue> already exists in <args>, it deletes it first
- If <rvalue> already preceeds <filepath> it deletes it first
Input:
- lvalue = <key>
- rvalue = <arg> to be searched and replaced
- position = Begin/End
- vrouter_file = path of vrouter file
"""
if position == "Begin":
regexp_del = r"'s/\(^ *%s *=\)\(.*\)\( \/.*\)/\1\3/'" % (lvalue)
regexp_add = r"'s/\(^%s=\)\(.*\)/\1%s \2/'" % (lvalue, rvalue)
regexp = "sed -i.bak -e %s -e %s %s" \
% (regexp_del, regexp_add, vrouter_file)
local(regexp, warn_only=False)
elif position == "End":
regexp_del = r"'s/\(^ *%s *=.*\) \(%s [^ ]*\)\(.*\) *$/\1\3/'" \
% (lvalue, rvalue.split(' ')[0])
regexp_add = r"'s/\(^ *%s *=.*\)/\1 %s/'" % (lvalue, rvalue)
regexp = "sed -i.bak -e %s -e %s %s" \
% (regexp_del, regexp_add, vrouter_file)
local(regexp, warn_only=False)
def setup_coremask_node(self, dpdk_args):
"""Setup core mask on one or list of nodes
"""
try:
coremask = dpdk_args['coremask']
except KeyError:
raise RuntimeError("Core mask for host %s is not defined."
% (dpdk_args))
if not coremask:
raise RuntimeError("Core mask for host %s is not defined."
% dpdk_args)
# if a list of cpus is provided, -c flag must be passed to taskset
if (',' in coremask) or ('-' in coremask):
taskset_param = ' -c'
else:
taskset_param = ''
# supported coremask format: hex: (0x3f); list: (0,3-5), (0,1,2,3,4,5)
# try taskset on a dummy command
if local('sudo taskset%s %s true' % (taskset_param, coremask),
capture=True, warn_only=False).succeeded:
self.search_and_replace(self.command_key, '\/usr\/bin\/taskset ' + coremask,
"Begin", self.vrouter_file)
else:
raise RuntimeError("Error: Core mask %s for host %s is invalid."
% (coremask, dpdk_args))
def setup_vm_coremask_node(self, q_coremask, dpdk_args):
"""
Setup CPU affinity for QEMU processes based on
vRouter/DPDK core affinity on a given node.
Supported core mask format:
vRouter/DPDK: hex (0x3f), list (0,1,2,3,4,5), range (0,3-5)
QEMU/nova.conf: list (0,1,2,3,4,5), range (0,3-5),
exclusion (0-5,^4)
QEMU needs to be pinned to different cores than vRouter. Because of
different core mask formats, it is not possible to just set QEMU to
<not vRouter cores>. This function takes vRouter core mask from
testbed, changes it to list of cores and removes them from list
of all possible cores (generated as a list from 0 to N-1, where
N = number of cores). This is changed back to string and passed to
openstack-config.
"""
try:
vr_coremask = dpdk_args['coremask']
except KeyError:
raise RuntimeError("vRouter core mask for "
"host %s is not defined." % (dpdk_args))
if not vr_coremask:
raise RuntimeError("vRouter core mask for host "
"%s is not defined." % dpdk_args)
if not q_coremask:
try:
cpu_count = int(local(
'sudo grep -c processor /proc/cpuinfo',
capture=True))
except ValueError:
log.info("Cannot count CPUs on host %s. VM core "
"mask cannot be computed." % (dpdk_args))
raise
if not cpu_count or cpu_count == -1:
raise ValueError("Cannot count CPUs on host %s. "
"VM core mask cannot be computed."
% (dpdk_args))
all_cores = [x for x in xrange(cpu_count)]
if 'x' in vr_coremask: # String containing hexadecimal mask.
vr_coremask = int(vr_coremask, 16)
"""
Convert hexmask to a string with numbers of cores to be
used, eg.
0x19 -> 11001 -> 10011 -> [(0,1), (1,0), (2,0),
(3,1), (4,1)] -> '0,3,4'
"""
vr_coremask = [
x[0] for x in enumerate(reversed(bin(vr_coremask)[2:]))
if x[1] == '1']
# Range or list of cores.
elif (',' in vr_coremask) or ('-' in vr_coremask):
# Get list of core numbers and/or core ranges.
vr_coremask = vr_coremask.split(',')
# Expand ranges like 0-4 to 0, 1, 2, 3, 4.
vr_coremask_expanded = []
for rng in vr_coremask:
if '-' in rng: # If it's a range - expand it.
a, b = rng.split('-')
vr_coremask_expanded += range(int(a), int(b) + 1)
else: # If not, just add to the list.
vr_coremask_expanded.append(int(rng))
vr_coremask = vr_coremask_expanded
else: # A single core.
try:
single_core = int(vr_coremask)
except ValueError:
log.error("vRouter core mask %s for host %s is invalid."
% (vr_coremask, dpdk_args))
raise
vr_coremask = []
vr_coremask.append(single_core)
# From list of all cores remove list of vRouter cores
# and stringify.
diff = set(all_cores) - set(vr_coremask)
q_coremask = ','.join(str(x) for x in diff)
# If we have no spare cores for VMs
if not q_coremask:
raise RuntimeError("Setting QEMU core mask for host %s "
"failed - empty string."
% (dpdk_args))
# This can fail eg. because openstack-config is not present.
# There's no sanity check in openstack-config.
if local("sudo crudini --set /etc/nova/nova.conf "
"DEFAULT vcpu_pin_set %s"
% q_coremask, capture=True, warn_only=False).succeeded:
log.info("QEMU coremask on host %s set to %s."
% (dpdk_args, q_coremask))
else:
raise RuntimeError("Error: setting QEMU core mask %s for "
"host %s failed." % (vr_coremask, dpdk_args))
def setup_uio_driver(self, dpdk_args):
"""Setup UIO driver to use for DPDK
(igb_uio, uio_pci_generic or vfio-pci)
"""
vrouter_agent_file = '/etc/contrail/contrail-vrouter-agent.conf'
if 'uio_driver' in dpdk_args:
uio_driver = dpdk_args['uio_driver']
if uio_driver == "vfio-pci":
self.setup_sriov_grub(uio_driver)
else:
print "No UIO driver defined for host, skipping..."
return
if local('sudo modprobe %s'
% (uio_driver), capture=True, warn_only=False).succeeded:
log.info("Setting UIO driver to %s for host..." % uio_driver)
local('sudo contrail-config --set %s DEFAULT '\
'physical_uio_driver %s' % (vrouter_agent_file, uio_driver))
else:
raise RuntimeError("Error: invalid UIO driver %s for host"
% (uio_driver))
def dpdk_increase_vrouter_limit(self,
vrouter_module_params_args):
"""Increase the maximum number of mpls label
and nexthop on tsn node"""
vr_params = {
'flow_entries': '524288',
'oflow_entries': '3000',
'mpls_labels': '5120',
'nexthops': '65536',
'vrfs': '5120',
'macs': {'bridge_entries': '262144'},
}
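        # Each entry above becomes a flag appended to the vrouter-dpdk command
        # line, e.g. the default 'mpls_labels' yields '--vr_mpls_labels 5120';
        # values may be overridden through --vrouter_module_params.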
for param in vr_params:
if isinstance(vr_params[param], dict):
for p in vr_params[param]:
param_name = p
param_val = vrouter_module_params_args.setdefault(
param, vr_params[param][p])
else:
param_name = param
param_val = vrouter_module_params_args.setdefault(
param, vr_params[param])
param = "--vr_" + param_name + " " + param_val
self.search_and_replace(self.command_key, param,
"End", self.vrouter_file)
def fixup_contrail_vrouter_agent(self):
compute_ip = self._args.self_ip
non_mgmt_gw = self._args.non_mgmt_gw
vgw_public_subnet = self._args.vgw_public_subnet
vgw_public_vn_name = self._args.vgw_public_vn_name
vgw_intf_list = self._args.vgw_intf_list
vgw_gateway_routes = self._args.vgw_gateway_routes
compute_as_gateway = self._args.compute_as_gateway
flow_thread_count = self._args.flow_thread_count
self.mac = None
# Fresh install
if self.dev and not self.reprov:
self.mac = netifaces.ifaddresses(self.dev)[netifaces.AF_LINK][0][
'addr']
if not self.mac:
raise KeyError('Interface %s Mac %s' % (str(self.dev),
str(self.mac)))
self.netmask = netifaces.ifaddresses(self.dev)[
netifaces.AF_INET][0]['netmask']
if self.multi_net:
self.gateway = non_mgmt_gw
else:
self.gateway = self.find_gateway(self.dev)
self.cidr = netaddr.IPNetwork('%s/%s' % (self.vhost_ip,
self.netmask))
elif self.dev:
# Reprovision
cfg_file = "/etc/contrail/contrail-vrouter-agent.conf"
section = "DEFAULT"
key = "physical_interface_mac"
self.mac = self.get_config(cfg_file, section, key).strip()
section = "VIRTUAL-HOST-INTERFACE"
key = "ip"
self.cidr = netaddr.IPNetwork(self.get_config(cfg_file, section, key).strip())
section = "VIRTUAL-HOST-INTERFACE"
key = "gateway"
self.gateway = self.get_config(cfg_file, section, key).strip()
self.netmask = "255.255.255.0"
if self.dev:
if vgw_public_subnet:
os.chdir(self._temp_dir_name)
# Manipulating the string to use in agent_param
vgw_public_subnet_str = []
for i in vgw_public_subnet[1:-1].split(";"):
j = i[1:-1].split(",")
j = ";".join(j)
vgw_public_subnet_str.append(j)
vgw_public_subnet_str = str(tuple(
vgw_public_subnet_str)).replace("'", "")
vgw_public_subnet_str = vgw_public_subnet_str.replace(" ", "")
vgw_intf_list_str = str(tuple(
vgw_intf_list[1:-1].split(";"))).replace(" ", "")
cmds = ["sudo sed 's@dev=.*@dev=%s@g;" % self.dev,
"s@vgw_subnet_ip=.*@vgw_subnet_ip=%s@g;" %
vgw_public_subnet_str,
"s@vgw_intf=.*@vgw_intf=%s@g'" % vgw_intf_list_str,
" /etc/contrail/agent_param.tmpl > agent_param.new"]
local(' '.join(cmds))
local("sudo mv agent_param.new /etc/contrail/agent_param")
else:
os.chdir(self._temp_dir_name)
cmds = ["sudo sed 's/dev=.*/dev=%s/g' " % self.dev,
"/etc/contrail/agent_param.tmpl > agent_param.new"]
local(''.join(cmds))
local("sudo mv agent_param.new /etc/contrail/agent_param")
vmware_dev = None
gateway_mode = None
if (self._args.mode == 'vcenter' or
self._args.hypervisor == 'vmware'):
vmware_dev = self.get_secondary_device(self.dev)
if compute_as_gateway == 'server':
gateway_mode = "server"
# Set template options for DPDK mode
pci_dev = ""
platform_mode = "default"
if self._args.dpdk:
dpdk_args = dict(
u.split("=") for u in self._args.dpdk.split(","))
log.info(dpdk_args)
platform_mode = "dpdk"
supervisor_vrouter_file = ('/etc/contrail/' +
'supervisord_vrouter_files/' +
'contrail-vrouter-dpdk.ini')
systemd_vrouter_file = ('/lib/systemd/system/' +
'contrail-vrouter-dpdk.service')
if os.path.isfile(supervisor_vrouter_file):
self.vrouter_file = supervisor_vrouter_file
self.command_key = "command"
elif os.path.isfile(systemd_vrouter_file):
self.vrouter_file = systemd_vrouter_file
self.command_key = "ExecStart"
else:
raise RuntimeError("Vrouter Supervisor/Systemd not found.")
self.setup_hugepages_node(dpdk_args)
self.setup_coremask_node(dpdk_args)
self.setup_vm_coremask_node(False, dpdk_args)
self.setup_uio_driver(dpdk_args)
if self._args.dpdk and not self.reprov:
iface = self.dev
if self.is_interface_vlan(self.dev):
iface = self.get_physical_interface_of_vlan(self.dev)
local("ls /opt/contrail/bin/dpdk_nic_bind.py", warn_only=False)
cmd = "sudo /opt/contrail/bin/dpdk_nic_bind.py --status | "
cmd += "sudo grep -w %s | cut -d' ' -f 1" % iface.strip()
pci_dev = local(cmd, capture=True, warn_only=False)
# If there is no PCI address, the device is a bond.
# Bond interface in DPDK has zero PCI address.
if not pci_dev:
pci_dev = "0000:00:00.0"
elif self._args.dpdk and self.reprov:
cfg_file = "/etc/contrail/contrail-vrouter-agent.conf"
section = "DEFAULT"
key = "physical_interface_address"
pci_dev = self.get_config(cfg_file, section, key).strip()
if self.pdist == 'Ubuntu':
# Fix /dev/vhost-net permissions. It is required for
# multiqueue operation
local('sudo echo \'KERNEL=="vhost-net", '
'GROUP="kvm", MODE="0660"\' > '
'/etc/udev/rules.d/vhost-net.rules', warn_only=True)
# The vhost-net module has to be loaded at startup to
# ensure the correct permissions while the qemu is being
# launched
pattern = "vhost-net"
line = "vhost-net"
insert_line_to_file(pattern=pattern, line=line,
file_name='/etc/modules')
if not self._args.dpdk:
self.setup_vrouter_kmod_hugepages()
vrouter_kmod_1G_page = ''
vrouter_kmod_2M_page = ''
if self._args.vrouter_1G_hugepages != '0':
if (os.path.isfile('/mnt/hugepage_1G/vrouter_1G_mem_0')):
vrouter_kmod_1G_page = '/mnt/hugepage_1G/vrouter_1G_mem_0'
if (os.path.isfile('/mnt/hugepage_1G/vrouter_1G_mem_1')):
vrouter_kmod_1G_page = vrouter_kmod_1G_page + ' /mnt/hugepage_1G/vrouter_1G_mem_1'
if self._args.vrouter_2M_hugepages != '0':
if (os.path.isfile('/mnt/hugepage_2M/vrouter_2M_mem_0')):
vrouter_kmod_2M_page = '/mnt/hugepage_2M/vrouter_2M_mem_0'
if (os.path.isfile('/mnt/hugepage_2M/vrouter_2M_mem_1')):
vrouter_kmod_2M_page = vrouter_kmod_2M_page + ' /mnt/hugepage_2M/vrouter_2M_mem_1'
control_servers = ' '.join('%s:%s' % (server, '5269')
for server in self._args.control_nodes)
dns_servers = ' '.join('%s:%s' % (server, '53')
for server in self._args.control_nodes)
collector_servers = ' '.join('%s:%s' % (server, '8086')
for server in self._args.collectors)
if self._args.tsn_evpn_mode and self._args.tsn_servers:
tsn_servers = ' '.join(self._args.tsn_servers)
else:
tsn_servers = ''
configs = {
'DEFAULT': {
'platform': platform_mode,
'gateway_mode': gateway_mode or '',
'physical_interface_address': pci_dev,
'physical_interface_mac': self.mac,
'collectors': collector_servers,
'xmpp_auth_enable': self._args.xmpp_auth_enable,
'xmpp_dns_auth_enable': self._args.xmpp_dns_auth_enable,
'tsn_servers': tsn_servers,
'agent_mode': ''},
'NETWORKS': {
'control_network_ip': compute_ip},
'VIRTUAL-HOST-INTERFACE': {
'name': 'vhost0',
'ip': str(self.cidr),
'gateway': self.gateway,
'physical_interface': self.dev},
'HYPERVISOR': {
'type': ('kvm' if self._args.hypervisor == 'libvirt'
else self._args.hypervisor),
'vmware_mode': self._args.mode or '',
'vmware_physical_interface': vmware_dev or ''},
'CONTROL-NODE': {
'servers': control_servers},
'DNS': {
'servers': dns_servers},
'SANDESH': {
'sandesh_ssl_enable': self._args.sandesh_ssl_enable,
'introspect_ssl_enable':
self._args.introspect_ssl_enable},
'FLOWS': {
'thread_count': flow_thread_count},
'METADATA': {
'metadata_proxy_secret': self._args.metadata_secret,
'metadata_use_ssl': self._args.metadata_use_ssl,
'metadata_client_cert': ('/etc/contrail/ssl/certs/server.pem'
if self._args.metadata_use_ssl else ''),
                'metadata_client_cert_type': ('PEM' if self._args.metadata_use_ssl
else ''),
'metadata_client_key': ('/etc/contrail/ssl/private/server-privkey.pem'
if self._args.metadata_use_ssl else '')},
'RESTART': {
'huge_page_2M': vrouter_kmod_2M_page,
'huge_page_1G': vrouter_kmod_1G_page,
'backup_enable': (True
if self._args.resource_backup_restore else False),
'backup_dir': ('/var/lib/contrail/backup'),
'backup_file_count': (self._args.backup_file_count),
'backup_idle_timeout': (self._args.backup_idle_timeout),
'restore_enable': (True
if self._args.resource_backup_restore else False),
'restore_audit_timeout': (self._args.restore_audit_timeout)},
}
# VGW configs
if vgw_public_vn_name and vgw_public_subnet:
vgw_public_vn_name = vgw_public_vn_name[1:-1].split(';')
vgw_public_subnet = vgw_public_subnet[1:-1].split(';')
vgw_intf_list = vgw_intf_list[1:-1].split(';')
if vgw_gateway_routes is not None:
vgw_gateway_routes = vgw_gateway_routes[1:-1].split(';')
for i in range(len(vgw_public_vn_name)):
ip_blocks = ''
if vgw_public_subnet[i].find("[") != -1:
for ele in vgw_public_subnet[i][1:-1].split(","):
ip_blocks += ele[1:-1] + " "
else:
ip_blocks += vgw_public_subnet[i]
routes = ''
if (vgw_gateway_routes is not None and
i < len(vgw_gateway_routes)):
if vgw_gateway_routes[i] != '[]':
if vgw_gateway_routes[i].find("[") != -1:
for ele in vgw_gateway_routes[i][1:-1].split(
","):
routes += ele[1:-1] + " "
else:
routes += vgw_gateway_routes[i]
configs['GATEWAY-%s' % i] = {'interface': vgw_intf_list[i],
'ip_blocks': ip_blocks,
'routes': routes,
'routing_instance': vgw_public_vn_name[i]}
for section, key_vals in configs.items():
for key, val in key_vals.items():
self.set_config(
'/etc/contrail/contrail-vrouter-agent.conf',
section, key, val)
if self.running_in_container:
self.config_vhost0_interface_in_container()
else:
self.fixup_vhost0_interface_configs()
def config_vhost0_interface_in_container(self):
if self.reprov:
log.info("vhost0 configuration already present")
return
# Insert vrouter and setup vrouter vifs
insert_cmd = "source /opt/contrail/bin/vrouter-functions.sh && "
insert_cmd += "insert_vrouter"
local(insert_cmd, executable='/bin/bash')
# Move ip address from vrouter physical device to vhost
config_vhost0_cmd = "ip address delete %s/%s dev %s && " % (
self.vhost_ip, self.cidr.prefixlen, self.dev)
config_vhost0_cmd += "ip address add %s/%s dev vhost0 && " % (
self.vhost_ip, self.cidr.prefixlen)
config_vhost0_cmd += "ip link set dev vhost0 up"
local(config_vhost0_cmd)
# Add default gateway to new device as link local if /32 IP Address
if self.cidr.prefixlen == 32:
local("ip route add unicast %s dev vhost0 scope link" %
self.gateway)
if not self.multi_net:
# Add default gateway to vhost
local("ip route add default via %s dev vhost0" % self.gateway)
def fixup_contrail_lbaas(self):
auth_url = self._args.keystone_auth_protocol + '://'
auth_url += self._args.keystone_ip
auth_url += ':' + self._args.keystone_auth_port
auth_url += '/' + 'v2.0'
configs = {
'BARBICAN': {
'admin_tenant_name': 'service',
'admin_user': 'neutron',
'admin_password': self._args.neutron_password,
'auth_url': auth_url,
'region': 'RegionOne'}
}
# Workaround https://bugs.launchpad.net/juniperopenstack/+bug/1681172
cfgfile = '/etc/contrail/contrail-lbaas-auth.conf'
if not os.path.isfile(cfgfile):
local('sudo touch %s' % cfgfile)
for section, key_vals in configs.items():
for key, val in key_vals.items():
self.set_config(cfgfile, section, key, val)
def fixup_vhost0_interface_configs(self):
if self.reprov:
log.info("fixup_vhost0_interface_configs() not applicable")
return
if self.pdist in ['centos', 'fedora', 'redhat']:
# make ifcfg-vhost0
with open('%s/ifcfg-vhost0' % self._temp_dir_name, 'w') as f:
f.write('''#Contrail vhost0
DEVICE=vhost0
ONBOOT=yes
BOOTPROTO=none
IPV6INIT=no
USERCTL=yes
IPADDR=%s
NETMASK=%s
NM_CONTROLLED=no
#NETWORK MANAGER BUG WORKAROUND
SUBCHANNELS=1,2,3
''' % (self.vhost_ip, self.netmask))
# Don't set gateway and DNS on vhost0 if on non-mgmt network
if not self.multi_net:
if self.gateway:
f.write('GATEWAY=%s\n' % self.gateway)
dns_list = self.get_dns_servers(self.dev)
for i, dns in enumerate(dns_list):
f.write('DNS%d=%s\n' % (i + 1, dns))
domain_list = self.get_domain_search_list()
if domain_list:
f.write('DOMAIN="%s"\n' % domain_list)
prsv_cfg = []
mtu = self.get_if_mtu(self.dev)
if mtu:
dcfg = 'MTU=%s' % str(mtu)
f.write(dcfg + '\n')
prsv_cfg.append(dcfg)
f.flush()
if self.dev != 'vhost0':
src = "%s/ifcfg-vhost0" % self._temp_dir_name
dst = "/etc/sysconfig/network-scripts/ifcfg-vhost0"
local("sudo mv %s %s" % (src, dst), warn_only=True)
local("sudo sync", warn_only=True)
# make ifcfg-$dev
ifcfg = "/etc/sysconfig/network-scripts/ifcfg-%s" % self.dev
ifcfg_bkp = "/etc/sysconfig/network-scripts/orig.ifcfg-%s.rpmsave"\
% self.dev
if not os.path.isfile(ifcfg_bkp):
local("sudo cp %s %s" % (ifcfg, ifcfg_bkp), warn_only=True)
ifcfg_tmp = '%s/ifcfg-%s' % (self._temp_dir_name, self.dev)
self._rewrite_ifcfg_file(ifcfg_tmp, self.dev, prsv_cfg)
if self.multi_net:
self.migrate_routes(self.dev)
local("sudo mv %s /etc/contrail/" % ifcfg_tmp, warn_only=True)
if self.pdist not in ['Ubuntu']:
local("sudo chkconfig network on", warn_only=True)
local("sudo chkconfig supervisor-vrouter on",
warn_only=True)
# end self.pdist == centos | fedora | redhat
# setup lbaas prereqs
self.setup_lbaas_prereq()
if self.pdist in ['Ubuntu']:
self._rewrite_net_interfaces_file(
self.dev, self.mac, self.vhost_ip, self.netmask,
self.gateway, self._args.vmware,
self._args.vmware_vmpg_vswitch_mtu,
self._args.vmware_datanic_mtu)
# end self.pdist == ubuntu
def run_services(self):
if self.pdist not in ['Ubuntu']:
for svc in ['supervisor-vrouter']:
local('sudo chkconfig %s on' % svc)
if self.running_in_container:
for svc in ['contrail-vrouter-agent', 'contrail-vrouter-nodemgr']:
local('sudo service %s restart' % svc)
def add_vnc_config(self):
compute_ip = self._args.self_ip
compute_hostname = socket.gethostname()
use_ssl = False
if self._args.apiserver_auth_protocol == 'https':
use_ssl = True
prov_args = "--host_name %s --host_ip %s --api_server_ip %s "\
"--oper add --admin_user %s --admin_password %s "\
"--admin_tenant_name %s --openstack_ip %s "\
"--api_server_use_ssl %s" \
% (compute_hostname, compute_ip, self._args.cfgm_ip,
self._args.keystone_admin_user,
self._args.keystone_admin_password,
self._args.keystone_admin_tenant_name,
self._args.keystone_ip,
use_ssl)
if self._args.dpdk:
prov_args += " --dpdk_enabled"
cmd = "sudo python /opt/contrail/utils/provision_vrouter.py "
local(cmd + prov_args)
def add_qos_config(self):
qos_logical_queue = self._args.qos_logical_queue
qos_queue_id_list = self._args.qos_queue_id
default_hw_queue_qos = self._args.default_hw_queue_qos
qos_priority_tagging = self._args.qos_priority_tagging
priority_id_list = self._args.priority_id
priority_scheduling = self._args.priority_scheduling
priority_bandwidth = self._args.priority_bandwidth
agent_conf = "/etc/contrail/contrail-vrouter-agent.conf"
conf_file = "contrail-vrouter-agent.conf"
configs = {}
# Clean existing qos config
ltemp_dir = tempfile.mkdtemp()
local("sudo cp %s %s/" % (agent_conf, ltemp_dir))
local(
"sudo sed -i -e '/^\[QOS\]/d' -e '/^\[QUEUE-/d' -e '/^logical_queue/d' -e '/^default_hw_queue/d' -e '/^priority_tagging/d' %s/%s" %
(ltemp_dir, conf_file))
local(
"sudo sed -i -e '/^\[QOS-NIANTIC\]/d' -e '/^\[PG-/d' -e '/^scheduling/d' -e '/^bandwidth/d' %s/%s" %
(ltemp_dir, conf_file))
local("sudo cp %s/%s %s" % (ltemp_dir, conf_file, agent_conf))
local('sudo rm -rf %s' % (ltemp_dir))
# Set qos_enabled in agent_param to false
self.set_config(
'/etc/contrail/agent_param',
sec="''",
var='qos_enabled',
val='false')
# QOS configs
if qos_queue_id_list is not None:
self.set_config(
agent_conf,
'QOS',
'priority_tagging',
qos_priority_tagging)
num_sections = len(qos_logical_queue)
if(len(qos_logical_queue) == len(qos_queue_id_list) and
default_hw_queue_qos):
num_sections = num_sections - 1
for i in range(num_sections):
configs['QUEUE-%s' % qos_queue_id_list[i]] = {
'logical_queue':
'[%s]' % qos_logical_queue[i].replace(",", ", ")}
if (default_hw_queue_qos):
if(len(qos_logical_queue) == len(qos_queue_id_list)):
logical_queue = '[%s]' %\
qos_logical_queue[-1].replace(",", ", ")
else:
logical_queue = '[ ]'
configs['QUEUE-%s' % qos_queue_id_list[-1]] = {
'default_hw_queue': 'true',
'logical_queue': logical_queue}
for section, key_vals in configs.items():
for key, val in key_vals.items():
self.set_config(
agent_conf,
section, key, val)
if priority_id_list is not None:
local(
'sudo contrail-config --set /etc/contrail/contrail-vrouter-agent.conf QOS-NIANTIC')
for i in range(len(priority_id_list)):
configs['PG-%s' % priority_id_list[i]] = {
'scheduling': priority_scheduling[i],
'bandwidth': priority_bandwidth[i]}
for section, key_vals in configs.items():
for key, val in key_vals.items():
self.set_config(
agent_conf,
section, key, val)
if (qos_queue_id_list or priority_id_list):
# Set qos_enabled in agent_param
self.set_config(
'/etc/contrail/agent_param',
sec="''",
var='qos_enabled',
val='true')
# Run qosmap script on physical interface (on all members for bond
# interface)
physical_interface = local(
"sudo openstack-config --get /etc/contrail/contrail-vrouter-agent.conf VIRTUAL-HOST-INTERFACE physical_interface")
if os.path.isdir('/sys/class/net/%s/bonding' % physical_interface):
physical_interfaces_str = local(
"sudo cat /sys/class/net/%s/bonding/slaves | tr ' ' '\n' | sort | tr '\n' ' '" %
physical_interface)
else:
physical_interfaces_str = physical_interface
local(
"cd /opt/contrail/utils; python qosmap.py --interface_list %s " %
physical_interfaces_str)
def disable_nova_compute(self):
# Check if nova-compute is present in nova service list
# Disable nova-compute on TSN node
if local("nova service-list | grep nova-compute", warn_only=True).succeeded:
# Stop the service
local("sudo service nova-compute stop", warn_only=True)
if self.pdist in ['Ubuntu']:
local('sudo echo "manual" >> /etc/init/nova-compute.override')
else:
local('sudo chkconfig nova-compute off')
def add_tsn_vnc_config(self):
tsn_ip = self._args.self_ip
self.tsn_hostname = socket.gethostname()
prov_args = "--host_name %s --host_ip %s --api_server_ip %s --oper add "\
"--admin_user %s --admin_password %s --admin_tenant_name %s "\
"--openstack_ip %s --router_type tor-service-node --disable_vhost_vmi "\
% (self.tsn_hostname, tsn_ip, self._args.cfgm_ip,
self._args.keystone_admin_user,
self._args.keystone_admin_password,
self._args.keystone_admin_tenant_name, self._args.keystone_ip)
if self._args.apiserver_auth_protocol == 'https':
prov_args += " --api_server_use_ssl True"
local(
"python /opt/contrail/utils/provision_vrouter.py %s" %
(prov_args))
def start_tsn_service(self):
nova_conf_file = '/etc/contrail/contrail-vrouter-agent.conf'
mode = 'tsn'
if self._args.tsn_evpn_mode:
mode = 'tsn-no-forwarding'
local(
"openstack-config --set %s DEFAULT agent_mode %s" %
(nova_conf_file, mode))
def setup_tsn_node(self):
self.disable_nova_compute()
self.add_tsn_vnc_config()
self.start_tsn_service()
def increase_vrouter_limit(self):
"""Increase the maximum number of mpls label
and nexthop on tsn node"""
if self._args.vrouter_module_params:
vrouter_module_params = self._args.vrouter_module_params.rstrip(
',')
vrouter_module_params_args = dict(
u.split("=") for u in
vrouter_module_params.split(","))
if self._args.dpdk:
self.dpdk_increase_vrouter_limit(
vrouter_module_params_args)
else:
cmd = "options vrouter"
if 'mpls_labels' in vrouter_module_params_args.keys():
cmd += " vr_mpls_labels=%s" % vrouter_module_params_args['mpls_labels']
if 'nexthops' in vrouter_module_params_args.keys():
cmd += " vr_nexthops=%s" % vrouter_module_params_args['nexthops']
if 'vrfs' in vrouter_module_params_args.keys():
cmd += " vr_vrfs=%s" % vrouter_module_params_args['vrfs']
if 'macs' in vrouter_module_params_args.keys():
cmd += " vr_bridge_entries=%s" % vrouter_module_params_args['macs']
if 'flow_entries' in vrouter_module_params_args.keys():
cmd += " vr_flow_entries=%s" % vrouter_module_params_args['flow_entries']
if 'oflow_entries' in vrouter_module_params_args.keys():
cmd += " vr_oflow_entries=%s" % vrouter_module_params_args['oflow_entries']
if 'mac_oentries' in vrouter_module_params_args.keys():
cmd += " vr_bridge_oentries=%s" % vrouter_module_params_args['mac_oentries']
if 'flow_hold_limit' in vrouter_module_params_args.keys():
cmd += " vr_flow_hold_limit=%s" % vrouter_module_params_args['flow_hold_limit']
if 'max_interface_entries' in vrouter_module_params_args.keys():
cmd += " vr_interfaces=%s" % vrouter_module_params_args['max_interface_entries']
if 'vrouter_dbg' in vrouter_module_params_args.keys():
cmd += " vrouter_dbg=%s" % vrouter_module_params_args['vrouter_dbg']
if 'vr_memory_alloc_checks' in vrouter_module_params_args.keys():
cmd += " vr_memory_alloc_checks=%s" % vrouter_module_params_args['vr_memory_alloc_checks']
local(
"echo %s > %s" %
(cmd, '/etc/modprobe.d/vrouter.conf'), warn_only=True)
def setup_vrouter_kmod_hugepages(self):
"""Setup 1G and 2M hugepages for vrouter"""
no_of_pages = 2
# Update vrouter kernel mode hugepage config
self.setup_vrouter_kmod_hugepage_grub()
# Delete vrouter kernel mode 1G hugepage config
if os.path.isfile('/etc/fstab'):
pattern = "hugepage_1G"
line = ""
insert_line_to_file(pattern=pattern, line=line,
file_name='/etc/fstab')
pattern = "vrouter_kmod_1G_hugepages"
line = "vrouter_kmod_1G_hugepages=0"
insert_line_to_file(pattern=pattern, line=line,
file_name='/etc/contrail/agent_param')
# Delete vrouter kernel mode 2M hugepage config
if os.path.isfile('/etc/fstab'):
pattern = "hugepage_2M"
line = ""
insert_line_to_file(pattern=pattern, line=line,
file_name='/etc/fstab')
pattern = "vrouter_kmod_2M_hugepages"
line = "vrouter_kmod_2M_hugepages=0"
insert_line_to_file(pattern=pattern, line=line,
file_name='/etc/contrail/agent_param')
# Configure vrouter kernel mode 1G hugepages
if self._args.vrouter_1G_hugepages != '0':
if int(self._args.vrouter_1G_hugepages) > 0 and int(self._args.vrouter_1G_hugepages) <= 2:
no_of_pages = int(self._args.vrouter_1G_hugepages)
mounted = local("sudo mount | grep hugepage_1G | cut -d' ' -f 3",
capture=True, warn_only=False)
if (mounted != ""):
print "hugepages already mounted on %s" % mounted
else:
local("sudo mkdir -p /mnt/hugepage_1G", warn_only=False)
local("sudo mount -t hugetlbfs -o pagesize=1G none /mnt/hugepage_1G", warn_only=False)
if os.path.isdir('/mnt/hugepage_1G'):
for i in range(no_of_pages):
local("sudo touch /mnt/hugepage_1G/vrouter_1G_mem_%s " % i, warn_only=False)
pattern = "hugepage_1G"
line = "hugetlbfs "\
"/mnt/hugepage_1G hugetlbfs defaults,pagesize=1G 0 0"
insert_line_to_file(pattern=pattern, line=line,
file_name='/etc/fstab')
pattern = "vrouter_kmod_1G_hugepages"
line = "vrouter_kmod_1G_hugepages=%s" % no_of_pages
insert_line_to_file(pattern=pattern, line=line,
file_name='/etc/contrail/agent_param')
# Configure vrouter kernel mode 2M hugepages
if self._args.vrouter_2M_hugepages != '0' and self._args.vrouter_1G_hugepages != '0':
if int(self._args.vrouter_2M_hugepages) >= 0 and int(self._args.vrouter_2M_hugepages) <= 2:
no_of_pages = int(self._args.vrouter_2M_hugepages)
mounted = local("sudo mount | grep hugepage_2M | cut -d' ' -f 3",
capture=True, warn_only=False)
if (mounted != ""):
print "hugepages already mounted on %s" % mounted
else:
local("sudo mkdir -p /mnt/hugepage_2M", warn_only=False)
local("sudo mount -t hugetlbfs -o pagesize=2M none /mnt/hugepage_2M", warn_only=False)
if os.path.isdir('/mnt/hugepage_2M'):
for i in range(no_of_pages):
local("sudo touch /mnt/hugepage_2M/vrouter_2M_mem_%s " % i, warn_only=False)
pattern = "hugepage_2M"
line = "hugetlbfs "\
"/mnt/hugepage_2M hugetlbfs defaults,pagesize=2M 0 0"
insert_line_to_file(pattern=pattern, line=line,
file_name='/etc/fstab')
pattern = "vrouter_kmod_2M_hugepages"
line = "vrouter_kmod_2M_hugepages=%s" % no_of_pages
insert_line_to_file(pattern=pattern, line=line,
file_name='/etc/contrail/agent_param')
def setup(self):
self.disable_selinux()
self.disable_iptables()
self.setup_coredump()
self.fixup_config_files()
self.increase_vrouter_limit()
self.setup_sriov_grub()
if self._args.tsn_mode or self._args.tsn_evpn_mode:
self.setup_tsn_node()
self.run_services()
else:
self.run_services()
if self._args.register and not self.reprov:
self.add_vnc_config()
| eonpatapon/contrail-controller | src/vnsw/provisioning/contrail_vrouter_provisioning/common.py | Python | apache-2.0 | 50,063 |
import re
from typing import Any, Callable, Dict, Match, Optional
from urllib.parse import urljoin
import magic
import requests
from django.conf import settings
from django.utils.encoding import smart_str
from version import ZULIP_VERSION
from zerver.lib.cache import cache_with_key, get_cache_with_key, preview_url_cache_key
from zerver.lib.outgoing_http import OutgoingSession
from zerver.lib.pysa import mark_sanitized
from zerver.lib.url_preview.oembed import get_oembed_data
from zerver.lib.url_preview.parsers import GenericParser, OpenGraphParser
# FIXME: Should we use a database cache or a memcached in production? What if
# OpenGraph data is changed for a site?
# Use an in-memory cache for development, to make it easy to develop this code
CACHE_NAME = "database" if not settings.DEVELOPMENT else "in-memory"
# Based on django.core.validators.URLValidator, with ftp support removed.
link_regex = re.compile(
r"^(?:http)s?://" # http:// or https://
r"(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|" # domain...
r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})" # ...or ip
r"(?::\d+)?" # optional port
r"(?:/?|[/?]\S+)$",
re.IGNORECASE,
)
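# Illustrative examples (not exhaustive): the pattern accepts
# "https://example.com/path?x=1" and "http://10.0.0.1:8080/", but rejects
# "ftp://example.com" and bare hostnames without a scheme.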
# Use Chrome User-Agent, since some sites refuse to work on old browsers
ZULIP_URL_PREVIEW_USER_AGENT = (
"Mozilla/5.0 (compatible; ZulipURLPreview/{version}; +{external_host})"
).format(version=ZULIP_VERSION, external_host=settings.ROOT_DOMAIN_URI)
# FIXME: This header and timeout are not used by pyoembed, when trying to autodiscover!
HEADERS = {"User-Agent": ZULIP_URL_PREVIEW_USER_AGENT}
TIMEOUT = 15
class PreviewSession(OutgoingSession):
def __init__(self) -> None:
super().__init__(role="preview", timeout=TIMEOUT, headers=HEADERS)
def is_link(url: str) -> Optional[Match[str]]:
return link_regex.match(smart_str(url))
def guess_mimetype_from_content(response: requests.Response) -> str:
mime_magic = magic.Magic(mime=True)
try:
content = next(response.iter_content(1000))
except StopIteration:
content = ""
return mime_magic.from_buffer(content)
def valid_content_type(url: str) -> bool:
try:
response = PreviewSession().get(url, stream=True)
except requests.RequestException:
return False
if not response.ok:
return False
content_type = response.headers.get("content-type")
# Be accommodating of bad servers: assume content may be html if no content-type header
if not content_type or content_type.startswith("text/html"):
# Verify that the content is actually HTML if the server claims it is
content_type = guess_mimetype_from_content(response)
return content_type.startswith("text/html")
def catch_network_errors(func: Callable[..., Any]) -> Callable[..., Any]:
def wrapper(*args: Any, **kwargs: Any) -> Any:
try:
return func(*args, **kwargs)
except requests.exceptions.RequestException:
pass
return wrapper
@catch_network_errors
@cache_with_key(preview_url_cache_key, cache_name=CACHE_NAME, with_statsd_key="urlpreview_data")
def get_link_embed_data(
url: str, maxwidth: int = 640, maxheight: int = 480
) -> Optional[Dict[str, Any]]:
if not is_link(url):
return None
if not valid_content_type(url):
return None
# We are using two different mechanisms to get the embed data
# 1. Use oEmbed data, if found, for photo and video "type" sites
# 2. Otherwise, use a combination of Open Graph tags and Meta tags
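    # The merged result is a plain dict whose shape is roughly
    # {'title': ..., 'description': ..., 'image': <absolute URL>} -- illustrative
    # only; keys are filled in below as each parser finds them.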
data = get_oembed_data(url, maxwidth=maxwidth, maxheight=maxheight) or {}
if data.get("oembed"):
return data
response = PreviewSession().get(mark_sanitized(url), stream=True)
if response.ok:
og_data = OpenGraphParser(
response.content, response.headers.get("Content-Type")
).extract_data()
for key in ["title", "description", "image"]:
if not data.get(key) and og_data.get(key):
data[key] = og_data[key]
generic_data = (
GenericParser(response.content, response.headers.get("Content-Type")).extract_data()
or {}
)
for key in ["title", "description", "image"]:
if not data.get(key) and generic_data.get(key):
data[key] = generic_data[key]
if "image" in data:
data["image"] = urljoin(response.url, data["image"])
return data
@get_cache_with_key(preview_url_cache_key, cache_name=CACHE_NAME)
def link_embed_data_from_cache(url: str, maxwidth: int = 640, maxheight: int = 480) -> Any:
return
| zulip/zulip | zerver/lib/url_preview/preview.py | Python | apache-2.0 | 4,624 |
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""ResNet56 model for Keras adapted from tf.keras.applications.ResNet50.
# Reference:
- [Deep Residual Learning for Image Recognition](
https://arxiv.org/abs/1512.03385)
Adapted from code contributed by BigMoyan.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import REDACTED
import tensorflow as tf
from REDACTED.tensorflow.python.keras import backend
from REDACTED.tensorflow.python.keras import initializers
from REDACTED.tensorflow.python.keras import layers
from REDACTED.tensorflow.python.keras import regularizers
BATCH_NORM_DECAY = 0.997
BATCH_NORM_EPSILON = 1e-5
L2_WEIGHT_DECAY = 2e-4
def identity_building_block(input_tensor,
kernel_size,
filters,
stage,
block,
training=None):
"""The identity block is the block that has no conv layer at shortcut.
Arguments:
input_tensor: input tensor
kernel_size: default 3, the kernel size of
middle conv layer at main path
filters: list of integers, the filters of 3 conv layer at main path
stage: integer, current stage label, used for generating layer names
block: current block label, used for generating layer names
training: Only used if training keras model with Estimator. In other
scenarios it is handled automatically.
Returns:
Output tensor for the block.
"""
filters1, filters2 = filters
if backend.image_data_format() == 'channels_last':
bn_axis = 3
else:
bn_axis = 1
conv_name_base = 'res' + str(stage) + block + '_branch'
bn_name_base = 'bn' + str(stage) + block + '_branch'
x = layers.Conv2D(filters1, kernel_size,
padding='same', use_bias=False,
kernel_initializer='he_normal',
kernel_regularizer=regularizers.l2(L2_WEIGHT_DECAY),
name=conv_name_base + '2a')(input_tensor)
x = layers.BatchNormalization(
axis=bn_axis, momentum=BATCH_NORM_DECAY, epsilon=BATCH_NORM_EPSILON,
name=bn_name_base + '2a')(x, training=training)
x = layers.Activation('relu')(x)
x = layers.Conv2D(filters2, kernel_size,
padding='same', use_bias=False,
kernel_initializer='he_normal',
kernel_regularizer=regularizers.l2(L2_WEIGHT_DECAY),
name=conv_name_base + '2b')(x)
x = layers.BatchNormalization(
axis=bn_axis, momentum=BATCH_NORM_DECAY, epsilon=BATCH_NORM_EPSILON,
name=bn_name_base + '2b')(x, training=training)
x = layers.add([x, input_tensor])
x = layers.Activation('relu')(x)
return x
def conv_building_block(input_tensor,
kernel_size,
filters,
stage,
block,
strides=(2, 2),
training=None):
"""A block that has a conv layer at shortcut.
Arguments:
input_tensor: input tensor
kernel_size: default 3, the kernel size of
middle conv layer at main path
filters: list of integers, the filters of 3 conv layer at main path
stage: integer, current stage label, used for generating layer names
block: current block label, used for generating layer names
strides: Strides for the first conv layer in the block.
training: Only used if training keras model with Estimator. In other
scenarios it is handled automatically.
Returns:
Output tensor for the block.
Note that from stage 3,
the first conv layer at main path is with strides=(2, 2)
And the shortcut should have strides=(2, 2) as well
"""
filters1, filters2 = filters
if tf.keras.backend.image_data_format() == 'channels_last':
bn_axis = 3
else:
bn_axis = 1
conv_name_base = 'res' + str(stage) + block + '_branch'
bn_name_base = 'bn' + str(stage) + block + '_branch'
x = layers.Conv2D(filters1, kernel_size, strides=strides,
padding='same', use_bias=False,
kernel_initializer='he_normal',
kernel_regularizer=regularizers.l2(L2_WEIGHT_DECAY),
name=conv_name_base + '2a')(input_tensor)
x = layers.BatchNormalization(
axis=bn_axis, momentum=BATCH_NORM_DECAY, epsilon=BATCH_NORM_EPSILON,
name=bn_name_base + '2a')(x, training=training)
x = layers.Activation('relu')(x)
x = layers.Conv2D(filters2, kernel_size, padding='same', use_bias=False,
kernel_initializer='he_normal',
kernel_regularizer=regularizers.l2(L2_WEIGHT_DECAY),
name=conv_name_base + '2b')(x)
x = layers.BatchNormalization(
axis=bn_axis, momentum=BATCH_NORM_DECAY, epsilon=BATCH_NORM_EPSILON,
name=bn_name_base + '2b')(x, training=training)
shortcut = layers.Conv2D(filters2, (1, 1), strides=strides, use_bias=False,
kernel_initializer='he_normal',
kernel_regularizer=regularizers.l2(L2_WEIGHT_DECAY),
name=conv_name_base + '1')(input_tensor)
shortcut = layers.BatchNormalization(
axis=bn_axis, momentum=BATCH_NORM_DECAY, epsilon=BATCH_NORM_EPSILON,
name=bn_name_base + '1')(shortcut, training=training)
x = layers.add([x, shortcut])
x = layers.Activation('relu')(x)
return x
def resnet_block(input_tensor,
size,
kernel_size,
filters,
stage,
conv_strides=(2, 2),
training=None):
"""A block which applies conv followed by multiple identity blocks.
Arguments:
input_tensor: input tensor
size: integer, number of constituent conv/identity building blocks.
A conv block is applied once, followed by (size - 1) identity blocks.
kernel_size: default 3, the kernel size of
middle conv layer at main path
filters: list of integers, the filters of 3 conv layer at main path
stage: integer, current stage label, used for generating layer names
conv_strides: Strides for the first conv layer in the block.
training: Only used if training keras model with Estimator. In other
scenarios it is handled automatically.
Returns:
Output tensor after applying conv and identity blocks.
"""
x = conv_building_block(input_tensor, kernel_size, filters, stage=stage,
strides=conv_strides, block='block_0',
training=training)
for i in range(size - 1):
x = identity_building_block(x, kernel_size, filters, stage=stage,
block='block_%d' % (i + 1), training=training)
return x
def resnet(num_blocks, classes=10, training=None):
"""Instantiates the ResNet architecture.
Arguments:
num_blocks: integer, the number of conv/identity blocks in each block.
The ResNet contains 3 blocks with each block containing one conv block
followed by (layers_per_block - 1) number of idenity blocks. Each
conv/idenity block has 2 convolutional layers. With the input
convolutional layer and the pooling layer towards the end, this brings
the total size of the network to (6*num_blocks + 2)
classes: optional number of classes to classify images into
training: Only used if training keras model with Estimator. In other
scenarios it is handled automatically.
Returns:
A Keras model instance.
"""
input_shape = (32, 32, 3)
img_input = layers.Input(shape=input_shape)
if backend.image_data_format() == 'channels_first':
x = layers.Lambda(lambda x: backend.permute_dimensions(x, (0, 3, 1, 2)),
name='transpose')(img_input)
bn_axis = 1
else: # channel_last
x = img_input
bn_axis = 3
x = layers.ZeroPadding2D(padding=(1, 1), name='conv1_pad')(x)
x = layers.Conv2D(16, (3, 3),
strides=(1, 1),
padding='valid', use_bias=False,
kernel_initializer='he_normal',
kernel_regularizer=regularizers.l2(L2_WEIGHT_DECAY),
name='conv1')(x)
x = layers.BatchNormalization(axis=bn_axis,
momentum=BATCH_NORM_DECAY,
epsilon=BATCH_NORM_EPSILON,
name='bn_conv1',)(x, training=training)
x = layers.Activation('relu')(x)
x = resnet_block(x, size=num_blocks, kernel_size=3, filters=[16, 16],
stage=2, conv_strides=(1, 1), training=training)
x = resnet_block(x, size=num_blocks, kernel_size=3, filters=[32, 32],
stage=3, conv_strides=(2, 2), training=training)
x = resnet_block(x, size=num_blocks, kernel_size=3, filters=[64, 64],
stage=4, conv_strides=(2, 2), training=training)
rm_axes = [1, 2] if backend.image_data_format() == 'channels_last' else [2, 3]
x = layers.Lambda(lambda x: backend.mean(x, rm_axes), name='reduce_mean')(x)
x = layers.Dense(classes,
activation='softmax',
kernel_initializer=initializers.RandomNormal(stddev=0.01),
kernel_regularizer=regularizers.l2(L2_WEIGHT_DECAY),
bias_regularizer=regularizers.l2(L2_WEIGHT_DECAY),
name='fc10')(x)
inputs = img_input
# Create model.
model = tf.keras.models.Model(inputs, x, name='resnet56')
return model
resnet20 = functools.partial(resnet, num_blocks=3)
resnet32 = functools.partial(resnet, num_blocks=5)
resnet56 = functools.partial(resnet, num_blocks=9)
resnet110 = functools.partial(resnet, num_blocks=18)
| mlperf/training_results_v0.7 | Google/benchmarks/bert/implementations/bert-cloud-TF2.0-tpu-v3-32/tf2_common/benchmark/models/resnet_cifar_model.py | Python | apache-2.0 | 10,418 |
import sys
try:
from pymysql.tests import base
import pymysql.cursors
from pymysql.constants import CLIENT
except Exception:
# For local testing from top-level directory, without installing
sys.path.append('../pymysql')
from pymysql.tests import base
import pymysql.cursors
from pymysql.constants import CLIENT
class TestSSCursor(base.PyMySQLTestCase):
def test_SSCursor(self):
affected_rows = 18446744073709551615
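        # 2**64 - 1: the rowcount an unbuffered (SS) cursor reports while rows
        # are still streaming, i.e. the unsigned representation of "unknown" (-1).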
conn = self.connect(client_flag=CLIENT.MULTI_STATEMENTS)
data = [
('America', '', 'America/Jamaica'),
('America', '', 'America/Los_Angeles'),
('America', '', 'America/Lima'),
('America', '', 'America/New_York'),
('America', '', 'America/Menominee'),
('America', '', 'America/Havana'),
('America', '', 'America/El_Salvador'),
('America', '', 'America/Costa_Rica'),
('America', '', 'America/Denver'),
('America', '', 'America/Detroit'),]
cursor = conn.cursor(pymysql.cursors.SSCursor)
# Create table
cursor.execute('CREATE TABLE tz_data ('
'region VARCHAR(64),'
'zone VARCHAR(64),'
'name VARCHAR(64))')
conn.begin()
# Test INSERT
for i in data:
cursor.execute('INSERT INTO tz_data VALUES (%s, %s, %s)', i)
self.assertEqual(conn.affected_rows(), 1, 'affected_rows does not match')
conn.commit()
# Test fetchone()
iter = 0
cursor.execute('SELECT * FROM tz_data')
while True:
row = cursor.fetchone()
if row is None:
break
iter += 1
# Test cursor.rowcount
self.assertEqual(cursor.rowcount, affected_rows,
'cursor.rowcount != %s' % (str(affected_rows)))
# Test cursor.rownumber
self.assertEqual(cursor.rownumber, iter,
                'cursor.rownumber != %s' % (str(iter)))
# Test row came out the same as it went in
self.assertEqual((row in data), True,
'Row not found in source data')
# Test fetchall
cursor.execute('SELECT * FROM tz_data')
self.assertEqual(len(cursor.fetchall()), len(data),
'fetchall failed. Number of rows does not match')
# Test fetchmany
cursor.execute('SELECT * FROM tz_data')
self.assertEqual(len(cursor.fetchmany(2)), 2,
'fetchmany failed. Number of rows does not match')
# So MySQLdb won't throw "Commands out of sync"
while True:
res = cursor.fetchone()
if res is None:
break
# Test update, affected_rows()
cursor.execute('UPDATE tz_data SET zone = %s', ['Foo'])
conn.commit()
self.assertEqual(cursor.rowcount, len(data),
'Update failed. affected_rows != %s' % (str(len(data))))
# Test executemany
cursor.executemany('INSERT INTO tz_data VALUES (%s, %s, %s)', data)
self.assertEqual(cursor.rowcount, len(data),
'executemany failed. cursor.rowcount != %s' % (str(len(data))))
# Test multiple datasets
cursor.execute('SELECT 1; SELECT 2; SELECT 3')
self.assertListEqual(list(cursor), [(1, )])
self.assertTrue(cursor.nextset())
self.assertListEqual(list(cursor), [(2, )])
self.assertTrue(cursor.nextset())
self.assertListEqual(list(cursor), [(3, )])
self.assertFalse(cursor.nextset())
cursor.execute('DROP TABLE IF EXISTS tz_data')
cursor.close()
__all__ = ["TestSSCursor"]
if __name__ == "__main__":
import unittest
unittest.main()
| imron/scalyr-agent-2 | scalyr_agent/third_party/pymysql/tests/test_SSCursor.py | Python | apache-2.0 | 3,766 |
# Copyright 2014 Scalyr Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------------
#
# author: Steven Czerwinski <[email protected]>
from __future__ import unicode_literals
from __future__ import absolute_import
__author__ = "[email protected]"
import unittest
from scalyr_agent.json_lib import JsonArray, JsonObject
from scalyr_agent.json_lib import JsonConversionException, JsonMissingFieldException
from scalyr_agent.test_base import ScalyrTestCase
class JsonObjectTests(ScalyrTestCase):
def test_constructor(self):
x = JsonObject(foo=5, bar=True)
self.assertEquals(x["foo"], 5)
self.assertEquals(x.get("bar"), True)
def test_get_bool(self):
x = JsonObject(foo=True, bar=False)
self.assertEquals(x.get_bool("foo"), True)
self.assertEquals(x.get_bool("bar"), False)
# Test conversion from int to bool
x = JsonObject(foo=1, bar=0)
self.assertEquals(x.get_bool("foo"), True)
self.assertEquals(x.get_bool("bar"), False)
# Test conversion from string to bool
x = JsonObject(foo="ok", bar="f", barb="false", barc="")
self.assertEquals(x.get_bool("foo"), True)
self.assertEquals(x.get_bool("bar"), False)
self.assertEquals(x.get_bool("barb"), False)
self.assertEquals(x.get_bool("barc"), False)
# Test that bad numbers raise an exception
x = JsonObject(foo=5)
self.assertRaises(JsonConversionException, x.get_bool, "foo")
# Test the default value is returned if field is missing.
self.assertEquals(x.get_bool("none", default_value=True), True)
# Test returns none if missing.
self.assertEquals(x.get_bool("none", none_if_missing=True), None)
# Raise an exception when field is missing.
self.assertRaises(JsonMissingFieldException, x.get_bool, "none")
def test_get_int(self):
x = JsonObject(foo=5)
self.assertEquals(x.get_int("foo"), 5)
x = JsonObject(foo=5)
self.assertEquals(x.get_int("foo"), 5)
x = JsonObject(foo=5.21)
self.assertEquals(x.get_int("foo"), 5)
x = JsonObject(foo="5")
self.assertEquals(x.get_int("foo"), 5)
x = JsonObject(foo="5.2")
self.assertEquals(x.get_int("foo"), 5)
# Test that bad strings raise an exception
x = JsonObject(foo="fhi3")
self.assertRaises(JsonConversionException, x.get_int, "foo")
# Test the default value is returned if field is missing.
self.assertEquals(x.get_int("none", default_value=5), 5)
# Test returns none if missing.
self.assertEquals(x.get_int("none", none_if_missing=True), None)
# Raise an exception when field is missing.
self.assertRaises(JsonMissingFieldException, x.get_int, "none")
def test_get_long(self):
x = JsonObject(foo=5)
self.assertEquals(x.get_long("foo"), 5)
x = JsonObject(foo=5)
self.assertEquals(x.get_long("foo"), 5)
x = JsonObject(foo=5.21)
self.assertEquals(x.get_long("foo"), 5)
x = JsonObject(foo="5")
self.assertEquals(x.get_long("foo"), 5)
x = JsonObject(foo="5.2")
self.assertEquals(x.get_long("foo"), 5)
# Test that bad strings raise an exception
x = JsonObject(foo="fhi3")
self.assertRaises(JsonConversionException, x.get_long, "foo")
# Test the default value is returned if field is missing.
self.assertEquals(x.get_long("none", default_value=5), 5)
# Test returns none if missing.
self.assertEquals(x.get_long("none", none_if_missing=True), None)
# Raise an exception when field is missing.
self.assertRaises(JsonMissingFieldException, x.get_long, "none")
def test_get_float(self):
x = JsonObject(foo=5.2, bar=True)
self.assertEquals(x.get_float("foo"), 5.2)
x = JsonObject(foo="5.2", bar=True)
self.assertEquals(x.get_float("foo"), 5.2)
# Test that bad strings raise an exception
x = JsonObject(foo="fhi3")
self.assertRaises(JsonConversionException, x.get_float, "foo")
# Test the default value is returned if field is missing.
self.assertEquals(x.get_long("none", default_value=5.2), 5.2)
# Test returns none if missing.
self.assertEquals(x.get_long("none", none_if_missing=True), None)
# Raise an exception when field is missing.
self.assertRaises(JsonMissingFieldException, x.get_long, "none")
def test_get_string(self):
x = JsonObject(foo="hi")
self.assertEquals(x.get_string("foo"), "hi")
x = JsonObject(foo=1)
self.assertEquals(x.get_string("foo"), "1")
# Test the default value is returned if field is missing.
self.assertEquals(x.get_string("none", default_value="ok"), "ok")
# Test returns none if missing.
self.assertEquals(x.get_string("none", none_if_missing=True), None)
# Raise an exception when field is missing.
self.assertRaises(JsonMissingFieldException, x.get_string, "none")
def test_get_json_object(self):
x = JsonObject(foo=5, bar=True)
y = JsonObject(bar=x)
self.assertTrue(y.get_json_object("bar") == x)
# Test the default value is returned if field is missing.
self.assertTrue(x.get_json_object("none", default_value=x) == x)
# Test returns none if missing.
self.assertEquals(x.get_json_object("none", none_if_missing=True), None)
# Raise an exception when field is missing.
self.assertRaises(JsonMissingFieldException, y.get_json_object, "none")
# Raise an exception if field is not JsonObject
self.assertRaises(JsonConversionException, x.get_json_object, "foo")
def test_get_or_create_json_object(self):
x = JsonObject(foo=5, bar=True)
y = JsonObject(bar=x)
self.assertTrue(y.get_or_create_json_object("bar") == x)
self.assertEquals(len(y.get_or_create_json_object("foo")), 0)
def test_json_array_conversion(self):
JsonObject(foo=5, bar=True)
def test_equality(self):
x = JsonObject(foo="a", bar=10)
y = JsonObject(foo="a", bar=10)
z = JsonObject(foo="a", bar=10, zar=True)
self.assertEquals(x, y)
self.assertNotEquals(x, z)
self.assertNotEquals(y, z)
def test_keys(self):
x = JsonObject(foo="a", bar=10)
keys = list(x.keys())
self.assertEquals(len(keys), 2)
self.assertTrue(keys[0] == "foo" or keys[0] == "bar")
self.assertTrue(keys[1] == "foo" or keys[1] == "bar")
def test_contains(self):
x = JsonObject(foo="a", bar=10)
self.assertTrue("foo" in x)
self.assertFalse("baz" in x)
def test_iter(self):
x = JsonObject(foo="a", bar=10)
keys = []
for key in x:
keys.append(key)
self.assertEquals(len(keys), 2)
self.assertTrue("foo" in keys)
self.assertTrue("bar" in keys)
def test_to_dict(self):
x = JsonObject(foo="a", bar=10)
self.assertEquals(dict(foo="a", bar=10), x.to_dict())
x = JsonObject(foo=JsonObject(bee=1), bar=10)
self.assertEquals(dict(foo=dict(bee=1), bar=10), x.to_dict())
x = JsonObject(foo=dict(bee=1, boo=JsonObject(hi=True)), bar=10)
self.assertEquals(dict(foo=dict(bee=1, boo=dict(hi=True)), bar=10), x.to_dict())
x = JsonObject(foo=JsonArray(1, 2, 3), bar=10)
self.assertEquals(dict(foo=[1, 2, 3], bar=10), x.to_dict())
x = JsonObject(foo=[1, 2, JsonObject(foo=5)], bar=10)
self.assertEquals(dict(foo=[1, 2, dict(foo=5)], bar=10), x.to_dict())
class JsonArrayTests(ScalyrTestCase):
def test_constructor(self):
x = JsonArray("hi", True)
self.assertEquals(len(x), 2)
self.assertEquals(x[0], "hi")
self.assertEquals(x[1], True)
def test_get_json_object(self):
y = JsonObject(foo=True)
x = JsonArray(y, "Not an object")
self.assertEquals(len(x), 2)
self.assertTrue(x.get_json_object(0) == y)
self.assertRaises(JsonConversionException, x.get_json_object, 1)
def test_iter(self):
y = JsonObject(foo=True)
x = JsonArray(y, "Not an object")
z = []
for element in x:
z.append(element)
self.assertEquals(len(z), 2)
self.assertTrue(x[0] == z[0])
self.assertTrue(x[1] == z[1])
def test_json_objects(self):
y = JsonObject(foo=True)
x = JsonArray(y)
z = []
for element in x.json_objects():
z.append(element)
self.assertEquals(len(z), 1)
self.assertTrue(x[0] == z[0])
def test_set_item(self):
x = JsonArray("bye", 3)
x[0] = "hi"
self.assertEquals(x[0], "hi")
self.assertRaises(IndexError, x.__setitem__, 5, "foo")
def test_equals(self):
x = JsonArray(1, 2)
y = JsonArray(1, 2)
z = JsonArray(3, 4)
self.assertEquals(x, y)
self.assertNotEqual(x, z)
self.assertNotEqual(y, z)
def main():
unittest.main()
if __name__ == "__main__":
main()
| imron/scalyr-agent-2 | tests/unit/json_lib/objects_test.py | Python | apache-2.0 | 9,833 |
#!/usr/bin/env python
bob2 = {'name': {'first': 'Bob', 'last': 'Smith'},
'age': 42,
'job': ['software', 'writing'],
# 'pay': [ 40000, 50000]}
'pay': ( 40000, 50000)}
print bob2
print(bob2['name'])
print(bob2['name']['first'])
print(bob2['name']['last'])
print(bob2['job'][0])
print(bob2['job'][1])
print(bob2['pay'][0])
print(bob2['pay'][1])
for job in bob2['job']:
print(job)
print '#' * 10
print bob2['job'][-1]
print bob2['job'][-2]
bob2['job'].append('janitor')
print '#' * 10
for job in bob2['job']:
print(job)
| lichengshuang/createvhost | python/others/Preview/p26_1.py | Python | apache-2.0 | 596 |
import subprocess
class Printer( object ):
def __init__(self, name, flags=None, options=None):
self.name = name
if flags:
self.flags = flags
else:
            self.flags = {}
if options:
self.options = options
else:
self.options = []
def __str__(self):
ret = 'Printer: ' + self.name + '\n'
ret += 'With the call of: '
for flag in self.flags.keys():
ret += '-{0} {1} '.format(flag,self.flags[flag])
for op in self.options:
o = str(op)
if o != '':
ret += o + ' '
return ret
def setFlag(self,flag,value):
if flag == 'd':
return False
try:
self.flags[flag] = value
except:
return False
return True
def getFlag(self,flag):
try:
return self.flags[flag]
except:
return False
def addOption(self,new_op):
for i,op in enumerate(self.options):
if op.name == new_op.name:
self.options[i] = new_op
return True
        self.options.append(new_op)
        return True
def getOption(self,name):
for op in self.options:
if op.name == name:
return op
return False
def __call__(self,item):
self.sendPrint(item)
def sendPrint(self,item):
#command = ['lp','-d',self.name]
command = ['/usr/bin/lp']
for flag in self.flags.keys():
command.append('-{0} {1}'.format(flag,self.flags[flag]))
for op in self.options:
o = str(op)
if o != '':
command.append(str(op))
print command
p = subprocess.Popen(command,stdout=subprocess.PIPE,stdin=subprocess.PIPE)
#outs = p.communicate(input=item)[0]
p.stdin.write(item)
outs = p.communicate()
print outs
class Option( object ):
def __init__(self,name,options,default=None,human_name=None):
self.name = name
self.options = options
self.human_name = human_name
if default:
self.default = default
else:
self.default = self.options[0]
def __str__(self):
if self.default:
return '-o{0}={1} '.format(self.name,self.default)
return ''
def setDefault(self,op):
self.default = op
return True
def listPrinters():
lpsc = subprocess.Popen(['lpstat','-s'],stdout=subprocess.PIPE)
lpstats = lpsc.communicate()[0]
lpsplit = lpstats.split('\n')[1:-1]
printers = []
for p in lpsplit:
printers.append(p.split()[2:4])
return printers
def listOptions(printer):
lpop = subprocess.Popen(['lpoptions','-p',printer,'-l'],stdout=subprocess.PIPE)
lpout = lpop.communicate()[0].split('\n')[:-1]
ops = []
for line in lpout:
name, values = line.split(':')
human_name = name[name.index('/')+1:]
name = name[:name.index('/')]
valuelist = values.split(' ')
for i,v in enumerate(valuelist):
if '*' in v:
valuelist[i] = valuelist[i].replace('*','')
ops.append(Option(name,valuelist,None,human_name))
return ops
def getRicoh():
ops = listOptions('ricoh-double')
prin = Printer('ricoh-double',{'U':'tester','t':'testPrint.pdf'},ops)
op = prin.getOption('ColorModel')
op.setDefault('Gray')
prin.addOption(op)
op = prin.getOption('Duplex')
op.setDefault('DuplexNoTumble')
prin.addOption(op)
op = prin.getOption('JobType')
op.setDefault('LockedPrint')
prin.addOption(op)
op = prin.getOption('LockedPrintPassword')
op.setDefault('1234')
prin.addOption(op)
return prin
if __name__ == '__main__':
r = getRicoh()
print r
r(open('printing.py','r').read())
#To the King!
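# Illustrative usage of the discovery helpers above (added; not exercised by
# the original __main__ block). Printer names and their options depend entirely
# on the local CUPS configuration, so the 'ricoh-double' queue name below is
# only an assumption.
#
#     for printer_entry in listPrinters():
#         print printer_entry
#     for op in listOptions('ricoh-double'):
#         print op.human_name, op.options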
| roycem90/python-o365 | examples/EmailPrinting/printing.py | Python | apache-2.0 | 3,325 |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for the experimental input pipeline ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import gzip
import os
import zlib
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.data.ops import iterator_ops
from tensorflow.python.data.ops import readers
from tensorflow.python.eager import context
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.lib.io import python_io
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_dataset_ops
from tensorflow.python.ops import io_ops
from tensorflow.python.ops import parsing_ops
from tensorflow.python.platform import test
from tensorflow.python.util import compat
try:
import psutil # pylint: disable=g-import-not-at-top
psutil_import_succeeded = True
except ImportError:
psutil_import_succeeded = False
class TextLineDatasetTest(test.TestCase):
def _lineText(self, f, l):
return compat.as_bytes("%d: %d" % (f, l))
def _createFiles(self,
num_files,
num_lines,
crlf=False,
compression_type=None):
filenames = []
for i in range(num_files):
fn = os.path.join(self.get_temp_dir(), "text_line.%d.txt" % i)
filenames.append(fn)
contents = []
for j in range(num_lines):
contents.append(self._lineText(i, j))
        # Always include a newline after the record unless it is the last
        # record in the file; file 0 keeps its trailing newline so that files
        # both with and without a final newline are exercised.
if j + 1 != num_lines or i == 0:
contents.append(b"\r\n" if crlf else b"\n")
contents = b"".join(contents)
if not compression_type:
with open(fn, "wb") as f:
f.write(contents)
elif compression_type == "GZIP":
with gzip.GzipFile(fn, "wb") as f:
f.write(contents)
elif compression_type == "ZLIB":
contents = zlib.compress(contents)
with open(fn, "wb") as f:
f.write(contents)
else:
raise ValueError("Unsupported compression_type", compression_type)
return filenames
def _testTextLineDataset(self, compression_type=None):
test_filenames = self._createFiles(
2, 5, crlf=True, compression_type=compression_type)
filenames = array_ops.placeholder(dtypes.string, shape=[None])
num_epochs = array_ops.placeholder(dtypes.int64, shape=[])
batch_size = array_ops.placeholder(dtypes.int64, shape=[])
repeat_dataset = readers.TextLineDataset(
filenames, compression_type=compression_type).repeat(num_epochs)
batch_dataset = repeat_dataset.batch(batch_size)
iterator = iterator_ops.Iterator.from_structure(batch_dataset.output_types)
init_op = iterator.make_initializer(repeat_dataset)
init_batch_op = iterator.make_initializer(batch_dataset)
get_next = iterator.get_next()
with self.cached_session() as sess:
# Basic test: read from file 0.
sess.run(
init_op, feed_dict={filenames: [test_filenames[0]],
num_epochs: 1})
for i in range(5):
self.assertEqual(self._lineText(0, i), sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
# Basic test: read from file 1.
sess.run(
init_op, feed_dict={filenames: [test_filenames[1]],
num_epochs: 1})
for i in range(5):
self.assertEqual(self._lineText(1, i), sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
# Basic test: read from both files.
sess.run(init_op, feed_dict={filenames: test_filenames, num_epochs: 1})
for j in range(2):
for i in range(5):
self.assertEqual(self._lineText(j, i), sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
# Test repeated iteration through both files.
sess.run(init_op, feed_dict={filenames: test_filenames, num_epochs: 10})
for _ in range(10):
for j in range(2):
for i in range(5):
self.assertEqual(self._lineText(j, i), sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
# Test batched and repeated iteration through both files.
sess.run(
init_batch_op,
feed_dict={filenames: test_filenames,
num_epochs: 10,
batch_size: 5})
for _ in range(10):
self.assertAllEqual([self._lineText(0, i) for i in range(5)],
sess.run(get_next))
self.assertAllEqual([self._lineText(1, i) for i in range(5)],
sess.run(get_next))
def testTextLineDatasetNoCompression(self):
self._testTextLineDataset()
def testTextLineDatasetGzipCompression(self):
self._testTextLineDataset(compression_type="GZIP")
def testTextLineDatasetZlibCompression(self):
self._testTextLineDataset(compression_type="ZLIB")
def testTextLineDatasetBuffering(self):
test_filenames = self._createFiles(2, 5, crlf=True)
repeat_dataset = readers.TextLineDataset(test_filenames, buffer_size=10)
iterator = repeat_dataset.make_one_shot_iterator()
with self.cached_session() as sess:
for j in range(2):
for i in range(5):
self.assertEqual(self._lineText(j, i), sess.run(iterator.get_next()))
with self.assertRaises(errors.OutOfRangeError):
sess.run(iterator.get_next())
def testIteratorResourceCleanup(self):
filename = os.path.join(self.get_temp_dir(), "text.txt")
with open(filename, "wt") as f:
for i in range(3):
f.write("%d\n" % (i,))
with context.eager_mode():
first_iterator = iter(readers.TextLineDataset(filename))
self.assertEqual(b"0", next(first_iterator).numpy())
second_iterator = iter(readers.TextLineDataset(filename))
self.assertEqual(b"0", next(second_iterator).numpy())
# Eager kernel caching is based on op attributes, which includes the
# Dataset's output shape. Create a different kernel to test that they
# don't create resources with the same names.
different_kernel_iterator = iter(
readers.TextLineDataset(filename).repeat().batch(16))
self.assertEqual([16], next(different_kernel_iterator).shape)
# Remove our references to the Python Iterator objects, which (assuming no
# reference cycles) is enough to trigger DestroyResourceOp and close the
# partially-read files.
del first_iterator
del second_iterator
del different_kernel_iterator
if not psutil_import_succeeded:
self.skipTest(
"psutil is required to check that we've closed our files.")
open_files = psutil.Process().open_files()
self.assertNotIn(filename, [open_file.path for open_file in open_files])
class FixedLengthRecordReaderTest(test.TestCase):
def setUp(self):
super(FixedLengthRecordReaderTest, self).setUp()
self._num_files = 2
self._num_records = 7
self._header_bytes = 5
self._record_bytes = 3
self._footer_bytes = 2
def _record(self, f, r):
return compat.as_bytes(str(f * 2 + r) * self._record_bytes)
def _createFiles(self):
filenames = []
for i in range(self._num_files):
fn = os.path.join(self.get_temp_dir(), "fixed_length_record.%d.txt" % i)
filenames.append(fn)
with open(fn, "wb") as f:
f.write(b"H" * self._header_bytes)
for j in range(self._num_records):
f.write(self._record(i, j))
f.write(b"F" * self._footer_bytes)
return filenames
def testFixedLengthRecordDataset(self):
test_filenames = self._createFiles()
filenames = array_ops.placeholder(dtypes.string, shape=[None])
num_epochs = array_ops.placeholder(dtypes.int64, shape=[])
batch_size = array_ops.placeholder(dtypes.int64, shape=[])
repeat_dataset = (readers.FixedLengthRecordDataset(
filenames, self._record_bytes, self._header_bytes, self._footer_bytes)
.repeat(num_epochs))
batch_dataset = repeat_dataset.batch(batch_size)
iterator = iterator_ops.Iterator.from_structure(batch_dataset.output_types)
init_op = iterator.make_initializer(repeat_dataset)
init_batch_op = iterator.make_initializer(batch_dataset)
get_next = iterator.get_next()
with self.cached_session() as sess:
# Basic test: read from file 0.
sess.run(
init_op, feed_dict={filenames: [test_filenames[0]],
num_epochs: 1})
for i in range(self._num_records):
self.assertEqual(self._record(0, i), sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
# Basic test: read from file 1.
sess.run(
init_op, feed_dict={filenames: [test_filenames[1]],
num_epochs: 1})
for i in range(self._num_records):
self.assertEqual(self._record(1, i), sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
# Basic test: read from both files.
sess.run(init_op, feed_dict={filenames: test_filenames, num_epochs: 1})
for j in range(self._num_files):
for i in range(self._num_records):
self.assertEqual(self._record(j, i), sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
# Test repeated iteration through both files.
sess.run(init_op, feed_dict={filenames: test_filenames, num_epochs: 10})
for _ in range(10):
for j in range(self._num_files):
for i in range(self._num_records):
self.assertEqual(self._record(j, i), sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
# Test batched and repeated iteration through both files.
sess.run(
init_batch_op,
feed_dict={
filenames: test_filenames,
num_epochs: 10,
batch_size: self._num_records
})
for _ in range(10):
for j in range(self._num_files):
self.assertAllEqual(
[self._record(j, i) for i in range(self._num_records)],
sess.run(get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
def testFixedLengthRecordDatasetBuffering(self):
test_filenames = self._createFiles()
dataset = readers.FixedLengthRecordDataset(
test_filenames,
self._record_bytes,
self._header_bytes,
self._footer_bytes,
buffer_size=10)
iterator = dataset.make_one_shot_iterator()
with self.cached_session() as sess:
for j in range(self._num_files):
for i in range(self._num_records):
self.assertEqual(self._record(j, i), sess.run(iterator.get_next()))
with self.assertRaises(errors.OutOfRangeError):
sess.run(iterator.get_next())
def testFixedLengthRecordDatasetWrongSize(self):
test_filenames = self._createFiles()
dataset = readers.FixedLengthRecordDataset(
test_filenames,
self._record_bytes + 1, # Incorrect record length.
self._header_bytes,
self._footer_bytes,
buffer_size=10)
iterator = dataset.make_one_shot_iterator()
with self.cached_session() as sess:
with self.assertRaisesRegexp(
errors.InvalidArgumentError,
r"Excluding the header \(5 bytes\) and footer \(2 bytes\), input "
r"file \".*fixed_length_record.0.txt\" has body length 21 bytes, "
r"which is not an exact multiple of the record length \(4 bytes\)."):
sess.run(iterator.get_next())
def _iterator_checkpoint_path(self):
return os.path.join(self.get_temp_dir(), "iterator")
def _save_op(self, iterator_resource):
iterator_state_variant = gen_dataset_ops.serialize_iterator(
iterator_resource)
save_op = io_ops.write_file(
self._iterator_checkpoint_path(),
parsing_ops.serialize_tensor(iterator_state_variant))
return save_op
def _restore_op(self, iterator_resource):
iterator_state_variant = parsing_ops.parse_tensor(
io_ops.read_file(self._iterator_checkpoint_path()), dtypes.variant)
restore_op = gen_dataset_ops.deserialize_iterator(iterator_resource,
iterator_state_variant)
return restore_op
def _build_iterator_graph(self, num_epochs):
filenames = self._createFiles()
dataset = (readers.FixedLengthRecordDataset(
filenames, self._record_bytes, self._header_bytes, self._footer_bytes)
.repeat(num_epochs))
iterator = dataset.make_initializable_iterator()
init_op = iterator.initializer
get_next_op = iterator.get_next()
save_op = self._save_op(iterator._iterator_resource)
restore_op = self._restore_op(iterator._iterator_resource)
return init_op, get_next_op, save_op, restore_op
def _restore_iterator(self):
output_types = dtypes.string
output_shapes = tensor_shape.scalar()
iterator = iterator_ops.Iterator.from_structure(output_types, output_shapes)
get_next = iterator.get_next()
restore_op = self._restore_op(iterator._iterator_resource)
return restore_op, get_next
def testSaveRestore(self):
num_epochs = 10
epoch_break = 5
file_break = self._num_files // 2
record_break = self._num_records // 2
with ops.Graph().as_default() as g:
init_op, get_next_op, save_op, restore_op = self._build_iterator_graph(
num_epochs=num_epochs)
with self.session(graph=g) as sess:
sess.run(init_op)
# Note: There is no checkpoint saved currently so a NotFoundError is
# raised.
with self.assertRaises(errors.NotFoundError):
sess.run(restore_op)
for epoch in range(num_epochs):
for f in range(self._num_files):
for r in range(self._num_records):
if (epoch == epoch_break and f == file_break and
r == record_break):
sess.run(save_op)
break
self.assertEqual(self._record(f, r), sess.run(get_next_op))
else:
continue
break
else:
continue
break
else:
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next_op)
with ops.Graph().as_default() as g:
init_op, get_next_op, save_op, restore_op = self._build_iterator_graph(
num_epochs=num_epochs)
with self.session(graph=g) as sess:
sess.run(restore_op)
for epoch in range(num_epochs):
for f in range(self._num_files):
for r in range(self._num_records):
if (epoch < epoch_break or
(epoch == epoch_break and f < file_break) or
(epoch == epoch_break and f == file_break and
r < record_break)):
continue
self.assertEqual(self._record(f, r), sess.run(get_next_op))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next_op)
def testInitThenRestore(self):
# Note: Calling init_op before restore_op is redundant. This test just makes
# sure we do not fail if restore is called on an already initialized
# iterator resource.
num_epochs = 10
epoch_break = 5
file_break = self._num_files // 2
record_break = self._num_records // 2
with ops.Graph().as_default() as g:
init_op, get_next_op, save_op, restore_op = self._build_iterator_graph(
num_epochs=num_epochs)
with self.session(graph=g) as sess:
sess.run(init_op)
# Note: There is no checkpoint saved currently so a NotFoundError is
# raised.
with self.assertRaises(errors.NotFoundError):
sess.run(restore_op)
for epoch in range(num_epochs):
for f in range(self._num_files):
for r in range(self._num_records):
if (epoch == epoch_break and f == file_break and
r == record_break):
sess.run(save_op)
break
self.assertEqual(self._record(f, r), sess.run(get_next_op))
else:
continue
break
else:
continue
break
else:
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next_op)
with ops.Graph().as_default() as g:
init_op, get_next_op, save_op, restore_op = self._build_iterator_graph(
num_epochs=num_epochs)
with self.session(graph=g) as sess:
sess.run(init_op)
sess.run(restore_op)
for epoch in range(num_epochs):
for f in range(self._num_files):
for r in range(self._num_records):
if (epoch < epoch_break or
(epoch == epoch_break and f < file_break) or
(epoch == epoch_break and f == file_break and
r < record_break)):
continue
self.assertEqual(self._record(f, r), sess.run(get_next_op))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next_op)
def testRestoreInModifiedGraph(self):
num_epochs = 10
num_epochs_1 = 20
epoch_break = 5
file_break = self._num_files // 2
record_break = self._num_records // 2
with ops.Graph().as_default() as g:
init_op, get_next_op, save_op, restore_op = self._build_iterator_graph(
num_epochs=num_epochs)
with self.session(graph=g) as sess:
sess.run(init_op)
# Note: There is no checkpoint saved currently so a NotFoundError is
# raised.
with self.assertRaises(errors.NotFoundError):
sess.run(restore_op)
for epoch in range(num_epochs):
for f in range(self._num_files):
for r in range(self._num_records):
if (epoch == epoch_break and f == file_break and
r == record_break):
sess.run(save_op)
break
self.assertEqual(self._record(f, r), sess.run(get_next_op))
else:
continue
break
else:
continue
break
else:
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next_op)
with ops.Graph().as_default() as g:
init_op, get_next_op, save_op, restore_op = self._build_iterator_graph(
num_epochs=num_epochs_1)
with self.session(graph=g) as sess:
sess.run(restore_op)
for epoch in range(num_epochs):
for f in range(self._num_files):
for r in range(self._num_records):
if (epoch < epoch_break or
(epoch == epoch_break and f < file_break) or
(epoch == epoch_break and f == file_break and
r < record_break)):
continue
self.assertEqual(self._record(f, r), sess.run(get_next_op))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next_op)
def testRestoreWithoutBuildingDatasetGraph(self):
num_epochs = 10
epoch_break = 5
file_break = self._num_files // 2
record_break = self._num_records // 2
with ops.Graph().as_default() as g:
init_op, get_next_op, save_op, restore_op = self._build_iterator_graph(
num_epochs=num_epochs)
with self.session(graph=g) as sess:
sess.run(init_op)
# Note: There is no checkpoint saved currently so a NotFoundError is
# raised.
with self.assertRaises(errors.NotFoundError):
sess.run(restore_op)
for epoch in range(num_epochs):
for f in range(self._num_files):
for r in range(self._num_records):
if (epoch == epoch_break and f == file_break and
r == record_break):
sess.run(save_op)
break
self.assertEqual(self._record(f, r), sess.run(get_next_op))
else:
continue
break
else:
continue
break
else:
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next_op)
with ops.Graph().as_default() as g:
restore_op, get_next_op = self._restore_iterator()
with self.session(graph=g) as sess:
sess.run(restore_op)
for epoch in range(num_epochs):
for f in range(self._num_files):
for r in range(self._num_records):
if (epoch < epoch_break or
(epoch == epoch_break and f < file_break) or
(epoch == epoch_break and f == file_break and
r < record_break)):
continue
self.assertEqual(self._record(f, r), sess.run(get_next_op))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next_op)
def testRestoreUnusedIterator(self):
num_epochs = 10
with ops.Graph().as_default() as g:
init_op, get_next_op, save_op, restore_op = self._build_iterator_graph(
num_epochs=num_epochs)
with self.session(graph=g) as sess:
sess.run(init_op)
# Note: There is no checkpoint saved currently so a NotFoundError is
# raised.
with self.assertRaises(errors.NotFoundError):
sess.run(restore_op)
# Save unused iterator.
sess.run(save_op)
with ops.Graph().as_default() as g:
init_op, get_next_op, save_op, restore_op = self._build_iterator_graph(
num_epochs=num_epochs)
with self.session(graph=g) as sess:
sess.run(restore_op)
for _ in range(num_epochs * self._num_files * self._num_records):
sess.run(get_next_op)
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next_op)
def testRestoreExhaustedIterator(self):
num_epochs = 10
with ops.Graph().as_default() as g:
init_op, get_next_op, save_op, restore_op = self._build_iterator_graph(
num_epochs=num_epochs)
with self.session(graph=g) as sess:
sess.run(init_op)
# Note: There is no checkpoint saved currently so a NotFoundError is
# raised.
with self.assertRaises(errors.NotFoundError):
sess.run(restore_op)
for _ in range(num_epochs):
for f in range(self._num_files):
for r in range(self._num_records):
self.assertEqual(self._record(f, r), sess.run(get_next_op))
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next_op)
sess.run(save_op)
with ops.Graph().as_default() as g:
init_op, get_next_op, save_op, restore_op = self._build_iterator_graph(
num_epochs=num_epochs)
with self.session(graph=g) as sess:
sess.run(restore_op)
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next_op)
class TFRecordDatasetTest(test.TestCase):
def setUp(self):
super(TFRecordDatasetTest, self).setUp()
self._num_files = 2
self._num_records = 7
self.test_filenames = self._createFiles()
self.filenames = array_ops.placeholder(dtypes.string, shape=[None])
self.num_epochs = array_ops.placeholder_with_default(
constant_op.constant(1, dtypes.int64), shape=[])
self.compression_type = array_ops.placeholder_with_default("", shape=[])
self.batch_size = array_ops.placeholder(dtypes.int64, shape=[])
repeat_dataset = readers.TFRecordDataset(self.filenames,
self.compression_type).repeat(
self.num_epochs)
batch_dataset = repeat_dataset.batch(self.batch_size)
iterator = iterator_ops.Iterator.from_structure(batch_dataset.output_types)
self.init_op = iterator.make_initializer(repeat_dataset)
self.init_batch_op = iterator.make_initializer(batch_dataset)
self.get_next = iterator.get_next()
def _record(self, f, r):
return compat.as_bytes("Record %d of file %d" % (r, f))
def _createFiles(self):
filenames = []
for i in range(self._num_files):
fn = os.path.join(self.get_temp_dir(), "tf_record.%d.txt" % i)
filenames.append(fn)
writer = python_io.TFRecordWriter(fn)
for j in range(self._num_records):
writer.write(self._record(i, j))
writer.close()
return filenames
def testReadOneEpoch(self):
with self.cached_session() as sess:
# Basic test: read from file 0.
sess.run(
self.init_op,
feed_dict={
self.filenames: [self.test_filenames[0]],
self.num_epochs: 1
})
for i in range(self._num_records):
self.assertAllEqual(self._record(0, i), sess.run(self.get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(self.get_next)
# Basic test: read from file 1.
sess.run(
self.init_op,
feed_dict={
self.filenames: [self.test_filenames[1]],
self.num_epochs: 1
})
for i in range(self._num_records):
self.assertAllEqual(self._record(1, i), sess.run(self.get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(self.get_next)
# Basic test: read from both files.
sess.run(
self.init_op,
feed_dict={self.filenames: self.test_filenames,
self.num_epochs: 1})
for j in range(self._num_files):
for i in range(self._num_records):
self.assertAllEqual(self._record(j, i), sess.run(self.get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(self.get_next)
def testReadTenEpochs(self):
with self.cached_session() as sess:
sess.run(
self.init_op,
feed_dict={self.filenames: self.test_filenames,
self.num_epochs: 10})
for _ in range(10):
for j in range(self._num_files):
for i in range(self._num_records):
self.assertAllEqual(self._record(j, i), sess.run(self.get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(self.get_next)
def testReadTenEpochsOfBatches(self):
with self.cached_session() as sess:
sess.run(
self.init_batch_op,
feed_dict={
self.filenames: self.test_filenames,
self.num_epochs: 10,
self.batch_size: self._num_records
})
for _ in range(10):
for j in range(self._num_files):
values = sess.run(self.get_next)
self.assertAllEqual(
[self._record(j, i) for i in range(self._num_records)], values)
with self.assertRaises(errors.OutOfRangeError):
sess.run(self.get_next)
def testReadZlibFiles(self):
zlib_files = []
for i, fn in enumerate(self.test_filenames):
with open(fn, "rb") as f:
cdata = zlib.compress(f.read())
zfn = os.path.join(self.get_temp_dir(), "tfrecord_%s.z" % i)
with open(zfn, "wb") as f:
f.write(cdata)
zlib_files.append(zfn)
with self.cached_session() as sess:
sess.run(
self.init_op,
feed_dict={self.filenames: zlib_files,
self.compression_type: "ZLIB"})
for j in range(self._num_files):
for i in range(self._num_records):
self.assertAllEqual(self._record(j, i), sess.run(self.get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(self.get_next)
def testReadGzipFiles(self):
gzip_files = []
for i, fn in enumerate(self.test_filenames):
with open(fn, "rb") as f:
gzfn = os.path.join(self.get_temp_dir(), "tfrecord_%s.gz" % i)
with gzip.GzipFile(gzfn, "wb") as gzf:
gzf.write(f.read())
gzip_files.append(gzfn)
with self.cached_session() as sess:
sess.run(
self.init_op,
feed_dict={self.filenames: gzip_files,
self.compression_type: "GZIP"})
for j in range(self._num_files):
for i in range(self._num_records):
self.assertAllEqual(self._record(j, i), sess.run(self.get_next))
with self.assertRaises(errors.OutOfRangeError):
sess.run(self.get_next)
def testReadWithBuffer(self):
one_mebibyte = 2**20
d = readers.TFRecordDataset(self.test_filenames, buffer_size=one_mebibyte)
iterator = d.make_one_shot_iterator()
next_element = iterator.get_next()
with self.cached_session() as sess:
for j in range(self._num_files):
for i in range(self._num_records):
self.assertAllEqual(self._record(j, i), sess.run(next_element))
with self.assertRaises(errors.OutOfRangeError):
sess.run(next_element)
def testReadFromDatasetOfFiles(self):
files = dataset_ops.Dataset.from_tensor_slices(self.test_filenames)
d = readers.TFRecordDataset(files)
iterator = d.make_one_shot_iterator()
next_element = iterator.get_next()
with self.cached_session() as sess:
for j in range(self._num_files):
for i in range(self._num_records):
self.assertAllEqual(self._record(j, i), sess.run(next_element))
with self.assertRaises(errors.OutOfRangeError):
sess.run(next_element)
def testReadTenEpochsFromDatasetOfFilesInParallel(self):
files = dataset_ops.Dataset.from_tensor_slices(
self.test_filenames).repeat(10)
d = readers.TFRecordDataset(files, num_parallel_reads=4)
iterator = d.make_one_shot_iterator()
next_element = iterator.get_next()
expected = []
actual = []
with self.cached_session() as sess:
for _ in range(10):
for j in range(self._num_files):
for i in range(self._num_records):
expected.append(self._record(j, i))
actual.append(sess.run(next_element))
with self.assertRaises(errors.OutOfRangeError):
sess.run(next_element)
self.assertEqual(sorted(expected), sorted(actual))
if __name__ == "__main__":
test.main()
| AnishShah/tensorflow | tensorflow/python/data/kernel_tests/reader_dataset_ops_test.py | Python | apache-2.0 | 31,212 |
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""DB related custom exceptions."""
import six
from ceilometer.openstack.common.gettextutils import _
class DBError(Exception):
"""Wraps an implementation specific exception."""
def __init__(self, inner_exception=None):
self.inner_exception = inner_exception
super(DBError, self).__init__(six.text_type(inner_exception))
class DBDuplicateEntry(DBError):
"""Wraps an implementation specific exception."""
def __init__(self, columns=[], inner_exception=None):
self.columns = columns
super(DBDuplicateEntry, self).__init__(inner_exception)
class DBDeadlock(DBError):
def __init__(self, inner_exception=None):
super(DBDeadlock, self).__init__(inner_exception)
class DBInvalidUnicodeParameter(Exception):
message = _("Invalid Parameter: "
"Unicode is not supported by the current database.")
class DbMigrationError(DBError):
"""Wraps migration specific exception."""
def __init__(self, message=None):
super(DbMigrationError, self).__init__(message)
class DBConnectionError(DBError):
"""Wraps connection specific exception."""
pass
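# Illustrative usage (added; not part of the original module). Callers wrap the
# driver-specific error so that consumers only need to catch this module's
# exception hierarchy; the ValueError below stands in for a real driver
# exception.
if __name__ == '__main__':
    try:
        raise ValueError("duplicate key value violates unique constraint")
    except ValueError as exc:
        wrapped = DBDuplicateEntry(columns=['uuid'], inner_exception=exc)
        print('%s: %s' % (wrapped.columns, wrapped.inner_exception))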
| tanglei528/ceilometer | ceilometer/openstack/common/db/exception.py | Python | apache-2.0 | 1,876 |
# Copyright (c) 2015 Russell Sim <[email protected]>
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import unicode_literals
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
import unittest
from fairy_slipper.cmd import tempest_log
SIMPLE_LOG = """2015-09-04 15:51:29.023 18793 DEBUG tempest_lib.common.rest_client [req-30784c0a-e9a1-4411-a7c1-20715b26598f ] Request (FlavorsV2TestJSON:setUpClass): 200 POST http://192.168.122.201:5000/v2.0/tokens
2015-09-04 15:51:29.023 18793 DEBUG tempest_lib.common.rest_client [req-30784c0a-e9a1-4411-a7c1-20715b26598f ] Request - Headers: {}
Body: None
Response - Headers: {'status': '200', 'content-length': '2987', 'vary': 'X-Auth-Token', 'server': 'Apache/2.4.7 (Ubuntu)', 'connection': 'close', 'date': 'Sun, 13 Sep 2015 07:43:01 GMT', 'content-type': 'application/json', 'x-openstack-request-id': 'req-1'}
Body: None
2015-09-04 15:51:45.472 18793 INFO tempest_lib.common.rest_client [req-b710aeba-6263-4a49-bf50-2da42227c870 ] Request (FlavorsV2TestJSON:test_get_flavor): 200 POST http://192.168.122.201:5000/v2.0/tokens
2015-09-04 15:51:45.472 18793 DEBUG tempest_lib.common.rest_client [req-b710aeba-6263-4a49-bf50-2da42227c870 ] Request - Headers: {}
Body: None
Response - Headers: {'status': '200', 'content-length': '2987', 'vary': 'X-Auth-Token', 'server': 'Apache/2.4.7 (Ubuntu)', 'connection': 'close', 'date': 'Sun, 13 Sep 2015 07:43:01 GMT', 'content-type': 'application/json', 'x-openstack-request-id': 'req-2'}
Body: None
""" # noqa
SIMPLE_LOG_BODY = """2015-09-04 15:51:29.007 18793 INFO tempest_lib.common.rest_client [req-9e329507-e0ce-448c-a363-f49e39dd96b0 ] Request (FlavorsV2TestJSON:test_get_flavor): 200 GET http://192.168.122.201:8774/v2.1/6b45254f6f7c44a1b65ddb8218932226/flavors/1 0.117s
2015-09-04 15:51:29.007 18793 DEBUG tempest_lib.common.rest_client [req-9e329507-e0ce-448c-a363-f49e39dd96b0 ] Request - Headers: {'Content-Type': 'application/json', 'Accept': 'application/json', 'X-Auth-Token': '<omitted>'}
Body: None
Response - Headers: {'status': '200', 'content-length': '430', 'content-location': 'http://192.168.122.201:8774/v2.1/6b45254f6f7c44a1b65ddb8218932226/flavors/1', 'x-compute-request-id': 'req-959a09e8-3628-419d-964a-1be4ca604232', 'vary': 'X-OpenStack-Nova-API-Version', 'connection': 'close', 'x-openstack-nova-api-version': '2.1', 'date': 'Sun, 13 Sep 2015 07:43:01 GMT', 'content-type': 'application/json'}
Body: {"flavor": {"name": "m1.tiny", "links": [{"href": "http://192.168.122.201:8774/v2.1/6b45254f6f7c44a1b65ddb8218932226/flavors/1", "rel": "self"}, {"href": "http://192.168.122.201:8774/6b45254f6f7c44a1b65ddb8218932226/flavors/1", "rel": "bookmark"}], "ram": 512, "OS-FLV-DISABLED:disabled": false, "vcpus": 1, "swap": "", "os-flavor-access:is_public": true, "rxtx_factor": 1.0, "OS-FLV-EXT-DATA:ephemeral": 0, "disk": 1, "id": "1"}}
""" # noqa
DEBUG_LOG = """2015-09-04 15:54:42.296 18793 INFO tempest_lib.common.rest_client [req-39c6042e-5a4a-4517-9fe9-32b34cfaa5a8 ] Request (TestSessionsTenantIsolation:test_delete_session_in_env_from_another_tenant): 403 DELETE http://127.0.0.1:8082/v1/environments/7501923609b145ec88eeb4a5c93e371c/sessions/db214e36e0494c4e9dc67fb0df8548f7 0.010s
2015-09-04 15:54:42.296 18793 DEBUG tempest_lib.common.rest_client [req-39c6042e-5a4a-4517-9fe9-32b34cfaa5a8 ] Request - Headers: {'Content-Type': 'application/json', 'Accept': 'application/json', 'X-Auth-Token': '<omitted>'}
Body: None
Response - Headers: {'status': '403', 'content-length': '75', 'connection': 'close', 'date': 'Fri, 04 Sep 2015 15:54:42 GMT', 'content-type': 'text/plain; charset=UTF-8', 'x-openstack-request-id': 'req-39c6042e-5a4a-4517-9fe9-32b34cfaa5a8'}
Body: 403 Forbidden
User is not authorized to access these tenant resources
_log_request_full /opt/stack/new/tempest/.venv/local/lib/python2.7/site-packages/tempest_lib/common/rest_client.py:411
2015-09-04 15:52:13.727 18793 INFO tempest_lib.common.rest_client [req-0ff36a16-dacd-49c8-9835-7ce92d50f5a7 ] Request (TestEnvironmentsTenantIsolation:tearDown): 200 DELETE http://127.0.0.1:8082/v1/environments/c32c6d5095c4476da549ed065e9b5196 0.054s
2015-09-04 15:52:13.727 18793 DEBUG tempest_lib.common.rest_client [req-0ff36a16-dacd-49c8-9835-7ce92d50f5a7 ] Request - Headers: {'Content-Type': 'application/json', 'Accept': 'application/json', 'X-Auth-Token': '<omitted>'}
Body: None
Response - Headers: {'status': '200', 'content-length': '0', 'connection': 'close', 'date': 'Fri, 04 Sep 2015 15:52:13 GMT', 'content-type': 'application/json', 'x-openstack-request-id': 'req-0ff36a16-dacd-49c8-9835-7ce92d50f5a7'}
Body: _log_request_full /opt/stack/new/tempest/.venv/local/lib/python2.7/site-packages/tempest_lib/common/rest_client.py:411
""" # noqa
DEBUG_LOG_AUTH = """2015-09-04 15:49:46.056 14923 INFO tempest_lib.common.rest_client [req-280bc347-e650-473e-92bb-bcc59103e12c ] Request (main): 200 POST http://127.0.0.1:5000/v2.0/tokens
2015-09-04 15:49:46.056 14923 DEBUG tempest_lib.common.rest_client [req-280bc347-e650-473e-92bb-bcc59103e12c ] Request - Headers: {}
Body: None
Response - Headers: {'server': 'Apache/2.4.7 (Ubuntu)', 'vary': 'X-Auth-Token', 'x-openstack-request-id': 'req-280bc347-e650-473e-92bb-bcc59103e12c', 'content-length': '4846', 'connection': 'close', 'status': '200', 'content-type': 'application/json', 'date': 'Fri, 04 Sep 2015 15:49:42 GMT'}
Body: None _log_request_full /opt/stack/new/tempest/.tox/venv/local/lib/python2.7/site-packages/tempest_lib/common/rest_client.py:411
""" # noqa
def db_to_call_list(db):
calls = []
for req in sorted(db.requests):
calls.append((db.requests[req], db.responses[req]))
return calls
class TestLogParser(unittest.TestCase):
maxDiff = 10000
def test_simple_parse(self):
result = db_to_call_list(
tempest_log.parse_logfile(StringIO(SIMPLE_LOG)))
self.assertEqual(result, [
({'url': '/v2.0/tokens',
'service': 'identity',
'headers': {},
'body': None,
'method': 'POST'},
{'status_code': '200',
'body': None,
'headers': {'status': '200',
'content-length': '0',
'date': 'Sun, 13 Sep 2015 07:43:01 GMT',
'content-type': 'application/json',
'x-openstack-request-id': 'req-1',
'vary': 'X-Auth-Token',
'connection': 'close',
'server': 'Apache/2.4.7 (Ubuntu)'}}),
({'url': '/v2.0/tokens',
'service': 'identity',
'headers': {},
'body': None,
'method': 'POST'},
{'status_code': '200',
'body': None,
'headers': {'status': '200',
'content-length': '0',
'date': 'Sun, 13 Sep 2015 07:43:01 GMT',
'content-type': 'application/json',
'x-openstack-request-id': 'req-2',
'vary': 'X-Auth-Token',
'connection': 'close',
'server': 'Apache/2.4.7 (Ubuntu)'}})])
def test_body_parse(self):
result = db_to_call_list(
tempest_log.parse_logfile(StringIO(SIMPLE_LOG_BODY)))
self.assertEqual(result, [
({'url': '/v2.1/6b45254f6f7c44a1b65ddb8218932226/flavors/1',
'headers': {'content-type': 'application/json',
'content-length': '0',
'accept': 'application/json',
'x-auth-token': '<omitted>'},
'body': None,
'method': 'GET',
'service': 'compute'},
{'body': '{\n "flavor": {\n "OS-FLV-DISABLED:disabled": false,\n "OS-FLV-EXT-DATA:ephemeral": 0,\n "disk": 1,\n "id": "1",\n "links": [\n {\n "href": "http://192.168.122.201:8774/v2.1/6b45254f6f7c44a1b65ddb8218932226/flavors/1",\n "rel": "self"\n },\n {\n "href": "http://192.168.122.201:8774/6b45254f6f7c44a1b65ddb8218932226/flavors/1",\n "rel": "bookmark"\n }\n ],\n "name": "m1.tiny",\n "os-flavor-access:is_public": true,\n "ram": 512,\n "rxtx_factor": 1.0,\n "swap": "",\n "vcpus": 1\n }\n}', # noqa
'status_code': '200',
'headers': {'status': '200', 'content-length': '548',
'content-location': 'http://192.168.122.201:8774/v2.1/6b45254f6f7c44a1b65ddb8218932226/flavors/1', # noqa
'x-openstack-nova-api-version': '2.1',
'date': 'Sun, 13 Sep 2015 07:43:01 GMT',
'vary': 'X-OpenStack-Nova-API-Version',
'x-compute-request-id': 'req-959a09e8-3628-419d-964a-1be4ca604232', # noqa
'content-type': 'application/json',
'connection': 'close'}})])
def test_debug_log(self):
result = db_to_call_list(
tempest_log.parse_logfile(StringIO(DEBUG_LOG)))
self.assertEqual(result, [
({'body': None,
'headers': {'accept': 'application/json',
'content-type': 'application/json',
'content-length': '0',
'x-auth-token': '<omitted>'},
'method': 'DELETE',
'service': 'application-catalog',
'url': '/v1/environments/c32c6d5095c4476da549ed065e9b5196'},
{'body': None,
'headers': {'connection': 'close',
'content-length': '0',
'content-type': 'application/json',
'date': 'Fri, 04 Sep 2015 15:52:13 GMT',
'status': '200',
'x-openstack-request-id':
'req-0ff36a16-dacd-49c8-9835-7ce92d50f5a7'},
'status_code': '200'}),
({'body': None,
'headers': {'accept': 'application/json',
'content-type': 'application/json',
'content-length': '0',
'x-auth-token': '<omitted>'},
'method': 'DELETE',
'service': 'application-catalog',
'url': '/v1/environments/7501923609b145ec88eeb4a5c93e371c'
'/sessions/db214e36e0494c4e9dc67fb0df8548f7'},
{'body': '403 Forbidden\n'
'User is not authorized to access these tenant resources\n\n',
'headers': {'connection': 'close',
'content-length': '13',
'content-type': 'text/plain; charset=UTF-8',
'date': 'Fri, 04 Sep 2015 15:54:42 GMT',
'status': '403',
'x-openstack-request-id':
'req-39c6042e-5a4a-4517-9fe9-32b34cfaa5a8'},
'status_code': '403'})])
def test_debug_admin_log(self):
result = db_to_call_list(
tempest_log.parse_logfile(StringIO(DEBUG_LOG_AUTH)))
self.assertEqual(result, [
({'body': None,
'headers': {},
'method': 'POST',
'service': 'identity',
'url': '/v2.0/tokens'},
{'body': None,
'headers': {'connection': 'close',
'content-length': '0',
'content-type': 'application/json',
'date': 'Fri, 04 Sep 2015 15:49:42 GMT',
'server': 'Apache/2.4.7 (Ubuntu)',
'status': '200',
'vary': 'X-Auth-Token',
'x-openstack-request-id':
'req-280bc347-e650-473e-92bb-bcc59103e12c'},
'status_code': '200'})])
| annegentle/fairy-slipper | fairy_slipper/tests/cmd/test_tempest.py | Python | apache-2.0 | 12,719 |
# Copyright 2013 Huawei Technologies Co.,LTD.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from tempest_lib.common.utils import data_utils
from tempest_lib import exceptions as lib_exc
import testtools
from tempest.api.compute import base
from tempest.common import tempest_fixtures as fixtures
from tempest import config
from tempest import test
CONF = config.CONF
class ServersAdminNegativeTestJSON(base.BaseV2ComputeAdminTest):
"""
Tests Servers API using admin privileges
"""
@classmethod
def setup_clients(cls):
super(ServersAdminNegativeTestJSON, cls).setup_clients()
cls.client = cls.os_adm.servers_client
cls.non_adm_client = cls.servers_client
cls.flavors_client = cls.os_adm.flavors_client
@classmethod
def resource_setup(cls):
super(ServersAdminNegativeTestJSON, cls).resource_setup()
cls.tenant_id = cls.client.tenant_id
cls.s1_name = data_utils.rand_name('server')
server = cls.create_test_server(name=cls.s1_name,
wait_until='ACTIVE')
cls.s1_id = server['id']
def _get_unused_flavor_id(self):
flavor_id = data_utils.rand_int_id(start=1000)
while True:
try:
self.flavors_client.get_flavor_details(flavor_id)
except lib_exc.NotFound:
break
flavor_id = data_utils.rand_int_id(start=1000)
return flavor_id
@test.idempotent_id('28dcec23-f807-49da-822c-56a92ea3c687')
@testtools.skipUnless(CONF.compute_feature_enabled.resize,
'Resize not available.')
@test.attr(type=['negative'])
def test_resize_server_using_overlimit_ram(self):
# NOTE(mriedem): Avoid conflicts with os-quota-class-sets tests.
self.useFixture(fixtures.LockFixture('compute_quotas'))
flavor_name = data_utils.rand_name("flavor")
flavor_id = self._get_unused_flavor_id()
quota_set = self.quotas_client.get_default_quota_set(self.tenant_id)
ram = int(quota_set['ram']) + 1
vcpus = 8
disk = 10
flavor_ref = self.flavors_client.create_flavor(flavor_name,
ram, vcpus, disk,
flavor_id)
self.addCleanup(self.flavors_client.delete_flavor, flavor_id)
self.assertRaises((lib_exc.Forbidden, lib_exc.OverLimit),
self.client.resize,
self.servers[0]['id'],
flavor_ref['id'])
@test.idempotent_id('7368a427-2f26-4ad9-9ba9-911a0ec2b0db')
@testtools.skipUnless(CONF.compute_feature_enabled.resize,
'Resize not available.')
@test.attr(type=['negative'])
def test_resize_server_using_overlimit_vcpus(self):
# NOTE(mriedem): Avoid conflicts with os-quota-class-sets tests.
self.useFixture(fixtures.LockFixture('compute_quotas'))
flavor_name = data_utils.rand_name("flavor")
flavor_id = self._get_unused_flavor_id()
ram = 512
quota_set = self.quotas_client.get_default_quota_set(self.tenant_id)
vcpus = int(quota_set['cores']) + 1
disk = 10
flavor_ref = self.flavors_client.create_flavor(flavor_name,
ram, vcpus, disk,
flavor_id)
self.addCleanup(self.flavors_client.delete_flavor, flavor_id)
self.assertRaises((lib_exc.Forbidden, lib_exc.OverLimit),
self.client.resize,
self.servers[0]['id'],
flavor_ref['id'])
@test.attr(type=['negative'])
@test.idempotent_id('b0b4d8af-1256-41ef-9ee7-25f1c19dde80')
def test_reset_state_server_invalid_state(self):
self.assertRaises(lib_exc.BadRequest,
self.client.reset_state, self.s1_id,
state='invalid')
@test.attr(type=['negative'])
@test.idempotent_id('4cdcc984-fab0-4577-9a9d-6d558527ee9d')
def test_reset_state_server_invalid_type(self):
self.assertRaises(lib_exc.BadRequest,
self.client.reset_state, self.s1_id,
state=1)
@test.attr(type=['negative'])
@test.idempotent_id('e741298b-8df2-46f0-81cb-8f814ff2504c')
def test_reset_state_server_nonexistent_server(self):
self.assertRaises(lib_exc.NotFound,
self.client.reset_state, '999')
@test.attr(type=['negative'])
@test.idempotent_id('e84e2234-60d2-42fa-8b30-e2d3049724ac')
def test_get_server_diagnostics_by_non_admin(self):
# Non-admin user can not view server diagnostics according to policy
self.assertRaises(lib_exc.Forbidden,
self.non_adm_client.get_server_diagnostics,
self.s1_id)
@test.attr(type=['negative'])
@test.idempotent_id('46a4e1ca-87ae-4d28-987a-1b6b136a0221')
def test_migrate_non_existent_server(self):
        # migrate a non-existent server
self.assertRaises(lib_exc.NotFound,
self.client.migrate_server,
str(uuid.uuid4()))
@test.idempotent_id('b0b17f83-d14e-4fc4-8f31-bcc9f3cfa629')
@testtools.skipUnless(CONF.compute_feature_enabled.resize,
'Resize not available.')
@testtools.skipUnless(CONF.compute_feature_enabled.suspend,
'Suspend is not available.')
@test.attr(type=['negative'])
def test_migrate_server_invalid_state(self):
# create server.
server = self.create_test_server(wait_until='ACTIVE')
server_id = server['id']
# suspend the server.
self.client.suspend_server(server_id)
self.client.wait_for_server_status(server_id, 'SUSPENDED')
        # migrating a suspended server should fail
self.assertRaises(lib_exc.Conflict,
self.client.migrate_server,
server_id)
| danielmellado/tempest | tempest/api/compute/admin/test_servers_negative.py | Python | apache-2.0 | 6,694 |
# -*- coding: utf-8 -*-
import pytest
from zirkon.toolbox.scope import Scope
from zirkon.toolbox.serializer.codec import Codec
def test_Codec_subclass():
class MyCodec(Codec):
def encode(self, obj):
pass
def decode(self, encoded_obj):
pass
my_codec = MyCodec(Scope())
| simone-campagna/daikon | tests/unit/toolbox/serializer/test_codec.py | Python | apache-2.0 | 320 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2011 X.commerce, a business unit of eBay Inc.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Piston Cloud Computing, Inc.
# Copyright 2012 Cloudscaling Group, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
SQLAlchemy models.
"""
import six
from sqlalchemy import Column, Integer
from sqlalchemy import DateTime
from sqlalchemy.orm import object_mapper
from payload.openstack.common.db.sqlalchemy import session as sa
from payload.openstack.common import timeutils
class ModelBase(object):
"""Base class for models."""
__table_initialized__ = False
def save(self, session=None):
"""Save this object."""
if not session:
session = sa.get_session()
        # NOTE(boris-42): This part of code should look like:
        #                       session.add(self)
        #                       session.flush()
        #                   But there is a bug in sqlalchemy and eventlet that
        #                   raises NoneType exception if there is no running
        #                   transaction and rollback is called. As long as
        #                   sqlalchemy has this bug we have to create the
        #                   transaction explicitly.
with session.begin(subtransactions=True):
session.add(self)
session.flush()
def __setitem__(self, key, value):
setattr(self, key, value)
def __getitem__(self, key):
return getattr(self, key)
def get(self, key, default=None):
return getattr(self, key, default)
def _get_extra_keys(self):
return []
def __iter__(self):
columns = dict(object_mapper(self).columns).keys()
# NOTE(russellb): Allow models to specify other keys that can be looked
# up, beyond the actual db columns. An example would be the 'name'
# property for an Instance.
columns.extend(self._get_extra_keys())
self._i = iter(columns)
return self
def next(self):
n = six.advance_iterator(self._i)
return n, getattr(self, n)
def update(self, values):
"""Make the model object behave like a dict."""
for k, v in six.iteritems(values):
setattr(self, k, v)
def iteritems(self):
"""Make the model object behave like a dict.
Includes attributes from joins.
"""
local = dict(self)
joined = dict([(k, v) for k, v in six.iteritems(self.__dict__)
if not k[0] == '_'])
local.update(joined)
return local.iteritems()
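# Editorial usage sketch (not part of the original module): because ModelBase
# implements __setitem__/__getitem__/__iter__/update, mapped instances can be
# read and written like dictionaries. ``_DemoModel`` and its columns are
# hypothetical names used only for this illustration.
def _example_modelbase_dict_access():
    from sqlalchemy import String
    from sqlalchemy.ext.declarative import declarative_base
    base = declarative_base()
    class _DemoModel(base, ModelBase):
        __tablename__ = '_demo'
        id = Column(Integer, primary_key=True)
        name = Column(String(255))
    row = _DemoModel()
    row.update({'name': 'demo'})      # dict-style bulk assignment
    assert row['name'] == 'demo'      # dict-style item access
    return row.get('missing', None)   # dict-style lookup with a default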
class TimestampMixin(object):
created_at = Column(DateTime, default=timeutils.utcnow)
updated_at = Column(DateTime, onupdate=timeutils.utcnow)
class SoftDeleteMixin(object):
deleted_at = Column(DateTime)
deleted = Column(Integer, default=0)
def soft_delete(self, session=None):
"""Mark this object as deleted."""
self.deleted = self.id
self.deleted_at = timeutils.utcnow()
self.save(session=session)
| gorocacher/payload | payload/openstack/common/db/sqlalchemy/models.py | Python | apache-2.0 | 3,729 |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Parsing Ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import gen_parsing_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import parsing_config
# go/tf-wildcard-import
# pylint: disable=wildcard-import,undefined-variable
from tensorflow.python.ops.gen_parsing_ops import *
# pylint: enable=wildcard-import,undefined-variable
from tensorflow.python.util import deprecation
from tensorflow.python.util import dispatch
from tensorflow.python.util.tf_export import tf_export
ops.NotDifferentiable("DecodeRaw")
ops.NotDifferentiable("DecodePaddedRaw")
ops.NotDifferentiable("ParseTensor")
ops.NotDifferentiable("SerializeTensor")
ops.NotDifferentiable("StringToNumber")
VarLenFeature = parsing_config.VarLenFeature
RaggedFeature = parsing_config.RaggedFeature
SparseFeature = parsing_config.SparseFeature
FixedLenFeature = parsing_config.FixedLenFeature
FixedLenSequenceFeature = parsing_config.FixedLenSequenceFeature
# pylint: disable=protected-access
_ParseOpParams = parsing_config._ParseOpParams
_construct_tensors_for_composite_features = (
parsing_config._construct_tensors_for_composite_features)
# pylint: enable=protected-access
# TODO(b/122887740) Switch files that use this private symbol to use new name.
_construct_sparse_tensors_for_sparse_features = \
_construct_tensors_for_composite_features
def _prepend_none_dimension(features):
"""Returns a copy of features with adjusted FixedLenSequenceFeature shapes."""
if features:
modified_features = dict(features) # Create a copy to modify
for key, feature in features.items():
if isinstance(feature, FixedLenSequenceFeature):
if not feature.allow_missing:
raise ValueError("Unsupported: FixedLenSequenceFeature requires "
"allow_missing to be True.")
modified_features[key] = FixedLenSequenceFeature(
[None] + list(feature.shape),
feature.dtype,
feature.allow_missing,
feature.default_value)
return modified_features
else:
return features
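# Editorial illustration (not part of the original module): for a feature
# declared as ``FixedLenSequenceFeature([5], tf.int64, allow_missing=True)``,
# the helper above rewrites the shape to ``[None, 5]`` so the variable
# sequence length becomes a leading dimension. The function name below is
# hypothetical and used only for this sketch.
def _example_prepend_none_dimension():
  from tensorflow.python.framework import dtypes
  features = {
      "tokens": FixedLenSequenceFeature([5], dtypes.int64, allow_missing=True)
  }
  adjusted = _prepend_none_dimension(features)
  # adjusted["tokens"].shape is now [None, 5]; dtype and allow_missing are kept.
  return adjusted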
@tf_export("io.parse_example", v1=[])
@dispatch.add_dispatch_support
def parse_example_v2(serialized, features, example_names=None, name=None):
# pylint: disable=line-too-long
"""Parses `Example` protos into a `dict` of tensors.
Parses a number of serialized [`Example`](https://www.tensorflow.org/code/tensorflow/core/example/example.proto)
protos given in `serialized`. We refer to `serialized` as a batch with
`batch_size` many entries of individual `Example` protos.
`example_names` may contain descriptive names for the corresponding serialized
protos. These may be useful for debugging purposes, but they have no effect on
the output. If not `None`, `example_names` must be the same length as
`serialized`.
  This op parses serialized examples into a dictionary mapping keys to `Tensor`,
`SparseTensor`, and `RaggedTensor` objects. `features` is a dict from keys to
`VarLenFeature`, `SparseFeature`, `RaggedFeature`, and `FixedLenFeature`
objects. Each `VarLenFeature` and `SparseFeature` is mapped to a
`SparseTensor`; each `FixedLenFeature` is mapped to a `Tensor`; and each
`RaggedFeature` is mapped to a `RaggedTensor`.
Each `VarLenFeature` maps to a `SparseTensor` of the specified type
representing a ragged matrix. Its indices are `[batch, index]` where `batch`
identifies the example in `serialized`, and `index` is the value's index in
the list of values associated with that feature and example.
Each `SparseFeature` maps to a `SparseTensor` of the specified type
representing a Tensor of `dense_shape` `[batch_size] + SparseFeature.size`.
Its `values` come from the feature in the examples with key `value_key`.
A `values[i]` comes from a position `k` in the feature of an example at batch
entry `batch`. This positional information is recorded in `indices[i]` as
`[batch, index_0, index_1, ...]` where `index_j` is the `k-th` value of
  the feature in the example with key `SparseFeature.index_key[j]`.
In other words, we split the indices (except the first index indicating the
batch entry) of a `SparseTensor` by dimension into different features of the
`Example`. Due to its complexity a `VarLenFeature` should be preferred over a
`SparseFeature` whenever possible.
Each `FixedLenFeature` `df` maps to a `Tensor` of the specified type (or
`tf.float32` if not specified) and shape `(serialized.size(),) + df.shape`.
`FixedLenFeature` entries with a `default_value` are optional. With no default
value, we will fail if that `Feature` is missing from any example in
`serialized`.
Each `FixedLenSequenceFeature` `df` maps to a `Tensor` of the specified type
(or `tf.float32` if not specified) and shape
`(serialized.size(), None) + df.shape`.
All examples in `serialized` will be padded with `default_value` along the
second dimension.
Each `RaggedFeature` maps to a `RaggedTensor` of the specified type. It
is formed by stacking the `RaggedTensor` for each example, where the
`RaggedTensor` for each individual example is constructed using the tensors
  specified by `RaggedFeature.value_key` and `RaggedFeature.partitions`. See
the `tf.io.RaggedFeature` documentation for details and examples.
Examples:
For example, if one expects a `tf.float32` `VarLenFeature` `ft` and three
serialized `Example`s are provided:
```
serialized = [
features
{ feature { key: "ft" value { float_list { value: [1.0, 2.0] } } } },
features
{ feature []},
features
{ feature { key: "ft" value { float_list { value: [3.0] } } }
]
```
then the output will look like:
```python
{"ft": SparseTensor(indices=[[0, 0], [0, 1], [2, 0]],
values=[1.0, 2.0, 3.0],
dense_shape=(3, 2)) }
```
If instead a `FixedLenSequenceFeature` with `default_value = -1.0` and
`shape=[]` is used then the output will look like:
```python
{"ft": [[1.0, 2.0], [3.0, -1.0]]}
```
Given two `Example` input protos in `serialized`:
```
[
features {
feature { key: "kw" value { bytes_list { value: [ "knit", "big" ] } } }
feature { key: "gps" value { float_list { value: [] } } }
},
features {
feature { key: "kw" value { bytes_list { value: [ "emmy" ] } } }
feature { key: "dank" value { int64_list { value: [ 42 ] } } }
feature { key: "gps" value { } }
}
]
```
And arguments
```
example_names: ["input0", "input1"],
features: {
"kw": VarLenFeature(tf.string),
"dank": VarLenFeature(tf.int64),
"gps": VarLenFeature(tf.float32),
}
```
Then the output is a dictionary:
```python
{
"kw": SparseTensor(
indices=[[0, 0], [0, 1], [1, 0]],
values=["knit", "big", "emmy"]
dense_shape=[2, 2]),
"dank": SparseTensor(
indices=[[1, 0]],
values=[42],
dense_shape=[2, 1]),
"gps": SparseTensor(
indices=[],
values=[],
dense_shape=[2, 0]),
}
```
For dense results in two serialized `Example`s:
```
[
features {
feature { key: "age" value { int64_list { value: [ 0 ] } } }
feature { key: "gender" value { bytes_list { value: [ "f" ] } } }
},
features {
feature { key: "age" value { int64_list { value: [] } } }
feature { key: "gender" value { bytes_list { value: [ "f" ] } } }
}
]
```
We can use arguments:
```
example_names: ["input0", "input1"],
features: {
"age": FixedLenFeature([], dtype=tf.int64, default_value=-1),
"gender": FixedLenFeature([], dtype=tf.string),
}
```
And the expected output is:
```python
{
"age": [[0], [-1]],
"gender": [["f"], ["f"]],
}
```
An alternative to `VarLenFeature` to obtain a `SparseTensor` is
`SparseFeature`. For example, given two `Example` input protos in
`serialized`:
```
[
features {
feature { key: "val" value { float_list { value: [ 0.5, -1.0 ] } } }
feature { key: "ix" value { int64_list { value: [ 3, 20 ] } } }
},
features {
feature { key: "val" value { float_list { value: [ 0.0 ] } } }
feature { key: "ix" value { int64_list { value: [ 42 ] } } }
}
]
```
And arguments
```
example_names: ["input0", "input1"],
features: {
"sparse": SparseFeature(
index_key="ix", value_key="val", dtype=tf.float32, size=100),
}
```
Then the output is a dictionary:
```python
{
"sparse": SparseTensor(
indices=[[0, 3], [0, 20], [1, 42]],
values=[0.5, -1.0, 0.0]
dense_shape=[2, 100]),
}
```
See the `tf.io.RaggedFeature` documentation for examples showing how
`RaggedFeature` can be used to obtain `RaggedTensor`s.
Args:
serialized: A vector (1-D Tensor) of strings, a batch of binary
serialized `Example` protos.
features: A `dict` mapping feature keys to `FixedLenFeature`,
`VarLenFeature`, `SparseFeature`, and `RaggedFeature` values.
example_names: A vector (1-D Tensor) of strings (optional), the names of
the serialized protos in the batch.
name: A name for this operation (optional).
Returns:
A `dict` mapping feature keys to `Tensor`, `SparseTensor`, and
`RaggedTensor` values.
Raises:
ValueError: if any feature is invalid.
"""
if not features:
raise ValueError("Argument `features` cannot be None.")
features = _prepend_none_dimension(features)
params = _ParseOpParams.from_features(features, [
VarLenFeature, SparseFeature, FixedLenFeature, FixedLenSequenceFeature,
RaggedFeature
])
outputs = _parse_example_raw(serialized, example_names, params, name=name)
return _construct_tensors_for_composite_features(features, outputs)
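# Editorial usage sketch (not part of the original module), assuming an eager
# TensorFlow 2.x runtime importable as ``tf``; the helper names below are
# hypothetical and exist only for this example.
def _example_parse_example_usage():
  import tensorflow as tf
  def _make_example(values):
    return tf.train.Example(features=tf.train.Features(feature={
        "ft": tf.train.Feature(float_list=tf.train.FloatList(value=values)),
    })).SerializeToString()
  serialized = [_make_example([1.0, 2.0]), _make_example([3.0])]
  parsed = tf.io.parse_example(
      serialized, {"ft": tf.io.VarLenFeature(tf.float32)})
  # parsed["ft"] is a SparseTensor with dense_shape [2, 2], matching the
  # ragged-matrix behavior described in the docstring above.
  return parsed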
@tf_export(v1=["io.parse_example", "parse_example"])
@dispatch.add_dispatch_support
def parse_example(serialized, features, name=None, example_names=None):
return parse_example_v2(serialized, features, example_names, name)
parse_example.__doc__ = parse_example_v2.__doc__
def _parse_example_raw(serialized, names, params, name):
"""Parses `Example` protos.
Args:
serialized: A vector (1-D Tensor) of strings, a batch of binary
serialized `Example` protos.
names: A vector (1-D Tensor) of strings (optional), the names of
the serialized protos.
params: A `ParseOpParams` containing the parameters for the parse op.
name: A name for this operation (optional).
Returns:
A `dict` mapping keys to `Tensor`s and `SparseTensor`s and `RaggedTensor`s.
"""
if params.num_features == 0:
raise ValueError("Must provide at least one feature key.")
with ops.name_scope(name, "ParseExample", [serialized, names]):
names = [] if names is None else names
serialized = ops.convert_to_tensor(serialized, name="serialized")
if params.ragged_keys and serialized.shape.ndims is None:
raise ValueError("serialized must have statically-known rank to "
"parse ragged features.")
outputs = gen_parsing_ops.parse_example_v2(
serialized=serialized,
names=names,
sparse_keys=params.sparse_keys,
dense_keys=params.dense_keys,
ragged_keys=params.ragged_keys,
dense_defaults=params.dense_defaults_vec,
num_sparse=len(params.sparse_keys),
sparse_types=params.sparse_types,
ragged_value_types=params.ragged_value_types,
ragged_split_types=params.ragged_split_types,
dense_shapes=params.dense_shapes_as_proto,
name=name)
(sparse_indices, sparse_values, sparse_shapes, dense_values,
ragged_values, ragged_row_splits) = outputs
# pylint: disable=protected-access
ragged_tensors = parsing_config._build_ragged_tensors(
serialized.shape, ragged_values, ragged_row_splits)
sparse_tensors = [
sparse_tensor.SparseTensor(ix, val, shape) for (ix, val, shape)
in zip(sparse_indices, sparse_values, sparse_shapes)]
return dict(
zip(params.sparse_keys + params.dense_keys + params.ragged_keys,
sparse_tensors + dense_values + ragged_tensors))
@tf_export(v1=["io.parse_single_example", "parse_single_example"])
@dispatch.add_dispatch_support
def parse_single_example(serialized, features, name=None, example_names=None):
"""Parses a single `Example` proto.
Similar to `parse_example`, except:
For dense tensors, the returned `Tensor` is identical to the output of
`parse_example`, except there is no batch dimension, the output shape is the
same as the shape given in `dense_shape`.
For `SparseTensor`s, the first (batch) column of the indices matrix is removed
(the indices matrix is a column vector), the values vector is unchanged, and
the first (`batch_size`) entry of the shape vector is removed (it is now a
single element vector).
One might see performance advantages by batching `Example` protos with
`parse_example` instead of using this function directly.
Args:
serialized: A scalar string Tensor, a single serialized Example.
features: A `dict` mapping feature keys to `FixedLenFeature` or
`VarLenFeature` values.
name: A name for this operation (optional).
example_names: (Optional) A scalar string Tensor, the associated name.
Returns:
A `dict` mapping feature keys to `Tensor` and `SparseTensor` values.
Raises:
ValueError: if any feature is invalid.
"""
return parse_single_example_v2(serialized, features, example_names, name)
@tf_export("io.parse_single_example", v1=[])
@dispatch.add_dispatch_support
def parse_single_example_v2(
serialized, features, example_names=None, name=None
):
"""Parses a single `Example` proto.
Similar to `parse_example`, except:
For dense tensors, the returned `Tensor` is identical to the output of
`parse_example`, except there is no batch dimension, the output shape is the
same as the shape given in `dense_shape`.
For `SparseTensor`s, the first (batch) column of the indices matrix is removed
(the indices matrix is a column vector), the values vector is unchanged, and
the first (`batch_size`) entry of the shape vector is removed (it is now a
single element vector).
One might see performance advantages by batching `Example` protos with
`parse_example` instead of using this function directly.
Args:
serialized: A scalar string Tensor, a single serialized Example.
features: A `dict` mapping feature keys to `FixedLenFeature` or
`VarLenFeature` values.
example_names: (Optional) A scalar string Tensor, the associated name.
name: A name for this operation (optional).
Returns:
A `dict` mapping feature keys to `Tensor` and `SparseTensor` values.
Raises:
ValueError: if any feature is invalid.
"""
if not features:
raise ValueError("Invalid argument: features cannot be None.")
with ops.name_scope(name, "ParseSingleExample", [serialized, example_names]):
serialized = ops.convert_to_tensor(serialized, name="serialized")
serialized = _assert_scalar(serialized, "serialized")
return parse_example_v2(serialized, features, example_names, name)
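# Editorial usage sketch (not part of the original module), assuming an eager
# TensorFlow 2.x runtime; the helper name is hypothetical.
def _example_parse_single_example_usage():
  import tensorflow as tf
  serialized = tf.train.Example(features=tf.train.Features(feature={
      "a": tf.train.Feature(int64_list=tf.train.Int64List(value=[1, 1, 3])),
  })).SerializeToString()
  parsed = tf.io.parse_single_example(
      serialized, {"a": tf.io.FixedLenFeature([3], tf.int64)})
  # parsed["a"] has shape [3]; unlike parse_example there is no batch dimension.
  return parsed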
@tf_export("io.parse_sequence_example")
@dispatch.add_dispatch_support
def parse_sequence_example(serialized,
context_features=None,
sequence_features=None,
example_names=None,
name=None):
# pylint: disable=line-too-long
"""Parses a batch of `SequenceExample` protos.
Parses a vector of serialized
[`SequenceExample`](https://www.tensorflow.org/code/tensorflow/core/example/example.proto)
protos given in `serialized`.
This op parses serialized sequence examples into a tuple of dictionaries,
each mapping keys to `Tensor` and `SparseTensor` objects.
The first dictionary contains mappings for keys appearing in
`context_features`, and the second dictionary contains mappings for keys
appearing in `sequence_features`.
At least one of `context_features` and `sequence_features` must be provided
and non-empty.
The `context_features` keys are associated with a `SequenceExample` as a
whole, independent of time / frame. In contrast, the `sequence_features` keys
provide a way to access variable-length data within the `FeatureList` section
of the `SequenceExample` proto. While the shapes of `context_features` values
are fixed with respect to frame, the frame dimension (the first dimension)
of `sequence_features` values may vary between `SequenceExample` protos,
and even between `feature_list` keys within the same `SequenceExample`.
`context_features` contains `VarLenFeature`, `RaggedFeature`, and
`FixedLenFeature` objects. Each `VarLenFeature` is mapped to a
`SparseTensor`; each `RaggedFeature` is mapped to a `RaggedTensor`; and each
`FixedLenFeature` is mapped to a `Tensor`, of the specified type, shape, and
default value.
`sequence_features` contains `VarLenFeature`, `RaggedFeature`, and
`FixedLenSequenceFeature` objects. Each `VarLenFeature` is mapped to a
`SparseTensor`; each `RaggedFeature` is mapped to a `RaggedTensor`; and
each `FixedLenSequenceFeature` is mapped to a `Tensor`, each of the specified
type. The shape will be `(B,T,) + df.dense_shape` for
`FixedLenSequenceFeature` `df`, where `B` is the batch size, and `T` is the
length of the associated `FeatureList` in the `SequenceExample`. For instance,
`FixedLenSequenceFeature([])` yields a scalar 2-D `Tensor` of static shape
`[None, None]` and dynamic shape `[B, T]`, while
`FixedLenSequenceFeature([k])` (for `int k >= 1`) yields a 3-D matrix `Tensor`
of static shape `[None, None, k]` and dynamic shape `[B, T, k]`.
Like the input, the resulting output tensors have a batch dimension. This
means that the original per-example shapes of `VarLenFeature`s and
`FixedLenSequenceFeature`s can be lost. To handle that situation, this op also
provides dicts of shape tensors as part of the output. There is one dict for
the context features, and one for the feature_list features. Context features
of type `FixedLenFeature`s will not be present, since their shapes are already
known by the caller. In situations where the input `FixedLenSequenceFeature`s
are of different sequence lengths across examples, the shorter examples will
be padded with default datatype values: 0 for numeric types, and the empty
string for string types.
Each `SparseTensor` corresponding to `sequence_features` represents a ragged
vector. Its indices are `[time, index]`, where `time` is the `FeatureList`
entry and `index` is the value's index in the list of values associated with
that time.
`FixedLenFeature` entries with a `default_value` and `FixedLenSequenceFeature`
entries with `allow_missing=True` are optional; otherwise, we will fail if
that `Feature` or `FeatureList` is missing from any example in `serialized`.
`example_name` may contain a descriptive name for the corresponding serialized
proto. This may be useful for debugging purposes, but it has no effect on the
output. If not `None`, `example_name` must be a scalar.
Args:
serialized: A vector (1-D Tensor) of type string containing binary
serialized `SequenceExample` protos.
context_features: A `dict` mapping feature keys to `FixedLenFeature` or
`VarLenFeature` or `RaggedFeature` values. These features are associated
with a `SequenceExample` as a whole.
sequence_features: A `dict` mapping feature keys to
`FixedLenSequenceFeature` or `VarLenFeature` or `RaggedFeature` values.
These features are associated with data within the `FeatureList` section
of the `SequenceExample` proto.
example_names: A vector (1-D Tensor) of strings (optional), the name of the
serialized protos.
name: A name for this operation (optional).
Returns:
A tuple of three `dict`s, each mapping keys to `Tensor`s,
`SparseTensor`s, and `RaggedTensor`. The first dict contains the context
key/values, the second dict contains the feature_list key/values, and the
final dict contains the lengths of any dense feature_list features.
Raises:
ValueError: if any feature is invalid.
"""
if not (context_features or sequence_features):
raise ValueError("Both `context_features` and `sequence_features` argument "
"are None, but at least one should have values.")
context_params = _ParseOpParams.from_features(
context_features, [VarLenFeature, FixedLenFeature, RaggedFeature])
feature_list_params = _ParseOpParams.from_features(
sequence_features,
[VarLenFeature, FixedLenSequenceFeature, RaggedFeature])
with ops.name_scope(name, "ParseSequenceExample",
[serialized, example_names]):
outputs = _parse_sequence_example_raw(serialized, example_names,
context_params, feature_list_params,
name)
context_output, feature_list_output, feature_list_lengths = outputs
if context_params.ragged_keys:
context_output = _construct_tensors_for_composite_features(
context_features, context_output)
if feature_list_params.ragged_keys:
feature_list_output = _construct_tensors_for_composite_features(
sequence_features, feature_list_output)
return context_output, feature_list_output, feature_list_lengths
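# Editorial usage sketch (not part of the original module), assuming an eager
# TensorFlow 2.x runtime; the helper name is hypothetical.
def _example_parse_sequence_example_usage():
  import tensorflow as tf
  seq_example = tf.train.SequenceExample(
      context=tf.train.Features(feature={
          "id": tf.train.Feature(int64_list=tf.train.Int64List(value=[7])),
      }),
      feature_lists=tf.train.FeatureLists(feature_list={
          "tokens": tf.train.FeatureList(feature=[
              tf.train.Feature(int64_list=tf.train.Int64List(value=[1, 2])),
              tf.train.Feature(int64_list=tf.train.Int64List(value=[3])),
          ]),
      }))
  context, sequences, lengths = tf.io.parse_sequence_example(
      serialized=[seq_example.SerializeToString()],
      context_features={"id": tf.io.FixedLenFeature([], tf.int64)},
      sequence_features={"tokens": tf.io.VarLenFeature(tf.int64)})
  # context["id"] has shape [1]; sequences["tokens"] is a SparseTensor with a
  # leading batch dimension; `lengths` only reports dense feature_list keys.
  return context, sequences, lengths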
def _parse_sequence_example_raw(serialized,
debug_name,
context,
feature_list,
name=None):
"""Parses a vector of `SequenceExample` protos.
Args:
serialized: A vector (1-D Tensor) of type string, containing binary
serialized `SequenceExample` protos.
debug_name: A vector (1-D Tensor) of strings (optional), the names of the
serialized protos.
context: A `ParseOpParams` containing the parameters for the parse
op for the context features.
feature_list: A `ParseOpParams` containing the parameters for the
parse op for the feature_list features.
name: A name for this operation (optional).
Returns:
A tuple of three `dict`s, each mapping keys to `Tensor`s, `SparseTensor`s,
and `RaggedTensor`s. The first dict contains the context key/values, the
second dict contains the feature_list key/values, and the final dict
contains the lengths of any dense feature_list features.
Raises:
TypeError: if feature_list.dense_defaults is not either None or a dict.
"""
if context.num_features + feature_list.num_features == 0:
raise ValueError("Must provide at least one feature key.")
with ops.name_scope(name, "ParseSequenceExample", [serialized]):
debug_name = [] if debug_name is None else debug_name
# Internal
feature_list_dense_missing_assumed_empty = []
for k, v in feature_list.dense_defaults.items():
if v is not None:
raise ValueError("Value feature_list.dense_defaults[%s] must be None" %
k)
feature_list_dense_missing_assumed_empty.append(k)
has_ragged = context.ragged_keys or feature_list.ragged_keys
serialized = ops.convert_to_tensor(serialized, name="serialized")
if has_ragged and serialized.shape.ndims is None:
raise ValueError("serialized must have statically-known rank to "
"parse ragged features.")
feature_list_dense_missing_assumed_empty_vector = [
key in feature_list_dense_missing_assumed_empty
for key in feature_list.dense_keys
]
outputs = gen_parsing_ops.parse_sequence_example_v2(
# Inputs
serialized=serialized,
debug_name=debug_name,
context_sparse_keys=context.sparse_keys,
context_dense_keys=context.dense_keys,
context_ragged_keys=context.ragged_keys,
feature_list_sparse_keys=feature_list.sparse_keys,
feature_list_dense_keys=feature_list.dense_keys,
feature_list_ragged_keys=feature_list.ragged_keys,
feature_list_dense_missing_assumed_empty=(
feature_list_dense_missing_assumed_empty_vector),
context_dense_defaults=context.dense_defaults_vec,
# Attrs
Ncontext_sparse=len(context.sparse_keys),
Nfeature_list_sparse=len(feature_list.sparse_keys),
Nfeature_list_dense=len(feature_list.dense_keys),
context_sparse_types=context.sparse_types,
context_ragged_value_types=context.ragged_value_types,
context_ragged_split_types=context.ragged_split_types,
feature_list_dense_types=feature_list.dense_types,
feature_list_sparse_types=feature_list.sparse_types,
feature_list_ragged_value_types=feature_list.ragged_value_types,
feature_list_ragged_split_types=feature_list.ragged_split_types,
context_dense_shapes=context.dense_shapes_as_proto,
feature_list_dense_shapes=feature_list.dense_shapes,
name=name)
(context_sparse_indices, context_sparse_values, context_sparse_shapes,
context_dense_values, context_ragged_values, context_ragged_row_splits,
feature_list_sparse_indices, feature_list_sparse_values,
feature_list_sparse_shapes, feature_list_dense_values,
feature_list_dense_lengths, feature_list_ragged_values,
feature_list_ragged_outer_splits,
feature_list_ragged_inner_splits) = outputs
# pylint: disable=protected-access
context_ragged_tensors = parsing_config._build_ragged_tensors(
serialized.shape, context_ragged_values, context_ragged_row_splits)
feature_list_ragged_tensors = parsing_config._build_ragged_tensors(
serialized.shape, feature_list_ragged_values,
feature_list_ragged_outer_splits, feature_list_ragged_inner_splits)
# pylint: disable=g-complex-comprehension
context_sparse_tensors = [
sparse_tensor.SparseTensor(ix, val, shape)
for (ix, val,
shape) in zip(context_sparse_indices, context_sparse_values,
context_sparse_shapes)
]
feature_list_sparse_tensors = [
sparse_tensor.SparseTensor(ix, val, shape)
for (ix, val, shape
) in zip(feature_list_sparse_indices, feature_list_sparse_values,
feature_list_sparse_shapes)
]
# pylint: enable=g-complex-comprehension
context_output = dict(
zip(
context.sparse_keys + context.dense_keys + context.ragged_keys,
context_sparse_tensors + context_dense_values +
context_ragged_tensors))
feature_list_output = dict(
zip(
feature_list.sparse_keys + feature_list.dense_keys +
feature_list.ragged_keys, feature_list_sparse_tensors +
feature_list_dense_values + feature_list_ragged_tensors))
feature_list_lengths = dict(
zip(feature_list.dense_keys, feature_list_dense_lengths))
return (context_output, feature_list_output, feature_list_lengths)
@tf_export("io.parse_single_sequence_example",
v1=["io.parse_single_sequence_example",
"parse_single_sequence_example"])
@dispatch.add_dispatch_support
def parse_single_sequence_example(
serialized, context_features=None, sequence_features=None,
example_name=None, name=None):
# pylint: disable=line-too-long
"""Parses a single `SequenceExample` proto.
Parses a single serialized [`SequenceExample`](https://www.tensorflow.org/code/tensorflow/core/example/example.proto)
proto given in `serialized`.
This op parses a serialized sequence example into a tuple of dictionaries,
each mapping keys to `Tensor` and `SparseTensor` objects.
The first dictionary contains mappings for keys appearing in
`context_features`, and the second dictionary contains mappings for keys
appearing in `sequence_features`.
At least one of `context_features` and `sequence_features` must be provided
and non-empty.
The `context_features` keys are associated with a `SequenceExample` as a
whole, independent of time / frame. In contrast, the `sequence_features` keys
provide a way to access variable-length data within the `FeatureList` section
of the `SequenceExample` proto. While the shapes of `context_features` values
are fixed with respect to frame, the frame dimension (the first dimension)
of `sequence_features` values may vary between `SequenceExample` protos,
and even between `feature_list` keys within the same `SequenceExample`.
`context_features` contains `VarLenFeature`, `RaggedFeature`, and
`FixedLenFeature` objects. Each `VarLenFeature` is mapped to a `SparseTensor`;
each `RaggedFeature` is mapped to a `RaggedTensor`; and each `FixedLenFeature`
is mapped to a `Tensor`, of the specified type, shape, and default value.
`sequence_features` contains `VarLenFeature`, `RaggedFeature`, and
`FixedLenSequenceFeature` objects. Each `VarLenFeature` is mapped to a
`SparseTensor`; each `RaggedFeature` is mapped to a `RaggedTensor`; and each
`FixedLenSequenceFeature` is mapped to a `Tensor`, each of the specified type.
The shape will be `(T,) + df.dense_shape` for `FixedLenSequenceFeature` `df`,
where `T` is the length of the associated `FeatureList` in the
`SequenceExample`. For instance, `FixedLenSequenceFeature([])` yields a scalar
1-D `Tensor` of static shape `[None]` and dynamic shape `[T]`, while
`FixedLenSequenceFeature([k])` (for `int k >= 1`) yields a 2-D matrix `Tensor`
of static shape `[None, k]` and dynamic shape `[T, k]`.
Each `SparseTensor` corresponding to `sequence_features` represents a ragged
vector. Its indices are `[time, index]`, where `time` is the `FeatureList`
entry and `index` is the value's index in the list of values associated with
that time.
`FixedLenFeature` entries with a `default_value` and `FixedLenSequenceFeature`
entries with `allow_missing=True` are optional; otherwise, we will fail if
that `Feature` or `FeatureList` is missing from any example in `serialized`.
`example_name` may contain a descriptive name for the corresponding serialized
proto. This may be useful for debugging purposes, but it has no effect on the
output. If not `None`, `example_name` must be a scalar.
Note that the batch version of this function, `tf.parse_sequence_example`,
is written for better memory efficiency and will be faster on large
`SequenceExample`s.
Args:
serialized: A scalar (0-D Tensor) of type string, a single binary
serialized `SequenceExample` proto.
context_features: A `dict` mapping feature keys to `FixedLenFeature` or
`VarLenFeature` or `RaggedFeature` values. These features are associated
with a `SequenceExample` as a whole.
sequence_features: A `dict` mapping feature keys to
`FixedLenSequenceFeature` or `VarLenFeature` or `RaggedFeature` values.
These features are associated with data within the `FeatureList` section
of the `SequenceExample` proto.
example_name: A scalar (0-D Tensor) of strings (optional), the name of
the serialized proto.
name: A name for this operation (optional).
Returns:
A tuple of two `dict`s, each mapping keys to `Tensor`s and `SparseTensor`s
and `RaggedTensor`s.
* The first dict contains the context key/values.
* The second dict contains the feature_list key/values.
Raises:
ValueError: if any feature is invalid.
"""
# pylint: enable=line-too-long
if not (context_features or sequence_features):
raise ValueError("Both context_features and sequence_features are None, but"
" at least one should have values.")
context_params = _ParseOpParams.from_features(
context_features, [VarLenFeature, FixedLenFeature, RaggedFeature])
feature_list_params = _ParseOpParams.from_features(
sequence_features,
[VarLenFeature, FixedLenSequenceFeature, RaggedFeature])
with ops.name_scope(name, "ParseSingleSequenceExample",
[serialized, example_name]):
context_output, feature_list_output = (
_parse_single_sequence_example_raw(serialized, context_params,
feature_list_params, example_name,
name))
if context_params.ragged_keys:
context_output = _construct_tensors_for_composite_features(
context_features, context_output)
if feature_list_params.ragged_keys:
feature_list_output = _construct_tensors_for_composite_features(
sequence_features, feature_list_output)
return context_output, feature_list_output
def _parse_single_sequence_example_raw(serialized,
context,
feature_list,
debug_name,
name=None):
"""Parses a single `SequenceExample` proto.
Args:
serialized: A scalar (0-D Tensor) of type string, a single binary serialized
`SequenceExample` proto.
context: A `ParseOpParams` containing the parameters for the parse op for
the context features.
feature_list: A `ParseOpParams` containing the parameters for the parse op
for the feature_list features.
debug_name: A scalar (0-D Tensor) of strings (optional), the name of the
serialized proto.
name: A name for this operation (optional).
Returns:
A tuple of two `dict`s, each mapping keys to `Tensor`s and `SparseTensor`s.
The first dict contains the context key/values.
The second dict contains the feature_list key/values.
Raises:
TypeError: if feature_list.dense_defaults is not either None or a dict.
"""
with ops.name_scope(name, "ParseSingleExample", [serialized, debug_name]):
serialized = ops.convert_to_tensor(serialized, name="serialized")
serialized = _assert_scalar(serialized, "serialized")
return _parse_sequence_example_raw(serialized, debug_name, context,
feature_list, name)[:2]
@tf_export("io.decode_raw", v1=[])
@dispatch.add_dispatch_support
def decode_raw(input_bytes,
out_type,
little_endian=True,
fixed_length=None,
name=None):
r"""Convert raw bytes from input tensor into numeric tensors.
Every component of the input tensor is interpreted as a sequence of bytes.
These bytes are then decoded as numbers in the format specified by `out_type`.
>>> tf.io.decode_raw(tf.constant("1"), tf.uint8)
<tf.Tensor: shape=(1,), dtype=uint8, numpy=array([49], dtype=uint8)>
>>> tf.io.decode_raw(tf.constant("1,2"), tf.uint8)
<tf.Tensor: shape=(3,), dtype=uint8, numpy=array([49, 44, 50], dtype=uint8)>
Note that the rank of the output tensor is always one more than the input one:
>>> tf.io.decode_raw(tf.constant(["1","2"]), tf.uint8).shape
TensorShape([2, 1])
>>> tf.io.decode_raw(tf.constant([["1"],["2"]]), tf.uint8).shape
TensorShape([2, 1, 1])
This is because each byte in the input is converted to a new value on the
output (if output type is `uint8` or `int8`, otherwise chunks of inputs get
  converted to a new value):
>>> tf.io.decode_raw(tf.constant("123"), tf.uint8)
<tf.Tensor: shape=(3,), dtype=uint8, numpy=array([49, 50, 51], dtype=uint8)>
>>> tf.io.decode_raw(tf.constant("1234"), tf.uint8)
<tf.Tensor: shape=(4,), dtype=uint8, numpy=array([49, 50, 51, 52], ...
  >>> # chunked output
>>> tf.io.decode_raw(tf.constant("12"), tf.uint16)
<tf.Tensor: shape=(1,), dtype=uint16, numpy=array([12849], dtype=uint16)>
>>> tf.io.decode_raw(tf.constant("1234"), tf.uint16)
<tf.Tensor: shape=(2,), dtype=uint16, numpy=array([12849, 13363], ...
>>> # int64 output
>>> tf.io.decode_raw(tf.constant("12345678"), tf.int64)
<tf.Tensor: ... numpy=array([4050765991979987505])>
>>> tf.io.decode_raw(tf.constant("1234567887654321"), tf.int64)
<tf.Tensor: ... numpy=array([4050765991979987505, 3544952156018063160])>
The operation allows specifying endianness via the `little_endian` parameter.
>>> tf.io.decode_raw(tf.constant("\x0a\x0b"), tf.int16)
<tf.Tensor: shape=(1,), dtype=int16, numpy=array([2826], dtype=int16)>
>>> hex(2826)
'0xb0a'
>>> tf.io.decode_raw(tf.constant("\x0a\x0b"), tf.int16, little_endian=False)
<tf.Tensor: shape=(1,), dtype=int16, numpy=array([2571], dtype=int16)>
>>> hex(2571)
'0xa0b'
If the elements of `input_bytes` are of different length, you must specify
`fixed_length`:
>>> tf.io.decode_raw(tf.constant([["1"],["23"]]), tf.uint8, fixed_length=4)
<tf.Tensor: shape=(2, 1, 4), dtype=uint8, numpy=
array([[[49, 0, 0, 0]],
[[50, 51, 0, 0]]], dtype=uint8)>
  If the `fixed_length` value is larger than the length of the `out_type` dtype,
multiple values are generated:
>>> tf.io.decode_raw(tf.constant(["1212"]), tf.uint16, fixed_length=4)
<tf.Tensor: shape=(1, 2), dtype=uint16, numpy=array([[12849, 12849]], ...
If the input value is larger than `fixed_length`, it is truncated:
>>> x=''.join([chr(1), chr(2), chr(3), chr(4)])
>>> tf.io.decode_raw(x, tf.uint16, fixed_length=2)
<tf.Tensor: shape=(1,), dtype=uint16, numpy=array([513], dtype=uint16)>
>>> hex(513)
'0x201'
If `little_endian` and `fixed_length` are specified, truncation to the fixed
length occurs before endianness conversion:
>>> x=''.join([chr(1), chr(2), chr(3), chr(4)])
>>> tf.io.decode_raw(x, tf.uint16, fixed_length=2, little_endian=False)
<tf.Tensor: shape=(1,), dtype=uint16, numpy=array([258], dtype=uint16)>
>>> hex(258)
'0x102'
If input values all have the same length, then specifying `fixed_length`
equal to the size of the strings should not change output:
>>> x = ["12345678", "87654321"]
>>> tf.io.decode_raw(x, tf.int16)
<tf.Tensor: shape=(2, 4), dtype=int16, numpy=
array([[12849, 13363, 13877, 14391],
[14136, 13622, 13108, 12594]], dtype=int16)>
>>> tf.io.decode_raw(x, tf.int16, fixed_length=len(x[0]))
<tf.Tensor: shape=(2, 4), dtype=int16, numpy=
array([[12849, 13363, 13877, 14391],
[14136, 13622, 13108, 12594]], dtype=int16)>
Args:
input_bytes:
Each element of the input Tensor is converted to an array of bytes.
Currently, this must be a tensor of strings (bytes), although semantically
the operation should support any input.
out_type:
`DType` of the output. Acceptable types are `half`, `float`, `double`,
`int32`, `uint16`, `uint8`, `int16`, `int8`, `int64`.
little_endian:
Whether the `input_bytes` data is in little-endian format. Data will be
converted into host byte order if necessary.
fixed_length:
If set, the first `fixed_length` bytes of each element will be converted.
Data will be zero-padded or truncated to the specified length.
`fixed_length` must be a multiple of the size of `out_type`.
`fixed_length` must be specified if the elements of `input_bytes` are of
variable length.
name: A name for the operation (optional).
Returns:
A `Tensor` object storing the decoded bytes.
"""
if fixed_length is not None:
return gen_parsing_ops.decode_padded_raw(
input_bytes,
fixed_length=fixed_length,
out_type=out_type,
little_endian=little_endian,
name=name)
else:
return gen_parsing_ops.decode_raw(
input_bytes, out_type, little_endian=little_endian, name=name)
@tf_export(v1=["decode_raw", "io.decode_raw"])
@dispatch.add_dispatch_support
@deprecation.deprecated_args(None,
"bytes is deprecated, use input_bytes instead",
"bytes")
def decode_raw_v1(
input_bytes=None,
out_type=None,
little_endian=True,
name=None,
bytes=None # pylint: disable=redefined-builtin
):
"""Convert raw byte strings into tensors.
Args:
input_bytes:
Each element of the input Tensor is converted to an array of bytes.
out_type:
`DType` of the output. Acceptable types are `half`, `float`, `double`,
`int32`, `uint16`, `uint8`, `int16`, `int8`, `int64`.
little_endian:
Whether the `input_bytes` data is in little-endian format. Data will be
converted into host byte order if necessary.
name: A name for the operation (optional).
bytes: Deprecated parameter. Use `input_bytes` instead.
Returns:
A `Tensor` object storing the decoded bytes.
"""
input_bytes = deprecation.deprecated_argument_lookup("input_bytes",
input_bytes, "bytes",
bytes)
# out_type is a required positional argument in the original API, and had to
# be changed to a keyword argument in order to facilitate the transition from
  # the reserved name `bytes` to `input_bytes`. Ensure it's still set.
if out_type is None:
raise ValueError(
"decode_raw_v1() missing 1 positional argument: 'out_type'")
return gen_parsing_ops.decode_raw(
input_bytes, out_type, little_endian=little_endian, name=name)
# Swap `name` and `na_value` for backward compatibility.
@tf_export(v1=["io.decode_csv", "decode_csv"])
@dispatch.add_dispatch_support
@deprecation.deprecated_endpoints("decode_csv")
def decode_csv(records,
record_defaults,
field_delim=",",
use_quote_delim=True,
name=None,
na_value="",
select_cols=None):
"""Convert CSV records to tensors. Each column maps to one tensor.
RFC 4180 format is expected for the CSV records.
(https://tools.ietf.org/html/rfc4180)
  Note that we allow leading and trailing spaces for int or float fields.
Args:
records: A `Tensor` of type `string`.
Each string is a record/row in the csv and all records should have
the same format.
record_defaults: A list of `Tensor` objects with specific types.
Acceptable types are `float32`, `float64`, `int32`, `int64`, `string`.
One tensor per column of the input record, with either a
scalar default value for that column or an empty vector if the column is
required.
field_delim: An optional `string`. Defaults to `","`.
char delimiter to separate fields in a record.
use_quote_delim: An optional `bool`. Defaults to `True`.
If false, treats double quotation marks as regular
characters inside of the string fields (ignoring RFC 4180, Section 2,
Bullet 5).
name: A name for the operation (optional).
na_value: Additional string to recognize as NA/NaN.
select_cols: Optional sorted list of column indices to select. If specified,
only this subset of columns will be parsed and returned.
Returns:
A list of `Tensor` objects. Has the same type as `record_defaults`.
Each tensor will have the same shape as records.
Raises:
ValueError: If any of the arguments is malformed.
"""
return decode_csv_v2(
records, record_defaults,
field_delim, use_quote_delim,
na_value, select_cols, name
)
@tf_export("io.decode_csv", v1=[])
@dispatch.add_dispatch_support
def decode_csv_v2(records,
record_defaults,
field_delim=",",
use_quote_delim=True,
na_value="",
select_cols=None,
name=None):
"""Convert CSV records to tensors. Each column maps to one tensor.
RFC 4180 format is expected for the CSV records.
(https://tools.ietf.org/html/rfc4180)
  Note that we allow leading and trailing spaces for int or float fields.
Args:
records: A `Tensor` of type `string`.
Each string is a record/row in the csv and all records should have
the same format.
record_defaults: A list of `Tensor` objects with specific types.
Acceptable types are `float32`, `float64`, `int32`, `int64`, `string`.
One tensor per column of the input record, with either a
scalar default value for that column or an empty vector if the column is
required.
field_delim: An optional `string`. Defaults to `","`.
char delimiter to separate fields in a record.
use_quote_delim: An optional `bool`. Defaults to `True`.
If false, treats double quotation marks as regular
characters inside of the string fields (ignoring RFC 4180, Section 2,
Bullet 5).
na_value: Additional string to recognize as NA/NaN.
select_cols: Optional sorted list of column indices to select. If specified,
only this subset of columns will be parsed and returned.
name: A name for the operation (optional).
Returns:
A list of `Tensor` objects. Has the same type as `record_defaults`.
Each tensor will have the same shape as records.
Raises:
ValueError: If any of the arguments is malformed.
"""
if select_cols is not None and any(select_cols[i] >= select_cols[i + 1]
for i in range(len(select_cols) - 1)):
raise ValueError("select_cols is not strictly increasing.")
if select_cols is not None and select_cols[0] < 0:
raise ValueError("select_cols contains negative values.")
if select_cols is not None and len(select_cols) != len(record_defaults):
raise ValueError("Length of select_cols and record_defaults do not match.")
return gen_parsing_ops.decode_csv(
records=records,
record_defaults=record_defaults,
field_delim=field_delim,
use_quote_delim=use_quote_delim,
na_value=na_value,
name=name,
select_cols=select_cols,
)
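# Editorial usage sketch (not part of the original module), assuming an eager
# TensorFlow 2.x runtime; the helper name is hypothetical.
def _example_decode_csv_usage():
  import tensorflow as tf
  records = ["1,2.5,hello", "4,0.5,world"]
  # One default per column; an empty-vector default would mark that column as
  # required instead of optional.
  col_a, col_b, col_c = tf.io.decode_csv(
      records, record_defaults=[[0], [0.0], [""]])
  # Each returned tensor has the same shape as `records`, e.g. col_a == [1, 4].
  return col_a, col_b, col_c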
def _assert_scalar(value, name):
"""Asserts that `value` is scalar, and returns `value`."""
value_rank = value.shape.rank
if value_rank is None:
check = control_flow_ops.Assert(
math_ops.equal(array_ops.rank(value), 0),
["Input %s must be a scalar" % name],
name="%sIsScalar" % name.capitalize())
result = control_flow_ops.with_dependencies([check],
value,
name="%sDependencies" % name)
result.set_shape([])
return result
elif value_rank == 0:
return value
else:
raise ValueError("Input %s must be a scalar" % name)
@tf_export("io.decode_json_example",
v1=["decode_json_example", "io.decode_json_example"])
def decode_json_example(json_examples, name=None):
r"""Convert JSON-encoded Example records to binary protocol buffer strings.
Note: This is **not** a general purpose JSON parsing op.
This op converts JSON-serialized `tf.train.Example` (maybe created with
`json_format.MessageToJson`, following the
[standard JSON mapping](
https://developers.google.com/protocol-buffers/docs/proto3#json))
to a binary-serialized `tf.train.Example` (equivalent to
`Example.SerializeToString()`) suitable for conversion to tensors with
`tf.io.parse_example`.
Here is a `tf.train.Example` proto:
>>> example = tf.train.Example(
... features=tf.train.Features(
... feature={
... "a": tf.train.Feature(
... int64_list=tf.train.Int64List(
... value=[1, 1, 3]))}))
Here it is converted to JSON:
>>> from google.protobuf import json_format
>>> example_json = json_format.MessageToJson(example)
>>> print(example_json)
{
"features": {
"feature": {
"a": {
"int64List": {
"value": [
"1",
"1",
"3"
]
}
}
}
}
}
This op converts the above json string to a binary proto:
>>> example_binary = tf.io.decode_json_example(example_json)
>>> example_binary.numpy()
b'\n\x0f\n\r\n\x01a\x12\x08\x1a\x06\x08\x01\x08\x01\x08\x03'
  The op works on string tensors of any shape:
>>> tf.io.decode_json_example([
... [example_json, example_json],
... [example_json, example_json]]).shape.as_list()
[2, 2]
This resulting binary-string is equivalent to `Example.SerializeToString()`,
and can be converted to Tensors using `tf.io.parse_example` and related
functions:
>>> tf.io.parse_example(
... serialized=[example_binary.numpy(),
... example.SerializeToString()],
... features = {'a': tf.io.FixedLenFeature(shape=[3], dtype=tf.int64)})
{'a': <tf.Tensor: shape=(2, 3), dtype=int64, numpy=
array([[1, 1, 3],
[1, 1, 3]])>}
Args:
json_examples: A string tensor containing json-serialized `tf.Example`
protos.
name: A name for the op.
Returns:
A string Tensor containing the binary-serialized `tf.Example` protos.
Raises:
`tf.errors.InvalidArgumentError`: If the JSON could not be converted to a
`tf.Example`
"""
return gen_parsing_ops.decode_json_example(json_examples, name=name)
| frreiss/tensorflow-fred | tensorflow/python/ops/parsing_ops.py | Python | apache-2.0 | 49,979 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
from openstack_dashboard.test.test_data import utils
from saharaclient.api import cluster_templates
from saharaclient.api import clusters
from saharaclient.api import data_sources
from saharaclient.api import job_binaries
from saharaclient.api import job_executions
from saharaclient.api import job_types
from saharaclient.api import jobs
from saharaclient.api import node_group_templates
from saharaclient.api import plugins
def data(TEST):
TEST.plugins = utils.TestDataContainer()
TEST.plugins_configs = utils.TestDataContainer()
TEST.nodegroup_templates = utils.TestDataContainer()
TEST.cluster_templates = utils.TestDataContainer()
TEST.clusters = utils.TestDataContainer()
TEST.data_sources = utils.TestDataContainer()
TEST.job_binaries = utils.TestDataContainer()
TEST.jobs = utils.TestDataContainer()
TEST.job_executions = utils.TestDataContainer()
TEST.registered_images = copy.copy(TEST.images)
TEST.job_types = utils.TestDataContainer()
plugin1_dict = {
"description": "vanilla plugin",
"name": "vanilla",
"title": "Vanilla Apache Hadoop",
"versions": ["2.3.0", "1.2.1"]
}
plugin1 = plugins.Plugin(plugins.PluginManager(None), plugin1_dict)
TEST.plugins.add(plugin1)
plugin_config1_dict = {
"node_processes": {
"HDFS": [
"namenode",
"datanode",
"secondarynamenode"
],
"MapReduce": [
"tasktracker",
"jobtracker"
]
},
"description": "This plugin provides an ability to launch vanilla "
"Apache Hadoop cluster without any management "
"consoles.",
"versions": [
"1.2.1"
],
"required_image_tags": [
"vanilla",
"1.2.1"
],
"configs": [
{
"default_value": "/tmp/hadoop-${user.name}",
"name": "hadoop.tmp.dir",
"priority": 2,
"config_type": "string",
"applicable_target": "HDFS",
"is_optional": True,
"scope": "node",
"description": "A base for other temporary directories."
},
{
"default_value": True,
"name": "hadoop.native.lib",
"priority": 2,
"config_type": "bool",
"applicable_target": "HDFS",
"is_optional": True,
"scope": "node",
"description": "Should native hadoop libraries, if present, "
"be used."
},
],
"title": "Vanilla Apache Hadoop",
"name": "vanilla"
}
TEST.plugins_configs.add(plugins.Plugin(plugins.PluginManager(None),
plugin_config1_dict))
# Nodegroup_Templates.
ngt1_dict = {
"created_at": "2014-06-04 14:01:03.701243",
"description": None,
"flavor_id": "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa",
"availability_zone": None,
"floating_ip_pool": None,
"auto_security_group": True,
"security_groups": [],
"hadoop_version": "1.2.1",
"id": "c166dfcc-9cc7-4b48-adc9-f0946169bb36",
"image_id": None,
"name": "sample-template",
"node_configs": {},
"node_processes": [
"namenode",
"jobtracker",
"secondarynamenode",
"hiveserver",
"oozie"
],
"plugin_name": "vanilla",
"tenant_id": "429ad8447c2d47bc8e0382d244e1d1df",
"updated_at": None,
"volume_mount_prefix": "/volumes/disk",
"volumes_per_node": 0,
"volumes_size": 0,
"volume_type": None,
"volume_local_to_instance": False,
"security_groups": [],
"volumes_availability_zone": None,
"is_proxy_gateway": False,
"use_autoconfig": True,
}
ngt1 = node_group_templates.NodeGroupTemplate(
node_group_templates.NodeGroupTemplateManager(None), ngt1_dict)
TEST.nodegroup_templates.add(ngt1)
# Cluster_templates.
ct1_dict = {
"anti_affinity": [],
"cluster_configs": {},
"created_at": "2014-06-04 14:01:06.460711",
"default_image_id": None,
"description": "Sample description",
"hadoop_version": "1.2.1",
"id": "a2c3743f-31a2-4919-8d02-792138a87a98",
"name": "sample-cluster-template",
"neutron_management_network": None,
"use_autoconfig": True,
"node_groups": [
{
"count": 1,
"created_at": "2014-06-04 14:01:06.462512",
"flavor_id": "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa",
"floating_ip_pool": None,
"image_id": None,
"name": "master",
"node_configs": {},
"node_group_template_id": "c166dfcc-9cc7-4b48-adc9",
"node_processes": [
"namenode",
"jobtracker",
"secondarynamenode",
"hiveserver",
"oozie"
],
"updated_at": None,
"volume_mount_prefix": "/volumes/disk",
"volumes_per_node": 0,
"volumes_size": 0,
"volume_type": None,
"volume_local_to_instance": False,
"volumes_availability_zone": None,
"use_autoconfig": True,
"is_proxy_gateway": False,
},
{
"count": 2,
"created_at": "2014-06-04 14:01:06.463214",
"flavor_id": "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa",
"floating_ip_pool": None,
"image_id": None,
"name": "workers",
"node_configs": {},
"node_group_template_id": "4eb5504c-94c9-4049-a440",
"node_processes": [
"datanode",
"tasktracker"
],
"updated_at": None,
"volume_mount_prefix": "/volumes/disk",
"volumes_per_node": 0,
"volumes_size": 0,
"volume_type": None,
"volume_local_to_instance": False,
"volumes_availability_zone": None,
"use_autoconfig": True,
"is_proxy_gateway": False
}
],
"plugin_name": "vanilla",
"tenant_id": "429ad8447c2d47bc8e0382d244e1d1df",
"updated_at": None
}
ct1 = cluster_templates.ClusterTemplate(
cluster_templates.ClusterTemplateManager(None), ct1_dict)
TEST.cluster_templates.add(ct1)
# Clusters.
cluster1_dict = {
"anti_affinity": [],
"cluster_configs": {},
"cluster_template_id": "a2c3743f-31a2-4919-8d02-792138a87a98",
"created_at": "2014-06-04 20:02:14.051328",
"default_image_id": "9eb4643c-dca8-4ea7-92d2-b773f88a8dc6",
"description": "",
"hadoop_version": "1.2.1",
"id": "ec9a0d28-5cfb-4028-a0b5-40afe23f1533",
"info": {},
"is_transient": False,
"management_public_key": "fakekey",
"name": "cercluster",
"neutron_management_network": None,
"use_autoconfig": True,
"node_groups": [
{
"count": 1,
"created_at": "2014-06-04 20:02:14.053153",
"flavor_id": "0",
"floating_ip_pool": None,
"image_id": None,
"instances": [
{
"created_at": "2014-06-04 20:02:14.834529",
"id": "c3b8004b-7063-4b99-a082-820cdc6e961c",
"instance_id": "a45f5495-4a10-4f17-8fae",
"instance_name": "cercluster-master-001",
"internal_ip": None,
"management_ip": None,
"updated_at": None,
"volumes": []
}
],
"name": "master",
"node_configs": {},
"node_group_template_id": "c166dfcc-9cc7-4b48-adc9",
"node_processes": [
"namenode",
"jobtracker",
"secondarynamenode",
"hiveserver",
"oozie"
],
"updated_at": "2014-06-04 20:02:14.841760",
"volume_mount_prefix": "/volumes/disk",
"volumes_per_node": 0,
"volumes_size": 0,
"volume_type": None,
"volume_local_to_instance": False,
"security_groups": [],
"volumes_availability_zone": None,
"id": "ng1",
"use_autoconfig": True,
"is_proxy_gateway": False
},
{
"count": 2,
"created_at": "2014-06-04 20:02:14.053849",
"flavor_id": "0",
"floating_ip_pool": None,
"image_id": None,
"instances": [
{
"created_at": "2014-06-04 20:02:15.097655",
"id": "6a8ae0b1-bb28-4de2-bfbb-bdd3fd2d72b2",
"instance_id": "38bf8168-fb30-483f-8d52",
"instance_name": "cercluster-workers-001",
"internal_ip": None,
"management_ip": None,
"updated_at": None,
"volumes": []
},
{
"created_at": "2014-06-04 20:02:15.344515",
"id": "17b98ed3-a776-467a-90cf-9f46a841790b",
"instance_id": "85606938-8e53-46a5-a50b",
"instance_name": "cercluster-workers-002",
"internal_ip": None,
"management_ip": None,
"updated_at": None,
"volumes": []
}
],
"name": "workers",
"node_configs": {},
"node_group_template_id": "4eb5504c-94c9-4049-a440",
"node_processes": [
"datanode",
"tasktracker"
],
"updated_at": "2014-06-04 20:02:15.355745",
"volume_mount_prefix": "/volumes/disk",
"volumes_per_node": 0,
"volumes_size": 0,
"volume_type": None,
"volume_local_to_instance": False,
"security_groups": ["b7857890-09bf-4ee0-a0d5-322d7a6978bf"],
"volumes_availability_zone": None,
"id": "ng2",
"use_autoconfig": True,
"is_proxy_gateway": False
}
],
"plugin_name": "vanilla",
"status": "Active",
"status_description": "",
"tenant_id": "429ad8447c2d47bc8e0382d244e1d1df",
"trust_id": None,
"updated_at": "2014-06-04 20:02:15.446087",
"user_keypair_id": "stackboxkp"
}
cluster1 = clusters.Cluster(
clusters.ClusterManager(None), cluster1_dict)
TEST.clusters.add(cluster1)
cluster2_dict = copy.deepcopy(cluster1_dict)
cluster2_dict.update({
"id": "cl2",
"name": "cl2_name",
"provision_progress": [
{
"created_at": "2015-03-27T15:51:54",
"updated_at": "2015-03-27T15:59:34",
"step_name": "first_step",
"step_type": "some_type",
"successful": True,
"events": [],
"total": 3
},
{
"created_at": "2015-03-27T16:01:54",
"updated_at": "2015-03-27T16:10:22",
"step_name": "second_step",
"step_type": "some_other_type",
"successful": None,
"events": [
{
"id": "evt1",
"created_at": "2015-03-27T16:01:22",
"node_group_id": "ng1",
"instance_name": "cercluster-master-001",
"successful": True,
"event_info": None
},
{
"id": "evt2",
"created_at": "2015-03-27T16:04:51",
"node_group_id": "ng2",
"instance_name": "cercluster-workers-001",
"successful": True,
"event_info": None
}
],
"total": 3
}
]
})
cluster2 = clusters.Cluster(
clusters.ClusterManager(None), cluster2_dict)
TEST.clusters.add(cluster2)
# Data Sources.
data_source1_dict = {
"created_at": "2014-06-04 14:01:10.371562",
"description": "sample output",
"id": "426fb01c-5c7e-472d-bba2-b1f0fe7e0ede",
"name": "sampleOutput",
"tenant_id": "429ad8447c2d47bc8e0382d244e1d1df",
"type": "swift",
"updated_at": None,
"url": "swift://example.sahara/output"
}
data_source2_dict = {
"created_at": "2014-06-05 15:01:12.331361",
"description": "second sample output",
"id": "ab3413-adfb-bba2-123456785675",
"name": "sampleOutput2",
"tenant_id": "429ad8447c2d47bc8e0382d244e1d1df",
"type": "hdfs",
"updated_at": None,
"url": "hdfs://example.sahara/output"
}
data_source1 = data_sources.DataSources(
data_sources.DataSourceManager(None), data_source1_dict)
data_source2 = data_sources.DataSources(
data_sources.DataSourceManager(None), data_source2_dict)
TEST.data_sources.add(data_source1)
TEST.data_sources.add(data_source2)
# Job Binaries.
job_binary1_dict = {
"created_at": "2014-06-05 18:15:15.581285",
"description": "",
"id": "3f3a07ac-7d6f-49e8-8669-40b25ee891b7",
"name": "example.pig",
"tenant_id": "429ad8447c2d47bc8e0382d244e1d1df",
"updated_at": None,
"url": "internal-db://80121dea-f8bd-4ad3-bcc7-096f4bfc722d"
}
job_binary2_dict = {
"created_at": "2014-10-10 13:12:15.583631",
"description": "Test for spaces in name",
"id": "abcdef56-1234-abcd-abcd-defabcdaedcb",
"name": "example with spaces.pig",
"tenant_id": "429ad8447c2d47bc8e0382d244e1d1df",
"updated_at": None,
"url": "internal-db://abcdef56-1234-abcd-abcd-defabcdaedcb"
}
job_binary1 = job_binaries.JobBinaries(
job_binaries.JobBinariesManager(None), job_binary1_dict)
job_binary2 = job_binaries.JobBinaries(
job_binaries.JobBinariesManager(None), job_binary2_dict)
TEST.job_binaries.add(job_binary1)
TEST.job_binaries.add(job_binary2)
# Jobs.
job1_dict = {
"created_at": "2014-06-05 19:23:59.637165",
"description": "",
"id": "a077b851-46be-4ad7-93c3-2d83894546ef",
"libs": [
{
"created_at": "2014-06-05 19:23:42.742057",
"description": "",
"id": "ab140807-59f8-4235-b4f2-e03daf946256",
"name": "udf.jar",
"tenant_id": "429ad8447c2d47bc8e0382d244e1d1df",
"updated_at": None,
"url": "internal-db://d186e2bb-df93-47eb-8c0e-ce21e7ecb78b"
}
],
"mains": [
{
"created_at": "2014-06-05 18:15:15.581285",
"description": "",
"id": "3f3a07ac-7d6f-49e8-8669-40b25ee891b7",
"name": "example.pig",
"tenant_id": "429ad8447c2d47bc8e0382d244e1d1df",
"updated_at": None,
"url": "internal-db://80121dea-f8bd-4ad3-bcc7-096f4bfc722d"
}
],
"interface": [],
"name": "pigjob",
"tenant_id": "429ad8447c2d47bc8e0382d244e1d1df",
"type": "Pig",
"updated_at": None,
"job_config": {"configs": {}}
}
job1 = jobs.Job(jobs.JobsManager(None), job1_dict)
TEST.jobs.add(job1)
# Job Executions.
jobex1_dict = {
"cluster_id": "ec9a0d28-5cfb-4028-a0b5-40afe23f1533",
"created_at": "2014-06-05 20:03:06.195937",
"end_time": None,
"id": "4b6c1cbf-c713-49d3-8025-808a87c514a6",
"info": {
"acl": None,
"actions": [
{
"consoleUrl": "-",
"cred": "None",
"data": None,
"endTime": "Thu,05 Jun 2014 20:03:32 GMT",
"errorCode": None,
"errorMessage": None,
"externalChildIDs": None,
"externalId": "-",
"externalStatus": "OK",
"id": "0000000-140604200538581-oozie-hado-W@:start:",
"name": ":start:",
"retries": 0,
"startTime": "Thu,05 Jun 2014 20:03:32 GMT",
"stats": None,
"status": "OK",
"toString": "Action name[:start:] status[OK]",
"trackerUri": "-",
"transition": "job-node",
"type": ":START:"
},
{
"consoleUrl": "fake://console.url",
"cred": "None",
"data": None,
"endTime": None,
"errorCode": None,
"errorMessage": None,
"externalChildIDs": None,
"externalId": "job_201406042004_0001",
"externalStatus": "RUNNING",
"id": "0000000-140604200538581-oozie-hado-W@job-node",
"name": "job-node",
"retries": 0,
"startTime": "Thu,05 Jun 2014 20:03:33 GMT",
"stats": None,
"status": "RUNNING",
"toString": "Action name[job-node] status[RUNNING]",
"trackerUri": "cercluster-master-001:8021",
"transition": None,
"type": "pig"
}
],
"appName": "job-wf",
"appPath": "hdfs://fakepath/workflow.xml",
"conf": "<configuration>fakeconfig</configuration>",
"consoleUrl": "fake://consoleURL",
"createdTime": "Thu,05 Jun 2014 20:03:32 GMT",
"endTime": None,
"externalId": None,
"group": None,
"id": "0000000-140604200538581-oozie-hado-W",
"lastModTime": "Thu,05 Jun 2014 20:03:35 GMT",
"parentId": None,
"run": 0,
"startTime": "Thu,05 Jun 2014 20:03:32 GMT",
"status": "RUNNING",
"toString": "Workflow ...status[RUNNING]",
"user": "hadoop"
},
"input_id": "85884883-3083-49eb-b442-71dd3734d02c",
"job_configs": {
"args": [],
"configs": {},
"params": {}
},
"interface": {},
"job_id": "a077b851-46be-4ad7-93c3-2d83894546ef",
"oozie_job_id": "0000000-140604200538581-oozie-hado-W",
"output_id": "426fb01c-5c7e-472d-bba2-b1f0fe7e0ede",
"progress": None,
"return_code": None,
"start_time": "2014-06-05T16:03:32",
"tenant_id": "429ad8447c2d47bc8e0382d244e1d1df",
"updated_at": "2014-06-05 20:03:46.438248",
"cluster_name_set": True,
"job_name_set": True,
"cluster_name": "cluster-1",
"job_name": "job-1",
"data_source_urls": {
"85884883-3083-49eb-b442-71dd3734d02c": "swift://a.sahara/input",
"426fb01c-5c7e-472d-bba2-b1f0fe7e0ede": "hdfs://a.sahara/output"
}
}
jobex1 = job_executions.JobExecution(
job_executions.JobExecutionsManager(None), jobex1_dict)
TEST.job_executions.add(jobex1)
augmented_image = TEST.registered_images.first()
augmented_image.tags = {}
augmented_image.username = 'myusername'
augmented_image.description = 'mydescription'
job_type1_dict = {
"name": "Pig",
"plugins": [
{
"description": "Fake description",
"versions": {
"2.6.0": {
},
"1.2.1": {
}
},
"name": "vanilla",
"title": "Vanilla Apache Hadoop"
},
]
}
job_types1 = job_types.JobType(
job_types.JobTypesManager(None), job_type1_dict)
TEST.job_types.add(job_types1)
| saydulk/horizon | openstack_dashboard/test/test_data/sahara_data.py | Python | apache-2.0 | 21,811 |
# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Helper functions that compose commonly used network structures from the basic
layers (text CNN, image conv groups, VGG, LSTM/GRU wrappers, attention, etc.).
"""
# from activations import *
from activations import LinearActivation, ReluActivation, SoftmaxActivation, \
IdentityActivation, TanhActivation, SequenceSoftmaxActivation
from attrs import ExtraAttr
from default_decorators import wrap_name_default, wrap_act_default, \
wrap_param_default, wrap_bias_attr_default, wrap_param_attr_default
from layers import * # There are too many layers used in network, so import *
from poolings import MaxPooling, SumPooling
from paddle.trainer.config_parser import *
__all__ = [
'sequence_conv_pool', 'simple_lstm', "simple_img_conv_pool",
"img_conv_bn_pool", 'dropout_layer', 'lstmemory_group', 'lstmemory_unit',
'small_vgg', 'img_conv_group', 'vgg_16_network', 'gru_unit', 'gru_group',
'simple_gru', 'simple_attention', 'simple_gru2', 'bidirectional_gru',
'text_conv_pool', 'bidirectional_lstm', 'inputs', 'outputs'
]
######################################################
# Text CNN #
######################################################
@wrap_name_default("sequence_conv_pooling")
def sequence_conv_pool(input,
context_len,
hidden_size,
name=None,
context_start=None,
pool_type=None,
context_proj_layer_name=None,
context_proj_param_attr=False,
fc_layer_name=None,
fc_param_attr=None,
fc_bias_attr=None,
fc_act=None,
pool_bias_attr=None,
fc_attr=None,
context_attr=None,
pool_attr=None):
"""
    Text convolution and pooling group helper.
Text input => Context Projection => FC Layer => Pooling => Output.
:param name: name of output layer(pooling layer name)
:type name: basestring
:param input: name of input layer
:type input: LayerOutput
:param context_len: context projection length. See
context_projection's document.
:type context_len: int
:param hidden_size: FC Layer size.
:type hidden_size: int
    :param context_start: context projection start position. See
context_projection's context_start.
:type context_start: int or None
:param pool_type: pooling layer type. See pooling_layer's document.
:type pool_type: BasePoolingType.
:param context_proj_layer_name: context projection layer name.
                                    None if the user doesn't care.
:type context_proj_layer_name: basestring
:param context_proj_param_attr: context projection parameter attribute.
                                    None if the user doesn't care.
:type context_proj_param_attr: ParameterAttribute or None.
    :param fc_layer_name: fc layer name. None if the user doesn't care.
:type fc_layer_name: basestring
    :param fc_param_attr: fc layer parameter attribute. None if the user doesn't care.
:type fc_param_attr: ParameterAttribute or None
:param fc_bias_attr: fc bias parameter attribute. False if no bias,
                         None if the user doesn't care.
:type fc_bias_attr: ParameterAttribute or None
:param fc_act: fc layer activation type. None means tanh
:type fc_act: BaseActivation
    :param pool_bias_attr: pooling layer bias attr. None if the user doesn't care.
False if no bias.
:type pool_bias_attr: ParameterAttribute or None.
:param fc_attr: fc layer extra attribute.
:type fc_attr: ExtraLayerAttribute
:param context_attr: context projection layer extra attribute.
:type context_attr: ExtraLayerAttribute
:param pool_attr: pooling layer extra attribute.
:type pool_attr: ExtraLayerAttribute
:return: output layer name.
:rtype: LayerOutput
"""
# Set Default Value to param
context_proj_layer_name = "%s_conv_proj" % name \
if context_proj_layer_name is None else context_proj_layer_name
with mixed_layer(
name=context_proj_layer_name,
size=input.size * context_len,
act=LinearActivation(),
layer_attr=context_attr) as m:
m += context_projection(
input,
context_len=context_len,
context_start=context_start,
padding_attr=context_proj_param_attr)
fc_layer_name = "%s_conv_fc" % name \
if fc_layer_name is None else fc_layer_name
fl = fc_layer(
name=fc_layer_name,
input=m,
size=hidden_size,
act=fc_act,
layer_attr=fc_attr,
param_attr=fc_param_attr,
bias_attr=fc_bias_attr)
return pooling_layer(
name=name,
input=fl,
pooling_type=pool_type,
bias_attr=pool_bias_attr,
layer_attr=pool_attr)
text_conv_pool = sequence_conv_pool
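# A minimal usage sketch of sequence_conv_pool for a two-class text classifier.
# The layer names, vocabulary size, embedding size, hidden size and class count
# below are assumptions chosen only for illustration.
def _example_sequence_conv_pool_config():
    words = data_layer(name="words", size=10000)
    emb = embedding_layer(input=words, size=128)
    conv = sequence_conv_pool(input=emb, context_len=3, hidden_size=256)
    return fc_layer(input=conv, size=2, act=SoftmaxActivation())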
############################################################################
# Images #
############################################################################
@wrap_name_default("conv_pool")
def simple_img_conv_pool(input,
filter_size,
num_filters,
pool_size,
name=None,
pool_type=None,
act=None,
groups=1,
conv_stride=1,
conv_padding=0,
bias_attr=None,
num_channel=None,
param_attr=None,
shared_bias=True,
conv_layer_attr=None,
pool_stride=1,
pool_padding=0,
pool_layer_attr=None):
"""
Simple image convolution and pooling group.
Input => conv => pooling
:param name: group name
:type name: basestring
:param input: input layer name.
:type input: LayerOutput
:param filter_size: see img_conv_layer for details
:type filter_size: int
:param num_filters: see img_conv_layer for details
:type num_filters: int
:param pool_size: see img_pool_layer for details
:type pool_size: int
:param pool_type: see img_pool_layer for details
:type pool_type: BasePoolingType
:param act: see img_conv_layer for details
:type act: BaseActivation
:param groups: see img_conv_layer for details
:type groups: int
:param conv_stride: see img_conv_layer for details
:type conv_stride: int
:param conv_padding: see img_conv_layer for details
:type conv_padding: int
:param bias_attr: see img_conv_layer for details
:type bias_attr: ParameterAttribute
:param num_channel: see img_conv_layer for details
:type num_channel: int
:param param_attr: see img_conv_layer for details
:type param_attr: ParameterAttribute
:param shared_bias: see img_conv_layer for details
:type shared_bias: bool
:param conv_layer_attr: see img_conv_layer for details
:type conv_layer_attr: ExtraLayerAttribute
:param pool_stride: see img_pool_layer for details
:type pool_stride: int
:param pool_padding: see img_pool_layer for details
:type pool_padding: int
:param pool_layer_attr: see img_pool_layer for details
:type pool_layer_attr: ExtraLayerAttribute
:return: Layer's output
:rtype: LayerOutput
"""
_conv_ = img_conv_layer(
name="%s_conv" % name,
input=input,
filter_size=filter_size,
num_filters=num_filters,
num_channels=num_channel,
act=act,
groups=groups,
stride=conv_stride,
padding=conv_padding,
bias_attr=bias_attr,
param_attr=param_attr,
shared_biases=shared_bias,
layer_attr=conv_layer_attr)
return img_pool_layer(
name="%s_pool" % name,
input=_conv_,
pool_size=pool_size,
pool_type=pool_type,
stride=pool_stride,
padding=pool_padding,
layer_attr=pool_layer_attr)
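# A minimal usage sketch of simple_img_conv_pool: one conv + pool block over a
# 32x32 RGB image. The layer name and all sizes are assumptions for illustration.
def _example_simple_img_conv_pool_config():
    img = data_layer(name="image", size=32 * 32 * 3)
    return simple_img_conv_pool(
        input=img,
        filter_size=5,
        num_filters=32,
        num_channel=3,
        pool_size=2,
        pool_stride=2,
        act=ReluActivation())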
@wrap_name_default("conv_bn_pool")
def img_conv_bn_pool(input,
filter_size,
num_filters,
pool_size,
name=None,
pool_type=None,
act=None,
groups=1,
conv_stride=1,
conv_padding=0,
conv_bias_attr=None,
num_channel=None,
conv_param_attr=None,
shared_bias=True,
conv_layer_attr=None,
bn_param_attr=None,
bn_bias_attr=None,
bn_layer_attr=None,
pool_stride=1,
pool_padding=0,
pool_layer_attr=None):
"""
Convolution, batch normalization, pooling group.
:param name: group name
:type name: basestring
:param input: layer's input
:type input: LayerOutput
:param filter_size: see img_conv_layer's document
:type filter_size: int
:param num_filters: see img_conv_layer's document
:type num_filters: int
:param pool_size: see img_pool_layer's document.
:type pool_size: int
:param pool_type: see img_pool_layer's document.
:type pool_type: BasePoolingType
:param act: see batch_norm_layer's document.
:type act: BaseActivation
:param groups: see img_conv_layer's document
:type groups: int
:param conv_stride: see img_conv_layer's document.
:type conv_stride: int
:param conv_padding: see img_conv_layer's document.
:type conv_padding: int
:param conv_bias_attr: see img_conv_layer's document.
:type conv_bias_attr: ParameterAttribute
:param num_channel: see img_conv_layer's document.
:type num_channel: int
:param conv_param_attr: see img_conv_layer's document.
:type conv_param_attr: ParameterAttribute
:param shared_bias: see img_conv_layer's document.
:type shared_bias: bool
:param conv_layer_attr: see img_conv_layer's document.
    :type conv_layer_attr: ExtraLayerAttribute
    :param bn_param_attr: see batch_norm_layer's document.
    :type bn_param_attr: ParameterAttribute
    :param bn_bias_attr: see batch_norm_layer's document.
    :type bn_bias_attr: ParameterAttribute
    :param bn_layer_attr: see batch_norm_layer's document.
    :type bn_layer_attr: ExtraLayerAttribute
:param pool_stride: see img_pool_layer's document.
:type pool_stride: int
:param pool_padding: see img_pool_layer's document.
:type pool_padding: int
:param pool_layer_attr: see img_pool_layer's document.
:type pool_layer_attr: ExtraLayerAttribute
:return: Layer groups output
:rtype: LayerOutput
"""
__conv__ = img_conv_layer(
name="%s_conv" % name,
input=input,
filter_size=filter_size,
num_filters=num_filters,
num_channels=num_channel,
act=LinearActivation(),
groups=groups,
stride=conv_stride,
padding=conv_padding,
bias_attr=conv_bias_attr,
param_attr=conv_param_attr,
shared_biases=shared_bias,
layer_attr=conv_layer_attr)
__bn__ = batch_norm_layer(
name="%s_bn" % name,
input=__conv__,
act=act,
bias_attr=bn_bias_attr,
param_attr=bn_param_attr,
layer_attr=bn_layer_attr)
return img_pool_layer(
name="%s_pool" % name,
input=__bn__,
pool_type=pool_type,
pool_size=pool_size,
stride=pool_stride,
padding=pool_padding,
layer_attr=pool_layer_attr)
@wrap_act_default(param_names=['conv_act'], act=ReluActivation())
@wrap_param_default(
param_names=['pool_type'], default_factory=lambda _: MaxPooling())
def img_conv_group(input,
conv_num_filter,
pool_size,
num_channels=None,
conv_padding=1,
conv_filter_size=3,
conv_act=None,
conv_with_batchnorm=False,
conv_batchnorm_drop_rate=0,
pool_stride=1,
pool_type=None):
"""
    Image Convolution Group, used for vgg net.
    :param conv_batchnorm_drop_rate: dropout rate(s) applied after each batch
                                     norm layer; a scalar or one value per conv.
    :param input: input layer.
    :param conv_num_filter: list/tuple of filter numbers, one per conv layer.
    :param pool_size: pooling window size of the final pooling layer.
    :param num_channels: number of input channels, only needed by the first conv.
    :param conv_padding: padding of each conv layer; a scalar or a list.
    :param conv_filter_size: filter size of each conv layer; a scalar or a list.
    :param conv_act: activation of each conv layer; a scalar or a list.
    :param conv_with_batchnorm: whether to append batch normalization (and the
                                optional dropout) after each conv layer.
    :param pool_stride: stride of the final pooling layer.
    :param pool_type: pooling type of the final pooling layer.
    :return: output of the final pooling layer.
"""
tmp = input
# Type checks
assert isinstance(tmp, LayerOutput)
assert isinstance(conv_num_filter, list) or isinstance(conv_num_filter,
tuple)
for each_num_filter in conv_num_filter:
assert isinstance(each_num_filter, int)
assert isinstance(pool_size, int)
def __extend_list__(obj):
if not hasattr(obj, '__len__'):
return [obj] * len(conv_num_filter)
else:
return obj
conv_padding = __extend_list__(conv_padding)
conv_filter_size = __extend_list__(conv_filter_size)
conv_act = __extend_list__(conv_act)
conv_with_batchnorm = __extend_list__(conv_with_batchnorm)
conv_batchnorm_drop_rate = __extend_list__(conv_batchnorm_drop_rate)
for i in xrange(len(conv_num_filter)):
extra_kwargs = dict()
if num_channels is not None:
extra_kwargs['num_channels'] = num_channels
num_channels = None
if conv_with_batchnorm[i]:
extra_kwargs['act'] = LinearActivation()
else:
extra_kwargs['act'] = conv_act[i]
tmp = img_conv_layer(
input=tmp,
padding=conv_padding[i],
filter_size=conv_filter_size[i],
num_filters=conv_num_filter[i],
**extra_kwargs)
# logger.debug("tmp.num_filters = %d" % tmp.num_filters)
if conv_with_batchnorm[i]:
dropout = conv_batchnorm_drop_rate[i]
if dropout == 0 or abs(dropout) < 1e-5: # dropout not set
tmp = batch_norm_layer(input=tmp, act=conv_act[i])
else:
tmp = batch_norm_layer(
input=tmp,
act=conv_act[i],
layer_attr=ExtraAttr(drop_rate=dropout))
return img_pool_layer(
input=tmp, stride=pool_stride, pool_size=pool_size, pool_type=pool_type)
def small_vgg(input_image, num_channels, num_classes):
def __vgg__(ipt, num_filter, times, dropouts, num_channels_=None):
return img_conv_group(
input=ipt,
num_channels=num_channels_,
pool_size=2,
pool_stride=2,
conv_num_filter=[num_filter] * times,
conv_filter_size=3,
conv_act=ReluActivation(),
conv_with_batchnorm=True,
conv_batchnorm_drop_rate=dropouts,
pool_type=MaxPooling())
tmp = __vgg__(input_image, 64, 2, [0.3, 0], num_channels)
tmp = __vgg__(tmp, 128, 2, [0.4, 0])
tmp = __vgg__(tmp, 256, 3, [0.4, 0.4, 0])
tmp = __vgg__(tmp, 512, 3, [0.4, 0.4, 0])
tmp = img_pool_layer(
input=tmp, stride=2, pool_size=2, pool_type=MaxPooling())
tmp = dropout_layer(input=tmp, dropout_rate=0.5)
tmp = fc_layer(
input=tmp,
size=512,
layer_attr=ExtraAttr(drop_rate=0.5),
act=LinearActivation())
tmp = batch_norm_layer(input=tmp, act=ReluActivation())
return fc_layer(input=tmp, size=num_classes, act=SoftmaxActivation())
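# A minimal usage sketch of small_vgg for a CIFAR-10 sized input. The layer
# name, input size and class count are assumptions for illustration only.
def _example_small_vgg_config():
    img = data_layer(name="image", size=32 * 32 * 3)
    return small_vgg(input_image=img, num_channels=3, num_classes=10)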
def vgg_16_network(input_image, num_channels, num_classes=1000):
"""
Same model from https://gist.github.com/ksimonyan/211839e770f7b538e2d8
    :param num_classes: number of output classes, 1000 by default.
    :param input_image: input layer.
    :type input_image: LayerOutput
    :param num_channels: number of channels of the input image.
    :type num_channels: int
    :return: the final fc layer (softmax output) of the network.
"""
tmp = img_conv_group(
input=input_image,
num_channels=num_channels,
conv_padding=1,
conv_num_filter=[64, 64],
conv_filter_size=3,
conv_act=ReluActivation(),
pool_size=2,
pool_stride=2,
pool_type=MaxPooling())
tmp = img_conv_group(
input=tmp,
conv_num_filter=[128, 128],
conv_padding=1,
conv_filter_size=3,
conv_act=ReluActivation(),
pool_stride=2,
pool_type=MaxPooling(),
pool_size=2)
tmp = img_conv_group(
input=tmp,
conv_num_filter=[256, 256, 256],
conv_padding=1,
conv_filter_size=3,
conv_act=ReluActivation(),
pool_stride=2,
pool_type=MaxPooling(),
pool_size=2)
tmp = img_conv_group(
input=tmp,
conv_num_filter=[512, 512, 512],
conv_padding=1,
conv_filter_size=3,
conv_act=ReluActivation(),
pool_stride=2,
pool_type=MaxPooling(),
pool_size=2)
tmp = img_conv_group(
input=tmp,
conv_num_filter=[512, 512, 512],
conv_padding=1,
conv_filter_size=3,
conv_act=ReluActivation(),
pool_stride=2,
pool_type=MaxPooling(),
pool_size=2)
tmp = fc_layer(
input=tmp,
size=4096,
act=ReluActivation(),
layer_attr=ExtraAttr(drop_rate=0.5))
tmp = fc_layer(
input=tmp,
size=4096,
act=ReluActivation(),
layer_attr=ExtraAttr(drop_rate=0.5))
return fc_layer(input=tmp, size=num_classes, act=SoftmaxActivation())
############################################################################
# Recurrent #
############################################################################
@wrap_name_default("lstm")
def simple_lstm(input,
size,
name=None,
reverse=False,
mat_param_attr=None,
bias_param_attr=None,
inner_param_attr=None,
act=None,
gate_act=None,
state_act=None,
mixed_layer_attr=None,
lstm_cell_attr=None):
"""
Simple LSTM Cell.
    It just combines a mixed layer with full_matrix_projection and a lstmemory
    layer. The simple lstm cell is implemented by the following equations.
.. math::
i_t & = \\sigma(W_{xi}x_{t} + W_{hi}h_{t-1} + W_{ci}c_{t-1} + b_i)
f_t & = \\sigma(W_{xf}x_{t} + W_{hf}h_{t-1} + W_{cf}c_{t-1} + b_f)
c_t & = f_tc_{t-1} + i_t tanh (W_{xc}x_t+W_{hc}h_{t-1} + b_c)
o_t & = \\sigma(W_{xo}x_{t} + W_{ho}h_{t-1} + W_{co}c_t + b_o)
h_t & = o_t tanh(c_t)
    Please refer to **Generating Sequences With Recurrent Neural Networks** if
    you want to know what lstm is. Link_ is here.
.. _Link: http://arxiv.org/abs/1308.0850
:param name: lstm layer name.
:type name: basestring
:param input: input layer name.
:type input: LayerOutput
:param size: lstm layer size.
:type size: int
:param reverse: whether to process the input data in a reverse order
:type reverse: bool
:param mat_param_attr: mixed layer's matrix projection parameter attribute.
:type mat_param_attr: ParameterAttribute
:param bias_param_attr: bias parameter attribute. False means no bias, None
means default bias.
:type bias_param_attr: ParameterAttribute|False
:param inner_param_attr: lstm cell parameter attribute.
:type inner_param_attr: ParameterAttribute
    :param act: lstm final activation type
    :type act: BaseActivation
    :param gate_act: lstm gate activation type
    :type gate_act: BaseActivation
    :param state_act: lstm state activation type.
:type state_act: BaseActivation
:param mixed_layer_attr: mixed layer's extra attribute.
:type mixed_layer_attr: ExtraLayerAttribute
:param lstm_cell_attr: lstm layer's extra attribute.
:type lstm_cell_attr: ExtraLayerAttribute
:return: lstm layer name.
:rtype: LayerOutput
"""
fc_name = 'lstm_transform_%s' % name
with mixed_layer(
name=fc_name,
size=size * 4,
act=IdentityActivation(),
layer_attr=mixed_layer_attr,
bias_attr=False) as m:
m += full_matrix_projection(input, param_attr=mat_param_attr)
return lstmemory(
name=name,
input=m,
reverse=reverse,
bias_attr=bias_param_attr,
param_attr=inner_param_attr,
act=act,
gate_act=gate_act,
state_act=state_act,
layer_attr=lstm_cell_attr)
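# A minimal usage sketch of simple_lstm as a sequence classifier. The layer
# names, vocabulary size, embedding size, hidden size and class count are
# assumptions for illustration only.
def _example_simple_lstm_config():
    words = data_layer(name="words", size=10000)
    emb = embedding_layer(input=words, size=128)
    lstm = simple_lstm(input=emb, size=256)
    last = last_seq(input=lstm)
    return fc_layer(input=last, size=2, act=SoftmaxActivation())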
@wrap_name_default('lstm_unit')
def lstmemory_unit(input,
name=None,
size=None,
param_attr=None,
act=None,
gate_act=None,
state_act=None,
mixed_bias_attr=None,
lstm_bias_attr=None,
mixed_layer_attr=None,
lstm_layer_attr=None,
get_output_layer_attr=None):
"""
    Define calculations that an LSTM unit performs in a single time step.
    This function itself is not a recurrent layer, so it cannot be
    directly applied to sequence input. This function is always used in
recurrent_group (see layers.py for more details) to implement attention
mechanism.
Please refer to **Generating Sequences With Recurrent Neural Networks**
for more details about LSTM. The link goes as follows:
.. _Link: https://arxiv.org/abs/1308.0850
.. math::
i_t & = \\sigma(W_{xi}x_{t} + W_{hi}h_{t-1} + W_{ci}c_{t-1} + b_i)
f_t & = \\sigma(W_{xf}x_{t} + W_{hf}h_{t-1} + W_{cf}c_{t-1} + b_f)
c_t & = f_tc_{t-1} + i_t tanh (W_{xc}x_t+W_{hc}h_{t-1} + b_c)
o_t & = \\sigma(W_{xo}x_{t} + W_{ho}h_{t-1} + W_{co}c_t + b_o)
h_t & = o_t tanh(c_t)
The example usage is:
.. code-block:: python
lstm_step = lstmemory_unit(input=[layer1],
size=256,
act=TanhActivation(),
gate_act=SigmoidActivation(),
state_act=TanhActivation())
:param input: input layer name.
:type input: LayerOutput
:param name: lstmemory unit name.
:type name: basestring
:param size: lstmemory unit size.
:type size: int
:param param_attr: Parameter config, None if use default.
:type param_attr: ParameterAttribute
    :param act: lstm final activation type
    :type act: BaseActivation
    :param gate_act: lstm gate activation type
    :type gate_act: BaseActivation
    :param state_act: lstm state activation type.
:type state_act: BaseActivation
:param mixed_bias_attr: bias parameter attribute of mixed layer.
False means no bias, None means default bias.
:type mixed_bias_attr: ParameterAttribute|False
:param lstm_bias_attr: bias parameter attribute of lstm layer.
False means no bias, None means default bias.
:type lstm_bias_attr: ParameterAttribute|False
:param mixed_layer_attr: mixed layer's extra attribute.
:type mixed_layer_attr: ExtraLayerAttribute
:param lstm_layer_attr: lstm layer's extra attribute.
:type lstm_layer_attr: ExtraLayerAttribute
:param get_output_layer_attr: get output layer's extra attribute.
:type get_output_layer_attr: ExtraLayerAttribute
:return: lstmemory unit name.
:rtype: LayerOutput
"""
if size is None:
assert input.size % 4 == 0
size = input.size / 4
out_mem = memory(name=name, size=size)
state_mem = memory(name="%s_state" % name, size=size)
with mixed_layer(
name="%s_input_recurrent" % name,
size=size * 4,
bias_attr=mixed_bias_attr,
layer_attr=mixed_layer_attr,
act=IdentityActivation()) as m:
m += identity_projection(input=input)
m += full_matrix_projection(input=out_mem, param_attr=param_attr)
lstm_out = lstm_step_layer(
name=name,
input=m,
state=state_mem,
size=size,
bias_attr=lstm_bias_attr,
act=act,
gate_act=gate_act,
state_act=state_act,
layer_attr=lstm_layer_attr)
get_output_layer(
name='%s_state' % name,
input=lstm_out,
arg_name='state',
layer_attr=get_output_layer_attr)
return lstm_out
@wrap_name_default('lstm_group')
def lstmemory_group(input,
size=None,
name=None,
reverse=False,
param_attr=None,
act=None,
gate_act=None,
state_act=None,
mixed_bias_attr=None,
lstm_bias_attr=None,
mixed_layer_attr=None,
lstm_layer_attr=None,
get_output_layer_attr=None):
"""
    lstmemory_group is a recurrent layer group version of Long Short Term
    Memory. It does exactly the same calculation as the lstmemory layer (see
    lstmemory in layers.py for the maths). A promising benefit is that the LSTM
    memory cell states, or hidden states, in every time step are accessible to
    the user. This is especially useful in attention models. If you do not need to
access the internal states of the lstm, but merely use its outputs,
it is recommended to use the lstmemory, which is relatively faster than
lstmemory_group.
NOTE: In PaddlePaddle's implementation, the following input-to-hidden
multiplications:
:math:`W_{xi}x_{t}` , :math:`W_{xf}x_{t}`,
:math:`W_{xc}x_t`, :math:`W_{xo}x_{t}` are not done in lstmemory_unit to
speed up the calculations. Consequently, an additional mixed_layer with
full_matrix_projection must be included before lstmemory_unit is called.
The example usage is:
.. code-block:: python
lstm_step = lstmemory_group(input=[layer1],
size=256,
act=TanhActivation(),
gate_act=SigmoidActivation(),
state_act=TanhActivation())
:param input: input layer name.
:type input: LayerOutput
:param name: lstmemory group name.
:type name: basestring
:param size: lstmemory group size.
:type size: int
:param reverse: is lstm reversed
:type reverse: bool
:param param_attr: Parameter config, None if use default.
:type param_attr: ParameterAttribute
    :param act: lstm final activation type
    :type act: BaseActivation
    :param gate_act: lstm gate activation type
    :type gate_act: BaseActivation
    :param state_act: lstm state activation type.
:type state_act: BaseActivation
:param mixed_bias_attr: bias parameter attribute of mixed layer.
False means no bias, None means default bias.
:type mixed_bias_attr: ParameterAttribute|False
:param lstm_bias_attr: bias parameter attribute of lstm layer.
False means no bias, None means default bias.
:type lstm_bias_attr: ParameterAttribute|False
:param mixed_layer_attr: mixed layer's extra attribute.
:type mixed_layer_attr: ExtraLayerAttribute
:param lstm_layer_attr: lstm layer's extra attribute.
:type lstm_layer_attr: ExtraLayerAttribute
:param get_output_layer_attr: get output layer's extra attribute.
:type get_output_layer_attr: ExtraLayerAttribute
:return: the lstmemory group.
:rtype: LayerOutput
"""
def __lstm_step__(ipt):
return lstmemory_unit(
input=ipt,
name=name,
size=size,
mixed_bias_attr=mixed_bias_attr,
mixed_layer_attr=mixed_layer_attr,
param_attr=param_attr,
lstm_bias_attr=lstm_bias_attr,
act=act,
gate_act=gate_act,
state_act=state_act,
lstm_layer_attr=lstm_layer_attr,
get_output_layer_attr=get_output_layer_attr)
return recurrent_group(
name='%s_recurrent_group' % name,
step=__lstm_step__,
reverse=reverse,
input=input)
@wrap_name_default('gru_unit')
def gru_unit(input,
size=None,
name=None,
gru_bias_attr=None,
gru_param_attr=None,
act=None,
gate_act=None,
gru_layer_attr=None,
naive=False):
"""
Define calculations that a gated recurrent unit performs in a single time
    step. This function itself is not a recurrent layer, so it cannot be
    directly applied to sequence input. This function is almost always used in
the recurrent_group (see layers.py for more details) to implement attention
mechanism.
Please see grumemory in layers.py for the details about the maths.
:param input: input layer name.
:type input: LayerOutput
:param name: name of the gru group.
:type name: basestring
:param size: hidden size of the gru.
:type size: int
:param act: type of the activation
:type act: BaseActivation
:param gate_act: type of the gate activation
:type gate_act: BaseActivation
:param gru_layer_attr: Extra parameter attribute of the gru layer.
:type gru_layer_attr: ParameterAttribute|False
:return: the gru output layer.
:rtype: LayerOutput
"""
assert input.size % 3 == 0
if size is None:
size = input.size / 3
out_mem = memory(name=name, size=size)
if naive:
__step__ = gru_step_naive_layer
else:
__step__ = gru_step_layer
gru_out = __step__(
name=name,
input=input,
output_mem=out_mem,
size=size,
bias_attr=gru_bias_attr,
param_attr=gru_param_attr,
act=act,
gate_act=gate_act,
layer_attr=gru_layer_attr)
return gru_out
@wrap_name_default('gru_group')
def gru_group(input,
size=None,
name=None,
reverse=False,
gru_bias_attr=None,
gru_param_attr=None,
act=None,
gate_act=None,
gru_layer_attr=None,
naive=False):
"""
gru_group is a recurrent layer group version of Gated Recurrent Unit. It
does exactly the same calculation as the grumemory layer does. A promising
benefit is that gru hidden states are accessible to the user. This is
especially useful in attention model. If you do not need to access
any internal state, but merely use the outputs of a GRU, it is recommended
to use the grumemory, which is relatively faster.
Please see grumemory in layers.py for more detail about the maths.
The example usage is:
.. code-block:: python
        gru = gru_group(input=[layer1],
size=256,
act=TanhActivation(),
gate_act=SigmoidActivation())
:param input: input layer name.
:type input: LayerOutput
:param name: name of the gru group.
:type name: basestring
:param size: hidden size of the gru.
:type size: int
:param reverse: whether to process the input data in a reverse order
:type reverse: bool
    :param act: type of the activation
    :type act: BaseActivation
    :param gate_act: type of the gate activation
:type gate_act: BaseActivation
:param gru_bias_attr: bias. False means no bias, None means default bias.
:type gru_bias_attr: ParameterAttribute|False
:param gru_layer_attr: Extra parameter attribute of the gru layer.
:type gru_layer_attr: ParameterAttribute|False
:return: the gru group.
:rtype: LayerOutput
"""
def __gru_step__(ipt):
return gru_unit(
input=ipt,
name=name,
size=size,
gru_bias_attr=gru_bias_attr,
gru_param_attr=gru_param_attr,
act=act,
gate_act=gate_act,
gru_layer_attr=gru_layer_attr,
naive=naive)
return recurrent_group(
name='%s_recurrent_group' % name,
step=__gru_step__,
reverse=reverse,
input=input)
@wrap_name_default('simple_gru')
def simple_gru(input,
size,
name=None,
reverse=False,
mixed_param_attr=None,
mixed_bias_param_attr=None,
mixed_layer_attr=None,
gru_bias_attr=None,
gru_param_attr=None,
act=None,
gate_act=None,
gru_layer_attr=None,
naive=False):
"""
    You may have seen gru_step_layer, grumemory in layers.py, and gru_unit,
    gru_group, simple_gru in networks.py. The reason why there are so many
    interfaces is
that we have two ways to implement recurrent neural network. One way is to
use one complete layer to implement rnn (including simple rnn, gru and lstm)
with multiple time steps, such as recurrent_layer, lstmemory, grumemory. But,
the multiplication operation :math:`W x_t` is not computed in these layers.
See details in their interfaces in layers.py.
    The other implementation is to use a recurrent group which can assemble a
    series of layers to compute the rnn step by step. This way is flexible for
    attention mechanisms or other complex connections.
    - gru_step_layer: only computes one rnn step. It needs a memory as input
and can be used in recurrent group.
- gru_unit: a wrapper of gru_step_layer with memory.
- gru_group: a GRU cell implemented by a combination of multiple layers in
recurrent group.
But :math:`W x_t` is not done in group.
    - grumemory: a GRU cell implemented by one layer, which does the same
      calculation as gru_group and is faster than gru_group.
    - simple_gru: a complete GRU implementation including :math:`W x_t` and
gru_group. :math:`W` contains :math:`W_r`, :math:`W_z` and :math:`W`, see
formula in grumemory.
    In terms of computational speed, grumemory is faster than gru_group,
    and gru_group is faster than simple_gru.
The example usage is:
.. code-block:: python
gru = simple_gru(input=[layer1], size=256)
:param input: input layer name.
:type input: LayerOutput
:param name: name of the gru group.
:type name: basestring
:param size: hidden size of the gru.
:type size: int
:param reverse: whether to process the input data in a reverse order
:type reverse: bool
    :param act: type of the activation
    :type act: BaseActivation
    :param gate_act: type of the gate activation
:type gate_act: BaseActivation
:param gru_bias_attr: bias. False means no bias, None means default bias.
:type gru_bias_attr: ParameterAttribute|False
:param gru_layer_attr: Extra parameter attribute of the gru layer.
:type gru_layer_attr: ParameterAttribute|False
:return: the gru group.
:rtype: LayerOutput
"""
with mixed_layer(
name='%s_transform' % name,
size=size * 3,
bias_attr=mixed_bias_param_attr,
layer_attr=mixed_layer_attr) as m:
m += full_matrix_projection(input=input, param_attr=mixed_param_attr)
return gru_group(
name=name,
size=size,
input=m,
reverse=reverse,
gru_bias_attr=gru_bias_attr,
gru_param_attr=gru_param_attr,
act=act,
gate_act=gate_act,
gru_layer_attr=gru_layer_attr,
naive=naive)
@wrap_name_default('simple_gru2')
def simple_gru2(input,
size,
name=None,
reverse=False,
mixed_param_attr=None,
mixed_bias_attr=None,
gru_param_attr=None,
gru_bias_attr=None,
act=None,
gate_act=None,
mixed_layer_attr=None,
gru_cell_attr=None):
"""
    simple_gru2 is the same as simple_gru, but uses grumemory instead.
Please see grumemory in layers.py for more detail about the maths.
simple_gru2 is faster than simple_gru.
The example usage is:
.. code-block:: python
gru = simple_gru2(input=[layer1], size=256)
:param input: input layer name.
:type input: LayerOutput
:param name: name of the gru group.
:type name: basestring
:param size: hidden size of the gru.
:type size: int
:param reverse: whether to process the input data in a reverse order
:type reverse: bool
    :param act: type of the activation
    :type act: BaseActivation
    :param gate_act: type of the gate activation
:type gate_act: BaseActivation
:param gru_bias_attr: bias. False means no bias, None means default bias.
:type gru_bias_attr: ParameterAttribute|False
:param gru_layer_attr: Extra parameter attribute of the gru layer.
:type gru_layer_attr: ParameterAttribute|False
:return: the gru group.
:rtype: LayerOutput
"""
with mixed_layer(
name='%s_transform' % name,
size=size * 3,
bias_attr=mixed_bias_attr,
layer_attr=mixed_layer_attr) as m:
m += full_matrix_projection(input=input, param_attr=mixed_param_attr)
return grumemory(
name=name,
size=size,
input=m,
reverse=reverse,
bias_attr=gru_bias_attr,
param_attr=gru_param_attr,
act=act,
gate_act=gate_act,
layer_attr=gru_cell_attr)
@wrap_name_default("bidirectional_gru")
def bidirectional_gru(input,
size,
name=None,
return_seq=False,
fwd_mixed_param_attr=None,
fwd_mixed_bias_attr=None,
fwd_gru_param_attr=None,
fwd_gru_bias_attr=None,
fwd_act=None,
fwd_gate_act=None,
fwd_mixed_layer_attr=None,
fwd_gru_cell_attr=None,
bwd_mixed_param_attr=None,
bwd_mixed_bias_attr=None,
bwd_gru_param_attr=None,
bwd_gru_bias_attr=None,
bwd_act=None,
bwd_gate_act=None,
bwd_mixed_layer_attr=None,
bwd_gru_cell_attr=None,
last_seq_attr=None,
first_seq_attr=None,
concat_attr=None,
concat_act=None):
"""
A bidirectional_gru is a recurrent unit that iterates over the input
    sequence both in forward and backward order, and then concatenates the two
    outputs to form a final output. However, concatenation of the two outputs
    is not the only way to form the final output; you can also, for example,
just add them together.
The example usage is:
.. code-block:: python
bi_gru = bidirectional_gru(input=[input1], size=512)
:param name: bidirectional gru layer name.
:type name: basestring
:param input: input layer.
:type input: LayerOutput
:param size: gru layer size.
:type size: int
:param return_seq: If set False, outputs of the last time step are
concatenated and returned.
If set True, the entire output sequences that are
processed in forward and backward directions are
concatenated and returned.
:type return_seq: bool
:return: LayerOutput object.
:rtype: LayerOutput
"""
args = locals()
fw = simple_gru2(
name='%s_fw' % name,
input=input,
size=size,
**dict((k[len('fwd_'):], v) for k, v in args.iteritems()
if k.startswith('fwd_')))
bw = simple_gru2(
name="%s_bw" % name,
input=input,
size=size,
reverse=True,
**dict((k[len('bwd_'):], v) for k, v in args.iteritems()
if k.startswith('bwd_')))
if return_seq:
return concat_layer(
name=name, input=[fw, bw], layer_attr=concat_attr, act=concat_act)
else:
fw_seq = last_seq(
name="%s_fw_last" % name, input=fw, layer_attr=last_seq_attr)
bw_seq = first_seq(
name="%s_bw_last" % name, input=bw, layer_attr=first_seq_attr)
return concat_layer(
name=name,
input=[fw_seq, bw_seq],
layer_attr=concat_attr,
act=concat_act)
@wrap_name_default("bidirectional_lstm")
def bidirectional_lstm(input,
size,
name=None,
return_seq=False,
fwd_mat_param_attr=None,
fwd_bias_param_attr=None,
fwd_inner_param_attr=None,
fwd_act=None,
fwd_gate_act=None,
fwd_state_act=None,
fwd_mixed_layer_attr=None,
fwd_lstm_cell_attr=None,
bwd_mat_param_attr=None,
bwd_bias_param_attr=None,
bwd_inner_param_attr=None,
bwd_act=None,
bwd_gate_act=None,
bwd_state_act=None,
bwd_mixed_layer_attr=None,
bwd_lstm_cell_attr=None,
last_seq_attr=None,
first_seq_attr=None,
concat_attr=None,
concat_act=None):
"""
A bidirectional_lstm is a recurrent unit that iterates over the input
    sequence both in forward and backward order, and then concatenates the two
    outputs to form a final output. However, concatenation of the two outputs
    is not the only way to form the final output; you can also, for example,
just add them together.
Please refer to **Neural Machine Translation by Jointly Learning to Align
and Translate** for more details about the bidirectional lstm.
The link goes as follows:
.. _Link: https://arxiv.org/pdf/1409.0473v3.pdf
The example usage is:
.. code-block:: python
bi_lstm = bidirectional_lstm(input=[input1], size=512)
:param name: bidirectional lstm layer name.
:type name: basestring
:param input: input layer.
:type input: LayerOutput
:param size: lstm layer size.
:type size: int
:param return_seq: If set False, outputs of the last time step are
concatenated and returned.
If set True, the entire output sequences that are
processed in forward and backward directions are
concatenated and returned.
:type return_seq: bool
    :return: LayerOutput object according to the return_seq.
:rtype: LayerOutput
"""
args = locals()
fw = simple_lstm(
name='%s_fw' % name,
input=input,
size=size,
**dict((k[len('fwd_'):], v) for k, v in args.iteritems()
if k.startswith('fwd_')))
bw = simple_lstm(
name="%s_bw" % name,
input=input,
size=size,
reverse=True,
**dict((k[len('bwd_'):], v) for k, v in args.iteritems()
if k.startswith('bwd_')))
if return_seq:
return concat_layer(
name=name, input=[fw, bw], layer_attr=concat_attr, act=concat_act)
else:
fw_seq = last_seq(
name="%s_fw_last" % name, input=fw, layer_attr=last_seq_attr)
bw_seq = first_seq(
name="%s_bw_last" % name, input=bw, layer_attr=first_seq_attr)
return concat_layer(
name=name,
input=[fw_seq, bw_seq],
layer_attr=concat_attr,
act=concat_act)
@wrap_name_default()
@wrap_act_default(param_names=['weight_act'], act=TanhActivation())
def simple_attention(encoded_sequence,
encoded_proj,
decoder_state,
transform_param_attr=None,
softmax_param_attr=None,
weight_act=None,
name=None):
"""
    Calculate and then return a context vector by an attention mechanism.
    The size of the context vector equals the size of the encoded_sequence.
.. math::
a(s_{i-1},h_{j}) & = v_{a}f(W_{a}s_{t-1} + U_{a}h_{j})
e_{i,j} & = a(s_{i-1}, h_{j})
a_{i,j} & = \\frac{exp(e_{i,j})}{\\sum_{k=1}^{T_x}{exp(e_{i,k})}}
c_{i} & = \\sum_{j=1}^{T_{x}}a_{i,j}h_{j}
where :math:`h_{j}` is the jth element of encoded_sequence,
:math:`U_{a}h_{j}` is the jth element of encoded_proj
:math:`s_{i-1}` is decoder_state
:math:`f` is weight_act, and is set to tanh by default.
Please refer to **Neural Machine Translation by Jointly Learning to
Align and Translate** for more details. The link is as follows:
https://arxiv.org/abs/1409.0473.
The example usage is:
.. code-block:: python
context = simple_attention(encoded_sequence=enc_seq,
encoded_proj=enc_proj,
decoder_state=decoder_prev,)
:param name: name of the attention model.
:type name: basestring
:param softmax_param_attr: parameter attribute of sequence softmax
that is used to produce attention weight
:type softmax_param_attr: ParameterAttribute
:param weight_act: activation of the attention model
:type weight_act: Activation
:param encoded_sequence: output of the encoder
:type encoded_sequence: LayerOutput
:param encoded_proj: attention weight is computed by a feed forward neural
network which has two inputs : decoder's hidden state
of previous time step and encoder's output.
encoded_proj is output of the feed-forward network for
encoder's output. Here we pre-compute it outside
simple_attention for speed consideration.
:type encoded_proj: LayerOutput
:param decoder_state: hidden state of decoder in previous time step
:type decoder_state: LayerOutput
:param transform_param_attr: parameter attribute of the feed-forward
network that takes decoder_state as inputs to
compute attention weight.
:type transform_param_attr: ParameterAttribute
:return: a context vector
"""
assert encoded_proj.size == decoder_state.size
proj_size = encoded_proj.size
with mixed_layer(size=proj_size, name="%s_transform" % name) as m:
m += full_matrix_projection(
decoder_state, param_attr=transform_param_attr)
expanded = expand_layer(
input=m, expand_as=encoded_sequence, name='%s_expand' % name)
with mixed_layer(
size=proj_size, act=weight_act, name="%s_combine" % name) as m:
m += identity_projection(expanded)
m += identity_projection(encoded_proj)
# sequence softmax is used to normalize similarities between decoder state
# and encoder outputs into a distribution
attention_weight = fc_layer(
input=m,
size=1,
act=SequenceSoftmaxActivation(),
param_attr=softmax_param_attr,
name="%s_softmax" % name,
bias_attr=False)
scaled = scaling_layer(
weight=attention_weight,
input=encoded_sequence,
name='%s_scaling' % name)
return pooling_layer(
input=scaled, pooling_type=SumPooling(), name="%s_pooling" % name)
############################################################################
# Miscs #
############################################################################
@wrap_name_default("dropout")
def dropout_layer(input, dropout_rate, name=None):
    """
    Drop out the input with the given dropout_rate. Implemented as an
    addto_layer with linear activation, no bias and an ExtraAttr drop_rate.
    :param name: layer name.
    :param input: input layer.
    :param dropout_rate: probability of dropping a unit's activation.
    :return: LayerOutput object.
    """
return addto_layer(
name=name,
input=input,
act=LinearActivation(),
bias_attr=False,
layer_attr=ExtraAttr(drop_rate=dropout_rate))
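# A minimal usage sketch of dropout_layer applied to the output of a fully
# connected layer. The layer name and sizes are assumptions for illustration.
def _example_dropout_config():
    ipt = data_layer(name="features", size=100)
    hidden = fc_layer(input=ipt, size=64, act=ReluActivation())
    return dropout_layer(input=hidden, dropout_rate=0.5)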
def inputs(layers, *args):
"""
    Declare the inputs of the network. The order of inputs should be the same
    as the data provider's return order.
:param layers: Input Layers.
:type layers: list|tuple|LayerOutput.
:return:
"""
if isinstance(layers, LayerOutput) or isinstance(layers, basestring):
layers = [layers]
if len(args) != 0:
layers.extend(args)
Inputs(*[l.name for l in layers])
def outputs(layers, *args):
"""
    Declare the outputs of the network. If the user has not defined the inputs
    of the network, this method will calculate the input order by a DFS traversal.
:param layers: Output layers.
:type layers: list|tuple|LayerOutput
:return:
"""
def __dfs_travel__(layer,
predicate=lambda x: x.layer_type == LayerType.DATA):
"""
        DFS post-order (LRV) traversal from an output layer.
        The return order is the definition order of the data_layers reachable
        from this node.
:param layer:
:type layer: LayerOutput
:return:
"""
assert isinstance(layer, LayerOutput), "layer is %s" % (layer)
retv = []
if layer.parents is not None:
for p in layer.parents:
retv.extend(__dfs_travel__(p, predicate))
if predicate(layer):
retv.append(layer)
return retv
if isinstance(layers, LayerOutput):
layers = [layers]
if len(args) != 0:
layers.extend(args)
assert len(layers) > 0
if HasInputsSet(): # input already set
Outputs(*[l.name for l in layers])
return # just return outputs.
if len(layers) != 1:
        logger.warning("`outputs` routine tries to calculate the network's"
                       " input and output order. It might not work well."
                       " Please check the following log carefully.")
inputs = []
outputs_ = []
for each_layer in layers:
assert isinstance(each_layer, LayerOutput)
inputs.extend(__dfs_travel__(each_layer))
outputs_.extend(
__dfs_travel__(each_layer,
lambda x: x.layer_type == LayerType.COST))
# Currently, we got each leaf node's inputs order, output order.
# We merge them together.
final_inputs = []
final_outputs = []
for each_input in inputs:
assert isinstance(each_input, LayerOutput)
if each_input.name not in final_inputs:
final_inputs.append(each_input.name)
for each_output in outputs_:
assert isinstance(each_output, LayerOutput)
if each_output.name not in final_outputs:
final_outputs.append(each_output.name)
logger.info("".join(["The input order is [", ", ".join(final_inputs), "]"]))
if len(final_outputs) == 0:
final_outputs = map(lambda x: x.name, layers)
logger.info("".join(
["The output order is [", ", ".join(final_outputs), "]"]))
Inputs(*final_inputs)
Outputs(*final_outputs)
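# A minimal usage sketch of inputs()/outputs() on a tiny classification config
# built from the helpers above. Layer names and sizes are assumptions for
# illustration only.
def _example_inputs_outputs_config():
    img = data_layer(name="image", size=784)
    lbl = data_layer(name="label", size=10)
    pred = fc_layer(input=img, size=10, act=SoftmaxActivation())
    cost = classification_cost(input=pred, label=lbl)
    inputs(img, lbl)
    outputs(cost)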
| cxysteven/Paddle | python/paddle/trainer_config_helpers/networks.py | Python | apache-2.0 | 52,297 |
import datetime
import os
from django.apps import apps
from django.core.management.base import BaseCommand
from django.db import connection
from django.db.migrations import Migration
from django.db.migrations import operations
from django.db.migrations.loader import MigrationLoader
from django.db.migrations.writer import MigrationWriter
# Triggers are faster and will run in any insert/update situation.
# Model-based logic will not run in certain scenarios, e.g. bulk operations.
class Command(BaseCommand):
can_import_settings = True
PROCEDURE = '''
CREATE OR REPLACE FUNCTION before_{concrete}_change() RETURNS trigger AS $$
DECLARE
vid INTEGER;
BEGIN
INSERT INTO {version}({columns}) VALUES ({new_columns}) RETURNING (id) INTO vid;
NEW.version_id = vid;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
'''
PROCEDURE_REVERSE = '''
DROP FUNCTION before_{concrete}_change();
'''
TRIGGER = '''
DROP TRIGGER IF EXISTS {concrete}_change ON {concrete};
CREATE TRIGGER {concrete}_change
BEFORE INSERT OR UPDATE ON {concrete}
FOR EACH ROW
EXECUTE PROCEDURE before_{concrete}_change();
'''
TRIGGER_REVERSE = '''
DROP TRIGGER {concrete}_change
'''
def collect_fields(self, model):
concrete_fields = ['NEW.' + f.column for f in model._meta.fields]
version_fields = [f.column for f in model.VersionModel._meta.fields]
version_fields.remove('id')
version_fields.remove('action')
version_fields.remove('persistent_id')
concrete_fields.remove('NEW.id')
concrete_fields.remove('NEW.version_id')
assert len(version_fields) == len(concrete_fields)
return concrete_fields, version_fields
def build_operations(self, model):
concrete_fields, version_fields = self.collect_fields(model)
params = {
'concrete': model._meta.db_table,
'version': model.VersionModel._meta.db_table,
'columns': ', '.join(['persistent_id', 'action'] + sorted(version_fields)),
'new_columns': ', '.join(['NEW.id', 'TG_OP'] + sorted(concrete_fields)),
}
return [
operations.RunSQL(self.PROCEDURE.format(**params).strip(), reverse_sql=self.PROCEDURE_REVERSE.format(**params).strip()),
operations.RunSQL(self.TRIGGER.format(**params).strip(), reverse_sql=self.TRIGGER_REVERSE.format(**params).strip()),
]
def write_migration(self, migration):
writer = MigrationWriter(migration)
os.makedirs(os.path.dirname(writer.path), exist_ok=True)
with open(writer.path, 'w') as fp:
fp.write(writer.as_string())
def handle(self, *args, **options):
ops = []
for model in apps.get_models(include_auto_created=True):
if not hasattr(model, 'VersionModel') or model._meta.proxy:
continue
ops.extend(self.build_operations(model))
if options['initial']:
m = Migration('0003_triggers', 'share')
m.dependencies = [('share', '0002_create_share_user')]
else:
ml = MigrationLoader(connection=connection)
ml.build_graph()
last_share_migration = [x[1] for x in ml.graph.leaf_nodes() if x[0] == 'share'][0]
next_number = '{0:04d}'.format(int(last_share_migration[0:4]) + 1)
m = Migration('{}_update_trigger_migrations_{}'.format(next_number, datetime.datetime.now().strftime("%Y%m%d_%H%M")), 'share')
m.dependencies = [('share', '0002_create_share_user'), ('share', last_share_migration)]
m.operations = ops
self.write_migration(m)
def add_arguments(self, parser):
parser.add_argument('--initial', action='store_true', help='Create initial trigger migrations')
parser.add_argument('--update', action='store_true', help='Update trigger migrations after schema change')
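# Illustrative example: for a hypothetical concrete table "share_foo" whose
# version table is "share_fooversion" and whose only versioned column is
# "title", build_operations would render SQL roughly like the following
# (table and column names are assumptions, not part of the real schema):
#
# CREATE OR REPLACE FUNCTION before_share_foo_change() RETURNS trigger AS $$
# DECLARE
# vid INTEGER;
# BEGIN
# INSERT INTO share_fooversion(persistent_id, action, title)
# VALUES (NEW.id, TG_OP, NEW.title) RETURNING (id) INTO vid;
# NEW.version_id = vid;
# RETURN NEW;
# END;
# $$ LANGUAGE plpgsql;
#
# DROP TRIGGER IF EXISTS share_foo_change ON share_foo;
# CREATE TRIGGER share_foo_change
# BEFORE INSERT OR UPDATE ON share_foo
# FOR EACH ROW
# EXECUTE PROCEDURE before_share_foo_change();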
| CenterForOpenScience/SHARE | share/management/commands/maketriggermigrations.py | Python | apache-2.0 | 4,022 |
# Copyright 2017 Janos Czentye
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Sublayer for classes related to the merged REST-API
"""
from pox.core import core
LAYER_NAME = "REST-API"
log = core.getLogger(LAYER_NAME) | 5GExchange/escape | escape/escape/api/__init__.py | Python | apache-2.0 | 717 |
# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from synapse.push.rulekinds import (
PRIORITY_CLASS_MAP, PRIORITY_CLASS_INVERSE_MAP
)
import copy
def format_push_rules_for_user(user, ruleslist):
    """Converts a list of raw rules and an enabled map into nested dictionaries
    to match the Matrix client-server format for push rules."""
# We're going to be mutating this a lot, so do a deep copy
ruleslist = copy.deepcopy(ruleslist)
rules = {'global': {}, 'device': {}}
rules['global'] = _add_empty_priority_class_arrays(rules['global'])
for r in ruleslist:
rulearray = None
template_name = _priority_class_to_template_name(r['priority_class'])
# Remove internal stuff.
for c in r["conditions"]:
c.pop("_id", None)
pattern_type = c.pop("pattern_type", None)
if pattern_type == "user_id":
c["pattern"] = user.to_string()
elif pattern_type == "user_localpart":
c["pattern"] = user.localpart
rulearray = rules['global'][template_name]
template_rule = _rule_to_template(r)
if template_rule:
if 'enabled' in r:
template_rule['enabled'] = r['enabled']
else:
template_rule['enabled'] = True
rulearray.append(template_rule)
return rules
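# Illustrative sketch of the returned shape: for a single enabled "content"
# rule with rule_id ".../my_rule", pattern "alice" and actions ["notify"]
# (hypothetical values), the result looks roughly like:
#
# {
#     "global": {
#         "content": [
#             {"rule_id": "my_rule", "pattern": "alice",
#              "actions": ["notify"], "enabled": True}
#         ],
#         ...  # every other priority class maps to an empty list
#     },
#     "device": {},
# }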
def _add_empty_priority_class_arrays(d):
for pc in PRIORITY_CLASS_MAP.keys():
d[pc] = []
return d
def _rule_to_template(rule):
unscoped_rule_id = None
if 'rule_id' in rule:
unscoped_rule_id = _rule_id_from_namespaced(rule['rule_id'])
template_name = _priority_class_to_template_name(rule['priority_class'])
if template_name in ['override', 'underride']:
templaterule = {k: rule[k] for k in ["conditions", "actions"]}
elif template_name in ["sender", "room"]:
templaterule = {'actions': rule['actions']}
unscoped_rule_id = rule['conditions'][0]['pattern']
elif template_name == 'content':
if len(rule["conditions"]) != 1:
return None
thecond = rule["conditions"][0]
if "pattern" not in thecond:
return None
templaterule = {'actions': rule['actions']}
templaterule["pattern"] = thecond["pattern"]
if unscoped_rule_id:
templaterule['rule_id'] = unscoped_rule_id
if 'default' in rule:
templaterule['default'] = rule['default']
return templaterule
def _rule_id_from_namespaced(in_rule_id):
return in_rule_id.split('/')[-1]
def _priority_class_to_template_name(pc):
return PRIORITY_CLASS_INVERSE_MAP[pc]
| TribeMedia/synapse | synapse/push/clientformat.py | Python | apache-2.0 | 3,212 |
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2010,2011,2012,2013 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains the logic for `aq add sandbox`."""
import re
from aquilon.exceptions_ import (AuthorizationException, ArgumentError)
from aquilon.worker.broker import BrokerCommand # pylint: disable=W0611
from aquilon.worker.commands.get import CommandGet
from aquilon.aqdb.model import Sandbox, Branch
from aquilon.worker.processes import run_git
class CommandAddSandbox(CommandGet):
required_parameters = ["sandbox"]
# Need to override CommandGet which has this as True...
requires_readonly = False
default_style = "csv"
requires_format = True
def render(self, session, logger, dbuser, sandbox, start, get, comments,
**arguments):
if not dbuser:
raise AuthorizationException("Cannot create a sandbox without an "
"authenticated connection.")
sandbox = self.force_my_sandbox(session, logger, dbuser, sandbox)
# See `git check-ref-format --help` for naming restrictions.
# We want to layer a few extra restrictions on top of that...
valid = re.compile('^[a-zA-Z0-9_.-]+$')
if (not valid.match(sandbox)):
raise ArgumentError("sandbox name '%s' is not valid" % sandbox)
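        # Illustrative examples (not exhaustive): a name such as 'my-sandbox_1.0'
        # passes the check above, while names containing '/', spaces or other
        # characters outside [a-zA-Z0-9_.-] are rejected with ArgumentError.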
Branch.get_unique(session, sandbox, preclude=True)
if not start:
start = self.config.get("broker", "default_domain_start")
dbstart = Branch.get_unique(session, start, compel=True)
kingdir = self.config.get("broker", "kingdir")
base_commit = run_git(["show-ref", "--hash", "refs/heads/" +
dbstart.name], logger=logger, path=kingdir)
compiler = self.config.get("panc", "pan_compiler")
dbsandbox = Sandbox(name=sandbox, owner=dbuser, compiler=compiler,
base_commit=base_commit, comments=comments)
session.add(dbsandbox)
session.flush()
# Currently this will fail if the branch already exists...
# That seems like the right behavior. It's an internal
# consistency issue that would need to be addressed explicitly.
run_git(["branch", sandbox, dbstart.name], logger=logger, path=kingdir)
if get == False:
# The client knows to interpret an empty response as no action.
return []
return CommandGet.render(self, session=session, logger=logger,
dbuser=dbuser, sandbox=sandbox)
| stdweird/aquilon | lib/python2.6/aquilon/worker/commands/add_sandbox.py | Python | apache-2.0 | 3,149 |
import json
from django.conf import settings
from django.core.urlresolvers import reverse
from django.contrib import messages
from django.http import HttpResponseRedirect, Http404, HttpResponse
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.template.loader import render_to_string
from django.utils.translation import ugettext as _
from seahub.auth.decorators import login_required, login_required_ajax
from seahub.notifications.models import Notification, NotificationForm, \
UserNotification
from seahub.notifications.utils import refresh_cache
from seahub.avatar.util import get_default_avatar_url
@login_required
def notification_list(request):
if not request.user.is_staff:
raise Http404
notes = Notification.objects.all().order_by('-id')
return render_to_response('notifications/notification_list.html', {
'notes': notes,
}, context_instance=RequestContext(request))
@login_required
def notification_add(request):
if not request.user.is_staff or request.method != 'POST':
raise Http404
f = NotificationForm(request.POST)
f.save()
return HttpResponseRedirect(reverse('notification_list', args=[]))
@login_required
def notification_delete(request, nid):
if not request.user.is_staff:
raise Http404
Notification.objects.filter(id=nid).delete()
refresh_cache()
return HttpResponseRedirect(reverse('notification_list', args=[]))
@login_required
def set_primary(request, nid):
if not request.user.is_staff:
raise Http404
# TODO: use transaction?
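    # A possible sketch (assumption, not applied here): wrapping both updates
    # in django.db.transaction.atomic() would keep a failure between them from
    # leaving zero or two primary notifications, e.g.:
    #   with transaction.atomic():
    #       Notification.objects.filter(primary=1).update(primary=0)
    #       Notification.objects.filter(id=nid).update(primary=1)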
Notification.objects.filter(primary=1).update(primary=0)
Notification.objects.filter(id=nid).update(primary=1)
refresh_cache()
return HttpResponseRedirect(reverse('notification_list', args=[]))
########## user notifications
@login_required
def user_notification_list(request):
"""
Arguments:
- `request`:
"""
username = request.user.username
count = 25 # initial notification count
    limit = 25  # next amount of notifications fetched by AJAX
notices = UserNotification.objects.get_user_notifications(username)[:count]
# Add 'msg_from' or 'default_avatar_url' to notice.
notices = add_notice_from_info(notices)
notices_more = True if len(notices) == count else False
return render_to_response("notifications/user_notification_list.html", {
'notices': notices,
'start': count,
'limit': limit,
'notices_more': notices_more,
}, context_instance=RequestContext(request))
@login_required_ajax
def user_notification_more(request):
"""Fetch next ``limit`` notifications starts from ``start``.
Arguments:
- `request`:
- `start`:
- `limit`:
"""
username = request.user.username
start = int(request.GET.get('start', 0))
limit = int(request.GET.get('limit', 0))
notices = UserNotification.objects.get_user_notifications(username)[
start: start+limit]
# Add 'msg_from' or 'default_avatar_url' to notice.
notices = add_notice_from_info(notices)
notices_more = True if len(notices) == limit else False
new_start = start+limit
ctx = {'notices': notices}
html = render_to_string("notifications/user_notification_tr.html", ctx)
ct = 'application/json; charset=utf-8'
return HttpResponse(json.dumps({
'html':html,
'notices_more':notices_more,
'new_start': new_start}), content_type=ct)
@login_required
def user_notification_remove(request):
"""
Arguments:
- `request`:
"""
UserNotification.objects.remove_user_notifications(request.user.username)
messages.success(request, _("Successfully cleared all notices."))
next = request.META.get('HTTP_REFERER', None)
if not next:
next = settings.SITE_ROOT
return HttpResponseRedirect(next)
def add_notice_from_info(notices):
'''Add 'msg_from' or 'default_avatar_url' to notice.
'''
default_avatar_url = get_default_avatar_url()
for notice in notices:
if notice.is_user_message():
d = notice.user_message_detail_to_dict()
notice.msg_from = d.get('msg_from')
elif notice.is_group_msg():
d = notice.group_message_detail_to_dict()
if d.get('msg_from') is not None:
notice.msg_from = d.get('msg_from')
else:
notice.default_avatar_url = default_avatar_url
elif notice.is_grpmsg_reply():
d = notice.grpmsg_reply_detail_to_dict()
if d.get('reply_from') is not None:
notice.msg_from = d.get('reply_from')
else:
notice.default_avatar_url = default_avatar_url
elif notice.is_file_uploaded_msg():
notice.default_avatar_url = default_avatar_url
elif notice.is_repo_share_msg():
d = json.loads(notice.detail)
notice.msg_from = d['share_from']
elif notice.is_priv_file_share_msg():
d = json.loads(notice.detail)
notice.msg_from = d['share_from']
elif notice.is_group_join_request():
d = json.loads(notice.detail)
notice.msg_from = d['username']
else:
pass
return notices
| cloudcopy/seahub | seahub/notifications/views.py | Python | apache-2.0 | 5,436 |
#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import json
import posixpath
import threading
from desktop.lib.rest.http_client import HttpClient
from desktop.lib.rest.resource import Resource
from spark.conf import get_livy_server_url, SECURITY_ENABLED
LOG = logging.getLogger(__name__)
DEFAULT_USER = 'hue'
_API_VERSION = 'v1'
_JSON_CONTENT_TYPE = 'application/json'
_BINARY_CONTENT_TYPE = 'application/octet-stream'
_TEXT_CONTENT_TYPE = 'text/plain'
API_CACHE = None
API_CACHE_LOCK = threading.Lock()
def get_api(user):
global API_CACHE
if API_CACHE is None:
API_CACHE_LOCK.acquire()
try:
if API_CACHE is None:
API_CACHE = JobServerApi(get_livy_server_url())
finally:
API_CACHE_LOCK.release()
API_CACHE.setuser(user)
return API_CACHE
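# Illustrative usage (session properties are hypothetical and depend on the
# deployed Livy version):
#   api = get_api(request.user)
#   session = api.create_session(kind='pyspark')
#   api.submit_statement(session['id'], '1 + 1')
# get_api() uses double-checked locking so the shared JobServerApi instance is
# created once; only the acting user is switched per request via setuser().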
class JobServerApi(object):
def __init__(self, oozie_url):
self._url = posixpath.join(oozie_url)
self._client = HttpClient(self._url, logger=LOG)
self._root = Resource(self._client)
self._security_enabled = SECURITY_ENABLED.get()
self._thread_local = threading.local()
if self.security_enabled:
self._client.set_kerberos_auth()
def __str__(self):
return "JobServerApi at %s" % (self._url,)
@property
def url(self):
return self._url
@property
def security_enabled(self):
return self._security_enabled
@property
def user(self):
return self._thread_local.user
def setuser(self, user):
if hasattr(user, 'username'):
self._thread_local.user = user.username
else:
self._thread_local.user = user
def get_status(self):
return self._root.get('sessions')
def get_log(self, uuid, startFrom=None, size=None):
params = {}
if startFrom is not None:
params['from'] = startFrom
if size is not None:
params['size'] = size
response = self._root.get('sessions/%s/log' % uuid, params=params)
return '\n'.join(response['log'])
def create_session(self, **properties):
properties['proxyUser'] = self.user
return self._root.post('sessions', data=json.dumps(properties), contenttype=_JSON_CONTENT_TYPE)
def get_sessions(self):
return self._root.get('sessions')
def get_session(self, uuid):
return self._root.get('sessions/%s' % uuid)
def get_statements(self, uuid):
return self._root.get('sessions/%s/statements' % uuid)
def submit_statement(self, uuid, statement):
data = {'code': statement}
return self._root.post('sessions/%s/statements' % uuid, data=json.dumps(data), contenttype=_JSON_CONTENT_TYPE)
def inspect(self, uuid, statement):
data = {'code': statement}
return self._root.post('sessions/%s/inspect' % uuid, data=json.dumps(data), contenttype=_JSON_CONTENT_TYPE)
def fetch_data(self, session, statement):
return self._root.get('sessions/%s/statements/%s' % (session, statement))
def cancel(self, session):
return self._root.post('sessions/%s/interrupt' % session)
def close(self, uuid):
return self._root.delete('sessions/%s' % uuid)
def get_batches(self):
return self._root.get('batches')
def submit_batch(self, properties):
properties['proxyUser'] = self.user
return self._root.post('batches', data=json.dumps(properties), contenttype=_JSON_CONTENT_TYPE)
def get_batch(self, uuid):
return self._root.get('batches/%s' % uuid)
def get_batch_status(self, uuid):
response = self._root.get('batches/%s/state' % uuid)
return response['state']
def get_batch_log(self, uuid, startFrom=None, size=None):
params = {}
if startFrom is not None:
params['from'] = startFrom
if size is not None:
params['size'] = size
response = self._root.get('batches/%s/log' % uuid, params=params)
return '\n'.join(response['log'])
def close_batch(self, uuid):
return self._root.delete('batches/%s' % uuid)
| xq262144/hue | apps/spark/src/spark/job_server_api.py | Python | apache-2.0 | 4,607 |
#! /usr/bin/env python
#
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Computes basic statistics about Myanmar codepoint bigrams.
Usage examples:
* Print counts of codepoint bigrams in Myanmar text:
$ ./bigrams.py < some_file.txt
* Read filenames from stdin (one per line) and write frequency and
percentage of unseen codepoint bigrams to stdout:
$ find path/to/directory -type f | ./bigrams.py known_bigrams.txt
* Read filenames from stdin (one per line) and write frequency, percentage,
and beta-binomial p-value of unseen codepoint bigrams to stdout:
$ find path/to/directory -type f | ./bigrams.py known_bigrams.txt 2 700
"""
from __future__ import unicode_literals
import codecs
import re
import sys
import extract_text
import betabinom_test
STDIN = codecs.getreader('utf-8')(sys.stdin)
STDOUT = codecs.getwriter('utf-8')(sys.stdout)
STDERR = codecs.getwriter('utf-8')(sys.stderr)
IGNORE = re.compile(r'[-()]+')
def MyanmarPhrases(reader):
for phrase in extract_text.ExtractText(reader):
yield IGNORE.sub('', phrase)
return
def NGrams(phrases, n, separator):
assert n >= 1
history = [separator]
while len(history) >= n:
history.pop(0)
for phrase in phrases:
for c in phrase:
yield c, history
history.append(c)
while len(history) >= n:
history.pop(0)
yield separator, history
history.append(separator)
while len(history) >= n:
history.pop(0)
return
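# Illustrative example (toy input, not from the original source): with n=2 and
# separator ' ', NGrams(['ab'], 2, ' ') yields the (current, history) pairs
# ('a', [' ']), ('b', ['a']), (' ', ['b']) at yield time -- note that the same
# history list object is reused and mutated between yields.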
def FormatNGrams(ngrams):
for current, history in ngrams:
yield ' '.join('%04X' % ord(cp) for cp in history + [current])
return
def Count(ngrams):
total = 0
count = {}
for ngram in ngrams:
total += 1
count[ngram] = 1 + count.get(ngram, 0)
return total, count
def PrintNgramCounts(n=2):
ngrams = NGrams(MyanmarPhrases(STDIN), n, ' ')
total, count = Count(FormatNGrams(ngrams))
items = count.items()
items.sort(key=lambda (ngrm, cnt): (-cnt, ngrm))
for ngrm_cnt in items:
STDOUT.write('%s\t%d\n' % ngrm_cnt)
return
def ReadNgrams(path, default_count=1):
with codecs.open(path, 'r', 'utf-8') as reader:
for line in reader:
line = line.rstrip('\n')
if not line or line.startswith('#'):
continue
fields = line.split('\t')
if len(fields) >= 2:
yield fields[0], int(fields[1])
else:
yield fields[0], default_count
return
def ProcessFile(path, known_bigrams, alpha, beta):
total = 0
unseen = 0
with codecs.open(path, 'r', 'utf-8') as reader:
ngrams = FormatNGrams(NGrams(MyanmarPhrases(reader), 2, ' '))
for ngram in ngrams:
total += 1
if ngram not in known_bigrams:
unseen += 1
sys.stdout.write('%s\t%d\t%d' % (path, unseen, total))
if total == 0:
sys.stdout.write('\tNA')
else:
sys.stdout.write('\t%f' % (unseen * 100.0 / total))
if alpha and beta:
p_value = betabinom_test.UpperTailPValue(unseen, total, alpha, beta)
if p_value < 0.001:
sig = '***'
elif p_value < 0.01:
sig = '**'
elif p_value < 0.05:
sig = '*'
elif p_value < 0.1:
sig = '.'
else:
sig = ''
sys.stdout.write('\t%g\t%s' % (p_value, sig))
sys.stdout.write('\n')
return
def ProcessFiles(known_bigrams, alpha, beta):
for path in sys.stdin:
path = path[:-1]
ProcessFile(path, known_bigrams, alpha, beta)
return
def main(argv):
if len(argv) == 1:
PrintNgramCounts()
elif len(argv) >= 2:
known_bigrams = dict(ReadNgrams(argv[1]))
if len(argv) >= 4:
alpha = float(argv[2])
beta = float(argv[3])
else:
alpha = None
beta = None
ProcessFiles(known_bigrams, alpha, beta)
else:
STDERR.write('Usage: %s [frequent_bigrams.txt [alpha beta]]\n' % argv[0])
sys.exit(2)
return
if __name__ == '__main__':
main(sys.argv)
| googlei18n/language-resources | my/bigrams.py | Python | apache-2.0 | 4,368 |
# -*- coding: utf-8 -*-
class Keywords(dict):
def __init__(self,
keywords=[]):
for keyword in keywords:
self.__setitem__(keyword.name, keyword)
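    # Illustrative usage (Keyword objects are hypothetical): Keywords([kw])
    # maps kw.name -> kw, so lookups read keywords['time'] instead of scanning
    # a list. The mutable default `keywords=[]` is safe here only because the
    # argument is never modified.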
# def __iter__(self):
# return self.itervalues()
| jfrygeo/solutions-geoprocessing-toolbox | suitability/toolboxes/scripts/MultidimensionSupplementalTools/MultidimensionSupplementalTools/Scripts/mds/keywords.py | Python | apache-2.0 | 245 |
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Horizon Release Notes documentation build configuration file, created by
# sphinx-quickstart on Tue Nov 3 17:40:50 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'reno.sphinxext',
'sphinx.ext.extlinks',
'openstackdocstheme',
]
# openstackdocstheme options
repository_name = 'openstack/horizon'
bug_project = 'horizon'
bug_tag = 'documentation'
html_last_updated_fmt = '%Y-%m-%d %H:%M'
# Set aliases for extlinks
# - generic launchpad bug - [:bug:`12345`]
# - horizon blueprint - [:blueprint:`drop-nova-network`]
extlinks = {
'bug': (
'https://bugs.launchpad.net/bugs/%s',
'bug ',
),
'blueprint': (
'https://blueprints.launchpad.net/horizon/+spec/%s',
'blueprint ',
),
}
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Horizon Release Notes'
copyright = u'2015, Horizon Developers'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
import pbr.version
horizon_version = pbr.version.VersionInfo('horizon')
# The full version, including alpha/beta/rc tags.
release = horizon_version.version_string_with_vcs()
# The short X.Y version.
version = horizon_version.canonical_version_string()
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'openstackdocs'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
# html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'HorizonReleaseNotesdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'HorizonReleaseNotes.tex',
u'Horizon Release Notes Documentation',
u'Horizon Developers', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'horizonreleasenotes', u'Horizon Release Notes Documentation',
[u'Horizon Developers'], 1)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'HorizonReleaseNotes', u'Horizon Release Notes Documentation',
u'Horizon Developers', 'HorizonReleaseNotes',
'Dashboard for OpenStack.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
# texinfo_no_detailmenu = False
# -- Options for Internationalization output ------------------------------
locale_dirs = ['locale/']
| BiznetGIO/horizon | releasenotes/source/conf.py | Python | apache-2.0 | 9,736 |
# Copyright 2013-2014 Massachusetts Open Cloud Contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the
# License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS
# IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language
# governing permissions and limitations under the License.
"""Functional tests for model.py"""
# Some Notes:
#
# * We don't really have any agreed-upon requirements about what __repr__
# should print, but I'm fairly certain I hit an argument mismatch at
# some point, which is definitely wrong. The test_repr methods are there just
# to make sure it isn't throwing an exception.
from haas.model import *
from haas import config
from haas.ext.obm.ipmi import Ipmi
from haas.test_common import fresh_database, config_testsuite, ModelTest
import pytest
@pytest.fixture
def configure():
config_testsuite()
config.load_extensions()
fresh_database = pytest.fixture(fresh_database)
pytestmark = pytest.mark.usefixtures('configure', 'fresh_database')
class TestNic(ModelTest):
def sample_obj(self):
return Nic(Node(label='node-99', obm=Ipmi(type="http://schema.massopencloud.org/haas/v0/obm/ipmi",
host= "ipmihost", user= "root", password= "tapeworm")), 'ipmi', '00:11:22:33:44:55')
class TestNode(ModelTest):
def sample_obj(self):
return Node(label='node-99', obm=Ipmi(type="http://schema.massopencloud.org/haas/v0/obm/ipmi",
host= "ipmihost", user= "root", password= "tapeworm"))
class TestProject(ModelTest):
def sample_obj(self):
return Project('manhattan')
class TestHeadnode(ModelTest):
def sample_obj(self):
return Headnode(Project('anvil-nextgen'), 'hn-example', 'base-headnode')
class TestHnic(ModelTest):
def sample_obj(self):
return Hnic(Headnode(Project('anvil-nextgen'),
'hn-0', 'base-headnode'), 'storage')
class TestNetwork(ModelTest):
def sample_obj(self):
pj = Project('anvil-nextgen')
return Network(pj, pj, True, '102', 'hammernet')
class TestNetworkingAction(ModelTest):
def sample_obj(self):
nic = Nic(Node(label='node-99', obm=Ipmi(type="http://schema.massopencloud.org/haas/v0/obm/ipmi",
host= "ipmihost", user= "root", password= "tapeworm")), 'ipmi', '00:11:22:33:44:55')
project = Project('anvil-nextgen')
network = Network(project, project, True, '102', 'hammernet')
return NetworkingAction(nic=nic,
new_network=network,
channel='null')
| henn/hil | tests/unit/model.py | Python | apache-2.0 | 2,883 |
import os
import subprocess
import runner
import parsers
import printers
PROJECT_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), "../")
def call(cmd, cwd):
proc = subprocess.Popen(cmd, cwd=cwd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
try:
out, err = proc.communicate(timeout=5*60)
except subprocess.TimeoutExpired:
proc.kill()
raise
print(out)
return proc.returncode, out.decode("utf-8")
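# Illustrative call (paths are hypothetical):
#   rc, out = call("benchmark/boehm/boehm --top-down", cwd=build.dirname())
# Combined stdout/stderr is returned decoded; a run exceeding five minutes is
# killed and TimeoutExpired is re-raised.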
if __name__ == '__main__':
print(os.getcwd())
builders = {
"manual": runner.CMakeBuilder(PROJECT_DIR, "NO_GC", build_type="Debug"),
"shared_ptr": runner.CMakeBuilder(PROJECT_DIR, "SHARED_PTR", build_type="Debug"),
"BDW GC": runner.CMakeBuilder(PROJECT_DIR, "BDW_GC", build_type="Release"),
"gc_ptr_serial": runner.CMakeBuilder(PROJECT_DIR, "PRECISE_GC_SERIAL", build_type="Release"),
"gc_ptr_cms": runner.CMakeBuilder(PROJECT_DIR, "PRECISE_GC_CMS", build_type="Release")
}
massif_cmd = "valgrind --tool=massif --massif-out-file=massif.out {runnable} {args} && cat massif.out"
boehm_cmd = "GC_PRINT_STATS=1 {runnable} {args}"
suites = {
"manual": {"builder": builders["manual"], "cmd": massif_cmd, "parser": parsers.MassifParser()},
"shared-ptr": {"builder": builders["shared_ptr"], "cmd": massif_cmd, "parser": parsers.MassifParser()},
"BoehmGC": {"builder": builders["BDW GC"], "cmd": boehm_cmd, "parser": parsers.BoehmStatsParser()},
"BoehmGC incremental": {"builder": builders["BDW GC"], "cmd": boehm_cmd, "args": ["--incremental"], "parser": parsers.BoehmStatsParser()},
"gc-ptr serial": {"builder": builders["gc_ptr_serial"], "parser": parsers.GCHeapParser()},
"gc-ptr cms": {"builder": builders["gc_ptr_cms"], "parser": parsers.GCHeapParser()}
}
targets = {
"gcbench-top-down": {
"name": "boehm",
"runnable": "benchmark/boehm/boehm",
"suites": ["manual", "shared-ptr", "BoehmGC", "BoehmGC incremental", "gc-ptr serial", "gc-ptr cms"],
"params": ["--top-down"]
}
# "gcbench bottom-up": {
# "name": "boehm",
# "runnable": "benchmark/boehm/boehm",
# "suites": ["manual", "shared_ptr", "BDW GC", "BDW GC incremental", "gc_ptr serial", "gc_ptr cms"],
# "params": ["bottom-up"]
# },
# "parallel merge sort": {
# "name": "parallel_merge_sort",
# "runnable": "benchmark/parallel_merge_sort/parallel_merge_sort",
# "suites": ["manual", "shared_ptr", "BDW GC", "BDW GC incremental", "gc_ptr serial", "gc_ptr cms"]
# },
# "cord-build": {
# "name": "cord",
# "runnable": "benchmark/cord/cord",
# "suites": ["shared_ptr", "BDW GC", "BDW GC incremental", "gc_ptr serial", "gc_ptr cms"],
# "params": ["build", {"len": [6]}]
# },
# "cord-substr": {
# "name": "cord",
# "runnable": "benchmark/cord/cord",
# "suites": ["shared_ptr", "BDW GC", "BDW GC incremental", "gc_ptr serial", "gc_ptr cms"],
# "params": ["substr", {"len": [6]}]
# },
# "cord-flatten": {
# "name": "cord",
# "runnable": "benchmark/cord/cord",
# "suites": ["shared_ptr", "BDW GC", "BDW GC incremental", "gc_ptr serial", "gc_ptr cms"],
# "params": ["flatten", {"len": [5]}]
# }
}
# printer = printers.JSONPrinter()
# for name, target in targets.items():
#
# results = {}
#
# for suite_name in target["suites"]:
# suite = suites[suite_name]
# build = suite["builder"].build(target["name"])
# parser = suite["parser"]
#
# args = suite.get("args", []) + target["params"]
#
# cmd = suite.get("cmd")
# if cmd:
# cmd = cmd.format(runnable=target["runnable"], args=" ".join(args))
# else:
# cmd = "{} {}".format(target["runnable"], " ".join(args))
#
# rc, out = call(cmd, build.dirname())
# assert rc == 0
# parser.parse(out)
# results[suite_name] = parser.result()
#
# printer.print_report(results, "gcbench-top-down-heap")
parser = parsers.JSONParser()
with open("gcbench-top-down-heap.json") as fd:
parser.parse(fd.read())
results = parser.result()
printer = printers.GCHeapPlotPrinter()
    printer.print_report(results, "heap")
| eucpp/gcmalloc | tools/helper_heap_plots.py | Python | apache-2.0 | 4,616 |
"""Drop create Program permission from ProgramOwner and ProgramEditor roles.
Revision ID: 40a621571ac7
Revises: 1a22bb208258
Create Date: 2013-12-05 22:12:46.273929
"""
# revision identifiers, used by Alembic.
revision = '40a621571ac7'
down_revision = '1a22bb208258'
import sqlalchemy as sa
from alembic import op
from datetime import datetime
from sqlalchemy.sql import table, column, select
import json
roles_table = table('roles',
column('id', sa.Integer),
column('name', sa.String),
column('permissions_json', sa.String)
)
def get_role_permissions(role):
connection = op.get_bind()
role = connection.execute(
select([roles_table.c.permissions_json])\
.where(roles_table.c.name == role)).fetchone()
return json.loads(role.permissions_json)
def update_role_permissions(role, permissions):
op.execute(roles_table\
.update()\
.values(permissions_json = json.dumps(permissions))\
.where(roles_table.c.name == role))
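# Illustrative shape of permissions_json assumed by this migration (simplified,
# other keys elided): {"create": ["Program", ...], "read": [...], ...}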
def upgrade():
for role in ['ProgramOwner', 'ProgramEditor']:
permissions = get_role_permissions(role)
permissions['create'].remove('Program')
update_role_permissions(role, permissions)
def downgrade():
for role in ['ProgramOwner', 'ProgramEditor']:
permissions = get_role_permissions(role)
permissions['create'].append('Program')
update_role_permissions(role, permissions)
| uskudnik/ggrc-core | src/ggrc_basic_permissions/migrations/versions/20131205221246_40a621571ac7_drop_create_program_.py | Python | apache-2.0 | 1,385 |
# -*- coding: utf-8 -*-
from django.db import migrations, models
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
from django.db.models import Max
from django.utils.timezone import now as timezone_now
def backfill_subscription_log_events(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
migration_time = timezone_now()
RealmAuditLog = apps.get_model('zerver', 'RealmAuditLog')
Subscription = apps.get_model('zerver', 'Subscription')
Message = apps.get_model('zerver', 'Message')
objects_to_create = []
subs_query = Subscription.objects.select_related(
"user_profile", "user_profile__realm", "recipient").filter(recipient__type=2)
for sub in subs_query:
entry = RealmAuditLog(
realm=sub.user_profile.realm,
modified_user=sub.user_profile,
modified_stream_id=sub.recipient.type_id,
event_last_message_id=0,
event_type='subscription_created',
event_time=migration_time,
backfilled=True)
objects_to_create.append(entry)
RealmAuditLog.objects.bulk_create(objects_to_create)
objects_to_create = []
event_last_message_id = Message.objects.aggregate(Max('id'))['id__max']
migration_time_for_deactivation = timezone_now()
for sub in subs_query.filter(active=False):
entry = RealmAuditLog(
realm=sub.user_profile.realm,
modified_user=sub.user_profile,
modified_stream_id=sub.recipient.type_id,
event_last_message_id=event_last_message_id,
event_type='subscription_deactivated',
event_time=migration_time_for_deactivation,
backfilled=True)
objects_to_create.append(entry)
RealmAuditLog.objects.bulk_create(objects_to_create)
objects_to_create = []
def reverse_code(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
RealmAuditLog = apps.get_model('zerver', 'RealmAuditLog')
RealmAuditLog.objects.filter(event_type='subscription_created').delete()
RealmAuditLog.objects.filter(event_type='subscription_deactivated').delete()
class Migration(migrations.Migration):
dependencies = [
('zerver', '0092_create_scheduledemail'),
]
operations = [
migrations.AddField(
model_name='realmauditlog',
name='event_last_message_id',
field=models.IntegerField(null=True),
),
migrations.RunPython(backfill_subscription_log_events,
reverse_code=reverse_code),
]
| tommyip/zulip | zerver/migrations/0093_subscription_event_log_backfill.py | Python | apache-2.0 | 2,636 |
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import unittest
from datetime import datetime
import mock
from libcloud.common.aws import AWSRequestSignerAlgorithmV4
from libcloud.common.aws import SignedAWSConnection
from libcloud.common.aws import UNSIGNED_PAYLOAD
from libcloud.test import LibcloudTestCase
class EC2MockDriver(object):
region_name = "my_region"
class AWSRequestSignerAlgorithmV4TestCase(LibcloudTestCase):
def setUp(self):
SignedAWSConnection.driver = EC2MockDriver()
SignedAWSConnection.service_name = "my_service"
SignedAWSConnection.version = "2013-10-15"
self.connection = SignedAWSConnection("my_key", "my_secret")
self.signer = AWSRequestSignerAlgorithmV4(
access_key="my_key",
access_secret="my_secret",
version="2013-10-15",
connection=self.connection,
)
SignedAWSConnection.action = "/my_action/"
SignedAWSConnection.driver = EC2MockDriver()
self.now = datetime(2015, 3, 4, hour=17, minute=34, second=52)
def test_v4_signature(self):
params = {"Action": "DescribeInstances", "Version": "2013-10-15"}
headers = {
"Host": "ec2.eu-west-1.amazonaws.com",
"Accept-Encoding": "gzip,deflate",
"X-AMZ-Date": "20150304T173452Z",
"User-Agent": "libcloud/0.17.0 (Amazon EC2 (eu-central-1)) ",
}
dt = self.now
sig = self.signer._get_authorization_v4_header(
params=params, headers=headers, dt=dt, method="GET", path="/my_action/"
)
self.assertEqual(
sig,
"AWS4-HMAC-SHA256 "
"Credential=my_key/20150304/my_region/my_service/aws4_request, "
"SignedHeaders=accept-encoding;host;user-agent;x-amz-date, "
"Signature=f9868f8414b3c3f856c7955019cc1691265541f5162b9b772d26044280d39bd3",
)
def test_v4_signature_contains_user_id(self):
sig = self.signer._get_authorization_v4_header(
params={}, headers={}, dt=self.now
)
self.assertIn("Credential=my_key/", sig)
def test_v4_signature_contains_credential_scope(self):
with mock.patch(
"libcloud.common.aws.AWSRequestSignerAlgorithmV4._get_credential_scope"
) as mock_get_creds:
mock_get_creds.return_value = "my_credential_scope"
sig = self.signer._get_authorization_v4_header(
params={}, headers={}, dt=self.now
)
self.assertIn("Credential=my_key/my_credential_scope, ", sig)
def test_v4_signature_contains_signed_headers(self):
with mock.patch(
"libcloud.common.aws.AWSRequestSignerAlgorithmV4._get_signed_headers"
) as mock_get_headers:
mock_get_headers.return_value = "my_signed_headers"
sig = self.signer._get_authorization_v4_header(
{}, {}, self.now, method="GET", path="/"
)
self.assertIn("SignedHeaders=my_signed_headers, ", sig)
def test_v4_signature_contains_signature(self):
with mock.patch(
"libcloud.common.aws.AWSRequestSignerAlgorithmV4._get_signature"
) as mock_get_signature:
mock_get_signature.return_value = "my_signature"
sig = self.signer._get_authorization_v4_header({}, {}, self.now)
self.assertIn("Signature=my_signature", sig)
def test_get_signature_(self):
def _sign(key, msg, hex=False):
if hex:
return "H|%s|%s" % (key, msg)
else:
return "%s|%s" % (key, msg)
with mock.patch(
"libcloud.common.aws.AWSRequestSignerAlgorithmV4._get_key_to_sign_with"
) as mock_get_key:
with mock.patch(
"libcloud.common.aws.AWSRequestSignerAlgorithmV4._get_string_to_sign"
) as mock_get_string:
with mock.patch("libcloud.common.aws._sign", new=_sign):
mock_get_key.return_value = "my_signing_key"
mock_get_string.return_value = "my_string_to_sign"
sig = self.signer._get_signature(
{}, {}, self.now, method="GET", path="/", data=None
)
self.assertEqual(sig, "H|my_signing_key|my_string_to_sign")
def test_get_string_to_sign(self):
with mock.patch("hashlib.sha256") as mock_sha256:
mock_sha256.return_value.hexdigest.return_value = (
"chksum_of_canonical_request"
)
to_sign = self.signer._get_string_to_sign(
{}, {}, self.now, method="GET", path="/", data=None
)
self.assertEqual(
to_sign,
"AWS4-HMAC-SHA256\n"
"20150304T173452Z\n"
"20150304/my_region/my_service/aws4_request\n"
"chksum_of_canonical_request",
)
def test_get_key_to_sign_with(self):
def _sign(key, msg, hex=False):
return "%s|%s" % (key, msg)
with mock.patch("libcloud.common.aws._sign", new=_sign):
key = self.signer._get_key_to_sign_with(self.now)
self.assertEqual(
key, "AWS4my_secret|20150304|my_region|my_service|aws4_request"
)
def test_get_signed_headers_contains_all_headers_lowercased(self):
headers = {
"Content-Type": "text/plain",
"Host": "my_host",
"X-Special-Header": "",
}
signed_headers = self.signer._get_signed_headers(headers)
self.assertIn("content-type", signed_headers)
self.assertIn("host", signed_headers)
self.assertIn("x-special-header", signed_headers)
def test_get_signed_headers_concats_headers_sorted_lexically(self):
headers = {
"Host": "my_host",
"X-Special-Header": "",
"1St-Header": "2",
"Content-Type": "text/plain",
}
signed_headers = self.signer._get_signed_headers(headers)
self.assertEqual(
signed_headers, "1st-header;content-type;host;x-special-header"
)
def test_get_credential_scope(self):
scope = self.signer._get_credential_scope(self.now)
self.assertEqual(scope, "20150304/my_region/my_service/aws4_request")
def test_get_canonical_headers_joins_all_headers(self):
headers = {
"accept-encoding": "gzip,deflate",
"host": "my_host",
}
self.assertEqual(
self.signer._get_canonical_headers(headers),
"accept-encoding:gzip,deflate\n" "host:my_host\n",
)
def test_get_canonical_headers_sorts_headers_lexically(self):
headers = {
"accept-encoding": "gzip,deflate",
"host": "my_host",
"1st-header": "2",
"x-amz-date": "20150304T173452Z",
"user-agent": "my-ua",
}
self.assertEqual(
self.signer._get_canonical_headers(headers),
"1st-header:2\n"
"accept-encoding:gzip,deflate\n"
"host:my_host\n"
"user-agent:my-ua\n"
"x-amz-date:20150304T173452Z\n",
)
def test_get_canonical_headers_lowercases_headers_names(self):
headers = {"Accept-Encoding": "GZIP,DEFLATE", "User-Agent": "My-UA"}
self.assertEqual(
self.signer._get_canonical_headers(headers),
"accept-encoding:GZIP,DEFLATE\n" "user-agent:My-UA\n",
)
def test_get_canonical_headers_trims_header_values(self):
# TODO: according to AWS spec (and RFC 2616 Section 4.2.) excess whitespace
# from inside non-quoted strings should be stripped. Now we only strip the
# start and end of the string. See
# http://docs.aws.amazon.com/general/latest/gr/sigv4-create-canonical-request.html
headers = {
"accept-encoding": " gzip,deflate",
"user-agent": "libcloud/0.17.0 ",
}
self.assertEqual(
self.signer._get_canonical_headers(headers),
"accept-encoding:gzip,deflate\n" "user-agent:libcloud/0.17.0\n",
)
def test_get_request_params_joins_params_sorted_lexically(self):
self.assertEqual(
self.signer._get_request_params(
{
"Action": "DescribeInstances",
"Filter.1.Name": "state",
"Version": "2013-10-15",
}
),
"Action=DescribeInstances&Filter.1.Name=state&Version=2013-10-15",
)
def test_get_canonical_headers_allow_numeric_header_value(self):
headers = {"Accept-Encoding": "gzip,deflate", "Content-Length": 314}
self.assertEqual(
self.signer._get_canonical_headers(headers),
"accept-encoding:gzip,deflate\n" "content-length:314\n",
)
def test_get_request_params_allows_integers_as_value(self):
self.assertEqual(
self.signer._get_request_params(
{"Action": "DescribeInstances", "Port": 22}
),
"Action=DescribeInstances&Port=22",
)
def test_get_request_params_urlquotes_params_keys(self):
self.assertEqual(
self.signer._get_request_params({"Action+Reaction": "DescribeInstances"}),
"Action%2BReaction=DescribeInstances",
)
def test_get_request_params_urlquotes_params_values(self):
self.assertEqual(
self.signer._get_request_params(
{"Action": "DescribeInstances&Addresses", "Port-Range": "2000 3000"}
),
"Action=DescribeInstances%26Addresses&Port-Range=2000%203000",
)
def test_get_request_params_urlquotes_params_values_allows_safe_chars_in_value(
self,
):
# http://docs.aws.amazon.com/general/latest/gr/sigv4-create-canonical-request.html
self.assertEqual(
"Action=a~b.c_d-e", self.signer._get_request_params({"Action": "a~b.c_d-e"})
)
def test_get_payload_hash_returns_digest_of_empty_string_for_GET_requests(self):
SignedAWSConnection.method = "GET"
self.assertEqual(
self.signer._get_payload_hash(method="GET"),
"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
)
def test_get_payload_hash_with_data_for_PUT_requests(self):
SignedAWSConnection.method = "PUT"
self.assertEqual(
self.signer._get_payload_hash(method="PUT", data="DUMMY"),
"ceec12762e66397b56dad64fd270bb3d694c78fb9cd665354383c0626dbab013",
)
def test_get_payload_hash_with_empty_data_for_POST_requests(self):
SignedAWSConnection.method = "POST"
self.assertEqual(self.signer._get_payload_hash(method="POST"), UNSIGNED_PAYLOAD)
def test_get_canonical_request(self):
req = self.signer._get_canonical_request(
{"Action": "DescribeInstances", "Version": "2013-10-15"},
{"Accept-Encoding": "gzip,deflate", "User-Agent": "My-UA"},
method="GET",
path="/my_action/",
data=None,
)
self.assertEqual(
req,
"GET\n"
"/my_action/\n"
"Action=DescribeInstances&Version=2013-10-15\n"
"accept-encoding:gzip,deflate\n"
"user-agent:My-UA\n"
"\n"
"accept-encoding;user-agent\n"
"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
)
def test_post_canonical_request(self):
req = self.signer._get_canonical_request(
{"Action": "DescribeInstances", "Version": "2013-10-15"},
{"Accept-Encoding": "gzip,deflate", "User-Agent": "My-UA"},
method="POST",
path="/my_action/",
data="{}",
)
self.assertEqual(
req,
"POST\n"
"/my_action/\n"
"Action=DescribeInstances&Version=2013-10-15\n"
"accept-encoding:gzip,deflate\n"
"user-agent:My-UA\n"
"\n"
"accept-encoding;user-agent\n"
"44136fa355b3678a1146ad16f7e8649e94fb4fc21fe77e8310c060f61caaff8a",
)
if __name__ == "__main__":
sys.exit(unittest.main())
| apache/libcloud | libcloud/test/common/test_aws.py | Python | apache-2.0 | 13,084 |
"""Support for NWS weather service."""
from datetime import timedelta
from homeassistant.components.weather import (
ATTR_CONDITION_CLEAR_NIGHT,
ATTR_CONDITION_SUNNY,
ATTR_FORECAST_CONDITION,
ATTR_FORECAST_PRECIPITATION_PROBABILITY,
ATTR_FORECAST_TEMP,
ATTR_FORECAST_TIME,
ATTR_FORECAST_WIND_BEARING,
ATTR_FORECAST_WIND_SPEED,
WeatherEntity,
)
from homeassistant.const import (
CONF_LATITUDE,
CONF_LONGITUDE,
LENGTH_KILOMETERS,
LENGTH_METERS,
LENGTH_MILES,
PRESSURE_HPA,
PRESSURE_INHG,
PRESSURE_PA,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
)
from homeassistant.core import callback
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
from homeassistant.util.distance import convert as convert_distance
from homeassistant.util.dt import utcnow
from homeassistant.util.pressure import convert as convert_pressure
from homeassistant.util.temperature import convert as convert_temperature
from . import base_unique_id
from .const import (
ATTR_FORECAST_DAYTIME,
ATTR_FORECAST_DETAILED_DESCRIPTION,
ATTRIBUTION,
CONDITION_CLASSES,
COORDINATOR_FORECAST,
COORDINATOR_FORECAST_HOURLY,
COORDINATOR_OBSERVATION,
DAYNIGHT,
DOMAIN,
HOURLY,
NWS_DATA,
)
PARALLEL_UPDATES = 0
OBSERVATION_VALID_TIME = timedelta(minutes=20)
FORECAST_VALID_TIME = timedelta(minutes=45)
def convert_condition(time, weather):
"""
Convert NWS codes to HA condition.
    Choose the first condition in CONDITION_CLASSES that exists in the weather codes.
    If no match is found, return the first condition from NWS.
"""
conditions = [w[0] for w in weather]
prec_probs = [w[1] or 0 for w in weather]
# Choose condition with highest priority.
cond = next(
(
key
for key, value in CONDITION_CLASSES.items()
if any(condition in value for condition in conditions)
),
conditions[0],
)
if cond == "clear":
if time == "day":
return ATTR_CONDITION_SUNNY, max(prec_probs)
if time == "night":
return ATTR_CONDITION_CLEAR_NIGHT, max(prec_probs)
return cond, max(prec_probs)
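# Illustrative behaviour (weather values are hypothetical): if the highest-
# priority match in CONDITION_CLASSES is "clear", a "day" observation maps to
# ATTR_CONDITION_SUNNY and a "night" one to ATTR_CONDITION_CLEAR_NIGHT; any
# other match is returned as-is, always paired with the maximum precipitation
# probability found in the weather tuples.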
async def async_setup_entry(
hass: HomeAssistantType, entry: ConfigType, async_add_entities
) -> None:
"""Set up the NWS weather platform."""
hass_data = hass.data[DOMAIN][entry.entry_id]
async_add_entities(
[
NWSWeather(entry.data, hass_data, DAYNIGHT, hass.config.units),
NWSWeather(entry.data, hass_data, HOURLY, hass.config.units),
],
False,
)
class NWSWeather(WeatherEntity):
"""Representation of a weather condition."""
def __init__(self, entry_data, hass_data, mode, units):
"""Initialise the platform with a data instance and station name."""
self.nws = hass_data[NWS_DATA]
self.latitude = entry_data[CONF_LATITUDE]
self.longitude = entry_data[CONF_LONGITUDE]
self.coordinator_observation = hass_data[COORDINATOR_OBSERVATION]
if mode == DAYNIGHT:
self.coordinator_forecast = hass_data[COORDINATOR_FORECAST]
else:
self.coordinator_forecast = hass_data[COORDINATOR_FORECAST_HOURLY]
self.station = self.nws.station
self.is_metric = units.is_metric
self.mode = mode
self.observation = None
self._forecast = None
async def async_added_to_hass(self) -> None:
"""Set up a listener and load data."""
self.async_on_remove(
self.coordinator_observation.async_add_listener(self._update_callback)
)
self.async_on_remove(
self.coordinator_forecast.async_add_listener(self._update_callback)
)
self._update_callback()
@callback
def _update_callback(self) -> None:
"""Load data from integration."""
self.observation = self.nws.observation
if self.mode == DAYNIGHT:
self._forecast = self.nws.forecast
else:
self._forecast = self.nws.forecast_hourly
self.async_write_ha_state()
@property
def should_poll(self) -> bool:
"""Entities do not individually poll."""
return False
@property
def attribution(self):
"""Return the attribution."""
return ATTRIBUTION
@property
def name(self):
"""Return the name of the station."""
return f"{self.station} {self.mode.title()}"
@property
def temperature(self):
"""Return the current temperature."""
temp_c = None
if self.observation:
temp_c = self.observation.get("temperature")
if temp_c is not None:
return convert_temperature(temp_c, TEMP_CELSIUS, TEMP_FAHRENHEIT)
return None
@property
def pressure(self):
"""Return the current pressure."""
pressure_pa = None
if self.observation:
pressure_pa = self.observation.get("seaLevelPressure")
if pressure_pa is None:
return None
if self.is_metric:
pressure = convert_pressure(pressure_pa, PRESSURE_PA, PRESSURE_HPA)
pressure = round(pressure)
else:
pressure = convert_pressure(pressure_pa, PRESSURE_PA, PRESSURE_INHG)
pressure = round(pressure, 2)
return pressure
@property
def humidity(self):
"""Return the name of the sensor."""
humidity = None
if self.observation:
humidity = self.observation.get("relativeHumidity")
return humidity
@property
def wind_speed(self):
"""Return the current windspeed."""
wind_km_hr = None
if self.observation:
wind_km_hr = self.observation.get("windSpeed")
if wind_km_hr is None:
return None
if self.is_metric:
wind = wind_km_hr
else:
wind = convert_distance(wind_km_hr, LENGTH_KILOMETERS, LENGTH_MILES)
return round(wind)
@property
def wind_bearing(self):
"""Return the current wind bearing (degrees)."""
wind_bearing = None
if self.observation:
wind_bearing = self.observation.get("windDirection")
return wind_bearing
@property
def temperature_unit(self):
"""Return the unit of measurement."""
return TEMP_FAHRENHEIT
@property
def condition(self):
"""Return current condition."""
weather = None
if self.observation:
weather = self.observation.get("iconWeather")
time = self.observation.get("iconTime")
if weather:
cond, _ = convert_condition(time, weather)
return cond
return None
@property
def visibility(self):
"""Return visibility."""
vis_m = None
if self.observation:
vis_m = self.observation.get("visibility")
if vis_m is None:
return None
if self.is_metric:
vis = convert_distance(vis_m, LENGTH_METERS, LENGTH_KILOMETERS)
else:
vis = convert_distance(vis_m, LENGTH_METERS, LENGTH_MILES)
return round(vis, 0)
@property
def forecast(self):
"""Return forecast."""
if self._forecast is None:
return None
forecast = []
for forecast_entry in self._forecast:
data = {
ATTR_FORECAST_DETAILED_DESCRIPTION: forecast_entry.get(
"detailedForecast"
),
ATTR_FORECAST_TEMP: forecast_entry.get("temperature"),
ATTR_FORECAST_TIME: forecast_entry.get("startTime"),
}
if self.mode == DAYNIGHT:
data[ATTR_FORECAST_DAYTIME] = forecast_entry.get("isDaytime")
time = forecast_entry.get("iconTime")
weather = forecast_entry.get("iconWeather")
if time and weather:
cond, precip = convert_condition(time, weather)
else:
cond, precip = None, None
data[ATTR_FORECAST_CONDITION] = cond
data[ATTR_FORECAST_PRECIPITATION_PROBABILITY] = precip
data[ATTR_FORECAST_WIND_BEARING] = forecast_entry.get("windBearing")
wind_speed = forecast_entry.get("windSpeedAvg")
if wind_speed is not None:
if self.is_metric:
data[ATTR_FORECAST_WIND_SPEED] = round(
convert_distance(wind_speed, LENGTH_MILES, LENGTH_KILOMETERS)
)
else:
data[ATTR_FORECAST_WIND_SPEED] = round(wind_speed)
else:
data[ATTR_FORECAST_WIND_SPEED] = None
forecast.append(data)
return forecast
@property
def unique_id(self):
"""Return a unique_id for this entity."""
return f"{base_unique_id(self.latitude, self.longitude)}_{self.mode}"
@property
def available(self):
"""Return if state is available."""
last_success = (
self.coordinator_observation.last_update_success
and self.coordinator_forecast.last_update_success
)
if (
self.coordinator_observation.last_update_success_time
and self.coordinator_forecast.last_update_success_time
):
last_success_time = (
utcnow() - self.coordinator_observation.last_update_success_time
< OBSERVATION_VALID_TIME
and utcnow() - self.coordinator_forecast.last_update_success_time
< FORECAST_VALID_TIME
)
else:
last_success_time = False
return last_success or last_success_time
async def async_update(self):
"""Update the entity.
Only used by the generic entity update service.
"""
await self.coordinator_observation.async_request_refresh()
await self.coordinator_forecast.async_request_refresh()
@property
def entity_registry_enabled_default(self) -> bool:
"""Return if the entity should be enabled when first added to the entity registry."""
return self.mode == DAYNIGHT
| partofthething/home-assistant | homeassistant/components/nws/weather.py | Python | apache-2.0 | 10,236 |
#!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from apache_ranger.model.ranger_base import RangerBaseModelObject
class RangerService(RangerBaseModelObject):
def __init__(self, attrs={}):
RangerBaseModelObject.__init__(self, attrs)
self.type = attrs.get('type')
self.name = attrs.get('name')
self.displayName = attrs.get('displayName')
self.description = attrs.get('description')
self.tagService = attrs.get('tagService')
self.configs = attrs.get('configs')
self.policyVersion = attrs.get('policyVersion')
self.policyUpdateTime = attrs.get('policyUpdateTime')
self.tagVersion = attrs.get('tagVersion')
self.tagUpdateTime = attrs.get('tagUpdateTime')
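# Illustrative usage (field values are hypothetical):
#   svc = RangerService({'name': 'dev_hive', 'type': 'hive',
#                        'configs': {'username': 'hive'}})
#   svc.displayName is None unless 'displayName' appears in the input dict.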
| cloudera/hue | desktop/core/ext-py/apache-ranger-0.0.3/apache_ranger/model/ranger_service.py | Python | apache-2.0 | 1,561 |
# Copyright 2016 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.lib.services.identity.v3 import roles_client
from tempest.tests.lib import fake_auth_provider
from tempest.tests.lib.services import base
class TestRolesClient(base.BaseServiceTest):
FAKE_ROLE_INFO = {
"role": {
"domain_id": "1",
"id": "1",
"name": "test",
"links": "example.com"
}
}
FAKE_LIST_ROLES = {
"roles": [
{
"domain_id": "1",
"id": "1",
"name": "test",
"links": "example.com"
},
{
"domain_id": "2",
"id": "2",
"name": "test2",
"links": "example.com"
}
]
}
def setUp(self):
super(TestRolesClient, self).setUp()
fake_auth = fake_auth_provider.FakeAuthProvider()
self.client = roles_client.RolesClient(fake_auth,
'identity', 'regionOne')
def _test_create_role(self, bytes_body=False):
self.check_service_client_function(
self.client.create_role,
'tempest.lib.common.rest_client.RestClient.post',
self.FAKE_ROLE_INFO,
bytes_body,
domain_id="1",
name="test",
status=201)
def _test_show_role(self, bytes_body=False):
self.check_service_client_function(
self.client.show_role,
'tempest.lib.common.rest_client.RestClient.get',
self.FAKE_ROLE_INFO,
bytes_body,
role_id="1")
def _test_list_roles(self, bytes_body=False):
self.check_service_client_function(
self.client.list_roles,
'tempest.lib.common.rest_client.RestClient.get',
self.FAKE_LIST_ROLES,
bytes_body)
def _test_update_role(self, bytes_body=False):
self.check_service_client_function(
self.client.update_role,
'tempest.lib.common.rest_client.RestClient.patch',
self.FAKE_ROLE_INFO,
bytes_body,
role_id="1",
name="test")
def _test_create_user_role_on_project(self, bytes_body=False):
self.check_service_client_function(
self.client.create_user_role_on_project,
'tempest.lib.common.rest_client.RestClient.put',
{},
bytes_body,
project_id="b344506af7644f6794d9cb316600b020",
user_id="123",
role_id="1234",
status=204)
def _test_create_user_role_on_domain(self, bytes_body=False):
self.check_service_client_function(
self.client.create_user_role_on_domain,
'tempest.lib.common.rest_client.RestClient.put',
{},
bytes_body,
domain_id="b344506af7644f6794d9cb316600b020",
user_id="123",
role_id="1234",
status=204)
def _test_list_user_roles_on_project(self, bytes_body=False):
self.check_service_client_function(
self.client.list_user_roles_on_project,
'tempest.lib.common.rest_client.RestClient.get',
self.FAKE_LIST_ROLES,
bytes_body,
project_id="b344506af7644f6794d9cb316600b020",
user_id="123")
def _test_list_user_roles_on_domain(self, bytes_body=False):
self.check_service_client_function(
self.client.list_user_roles_on_domain,
'tempest.lib.common.rest_client.RestClient.get',
self.FAKE_LIST_ROLES,
bytes_body,
domain_id="b344506af7644f6794d9cb316600b020",
user_id="123")
def _test_create_group_role_on_project(self, bytes_body=False):
self.check_service_client_function(
self.client.create_group_role_on_project,
'tempest.lib.common.rest_client.RestClient.put',
{},
bytes_body,
project_id="b344506af7644f6794d9cb316600b020",
group_id="123",
role_id="1234",
status=204)
def _test_create_group_role_on_domain(self, bytes_body=False):
self.check_service_client_function(
self.client.create_group_role_on_domain,
'tempest.lib.common.rest_client.RestClient.put',
{},
bytes_body,
domain_id="b344506af7644f6794d9cb316600b020",
group_id="123",
role_id="1234",
status=204)
def _test_list_group_roles_on_project(self, bytes_body=False):
self.check_service_client_function(
self.client.list_group_roles_on_project,
'tempest.lib.common.rest_client.RestClient.get',
self.FAKE_LIST_ROLES,
bytes_body,
project_id="b344506af7644f6794d9cb316600b020",
group_id="123")
def _test_list_group_roles_on_domain(self, bytes_body=False):
self.check_service_client_function(
self.client.list_group_roles_on_domain,
'tempest.lib.common.rest_client.RestClient.get',
self.FAKE_LIST_ROLES,
bytes_body,
domain_id="b344506af7644f6794d9cb316600b020",
group_id="123")
def test_create_role_with_str_body(self):
self._test_create_role()
def test_create_role_with_bytes_body(self):
self._test_create_role(bytes_body=True)
def test_show_role_with_str_body(self):
self._test_show_role()
def test_show_role_with_bytes_body(self):
self._test_show_role(bytes_body=True)
def test_list_roles_with_str_body(self):
self._test_list_roles()
def test_list_roles_with_bytes_body(self):
self._test_list_roles(bytes_body=True)
def test_update_role_with_str_body(self):
self._test_update_role()
def test_update_role_with_bytes_body(self):
self._test_update_role(bytes_body=True)
def test_delete_role(self):
self.check_service_client_function(
self.client.delete_role,
'tempest.lib.common.rest_client.RestClient.delete',
{},
role_id="1",
status=204)
def test_create_user_role_on_project_with_str_body(self):
self._test_create_user_role_on_project()
def test_create_user_role_on_project_with_bytes_body(self):
self._test_create_user_role_on_project(bytes_body=True)
def test_create_user_role_on_domain_with_str_body(self):
self._test_create_user_role_on_domain()
def test_create_user_role_on_domain_with_bytes_body(self):
self._test_create_user_role_on_domain(bytes_body=True)
def test_create_group_role_on_domain_with_str_body(self):
self._test_create_group_role_on_domain()
def test_create_group_role_on_domain_with_bytes_body(self):
self._test_create_group_role_on_domain(bytes_body=True)
def test_list_user_roles_on_project_with_str_body(self):
self._test_list_user_roles_on_project()
def test_list_user_roles_on_project_with_bytes_body(self):
self._test_list_user_roles_on_project(bytes_body=True)
def test_list_user_roles_on_domain_with_str_body(self):
self._test_list_user_roles_on_domain()
def test_list_user_roles_on_domain_with_bytes_body(self):
self._test_list_user_roles_on_domain(bytes_body=True)
def test_list_group_roles_on_domain_with_str_body(self):
self._test_list_group_roles_on_domain()
def test_list_group_roles_on_domain_with_bytes_body(self):
self._test_list_group_roles_on_domain(bytes_body=True)
def test_delete_role_from_user_on_project(self):
self.check_service_client_function(
self.client.delete_role_from_user_on_project,
'tempest.lib.common.rest_client.RestClient.delete',
{},
project_id="b344506af7644f6794d9cb316600b020",
user_id="123",
role_id="1234",
status=204)
def test_delete_role_from_user_on_domain(self):
self.check_service_client_function(
self.client.delete_role_from_user_on_domain,
'tempest.lib.common.rest_client.RestClient.delete',
{},
domain_id="b344506af7644f6794d9cb316600b020",
user_id="123",
role_id="1234",
status=204)
def test_delete_role_from_group_on_project(self):
self.check_service_client_function(
self.client.delete_role_from_group_on_project,
'tempest.lib.common.rest_client.RestClient.delete',
{},
project_id="b344506af7644f6794d9cb316600b020",
group_id="123",
role_id="1234",
status=204)
def test_delete_role_from_group_on_domain(self):
self.check_service_client_function(
self.client.delete_role_from_group_on_domain,
'tempest.lib.common.rest_client.RestClient.delete',
{},
domain_id="b344506af7644f6794d9cb316600b020",
group_id="123",
role_id="1234",
status=204)
def test_check_user_role_existence_on_project(self):
self.check_service_client_function(
self.client.check_user_role_existence_on_project,
'tempest.lib.common.rest_client.RestClient.head',
{},
project_id="b344506af7644f6794d9cb316600b020",
user_id="123",
role_id="1234",
status=204)
def test_check_user_role_existence_on_domain(self):
self.check_service_client_function(
self.client.check_user_role_existence_on_domain,
'tempest.lib.common.rest_client.RestClient.head',
{},
domain_id="b344506af7644f6794d9cb316600b020",
user_id="123",
role_id="1234",
status=204)
def test_check_role_from_group_on_project_existence(self):
self.check_service_client_function(
self.client.check_role_from_group_on_project_existence,
'tempest.lib.common.rest_client.RestClient.head',
{},
project_id="b344506af7644f6794d9cb316600b020",
group_id="123",
role_id="1234",
status=204)
def test_check_role_from_group_on_domain_existence(self):
self.check_service_client_function(
self.client.check_role_from_group_on_domain_existence,
'tempest.lib.common.rest_client.RestClient.head',
{},
domain_id="b344506af7644f6794d9cb316600b020",
group_id="123",
role_id="1234",
status=204)
| sebrandon1/tempest | tempest/tests/lib/services/identity/v3/test_roles_client.py | Python | apache-2.0 | 11,253 |
import datetime
import os
from tornado.web import UIModule
from pushmanager.core import util
from pushmanager.core.settings import Settings
class Request(UIModule):
"""Displays an individual request entry with expandable details/comments."""
def javascript_files(self):
return [self.handler.static_url('js/modules/request.js')]
def css_files(self):
return [self.handler.static_url('css/modules/request.css')]
def render(self, request, **kwargs):
kwargs.setdefault('edit_buttons', False) # Whether or not to show the 'Edit'/'Takeover' button
kwargs.setdefault('expand', False) # Whether to automatically expand this entry (only used on /request)
kwargs.setdefault('push_buttons', False) # Whether or not to show buttons related to push management
kwargs.setdefault('pushmaster', False) # Whether or not to show pushmaster-related push buttons (Add/Remove)
kwargs.setdefault('show_ago', False) # Whether or not to show relative time indicator at the beginning of the entry
kwargs.setdefault('show_state_inline', False) # Whether or not to show state (requested, added, etc) at the end of the entry
if request['repo'] != Settings['git']['main_repository']:
kwargs['cherry_string'] = '%s/%s' % (request['repo'], request['branch'])
else:
kwargs['cherry_string'] = request['branch']
if request['reviewid']:
kwargs['review'] = {
'url': "http://%s/r/%s" % (Settings['reviewboard']['servername'], request['reviewid']),
'display': str(request['reviewid']),
}
else:
kwargs['review'] = None
repo = request['repo']
if repo != Settings['git']['main_repository']:
repo = os.path.join(Settings['git']['dev_repositories_dir'], repo)
kwargs.setdefault('tags', self._generate_tag_list(request, repo))
kwargs.setdefault('repo_url', 'https://%s/?p=%s.git;a=summary' % (
Settings['git']['gitweb_servername'],
repo
))
kwargs.setdefault('branch_url', 'https://%s/?p=%s.git;a=log;h=refs/heads/%s' % (
Settings['git']['gitweb_servername'],
repo,
request['branch']
))
kwargs.setdefault('diff_url', 'https://%s/?p=%s.git;a=history;f=pushplans;hb=refs/heads/%s' % (
Settings['git']['gitweb_servername'],
repo,
request['branch']
))
kwargs.setdefault('web_hooks', Settings['web_hooks'])
kwargs.setdefault('create_time', datetime.datetime.fromtimestamp(request['created']).strftime("%x %X"))
kwargs.setdefault('modify_time', datetime.datetime.fromtimestamp(request['modified']).strftime("%x %X"))
return self.render_string('modules/request.html', request=request, pretty_date=util.pretty_date, **kwargs)
def _generate_tag_list(self, request, repo):
tags = dict((tag, None) for tag in (request['tags'].split(',') if request['tags'] else []))
if 'buildbot' in tags:
tags['buildbot'] = "https://%s/rev/%s" % (Settings['buildbot']['servername'], request['revision'])
if 'pushplans' in tags:
tags['pushplans'] = "https://%s/?p=%s.git;a=history;f=pushplans;hb=refs/heads/%s" % (
Settings['git']['gitweb_servername'],
repo,
request['branch']
)
return sorted(tags.iteritems())
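# Illustrative sketch (editorial addition, not part of the original module):
# _generate_tag_list() turns the request's comma-separated ``tags`` column into
# a sorted list of (tag, url) pairs, attaching URLs only to tags it recognises.
# A standalone restatement with made-up values:
#
#   tags = dict((tag, None) for tag in 'pushplans,buildbot'.split(','))
#   tags['buildbot'] = 'https://buildbot.example/rev/abc123'   # hypothetical
#   sorted(tags.items())  # [('buildbot', 'https://...'), ('pushplans', None)]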
class NewRequestDialog(UIModule):
"""Displays a button which opens a dialog to create a new request."""
def javascript_files(self):
return [self.handler.static_url('js/modules/newrequest.js')]
def css_files(self):
return [self.handler.static_url('css/modules/newrequest.css')]
def render(self):
return self.render_string('modules/newrequest.html')
| hashbrowncipher/pushmanager | pushmanager/ui_modules.py | Python | apache-2.0 | 3,881 |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Logic to update a Tensorflow model graph with quantization operations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import re
from tensorflow.contrib import graph_editor
from tensorflow.contrib.quantize.python import common
from tensorflow.contrib.quantize.python import input_to_ops
from tensorflow.contrib.quantize.python import quant_ops
from tensorflow.python.framework import ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.training import training_util
# Operation types used to select operations of interest.
_QUANTIZABLE_TYPES = {'Conv2D', 'MatMul', 'DepthwiseConv2dNative'}
# Custom key for storing and retrieving update ops used by quantizing nodes.
_UPDATE_QUANT_OPS = 'update_quant_ops'
def Quantize(graph,
weight_bits=8,
weight_narrow_range=False,
activation_bits=8,
ema_decay=0.999,
quant_delay=None,
vars_collection=ops.GraphKeys.MOVING_AVERAGE_VARIABLES,
is_training=True,
quantize_folded_weights_use_ema=False):
"""Updates graph with quantization operations.
Args:
graph: Graph to modify.
weight_bits: Number of bits to use for quantizing weights.
weight_narrow_range: Whether to use a more efficient narrow range for
weights quantization. With weight_narrow_range true, the range is
[1; 2^weight_bits - 1], with it false [0; 2^weight_bits - 1].
activation_bits: Number of bits to use for quantizing activations.
ema_decay: (Optional) Float, EMA decay parameter. EMA is used to update
quantization intervals for quantizing activations (see here about EMA:
https://en.wikipedia.org/wiki/Moving_average#Exponential_moving_average).
quant_delay: (Optional, default None) Int, count of global steps for which
to delay quantization. This helps weights stabilize at the start of
training.
vars_collection: (Optional) Collection where to store the variables for
quantization interval ends.
is_training: (Optional) Whether quantizing training graph or eval graph.
quantize_folded_weights_use_ema: (Optional, default False) Whether to
quantize weights after batchnorm-folding with exponential average
quantization.
Raises:
ValueError: When quantization fails.
"""
context = _QuantizeContext(graph, weight_bits, weight_narrow_range,
activation_bits, ema_decay, quant_delay,
vars_collection, is_training,
quantize_folded_weights_use_ema)
graph_ops = graph.get_operations()
# Filter out backprop and summary related operations, leave only interesting
# op types.
def _IsInterestingOpWithWeights(op):
return (op.type in _QUANTIZABLE_TYPES and
not op.name.startswith(common.SKIPPED_PREFIXES))
for op in (op for op in graph_ops if _IsInterestingOpWithWeights(op)):
if op.name.endswith('/depthwise'):
# Separable convolution may consist of 2 convolution nodes. If so, skip
# .../depthwise and only quantize the top one.
separable_conv = context.GetOperationByNameDontThrow(
op.name[:-len('/depthwise')])
if separable_conv and separable_conv.type == 'Conv2D':
continue
# Quantize add ops that come after Conv2D or DepthwiseConv2dNative.
if op.type in ['Conv2D', 'DepthwiseConv2dNative']:
add_context_re = re.search(r'^(.*)/[^/]+/', op.name)
if add_context_re is not None:
context.add_contexts.add(add_context_re.group(1))
if not op.name.endswith('_Fold'):
folded_op = context.GetOperationByNameDontThrow(op.name + '_Fold')
# Do nothing if found, it will be quantized when it is iterated over.
if not folded_op:
context.QuantizeOpWithWeights(op, folded=False)
else:
context.QuantizeOpWithWeights(op, folded=True)
context.QuantizeAddContexts()
# Once all quantization ops have been inserted in the graph, collect update
# ops for their variables and modify the TF Slim update barrier (see
# https://www.tensorflow.org/code/tensorflow/contrib/slim/python/slim/learning.py)
# to depend on them.
try:
update_barrier = graph.get_operation_by_name('update_barrier')
except KeyError:
# In evaluation graph, this barrier may not exist.
return None
update_quant_ops = graph.get_collection_ref(_UPDATE_QUANT_OPS)
graph_editor.add_control_inputs(update_barrier, update_quant_ops)
class _QuantizeContext(object):
"""Context holds references needed for quantization."""
def __init__(self,
graph,
weight_bits,
weight_narrow_range,
activation_bits,
ema_decay=0.999,
quant_delay=None,
vars_collection=ops.GraphKeys.MOVING_AVERAGE_VARIABLES,
is_training=True,
quantize_folded_weights_use_ema=False):
"""Initializes context to hold references needed for quantization.
Args:
graph: Graph to modify.
weight_bits: Number of bits to use for quantizing weights.
weight_narrow_range: Whether to use a more efficient narrow range for
weights quantization. With weight_narrow_range true, the range is
[1; 2^weight_bits - 1], with it false [0; 2^weight_bits - 1].
activation_bits: Number of bits to use for quantizing activations.
ema_decay: (Optional) Float, EMA decay parameter.
quant_delay: (Optional, default None) Int, count of global steps for which
to delay quantization. This helps weights stabilize at the start of
training.
vars_collection: (Optional) Collection where to store the variables for
quantization interval ends.
is_training: (Optional) Whether quantizing training or eval graph.
quantize_folded_weights_use_ema: (Optional, default False) Whether to
quantize weights after batchnorm-folding with exponential average
quantization.
"""
self.graph = graph
self.weight_bits = weight_bits
self.weight_narrow_range = weight_narrow_range
self.activation_bits = activation_bits
self.ema_decay = ema_decay
self.quant_delay = quant_delay
self.vars_collection = vars_collection
self.is_training = is_training
self.quantize_folded_weights_use_ema = quantize_folded_weights_use_ema
self.input_to_ops_map = input_to_ops.InputToOps(graph)
self.add_contexts = set()
def QuantizeAddContexts(self):
"""Quantizes all add ops in self.add_contexts."""
for add_context in self.add_contexts:
add_op = self.GetOperationByNamesDontThrow([
add_context + '/Add', add_context + '/add'])
if add_op is not None:
self._InsertQuantOp(
add_context,
add_op,
self.input_to_ops_map.ConsumerOperations(add_op),
name='add_quant',
moving_avg=True,
bits=self.activation_bits,
narrow_range=False)
def QuantizeOpWithWeights(self, op, folded):
"""Quantizes around the specific operation with or without batch norm.
Args:
op: Operation to quantize.
folded: Operation has been folded and needs special handling if True.
Raises:
ValueError: When quantization fails.
"""
# Op name component before the last slash will be used as context.
context = re.search(r'^(.*)/([^/]+)', op.name).group(1)
# Quantize weights.
if folded:
producer_op = self.graph.get_operation_by_name(context + '/mul_fold')
else:
try:
input_idx = next(i for i, v in enumerate(op.inputs)
if '/weights/' in v.name or
'/depthwise_weights' in v.name)
except StopIteration:
raise ValueError('No inputs to quantize for op: %s' % op)
producer_op = op.inputs[input_idx].op
# If batch norm is used, the folded weights depend on the batch std, hence
# it is sensible to use EMA during training to smooth out the noise. This is
# controlled by the flag quantize_folded_weights_use_ema. Its default is
# False for backward compatibility.
# If there is no batch norm, weights do not depend on the batch and using
# the latest value of min and max is more efficient.
weight_use_ema = folded and self.quantize_folded_weights_use_ema
self._InsertQuantOp(
context,
producer_op, [op],
name='weights_quant',
moving_avg=weight_use_ema,
delay_requested=weight_use_ema,
bits=self.weight_bits,
narrow_range=self.weight_narrow_range)
# Important: do not quantize biases here. During inference they are
# quantized to 32 bits, which is much finer than 8 bit quantization and
# depends on weight and input activation ranges.
# Find activation and (optionally) Add operations to quantize.
activation_op, add_op, add_context = self._GetReluAndAddOperations(context,
op)
if add_op:
original_context = context
context = add_context
# Quantize activation outputs.
consumer_ops = self.input_to_ops_map.ConsumerOperations(activation_op)
self._InsertQuantOp(
context,
activation_op,
consumer_ops,
name='act_quant',
moving_avg=True,
init_min=0.0,
bits=self.activation_bits,
narrow_range=False)
# When a bypass connection was found, also quantize Add op input.
if add_op:
def _QuantizeAddInput(add_input):
if folded:
return add_input.op.name.endswith('/add_fold')
else:
return add_input.op.name.startswith(original_context + '/')
for add_input in add_op.inputs:
if _QuantizeAddInput(add_input):
self._InsertQuantOp(
original_context,
add_input.op, [add_op],
name='conv_quant',
moving_avg=True,
bits=self.activation_bits,
narrow_range=False)
def _GetReluAndAddOperations(self, context, op):
"""Looks up a Relu* and Add operations in given context.
Args:
context: Context where to look for operations.
op: Operation to quantize.
Returns:
A triplet (Operation, Operation, string), the first element is an end
point operation, the second is Add operation (optional), the third element
is string context where the Add operation was found (optional).
Raises:
ValueError: When operations cannot be found.
"""
activation_op = common.GetEndpointActivationOp(self.graph, context)
if activation_op:
return activation_op, None, None
if '/' in context:
# If no activation op is there, look for them one level up.
add_context = re.search(r'^(.*)/([^/]+)', context).group(1)
activation_op = common.GetEndpointActivationOp(self.graph, add_context)
if not activation_op:
# Still no Relu, can happen on the top layer, just find the next node up,
# make sure it is BiasAdd.
consumers = [c for outp in op.outputs for c in outp.consumers()]
if len(consumers) != 1 or consumers[0].type != 'BiasAdd':
raise ValueError('Failed to quantize op: %s, %s' % (op.name, op.type))
return consumers[0], None, None
if add_context:
add_op = self.GetOperationByNamesDontThrow([
add_context + '/Add', add_context + '/add'])
return activation_op, add_op, add_context
else:
raise ValueError('Failed to quantize op: %s, %s' % (op.name, op.type))
def GetOperationByNameDontThrow(self, name):
"""Returns an Operation with the given name.
Args:
name: Name of Operation to return.
Returns:
The Operation with the given name. None if the name does not correspond to
any operation in the graph.
"""
try:
return self.graph.get_operation_by_name(name)
except KeyError:
return None
def GetOperationByNamesDontThrow(self, names):
"""Returns an Operation with one of the given names.
Args:
names: Names of Operation to return.
Returns:
The Operation with one of the given names. None if none of the names
corresponds to any operation in the graph.
"""
for name in names:
op = self.GetOperationByNameDontThrow(name)
if op is not None:
return op
return None
def _InsertQuantOp(
self,
context,
producer,
consumers,
name,
moving_avg=True,
init_min=-6.0,
init_max=6.0,
delay_requested=True,
bits=8,
narrow_range=False,):
"""Inserts a quant op between a producer op and (multiple) consumer ops.
Args:
context: Context where producer and consumer operations are nested.
producer: Producer operation of the pairs where quantization will be
inserted.
consumers: Consumer operations of the pairs.
name: Name for the new quantization op within the context.
moving_avg: Specifies whether to use exponential moving average or just
the last value seen.
init_min: Starting minimum value for the new quantization op.
init_max: Starting maximum value for the new quantization op.
delay_requested: If true, implement quantization delay where needed.
False value explicitly disables delay quantization everywhere.
bits: Number of bits to use for quantization, must be between 2 and 8.
narrow_range: Whether to use the narrow quantization range
[1; 2^bits - 1] or wide range [0; 2^bits - 1].
Raises:
ValueError: When producer operation is not directly connected to the
consumer operation.
"""
scope = context + '/' + name
inputs = producer.outputs[0]
if moving_avg:
quant = (quant_ops.MovingAvgQuantize(
inputs,
init_min=init_min,
init_max=init_max,
ema_decay=self.ema_decay,
is_training=self.is_training,
num_bits=bits,
narrow_range=narrow_range,
updates_collection=_UPDATE_QUANT_OPS,
vars_collection=self.vars_collection,
scope=scope))
else:
quant = (quant_ops.LastValueQuantize(
inputs,
init_min=init_min,
init_max=init_max,
is_training=self.is_training,
num_bits=bits,
narrow_range=narrow_range,
updates_collection=_UPDATE_QUANT_OPS,
vars_collection=self.vars_collection,
scope=scope))
if delay_requested and self.quant_delay and self.quant_delay > 0:
activate_quant = math_ops.greater_equal(
training_util.get_or_create_global_step(),
self.quant_delay,
name=scope + '/activate_quant')
quant = control_flow_ops.cond(
activate_quant,
lambda: quant,
lambda: inputs,
name=scope + '/delayed_quant')
nodes_modified_count = graph_editor.reroute_ts(
[quant], [inputs], can_modify=consumers)
if nodes_modified_count != len(consumers):
raise ValueError('Some inputs not quantized for ops: [%s]' %
', '.join([consumer.name for consumer in consumers]))
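# Illustrative usage sketch (editorial addition, not part of the original
# module). It assumes a TensorFlow 1.x installation with tf.contrib available
# and uses tf.contrib.slim, whose '<scope>/weights' variable naming matches the
# lookup performed in QuantizeOpWithWeights() above; shapes and names are made
# up for the example.
if __name__ == '__main__':
    import tensorflow as tf
    slim = tf.contrib.slim
    g = tf.Graph()
    with g.as_default():
        images = tf.placeholder(tf.float32, [None, 32, 32, 3])
        # slim.conv2d emits Conv2D -> BiasAdd -> Relu, the pattern the
        # rewriter looks for.
        net = slim.conv2d(images, 16, [3, 3], scope='conv1')
        Quantize(g, is_training=True)
        print([op.name for op in g.get_operations() if 'act_quant' in op.name])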
| horance-liu/tensorflow | tensorflow/contrib/quantize/python/quantize.py | Python | apache-2.0 | 16,035 |
from django.conf import settings
from django.db import models
from django.utils.encoding import force_bytes, force_str
from django.utils.six import with_metaclass
from cryptography.fernet import Fernet, InvalidToken
fernet = Fernet(settings.FERNET_KEY)
class EncryptedTextField(with_metaclass(models.SubfieldBase, models.TextField)):
"""A TextField encrypted with Fernet (AES).
    EncryptedTextField relies on `Fernet` from `cryptography` to provide symmetric
    encryption. This field is compatible with South migrations.
"""
    def db_type(self, connection):
        """Fernet tokens are stored in a PostgreSQL ``bytea`` column."""
        return 'bytea'
def get_prep_value(self, value):
return fernet.encrypt(force_bytes(value))
def to_python(self, value):
"""
Returns unencrypted or decrypted value.
`to_python` is called either when assigning a value to the model or
        when retrieving a value from it. It should either be able to return
        the string assigned or to decrypt it. This behavior (from Django) is
        not ideal but will change in the future; see
https://docs.djangoproject.com/en/dev/howto/custom-model-fields/#converting-values-to-python-objects
"""
try:
value = fernet.decrypt(force_bytes(value))
except InvalidToken:
return value
else:
return force_str(value)
def south_field_triple(self):
"""Returns a suitable description of this field for South."""
from south.modelsinspector import introspector
field_class = '{}.{}'.format(self.__class__.__module__, self.__class__.__name__)
args, kwargs = introspector(self)
return (field_class, args, kwargs)
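# Illustrative sketch (editorial addition, not part of the original module):
# the Fernet round trip the field performs, shown standalone because importing
# this module requires settings.FERNET_KEY to be configured. The key below is
# generated on the fly instead of coming from Django settings.
#
#   from cryptography.fernet import Fernet
#   key = Fernet.generate_key()
#   f = Fernet(key)
#   token = f.encrypt(b'secret value')          # what get_prep_value() stores
#   assert f.decrypt(token) == b'secret value'  # what to_python() recovers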
| tombooth/django-field-cryptography | django_field_cryptography/fields.py | Python | bsd-2-clause | 1,749 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2005 onwards University of Deusto
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
#
# This software consists of contributions made by many individuals,
# listed below:
#
# Author: Jaime Irurzun <[email protected]>
#
from abc import ABCMeta, abstractmethod
from voodoo.override import Override
from experiments.ud_xilinx.exc import InvalidDeviceToProgramError
from weblab.experiment.devices.digilent_adept import DigilentAdept
from weblab.experiment.devices.jtag_blazer import JTagBlazer
class UdXilinxProgrammer(object):
__metaclass__ = ABCMeta
def __init__(self, cfg_manager, xilinx_impact_device):
super(UdXilinxProgrammer, self).__init__()
self._cfg_manager = cfg_manager
self._xilinx_impact_device = xilinx_impact_device
@staticmethod
def create(device_name, cfg_manager, xilinx_impact_device):
if device_name == 'XilinxImpact':
return XilinxImpactProgrammer(cfg_manager, xilinx_impact_device)
elif device_name == 'JTagBlazer':
return JTagBlazerSvfProgrammer(cfg_manager, xilinx_impact_device)
elif device_name == 'DigilentAdept':
return DigilentAdeptSvfProgrammer(cfg_manager, xilinx_impact_device)
else:
raise InvalidDeviceToProgramError(device_name)
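    # Typical use of the factory (illustrative, with hypothetical arguments):
    #   programmer = UdXilinxProgrammer.create('JTagBlazer', cfg_manager, impact)
    #   programmer.program(bitstream_path)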
@abstractmethod
def program(self, file_name):
pass
class XilinxImpactProgrammer(UdXilinxProgrammer):
def __init__(self, cfg_manager, xilinx_impact_device):
super(XilinxImpactProgrammer, self).__init__(cfg_manager, xilinx_impact_device)
@Override(UdXilinxProgrammer)
def program(self, file_name):
self._xilinx_impact_device.program_device(file_name)
class JTagBlazerSvfProgrammer(UdXilinxProgrammer):
def __init__(self, cfg_manager, xilinx_impact_device):
super(JTagBlazerSvfProgrammer, self).__init__(cfg_manager, xilinx_impact_device)
self._jtag_blazer = JTagBlazer(cfg_manager)
self._device_ip = self._cfg_manager.get_value('xilinx_jtag_blazer_device_ip')
@Override(UdXilinxProgrammer)
def program(self, file_name):
self._xilinx_impact_device.source2svf(file_name)
svf_file_name = file_name.replace("."+self._xilinx_impact_device.get_suffix(), ".svf")
self._jtag_blazer.program_device(svf_file_name, self._device_ip)
class DigilentAdeptSvfProgrammer(UdXilinxProgrammer):
def __init__(self, cfg_manager, xilinx_impact_device):
super(DigilentAdeptSvfProgrammer, self).__init__(cfg_manager, xilinx_impact_device)
self._digilent_adept = DigilentAdept(cfg_manager)
@Override(UdXilinxProgrammer)
def program(self, file_name):
# self._xilinx_impact_device.source2svf(file_name)
# svf_file_name = file_name.replace("."+self._xilinx_impact_device.get_suffix(), ".svf")
# self._digilent_adept.program_device(svf_file_name)
self._digilent_adept.program_device(file_name)
| zstars/weblabdeusto | server/src/experiments/ud_xilinx/programmers.py | Python | bsd-2-clause | 3,090 |
# Copyright (c) 2009-2016 Hewlett Packard Enterprise Development LP
#
# Redistribution and use of this software in source and binary forms,
# with or without modification, are permitted provided that the following
# conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import base64
import os.path
from requestbuilder import Arg, MutuallyExclusiveArgList
from requestbuilder.exceptions import ArgumentError
from euca2ools.commands.argtypes import (ec2_block_device_mapping,
flexible_bool, vpc_interface)
from euca2ools.commands.ec2 import EC2Request
class RunInstances(EC2Request):
DESCRIPTION = 'Launch instances of a machine image'
ARGS = [Arg('ImageId', metavar='IMAGE',
help='ID of the image to instantiate (required)'),
Arg('-n', '--instance-count', dest='count', metavar='MIN[-MAX]',
default='1', route_to=None,
help='''number of instances to launch. If this number of
instances cannot be launched, no instances will launch.
If specified as a range (min-max), the server will
attempt to launch the maximum number, but no fewer
than the minimum number.'''),
Arg('-g', '--group', action='append', default=[], route_to=None,
help='security group(s) in which to launch the instances'),
Arg('-k', '--key', dest='KeyName', metavar='KEYPAIR',
help='name of the key pair to use'),
MutuallyExclusiveArgList(
Arg('-d', '--user-data', metavar='DATA', route_to=None,
help='''user data to make available to instances in this
reservation'''),
Arg('--user-data-force', metavar='DATA', route_to=None,
help='''same as -d/--user-data, but without checking if a
file by that name exists first'''),
Arg('-f', '--user-data-file', metavar='FILE', route_to=None,
help='''file containing user data to make available to the
instances in this reservation''')),
Arg('--addressing', dest='AddressingType',
choices=('public', 'private'),
help='''[Eucalyptus only, non-VPC only]
addressing scheme to launch the instance with. Use "private"
to run an instance with no public address.'''),
Arg('-t', '--instance-type', dest='InstanceType',
help='type of instance to launch'),
Arg('-z', '--availability-zone', metavar='ZONE',
dest='Placement.AvailabilityZone'),
Arg('--kernel', dest='KernelId', metavar='KERNEL',
help='ID of the kernel to launch the instance(s) with'),
Arg('--ramdisk', dest='RamdiskId', metavar='RAMDISK',
help='ID of the ramdisk to launch the instance(s) with'),
Arg('-b', '--block-device-mapping', metavar='DEVICE=MAPPED',
dest='BlockDeviceMapping', action='append',
type=ec2_block_device_mapping, default=[],
help='''define a block device mapping for the instances, in the
form DEVICE=MAPPED, where "MAPPED" is "none", "ephemeral(0-3)",
or
"[SNAP-ID]:[GiB]:[true|false]:[standard|VOLTYPE[:IOPS]]"'''),
Arg('-m', '--monitor', dest='Monitoring.Enabled',
action='store_const', const='true',
help='enable detailed monitoring for the instance(s)'),
Arg('--disable-api-termination', dest='DisableApiTermination',
action='store_const', const='true',
help='prevent API users from terminating the instance(s)'),
Arg('--instance-initiated-shutdown-behavior',
dest='InstanceInitiatedShutdownBehavior',
choices=('stop', 'terminate'),
help=('whether to "stop" (default) or terminate EBS instances '
'when they shut down')),
Arg('--placement-group', dest='Placement.GroupName',
metavar='PLGROUP', help='''name of a placement group to launch
into'''),
Arg('--tenancy', dest='Placement.Tenancy',
choices=('default', 'dedicated'), help='''[VPC only]
"dedicated" to run on single-tenant hardware'''),
Arg('--client-token', dest='ClientToken', metavar='TOKEN',
help='unique identifier to ensure request idempotency'),
Arg('-s', '--subnet', metavar='SUBNET', route_to=None,
help='''[VPC only] subnet to create the instance's network
interface in'''),
Arg('--associate-public-ip-address', type=flexible_bool,
route_to=None, help='''[VPC only] whether or not to assign a
public address to the instance's network interface'''),
Arg('--private-ip-address', metavar='ADDRESS', route_to=None,
help='''[VPC only] assign a specific primary private IP address
to an instance's interface'''),
MutuallyExclusiveArgList(
Arg('--secondary-address', '--secondary-private-ip-address',
metavar='ADDRESS', action='append', route_to=None,
help='''[VPC only] assign a specific secondary private IP
address to an instance's network interface. Use this
option multiple times to add additional addresses.'''),
Arg('--secondary-count',
'--secondary-private-ip-address-count', metavar='COUNT',
type=int, route_to=None, help='''[VPC only] automatically
assign a specific number of secondary private IP addresses
to an instance's network interface''')),
Arg('-a', '--network-interface', dest='NetworkInterface',
metavar='INTERFACE', action='append', type=vpc_interface,
help=('[VPC only] add a network interface to the new '
'instance. If the interface already exists, supply its '
'ID and a numeric index for it, separated by ":", in '
'the form "eni-NNNNNNNN:INDEX". To create a new '
'interface, supply a numeric index and subnet ID for '
'it, along with (in order) an optional description, a '
'primary private IP address, a list of security group '
'IDs to associate with the interface, whether to delete '
'the interface upon instance termination ("true" or '
'"false"), a number of secondary private IP addresses '
'to create automatically, and a list of secondary '
'private IP addresses to assign to the interface, '
'separated by ":", in the form ":INDEX:SUBNET:'
'[DESCRIPTION]:[PRIV_IP]:[GROUP1,GROUP2,...]:[true|'
'false]:[SEC_IP_COUNT|:SEC_IP1,SEC_IP2,...]". You '
'cannot specify both of the latter two. This option '
'may be used multiple times. Each adds another network '
'interface.')),
Arg('-p', '--iam-profile', metavar='IPROFILE', route_to=None,
help='''name or ARN of the IAM instance profile to associate
with the new instance(s)'''),
Arg('--ebs-optimized', dest='EbsOptimized', action='store_const',
const='true', help='optimize the new instance(s) for EBS I/O')]
LIST_TAGS = ['reservationSet', 'instancesSet', 'groupSet', 'tagSet',
'blockDeviceMapping', 'productCodes', 'networkInterfaceSet',
'privateIpAddressesSet']
# noinspection PyExceptionInherit
def configure(self):
EC2Request.configure(self)
if self.args.get('user_data'):
if os.path.isfile(self.args['user_data']):
raise ArgumentError(
'argument -d/--user-data: to pass the contents of a file '
'as user data, use -f/--user-data-file. To pass the '
"literal value '{0}' as user data even though it matches "
                    'the name of a file, use --user-data-force.'.format(self.args['user_data']))
else:
self.params['UserData'] = base64.b64encode(
self.args['user_data'])
elif self.args.get('user_data_force'):
self.params['UserData'] = base64.b64encode(
self.args['user_data_force'])
elif self.args.get('user_data_file'):
with open(self.args['user_data_file']) as user_data_file:
self.params['UserData'] = base64.b64encode(
user_data_file.read())
if self.args.get('KeyName') is None:
default_key_name = self.config.get_region_option(
'ec2-default-keypair')
if default_key_name:
self.log.info("using default key pair '%s'", default_key_name)
self.params['KeyName'] = default_key_name
# noinspection PyExceptionInherit
def preprocess(self):
counts = self.args['count'].split('-')
if len(counts) == 1:
try:
self.params['MinCount'] = int(counts[0])
self.params['MaxCount'] = int(counts[0])
except ValueError:
raise ArgumentError('argument -n/--instance-count: instance '
'count must be an integer')
elif len(counts) == 2:
try:
self.params['MinCount'] = int(counts[0])
self.params['MaxCount'] = int(counts[1])
except ValueError:
raise ArgumentError('argument -n/--instance-count: instance '
                                    'count range must consist of '
'integers')
else:
raise ArgumentError('argument -n/--instance-count: value must '
'have format "1" or "1-2"')
if self.params['MinCount'] < 1 or self.params['MaxCount'] < 1:
raise ArgumentError('argument -n/--instance-count: instance count '
'must be positive')
if self.params['MinCount'] > self.params['MaxCount']:
self.log.debug('MinCount > MaxCount; swapping')
self.params.update({'MinCount': self.params['MaxCount'],
'MaxCount': self.params['MinCount']})
iprofile = self.args.get('iam_profile')
if iprofile:
if iprofile.startswith('arn:'):
self.params['IamInstanceProfile.Arn'] = iprofile
else:
self.params['IamInstanceProfile.Name'] = iprofile
if (self.args.get('subnet') or self.args.get('NetworkInterface') or
self.args.get('associate_public_ip_address') is not None):
# This is going into a VPC.
# We can't mix top-level and interface-level parameters, so
# build an interface out of all the network-related options
# to make the split-up, "friendlier" options work.
cli_iface = {}
for group in self.args['group']:
if not group.startswith('sg-'):
raise ArgumentError('argument -g/--group: groups must be '
'specified by ID when using VPC')
cli_iface.setdefault('SecurityGroupId', [])
cli_iface['SecurityGroupId'].append(group)
if self.args.get('associate_public_ip_address') is not None:
cli_iface['AssociatePublicIpAddress'] = \
self.args['associate_public_ip_address']
if self.args.get('private_ip_address'):
cli_iface['PrivateIpAddresses'] = [
{'PrivateIpAddress': self.args['private_ip_address'],
'Primary': 'true'}]
if self.args.get('secondary_address'):
sec_ips = [{'PrivateIpAddress': addr} for addr in
self.args['secondary_address']]
if not cli_iface.get('PrivateIpAddresses'):
cli_iface['PrivateIpAddresses'] = []
cli_iface['PrivateIpAddresses'].extend(sec_ips)
if self.args.get('secondary_count'):
sec_ip_count = self.args['secondary_count']
cli_iface['SecondaryPrivateIpAddressCount'] = sec_ip_count
if self.args.get('subnet'):
cli_iface['SubnetId'] = self.args['subnet']
if cli_iface:
cli_iface['DeviceIndex'] = 0
if not self.params.get('NetworkInterface'):
self.params['NetworkInterface'] = []
self.params['NetworkInterface'].append(cli_iface)
self.log.debug('built network interface from CLI options: {0}'
.format(cli_iface))
else:
# Non-VPC
for group in self.args['group']:
if group.startswith('sg-'):
if not self.params.get('SecurityGroupId'):
self.params['SecurityGroupId'] = []
self.params['SecurityGroupId'].append(group)
else:
if not self.params.get('SecurityGroup'):
self.params['SecurityGroup'] = []
self.params['SecurityGroup'].append(group)
def print_result(self, result):
self.print_reservation(result)
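# Illustrative invocation (editorial addition, not part of the original module);
# the image ID, key name, and counts are hypothetical. The -n option accepts
# either a single count or a MIN-MAX range, which preprocess() above splits
# into MinCount/MaxCount:
#
#   euca-run-instances emi-12345678 -n 2-4 -t m1.small -k my-key -g default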
| gholms/euca2ools | euca2ools/commands/ec2/runinstances.py | Python | bsd-2-clause | 14,968 |
# Copyright (c) 2012-2013, Mark Peek <[email protected]>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty, Tags
from .validators import integer, positive_integer, network_port, boolean
VALID_RULETYPES = ('SYSTEM', 'FORWARD')
def validate_ruletype(ruletype):
"""Validate RuleType for ResolverRule."""
if ruletype not in VALID_RULETYPES:
raise ValueError("Rule type must be one of: %s" %
", ".join(VALID_RULETYPES))
return ruletype
class AliasTarget(AWSProperty):
props = {
'HostedZoneId': (basestring, True),
'DNSName': (basestring, True),
'EvaluateTargetHealth': (boolean, False)
}
def __init__(self,
hostedzoneid=None,
dnsname=None,
evaluatetargethealth=None,
**kwargs):
# provided for backward compatibility
if hostedzoneid is not None:
kwargs['HostedZoneId'] = hostedzoneid
if dnsname is not None:
kwargs['DNSName'] = dnsname
if evaluatetargethealth is not None:
kwargs['EvaluateTargetHealth'] = evaluatetargethealth
super(AliasTarget, self).__init__(**kwargs)
class GeoLocation(AWSProperty):
props = {
'ContinentCode': (basestring, False),
'CountryCode': (basestring, False),
'SubdivisionCode': (basestring, False),
}
class BaseRecordSet(object):
props = {
'AliasTarget': (AliasTarget, False),
'Comment': (basestring, False),
'Failover': (basestring, False),
'GeoLocation': (GeoLocation, False),
'HealthCheckId': (basestring, False),
'HostedZoneId': (basestring, False),
'HostedZoneName': (basestring, False),
'MultiValueAnswer': (boolean, False),
'Name': (basestring, True),
'Region': (basestring, False),
'ResourceRecords': (list, False),
'SetIdentifier': (basestring, False),
'TTL': (integer, False),
'Type': (basestring, True),
'Weight': (integer, False),
}
class RecordSetType(AWSObject, BaseRecordSet):
# This is a top-level resource
resource_type = "AWS::Route53::RecordSet"
class RecordSet(AWSProperty, BaseRecordSet):
# This is for use in a list with RecordSetGroup (below)
pass
class RecordSetGroup(AWSObject):
resource_type = "AWS::Route53::RecordSetGroup"
props = {
'HostedZoneId': (basestring, False),
'HostedZoneName': (basestring, False),
'RecordSets': (list, False),
'Comment': (basestring, False),
}
class AlarmIdentifier(AWSProperty):
props = {
'Name': (basestring, True),
'Region': (basestring, True),
}
class HealthCheckConfiguration(AWSProperty):
props = {
'AlarmIdentifier': (AlarmIdentifier, False),
'ChildHealthChecks': ([basestring], False),
'EnableSNI': (boolean, False),
'FailureThreshold': (positive_integer, False),
'FullyQualifiedDomainName': (basestring, False),
'HealthThreshold': (positive_integer, False),
'InsufficientDataHealthStatus': (basestring, False),
'Inverted': (boolean, False),
'IPAddress': (basestring, False),
'MeasureLatency': (boolean, False),
'Port': (network_port, False),
'Regions': ([basestring], False),
'RequestInterval': (positive_integer, False),
'ResourcePath': (basestring, False),
'SearchString': (basestring, False),
'Type': (basestring, True),
}
class HealthCheck(AWSObject):
resource_type = "AWS::Route53::HealthCheck"
props = {
'HealthCheckConfig': (HealthCheckConfiguration, True),
'HealthCheckTags': (Tags, False),
}
class HostedZoneConfiguration(AWSProperty):
props = {
'Comment': (basestring, False),
}
class HostedZoneVPCs(AWSProperty):
props = {
'VPCId': (basestring, True),
'VPCRegion': (basestring, True),
}
class QueryLoggingConfig(AWSProperty):
props = {
'CloudWatchLogsLogGroupArn': (basestring, True),
}
class HostedZone(AWSObject):
resource_type = "AWS::Route53::HostedZone"
props = {
'HostedZoneConfig': (HostedZoneConfiguration, False),
'HostedZoneTags': (Tags, False),
'Name': (basestring, True),
'QueryLoggingConfig': (QueryLoggingConfig, False),
'VPCs': ([HostedZoneVPCs], False),
}
class IpAddressRequest(AWSProperty):
props = {
'Ip': (basestring, False),
'SubnetId': (basestring, True),
}
class ResolverEndpoint(AWSObject):
resource_type = "AWS::Route53Resolver::ResolverEndpoint"
props = {
'Direction': (basestring, True),
'IpAddresses': ([IpAddressRequest], True),
'Name': (basestring, False),
'SecurityGroupIds': ([basestring], True),
'Tags': (Tags, False),
}
class TargetAddress(AWSProperty):
props = {
'Ip': (basestring, True),
'Port': (basestring, True),
}
class ResolverRule(AWSObject):
resource_type = "AWS::Route53Resolver::ResolverRule"
props = {
'DomainName': (basestring, True),
'Name': (basestring, False),
'ResolverEndpointId': (basestring, False),
'RuleType': (validate_ruletype, True),
'Tags': (Tags, False),
'TargetIps': ([TargetAddress], False),
}
class ResolverRuleAssociation(AWSObject):
resource_type = "AWS::Route53Resolver::ResolverRuleAssociation"
props = {
'Name': (basestring, False),
'ResolverRuleId': (basestring, True),
'VPCId': (basestring, True),
}
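# Illustrative usage sketch (editorial addition, not part of the original
# module): composing a Route53 record with the classes above and rendering it
# as CloudFormation JSON. The zone and record names are made up.
if __name__ == '__main__':
    from troposphere import Template
    template = Template()
    template.add_resource(RecordSetType(
        'ExampleCnameRecord',
        HostedZoneName='example.com.',
        Name='www.example.com.',
        Type='CNAME',
        TTL='300',
        ResourceRecords=['origin.example.com.'],
    ))
    print(template.to_json())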
| ikben/troposphere | troposphere/route53.py | Python | bsd-2-clause | 5,700 |
#!/usr/bin/env python
'''
Author: Chris Duffy
Date: March 2015
Name: smtp_vrfy.py
Purpose: To validate users on a box running SMTP
Copyright (c) 2015, Christopher Duffy All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met: * Redistributions
of source code must retain the above copyright notice, this list of conditions and
the following disclaimer. * Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution. * Neither the
name of the nor the names of its contributors may be used to endorse or promote
products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL CHRISTOPHER DUFFY BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
import socket, time, argparse, os, sys
def read_file(filename):
with open(filename) as file:
lines = file.read().splitlines()
return lines
def verify_smtp(verbose, filename, ip, timeout_value, sleep_value, port=25):
    if port is None or port == "":
        port = 25
    else:
        port = int(port)
if verbose > 0:
print "[*] Connecting to %s on port %s to execute the test" % (ip, port)
valid_users=[]
username_list = read_file(filename)
for user in username_list:
try:
sys.stdout.flush()
s=socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.settimeout(timeout_value)
connect=s.connect((ip,port))
banner=s.recv(1024)
if verbose > 0:
print("[*] The system banner is: '%s'") % (str(banner))
command='VRFY ' + user + '\n'
if verbose > 0:
print("[*] Executing: %s") % (command)
print("[*] Testing entry %s of %s") % (str(username_list.index(user)),str( len(username_list)))
s.send(command)
result=s.recv(1024)
if "252" in result:
valid_users.append(user)
if verbose > 1:
print("[+] Username %s is valid") % (user)
if "550" in result:
if verbose > 1:
print "[-] 550 Username does not exist"
if "503" in result:
print("[!] The server requires authentication")
break
if "500" in result:
print("[!] The VRFY command is not supported")
break
except IOError as e:
if verbose > 1:
                print("[!] The following error occurred: '%s'") % (str(e))
            if 'Operation now in progress' in str(e):
print("[!] The connection to SMTP failed")
break
finally:
if valid_users and verbose > 0:
print("[+] %d User(s) are Valid" % (len(valid_users)))
elif verbose > 0 and not valid_users:
print("[!] No valid users were found")
s.close()
            if sleep_value != 0:
time.sleep(sleep_value)
sys.stdout.flush()
return valid_users
def write_username_file(username_list, filename, verbose):
open(filename, 'w').close() #Delete contents of file name
if verbose > 1:
print("[*] Writing to %s") % (filename)
with open(filename, 'w') as file:
file.write('\n'.join(username_list))
return
if __name__ == '__main__':
# If script is executed at the CLI
usage = '''usage: %(prog)s [-u username_file] [-f output_filename] [-i ip address] [-p port_number] [-t timeout] [-s sleep] -q -v -vv -vvv'''
parser = argparse.ArgumentParser(usage=usage)
parser.add_argument("-u", "--usernames", type=str, help="The usernames that are to be read", action="store", dest="username_file")
parser.add_argument("-f", "--filename", type=str, help="Filename for output the confirmed usernames", action="store", dest="filename")
parser.add_argument("-i", "--ip", type=str, help="The IP address of the target system", action="store", dest="ip")
parser.add_argument("-p","--port", type=int, default=25, action="store", help="The port of the target system's SMTP service", dest="port")
parser.add_argument("-t","--timeout", type=float, default=1, action="store", help="The timeout value for service responses in seconds", dest="timeout_value")
parser.add_argument("-s","--sleep", type=float, default=0.0, action="store", help="The wait time between each request in seconds", dest="sleep_value")
parser.add_argument("-v", action="count", dest="verbose", default=1, help="Verbosity level, defaults to one, this outputs each command and result")
parser.add_argument("-q", action="store_const", dest="verbose", const=0, help="Sets the results to be quiet")
parser.add_argument('--version', action='version', version='%(prog)s 0.42b')
args = parser.parse_args()
# Set Constructors
username_file = args.username_file # Usernames to test
filename = args.filename # Filename for outputs
verbose = args.verbose # Verbosity level
ip = args.ip # IP Address to test
port = args.port # Port for the service to test
timeout_value = args.timeout_value # Timeout value for service connections
sleep_value = args.sleep_value # Sleep value between requests
dir = os.getcwd() # Get current working directory
username_list =[]
# Argument Validator
if len(sys.argv)==1:
parser.print_help()
sys.exit(1)
if not filename:
if os.name != "nt":
filename = dir + "/confirmed_username_list"
else:
filename = dir + "\\confirmed_username_list"
else:
if filename:
            if "\\" in filename or "/" in filename:
if verbose > 1:
print("[*] Using filename: %s") % (filename)
else:
if os.name != "nt":
filename = dir + "/" + filename
else:
filename = dir + "\\" + filename
if verbose > 1:
print("[*] Using filename: %s") % (filename)
username_list = verify_smtp(verbose, username_file, ip, timeout_value, sleep_value, port)
if len(username_list) > 0:
write_username_file(username_list, filename, verbose)
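# Illustrative invocation (editorial addition, not part of the original script);
# the target host, file names, and timing values are hypothetical:
#
#   python smtp_vrfy.py -u usernames.txt -i 192.0.2.10 -p 25 -t 2 -s 0.5 -vv \
#       -f valid_users.txt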
| liorvh/pythonpentest | smtp_vrfy.py | Python | bsd-3-clause | 7,182 |
# -*- coding: utf-8 -*-
from sqlalchemy import Column, Table, ForeignKey, types
from sqlalchemy.ext.mutable import Mutable
#from werkzeug import generate_password_hash, check_password_hash
from geoalchemy2 import Geometry
from flask.ext.login import UserMixin
from .constants import *
from ..extensions import db
from ..utils import get_current_time, SEX_TYPE, STRING_LEN
'''
Design notes for a post: author, location, datetime, status, expiration date,
keywords (classification, food, labor, commute), a short text summary, and a
long HTML body that may include pictures (stored the way a BBS post is stored).
Relationships:
    post -> bidders       (many-to-many)
    post -> conversations (one-to-many)
    post -> deal          (one-to-one)
Messages handle all other communication.
'''
class Chat(db.Model):
    """A conversation about a post between its poster and one replier."""
id = Column(db.Integer, primary_key=True)
post = Column(db.Integer, db.ForeignKey('posts.id'))
poster = Column(db.Integer, db.ForeignKey('users.id'))
replier = Column(db.Integer, db.ForeignKey('users.id'))
class Message(db.Model):
    """A single message exchanged within a Chat."""
id = Column(db.Integer, primary_key=True)
in_chat = Column(db.Integer, db.ForeignKey('chat.id'))
content = Column(db.Integer, db.ForeignKey('users.id'))
| wandonye/vshare | vshare/chat/models.py | Python | bsd-3-clause | 1,154 |
from setuptools import setup, find_packages
setup(
name='submitty_utils',
author='Submitty',
version='0.6.0',
packages=find_packages(exclude=('tests',)),
license='BSD',
description='Python Submitty Utils',
install_requires=[
'tzlocal'
],
tests_require=[],
zip_safe=True
)
| RCOS-Grading-Server/HWserver | python_submitty_utils/setup.py | Python | bsd-3-clause | 321 |
"""
SQLite3 backend for the sqlite3 module in the standard library.
"""
import decimal
import re
import warnings
from sqlite3 import dbapi2 as Database
import pytz
from django.core.exceptions import ImproperlyConfigured
from django.db import utils
from django.db.backends import utils as backend_utils
from django.db.backends.base.base import BaseDatabaseWrapper
from django.utils import timezone
from django.utils.dateparse import (
parse_date, parse_datetime, parse_duration, parse_time,
)
from .client import DatabaseClient # isort:skip
from .creation import DatabaseCreation # isort:skip
from .features import DatabaseFeatures # isort:skip
from .introspection import DatabaseIntrospection # isort:skip
from .operations import DatabaseOperations # isort:skip
from .schema import DatabaseSchemaEditor # isort:skip
def decoder(conv_func):
"""
Convert bytestrings from Python's sqlite3 interface to a regular string.
"""
return lambda s: conv_func(s.decode())
Database.register_converter("bool", decoder(lambda s: s == '1'))
Database.register_converter("time", decoder(parse_time))
Database.register_converter("date", decoder(parse_date))
Database.register_converter("datetime", decoder(parse_datetime))
Database.register_converter("timestamp", decoder(parse_datetime))
Database.register_converter("TIMESTAMP", decoder(parse_datetime))
Database.register_converter("decimal", decoder(backend_utils.typecast_decimal))
Database.register_adapter(decimal.Decimal, backend_utils.rev_typecast_decimal)
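# Illustrative sketch (editorial addition, not part of Django): the converter
# registrations above rely on sqlite3's detect_types machinery, which maps a
# declared column type to a registered converter. The same mechanism can be
# seen with the standard library alone:
#
#   import sqlite3
#   conn = sqlite3.connect(':memory:', detect_types=sqlite3.PARSE_DECLTYPES)
#   conn.execute('CREATE TABLE t (d date)')
#   conn.execute('INSERT INTO t VALUES (?)', ['2017-01-31'])
#   conn.execute('SELECT d FROM t').fetchone()   # (datetime.date(2017, 1, 31),)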
class DatabaseWrapper(BaseDatabaseWrapper):
vendor = 'sqlite'
# SQLite doesn't actually support most of these types, but it "does the right
# thing" given more verbose field definitions, so leave them as is so that
# schema inspection is more useful.
data_types = {
'AutoField': 'integer',
'BigAutoField': 'integer',
'BinaryField': 'BLOB',
'BooleanField': 'bool',
'CharField': 'varchar(%(max_length)s)',
'DateField': 'date',
'DateTimeField': 'datetime',
'DecimalField': 'decimal',
'DurationField': 'bigint',
'FileField': 'varchar(%(max_length)s)',
'FilePathField': 'varchar(%(max_length)s)',
'FloatField': 'real',
'IntegerField': 'integer',
'BigIntegerField': 'bigint',
'IPAddressField': 'char(15)',
'GenericIPAddressField': 'char(39)',
'NullBooleanField': 'bool',
'OneToOneField': 'integer',
'PositiveIntegerField': 'integer unsigned',
'PositiveSmallIntegerField': 'smallint unsigned',
'SlugField': 'varchar(%(max_length)s)',
'SmallIntegerField': 'smallint',
'TextField': 'text',
'TimeField': 'time',
'UUIDField': 'char(32)',
}
data_types_suffix = {
'AutoField': 'AUTOINCREMENT',
'BigAutoField': 'AUTOINCREMENT',
}
# SQLite requires LIKE statements to include an ESCAPE clause if the value
# being escaped has a percent or underscore in it.
# See http://www.sqlite.org/lang_expr.html for an explanation.
operators = {
'exact': '= %s',
'iexact': "LIKE %s ESCAPE '\\'",
'contains': "LIKE %s ESCAPE '\\'",
'icontains': "LIKE %s ESCAPE '\\'",
'regex': 'REGEXP %s',
'iregex': "REGEXP '(?i)' || %s",
'gt': '> %s',
'gte': '>= %s',
'lt': '< %s',
'lte': '<= %s',
'startswith': "LIKE %s ESCAPE '\\'",
'endswith': "LIKE %s ESCAPE '\\'",
'istartswith': "LIKE %s ESCAPE '\\'",
'iendswith': "LIKE %s ESCAPE '\\'",
}
# The patterns below are used to generate SQL pattern lookup clauses when
# the right-hand side of the lookup isn't a raw string (it might be an expression
# or the result of a bilateral transformation).
    # In those cases, special characters for LIKE operators (e.g. \, %, _) should be
# escaped on database side.
#
# Note: we use str.format() here for readability as '%' is used as a wildcard for
# the LIKE operator.
pattern_esc = r"REPLACE(REPLACE(REPLACE({}, '\', '\\'), '%%', '\%%'), '_', '\_')"
pattern_ops = {
'contains': r"LIKE '%%' || {} || '%%' ESCAPE '\'",
'icontains': r"LIKE '%%' || UPPER({}) || '%%' ESCAPE '\'",
'startswith': r"LIKE {} || '%%' ESCAPE '\'",
'istartswith': r"LIKE UPPER({}) || '%%' ESCAPE '\'",
'endswith': r"LIKE '%%' || {} ESCAPE '\'",
'iendswith': r"LIKE '%%' || UPPER({}) ESCAPE '\'",
}
Database = Database
SchemaEditorClass = DatabaseSchemaEditor
# Classes instantiated in __init__().
client_class = DatabaseClient
creation_class = DatabaseCreation
features_class = DatabaseFeatures
introspection_class = DatabaseIntrospection
ops_class = DatabaseOperations
def get_connection_params(self):
settings_dict = self.settings_dict
if not settings_dict['NAME']:
raise ImproperlyConfigured(
"settings.DATABASES is improperly configured. "
"Please supply the NAME value.")
kwargs = {
'database': settings_dict['NAME'],
'detect_types': Database.PARSE_DECLTYPES | Database.PARSE_COLNAMES,
}
kwargs.update(settings_dict['OPTIONS'])
# Always allow the underlying SQLite connection to be shareable
# between multiple threads. The safe-guarding will be handled at a
# higher level by the `BaseDatabaseWrapper.allow_thread_sharing`
# property. This is necessary as the shareability is disabled by
# default in pysqlite and it cannot be changed once a connection is
# opened.
if 'check_same_thread' in kwargs and kwargs['check_same_thread']:
warnings.warn(
'The `check_same_thread` option was provided and set to '
'True. It will be overridden with False. Use the '
'`DatabaseWrapper.allow_thread_sharing` property instead '
'for controlling thread shareability.',
RuntimeWarning
)
kwargs.update({'check_same_thread': False})
if self.features.can_share_in_memory_db:
kwargs.update({'uri': True})
return kwargs
def get_new_connection(self, conn_params):
conn = Database.connect(**conn_params)
conn.create_function("django_date_extract", 2, _sqlite_date_extract)
conn.create_function("django_date_trunc", 2, _sqlite_date_trunc)
conn.create_function("django_datetime_cast_date", 2, _sqlite_datetime_cast_date)
conn.create_function("django_datetime_cast_time", 2, _sqlite_datetime_cast_time)
conn.create_function("django_datetime_extract", 3, _sqlite_datetime_extract)
conn.create_function("django_datetime_trunc", 3, _sqlite_datetime_trunc)
conn.create_function("django_time_extract", 2, _sqlite_time_extract)
conn.create_function("django_time_trunc", 2, _sqlite_time_trunc)
conn.create_function("django_time_diff", 2, _sqlite_time_diff)
conn.create_function("django_timestamp_diff", 2, _sqlite_timestamp_diff)
conn.create_function("regexp", 2, _sqlite_regexp)
conn.create_function("django_format_dtdelta", 3, _sqlite_format_dtdelta)
conn.create_function("django_power", 2, _sqlite_power)
return conn
def init_connection_state(self):
pass
def create_cursor(self, name=None):
return self.connection.cursor(factory=SQLiteCursorWrapper)
def close(self):
self.validate_thread_sharing()
# If database is in memory, closing the connection destroys the
# database. To prevent accidental data loss, ignore close requests on
# an in-memory db.
if not self.is_in_memory_db():
BaseDatabaseWrapper.close(self)
def _savepoint_allowed(self):
# Two conditions are required here:
# - A sufficiently recent version of SQLite to support savepoints,
# - Being in a transaction, which can only happen inside 'atomic'.
# When 'isolation_level' is not None, sqlite3 commits before each
# savepoint; it's a bug. When it is None, savepoints don't make sense
# because autocommit is enabled. The only exception is inside 'atomic'
# blocks. To work around that bug, on SQLite, 'atomic' starts a
        # transaction explicitly rather than simply disabling autocommit.
return self.features.uses_savepoints and self.in_atomic_block
def _set_autocommit(self, autocommit):
if autocommit:
level = None
else:
# sqlite3's internal default is ''. It's different from None.
# See Modules/_sqlite/connection.c.
level = ''
# 'isolation_level' is a misleading API.
# SQLite always runs at the SERIALIZABLE isolation level.
with self.wrap_database_errors:
self.connection.isolation_level = level
def check_constraints(self, table_names=None):
"""
Check each table name in `table_names` for rows with invalid foreign
key references. This method is intended to be used in conjunction with
`disable_constraint_checking()` and `enable_constraint_checking()`, to
determine if rows with invalid references were entered while constraint
checks were off.
Raise an IntegrityError on the first invalid foreign key reference
encountered (if any) and provide detailed information about the
invalid reference in the error message.
Backends can override this method if they can more directly apply
constraint checking (e.g. via "SET CONSTRAINTS ALL IMMEDIATE")
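
        A rough usage sketch (illustrative; the table name is made up):

            connection.disable_constraint_checking()
            # ... bulk-load rows that may temporarily violate FK constraints ...
            connection.enable_constraint_checking()
            connection.check_constraints(table_names=['myapp_book'])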
"""
cursor = self.cursor()
if table_names is None:
table_names = self.introspection.table_names(cursor)
for table_name in table_names:
primary_key_column_name = self.introspection.get_primary_key_column(cursor, table_name)
if not primary_key_column_name:
continue
key_columns = self.introspection.get_key_columns(cursor, table_name)
for column_name, referenced_table_name, referenced_column_name in key_columns:
cursor.execute(
"""
SELECT REFERRING.`%s`, REFERRING.`%s` FROM `%s` as REFERRING
LEFT JOIN `%s` as REFERRED
ON (REFERRING.`%s` = REFERRED.`%s`)
WHERE REFERRING.`%s` IS NOT NULL AND REFERRED.`%s` IS NULL
"""
% (
primary_key_column_name, column_name, table_name,
referenced_table_name, column_name, referenced_column_name,
column_name, referenced_column_name,
)
)
for bad_row in cursor.fetchall():
raise utils.IntegrityError(
"The row in table '%s' with primary key '%s' has an "
"invalid foreign key: %s.%s contains a value '%s' that "
"does not have a corresponding value in %s.%s." % (
table_name, bad_row[0], table_name, column_name,
bad_row[1], referenced_table_name, referenced_column_name,
)
)
def is_usable(self):
return True
def _start_transaction_under_autocommit(self):
"""
Start a transaction explicitly in autocommit mode.
Staying in autocommit mode works around a bug of sqlite3 that breaks
savepoints when autocommit is disabled.
"""
self.cursor().execute("BEGIN")
def is_in_memory_db(self):
return self.creation.is_in_memory_db(self.settings_dict['NAME'])
FORMAT_QMARK_REGEX = re.compile(r'(?<!%)%s')
class SQLiteCursorWrapper(Database.Cursor):
"""
Django uses "format" style placeholders, but pysqlite2 uses "qmark" style.
This fixes it -- but note that if you want to use a literal "%s" in a query,
you'll need to use "%%s".
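
    For illustration (not part of the original docstring), convert_query()
    turns
        "SELECT * FROM t WHERE a = %s AND b LIKE '100%%'"
    into
        "SELECT * FROM t WHERE a = ? AND b LIKE '100%'"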
"""
def execute(self, query, params=None):
if params is None:
return Database.Cursor.execute(self, query)
query = self.convert_query(query)
return Database.Cursor.execute(self, query, params)
def executemany(self, query, param_list):
query = self.convert_query(query)
return Database.Cursor.executemany(self, query, param_list)
def convert_query(self, query):
return FORMAT_QMARK_REGEX.sub('?', query).replace('%%', '%')
def _sqlite_date_extract(lookup_type, dt):
if dt is None:
return None
try:
dt = backend_utils.typecast_timestamp(dt)
except (ValueError, TypeError):
return None
if lookup_type == 'week_day':
return (dt.isoweekday() % 7) + 1
elif lookup_type == 'week':
return dt.isocalendar()[1]
else:
return getattr(dt, lookup_type)
def _sqlite_date_trunc(lookup_type, dt):
try:
dt = backend_utils.typecast_timestamp(dt)
except (ValueError, TypeError):
return None
if lookup_type == 'year':
return "%i-01-01" % dt.year
elif lookup_type == 'month':
return "%i-%02i-01" % (dt.year, dt.month)
elif lookup_type == 'day':
return "%i-%02i-%02i" % (dt.year, dt.month, dt.day)
def _sqlite_time_trunc(lookup_type, dt):
try:
dt = backend_utils.typecast_time(dt)
except (ValueError, TypeError):
return None
if lookup_type == 'hour':
return "%02i:00:00" % dt.hour
elif lookup_type == 'minute':
return "%02i:%02i:00" % (dt.hour, dt.minute)
elif lookup_type == 'second':
return "%02i:%02i:%02i" % (dt.hour, dt.minute, dt.second)
def _sqlite_datetime_parse(dt, tzname):
if dt is None:
return None
try:
dt = backend_utils.typecast_timestamp(dt)
except (ValueError, TypeError):
return None
if tzname is not None:
dt = timezone.localtime(dt, pytz.timezone(tzname))
return dt
def _sqlite_datetime_cast_date(dt, tzname):
dt = _sqlite_datetime_parse(dt, tzname)
if dt is None:
return None
return dt.date().isoformat()
def _sqlite_datetime_cast_time(dt, tzname):
dt = _sqlite_datetime_parse(dt, tzname)
if dt is None:
return None
return dt.time().isoformat()
def _sqlite_datetime_extract(lookup_type, dt, tzname):
dt = _sqlite_datetime_parse(dt, tzname)
if dt is None:
return None
if lookup_type == 'week_day':
return (dt.isoweekday() % 7) + 1
elif lookup_type == 'week':
return dt.isocalendar()[1]
else:
return getattr(dt, lookup_type)
def _sqlite_datetime_trunc(lookup_type, dt, tzname):
dt = _sqlite_datetime_parse(dt, tzname)
if dt is None:
return None
if lookup_type == 'year':
return "%i-01-01 00:00:00" % dt.year
elif lookup_type == 'month':
return "%i-%02i-01 00:00:00" % (dt.year, dt.month)
elif lookup_type == 'day':
return "%i-%02i-%02i 00:00:00" % (dt.year, dt.month, dt.day)
elif lookup_type == 'hour':
return "%i-%02i-%02i %02i:00:00" % (dt.year, dt.month, dt.day, dt.hour)
elif lookup_type == 'minute':
return "%i-%02i-%02i %02i:%02i:00" % (dt.year, dt.month, dt.day, dt.hour, dt.minute)
elif lookup_type == 'second':
return "%i-%02i-%02i %02i:%02i:%02i" % (dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second)
def _sqlite_time_extract(lookup_type, dt):
if dt is None:
return None
try:
dt = backend_utils.typecast_time(dt)
except (ValueError, TypeError):
return None
return getattr(dt, lookup_type)
def _sqlite_format_dtdelta(conn, lhs, rhs):
"""
LHS and RHS can be either:
- An integer number of microseconds
- A string representing a timedelta object
- A string representing a datetime
"""
try:
if isinstance(lhs, int):
lhs = str(decimal.Decimal(lhs) / decimal.Decimal(1000000))
real_lhs = parse_duration(lhs)
if real_lhs is None:
real_lhs = backend_utils.typecast_timestamp(lhs)
if isinstance(rhs, int):
rhs = str(decimal.Decimal(rhs) / decimal.Decimal(1000000))
real_rhs = parse_duration(rhs)
if real_rhs is None:
real_rhs = backend_utils.typecast_timestamp(rhs)
if conn.strip() == '+':
out = real_lhs + real_rhs
else:
out = real_lhs - real_rhs
except (ValueError, TypeError):
return None
# typecast_timestamp returns a date or a datetime without timezone.
# It will be formatted as "%Y-%m-%d" or "%Y-%m-%d %H:%M:%S[.%f]"
return str(out)
def _sqlite_time_diff(lhs, rhs):
left = backend_utils.typecast_time(lhs)
right = backend_utils.typecast_time(rhs)
return (
(left.hour * 60 * 60 * 1000000) +
(left.minute * 60 * 1000000) +
(left.second * 1000000) +
(left.microsecond) -
(right.hour * 60 * 60 * 1000000) -
(right.minute * 60 * 1000000) -
(right.second * 1000000) -
(right.microsecond)
)
def _sqlite_timestamp_diff(lhs, rhs):
left = backend_utils.typecast_timestamp(lhs)
right = backend_utils.typecast_timestamp(rhs)
return (left - right).total_seconds() * 1000000
def _sqlite_regexp(re_pattern, re_string):
return bool(re.search(re_pattern, str(re_string))) if re_string is not None else False
def _sqlite_power(x, y):
return x ** y
| alexallah/django | django/db/backends/sqlite3/base.py | Python | bsd-3-clause | 17,790 |
# -*- coding: utf-8 -*-
import copy
import json
import sys
import traceback
from django.conf import settings
from django.core.files.storage import default_storage as storage
import mock
from nose.plugins.attrib import attr
from nose.tools import eq_
from pyquery import PyQuery as pq
import waffle
import amo
import amo.tests
from addons.models import Addon
from amo.tests import assert_no_validation_errors
from amo.tests.test_helpers import get_image_path
from amo.urlresolvers import reverse
from applications.models import AppVersion, Application
from constants.applications import FIREFOX
from devhub.tasks import compatibility_check
from devhub.views import make_validation_result
from files.helpers import copyfileobj
from files.models import File, FileUpload, FileValidation
from files.tests.test_models import UploadTest as BaseUploadTest
from files.utils import parse_addon
from users.models import UserProfile
from zadmin.models import ValidationResult
class TestUploadValidation(BaseUploadTest):
fixtures = ['base/apps', 'base/users',
'devhub/invalid-id-uploaded-xpi.json']
def setUp(self):
super(TestUploadValidation, self).setUp()
assert self.client.login(username='[email protected]',
password='password')
def test_no_html_in_messages(self):
upload = FileUpload.objects.get(name='invalid-id-20101206.xpi')
resp = self.client.get(reverse('devhub.upload_detail',
args=[upload.uuid, 'json']))
eq_(resp.status_code, 200)
data = json.loads(resp.content)
msg = data['validation']['messages'][1]
eq_(msg['message'], 'The value of <em:id> is invalid.')
eq_(sorted(msg['context']),
[[u'<foo/>'], u'<em:description>...'])
def test_date_on_upload(self):
upload = FileUpload.objects.get(name='invalid-id-20101206.xpi')
resp = self.client.get(reverse('devhub.upload_detail',
args=[upload.uuid]))
eq_(resp.status_code, 200)
doc = pq(resp.content)
eq_(doc('td').text(), 'December 6, 2010')
class TestUploadErrors(BaseUploadTest):
fixtures = ('base/apps', 'base/addon_3615', 'base/users')
def setUp(self):
super(TestUploadErrors, self).setUp()
self.client.login(username='[email protected]',
password='password')
@mock.patch.object(waffle, 'flag_is_active')
def test_dupe_uuid(self, flag_is_active):
flag_is_active.return_value = True
addon = Addon.objects.get(pk=3615)
d = parse_addon(self.get_upload('extension.xpi'))
addon.update(guid=d['guid'])
dupe_xpi = self.get_upload('extension.xpi')
res = self.client.get(reverse('devhub.upload_detail',
args=[dupe_xpi.uuid, 'json']))
eq_(res.status_code, 400, res.content)
data = json.loads(res.content)
eq_(data['validation']['messages'],
[{'tier': 1, 'message': 'Duplicate UUID found.',
'type': 'error', 'fatal': True}])
eq_(data['validation']['ending_tier'], 1)
class TestFileValidation(amo.tests.TestCase):
fixtures = ['base/apps', 'base/users', 'base/platforms',
'devhub/addon-validation-1']
def setUp(self):
assert self.client.login(username='[email protected]', password='password')
self.user = UserProfile.objects.get(email='[email protected]')
self.file_validation = FileValidation.objects.get(pk=1)
self.file = self.file_validation.file
with storage.open(self.file.file_path, 'w') as f:
f.write('<pretend this is an xpi>\n')
self.addon = self.file.version.addon
args = [self.addon.slug, self.file.id]
self.url = reverse('devhub.file_validation', args=args)
self.json_url = reverse('devhub.json_file_validation', args=args)
def test_version_list(self):
r = self.client.get(self.addon.get_dev_url('versions'))
eq_(r.status_code, 200)
a = pq(r.content)('td.file-validation a')
eq_(a.text(), '0 errors, 0 warnings')
eq_(a.attr('href'), self.url)
def test_results_page(self):
r = self.client.get(self.url, follow=True)
eq_(r.status_code, 200)
eq_(r.context['addon'], self.addon)
doc = pq(r.content)
assert not doc('#site-nav').hasClass('app-nav'), (
'Expected add-ons devhub nav')
eq_(doc('header h2').text(),
u'Validation Results for searchaddon11102010-20101217.xml')
eq_(doc('#addon-validator-suite').attr('data-validateurl'),
self.json_url)
def test_only_dev_can_see_results(self):
self.client.logout()
assert self.client.login(username='[email protected]',
password='password')
eq_(self.client.head(self.url, follow=True).status_code, 403)
def test_only_dev_can_see_json_results(self):
self.client.logout()
assert self.client.login(username='[email protected]',
password='password')
eq_(self.client.head(self.json_url, follow=True).status_code, 403)
def test_editor_can_see_results(self):
self.client.logout()
assert self.client.login(username='[email protected]',
password='password')
eq_(self.client.head(self.url, follow=True).status_code, 200)
def test_editor_can_see_json_results(self):
self.client.logout()
assert self.client.login(username='[email protected]',
password='password')
eq_(self.client.head(self.json_url, follow=True).status_code, 200)
def test_no_html_in_messages(self):
r = self.client.post(self.json_url, follow=True)
eq_(r.status_code, 200)
data = json.loads(r.content)
msg = data['validation']['messages'][0]
eq_(msg['message'], 'The value of <em:id> is invalid.')
eq_(sorted(msg['context']),
[[u'<foo/>'], u'<em:description>...'])
@mock.patch('files.models.File.has_been_validated')
def test_json_results_post(self, has_been_validated):
has_been_validated.__nonzero__.return_value = False
eq_(self.client.post(self.json_url).status_code, 200)
has_been_validated.__nonzero__.return_value = True
eq_(self.client.post(self.json_url).status_code, 200)
@mock.patch('files.models.File.has_been_validated')
def test_json_results_get(self, has_been_validated):
has_been_validated.__nonzero__.return_value = True
eq_(self.client.get(self.json_url).status_code, 200)
has_been_validated.__nonzero__.return_value = False
eq_(self.client.get(self.json_url).status_code, 405)
class TestValidateAddon(amo.tests.TestCase):
fixtures = ['base/users']
def setUp(self):
super(TestValidateAddon, self).setUp()
assert self.client.login(username='[email protected]',
password='password')
def test_login_required(self):
self.client.logout()
r = self.client.get(reverse('devhub.validate_addon'))
eq_(r.status_code, 302)
def test_context(self):
r = self.client.get(reverse('devhub.validate_addon'))
eq_(r.status_code, 200)
doc = pq(r.content)
eq_(doc('#upload-addon').attr('data-upload-url'),
reverse('devhub.standalone_upload'))
class TestValidateFile(BaseUploadTest):
fixtures = ['base/apps', 'base/users', 'base/addon_3615',
'devhub/addon-file-100456', 'base/platforms']
def setUp(self):
super(TestValidateFile, self).setUp()
assert self.client.login(username='[email protected]', password='password')
self.user = UserProfile.objects.get(email='[email protected]')
self.file = File.objects.get(pk=100456)
# Move the file into place as if it were a real file
with storage.open(self.file.file_path, 'w') as dest:
copyfileobj(open(self.file_path('invalid-id-20101206.xpi')),
dest)
self.addon = self.file.version.addon
def tearDown(self):
super(TestValidateFile, self).tearDown()
if storage.exists(self.file.file_path):
storage.delete(self.file.file_path)
@attr('validator')
def test_lazy_validate(self):
r = self.client.post(reverse('devhub.json_file_validation',
args=[self.addon.slug, self.file.id]),
follow=True)
eq_(r.status_code, 200)
data = json.loads(r.content)
assert_no_validation_errors(data)
msg = data['validation']['messages'][0]
eq_(msg['message'], 'The value of <em:id> is invalid.')
def test_time(self):
r = self.client.post(reverse('devhub.file_validation',
args=[self.addon.slug, self.file.id]),
follow=True)
doc = pq(r.content)
assert doc('time').text()
@mock.patch.object(settings, 'EXPOSE_VALIDATOR_TRACEBACKS', False)
@mock.patch('devhub.tasks.run_validator')
def test_validator_errors(self, v):
v.side_effect = ValueError('catastrophic failure in amo-validator')
r = self.client.post(reverse('devhub.json_file_validation',
args=[self.addon.slug, self.file.id]),
follow=True)
eq_(r.status_code, 200)
data = json.loads(r.content)
eq_(data['validation'], '')
eq_(data['error'].strip(),
'ValueError: catastrophic failure in amo-validator')
@mock.patch('devhub.tasks.run_validator')
def test_validator_sets_binary_flag_for_extensions(self, v):
v.return_value = json.dumps({
"errors": 0,
"success": True,
"warnings": 0,
"notices": 0,
"message_tree": {},
"messages": [],
"metadata": {
"contains_binary_extension": True,
"version": "1.0",
"name": "gK0Bes Bot",
"id": "gkobes@gkobes"
}
})
eq_(self.addon.binary, False)
r = self.client.post(reverse('devhub.json_file_validation',
args=[self.addon.slug, self.file.id]),
follow=True)
eq_(r.status_code, 200)
data = json.loads(r.content)
assert_no_validation_errors(data)
addon = Addon.objects.get(pk=self.addon.id)
eq_(addon.binary, True)
@mock.patch('validator.validate.validate')
    def test_validation_does_not_run_compat_test(self, v):
self.client.post(reverse('devhub.json_file_validation',
args=[self.addon.slug, self.file.id]),
follow=True)
assert not v.call_args[1].get('compat_test', True)
@mock.patch('devhub.tasks.run_validator')
def test_ending_tier_is_preserved(self, v):
v.return_value = json.dumps({
"errors": 0,
"success": True,
"warnings": 0,
"notices": 0,
"message_tree": {},
"messages": [],
"ending_tier": 5,
"metadata": {
"contains_binary_extension": True,
"version": "1.0",
"name": "gK0Bes Bot",
"id": "gkobes@gkobes"
}
})
r = self.client.post(reverse('devhub.json_file_validation',
args=[self.addon.slug, self.file.id]),
follow=True)
eq_(r.status_code, 200)
data = json.loads(r.content)
eq_(data['validation']['ending_tier'], 5)
@mock.patch('devhub.tasks.run_validator')
def test_validator_sets_binary_flag_for_content(self, v):
v.return_value = json.dumps({
"errors": 0,
"success": True,
"warnings": 0,
"notices": 0,
"message_tree": {},
"messages": [],
"metadata": {
"contains_binary_content": True,
"version": "1.0",
"name": "gK0Bes Bot",
"id": "gkobes@gkobes"
}
})
eq_(self.addon.binary, False)
r = self.client.post(reverse('devhub.json_file_validation',
args=[self.addon.slug, self.file.id]),
follow=True)
eq_(r.status_code, 200)
data = json.loads(r.content)
assert_no_validation_errors(data)
addon = Addon.objects.get(pk=self.addon.id)
eq_(addon.binary, True)
@mock.patch('devhub.tasks.run_validator')
def test_linkify_validation_messages(self, v):
v.return_value = json.dumps({
"errors": 0,
"success": True,
"warnings": 1,
"notices": 0,
"message_tree": {},
"messages": [{
"context": ["<code>", None],
"description": [
"Something something, see https://bugzilla.mozilla.org/"],
"column": 0,
"line": 1,
"file": "chrome/content/down.html",
"tier": 2,
"message": "Some warning",
"type": "warning",
"id": [],
"uid": "bb9948b604b111e09dfdc42c0301fe38"
}],
"metadata": {}
})
r = self.client.post(reverse('devhub.json_file_validation',
args=[self.addon.slug, self.file.id]),
follow=True)
eq_(r.status_code, 200)
data = json.loads(r.content)
assert_no_validation_errors(data)
doc = pq(data['validation']['messages'][0]['description'][0])
eq_(doc('a').text(), 'https://bugzilla.mozilla.org/')
@mock.patch.object(settings, 'VALIDATOR_MESSAGE_LIMIT', 10)
def test_limit_validator_warnings(self):
data = {
"error": None,
"validation": {
"errors": 0,
"success": True,
"warnings": 500,
"notices": 0,
"message_tree": {},
"messages": [{
"context": ["<code>", None],
"description": [
"Something something, see https://bugzilla.mozilla.org/"],
"column": 0,
"line": 1,
"file": "chrome/content/down.html",
"tier": 2,
"message": "Some warning",
"type": "warning",
"id": [],
"uid": "bb9948b604b111e09dfdc42c0301fe38"
}] * 12,
"metadata": {}
}
}
make_validation_result(data)
eq_(len(data['validation']['messages']), 11)
assert 'truncated' in data['validation']['messages'][-1]['message']
eq_(data['validation']['messages'][-1]['type'], 'warning')
@mock.patch.object(settings, 'VALIDATOR_MESSAGE_LIMIT', 10)
def test_limit_validator_compat_errors(self):
orig_data = {
"error": None,
"validation": {
"errors": 0,
"success": True,
"warnings": 100,
"notices": 0,
"message_tree": {},
"compatibility_summary": {"errors": 100, "warnings": 0, "notices": 0},
"messages": [{
"context": ["<code>", None],
"description": [
"Something something, see https://bugzilla.mozilla.org/"],
"column": 0,
"line": 1,
"file": "chrome/content/down.html",
"tier": 2,
"message": "Some warning",
"type": "warning",
"compatibility_type": "warning",
"id": [],
"uid": "bb9948b604b111e09dfdc42c0301fe38"
},
{
"context": ["<code>", None],
"description": [
"Something something, see https://bugzilla.mozilla.org/"],
"column": 0,
"line": 1,
"file": "chrome/content/down.html",
"tier": 2,
"message": "Some error",
"type": "warning",
"compatibility_type": "warning",
"id": [],
"uid": "bb9948b604b111e09dfdc42c0301fe38"
}] * 50,
"metadata": {}
}
}
data = copy.deepcopy(orig_data)
make_validation_result(data)
eq_(len(data['validation']['messages']), 11)
assert 'truncated' in data['validation']['messages'][-1]['message']
eq_(data['validation']['messages'][-1]['type'], 'warning')
data = copy.deepcopy(orig_data)
make_validation_result(data, is_compatibility=True)
eq_(len(data['validation']['messages']), 11)
assert 'truncated' in data['validation']['messages'][-1]['message']
eq_(data['validation']['messages'][-1]['type'], 'error')
@mock.patch.object(settings, 'VALIDATOR_MESSAGE_LIMIT', 10)
def test_limit_validator_errors(self):
data = {
"error": None,
"validation": {
"errors": 100,
"success": True,
"warnings": 100,
"notices": 0,
"message_tree": {},
"messages": [{
"context": ["<code>", None],
"description": [
"Something something, see https://bugzilla.mozilla.org/"],
"column": 0,
"line": 1,
"file": "chrome/content/down.html",
"tier": 2,
"message": "Some warning",
"type": "warning",
"id": [],
"uid": "bb9948b604b111e09dfdc42c0301fe38"
},
{
"context": ["<code>", None],
"description": [
"Something something, see https://bugzilla.mozilla.org/"],
"column": 0,
"line": 1,
"file": "chrome/content/down.html",
"tier": 2,
"message": "Some error",
"type": "error",
"id": [],
"uid": "bb9948b604b111e09dfdc42c0301fe38"
}] * 50,
"metadata": {}
}
}
make_validation_result(data)
eq_(len(data['validation']['messages']), 11)
assert 'truncated' in data['validation']['messages'][-1]['message']
eq_(data['validation']['messages'][-1]['type'], 'error')
@mock.patch.object(settings, 'EXPOSE_VALIDATOR_TRACEBACKS', False)
@mock.patch('devhub.tasks.run_validator')
def test_hide_validation_traceback(self, run_validator):
run_validator.side_effect = RuntimeError('simulated task error')
r = self.client.post(reverse('devhub.json_file_validation',
args=[self.addon.slug, self.file.id]),
follow=True)
eq_(r.status_code, 200)
data = json.loads(r.content)
eq_(data['validation'], '')
eq_(data['error'], 'RuntimeError: simulated task error')
@mock.patch.object(waffle, 'flag_is_active')
@mock.patch('devhub.tasks.run_validator')
def test_rdf_parse_errors_are_ignored(self, run_validator,
flag_is_active):
run_validator.return_value = json.dumps({
"errors": 0,
"success": True,
"warnings": 0,
"notices": 0,
"message_tree": {},
"messages": [],
"metadata": {}
})
flag_is_active.return_value = True
addon = Addon.objects.get(pk=3615)
xpi = self.get_upload('extension.xpi')
d = parse_addon(xpi.path)
# Set up a duplicate upload:
addon.update(guid=d['guid'])
res = self.client.get(reverse('devhub.validate_addon'))
doc = pq(res.content)
upload_url = doc('#upload-addon').attr('data-upload-url')
with storage.open(xpi.path, 'rb') as f:
# Simulate JS file upload
res = self.client.post(upload_url, {'upload': f}, follow=True)
data = json.loads(res.content)
# Simulate JS result polling:
res = self.client.get(data['url'])
data = json.loads(res.content)
# Make sure we don't see a dupe UUID error:
eq_(data['validation']['messages'], [])
# Simulate JS result polling on detail page:
res = self.client.get(data['full_report_url'], follow=True)
res = self.client.get(res.context['validate_url'], follow=True)
data = json.loads(res.content)
# Again, make sure we don't see a dupe UUID error:
eq_(data['validation']['messages'], [])
@mock.patch('devhub.tasks.run_validator')
def test_compatibility_check(self, run_validator):
run_validator.return_value = json.dumps({
'errors': 0,
'success': True,
'warnings': 0,
'notices': 0,
'message_tree': {},
'messages': [],
'metadata': {}
})
xpi = self.get_upload('extension.xpi')
AppVersion.objects.create(
application=Application.objects.get(guid=FIREFOX.guid),
version='10.0.*')
compatibility_check(xpi, FIREFOX.guid, '10.0.*')
eq_(run_validator.call_args[1]['compat'], True)
class TestCompatibilityResults(amo.tests.TestCase):
fixtures = ['base/users', 'devhub/addon-compat-results']
def setUp(self):
super(TestCompatibilityResults, self).setUp()
assert self.client.login(username='[email protected]',
password='password')
self.addon = Addon.objects.get(slug='addon-compat-results')
self.result = ValidationResult.objects.get(
file__version__addon=self.addon)
self.job = self.result.validation_job
def validate(self, expected_status=200):
r = self.client.post(reverse('devhub.json_bulk_compat_result',
args=[self.addon.slug, self.result.id]),
follow=True)
eq_(r.status_code, expected_status)
return json.loads(r.content)
def test_login_protected(self):
self.client.logout()
r = self.client.get(reverse('devhub.bulk_compat_result',
args=[self.addon.slug, self.result.id]))
eq_(r.status_code, 302)
r = self.client.post(reverse('devhub.json_bulk_compat_result',
args=[self.addon.slug, self.result.id]))
eq_(r.status_code, 302)
def test_target_version(self):
r = self.client.get(reverse('devhub.bulk_compat_result',
args=[self.addon.slug, self.result.id]))
eq_(r.status_code, 200)
doc = pq(r.content)
ver = json.loads(doc('.results').attr('data-target-version'))
assert amo.FIREFOX.guid in ver, ('Unexpected: %s' % ver)
eq_(ver[amo.FIREFOX.guid], self.job.target_version.version)
def test_app_trans(self):
r = self.client.get(reverse('devhub.bulk_compat_result',
args=[self.addon.slug, self.result.id]))
eq_(r.status_code, 200)
doc = pq(r.content)
trans = json.loads(doc('.results').attr('data-app-trans'))
for app in amo.APPS.values():
eq_(trans[app.guid], app.pretty)
def test_app_version_change_links(self):
r = self.client.get(reverse('devhub.bulk_compat_result',
args=[self.addon.slug, self.result.id]))
eq_(r.status_code, 200)
doc = pq(r.content)
trans = json.loads(doc('.results').attr('data-version-change-links'))
eq_(trans['%s 4.0.*' % amo.FIREFOX.guid],
'https://developer.mozilla.org/en/Firefox_4_for_developers')
def test_validation_success(self):
data = self.validate()
eq_(data['validation']['messages'][3]['for_appversions'],
{'{ec8030f7-c20a-464f-9b0e-13a3a9e97384}': ['4.0b3']})
def test_time(self):
r = self.client.post(reverse('devhub.bulk_compat_result',
args=[self.addon.slug, self.result.id]),
follow=True)
eq_(r.status_code, 200)
doc = pq(r.content)
assert doc('time').text()
eq_(doc('table tr td:eq(1)').text(), 'Firefox 4.0.*')
@mock.patch.object(settings, 'EXPOSE_VALIDATOR_TRACEBACKS', True)
def test_validation_error(self):
try:
raise RuntimeError('simulated task error')
except:
error = ''.join(traceback.format_exception(*sys.exc_info()))
self.result.update(validation='', task_error=error)
data = self.validate()
eq_(data['validation'], '')
eq_(data['error'], error)
@mock.patch.object(settings, 'EXPOSE_VALIDATOR_TRACEBACKS', False)
def test_hide_validation_traceback(self):
try:
raise RuntimeError('simulated task error')
except:
error = ''.join(traceback.format_exception(*sys.exc_info()))
self.result.update(validation='', task_error=error)
data = self.validate()
eq_(data['validation'], '')
eq_(data['error'], 'RuntimeError: simulated task error')
class TestUploadCompatCheck(BaseUploadTest):
fixtures = ['base/apps', 'base/appversions', 'base/addon_3615']
compatibility_result = json.dumps({
"errors": 0,
"success": True,
"warnings": 0,
"notices": 0,
"compatibility_summary": {"notices": 0,
"errors": 0,
"warnings": 1},
"message_tree": {},
"messages": [],
"metadata": {}
})
def setUp(self):
super(TestUploadCompatCheck, self).setUp()
assert self.client.login(username='[email protected]', password='password')
self.app = Application.objects.get(pk=amo.FIREFOX.id)
self.appver = AppVersion.objects.get(application=self.app,
version='3.7a1pre')
self.upload_url = reverse('devhub.standalone_upload')
def poll_upload_status_url(self, upload_uuid):
return reverse('devhub.standalone_upload_detail', args=[upload_uuid])
def fake_xpi(self, filename=None):
"""Any useless file that has a name property (for Django)."""
if not filename:
return open(get_image_path('non-animated.gif'), 'rb')
return storage.open(filename, 'rb')
def upload(self, filename=None):
with self.fake_xpi(filename=filename) as f:
# Simulate how JS posts data w/ app/version from the form.
res = self.client.post(self.upload_url,
{'upload': f,
'app_id': self.app.pk,
'version_id': self.appver.pk},
follow=True)
return json.loads(res.content)
def test_compat_form(self):
res = self.client.get(reverse('devhub.check_addon_compatibility'))
eq_(res.status_code, 200)
doc = pq(res.content)
options = doc('#id_application option')
expected = [(str(a.id), unicode(a.pretty)) for a in amo.APP_USAGE]
for idx, element in enumerate(options):
e = pq(element)
val, text = expected[idx]
eq_(e.val(), val)
eq_(e.text(), text)
eq_(doc('#upload-addon').attr('data-upload-url'), self.upload_url)
# TODO(Kumar) actually check the form here after bug 671587
@mock.patch('devhub.tasks.run_validator')
def test_js_upload_validates_compatibility(self, run_validator):
run_validator.return_value = '' # Empty to simulate unfinished task.
data = self.upload()
kw = run_validator.call_args[1]
eq_(kw['for_appversions'], {self.app.guid: [self.appver.version]})
eq_(kw['overrides'],
{'targetapp_minVersion': {self.app.guid: self.appver.version},
'targetapp_maxVersion': {self.app.guid: self.appver.version}})
eq_(data['url'], self.poll_upload_status_url(data['upload']))
@mock.patch('devhub.tasks.run_validator')
def test_js_poll_upload_status(self, run_validator):
run_validator.return_value = self.compatibility_result
data = self.upload()
url = self.poll_upload_status_url(data['upload'])
res = self.client.get(url)
data = json.loads(res.content)
if data['validation'] and data['validation']['messages']:
raise AssertionError('Unexpected validation errors: %s'
% data['validation']['messages'])
@mock.patch('devhub.tasks.run_validator')
def test_compat_result_report(self, run_validator):
run_validator.return_value = self.compatibility_result
data = self.upload()
poll_url = self.poll_upload_status_url(data['upload'])
res = self.client.get(poll_url)
data = json.loads(res.content)
res = self.client.get(data['full_report_url'])
eq_(res.status_code, 200)
eq_(res.context['result_type'], 'compat')
doc = pq(res.content)
# Shows app/version on the results page.
eq_(doc('table tr td:eq(0)').text(), 'Firefox 3.7a1pre')
eq_(res.context['validate_url'], poll_url)
def test_compat_application_versions(self):
res = self.client.get(reverse('devhub.check_addon_compatibility'))
eq_(res.status_code, 200)
doc = pq(res.content)
data = {'application_id': amo.FIREFOX.id,
'csrfmiddlewaretoken':
doc('input[name=csrfmiddlewaretoken]').val()}
r = self.client.post(doc('#id_application').attr('data-url'),
data)
eq_(r.status_code, 200)
data = json.loads(r.content)
empty = True
for id, ver in data['choices']:
empty = False
eq_(AppVersion.objects.get(pk=id).version, ver)
assert not empty, "Unexpected: %r" % data
@mock.patch.object(waffle, 'flag_is_active')
@mock.patch('devhub.tasks.run_validator')
def test_rdf_parse_errors_are_ignored(self, run_validator,
flag_is_active):
run_validator.return_value = self.compatibility_result
flag_is_active.return_value = True
addon = Addon.objects.get(pk=3615)
dupe_xpi = self.get_upload('extension.xpi')
d = parse_addon(dupe_xpi)
# Set up a duplicate upload:
addon.update(guid=d['guid'])
data = self.upload(filename=dupe_xpi.path)
# Make sure we don't see a dupe UUID error:
eq_(data['validation']['messages'], [])
@mock.patch('devhub.tasks.run_validator')
def test_compat_summary_overrides(self, run_validator):
run_validator.return_value = json.dumps({
"success": True,
"errors": 0,
"warnings": 0,
"notices": 0,
"compatibility_summary": {"notices": 1,
"errors": 2,
"warnings": 3},
"message_tree": {},
"messages": [],
"metadata": {}
})
data = self.upload()
eq_(data['validation']['notices'], 1)
eq_(data['validation']['errors'], 2)
eq_(data['validation']['warnings'], 3)
res = self.client.get(self.poll_upload_status_url(data['upload']))
data = json.loads(res.content)
eq_(data['validation']['notices'], 1)
eq_(data['validation']['errors'], 2)
eq_(data['validation']['warnings'], 3)
@mock.patch('devhub.tasks.run_validator')
def test_compat_error_type_override(self, run_validator):
run_validator.return_value = json.dumps({
"success": True,
"errors": 0,
"warnings": 0,
"notices": 0,
"compatibility_summary": {"notices": 0,
"errors": 1,
"warnings": 0},
"message_tree": {},
"messages": [{"type": "warning",
"compatibility_type": "error",
"tier": 1},
{"type": "warning",
"compatibility_type": None,
"tier": 1}],
"metadata": {}
})
data = self.upload()
eq_(data['validation']['messages'][0]['type'], 'error')
eq_(data['validation']['messages'][1]['type'], 'warning')
| robhudson/zamboni | apps/devhub/tests/test_views_validation.py | Python | bsd-3-clause | 33,929 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from builtins import *
from carcd import constants
import pinyin
import string
import re
from collections import OrderedDict
import sys
def beautify(context):
"""
Keep better looking in ascii-chinese combined context
add space between ascii_letters and Chinese
avoid multiple spaces,
avoid space between chinese and symbols,
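
    Illustrative examples (assumed behaviour, not from the original docstring):
        beautify('ABC中文DEF') -> 'ABC 中文 DEF'
        beautify('中文 !') -> '中文!'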
"""
if sys.hexversion >= 0x03000000:
chinese = r'(?a)[^{}]'.format(re.escape(string.printable))
else:
chinese = r'[^{}]'.format(re.escape(string.printable))
symbols = r'[{}]'.format(re.escape(string.punctuation))
ascii = r'\w'
context = re.sub(r'({0})({1})'.format(chinese, ascii), r'\1 \2', context)
context = re.sub(r'({1})({0})'.format(chinese, ascii), r'\1 \2', context)
context = re.sub(r'({0}) ({1})'.format(chinese, symbols), r'\1\2', context)
context = re.sub(r'({1}) ({0})'.format(chinese, symbols), r'\1\2', context)
remove_multispace = re.sub(r'\s+', ' ', context)
return remove_multispace.strip()
def asciilize(context):
"""
Transform Chinese characters to pinyin,
multibyte symbol to unibyte symbol
"""
output = []
for ch in context:
if ch in string.printable:
output.append(ch)
elif ch in constants.FULL2HALF:
output.append(constants.FULL2HALF.get(ch, ''))
else:
output.append(' ')
output.extend(pinyin.get(ch).capitalize())
return beautify(''.join(output))
def name_split(name):
"""
Split a cd filename to multiple parts
return as a OrderedDict:
{
<category>: <string>,
}
the categories are following:
`number` for leading numbers
`title` for title name
`ext` for file extension
'space' for space ' '
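
    An illustrative example (assumed, not from the original docstring):
        name_split('01-02 歌曲.mp3') ->
        OrderedDict([('number', '01-02'), ('space', ' '),
                     ('title', '歌曲'), ('ext', '.mp3')])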
"""
categories = ['number', 'space', 'title', 'ext']
    pattern = (r'(?P<number>\d+(-?\d+)?)'
'(?P<space> )?'
'(?P<title>.*?)'
'(?P<ext>\....)')
itemdict = re.match(pattern, name).groupdict()
itemlist = [(category, itemdict[category]) for category in categories]
return OrderedDict(itemlist)
def name_join(items):
"""
    Join name items, eliminating None values in the OrderedDict
"""
return ''.join([item for item in items.values() if item is not None])
def number_format(number_string, fill=2):
"""
    add padding zeros to make aligned numbers
ex.
>>> number_format('2')
'02'
>>> number_format('1-2')
'01-02'
"""
output = []
digits_spliter = r'(?P<digit>\d+)|(?P<nondigit>.)'
for token in [m.groups() for m in re.finditer(digits_spliter, number_string)]:
if token[0] is None:
output.append(token[1])
else:
            output.append(token[0].zfill(fill))
return ''.join(output)
def name_handle(name):
"""
Complicated processes to manipulate a given filename
"""
items = name_split(name)
output = []
for item in items.items():
if (item[0] == 'number') & (item[1] is not None):
output.append(('number', number_format(item[1])))
if (item[0] == 'space') & (item[1] is not None):
output.append(('space', ' '))
        if (item[0] == 'title') & (len(set(item[1]) - set(string.printable)) > 0):
output.append(('title', asciilize(item[1]) + item[1]))
if (item[0] == 'ext') & (item[1] is not None):
output.append(('ext', item[1]))
items = OrderedDict(output)
return beautify(name_join(items))
def is_pinyined(context):
"""
Return True if both Chinese and its pinyin are in the context
else return False
"""
if sys.hexversion >= 0x03000000:
chinese = r'(?a)[^{}]+'.format(re.escape(string.printable))
else:
chinese = r'[^{}]+'.format(re.escape(string.printable))
chinese_frags = re.findall(chinese, context)
tests = [asciilize(frag) in context for frag in chinese_frags]
return all(tests)
| jeroyang/carcd | carcd/name.py | Python | bsd-3-clause | 4,120 |
import numpy as np
import pytest
from pandas._libs.tslibs.timedeltas import delta_to_nanoseconds
import pandas as pd
from pandas import Timedelta
@pytest.mark.parametrize(
"obj,expected",
[
(np.timedelta64(14, "D"), 14 * 24 * 3600 * 1e9),
(Timedelta(minutes=-7), -7 * 60 * 1e9),
(Timedelta(minutes=-7).to_pytimedelta(), -7 * 60 * 1e9),
(pd.offsets.Nano(125), 125),
(1, 1),
(np.int64(2), 2),
(np.int32(3), 3),
],
)
def test_delta_to_nanoseconds(obj, expected):
result = delta_to_nanoseconds(obj)
assert result == expected
def test_delta_to_nanoseconds_error():
obj = np.array([123456789], dtype="m8[ns]")
with pytest.raises(TypeError, match="<class 'numpy.ndarray'>"):
delta_to_nanoseconds(obj)
| toobaz/pandas | pandas/tests/tslibs/test_timedeltas.py | Python | bsd-3-clause | 793 |
import functools
import networkx
class Ordering(object):
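    """
    A small partial-ordering helper (summary inferred from the code below;
    not an original docstring): order() records that every item in one
    argument sorts before every item in the next argument, lt() reports
    whether one item precedes another once the transitive closure has been
    computed, and key() wraps an item so it can be used with sorted().
    """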
@functools.total_ordering
class OrderedItem(object):
def __init__(self, ordering, item):
self._ordering = ordering
self._item = item
def __eq__(self, other):
return self._item == other._item
def __lt__(self, other):
return self._ordering.lt(self._item, other._item)
def __init__(self):
self._graph = networkx.DiGraph()
self._compiled = True
def lt(self, left, right):
self._compile()
return self._graph.has_edge(left, right)
def key(self, item):
return Ordering.OrderedItem(self, item)
def order(self, *args):
self._compiled = False
result = set()
for blob in args:
for item in self._iterate(blob):
result.add(item)
for i in range(len(args)-1):
for lower in self._iterate(args[i]):
for higher in self._iterate(args[i+1]):
self._graph.add_edge(lower, higher)
return result
def _compile(self):
if self._compiled:
return
for left in self._graph.nodes_iter():
for right in networkx.descendants(self._graph, left):
self._graph.add_edge(left, right)
self._check_cycles()
self._compiled = True
def _check_cycles(self):
if not networkx.is_directed_acyclic_graph(self._graph):
assert False, "Cycle detected"
def _iterate(self, list_or_not):
try:
for item in list_or_not:
yield item
except TypeError:
yield list_or_not
| abortz/saycbridge | src/z3b/ordering.py | Python | bsd-3-clause | 1,694 |
# -*- coding: utf-8 -*-
"""
:copyright: (C) 2010-2013 by Contrail Consortium.
"""
from conpaas.core.https import client
def check_agent_process(host, port):
"""GET () check_agent_process"""
method = 'check_agent_process'
return client.check_response(client.jsonrpc_get(host, port, '/', method))
def create_hub(host, port):
"""POST (my_ip) create_hub"""
method = 'create_hub'
params = { 'my_ip': host }
return client.check_response(client.jsonrpc_post(
host, port, '/', method, params))
def create_node(host, port, hub_ip):
"""POST (my_ip, hub_ip) create_node"""
method = 'create_node'
params = { 'my_ip': host, 'hub_ip': hub_ip }
return client.check_response(client.jsonrpc_post(
host, port, '/', method, params))
| mihaisoloi/conpaas | conpaas-services/src/conpaas/services/selenium/agent/client.py | Python | bsd-3-clause | 783 |
###############################################################################
##
## Copyright (C) 2014-2016, New York University.
## Copyright (C) 2011-2014, NYU-Poly.
## Copyright (C) 2006-2011, University of Utah.
## All rights reserved.
## Contact: [email protected]
##
## This file is part of VisTrails.
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are met:
##
## - Redistributions of source code must retain the above copyright notice,
## this list of conditions and the following disclaimer.
## - Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in the
## documentation and/or other materials provided with the distribution.
## - Neither the name of the New York University nor the names of its
## contributors may be used to endorse or promote products derived from
## this software without specific prior written permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
##
###############################################################################
from __future__ import division
from entity import Entity
class ParameterExplorationEntity(Entity):
type_id = 6
def __init__(self, pe=None):
Entity.__init__(self)
self.id = None
self.update(pe)
@staticmethod
def create(*args):
entity = ParameterExplorationEntity()
entity.load(*args)
return entity
def update(self, pe):
self.pe = pe
if self.pe is not None:
self.name = pe.name
self.user = pe.user
self.mod_time = pe.date if pe.date else self.now()
self.create_time = pe.date if pe.date else self.now()
self.size = len(pe.functions)
self.description = ""
self.url = 'test'
self.was_updated = True
# # returns string
# def get_name(self):
# raise RuntimeError("Method is abstract")
# # returns datetime
# def get_mod_time(self):
# raise RuntimeError("Method is abstract")
# # returns datetime
# def get_create_time(self):
# raise RuntimeError("Method is abstract")
# # returns string
# # FIXME: perhaps this should be a User object at some point
# def get_user(self):
# raise RuntimeError("Method is abstract")
# # returns tuple (<entity_type>, <entity_id>)
# def get_id(self):
# raise RuntimeError("Method is abstract")
# # returns integer
# def get_size(self):
# raise RuntimeError("Method is abstract")
# # returns possibly empty list of Entity objects
# def get_children(self):
# raise RuntimeError("Method is abstract")
# # returns list of strings representing paths
# # FIXME: should this be uris for database access?
# def get_image_fnames(self):
# raise RuntimeError("Method is abstract")
# returns boolean, True if search input is satisfied else False
def match(self, search):
raise RuntimeError("Not implemented") | VisTrails/VisTrails | vistrails/core/collection/parameter_exploration.py | Python | bsd-3-clause | 3,905 |
"""Undocumented Module"""
__all__ = ['DirectCheckButton']
from panda3d.core import *
from .DirectButton import *
from .DirectLabel import *
class DirectCheckButton(DirectButton):
"""
DirectCheckButton(parent) - Create a DirectGuiWidget which responds
    to mouse clicks by setting a state of on or off and executing a callback
function (passing that state through) if defined
"""
def __init__(self, parent = None, **kw):
# Inherits from DirectButton
# A Direct Frame can have:
# - A background texture (pass in path to image, or Texture Card)
# - A midground geometry item (pass in geometry)
# - A foreground text Node (pass in text string or Onscreen Text)
# For a direct button:
# Each button has 4 states (ready, press, rollover, disabled)
# The same image/geom/text can be used for all four states or each
# state can have a different text/geom/image
# State transitions happen automatically based upon mouse interaction
        # Responds to click events and calls command if it is defined
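        # Illustrative usage (not from the original source):
        #     cb = DirectCheckButton(text='Mute', scale=0.1,
        #                            command=lambda isChecked: print(isChecked))
        # The command receives the new indicator value (0 or 1), plus any
        # extraArgs, each time the box is toggled.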
self.colors = None
optiondefs = (
('indicatorValue', 0, self.setIndicatorValue),
# boxBorder defines the space created around the check box
('boxBorder', 0, None),
# boxPlacement maps left, above, right, below
('boxPlacement', 'left', None),
('boxImage', None, None),
('boxImageScale', 1, None),
('boxImageColor', None, None),
('boxRelief', 'sunken', None),
)
# Merge keyword options with default options
self.defineoptions(kw, optiondefs)
# Initialize superclasses
DirectButton.__init__(self, parent)
self.indicator = self.createcomponent("indicator", (), None,
DirectLabel, (self,),
numStates = 2,
image = self['boxImage'],
image_scale = self['boxImageScale'],
image_color = self['boxImageColor'],
state = 'disabled',
text = ('X', 'X'),
relief = self['boxRelief'],
)
# Call option initialization functions
self.initialiseoptions(DirectCheckButton)
# After initialization with X giving it the correct size, put back space
if self['boxImage'] == None:
self.indicator['text'] = (' ', '*')
self.indicator['text_pos'] = (0, -.2)
else:
self.indicator['text'] = (' ', ' ')
if self['boxImageColor'] != None and self['boxImage'] != None:
self.colors = [VBase4(0, 0, 0, 0), self['boxImageColor']]
self.component('indicator')['image_color'] = VBase4(0, 0, 0, 0)
    # Override the resetFrameSize of DirectGuiWidget in order to provide space for the label
def resetFrameSize(self):
self.setFrameSize(fClearFrame = 1)
def setFrameSize(self, fClearFrame = 0):
if self['frameSize']:
# Use user specified bounds
self.bounds = self['frameSize']
frameType = self.frameStyle[0].getType()
ibw = self.indicator['borderWidth']
else:
# Use ready state to compute bounds
frameType = self.frameStyle[0].getType()
if fClearFrame and (frameType != PGFrameStyle.TNone):
self.frameStyle[0].setType(PGFrameStyle.TNone)
self.guiItem.setFrameStyle(0, self.frameStyle[0])
# To force an update of the button
self.guiItem.getStateDef(0)
# Clear out frame before computing bounds
self.getBounds()
# Restore frame style if necessary
if (frameType != PGFrameStyle.TNone):
self.frameStyle[0].setType(frameType)
self.guiItem.setFrameStyle(0, self.frameStyle[0])
# Ok, they didn't set specific bounds,
# let's add room for the label indicator
# get the difference in height
ibw = self.indicator['borderWidth']
indicatorWidth = (self.indicator.getWidth() + (2*ibw[0]))
indicatorHeight = (self.indicator.getHeight() + (2*ibw[1]))
diff = (indicatorHeight + (2*self['boxBorder']) -
(self.bounds[3] - self.bounds[2]))
# If background is smaller then indicator, enlarge background
if diff > 0:
if self['boxPlacement'] == 'left': #left
self.bounds[0] += -(indicatorWidth + (2*self['boxBorder']))
self.bounds[3] += diff/2
self.bounds[2] -= diff/2
elif self['boxPlacement'] == 'below': #below
self.bounds[2] += -(indicatorHeight+(2*self['boxBorder']))
elif self['boxPlacement'] == 'right': #right
self.bounds[1] += indicatorWidth + (2*self['boxBorder'])
self.bounds[3] += diff/2
self.bounds[2] -= diff/2
else: #above
self.bounds[3] += indicatorHeight + (2*self['boxBorder'])
# Else make space on correct side for indicator
else:
if self['boxPlacement'] == 'left': #left
self.bounds[0] += -(indicatorWidth + (2*self['boxBorder']))
elif self['boxPlacement'] == 'below': #below
self.bounds[2] += -(indicatorHeight + (2*self['boxBorder']))
elif self['boxPlacement'] == 'right': #right
self.bounds[1] += indicatorWidth + (2*self['boxBorder'])
else: #above
self.bounds[3] += indicatorHeight + (2*self['boxBorder'])
# Set frame to new dimensions
if ((frameType != PGFrameStyle.TNone) and
(frameType != PGFrameStyle.TFlat)):
bw = self['borderWidth']
else:
bw = (0, 0)
# Set frame to new dimensions
self.guiItem.setFrame(
self.bounds[0] - bw[0],
self.bounds[1] + bw[0],
self.bounds[2] - bw[1],
self.bounds[3] + bw[1])
# If they didn't specify a position, put it in the center of new area
if not self.indicator['pos']:
bbounds = self.bounds
lbounds = self.indicator.bounds
newpos = [0, 0, 0]
if self['boxPlacement'] == 'left': #left
newpos[0] += bbounds[0]-lbounds[0] + self['boxBorder'] + ibw[0]
dropValue = (bbounds[3]-bbounds[2]-lbounds[3]+lbounds[2])/2 + self['boxBorder']
newpos[2] += (bbounds[3]-lbounds[3] + self['boxBorder'] -
dropValue)
elif self['boxPlacement'] == 'right': #right
newpos[0] += bbounds[1]-lbounds[1] - self['boxBorder'] - ibw[0]
dropValue = (bbounds[3]-bbounds[2]-lbounds[3]+lbounds[2])/2 + self['boxBorder']
newpos[2] += (bbounds[3]-lbounds[3] + self['boxBorder']
- dropValue)
elif self['boxPlacement'] == 'above': #above
newpos[2] += bbounds[3]-lbounds[3] - self['boxBorder'] - ibw[1]
else: #below
newpos[2] += bbounds[2]-lbounds[2] + self['boxBorder'] + ibw[1]
self.indicator.setPos(newpos[0], newpos[1], newpos[2])
def commandFunc(self, event):
self['indicatorValue'] = 1 - self['indicatorValue']
        if self.colors is not None:
self.component('indicator')['image_color'] = self.colors[self['indicatorValue']]
if self['command']:
# Pass any extra args to command
self['command'](*[self['indicatorValue']] + self['extraArgs'])
def setIndicatorValue(self):
self.component('indicator').guiItem.setState(self['indicatorValue'])
        if self.colors is not None:
self.component('indicator')['image_color'] = self.colors[self['indicatorValue']]
| tobspr/panda3d | direct/src/gui/DirectCheckButton.py | Python | bsd-3-clause | 8,498 |
import logging
import time
from pyquery import PyQuery as pq
from django.conf import settings
from django.db import connection
from django.contrib.auth.models import AnonymousUser, User
from django.test import TestCase
from django.test.client import Client
from nose.tools import assert_equal, with_setup, assert_false, eq_, ok_
from nose.plugins.attrib import attr
from django.db import models
from django.core.urlresolvers import reverse
from ..models import Key
from ..views import ITEMS_PER_PAGE
class KeyViewsTest(TestCase):
def setUp(self):
self.username = 'tester23'
self.password = 'trustno1'
self.email = '[email protected]'
self.user = User(username=self.username,
email=self.email)
self.user.set_password(self.password)
self.user.save()
self.user2 = User(username='someone', email='[email protected]')
self.user2.save()
self.key1 = Key(user=self.user, description='Test Key 1')
self.key1.save()
self.key2 = Key(user=self.user, description='Test Key 2')
self.key2.save()
self.key3 = Key(user=self.user2, description='Test Key 3')
self.key3.save()
self.client = Client()
def tearDown(self):
self.user.delete()
self.user2.delete()
self.key1.delete()
self.key2.delete()
self.key3.delete()
def test_new_key(self):
self.client.login(username=self.username,
password=self.password)
data = {"description": "This is meant for a test app"}
url = reverse('valet_keys.new')
# Check out the creation page, look for the form.
resp = self.client.get(url)
eq_(200, resp.status_code)
page = pq(resp.content)
eq_(1, page.find('form.key').length)
# We don't have this key yet, right?
keys = Key.objects.filter(description=data['description'])
eq_(0, keys.count())
# Okay, create it.
resp = self.client.post(url, data, follow=False)
eq_(200, resp.status_code)
# We have the key now, right?
keys = Key.objects.filter(description=data['description'])
eq_(1, keys.count())
# Okay, and it should belong to the logged-in user
key = keys[0]
eq_(key.user, self.user)
# Take a look at the description and key shown on the result page.
page = pq(resp.content)
ok_(data['description'], page.find('.key .description').text())
ok_(key.key, page.find('.key .key').text())
# Ensure the secret on the page checks out.
secret = page.find('.key .secret').text()
ok_(key.check_secret(secret))
def test_list_key(self):
"""The current user's keys should be shown, but only that user's"""
self.client.login(username=self.username,
password=self.password)
url = reverse('valet_keys.list')
resp = self.client.get(url)
eq_(200, resp.status_code)
page = pq(resp.content)
for ct, key in ((1, self.key1), (1, self.key2), (0, self.key3)):
key_row = page.find('.keys #key-%s' % key.pk)
eq_(ct, key_row.length)
if ct > 0:
eq_(key.description, key_row.find('.description').text())
def test_key_history(self):
self.client.login(username=self.username,
password=self.password)
# Assemble some sample log lines
log_lines = []
for i in range(0, ITEMS_PER_PAGE * 2):
log_lines.append(('ping', self.user2, 'Number #%s' % i))
# Record the log lines for this key
for l in log_lines:
self.key1.log(*l)
# Reverse the lines for comparison.
log_lines.reverse()
# Iterate through 2 expected pages...
for qs, offset in (('', 0), ('?page=2', ITEMS_PER_PAGE)):
url = '%s%s' % (reverse('valet_keys.history', args=(self.key1.pk,)), qs)
resp = self.client.get(url)
eq_(200, resp.status_code)
page = pq(resp.content)
rows = page.find('.item')
for idx in range(0, ITEMS_PER_PAGE):
row = rows.eq(idx)
expected = log_lines[idx + offset]
line = (row.find('.action').text(),
row.find('.object').text(),
row.find('.notes').text())
eq_(expected[0], line[0])
ok_('%s' % expected[1] in line[1])
eq_(expected[2], line[2])
def test_disable_key(self):
"""User should be able to disable own keys, but no one else's"""
self.client.login(username=self.username,
password=self.password)
url = reverse('valet_keys.disable', args=(self.key3.pk,))
resp = self.client.get(url, follow=True)
eq_(403, resp.status_code)
resp = self.client.post(url, follow=False)
        eq_(403, resp.status_code)
url = reverse('valet_keys.list')
resp = self.client.get(url)
eq_(200, resp.status_code)
page = pq(resp.content)
eq_(0, page.find('#key-%s.disabled' % self.key1.pk).length)
ok_(not Key.objects.get(pk=self.key1.pk).is_disabled)
url = reverse('valet_keys.disable', args=(self.key1.pk,))
resp = self.client.get(url, follow=True)
eq_(200, resp.status_code)
page = pq(resp.content)
eq_(self.key1.description, page.find('.key .description').text())
resp = self.client.post(url, follow=False)
        eq_(302, resp.status_code)
url = reverse('valet_keys.list')
resp = self.client.get(url)
eq_(200, resp.status_code)
page = pq(resp.content)
eq_(1, page.find('#key-%s.disabled' % self.key1.pk).length)
ok_(Key.objects.get(pk=self.key1.pk).is_disabled)
| mozilla/mozilla-badges | vendor-local/src/django-valet-keys/valet_keys/tests/test_views.py | Python | bsd-3-clause | 5,954 |
from django.template.loader import get_template
from django.utils.translation import ugettext
import jinja2
from django_jinja import library
from olympia import amo
from olympia.access import acl
from olympia.ratings.models import RatingFlag
from .. import forms
@library.filter
def stars(num, large=False):
    # Check for 0.0 in case None was cast to a float. Should be safe,
    # since the lowest rating you can give is 1.0.
if num is None or num == 0.0:
return ugettext('Not yet rated')
else:
num = min(5, int(round(num)))
t = get_template('ratings/impala/reviews_rating.html')
# These are getting renamed for contextual sense in the template.
return jinja2.Markup(t.render({'rating': num, 'detailpage': large}))
@library.global_function
def reviews_link(addon, collection_uuid=None):
t = get_template('ratings/reviews_link.html')
return jinja2.Markup(t.render({'addon': addon,
'collection_uuid': collection_uuid}))
@library.global_function
def impala_reviews_link(addon, collection_uuid=None):
t = get_template('ratings/impala/reviews_link.html')
return jinja2.Markup(t.render({'addon': addon,
'collection_uuid': collection_uuid}))
@library.global_function
@library.render_with('ratings/report_review.html')
def report_review_popup():
return {'RatingFlag': RatingFlag, 'flag_form': forms.RatingFlagForm()}
@library.global_function
@library.render_with('ratings/edit_review.html')
def edit_review_form():
return {'form': forms.RatingForm()}
@library.global_function
@library.render_with('ratings/edit_review.html')
def edit_review_reply_form():
return {'form': forms.RatingReplyForm()}
def user_can_delete_review(request, review):
"""Return whether or not the request.user can delete reviews.
People who can delete reviews:
* The original review author.
* Reviewers with Ratings:Moderate, if the review has been flagged and
they are not an author of this add-on.
* Users in a group with "Users:Edit" or "Addons:Edit" privileges and
they are not an author of this add-on.
"""
is_rating_author = review.user_id == request.user.id
is_addon_author = review.addon.has_author(request.user)
is_moderator = (
acl.action_allowed(request, amo.permissions.RATINGS_MODERATE) and
review.editorreview
)
can_edit_users_or_addons = (
acl.action_allowed(request, amo.permissions.USERS_EDIT) or
acl.action_allowed(request, amo.permissions.ADDONS_EDIT)
)
return (
is_rating_author or
(not is_addon_author and (is_moderator or can_edit_users_or_addons))
)
@library.global_function
@jinja2.contextfunction
def check_review_delete(context, review):
return user_can_delete_review(context['request'], review)
| lavish205/olympia | src/olympia/ratings/templatetags/jinja_helpers.py | Python | bsd-3-clause | 2,881 |
"""
pyshtools datasets.
To load a dataset, call the relevant method as in this example:
hlm = pysh.datasets.Venus.VenusTopo719()
When accessing a pyshtools dataset, the file will first be downloaded from the
original source and stored in the user's cache directory (if it had not been
done previously). The file hash will be verified to ensure that it has not been
modified, and the file will then be used to instantiate and return an SHCoeffs,
SHGravCoeffs or SHMagCoeffs class instance.
For datasets of spherical harmonic coefficients, the coefficients can be read
up to a maximum specified degree by providing the optional variable lmax. For
magnetic field data, the coefficients can be returned in either tesla or nT by
use of the optional variable nt.
"""
from . import Mercury
from . import Venus
from . import Earth
from . import Moon
from . import Mars
from . import Vesta
from . import Ceres
__all__ = ['Mercury', 'Venus', 'Earth', 'Moon', 'Mars', 'Vesta', 'Ceres']
| SHTOOLS/SHTOOLS | pyshtools/datasets/__init__.py | Python | bsd-3-clause | 968 |
##########################################################################
#
# Copyright (c) 2013-2014, John Haddon. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
__import__( "GafferScene" )
from _GafferOSL import *
__import__( "IECore" ).loadConfig( "GAFFER_STARTUP_PATHS", {}, subdirectory = "GafferOSL" )
| chippey/gaffer | python/GafferOSL/__init__.py | Python | bsd-3-clause | 1,943 |
"""
Set of "markup" template filters for Django. These filters transform plain text
markup syntaxes to HTML; currently there is support for:
* Textile, which requires the PyTextile library available at
http://loopcore.com/python-textile/
* Markdown, which requires the Python-markdown library from
http://www.freewisdom.org/projects/python-markdown
* reStructuredText, which requires docutils from http://docutils.sf.net/
"""
from django import template
from django.conf import settings
from django.utils.encoding import smart_str, force_unicode
from django.utils.safestring import mark_safe
register = template.Library()
@register.filter(is_safe=True)
def textile(value):
try:
import textile
except ImportError:
if settings.DEBUG:
raise template.TemplateSyntaxError("Error in {% textile %} filter: The Python textile library isn't installed.")
return force_unicode(value)
else:
return mark_safe(force_unicode(textile.textile(smart_str(value), encoding='utf-8', output='utf-8')))
@register.filter(is_safe=True)
def markdown(value, arg=''):
"""
Runs Markdown over a given value, optionally using various
extensions python-markdown supports.
Syntax::
{{ value|markdown:"extension1_name,extension2_name..." }}
To enable safe mode, which strips raw HTML and only returns HTML
generated by actual Markdown syntax, pass "safe" as the first
extension in the list.
If the version of Markdown in use does not support extensions,
they will be silently ignored.
"""
try:
import markdown
except ImportError:
if settings.DEBUG:
raise template.TemplateSyntaxError("Error in {% markdown %} filter: The Python markdown library isn't installed.")
return force_unicode(value)
else:
        # markdown.version was first added in 1.6b. The only version of markdown
        # to fully support extensions before 1.6b was the short-lived 1.6a.
if hasattr(markdown, 'version'):
extensions = [e for e in arg.split(",") if e]
if len(extensions) > 0 and extensions[0] == "safe":
extensions = extensions[1:]
safe_mode = True
else:
safe_mode = False
            # Unicode support only in markdown v1.7 or above. version_info
            # exists only in markdown v1.6.2rc-2 or above.
if getattr(markdown, "version_info", None) < (1,7):
return mark_safe(force_unicode(markdown.markdown(smart_str(value), extensions, safe_mode=safe_mode)))
else:
return mark_safe(markdown.markdown(force_unicode(value), extensions, safe_mode=safe_mode))
else:
return mark_safe(force_unicode(markdown.markdown(smart_str(value))))
@register.filter(is_safe=True)
def restructuredtext(value):
try:
from docutils.core import publish_parts
except ImportError:
if settings.DEBUG:
raise template.TemplateSyntaxError("Error in {% restructuredtext %} filter: The Python docutils library isn't installed.")
return force_unicode(value)
else:
docutils_settings = getattr(settings, "RESTRUCTUREDTEXT_FILTER_SETTINGS", {})
parts = publish_parts(source=smart_str(value), writer_name="html4css1", settings_overrides=docutils_settings)
return mark_safe(force_unicode(parts["fragment"]))
| mixman/djangodev | django/contrib/markup/templatetags/markup.py | Python | bsd-3-clause | 3,450 |
# coding=utf-8
from django.contrib.contenttypes.generic import BaseGenericInlineFormSet
from django.contrib.contenttypes.models import ContentType
from django.forms.models import construct_instance
class GenericInlineFormSet(BaseGenericInlineFormSet):
def save_new(self, form, commit=True):
        # Pre-populate the generic relation so the constructed instance points
        # at self.instance via its content type and primary key.
        kwargs = {
self.ct_field.get_attname(): ContentType.objects.get_for_model(
self.instance, for_concrete_model=self.for_concrete_model).pk,
self.ct_fk_field.get_attname(): self.instance.pk,
}
form.instance = construct_instance(form, self.model(**kwargs))
return form.save(commit=commit)
| suselrd/django-allmedia | media/formsets.py | Python | bsd-3-clause | 655 |
from os.path import dirname, join
import pandas as pd
from bokeh.layouts import row, column
from bokeh.models import ColumnDataSource, CustomJS
from bokeh.models.widgets import RangeSlider, Button, DataTable, TableColumn, NumberFormatter
from bokeh.io import curdoc
df = pd.read_csv(join(dirname(__file__), 'salary_data.csv'))
source = ColumnDataSource(data=dict())
def update():
    # Keep only the rows whose salary falls inside the slider's current range.
    current = df[(df['salary'] >= slider.value[0]) & (df['salary'] <= slider.value[1])].dropna()
source.data = {
'name' : current.name,
'salary' : current.salary,
'years_experience' : current.years_experience,
}
slider = RangeSlider(title="Max Salary", start=10000, end=110000, value=(10000, 50000), step=1000, format="0,0")
slider.on_change('value', lambda attr, old, new: update())
button = Button(label="Download", button_type="success")
# The Download button runs client-side JavaScript (download.js), which turns the
# ColumnDataSource contents into a CSV file in the browser.
button.callback = CustomJS(args=dict(source=source),
                           code=open(join(dirname(__file__), "download.js")).read())
columns = [
TableColumn(field="name", title="Employee Name"),
TableColumn(field="salary", title="Income", formatter=NumberFormatter(format="$0,0.00")),
TableColumn(field="years_experience", title="Experience (years)")
]
data_table = DataTable(source=source, columns=columns, width=800)
controls = column(slider, button)
curdoc().add_root(row(controls, data_table))
curdoc().title = "Export CSV"
update()
| stonebig/bokeh | examples/app/export_csv/main.py | Python | bsd-3-clause | 1,441 |