"""Various utilities to power the command line interface."""
import importlib
import inspect
import os
import sys
import click
import nlp_playground.scripts as scripts
from nlp_playground.lib.data import list_datasets
DATASET_PARAM_NAME = 'dataset'
def import_script(command):
"""Return a module under /scripts/ named `command`."""
return importlib.import_module(
'.'.join((scripts.__name__, command))
)
def list_subcommands():
"""Return a generator listing all script module names."""
scripts_path = os.path.dirname(inspect.getfile(scripts))
files = os.listdir(scripts_path)
for some_file in files:
if some_file != '__init__.py' and some_file[-3:] == '.py':
yield some_file[:-3]
class ScriptLoader(click.MultiCommand):
"""Dynamic loader for subcommands under /scripts/."""
def list_commands(self, ctx):
"""
List every subcommand we have.
This will find main() functions in `*.py`
files under /scripts/.
"""
return list(list_subcommands())
def get_command(self, ctx, cmd_name):
"""Get a script command."""
try:
module = import_script(cmd_name)
except ModuleNotFoundError:
return None
else:
wrapper = click.command(name=cmd_name)(module.main)
return wrapper
# require_dataset is a decorator that adds a Click argument
# for a dataset. When that argument is missing, we tell the user
# which datasets are available.
require_dataset = click.argument(DATASET_PARAM_NAME) # pylint: disable=C0103
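# Example (hypothetical script under /scripts/): a module whose main() is
# decorated with require_dataset receives the dataset name as its first
# positional command-line argument:
#
#     @require_dataset
#     def main(dataset):
#         """Do something with `dataset`."""
#         print("Loaded dataset:", dataset)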
def missing_dataset_error():
"""List the available datasets in the installation."""
print("No dataset specified.")
print("Available datasets include:")
print("\t", "\n\t".join(list_datasets()))
def invoke_script(script_name, raw_args):
"""Run the command line with arguments."""
cli = ScriptLoader()
ctx = cli.make_context(script_name, raw_args)
try:
cli.invoke(ctx)
    except click.exceptions.MissingParameter as missing:
        if missing.param and missing.param.name == DATASET_PARAM_NAME:
            missing_dataset_error()
        else:
            print(missing.format_message())
            sys.exit(1)
except click.exceptions.ClickException as click_exception:
print(click_exception.message)
sys.exit(1)
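# Typical call (hypothetical): invoke_script(sys.argv[0], sys.argv[1:]),
# where raw_args starts with the subcommand name followed by its arguments,
# e.g. ['train', 'imdb'].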
| {
"content_hash": "1da7df20b11c69cc4936dabb1ae91d08",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 77,
"avg_line_length": 29.487179487179485,
"alnum_prop": 0.6539130434782608,
"repo_name": "jamesmishra/nlp-playground",
"id": "d0759c4b949f8bd56dd45fe65eb39a653a4168e2",
"size": "2300",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nlp_playground/lib/cmd.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "76418"
},
{
"name": "Shell",
"bytes": "2459"
}
],
"symlink_target": ""
} |
"""This code example creates new orders.
To determine which orders exist, run get_all_orders.py."""
__author__ = ('Nicholas Chen',
'Joseph DiLallo')
import uuid
# Import appropriate modules from the client library.
from googleads import dfp
COMPANY_ID = 'INSERT_ADVERTISER_COMPANY_ID_HERE'
SALESPERSON_ID = 'INSERT_SALESPERSON_ID_HERE'
TRAFFICKER_ID = 'INSERT_TRAFFICKER_ID_HERE'
def main(client, company_id, salesperson_id, trafficker_id):
# Initialize appropriate service.
order_service = client.GetService('OrderService', version='v201505')
# Create order objects.
orders = []
for i in xrange(5):
order = {
'name': 'Order #%s' % uuid.uuid4(),
'advertiserId': company_id,
'salespersonId': salesperson_id,
'traffickerId': trafficker_id
}
orders.append(order)
# Add orders.
orders = order_service.createOrders(orders)
# Display results.
for order in orders:
print ('Order with id \'%s\' and name \'%s\' was created.'
% (order['id'], order['name']))
if __name__ == '__main__':
# Initialize client object.
dfp_client = dfp.DfpClient.LoadFromStorage()
main(dfp_client, COMPANY_ID, SALESPERSON_ID, TRAFFICKER_ID)
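# A rough sketch (not part of this example) of reading the orders back
# afterwards, along the lines of the get_all_orders.py script referenced in
# the module docstring:
#
#     statement = dfp.FilterStatement('ORDER BY id ASC')
#     response = order_service.getOrdersByStatement(statement.ToStatement())
#     if 'results' in response:
#         for order in response['results']:
#             print order['name']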
| {
"content_hash": "0c132fa10118753b33bf18fa179cf9b6",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 70,
"avg_line_length": 27.65909090909091,
"alnum_prop": 0.6647493837304848,
"repo_name": "ya7lelkom/googleads-python-lib",
"id": "24d967e3e281e81469d6a69473ea9d3e2f8437bf",
"size": "1835",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "examples/dfp/v201505/order_service/create_orders.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "492"
},
{
"name": "HTML",
"bytes": "8336"
},
{
"name": "JavaScript",
"bytes": "504"
},
{
"name": "Python",
"bytes": "2535232"
}
],
"symlink_target": ""
} |
"""
File: settings.py
Author: William J. Bosl
Children's Hospital Boston
300 Longwood Avenue
Boston, MA 02115
Email: [email protected]
Web: http://chip.org
Copyright (C) 2011 William Bosl, Children's Hospital Boston Informatics Program (CHIP)
http://chip.org.
Purpose:
Django settings.py for MedAdherePredict project. See
http://www.smartplatforms.org/ for detailed information about SMArt applications.
License information should go here.
$Log: settings.py,v $
"""
# Imports
import os.path
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', '[email protected]'),
)
MANAGERS = ADMINS
# SMART Server Endpoint
ENDPOINT = {
"url": "http://localhost:7000",
"name": "SMART Sandbox API v1.0",
"app_id": "[email protected]",
"consumer_key": "[email protected]",
"consumer_secret": "smartapp-secret"
}
# Used to assign relative paths
SITE_ROOT = os.path.dirname(os.path.realpath(__file__))
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/New_York'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = os.path.join(SITE_ROOT, 'Media/')
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = 'http://localhost:8000/static/'
# The base URI for the NDF-RT drug information database web service. We don't use
# this currently, but it may be useful for getting drug class information in the future.
#NDF_RT = 'http://rxnav.nlm.nih.gov/REST/Ndfrt/version/'
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''
STATICFILES_DIRS = os.path.join(SITE_ROOT, 'static/')
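# NOTE: STATICFILES_DIRS is assigned a plain path string here so the data file
# paths below can be built from it; it is redefined as a tuple of additional
# static file locations further down.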
# Drug class information is contained here.
DRUGCLASSFILE = os.path.join(STATICFILES_DIRS, 'drugClass.csv')
# The prediction model (general linear model) parameters are contained here.
LOGREGFILE = os.path.join(STATICFILES_DIRS, 'genLinearModel.txt')
# Parameters used by adherence tests. Since these are debated in research
# and practice, they're placed here to enable future changes.
GOOD_MPR_THRESHOLD = 0.80
GAP_THRESHOLD = 30
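# A rough sketch (assumed here; the actual logic lives in the application code)
# of how these parameters are typically applied: a patient whose medication
# possession ratio (days supplied / days observed) is at or above
# GOOD_MPR_THRESHOLD counts as adherent, and a refill gap longer than
# GAP_THRESHOLD days is flagged:
#
#     def is_adherent(days_supplied, days_observed, longest_gap_days):
#         mpr = float(days_supplied) / days_observed
#         return mpr >= GOOD_MPR_THRESHOLD and longest_gap_days <= GAP_THRESHOLD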
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# URL prefix for admin static files -- CSS, JavaScript and images.
# Make sure to use a trailing slash.
# Examples: "http://foo.com/static/admin/", "/static/admin/".
ADMIN_MEDIA_PREFIX = '/static/admin/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = '_2%_03e2$%36(7%__wryy=fcppgh_wvgi0*)4c!i5uhh=nqvut'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'mpr_monitor.urls'
TEMPLATE_DIRS = (
os.path.join(os.path.dirname(__file__), 'templates'),
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Uncomment the next line to enable the admin:
# 'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'mail_admins': {
'level': 'ERROR',
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
| {
"content_hash": "6fad4a225e7eabba153a7077d742c7c4",
"timestamp": "",
"source": "github",
"line_count": 185,
"max_line_length": 90,
"avg_line_length": 32.77297297297297,
"alnum_prop": 0.7093847930067624,
"repo_name": "smart-classic/smart_sample_apps",
"id": "c4579dd7eac8c97ea38aa6a677bbca59ab0c13da",
"size": "6063",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mpr_monitor/settings.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "243587"
},
{
"name": "JavaScript",
"bytes": "5140276"
},
{
"name": "Perl",
"bytes": "635"
},
{
"name": "Python",
"bytes": "253358"
},
{
"name": "Ruby",
"bytes": "792"
},
{
"name": "Shell",
"bytes": "6751"
}
],
"symlink_target": ""
} |
import re
import sys
import logging
from ming.orm import session
from mock import patch, Mock
from . import base
from allura import model as M
log = logging.getLogger(__name__)
class CreateTroveCategoriesCommand(base.Command):
min_args = 1
max_args = None
usage = '<ini file>'
summary = 'Remove any existing trove categories and load new ones'
parser = base.Command.standard_parser(verbose=True)
# NOTE: order is important
    # To add a new migration, append its name to the following list
    # and create a method m__<migration_name>
migrations = [
'sync',
'set_parent_only',
'add_license',
'set_show_as_skills',
]
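    # For example (hypothetical), a migration named 'fix_fullpaths' would be
    # appended to the list above and implemented as:
    #
    #     def m__fix_fullpaths(self):
    #         # adjust existing categories here, e.g. via update_trove_cat()
    #         ...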
def create_trove_cat(self, cat_data):
data = {'trove_cat_id': cat_data[0],
'trove_parent_id': cat_data[1],
'shortname': cat_data[2],
'fullname': cat_data[3],
'fullpath': cat_data[4]}
if len(cat_data) > 5:
data['show_as_skill'] = cat_data[5]
M.TroveCategory(**data)
def update_trove_cat(self, trove_cat_id, attr_dict):
ts = M.TroveCategory.query.find(dict(trove_cat_id=trove_cat_id))
if ts.count() < 1:
sys.exit("Couldn't find TroveCategory with trove_cat_id=%s" %
trove_cat_id)
for t in ts:
for k, v in attr_dict.iteritems():
setattr(t, k, v)
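    # Example (hypothetical): self.update_trove_cat(617, dict(show_as_skill=False))
    # would update the category with trove_cat_id 617 created below.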
# patching to avoid a *lot* of event hooks firing, and taking a long long time
@patch.object(M.project.TroveCategoryMapperExtension, 'after_insert', Mock())
@patch.object(M.project.TroveCategoryMapperExtension, 'after_update', Mock())
@patch.object(M.project.TroveCategoryMapperExtension, 'after_delete', Mock())
def command(self):
self.basic_setup()
M.TroveCategory.query.remove()
self.create_trove_cat(
(617, 274, "kirghiz", "Kirghiz", "Translations :: Kirghiz", True))
self.create_trove_cat(
(372, 274, "croatian", "Croatian", "Translations :: Croatian", True))
self.create_trove_cat(
(351, 274, "thai", "Thai", "Translations :: Thai", True))
self.create_trove_cat(
(349, 274, "tamil", "Tamil", "Translations :: Tamil", True))
self.create_trove_cat(
(347, 274, "romanian", "Romanian", "Translations :: Romanian", True))
self.create_trove_cat(
(339, 274, "korean", "Korean", "Translations :: Korean", True))
self.create_trove_cat(
(632, 160, "cobol", "COBOL", "Programming Language :: COBOL", True))
self.create_trove_cat(
(598, 160, "aspectj", "AspectJ", "Programming Language :: AspectJ", True))
self.create_trove_cat(
(167, 160, "euler", "Euler", "Programming Language :: Euler", True))
self.create_trove_cat(
(185, 160, "shell", "Unix Shell", "Programming Language :: Unix Shell", True))
self.create_trove_cat(
(184, 160, "asp", "ASP", "Programming Language :: ASP", True))
self.create_trove_cat(
(273, 160, "Pike", "Pike", "Programming Language :: Pike", True))
self.create_trove_cat(
(271, 160, "csharp", "C#", "Programming Language :: C#", True))
self.create_trove_cat(
(170, 160, "lisp", "Lisp", "Programming Language :: Lisp", True))
self.create_trove_cat(
(169, 160, "fortran", "Fortran", "Programming Language :: Fortran", True))
self.create_trove_cat(
(625, 160, "simulink", "Simulink", "Programming Language :: Simulink", True))
self.create_trove_cat(
(626, 160, "matlab", "MATLAB", "Programming Language :: MATLAB", True))
self.create_trove_cat(
(1, 0, "audience", "Intended Audience", "Intended Audience", False))
self.create_trove_cat(
(618, 535, "nonprofit", "Non-Profit Organizations",
"Intended Audience :: by Industry or Sector :: Non-Profit Organizations", False))
self.create_trove_cat((599, 535, "aerospace", "Aerospace",
"Intended Audience :: by Industry or Sector :: Aerospace", False))
self.create_trove_cat((569, 535, "government", "Government",
"Intended Audience :: by Industry or Sector :: Government", False))
self.create_trove_cat(
(363, 535, "informationtechnology", "Information Technology",
"Intended Audience :: by Industry or Sector :: Information Technology", False))
self.create_trove_cat(
(361, 535, "financialinsurance", "Financial and Insurance Industry",
"Intended Audience :: by Industry or Sector :: Financial and Insurance Industry", False))
self.create_trove_cat(
(362, 535, "healthcareindustry", "Healthcare Industry",
"Intended Audience :: by Industry or Sector :: Healthcare Industry", False))
self.create_trove_cat((367, 535, "scienceresearch", "Science/Research",
"Intended Audience :: by Industry or Sector :: Science/Research", False))
self.create_trove_cat((359, 535, "customerservice", "Customer Service",
"Intended Audience :: by Industry or Sector :: Customer Service", False))
self.create_trove_cat((360, 535, "education", "Education",
"Intended Audience :: by Industry or Sector :: Education", False))
self.create_trove_cat((365, 535, "manufacturing", "Manufacturing",
"Intended Audience :: by Industry or Sector :: Manufacturing", False))
self.create_trove_cat(
(368, 535, "telecommunications", "Telecommunications Industry",
"Intended Audience :: by Industry or Sector :: Telecommunications Industry", False))
self.create_trove_cat(
(166, 160, "eiffel", "Eiffel", "Programming Language :: Eiffel", True))
self.create_trove_cat(
(550, 160, "oberon", "Oberon", "Programming Language :: Oberon", True))
self.create_trove_cat(
(553, 160, "realbasic", "REALbasic", "Programming Language :: REALbasic", True))
self.create_trove_cat(
(178, 160, "python", "Python", "Programming Language :: Python", True))
self.create_trove_cat(
(179, 160, "rexx", "Rexx", "Programming Language :: Rexx", True))
self.create_trove_cat(
(177, 160, "prolog", "Prolog", "Programming Language :: Prolog", True))
self.create_trove_cat(
(176, 160, "perl", "Perl", "Programming Language :: Perl", True))
self.create_trove_cat(
(175, 160, "pascal", "Pascal", "Programming Language :: Pascal", True))
self.create_trove_cat(
(536, 534, "enduser_advanced", "Advanced End Users",
"Intended Audience :: by End-User Class :: Advanced End Users", False))
self.create_trove_cat((4, 534, "sysadmins", "System Administrators",
"Intended Audience :: by End-User Class :: System Administrators", False))
self.create_trove_cat(
(471, 456, "ui_swing", "Java Swing", "User Interface :: Graphical :: Java Swing", True))
self.create_trove_cat(
(469, 456, "ui_dotnet", ".NET/Mono", "User Interface :: Graphical :: .NET/Mono", True))
self.create_trove_cat(
(231, 456, "gnome", "Gnome", "User Interface :: Graphical :: Gnome", True))
self.create_trove_cat((229, 456, "x11", "X Window System (X11)",
"User Interface :: Graphical :: X Window System (X11)", True))
self.create_trove_cat(
(475, 456, "ui_opengl", "OpenGL", "User Interface :: Graphical :: OpenGL", True))
self.create_trove_cat(
(474, 456, "ui_framebuffer", "Framebuffer", "User Interface :: Graphical :: Framebuffer", True))
self.create_trove_cat(
(472, 456, "ui_swt", "Java SWT", "User Interface :: Graphical :: Java SWT", True))
self.create_trove_cat(
(470, 456, "ui_awt", "Java AWT", "User Interface :: Graphical :: Java AWT", True))
self.create_trove_cat((230, 456, "win32", "Win32 (MS Windows)",
"User Interface :: Graphical :: Win32 (MS Windows)", True))
self.create_trove_cat(
(232, 456, "kde", "KDE", "User Interface :: Graphical :: KDE", True))
self.create_trove_cat((310, 456, "cocoa", "Cocoa (MacOS X)",
"User Interface :: Graphical :: Cocoa (MacOS X)", True))
self.create_trove_cat(
(476, 456, "ui_tabletpc", "TabletPC", "User Interface :: Graphical :: TabletPC", True))
self.create_trove_cat((314, 456, "handhelds", "Handheld/Mobile/PDA",
"User Interface :: Graphical :: Handheld/Mobile/PDA", True))
self.create_trove_cat(
(462, 225, "ui_groupingdesc", "Grouping and Descriptive Categories (UI)",
"User Interface :: Grouping and Descriptive Categories (UI)", True))
self.create_trove_cat(
(466, 462, "ui_meta_3d", "Project is a 3D engine",
"User Interface :: Grouping and Descriptive Categories (UI) :: Project is a 3D engine", True))
self.create_trove_cat(
(464, 462, "ui_meta_template", "Project is a templating system",
"User Interface :: Grouping and Descriptive Categories (UI) :: Project is a templating system", True))
self.create_trove_cat(
(463, 462, "ui_meta_system", "Project is a user interface (UI) system",
"User Interface :: Grouping and Descriptive Categories (UI) :: Project is a user interface (UI) system",
True))
self.create_trove_cat(
(465, 462, "ui_meta_windowmanager", "Project is a window manager",
"User Interface :: Grouping and Descriptive Categories (UI) :: Project is a window manager", True))
self.create_trove_cat(
(467, 462, "ui_meta_toolkit", "Project is a graphics toolkit",
"User Interface :: Grouping and Descriptive Categories (UI) :: Project is a graphics toolkit", True))
self.create_trove_cat(
(468, 462, "ui_meta_remotecontrol", "Project is a remote control application",
"User Interface :: Grouping and Descriptive Categories (UI) :: Project is a remote control application",
True))
self.create_trove_cat(
(237, 225, "web", "Web-based", "User Interface :: Web-based", True))
self.create_trove_cat((238, 225, "daemon", "Non-interactive (Daemon)",
"User Interface :: Non-interactive (Daemon)", True))
self.create_trove_cat(
(457, 225, "textual_ui", "Textual", "User Interface :: Textual", True))
self.create_trove_cat((460, 457, "ui_consoleterm", "Console/Terminal",
"User Interface :: Textual :: Console/Terminal", True))
self.create_trove_cat(
(459, 457, "ui_commandline", "Command-line", "User Interface :: Textual :: Command-line", True))
self.create_trove_cat(
(225, 0, "environment", "User Interface", "User Interface", True))
self.create_trove_cat(
(461, 225, "ui_plugins", "Plugins", "User Interface :: Plugins", True))
self.create_trove_cat(
(583, 461, "eclipse_plugins", "Eclipse", "User Interface :: Plugins :: Eclipse", True))
self.create_trove_cat(
(458, 225, "ui_toolkit", "Toolkits/Libraries", "User Interface :: Toolkits/Libraries", True))
self.create_trove_cat((495, 458, "ui_othertoolkit", "Other toolkit",
"User Interface :: Toolkits/Libraries :: Other toolkit", True))
self.create_trove_cat((493, 458, "ui_motif", "Motif/LessTif",
"User Interface :: Toolkits/Libraries :: Motif/LessTif", True))
self.create_trove_cat((491, 458, "ui_crystalspace", "Crystal Space",
"User Interface :: Toolkits/Libraries :: Crystal Space", True))
self.create_trove_cat((489, 458, "ui_clanlib", "ClanLib",
"User Interface :: Toolkits/Libraries :: ClanLib", True))
self.create_trove_cat(
(516, 500, "db_group_objmap", "Project is a relational object mapper",
"Database Environment :: Grouping and Descriptive Categories (DB) :: Project is a relational object mapper", # nopep8
True))
self.create_trove_cat(
(487, 458, "ui_ggi", "GGI", "User Interface :: Toolkits/Libraries :: GGI", True))
self.create_trove_cat((485, 458, "ui_directx", "DirectX",
"User Interface :: Toolkits/Libraries :: DirectX", True))
self.create_trove_cat((483, 458, "ui_svgalib", "SVGAlib",
"User Interface :: Toolkits/Libraries :: SVGAlib", True))
self.create_trove_cat((481, 458, "ui_wxwidgets", "wxWidgets",
"User Interface :: Toolkits/Libraries :: wxWidgets", True))
self.create_trove_cat(
(511, 500, "db_group_mgmt", "Project is a database management tool",
"Database Environment :: Grouping and Descriptive Categories (DB) :: Project is a database management tool", # nopep8
True))
self.create_trove_cat(
(479, 458, "ui_qt", "Qt", "User Interface :: Toolkits/Libraries :: Qt", True))
self.create_trove_cat(
(477, 458, "ui_gtk", "GTK+", "User Interface :: Toolkits/Libraries :: GTK+", True))
self.create_trove_cat(
(513, 500, "db_group_netdbms", "Project is a network-based DBMS (database system)",
"Database Environment :: Grouping and Descriptive Categories (DB) :: Project is a network-based DBMS (database system)", # nopep8
True))
self.create_trove_cat(
(228, 458, "newt", "Newt", "User Interface :: Toolkits/Libraries :: Newt", True))
self.create_trove_cat((227, 458, "curses", "Curses/Ncurses",
"User Interface :: Toolkits/Libraries :: Curses/Ncurses", True))
self.create_trove_cat(
(515, 500, "db_group_conv", "Project is a database conversion tool",
"Database Environment :: Grouping and Descriptive Categories (DB) :: Project is a database conversion tool", # nopep8
True))
self.create_trove_cat(
(478, 458, "ui_tk", "Tk", "User Interface :: Toolkits/Libraries :: Tk", True))
self.create_trove_cat(
(480, 458, "ui_sdl", "SDL", "User Interface :: Toolkits/Libraries :: SDL", True))
self.create_trove_cat((33, 28, "postoffice", "Post-Office",
"Topic :: Communications :: Email :: Post-Office", True))
self.create_trove_cat(
(514, 500, "db_group_propfmt", "Project is a tool for a proprietary database file format",
"Database Environment :: Grouping and Descriptive Categories (DB) :: Project is a tool for a proprietary database file format", # nopep8
True))
self.create_trove_cat(
(482, 458, "ui_aalib", "AAlib", "User Interface :: Toolkits/Libraries :: AAlib", True))
self.create_trove_cat(
(484, 458, "ui_fltk", "FLTK", "User Interface :: Toolkits/Libraries :: FLTK", True))
self.create_trove_cat(
(512, 500, "db_group_filedbms", "Project is a file-based DBMS (database system)",
"Database Environment :: Grouping and Descriptive Categories (DB) :: Project is a file-based DBMS (database system)", # nopep8
True))
self.create_trove_cat(
(486, 458, "ui_plib", "Plib", "User Interface :: Toolkits/Libraries :: Plib", True))
self.create_trove_cat(
(488, 458, "ui_glide", "Glide", "User Interface :: Toolkits/Libraries :: Glide", True))
self.create_trove_cat(
(510, 500, "db_group_api", "Project is a database abstraction layer (API)",
"Database Environment :: Grouping and Descriptive Categories (DB) :: Project is a database abstraction layer (API)", # nopep8
True))
self.create_trove_cat(
(490, 458, "ui_glut", "GLUT", "User Interface :: Toolkits/Libraries :: GLUT", True))
self.create_trove_cat((492, 458, "ui_allegro", "Allegro",
"User Interface :: Toolkits/Libraries :: Allegro", True))
self.create_trove_cat(
(500, 496, "db_grouping", "Grouping and Descriptive Categories (DB)",
"Database Environment :: Grouping and Descriptive Categories (DB)", True))
self.create_trove_cat(
(494, 458, "ui_quartz", "Quartz", "User Interface :: Toolkits/Libraries :: Quartz", True))
self.create_trove_cat(
(456, 225, "graphical_ui", "Graphical", "User Interface :: Graphical", True))
self.create_trove_cat(
(276, 274, "french", "French", "Translations :: French", True))
self.create_trove_cat((473, 456, "ui_carbon", "Carbon (Mac OS X)",
"User Interface :: Graphical :: Carbon (Mac OS X)", True))
self.create_trove_cat(
(535, 1, "by_industrysector", "by Industry or Sector",
"Intended Audience :: by Industry or Sector", False))
self.create_trove_cat((364, 535, "legalindustry", "Legal Industry",
"Intended Audience :: by Industry or Sector :: Legal Industry", False))
self.create_trove_cat(
(353, 274, "ukrainian", "Ukrainian", "Translations :: Ukrainian", True))
self.create_trove_cat(
(330, 274, "dutch", "Dutch", "Translations :: Dutch", True))
self.create_trove_cat(
(343, 274, "persian", "Persian", "Translations :: Persian", True))
self.create_trove_cat(
(344, 274, "polish", "Polish", "Translations :: Polish", True))
self.create_trove_cat(
(455, 274, "irish_gaelic", "Irish Gaelic", "Translations :: Irish Gaelic", True))
self.create_trove_cat(
(413, 274, "lithuanian", "Lithuanian", "Translations :: Lithuanian", True))
self.create_trove_cat(
(414, 274, "albanian", "Albanian", "Translations :: Albanian", True))
self.create_trove_cat(
(415, 274, "malagasy", "Malagasy", "Translations :: Malagasy", True))
self.create_trove_cat(
(416, 274, "mongolian", "Mongolian", "Translations :: Mongolian", True))
self.create_trove_cat(
(417, 274, "maltese", "Maltese", "Translations :: Maltese", True))
self.create_trove_cat(
(380, 274, "slovenian", "Slovene", "Translations :: Slovene", True))
self.create_trove_cat(
(374, 274, "icelandic", "Icelandic", "Translations :: Icelandic", True))
self.create_trove_cat(
(376, 274, "macedonian", "Macedonian", "Translations :: Macedonian", True))
self.create_trove_cat(
(377, 274, "latin", "Latin", "Translations :: Latin", True))
self.create_trove_cat(
(375, 274, "latvian", "Latvian", "Translations :: Latvian", True))
self.create_trove_cat(
(373, 274, "czech", "Czech", "Translations :: Czech", True))
self.create_trove_cat(
(369, 274, "afrikaans", "Afrikaans", "Translations :: Afrikaans", True))
self.create_trove_cat(
(357, 274, "finnish", "Finnish", "Translations :: Finnish", True))
self.create_trove_cat(
(186, 160, "visualbasic", "Visual Basic", "Programming Language :: Visual Basic", True))
self.create_trove_cat((505, 499, "db_pear", "PHP Pear::DB",
"Database Environment :: Database API :: PHP Pear::DB", True))
self.create_trove_cat((507, 499, "db_api_xml", "XML-based",
"Database Environment :: Database API :: XML-based", True))
self.create_trove_cat((509, 499, "db_api_other", "Other API",
"Database Environment :: Database API :: Other API", True))
self.create_trove_cat(
(532, 497, "db_net_hsql", "HSQL", "Database Environment :: Network-based DBMS :: HSQL", True))
self.create_trove_cat(
(547, 160, "applescript", "AppleScript", "Programming Language :: AppleScript", True))
self.create_trove_cat(
(173, 160, "modula", "Modula", "Programming Language :: Modula", True))
self.create_trove_cat(
(337, 274, "italian", "Italian", "Translations :: Italian", True))
self.create_trove_cat(
(333, 274, "hebrew", "Hebrew", "Translations :: Hebrew", True))
self.create_trove_cat(
(331, 274, "esperanto", "Esperanto", "Translations :: Esperanto", True))
self.create_trove_cat(
(329, 274, "catalan", "Catalan", "Translations :: Catalan", True))
self.create_trove_cat(
(327, 274, "bengali", "Bengali", "Translations :: Bengali", True))
self.create_trove_cat(
(332, 274, "greek", "Greek", "Translations :: Greek", True))
self.create_trove_cat(
(341, 274, "marathi", "Marathi", "Translations :: Marathi", True))
self.create_trove_cat(
(355, 274, "vietnamese", "Vietnamese", "Translations :: Vietnamese", True))
self.create_trove_cat(
(275, 274, "english", "English", "Translations :: English", True))
self.create_trove_cat(
(345, 274, "portuguese", "Portuguese", "Translations :: Portuguese", True))
self.create_trove_cat(
(171, 160, "logo", "Logo", "Programming Language :: Logo", True))
self.create_trove_cat(
(502, 499, "db_api_jdbc", "JDBC", "Database Environment :: Database API :: JDBC", True))
self.create_trove_cat((504, 499, "db_api_perldbi", "Perl DBI/DBD",
"Database Environment :: Database API :: Perl DBI/DBD", True))
self.create_trove_cat(
(274, 0, "natlanguage", "Translations", "Translations", True))
self.create_trove_cat((506, 499, "db_python", "Python Database API",
"Database Environment :: Database API :: Python Database API", True))
self.create_trove_cat((526, 497, "db_net_oracle", "Oracle",
"Database Environment :: Network-based DBMS :: Oracle", True))
self.create_trove_cat((524, 497, "db_net_mysql", "MySQL",
"Database Environment :: Network-based DBMS :: MySQL", True))
self.create_trove_cat((525, 497, "db_net_pgsql", "PostgreSQL (pgsql)",
"Database Environment :: Network-based DBMS :: PostgreSQL (pgsql)", True))
self.create_trove_cat((527, 497, "db_net_ibmdb2", "IBM DB2",
"Database Environment :: Network-based DBMS :: IBM DB2", True))
self.create_trove_cat((529, 497, "db_net_sybase", "Sybase",
"Database Environment :: Network-based DBMS :: Sybase", True))
self.create_trove_cat((531, 497, "db_net_sqlite", "SQLite",
"Database Environment :: Network-based DBMS :: SQLite", True))
self.create_trove_cat(
(533, 497, "db_net_other", "Other network-based DBMS",
"Database Environment :: Network-based DBMS :: Other network-based DBMS", True))
self.create_trove_cat(
(497, 496, "db_networkbased", "Network-based DBMS",
"Database Environment :: Network-based DBMS", True))
self.create_trove_cat(
(426, 199, "os_emu_api", "Emulation and API Compatibility",
"Operating System :: Emulation and API Compatibility", True))
self.create_trove_cat((311, 236, "macos9", "Apple Mac OS Classic",
"Operating System :: Other Operating Systems :: Apple Mac OS Classic", True))
self.create_trove_cat(
(224, 236, "beos", "BeOS", "Operating System :: Other Operating Systems :: BeOS", True))
self.create_trove_cat(
(215, 236, "msdos", "MS-DOS", "Operating System :: Other Operating Systems :: MS-DOS", True))
self.create_trove_cat(
(421, 236, "mswin_95", "Win95", "Operating System :: Other Operating Systems :: Win95", True))
self.create_trove_cat((508, 499, "db_api_sql", "SQL-based",
"Database Environment :: Database API :: SQL-based", True))
self.create_trove_cat(
(499, 496, "db_api", "Database API", "Database Environment :: Database API", True))
self.create_trove_cat(
(378, 274, "serbian", "Serbian", "Translations :: Serbian", True))
self.create_trove_cat(
(379, 274, "slovak", "Slovak", "Translations :: Slovak", True))
self.create_trove_cat(
(371, 274, "chinesetraditional", "Chinese (Traditional)", "Translations :: Chinese (Traditional)", True))
self.create_trove_cat(
(410, 274, "belarusian", "Belarusian", "Translations :: Belarusian", True))
self.create_trove_cat(
(411, 274, "estonian", "Estonian", "Translations :: Estonian", True))
self.create_trove_cat(
(412, 274, "galician", "Galician", "Translations :: Galician", True))
self.create_trove_cat(
(34, 33, "pop3", "POP3", "Topic :: Communications :: Email :: Post-Office :: POP3", True))
self.create_trove_cat(
(35, 33, "imap", "IMAP", "Topic :: Communications :: Email :: Post-Office :: IMAP", True))
self.create_trove_cat(
(29, 28, "filters", "Filters", "Topic :: Communications :: Email :: Filters", True))
self.create_trove_cat((30, 28, "listservers", "Mailing List Servers",
"Topic :: Communications :: Email :: Mailing List Servers", True))
self.create_trove_cat(
(597, 80, "card_games", "Card Games", "Topic :: Games/Entertainment :: Card Games", True))
self.create_trove_cat(
(63, 18, "editors", "Text Editors", "Topic :: Text Editors", True))
self.create_trove_cat((366, 535, "religion", "Religion",
"Intended Audience :: by Industry or Sector :: Religion", False))
self.create_trove_cat(
(534, 1, "by_enduser", "by End-User Class", "Intended Audience :: by End-User Class", False))
self.create_trove_cat(
(528, 497, "db_net_firebird", "Firebird/InterBase",
"Database Environment :: Network-based DBMS :: Firebird/InterBase", True))
self.create_trove_cat((3, 534, "developers", "Developers",
"Intended Audience :: by End-User Class :: Developers", False))
self.create_trove_cat(
(530, 497, "db_net_mssql", "Microsoft SQL Server",
"Database Environment :: Network-based DBMS :: Microsoft SQL Server", True))
self.create_trove_cat((2, 534, "endusers", "End Users/Desktop",
"Intended Audience :: by End-User Class :: End Users/Desktop", False))
self.create_trove_cat(
(498, 496, "db_filebased", "File-based DBMS", "Database Environment :: File-based DBMS", True))
self.create_trove_cat((537, 534, "enduser_qa", "Quality Engineers",
"Intended Audience :: by End-User Class :: Quality Engineers", False))
self.create_trove_cat(
(5, 1, "other", "Other Audience", "Intended Audience :: Other Audience", False))
self.create_trove_cat(
(517, 498, "db_file_dbm", "Berkeley/Sleepycat/Gdbm (DBM)",
"Database Environment :: File-based DBMS :: Berkeley/Sleepycat/Gdbm (DBM)", True))
self.create_trove_cat(
(358, 6, "inactive", "7 - Inactive", "Development Status :: 7 - Inactive", False))
self.create_trove_cat((520, 498, "db_file_palm", "PalmOS PDB",
"Database Environment :: File-based DBMS :: PalmOS PDB", True))
self.create_trove_cat(
(523, 498, "db_file_other", "Other file-based DBMS",
"Database Environment :: File-based DBMS :: Other file-based DBMS", True))
self.create_trove_cat(
(165, 160, "cpp", "C++", "Programming Language :: C++", True))
self.create_trove_cat(
(163, 160, "ada", "Ada", "Programming Language :: Ada", True))
self.create_trove_cat(
(328, 274, "bulgarian", "Bulgarian", "Translations :: Bulgarian", True))
self.create_trove_cat(
(546, 274, "swahili", "Swahili", "Translations :: Swahili", True))
self.create_trove_cat(
(348, 274, "swedish", "Swedish", "Translations :: Swedish", True))
self.create_trove_cat(
(350, 274, "telugu", "Telugu", "Translations :: Telugu", True))
self.create_trove_cat(
(162, 160, "assembly", "Assembly", "Programming Language :: Assembly", True))
self.create_trove_cat(
(164, 160, "c", "C", "Programming Language :: C", True))
self.create_trove_cat(
(161, 160, "apl", "APL", "Programming Language :: APL", True))
self.create_trove_cat(
(267, 160, "zope", "Zope", "Programming Language :: Zope", True))
self.create_trove_cat(
(264, 160, "erlang", "Erlang", "Programming Language :: Erlang", True))
self.create_trove_cat(
(263, 160, "euphoria", "Euphoria", "Programming Language :: Euphoria", True))
self.create_trove_cat(
(183, 160, "php", "PHP", "Programming Language :: PHP", True))
self.create_trove_cat(
(182, 160, "tcl", "Tcl", "Programming Language :: Tcl", True))
self.create_trove_cat(
(181, 160, "smalltalk", "Smalltalk", "Programming Language :: Smalltalk", True))
self.create_trove_cat(
(180, 160, "simula", "Simula", "Programming Language :: Simula", True))
self.create_trove_cat(
(174, 160, "objectivec", "Objective C", "Programming Language :: Objective C", True))
self.create_trove_cat((560, 160, "xsl", "XSL (XSLT/XPath/XSL-FO)",
"Programming Language :: XSL (XSLT/XPath/XSL-FO)", True))
self.create_trove_cat(
(293, 160, "ruby", "Ruby", "Programming Language :: Ruby", True))
self.create_trove_cat(
(265, 160, "Delphi", "Delphi/Kylix", "Programming Language :: Delphi/Kylix", True))
self.create_trove_cat(
(281, 160, "REBOL", "REBOL", "Programming Language :: REBOL", True))
self.create_trove_cat((454, 160, "ocaml", "OCaml (Objective Caml)",
"Programming Language :: OCaml (Objective Caml)", True))
self.create_trove_cat(
(453, 160, "vb_net", "Visual Basic .NET", "Programming Language :: Visual Basic .NET", True))
self.create_trove_cat(
(452, 160, "visual_foxpro", "Visual FoxPro", "Programming Language :: Visual FoxPro", True))
self.create_trove_cat(
(451, 160, "haskell", "Haskell", "Programming Language :: Haskell", True))
self.create_trove_cat(
(450, 160, "lua", "Lua", "Programming Language :: Lua", True))
self.create_trove_cat(
(280, 160, "JavaScript", "JavaScript", "Programming Language :: JavaScript", True))
self.create_trove_cat(
(262, 160, "coldfusion", "Cold Fusion", "Programming Language :: Cold Fusion", True))
self.create_trove_cat(
(261, 160, "xbasic", "XBasic", "Programming Language :: XBasic", True))
self.create_trove_cat(
(258, 160, "objectpascal", "Object Pascal", "Programming Language :: Object Pascal", True))
self.create_trove_cat(
(539, 160, "proglang_basic", "BASIC", "Programming Language :: BASIC", True))
self.create_trove_cat(
(543, 160, "groovy", "Groovy", "Programming Language :: Groovy", True))
self.create_trove_cat(
(545, 160, "proglang_labview", "LabVIEW", "Programming Language :: LabVIEW", True))
self.create_trove_cat(
(548, 160, "vbscript", "VBScript", "Programming Language :: VBScript", True))
self.create_trove_cat(
(552, 160, "d_proglang", "D", "Programming Language :: D", True))
self.create_trove_cat(
(551, 160, "vhdl_verilog", "VHDL/Verilog", "Programming Language :: VHDL/Verilog", True))
self.create_trove_cat(
(549, 160, "proglang_lpc", "LPC", "Programming Language :: LPC", True))
self.create_trove_cat(
(544, 160, "yacc", "Yacc", "Programming Language :: Yacc", True))
self.create_trove_cat(
(352, 274, "turkish", "Turkish", "Translations :: Turkish", True))
self.create_trove_cat(
(354, 274, "urdu", "Urdu", "Translations :: Urdu", True))
self.create_trove_cat(
(160, 0, "language", "Programming Language", "Programming Language", True))
self.create_trove_cat(
(542, 160, "emacs_lisp", "Emacs-Lisp", "Programming Language :: Emacs-Lisp", True))
self.create_trove_cat(
(540, 160, "clisp", "Common Lisp", "Programming Language :: Common Lisp", True))
self.create_trove_cat(
(12, 6, "mature", "6 - Mature", "Development Status :: 6 - Mature", False))
self.create_trove_cat(
(538, 160, "awk", "AWK", "Programming Language :: AWK", True))
self.create_trove_cat(
(572, 160, "jsp", "JSP", "Programming Language :: JSP", True))
self.create_trove_cat(
(172, 160, "ml", "Standard ML", "Programming Language :: Standard ML", True))
self.create_trove_cat(
(255, 160, "progress", "PROGRESS", "Programming Language :: PROGRESS", True))
self.create_trove_cat(
(254, 160, "plsql", "PL/SQL", "Programming Language :: PL/SQL", True))
self.create_trove_cat(
(242, 160, "scheme", "Scheme", "Programming Language :: Scheme", True))
self.create_trove_cat(
(624, 160, "idl", "IDL", "Programming Language :: IDL", True))
self.create_trove_cat(
(198, 160, "java", "Java", "Programming Language :: Java", True))
self.create_trove_cat(
(589, 160, "asp_dot_net", "ASP.NET", "Programming Language :: ASP.NET", True))
self.create_trove_cat(
(608, 160, "mumps", "MUMPS", "Programming Language :: MUMPS", True))
self.create_trove_cat(
(541, 160, "dylan", "Dylan", "Programming Language :: Dylan", True))
self.create_trove_cat(
(573, 160, "s_slash_r", "S/R", "Programming Language :: S/R", True))
self.create_trove_cat(
(584, 160, "actionscript", "ActionScript", "Programming Language :: ActionScript", True))
self.create_trove_cat(
(168, 160, "forth", "Forth", "Programming Language :: Forth", True))
self.create_trove_cat(
(334, 274, "hindi", "Hindi", "Translations :: Hindi", True))
self.create_trove_cat(
(336, 274, "indonesian", "Indonesian", "Translations :: Indonesian", True))
self.create_trove_cat((521, 498, "db_file_flat", "Flat-file",
"Database Environment :: File-based DBMS :: Flat-file", True))
self.create_trove_cat((519, 498, "db_file_xbase", "xBase",
"Database Environment :: File-based DBMS :: xBase", True))
self.create_trove_cat(
(338, 274, "javanese", "Javanese", "Translations :: Javanese", True))
self.create_trove_cat((518, 498, "db_msaccess", "Microsoft Access",
"Database Environment :: File-based DBMS :: Microsoft Access", True))
self.create_trove_cat(
(522, 498, "db_file_proprietary", "Proprietary file format",
"Database Environment :: File-based DBMS :: Proprietary file format", True))
self.create_trove_cat(
(496, 0, "root_database", "Database Environment", "Database Environment", True))
self.create_trove_cat(
(501, 499, "db_api_odbc", "ODBC", "Database Environment :: Database API :: ODBC", True))
self.create_trove_cat(
(503, 499, "db_adodb", "ADOdb", "Database Environment :: Database API :: ADOdb", True))
self.create_trove_cat(
(340, 274, "malay", "Malay", "Translations :: Malay", True))
self.create_trove_cat(
(6, 0, "developmentstatus", "Development Status", "Development Status", False))
self.create_trove_cat(
(342, 274, "norwegian", "Norwegian", "Translations :: Norwegian", True))
self.create_trove_cat(
(381, 274, "portuguesebrazilian", "Brazilian Portuguese", "Translations :: Brazilian Portuguese", True))
self.create_trove_cat(
(382, 274, "chinesesimplified", "Chinese (Simplified)", "Translations :: Chinese (Simplified)", True))
self.create_trove_cat(
(356, 274, "danish", "Danish", "Translations :: Danish", True))
self.create_trove_cat(
(346, 274, "panjabi", "Panjabi", "Translations :: Panjabi", True))
self.create_trove_cat(
(370, 274, "bosnian", "Bosnian", "Translations :: Bosnian", True))
self.create_trove_cat(
(279, 274, "german", "German", "Translations :: German", True))
self.create_trove_cat(
(278, 274, "japanese", "Japanese", "Translations :: Japanese", True))
self.create_trove_cat(
(277, 274, "spanish", "Spanish", "Translations :: Spanish", True))
self.create_trove_cat((11, 6, "production", "5 - Production/Stable",
"Development Status :: 5 - Production/Stable", False))
self.create_trove_cat(
(10, 6, "beta", "4 - Beta", "Development Status :: 4 - Beta", False))
self.create_trove_cat(
(9, 6, "alpha", "3 - Alpha", "Development Status :: 3 - Alpha", False))
self.create_trove_cat(
(8, 6, "prealpha", "2 - Pre-Alpha", "Development Status :: 2 - Pre-Alpha", False))
self.create_trove_cat(
(7, 6, "planning", "1 - Planning", "Development Status :: 1 - Planning", False))
self.create_trove_cat(
(295, 274, "russian", "Russian", "Translations :: Russian", True))
self.create_trove_cat(
(326, 274, "arabic", "Arabic", "Translations :: Arabic", True))
self.create_trove_cat(
(335, 274, "hungarian", "Hungarian", "Translations :: Hungarian", True))
self.create_trove_cat((13, 0, "license", "License", "License", False))
self.create_trove_cat(
(14, 13, "osi", "OSI-Approved Open Source", "License :: OSI-Approved Open Source", False))
self.create_trove_cat((388, 14, "osl", "Open Software License",
"License :: OSI-Approved Open Source :: Open Software License", False))
self.create_trove_cat((321, 14, "motosoto", "Motosoto License",
"License :: OSI-Approved Open Source :: Motosoto License", False))
self.create_trove_cat(
(325, 14, "attribut", "Attribution Assurance License",
"License :: OSI-Approved Open Source :: Attribution Assurance License", False))
self.create_trove_cat(
(304, 14, "mpl", "Mozilla Public License 1.0 (MPL)",
"License :: OSI-Approved Open Source :: Mozilla Public License 1.0 (MPL)", False))
self.create_trove_cat(
(398, 14, "plan9", "Lucent Public License (Plan9)",
"License :: OSI-Approved Open Source :: Lucent Public License (Plan9)", False))
self.create_trove_cat(
(187, 14, "bsd", "BSD License", "License :: OSI-Approved Open Source :: BSD License", False))
self.create_trove_cat(
(393, 14, "historical", "Historical Permission Notice and Disclaimer",
"License :: OSI-Approved Open Source :: Historical Permission Notice and Disclaimer", False))
self.create_trove_cat(
(395, 14, "real", "RealNetworks Public Source License V1.0",
"License :: OSI-Approved Open Source :: RealNetworks Public Source License V1.0", False))
self.create_trove_cat((396, 14, "rpl", "Reciprocal Public License",
"License :: OSI-Approved Open Source :: Reciprocal Public License", False))
self.create_trove_cat((392, 14, "eiffel2", "Eiffel Forum License V2.0",
"License :: OSI-Approved Open Source :: Eiffel Forum License V2.0", False))
self.create_trove_cat(
(320, 14, "w3c", "W3C License", "License :: OSI-Approved Open Source :: W3C License", False))
self.create_trove_cat((400, 14, "frameworx", "Frameworx Open License",
"License :: OSI-Approved Open Source :: Frameworx Open License", False))
self.create_trove_cat(
(194, 14, "python", "Python License (CNRI Python License)",
"License :: OSI-Approved Open Source :: Python License (CNRI Python License)", False))
self.create_trove_cat((296, 14, "apache", "Apache Software License",
"License :: OSI-Approved Open Source :: Apache Software License", False))
self.create_trove_cat(
(298, 14, "sissl", "Sun Industry Standards Source License (SISSL)",
"License :: OSI-Approved Open Source :: Sun Industry Standards Source License (SISSL)", False))
self.create_trove_cat(
(196, 13, "other", "Other/Proprietary License", "License :: Other/Proprietary License", False))
self.create_trove_cat(
(197, 13, "publicdomain", "Public Domain", "License :: Public Domain", False))
self.create_trove_cat((301, 14, "nokia", "Nokia Open Source License",
"License :: OSI-Approved Open Source :: Nokia Open Source License", False))
self.create_trove_cat((319, 14, "eiffel", "Eiffel Forum License",
"License :: OSI-Approved Open Source :: Eiffel Forum License", False))
self.create_trove_cat((318, 14, "sunpublic", "Sun Public License",
"License :: OSI-Approved Open Source :: Sun Public License", False))
self.create_trove_cat((190, 14, "qpl", "Qt Public License (QPL)",
"License :: OSI-Approved Open Source :: Qt Public License (QPL)", False))
self.create_trove_cat(
(390, 14, "oclc", "OCLC Research Public License 2.0",
"License :: OSI-Approved Open Source :: OCLC Research Public License 2.0", False))
self.create_trove_cat(
(407, 14, "nasalicense", "NASA Open Source Agreement",
"License :: OSI-Approved Open Source :: NASA Open Source Agreement", False))
self.create_trove_cat(
(406, 14, "eclipselicense", "Eclipse Public License",
"License :: OSI-Approved Open Source :: Eclipse Public License", False))
self.create_trove_cat(
(316, 14, "opengroup", "Open Group Test Suite License",
"License :: OSI-Approved Open Source :: Open Group Test Suite License", False))
self.create_trove_cat((300, 14, "jabber", "Jabber Open Source License",
"License :: OSI-Approved Open Source :: Jabber Open Source License", False))
self.create_trove_cat(
(297, 14, "vovida", "Vovida Software License 1.0",
"License :: OSI-Approved Open Source :: Vovida Software License 1.0", False))
self.create_trove_cat((324, 14, "afl", "Academic Free License (AFL)",
"License :: OSI-Approved Open Source :: Academic Free License (AFL)", False))
self.create_trove_cat(
(189, 14, "psfl", "Python Software Foundation License",
"License :: OSI-Approved Open Source :: Python Software Foundation License", False))
self.create_trove_cat(
(193, 14, "rscpl", "Ricoh Source Code Public License",
"License :: OSI-Approved Open Source :: Ricoh Source Code Public License", False))
self.create_trove_cat((17, 14, "artistic", "Artistic License",
"License :: OSI-Approved Open Source :: Artistic License", False))
self.create_trove_cat(
(389, 14, "sybase", "Sybase Open Watcom Public License",
"License :: OSI-Approved Open Source :: Sybase Open Watcom Public License", False))
self.create_trove_cat(
(391, 14, "wxwindows", "wxWindows Library Licence",
"License :: OSI-Approved Open Source :: wxWindows Library Licence", False))
self.create_trove_cat((397, 14, "entessa", "Entessa Public License",
"License :: OSI-Approved Open Source :: Entessa Public License", False))
self.create_trove_cat(
(16, 14, "lgpl", "GNU Library or Lesser General Public License (LGPL)",
"License :: OSI-Approved Open Source :: GNU Library or Lesser General Public License (LGPL)", False))
self.create_trove_cat(
(629, 14, "educom", "Educational Community License",
"License :: OSI-Approved Open Source :: Educational Community License", False))
self.create_trove_cat(
(15, 14, "gpl", "GNU General Public License (GPL)",
"License :: OSI-Approved Open Source :: GNU General Public License (GPL)", False))
self.create_trove_cat((191, 14, "ibm", "IBM Public License",
"License :: OSI-Approved Open Source :: IBM Public License", False))
self.create_trove_cat(
(192, 14, "cvw", "MITRE Collaborative Virtual Workspace License (CVW)",
"License :: OSI-Approved Open Source :: MITRE Collaborative Virtual Workspace License (CVW)", False))
self.create_trove_cat((299, 14, "iosl", "Intel Open Source License",
"License :: OSI-Approved Open Source :: Intel Open Source License", False))
self.create_trove_cat((399, 14, "php-license", "PHP License",
"License :: OSI-Approved Open Source :: PHP License", False))
self.create_trove_cat(
(188, 14, "mit", "MIT License", "License :: OSI-Approved Open Source :: MIT License", False))
self.create_trove_cat(
(405, 14, "public102", "Lucent Public License Version 1.02",
"License :: OSI-Approved Open Source :: Lucent Public License Version 1.02", False))
self.create_trove_cat(
(404, 14, "fair", "Fair License", "License :: OSI-Approved Open Source :: Fair License", False))
self.create_trove_cat(
(403, 14, "datagrid", "EU DataGrid Software License",
"License :: OSI-Approved Open Source :: EU DataGrid Software License", False))
self.create_trove_cat((307, 14, "ibmcpl", "Common Public License",
"License :: OSI-Approved Open Source :: Common Public License", False))
self.create_trove_cat(
(402, 14, "cua", "CUA Office Public License Version 1.0",
"License :: OSI-Approved Open Source :: CUA Office Public License Version 1.0", False))
self.create_trove_cat((401, 14, "apache2", "Apache License V2.0",
"License :: OSI-Approved Open Source :: Apache License V2.0", False))
self.create_trove_cat((394, 14, "nausite", "Naumen Public License",
"License :: OSI-Approved Open Source :: Naumen Public License", False))
self.create_trove_cat((317, 14, "xnet", "X.Net License",
"License :: OSI-Approved Open Source :: X.Net License", False))
self.create_trove_cat((195, 14, "zlib", "zlib/libpng License",
"License :: OSI-Approved Open Source :: zlib/libpng License", False))
self.create_trove_cat(
(323, 14, "ncsa", "University of Illinois/NCSA Open Source License",
"License :: OSI-Approved Open Source :: University of Illinois/NCSA Open Source License", False))
self.create_trove_cat((322, 14, "zope", "Zope Public License",
"License :: OSI-Approved Open Source :: Zope Public License", False))
self.create_trove_cat((302, 14, "sleepycat", "Sleepycat License",
"License :: OSI-Approved Open Source :: Sleepycat License", False))
self.create_trove_cat(
(303, 14, "nethack", "Nethack General Public License",
"License :: OSI-Approved Open Source :: Nethack General Public License", False))
self.create_trove_cat((306, 14, "apsl", "Apple Public Source License",
"License :: OSI-Approved Open Source :: Apple Public Source License", False))
self.create_trove_cat(
(305, 14, "mpl11", "Mozilla Public License 1.1 (MPL 1.1)",
"License :: OSI-Approved Open Source :: Mozilla Public License 1.1 (MPL 1.1)", False))
self.create_trove_cat((628, 14, "adaptive", "Adaptive Public License",
"License :: OSI-Approved Open Source :: Adaptive Public License", False))
self.create_trove_cat(
(630, 14, "cddl", "Common Development and Distribution License",
"License :: OSI-Approved Open Source :: Common Development and Distribution License", False))
self.create_trove_cat(
(631, 14, "catosl", "Computer Associates Trusted Open Source License",
"License :: OSI-Approved Open Source :: Computer Associates Trusted Open Source License", False))
self.create_trove_cat(
(199, 0, "os", "Operating System", "Operating System", True))
self.create_trove_cat((429, 426, "fink", "Fink (Mac OS X)",
"Operating System :: Emulation and API Compatibility :: Fink (Mac OS X)", True))
self.create_trove_cat((427, 426, "cygwin", "Cygwin (MS Windows)",
"Operating System :: Emulation and API Compatibility :: Cygwin (MS Windows)", True))
self.create_trove_cat(
(428, 426, "dosemu", "DOSEMU", "Operating System :: Emulation and API Compatibility :: DOSEMU", True))
self.create_trove_cat(
(430, 426, "wine", "WINE", "Operating System :: Emulation and API Compatibility :: WINE", True))
self.create_trove_cat((431, 426, "emx", "EMX (OS/2 and MS-DOS)",
"Operating System :: Emulation and API Compatibility :: EMX (OS/2 and MS-DOS)", True))
self.create_trove_cat(
(445, 426, "mingw_msys", "MinGW/MSYS (MS Windows)",
"Operating System :: Emulation and API Compatibility :: MinGW/MSYS (MS Windows)", True))
self.create_trove_cat(
(315, 199, "pdasystems", "Handheld/Embedded Operating Systems",
"Operating System :: Handheld/Embedded Operating Systems", True))
self.create_trove_cat(
(222, 315, "wince", "WinCE", "Operating System :: Handheld/Embedded Operating Systems :: WinCE", True))
self.create_trove_cat(
(223, 315, "palmos", "PalmOS", "Operating System :: Handheld/Embedded Operating Systems :: PalmOS", True))
self.create_trove_cat(
(441, 315, "ecos", "eCos", "Operating System :: Handheld/Embedded Operating Systems :: eCos", True))
self.create_trove_cat(
(
443, 315, "vxworks", "VxWorks", "Operating System :: Handheld/Embedded Operating Systems :: VxWorks", True)) # nopep8
self.create_trove_cat((444, 315, "symbianos", "SymbianOS",
"Operating System :: Handheld/Embedded Operating Systems :: SymbianOS", True))
self.create_trove_cat(
(442, 315, "qnx", "QNX", "Operating System :: Handheld/Embedded Operating Systems :: QNX", True))
self.create_trove_cat(
(
440, 315, "uclinux", "uClinux", "Operating System :: Handheld/Embedded Operating Systems :: uClinux", True)) # nopep8
self.create_trove_cat(
(418, 199, "modern_oses", "Modern (Vendor-Supported) Desktop Operating Systems",
"Operating System :: Modern (Vendor-Supported) Desktop Operating Systems", True))
self.create_trove_cat((420, 418, "mswin_2000", "Win2K",
"Operating System :: Modern (Vendor-Supported) Desktop Operating Systems :: Win2K",
True))
self.create_trove_cat(
(207, 418, "sun", "Solaris",
"Operating System :: Modern (Vendor-Supported) Desktop Operating Systems :: Solaris", True))
self.create_trove_cat(
(201, 418, "linux", "Linux",
"Operating System :: Modern (Vendor-Supported) Desktop Operating Systems :: Linux", True))
self.create_trove_cat((205, 418, "openbsd", "OpenBSD",
"Operating System :: Modern (Vendor-Supported) Desktop Operating Systems :: OpenBSD",
True))
self.create_trove_cat((203, 418, "freebsd", "FreeBSD",
"Operating System :: Modern (Vendor-Supported) Desktop Operating Systems :: FreeBSD",
True))
self.create_trove_cat(
(204, 418, "netbsd", "NetBSD",
"Operating System :: Modern (Vendor-Supported) Desktop Operating Systems :: NetBSD", True))
self.create_trove_cat(
(309, 418, "macosx", "OS X",
"Operating System :: Modern (Vendor-Supported) Desktop Operating Systems :: OS X", True))
self.create_trove_cat(
(419, 418, "mswin_xp", "WinXP",
"Operating System :: Modern (Vendor-Supported) Desktop Operating Systems :: WinXP", True))
self.create_trove_cat((236, 199, "other", "Other Operating Systems",
"Operating System :: Other Operating Systems", True))
self.create_trove_cat(
(206, 236, "bsdos", "BSD/OS", "Operating System :: Other Operating Systems :: BSD/OS", True))
self.create_trove_cat(
(634, 236, "console-platforms", "Console-based Platforms",
"Operating System :: Other Operating Systems :: Console-based Platforms", True))
self.create_trove_cat((637, 634, "sega-dreamcast", "Sega Dreamcast",
"Operating System :: Other Operating Systems :: Console-based Platforms :: Sega Dreamcast", # nopep8
True))
self.create_trove_cat((635, 634, "xbox", "Microsoft Xbox",
"Operating System :: Other Operating Systems :: Console-based Platforms :: Microsoft Xbox", # nopep8
True))
self.create_trove_cat((636, 634, "sony-ps2", "Sony Playstation 2",
"Operating System :: Other Operating Systems :: Console-based Platforms :: Sony Playstation 2", # nopep8
True))
self.create_trove_cat(
(422, 236, "mswin_98", "Win98", "Operating System :: Other Operating Systems :: Win98", True))
self.create_trove_cat((425, 422, "mswin_98_osr2", "Win98 OSR2",
"Operating System :: Other Operating Systems :: Win98 :: Win98 OSR2", True))
self.create_trove_cat(
(424, 236, "mswin_me", "WinME", "Operating System :: Other Operating Systems :: WinME", True))
self.create_trove_cat(
(423, 236, "mswin_nt", "WinNT", "Operating System :: Other Operating Systems :: WinNT", True))
self.create_trove_cat(
(220, 236, "os2", "IBM OS/2", "Operating System :: Other Operating Systems :: IBM OS/2", True))
self.create_trove_cat(
(211, 236, "irix", "SGI IRIX", "Operating System :: Other Operating Systems :: SGI IRIX", True))
self.create_trove_cat(
(210, 236, "aix", "IBM AIX", "Operating System :: Other Operating Systems :: IBM AIX", True))
self.create_trove_cat(
(212, 236, "other", "Other", "Operating System :: Other Operating Systems :: Other", True))
self.create_trove_cat(
(446, 236, "openvms", "OpenVMS", "Operating System :: Other Operating Systems :: OpenVMS", True))
self.create_trove_cat(
(434, 236, "amigaos", "AmigaOS", "Operating System :: Other Operating Systems :: AmigaOS", True))
self.create_trove_cat(
(448, 236, "mswin_server2003", "Microsoft Windows Server 2003",
"Operating System :: Other Operating Systems :: Microsoft Windows Server 2003", True))
self.create_trove_cat(
(447, 236, "morphos", "MorphOS", "Operating System :: Other Operating Systems :: MorphOS", True))
self.create_trove_cat(
(209, 236, "hpux", "HP-UX", "Operating System :: Other Operating Systems :: HP-UX", True))
self.create_trove_cat(
(208, 236, "sco", "SCO", "Operating System :: Other Operating Systems :: SCO", True))
self.create_trove_cat(
(240, 236, "gnuhurd", "GNU Hurd", "Operating System :: Other Operating Systems :: GNU Hurd", True))
self.create_trove_cat((217, 236, "win31", "Microsoft Windows 3.x",
"Operating System :: Other Operating Systems :: Microsoft Windows 3.x", True))
self.create_trove_cat(
(432, 199, "os_groups", "Grouping and Descriptive Categories",
"Operating System :: Grouping and Descriptive Categories", True))
self.create_trove_cat((218, 432, "win95", "32-bit MS Windows (95/98)",
"Operating System :: Grouping and Descriptive Categories :: 32-bit MS Windows (95/98)",
True))
self.create_trove_cat(
(439, 432, "os_projectdistrospecific", "Project is OS Distribution-Specific",
"Operating System :: Grouping and Descriptive Categories :: Project is OS Distribution-Specific", True))
self.create_trove_cat(
(449, 432, "eightbit_oses", "Classic 8-bit Operating Systems (Apple, Atari, Commodore, etc.)",
"Operating System :: Grouping and Descriptive Categories :: Classic 8-bit Operating Systems (Apple, Atari, Commodore, etc.)", # nopep8
True))
self.create_trove_cat(
(436, 432, "os_portable", "OS Portable (Source code to work with many OS platforms)",
"Operating System :: Grouping and Descriptive Categories :: OS Portable (Source code to work with many OS platforms)", # nopep8
True))
self.create_trove_cat(
(438, 432, "os_projectdistro", "Project is an Operating System Distribution",
"Operating System :: Grouping and Descriptive Categories :: Project is an Operating System Distribution",
True))
self.create_trove_cat(
(235, 432, "independent", "OS Independent (Written in an interpreted language)",
"Operating System :: Grouping and Descriptive Categories :: OS Independent (Written in an interpreted language)", # nopep8
True))
self.create_trove_cat(
(200, 432, "posix", "All POSIX (Linux/BSD/UNIX-like OSes)",
"Operating System :: Grouping and Descriptive Categories :: All POSIX (Linux/BSD/UNIX-like OSes)", True))
self.create_trove_cat(
(219, 432, "winnt", "32-bit MS Windows (NT/2000/XP)",
"Operating System :: Grouping and Descriptive Categories :: 32-bit MS Windows (NT/2000/XP)", True))
self.create_trove_cat(
(202, 432, "bsd", "All BSD Platforms (FreeBSD/NetBSD/OpenBSD/Apple Mac OS X)",
"Operating System :: Grouping and Descriptive Categories :: All BSD Platforms (FreeBSD/NetBSD/OpenBSD/Apple Mac OS X)", # nopep8
True))
self.create_trove_cat(
(435, 432, "mswin_all32bit", "All 32-bit MS Windows (95/98/NT/2000/XP)",
"Operating System :: Grouping and Descriptive Categories :: All 32-bit MS Windows (95/98/NT/2000/XP)",
True))
self.create_trove_cat(
(437, 432, "os_projectkernel", "Project is an Operating System Kernel",
"Operating System :: Grouping and Descriptive Categories :: Project is an Operating System Kernel", True))
self.create_trove_cat(
(64, 63, "emacs", "Emacs", "Topic :: Text Editors :: Emacs", True))
self.create_trove_cat(
(65, 63, "ide", "Integrated Development Environments (IDE)",
"Topic :: Text Editors :: Integrated Development Environments (IDE)", True))
self.create_trove_cat(
(69, 63, "documentation", "Documentation", "Topic :: Text Editors :: Documentation", True))
self.create_trove_cat(
(70, 63, "wordprocessors", "Word Processors", "Topic :: Text Editors :: Word Processors", True))
self.create_trove_cat(
(285, 63, "textprocessing", "Text Processing", "Topic :: Text Editors :: Text Processing", True))
self.create_trove_cat((611, 18, "formats_and_protocols",
"Formats and Protocols", "Topic :: Formats and Protocols", True))
self.create_trove_cat((554, 611, "data_formats", "Data Formats",
"Topic :: Formats and Protocols :: Data Formats", True))
self.create_trove_cat(
(559, 554, "xml", "XML", "Topic :: Formats and Protocols :: Data Formats :: XML", True))
self.create_trove_cat(
(557, 554, "sgml", "SGML", "Topic :: Formats and Protocols :: Data Formats :: SGML", True))
self.create_trove_cat(
(555, 554, "docbook", "DocBook", "Topic :: Formats and Protocols :: Data Formats :: DocBook", True))
self.create_trove_cat((556, 554, "html_xhtml", "HTML/XHTML",
"Topic :: Formats and Protocols :: Data Formats :: HTML/XHTML", True))
self.create_trove_cat((558, 554, "tex_latex", "TeX/LaTeX",
"Topic :: Formats and Protocols :: Data Formats :: TeX/LaTeX", True))
self.create_trove_cat(
(612, 611, "protocols", "Protocols", "Topic :: Formats and Protocols :: Protocols", True))
self.create_trove_cat(
(616, 612, "xml_rpc", "XML-RPC", "Topic :: Formats and Protocols :: Protocols :: XML-RPC", True))
self.create_trove_cat(
(614, 612, "nntp", "NNTP", "Topic :: Formats and Protocols :: Protocols :: NNTP", True))
self.create_trove_cat(
(613, 612, "soap", "SOAP", "Topic :: Formats and Protocols :: Protocols :: SOAP", True))
self.create_trove_cat(
(615, 612, "rss", "RSS", "Topic :: Formats and Protocols :: Protocols :: RSS", True))
self.create_trove_cat(
(156, 18, "terminals", "Terminals", "Topic :: Terminals", True))
self.create_trove_cat(
(157, 156, "serial", "Serial", "Topic :: Terminals :: Serial", True))
self.create_trove_cat(
(158, 156, "virtual", "Terminal Emulators/X Terminals",
"Topic :: Terminals :: Terminal Emulators/X Terminals", True))
self.create_trove_cat(
(159, 156, "telnet", "Telnet", "Topic :: Terminals :: Telnet", True))
self.create_trove_cat(
(20, 18, "communications", "Communications", "Topic :: Communications", True))
self.create_trove_cat(
(37, 20, "fido", "FIDO", "Topic :: Communications :: FIDO", True))
self.create_trove_cat(
(38, 20, "hamradio", "Ham Radio", "Topic :: Communications :: Ham Radio", True))
self.create_trove_cat(
(39, 20, "usenet", "Usenet News", "Topic :: Communications :: Usenet News", True))
self.create_trove_cat(
(40, 20, "internetphone", "Internet Phone", "Topic :: Communications :: Internet Phone", True))
self.create_trove_cat(
(36, 20, "fax", "Fax", "Topic :: Communications :: Fax", True))
self.create_trove_cat(
(22, 20, "chat", "Chat", "Topic :: Communications :: Chat", True))
self.create_trove_cat((574, 22, "msn_messenger", "MSN Messenger",
"Topic :: Communications :: Chat :: MSN Messenger", True))
self.create_trove_cat((26, 22, "aim", "AOL Instant Messenger",
"Topic :: Communications :: Chat :: AOL Instant Messenger", True))
self.create_trove_cat((24, 22, "irc", "Internet Relay Chat",
"Topic :: Communications :: Chat :: Internet Relay Chat", True))
self.create_trove_cat(
(25, 22, "talk", "Unix Talk", "Topic :: Communications :: Chat :: Unix Talk", True))
self.create_trove_cat(
(23, 22, "icq", "ICQ", "Topic :: Communications :: Chat :: ICQ", True))
self.create_trove_cat(
(590, 20, "streaming_comms", "Streaming", "Topic :: Communications :: Streaming", True))
self.create_trove_cat(
(27, 20, "conferencing", "Conferencing", "Topic :: Communications :: Conferencing", True))
self.create_trove_cat(
(247, 20, "telephony", "Telephony", "Topic :: Communications :: Telephony", True))
self.create_trove_cat(
(251, 20, "filesharing", "File Sharing", "Topic :: Communications :: File Sharing", True))
self.create_trove_cat((622, 251, "bittorrent", "BitTorrent",
"Topic :: Communications :: File Sharing :: BitTorrent", True))
self.create_trove_cat(
(286, 251, "gnutella", "Gnutella", "Topic :: Communications :: File Sharing :: Gnutella", True))
self.create_trove_cat(
(241, 251, "napster", "Napster", "Topic :: Communications :: File Sharing :: Napster", True))
self.create_trove_cat(
(21, 20, "bbs", "BBS", "Topic :: Communications :: BBS", True))
self.create_trove_cat(
(28, 20, "email", "Email", "Topic :: Communications :: Email", True))
self.create_trove_cat((31, 28, "mua", "Email Clients (MUA)",
"Topic :: Communications :: Email :: Email Clients (MUA)", True))
self.create_trove_cat((32, 28, "mta", "Mail Transport Agents",
"Topic :: Communications :: Email :: Mail Transport Agents", True))
self.create_trove_cat(
(234, 18, "other", "Other/Nonlisted Topic", "Topic :: Other/Nonlisted Topic", True))
self.create_trove_cat(
(129, 18, "office", "Office/Business", "Topic :: Office/Business", True))
self.create_trove_cat(
(576, 129, "enterprise", "Enterprise", "Topic :: Office/Business :: Enterprise", True))
self.create_trove_cat(
(579, 576, "crm", "CRM", "Topic :: Office/Business :: Enterprise :: CRM", True))
self.create_trove_cat(
(577, 576, "erp", "ERP", "Topic :: Office/Business :: Enterprise :: ERP", True))
self.create_trove_cat(
(578, 576, "olap", "OLAP", "Topic :: Office/Business :: Enterprise :: OLAP", True))
self.create_trove_cat(
(580, 576, "data_warehousing", "Data Warehousing",
"Topic :: Office/Business :: Enterprise :: Data Warehousing", True))
self.create_trove_cat(
(587, 129, "time_tracking", "Time Tracking", "Topic :: Office/Business :: Time Tracking", True))
self.create_trove_cat(
(75, 129, "financial", "Financial", "Topic :: Office/Business :: Financial", True))
self.create_trove_cat((76, 75, "accounting", "Accounting",
"Topic :: Office/Business :: Financial :: Accounting", True))
self.create_trove_cat((77, 75, "investment", "Investment",
"Topic :: Office/Business :: Financial :: Investment", True))
self.create_trove_cat((78, 75, "spreadsheet", "Spreadsheet",
"Topic :: Office/Business :: Financial :: Spreadsheet", True))
self.create_trove_cat((79, 75, "pointofsale", "Point-Of-Sale",
"Topic :: Office/Business :: Financial :: Point-Of-Sale", True))
self.create_trove_cat(
(130, 129, "scheduling", "Scheduling", "Topic :: Office/Business :: Scheduling", True))
self.create_trove_cat(
(585, 130, "calendar", "Calendar", "Topic :: Office/Business :: Scheduling :: Calendar", True))
self.create_trove_cat(
(586, 130, "resource_booking", "Resource Booking",
"Topic :: Office/Business :: Scheduling :: Resource Booking", True))
self.create_trove_cat(
(131, 129, "suites", "Office Suites", "Topic :: Office/Business :: Office Suites", True))
self.create_trove_cat(
(588, 129, "todo_lists", "To-Do Lists", "Topic :: Office/Business :: To-Do Lists", True))
self.create_trove_cat(
(607, 129, "project_management", "Project Management",
"Topic :: Office/Business :: Project Management", True))
self.create_trove_cat(
(66, 18, "database", "Database", "Topic :: Database", True))
self.create_trove_cat(
(68, 66, "frontends", "Front-Ends", "Topic :: Database :: Front-Ends", True))
self.create_trove_cat((67, 66, "engines", "Database Engines/Servers",
"Topic :: Database :: Database Engines/Servers", True))
self.create_trove_cat(
(43, 18, "security", "Security", "Topic :: Security", True))
self.create_trove_cat(
(44, 43, "cryptography", "Cryptography", "Topic :: Security :: Cryptography", True))
self.create_trove_cat(
(55, 18, "desktop", "Desktop Environment", "Topic :: Desktop Environment", True))
self.create_trove_cat((56, 55, "windowmanagers", "Window Managers",
"Topic :: Desktop Environment :: Window Managers", True))
self.create_trove_cat((59, 56, "enlightenment", "Enlightenment",
"Topic :: Desktop Environment :: Window Managers :: Enlightenment", True))
self.create_trove_cat(
(60, 59, "themes", "Themes", "Topic :: Desktop Environment :: Window Managers :: Enlightenment :: Themes",
True))
self.create_trove_cat((57, 55, "kde", "K Desktop Environment (KDE)",
"Topic :: Desktop Environment :: K Desktop Environment (KDE)", True))
self.create_trove_cat(
(61, 57, "themes", "Themes", "Topic :: Desktop Environment :: K Desktop Environment (KDE) :: Themes", True)) # nopep8
self.create_trove_cat(
(58, 55, "gnome", "Gnome", "Topic :: Desktop Environment :: Gnome", True))
self.create_trove_cat((62, 55, "screensavers", "Screen Savers",
"Topic :: Desktop Environment :: Screen Savers", True))
self.create_trove_cat(
(80, 18, "games", "Games/Entertainment", "Topic :: Games/Entertainment", True))
self.create_trove_cat((633, 80, "console-games", "Console-based Games",
"Topic :: Games/Entertainment :: Console-based Games", True))
self.create_trove_cat(
(287, 80, "boardgames", "Board Games", "Topic :: Games/Entertainment :: Board Games", True))
self.create_trove_cat(
(288, 80, "sidescrolling", "Side-Scrolling/Arcade Games",
"Topic :: Games/Entertainment :: Side-Scrolling/Arcade Games", True))
self.create_trove_cat(
(81, 80, "realtimestrategy", "Real Time Strategy",
"Topic :: Games/Entertainment :: Real Time Strategy", True))
self.create_trove_cat(
(82, 80, "firstpersonshooters", "First Person Shooters",
"Topic :: Games/Entertainment :: First Person Shooters", True))
self.create_trove_cat(
(83, 80, "turnbasedstrategy", "Turn Based Strategy",
"Topic :: Games/Entertainment :: Turn Based Strategy", True))
self.create_trove_cat(
(84, 80, "rpg", "Role-Playing", "Topic :: Games/Entertainment :: Role-Playing", True))
self.create_trove_cat(
(85, 80, "simulation", "Simulation", "Topic :: Games/Entertainment :: Simulation", True))
self.create_trove_cat((86, 80, "mud", "Multi-User Dungeons (MUD)",
"Topic :: Games/Entertainment :: Multi-User Dungeons (MUD)", True))
self.create_trove_cat(
(268, 80, "Puzzles", "Puzzle Games", "Topic :: Games/Entertainment :: Puzzle Games", True))
self.create_trove_cat(
(88, 87, "finger", "Finger", "Topic :: Internet :: Finger", True))
self.create_trove_cat((89, 87, "ftp", "File Transfer Protocol (FTP)",
"Topic :: Internet :: File Transfer Protocol (FTP)", True))
self.create_trove_cat(
(270, 87, "WAP", "WAP", "Topic :: Internet :: WAP", True))
self.create_trove_cat(
(90, 87, "www", "WWW/HTTP", "Topic :: Internet :: WWW/HTTP", True))
self.create_trove_cat(
(91, 90, "browsers", "Browsers", "Topic :: Internet :: WWW/HTTP :: Browsers", True))
self.create_trove_cat((92, 90, "dynamic", "Dynamic Content",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content", True))
self.create_trove_cat((95, 92, "messageboards", "Message Boards",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content :: Message Boards", True))
self.create_trove_cat((96, 92, "cgi", "CGI Tools/Libraries",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries", True))
self.create_trove_cat((94, 92, "counters", "Page Counters",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content :: Page Counters", True))
self.create_trove_cat((93, 90, "indexing", "Indexing/Search",
"Topic :: Internet :: WWW/HTTP :: Indexing/Search", True))
self.create_trove_cat((243, 90, "sitemanagement", "Site Management",
"Topic :: Internet :: WWW/HTTP :: Site Management", True))
self.create_trove_cat((244, 243, "linkchecking", "Link Checking",
"Topic :: Internet :: WWW/HTTP :: Site Management :: Link Checking", True))
self.create_trove_cat((250, 90, "httpservers", "HTTP Servers",
"Topic :: Internet :: WWW/HTTP :: HTTP Servers", True))
self.create_trove_cat(
(149, 87, "dns", "Name Service (DNS)", "Topic :: Internet :: Name Service (DNS)", True))
self.create_trove_cat(
(245, 87, "loganalysis", "Log Analysis", "Topic :: Internet :: Log Analysis", True))
self.create_trove_cat(
(45, 18, "development", "Software Development", "Topic :: Software Development", True))
self.create_trove_cat(
(563, 45, "modeling", "Modeling", "Topic :: Software Development :: Modeling", True))
self.create_trove_cat(
(46, 45, "build", "Build Tools", "Topic :: Software Development :: Build Tools", True))
self.create_trove_cat(
(575, 45, "testing", "Testing", "Topic :: Software Development :: Testing", True))
self.create_trove_cat(
(620, 45, "algorithms", "Algorithms", "Topic :: Software Development :: Algorithms", True))
self.create_trove_cat(
(621, 620, "genetic_algorithms", "Genetic Algorithms",
"Topic :: Software Development :: Algorithms :: Genetic Algorithms", True))
self.create_trove_cat(
(606, 45, "frameworks", "Frameworks", "Topic :: Software Development :: Frameworks", True))
self.create_trove_cat((564, 45, "documentation", "Documentation",
"Topic :: Software Development :: Documentation", True))
self.create_trove_cat((562, 45, "swdev_oo", "Object Oriented",
"Topic :: Software Development :: Object Oriented", True))
self.create_trove_cat((409, 45, "l10n", "L10N (Localization)",
"Topic :: Software Development :: L10N (Localization)", True))
self.create_trove_cat((408, 45, "i18n", "I18N (Internationalization)",
"Topic :: Software Development :: I18N (Internationalization)", True))
self.create_trove_cat((50, 45, "objectbrokering", "Object Brokering",
"Topic :: Software Development :: Object Brokering", True))
self.create_trove_cat(
(51, 50, "corba", "CORBA", "Topic :: Software Development :: Object Brokering :: CORBA", True))
self.create_trove_cat((52, 45, "versioncontrol", "Version Control",
"Topic :: Software Development :: Version Control", True))
self.create_trove_cat(
(53, 52, "cvs", "CVS", "Topic :: Software Development :: Version Control :: CVS", True))
self.create_trove_cat(
(54, 52, "rcs", "RCS", "Topic :: Software Development :: Version Control :: RCS", True))
self.create_trove_cat(
(260, 52, "SCCS", "SCCS", "Topic :: Software Development :: Version Control :: SCCS", True))
self.create_trove_cat((259, 45, "codegen", "Code Generators",
"Topic :: Software Development :: Code Generators", True))
self.create_trove_cat(
(47, 45, "debuggers", "Debuggers", "Topic :: Software Development :: Debuggers", True))
self.create_trove_cat(
(48, 45, "compilers", "Compilers", "Topic :: Software Development :: Compilers", True))
self.create_trove_cat((49, 45, "interpreters", "Interpreters",
"Topic :: Software Development :: Interpreters", True))
self.create_trove_cat((561, 45, "softwaredev_ui", "User Interfaces",
"Topic :: Software Development :: User Interfaces", True))
self.create_trove_cat(
(565, 45, "quality_assurance", "Quality Assurance",
"Topic :: Software Development :: Quality Assurance", True))
self.create_trove_cat(
(570, 45, "case_tools", "CASE", "Topic :: Software Development :: CASE", True))
self.create_trove_cat(
(582, 45, "design", "Design", "Topic :: Software Development :: Design", True))
self.create_trove_cat((593, 45, "cross_compilers", "Cross Compilers",
"Topic :: Software Development :: Cross Compilers", True))
self.create_trove_cat(
(603, 45, "profilers", "Profiling", "Topic :: Software Development :: Profiling", True))
self.create_trove_cat((610, 45, "virtual_machines", "Virtual Machines",
"Topic :: Software Development :: Virtual Machines", True))
self.create_trove_cat(
(619, 45, "usability", "Usability", "Topic :: Software Development :: Usability", True))
self.create_trove_cat(
(581, 71, "library", "Library", "Topic :: Education :: Library", True))
self.create_trove_cat(
(604, 581, "opac", "OPAC", "Topic :: Education :: Library :: OPAC", True))
self.create_trove_cat(
(605, 581, "marc_and_metadata", "MARC and Book/Library Metadata",
"Topic :: Education :: Library :: MARC and Book/Library Metadata", True))
self.create_trove_cat(
(132, 18, "religion", "Religion and Philosophy", "Topic :: Religion and Philosophy", True))
self.create_trove_cat(
(571, 132, "new_age", "New Age", "Topic :: Religion and Philosophy :: New Age", True))
self.create_trove_cat(
(136, 18, "system", "System", "Topic :: System", True))
self.create_trove_cat(
(638, 136, "storage", "Storage", "Topic :: System :: Storage", True))
self.create_trove_cat((601, 638, "file_management", "File Management",
"Topic :: System :: Storage :: File Management", True))
self.create_trove_cat(
(19, 638, "archiving", "Archiving", "Topic :: System :: Storage :: Archiving", True))
self.create_trove_cat((42, 19, "compression", "Compression",
"Topic :: System :: Storage :: Archiving :: Compression", True))
self.create_trove_cat(
(137, 19, "backup", "Backup", "Topic :: System :: Storage :: Archiving :: Backup", True))
self.create_trove_cat(
(41, 19, "packaging", "Packaging", "Topic :: System :: Storage :: Archiving :: Packaging", True))
self.create_trove_cat(
(294, 136, "shells", "System Shells", "Topic :: System :: System Shells", True))
self.create_trove_cat(
(74, 136, "emulators", "Emulators", "Topic :: System :: Emulators", True))
self.create_trove_cat(
(627, 136, "system_search", "Search", "Topic :: System :: Search", True))
self.create_trove_cat(
(257, 136, "softwaredist", "Software Distribution", "Topic :: System :: Software Distribution", True))
self.create_trove_cat(
(122, 113, "players", "Players", "Topic :: Multimedia :: Sound/Audio :: Players", True))
self.create_trove_cat(
(253, 136, "sysadministration", "Systems Administration",
"Topic :: System :: Systems Administration", True))
self.create_trove_cat(
(289, 253, "authentication", "Authentication/Directory",
"Topic :: System :: Systems Administration :: Authentication/Directory", True))
self.create_trove_cat(
(290, 289, "nis", "NIS", "Topic :: System :: Systems Administration :: Authentication/Directory :: NIS",
True))
self.create_trove_cat(
(291, 289, "ldap", "LDAP", "Topic :: System :: Systems Administration :: Authentication/Directory :: LDAP",
True))
self.create_trove_cat(
(153, 136, "power", "Power (UPS)", "Topic :: System :: Power (UPS)", True))
self.create_trove_cat(
(150, 136, "networking", "Networking", "Topic :: System :: Networking", True))
self.create_trove_cat(
(566, 150, "wireless", "Wireless", "Topic :: System :: Networking :: Wireless", True))
self.create_trove_cat(
(151, 150, "firewalls", "Firewalls", "Topic :: System :: Networking :: Firewalls", True))
self.create_trove_cat(
(152, 150, "monitoring", "Monitoring", "Topic :: System :: Networking :: Monitoring", True))
self.create_trove_cat((155, 152, "watchdog", "Hardware Watchdog",
"Topic :: System :: Networking :: Monitoring :: Hardware Watchdog", True))
self.create_trove_cat(
(148, 136, "logging", "Logging", "Topic :: System :: Logging", True))
self.create_trove_cat(
(592, 148, "log_rotation", "Log Rotation", "Topic :: System :: Logging :: Log Rotation", True))
self.create_trove_cat((144, 136, "kernels", "Operating System Kernels",
"Topic :: System :: Operating System Kernels", True))
self.create_trove_cat(
(145, 144, "bsd", "BSD", "Topic :: System :: Operating System Kernels :: BSD", True))
self.create_trove_cat(
(239, 144, "gnuhurd", "GNU Hurd", "Topic :: System :: Operating System Kernels :: GNU Hurd", True))
self.create_trove_cat(
(143, 144, "linux", "Linux", "Topic :: System :: Operating System Kernels :: Linux", True))
self.create_trove_cat(
(147, 136, "setup", "Installation/Setup", "Topic :: System :: Installation/Setup", True))
self.create_trove_cat(
(146, 136, "hardware", "Hardware", "Topic :: System :: Hardware", True))
self.create_trove_cat(
(313, 146, "mainframe", "Mainframes", "Topic :: System :: Hardware :: Mainframes", True))
self.create_trove_cat((312, 146, "smp", "Symmetric Multi-processing",
"Topic :: System :: Hardware :: Symmetric Multi-processing", True))
self.create_trove_cat((292, 146, "drivers", "Hardware Drivers",
"Topic :: System :: Hardware :: Hardware Drivers", True))
self.create_trove_cat(
(138, 136, "benchmark", "Benchmark", "Topic :: System :: Benchmark", True))
self.create_trove_cat(
(139, 136, "boot", "Boot", "Topic :: System :: Boot", True))
self.create_trove_cat(
(140, 139, "init", "Init", "Topic :: System :: Boot :: Init", True))
self.create_trove_cat(
(141, 136, "clustering", "Clustering", "Topic :: System :: Clustering", True))
self.create_trove_cat((308, 136, "distributed_computing",
"Distributed Computing", "Topic :: System :: Distributed Computing", True))
self.create_trove_cat(
(142, 136, "filesystems", "Filesystems", "Topic :: System :: Filesystems", True))
self.create_trove_cat(
(154, 18, "printing", "Printing", "Topic :: Printing", True))
self.create_trove_cat(
(87, 18, "internet", "Internet", "Topic :: Internet", True))
self.create_trove_cat((118, 116, "cdripping", "CD Ripping",
"Topic :: Multimedia :: Sound/Audio :: CD Audio :: CD Ripping", True))
self.create_trove_cat((119, 113, "conversion", "Conversion",
"Topic :: Multimedia :: Sound/Audio :: Conversion", True))
self.create_trove_cat(
(120, 113, "editors", "Editors", "Topic :: Multimedia :: Sound/Audio :: Editors", True))
self.create_trove_cat(
(121, 113, "mixers", "Mixers", "Topic :: Multimedia :: Sound/Audio :: Mixers", True))
self.create_trove_cat(
(100, 99, "graphics", "Graphics", "Topic :: Multimedia :: Graphics", True))
self.create_trove_cat((109, 100, "3dmodeling", "3D Modeling",
"Topic :: Multimedia :: Graphics :: 3D Modeling", True))
self.create_trove_cat((110, 100, "3drendering", "3D Rendering",
"Topic :: Multimedia :: Graphics :: 3D Rendering", True))
self.create_trove_cat((111, 100, "presentation", "Presentation",
"Topic :: Multimedia :: Graphics :: Presentation", True))
self.create_trove_cat(
(112, 100, "viewers", "Viewers", "Topic :: Multimedia :: Graphics :: Viewers", True))
self.create_trove_cat(
(101, 100, "capture", "Capture", "Topic :: Multimedia :: Graphics :: Capture", True))
self.create_trove_cat((104, 101, "screencapture", "Screen Capture",
"Topic :: Multimedia :: Graphics :: Capture :: Screen Capture", True))
self.create_trove_cat((103, 101, "cameras", "Digital Camera",
"Topic :: Multimedia :: Graphics :: Capture :: Digital Camera", True))
self.create_trove_cat((102, 101, "scanners", "Scanners",
"Topic :: Multimedia :: Graphics :: Capture :: Scanners", True))
self.create_trove_cat((105, 100, "conversion", "Graphics Conversion",
"Topic :: Multimedia :: Graphics :: Graphics Conversion", True))
self.create_trove_cat(
(106, 100, "editors", "Editors", "Topic :: Multimedia :: Graphics :: Editors", True))
self.create_trove_cat((108, 106, "raster", "Raster-Based",
"Topic :: Multimedia :: Graphics :: Editors :: Raster-Based", True))
self.create_trove_cat((107, 106, "vector", "Vector-Based",
"Topic :: Multimedia :: Graphics :: Editors :: Vector-Based", True))
self.create_trove_cat(
(97, 18, "scientific", "Scientific/Engineering", "Topic :: Scientific/Engineering", True))
self.create_trove_cat(
(609, 97, "molecular_science", "Molecular Science",
"Topic :: Scientific/Engineering :: Molecular Science", True))
self.create_trove_cat(
(602, 97, "robotics", "Robotics", "Topic :: Scientific/Engineering :: Robotics", True))
self.create_trove_cat((600, 97, "simulations", "Simulations",
"Topic :: Scientific/Engineering :: Simulations", True))
self.create_trove_cat(
(568, 97, "ecosystem_sciences", "Ecosystem Sciences",
"Topic :: Scientific/Engineering :: Ecosystem Sciences", True))
self.create_trove_cat(
(386, 97, "interfaceengine", "Interface Engine/Protocol Translator",
"Topic :: Scientific/Engineering :: Interface Engine/Protocol Translator", True))
self.create_trove_cat(
(384, 97, "chemistry", "Chemistry", "Topic :: Scientific/Engineering :: Chemistry", True))
self.create_trove_cat((252, 97, "bioinformatics", "Bio-Informatics",
"Topic :: Scientific/Engineering :: Bio-Informatics", True))
self.create_trove_cat(
(246, 97, "eda", "Electronic Design Automation (EDA)",
"Topic :: Scientific/Engineering :: Electronic Design Automation (EDA)", True))
self.create_trove_cat((135, 97, "visualization", "Visualization",
"Topic :: Scientific/Engineering :: Visualization", True))
self.create_trove_cat(
(134, 97, "astronomy", "Astronomy", "Topic :: Scientific/Engineering :: Astronomy", True))
self.create_trove_cat((133, 97, "ai", "Artificial Intelligence",
"Topic :: Scientific/Engineering :: Artificial Intelligence", True))
self.create_trove_cat(
(591, 133, "intelligent_agents", "Intelligent Agents",
"Topic :: Scientific/Engineering :: Artificial Intelligence :: Intelligent Agents", True))
self.create_trove_cat((98, 97, "mathematics", "Mathematics",
"Topic :: Scientific/Engineering :: Mathematics", True))
self.create_trove_cat((272, 97, "HMI", "Human Machine Interfaces",
"Topic :: Scientific/Engineering :: Human Machine Interfaces", True))
self.create_trove_cat((266, 97, "medical", "Medical Science Apps.",
"Topic :: Scientific/Engineering :: Medical Science Apps.", True))
self.create_trove_cat(
(383, 97, "gis", "GIS", "Topic :: Scientific/Engineering :: GIS", True))
self.create_trove_cat(
(385, 97, "informationanalysis", "Information Analysis",
"Topic :: Scientific/Engineering :: Information Analysis", True))
self.create_trove_cat(
(387, 97, "physics", "Physics", "Topic :: Scientific/Engineering :: Physics", True))
self.create_trove_cat((567, 97, "earth_science", "Earth Sciences",
"Topic :: Scientific/Engineering :: Earth Sciences", True))
self.create_trove_cat(
(282, 18, "Sociology", "Sociology", "Topic :: Sociology", True))
self.create_trove_cat(
(284, 282, "Genealogy", "Genealogy", "Topic :: Sociology :: Genealogy", True))
self.create_trove_cat(
(283, 282, "History", "History", "Topic :: Sociology :: History", True))
self.create_trove_cat(
(71, 18, "education", "Education", "Topic :: Education", True))
self.create_trove_cat(
(73, 71, "testing", "Testing", "Topic :: Education :: Testing", True))
self.create_trove_cat(
(72, 71, "cai", "Computer Aided Instruction (CAI)",
"Topic :: Education :: Computer Aided Instruction (CAI)", True))
self.create_trove_cat((18, 0, "topic", "Topic", "Topic", True))
self.create_trove_cat(
(125, 99, "video", "Video", "Topic :: Multimedia :: Video", True))
self.create_trove_cat((594, 125, "still_capture", "Still Capture",
"Topic :: Multimedia :: Video :: Still Capture", True))
self.create_trove_cat(
(596, 125, "codec", "Codec", "Topic :: Multimedia :: Video :: Codec", True))
self.create_trove_cat(
(127, 125, "conversion", "Conversion", "Topic :: Multimedia :: Video :: Conversion", True))
self.create_trove_cat(
(128, 125, "display", "Display", "Topic :: Multimedia :: Video :: Display", True))
self.create_trove_cat(
(256, 125, "nonlineareditor", "Non-Linear Editor",
"Topic :: Multimedia :: Video :: Non-Linear Editor", True))
self.create_trove_cat((595, 125, "special_effects", "Special Effects",
"Topic :: Multimedia :: Video :: Special Effects", True))
self.create_trove_cat(
(623, 125, "video_realtime", "Realtime Processing",
"Topic :: Multimedia :: Video :: Realtime Processing", True))
self.create_trove_cat((126, 125, "vidcapture", "Video Capture",
"Topic :: Multimedia :: Video :: Video Capture", True))
self.create_trove_cat(
(113, 99, "sound", "Sound/Audio", "Topic :: Multimedia :: Sound/Audio", True))
self.create_trove_cat(
(123, 122, "mp3", "MP3", "Topic :: Multimedia :: Sound/Audio :: Players :: MP3", True))
self.create_trove_cat(
(124, 113, "speech", "Speech", "Topic :: Multimedia :: Sound/Audio :: Speech", True))
self.create_trove_cat(
(114, 113, "analysis", "Analysis", "Topic :: Multimedia :: Sound/Audio :: Analysis", True))
self.create_trove_cat((115, 113, "capture", "Capture/Recording",
"Topic :: Multimedia :: Sound/Audio :: Capture/Recording", True))
self.create_trove_cat(
(248, 113, "midi", "MIDI", "Topic :: Multimedia :: Sound/Audio :: MIDI", True))
self.create_trove_cat((249, 113, "synthesis", "Sound Synthesis",
"Topic :: Multimedia :: Sound/Audio :: Sound Synthesis", True))
self.create_trove_cat(
(116, 113, "cdaudio", "CD Audio", "Topic :: Multimedia :: Sound/Audio :: CD Audio", True))
self.create_trove_cat((117, 116, "cdplay", "CD Playing",
"Topic :: Multimedia :: Sound/Audio :: CD Audio :: CD Playing", True))
self.create_trove_cat(
(99, 18, "multimedia", "Multimedia", "Topic :: Multimedia", True))
self.create_trove_cat((670, 14, "agpl", "Affero GNU Public License",
"License :: OSI-Approved Open Source :: Affero GNU Public License", True))
self.create_trove_cat((862, 14, "lppl", "LaTeX Project Public License",
"License :: OSI-Approved Open Source :: LaTeX Project Public License", True))
self.create_trove_cat((655, 432, "win64", "64-bit MS Windows",
"Operating System :: Grouping and Descriptive Categories :: 64-bit MS Windows", True))
self.create_trove_cat(
(657, 418, "vista", "Vista",
"Operating System :: Modern (Vendor-Supported) Desktop Operating Systems :: Vista", True))
self.create_trove_cat(
(851, 418, "win7", "Windows 7",
"Operating System :: Modern (Vendor-Supported) Desktop Operating Systems :: Windows 7", True))
        self.create_trove_cat(
            (728, 315, "android", "Android",
             "Operating System :: Handheld/Embedded Operating Systems :: Android", True))
self.create_trove_cat((780, 315, "ios", "Apple iPhone",
"Operating System :: Handheld/Embedded Operating Systems :: Apple iPhone", True))
self.create_trove_cat((863, 534, "architects", "Architects",
"Intended Audience :: by End-User Class :: Architects", False))
self.create_trove_cat(
(864, 534, "auditors", "Auditors", "Intended Audience :: by End-User Class :: Auditors", False))
self.create_trove_cat(
(865, 534, "testers", "Testers", "Intended Audience :: by End-User Class :: Testers", False))
self.create_trove_cat((866, 534, "secpros", "Security Professionals",
"Intended Audience :: by End-User Class :: Security Professionals", False))
self.create_trove_cat((867, 535, "secindustry", "Security",
"Intended Audience :: by Industry or Sector :: Security", False))
session(M.TroveCategory).flush()
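        # Run every registered migration method in order, flushing the session
        # after each one so later migrations can see the categories it created.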
for name in self.migrations:
getattr(self, 'm__' + name)()
session(M.TroveCategory).flush()

    def m__sync(self):
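        # Categories added after the initial seed: newer OSI-approved licenses,
        # handheld/console platforms, additional languages and topics.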
self.create_trove_cat(
(639, 14, "cpal", "Common Public Attribution License 1.0 (CPAL)",
"License :: OSI-Approved Open Source :: Common Public Attribution License 1.0 (CPAL)"))
self.create_trove_cat(
(640, 99, "dvd", "DVD", "Topic :: Multimedia :: DVD"))
self.create_trove_cat(
(641, 576, "workflow", "Workflow", "Topic :: Office/Business :: Enterprise :: Workflow"))
self.create_trove_cat((642, 292, "linuxdrivers", "Linux",
"Topic :: System :: Hardware :: Hardware Drivers :: Linux"))
self.create_trove_cat(
(643, 582, "uml", "UML", "Topic :: Software Development :: Design :: UML"))
self.create_trove_cat(
(644, 92, "cms", "CMS Systems", "Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CMS Systems"))
self.create_trove_cat(
(645, 92, "blogging", "Blogging", "Topic :: Internet :: WWW/HTTP :: Dynamic Content :: Blogging"))
self.create_trove_cat((646, 52, "subversion", "Subversion",
"Topic :: Software Development :: Version Control :: Subversion"))
self.create_trove_cat((647, 612, "webservices", "Web Services",
"Topic :: Formats and Protocols :: Protocols :: Web Services"))
self.create_trove_cat(
(648, 554, "json", "JSON", "Topic :: Formats and Protocols :: Data Formats :: JSON"))
self.create_trove_cat((649, 100, "imagegalleries", "Image Galleries",
"Topic :: Multimedia :: Graphics :: Image Galleries"))
self.create_trove_cat(
(650, 612, "ajax", "AJAX", "Topic :: Formats and Protocols :: Protocols :: AJAX"))
self.create_trove_cat(
(651, 92, "wiki", "Wiki", "Topic :: Internet :: WWW/HTTP :: Dynamic Content :: Wiki"))
self.create_trove_cat((652, 45, "appservers", "Application Servers",
"Topic :: Software Development :: Application Servers"))
self.create_trove_cat(
(653, 20, "rssreaders", "RSS Feed Readers", "Topic :: Communications :: RSS Feed Readers"))
self.create_trove_cat((654, 129, "ecommerce", "E-Commerce / Shopping",
"Topic :: Office/Business :: E-Commerce / Shopping"))
self.create_trove_cat(
(656, 99, "htpc", "Home Theater PC", "Topic :: Multimedia :: Home Theater PC"))
self.create_trove_cat(
(658, 22, "jabber", "Jabber", "Topic :: Communications :: Chat :: Jabber"))
self.create_trove_cat(
(659, 576, "enterprisebpm", "Business Performance Management",
"Topic :: Office/Business :: Enterprise :: Business Performance Management"))
self.create_trove_cat(
(660, 576, "enterprisebi", "Business Intelligence",
"Topic :: Office/Business :: Enterprise :: Business Intelligence"))
self.create_trove_cat(
(661, 75, "budgetingandforecasting", "Budgeting and Forecasting",
"Topic :: Office/Business :: Financial :: Budgeting and Forecasting"))
self.create_trove_cat(
(662, 497, "ingres", "Ingres", "Database Environment :: Network-based DBMS :: Ingres"))
self.create_trove_cat(
(663, 92, "socialnetworking", "Social Networking",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content :: Social Networking"))
self.create_trove_cat(
(664, 199, "virtualization", "Virtualization", "Operating System :: Virtualization"))
self.create_trove_cat(
(665, 664, "vmware", "VMware", "Operating System :: Virtualization :: VMware"))
self.create_trove_cat(
(666, 664, "xen", "Xen", "Operating System :: Virtualization :: Xen"))
self.create_trove_cat(
(667, 247, "voip", "VoIP", "Topic :: Communications :: Telephony :: VoIP"))
self.create_trove_cat((668, 92, "ticketing", "Ticketing Systems",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content :: Ticketing Systems"))
self.create_trove_cat((669, 315, "blackberryos", "Blackberry RIM OS",
"Operating System :: Handheld/Embedded Operating Systems :: Blackberry RIM OS"))
self.create_trove_cat((671, 14, "ms-pl", "Microsoft Public License",
"License :: OSI-Approved Open Source :: Microsoft Public License"))
self.create_trove_cat(
(672, 14, "ms-rl", "Microsoft Reciprocal License",
"License :: OSI-Approved Open Source :: Microsoft Reciprocal License"))
self.create_trove_cat((673, 576, "bsm", "Business Service Management",
"Topic :: Office/Business :: Enterprise :: Business Service Management"))
self.create_trove_cat((674, 673, "servicesupport", "Service Support",
"Topic :: Office/Business :: Enterprise :: Business Service Management :: Service Support")) # nopep8
self.create_trove_cat(
(675, 673, "serviceassurance", "Service Assurance",
"Topic :: Office/Business :: Enterprise :: Business Service Management :: Service Assurance"))
self.create_trove_cat(
(676, 673, "serviceautomation", "Service Automation",
"Topic :: Office/Business :: Enterprise :: Business Service Management :: Service Automation"))
self.create_trove_cat((677, 14, "artisticv2", "Artistic License 2.0",
"License :: OSI-Approved Open Source :: Artistic License 2.0"))
self.create_trove_cat(
(678, 14, "boostlicense", "Boost Software License (BSL1.0)",
"License :: OSI-Approved Open Source :: Boost Software License (BSL1.0)"))
self.create_trove_cat(
(681, 14, "isclicense", "ISC License", "License :: OSI-Approved Open Source :: ISC License"))
self.create_trove_cat((682, 14, "multicslicense", "Multics License",
"License :: OSI-Approved Open Source :: Multics License"))
self.create_trove_cat(
(683, 14, "ntplicense", "NTP License", "License :: OSI-Approved Open Source :: NTP License"))
self.create_trove_cat(
(684, 14, "nposl3", "Non-Profit Open Software License 3.0 (Non-Profit OSL 3.0)",
"License :: OSI-Approved Open Source :: Non-Profit Open Software License 3.0 (Non-Profit OSL 3.0)"))
self.create_trove_cat(
(685, 14, "rpl15", "Reciprocal Public License 1.5 (RPL1.5)",
"License :: OSI-Approved Open Source :: Reciprocal Public License 1.5 (RPL1.5)"))
self.create_trove_cat(
(686, 14, "splicense2", "Simple Public License 2.0",
"License :: OSI-Approved Open Source :: Simple Public License 2.0"))
self.create_trove_cat(
(687, 673, "cmdb", "Configuration Management Database (CMDB)",
"Topic :: Office/Business :: Enterprise :: Business Service Management :: Configuration Management Database (CMDB)")) # nopep8
self.create_trove_cat(
(688, 18, "mobileapps", "Mobile", "Topic :: Mobile"))
self.create_trove_cat((689, 315, "winmobile", "Windows Mobile",
"Operating System :: Handheld/Embedded Operating Systems :: Windows Mobile"))
self.create_trove_cat(
(690, 315, "brew", "BREW (Binary Runtime Environment for Wireless)",
"Operating System :: Handheld/Embedded Operating Systems :: BREW (Binary Runtime Environment for Wireless)")) # nopep8
self.create_trove_cat(
(691, 315, "j2me", "J2ME (Java Platform, Micro Edition)",
"Operating System :: Handheld/Embedded Operating Systems :: J2ME (Java Platform, Micro Edition)"))
self.create_trove_cat(
(692, 315, "maemo", "Maemo", "Operating System :: Handheld/Embedded Operating Systems :: Maemo"))
self.create_trove_cat((693, 315, "limo", "LiMo (Linux Mobile)",
"Operating System :: Handheld/Embedded Operating Systems :: LiMo (Linux Mobile)"))
self.create_trove_cat(
(694, 160, "clean", "Clean", "Programming Language :: Clean"))
self.create_trove_cat(
(695, 160, "lasso", "Lasso", "Programming Language :: Lasso"))
self.create_trove_cat(
(696, 160, "turing", "Turing", "Programming Language :: Turing"))
self.create_trove_cat(
(697, 160, "glsl", "GLSL (OpenGL Shading Language)",
"Programming Language :: GLSL (OpenGL Shading Language)"))
self.create_trove_cat(
(698, 160, "lazarus", "Lazarus", "Programming Language :: Lazarus"))
self.create_trove_cat(
(699, 160, "freepascal", "Free Pascal", "Programming Language :: Free Pascal"))
self.create_trove_cat(
(700, 160, "scriptol", "Scriptol", "Programming Language :: Scriptol"))
self.create_trove_cat(
(701, 160, "pl-i", "PL/I (Programming Language One)",
"Programming Language :: PL/I (Programming Language One)"))
self.create_trove_cat(
(702, 160, "oz", "Oz", "Programming Language :: Oz"))
self.create_trove_cat(
(703, 160, "limbo", "Limbo", "Programming Language :: Limbo"))
self.create_trove_cat(
(704, 160, "scala", "Scala", "Programming Language :: Scala"))
self.create_trove_cat(
(705, 160, "blitzmax", "BlitzMax", "Programming Language :: BlitzMax"))
self.create_trove_cat(
(706, 160, "xbaseclipper", "XBase/Clipper", "Programming Language :: XBase/Clipper"))
self.create_trove_cat(
(707, 160, "curl", "Curl", "Programming Language :: Curl"))
self.create_trove_cat(
(708, 160, "flex", "Flex", "Programming Language :: Flex"))
self.create_trove_cat(
(709, 160, "mathematica", "Mathematica", "Programming Language :: Mathematica"))
self.create_trove_cat(
(710, 160, "visualdataflex", "Visual DataFlex", "Programming Language :: Visual DataFlex"))
self.create_trove_cat(
(711, 160, "fenix", "Fenix", "Programming Language :: Fenix"))
self.create_trove_cat(
(713, 456, "vexi", "Vexi", "User Interface :: Graphical :: Vexi"))
self.create_trove_cat(
(714, 160, "kaya", "Kaya", "Programming Language :: Kaya"))
self.create_trove_cat((715, 160, "transcript-revolution",
"Transcript/Revolution", "Programming Language :: Transcript/Revolution"))
self.create_trove_cat(
(716, 160, "haXe", "haXe", "Programming Language :: haXe"))
self.create_trove_cat(
(717, 160, "proglangmeta", "Project is a programming language",
"Programming Language :: Project is a programming language"))
self.create_trove_cat((718, 634, "msxb360", "Microsoft Xbox 360",
"Operating System :: Other Operating Systems :: Console-based Platforms :: Microsoft Xbox 360")) # nopep8
self.create_trove_cat((719, 634, "nintendogc", "Nintendo GameCube",
"Operating System :: Other Operating Systems :: Console-based Platforms :: Nintendo GameCube")) # nopep8
self.create_trove_cat((720, 634, "nintendowii", "Nintendo Wii",
"Operating System :: Other Operating Systems :: Console-based Platforms :: Nintendo Wii")) # nopep8
self.create_trove_cat((721, 634, "sonyps3", "Sony PlayStation 3",
"Operating System :: Other Operating Systems :: Console-based Platforms :: Sony PlayStation 3")) # nopep8
self.create_trove_cat(
(722, 634, "sonypsp", "Sony PlayStation Portable (PSP)",
"Operating System :: Other Operating Systems :: Console-based Platforms :: Sony PlayStation Portable (PSP)")) # nopep8
self.create_trove_cat(
(723, 160, "scilab", "Scilab", "Programming Language :: Scilab"))
self.create_trove_cat(
(724, 160, "scicos", "Scicos", "Programming Language :: Scicos"))
self.create_trove_cat((725, 534, "management", "Management",
"Intended Audience :: by End-User Class :: Management"))
self.create_trove_cat(
(726, 71, "edadministration", "Administration", "Topic :: Education :: Administration"))
self.create_trove_cat(
(727, 97, "mechcivileng", "Mechanical and Civil Engineering",
"Topic :: Scientific/Engineering :: Mechanical and Civil Engineering"))
self.create_trove_cat((729, 535, "audienceengineering", "Engineering",
"Intended Audience :: by Industry or Sector :: Engineering"))
self.create_trove_cat(
(730, 274, "basque", "Basque (Euskara)", "Translations :: Basque (Euskara)"))
self.create_trove_cat(
(731, 14, "classpath", "GNU General Public License with Classpath exception (Classpath::License)",
"License :: OSI-Approved Open Source :: GNU General Public License with Classpath exception (Classpath::License)")) # nopep8
self.create_trove_cat(
(732, 727, "caddcam", "Computer-aided technologies (CADD/CAM/CAE)",
"Topic :: Scientific/Engineering :: Mechanical and Civil Engineering :: Computer-aided technologies (CADD/CAM/CAE)")) # nopep8
self.create_trove_cat((733, 576, "humanresources", "Human Resources",
"Topic :: Office/Business :: Enterprise :: Human Resources"))
self.create_trove_cat(
(734, 554, "mcml", "Media Center Markup Language (MCML)",
"Topic :: Formats and Protocols :: Data Formats :: Media Center Markup Language (MCML)"))
self.create_trove_cat(
(735, 461, "nsis", "Nullsoft Scriptable Install System (NSIS)",
"User Interface :: Plugins :: Nullsoft Scriptable Install System (NSIS)"))
self.create_trove_cat(
(736, 97, "scada", "SCADA", "Topic :: Scientific/Engineering :: SCADA"))
self.create_trove_cat(
(737, 461, "autohotkey", "AutoHotkey", "User Interface :: Plugins :: AutoHotkey"))
self.create_trove_cat(
(738, 160, "autoit", "AutoIt", "Programming Language :: AutoIt"))
self.create_trove_cat((739, 132, "humanitarianism", "Humanitarianism",
"Topic :: Religion and Philosophy :: Humanitarianism"))
self.create_trove_cat(
(740, 129, "insurance", "Insurance", "Topic :: Office/Business :: Insurance"))
self.create_trove_cat(
(741, 97, "linguistics", "Linguistics", "Topic :: Scientific/Engineering :: Linguistics"))
self.create_trove_cat(
(742, 741, "machinetranslation", "Machine Translation",
"Topic :: Scientific/Engineering :: Linguistics :: Machine Translation"))
self.create_trove_cat(
(743, 43, "antispam", "Anti-Spam", "Topic :: Security :: Anti-Spam"))
self.create_trove_cat(
(744, 43, "antivirus", "Anti-Virus", "Topic :: Security :: Anti-Virus"))
self.create_trove_cat(
(745, 43, "antimalware", "Anti-Malware", "Topic :: Security :: Anti-Malware"))
self.create_trove_cat((746, 554, "autocaddxf", "AutoCAD DXF",
"Topic :: Formats and Protocols :: Data Formats :: AutoCAD DXF"))
self.create_trove_cat(
(747, 75, "billing", "Billing", "Topic :: Office/Business :: Financial :: Billing"))
self.create_trove_cat(
(748, 576, "processmanagement", "Business Process Management",
"Topic :: Office/Business :: Enterprise :: Business Process Management"))
self.create_trove_cat(
(749, 136, "embedded", "Embedded systems", "Topic :: System :: Embedded systems"))
self.create_trove_cat(
(750, 456, "magicui", "Magic User Interface (MUI)",
"User Interface :: Graphical :: Magic User Interface (MUI)"))
self.create_trove_cat(
(751, 237, "xul", "XUL", "User Interface :: Web-based :: XUL"))
self.create_trove_cat(
(752, 80, "flightsim", "Flight simulator", "Topic :: Games/Entertainment :: Flight simulator"))
self.create_trove_cat(
(753, 63, "vivim", "Vi/Vim", "Topic :: Text Editors :: Vi/Vim"))
self.create_trove_cat(
(754, 45, "sourceanalysis", "Source code analysis",
"Topic :: Software Development :: Source code analysis"))
self.create_trove_cat(
(755, 45, "sourcebrowsing", "Source code browsing",
"Topic :: Software Development :: Source code browsing"))
self.create_trove_cat(
(756, 576, "plm", "Product lifecycle management (PLM)",
"Topic :: Office/Business :: Enterprise :: Product lifecycle management (PLM)"))
self.create_trove_cat(
(757, 274, "breton", "Breton", "Translations :: Breton"))
self.create_trove_cat((758, 498, "db4o", "db4objects (db4o)",
"Database Environment :: File-based DBMS :: db4objects (db4o)"))
self.create_trove_cat(
(759, 497, "nexusdb", "NexusDB", "Database Environment :: Network-based DBMS :: NexusDB"))
self.create_trove_cat(
(760, 160, "prism", "Prism", "Programming Language :: Prism"))
self.create_trove_cat(
(761, 45, "collaborative", "Collaborative development tools",
"Topic :: Software Development :: Collaborative development tools"))
self.create_trove_cat((762, 91, "pluginsaddons", "Plugins and add-ons",
"Topic :: Internet :: WWW/HTTP :: Browsers :: Plugins and add-ons"))
self.create_trove_cat(
(763, 456, "winaero", "Windows Aero", "User Interface :: Graphical :: Windows Aero"))
self.create_trove_cat((764, 45, "agile", "Agile development tools",
"Topic :: Software Development :: Agile development tools"))
self.create_trove_cat((765, 535, "agriculture", "Agriculture",
"Intended Audience :: by Industry or Sector :: Agriculture"))
self.create_trove_cat(
(766, 100, "animation", "Animation", "Topic :: Multimedia :: Graphics :: Animation"))
self.create_trove_cat(
(767, 45, "assemblers", "Assemblers", "Topic :: Software Development :: Assemblers"))
self.create_trove_cat((768, 535, "automotive", "Automotive",
"Intended Audience :: by Industry or Sector :: Automotive"))
self.create_trove_cat((769, 554, "CSV", "Comma-separated values (CSV)",
"Topic :: Formats and Protocols :: Data Formats :: Comma-separated values (CSV)"))
self.create_trove_cat(
(770, 45, "softdevlibraries", "Libraries", "Topic :: Software Development :: Libraries"))
self.create_trove_cat((771, 45, "sourcereview", "Source code review",
"Topic :: Software Development :: Source code review"))
self.create_trove_cat(
(772, 80, "hobbies", "Hobbies", "Topic :: Games/Entertainment :: Hobbies"))
self.create_trove_cat(
(773, 772, "collectionmanage", "Collection management",
"Topic :: Games/Entertainment :: Hobbies :: Collection management"))
self.create_trove_cat(
(774, 80, "multiplayer", "Multiplayer", "Topic :: Games/Entertainment :: Multiplayer"))
self.create_trove_cat(
(775, 80, "mmorpg", "MMORPG", "Topic :: Games/Entertainment :: MMORPG"))
self.create_trove_cat(
(776, 97, "mapping", "Mapping", "Topic :: Scientific/Engineering :: Mapping"))
self.create_trove_cat(
(777, 776, "gps", "GPS (Global Positioning System)",
"Topic :: Scientific/Engineering :: Mapping :: GPS (Global Positioning System)"))
self.create_trove_cat(
(778, 43, "passwordmanage", "Password manager", "Topic :: Security :: Password manager"))
self.create_trove_cat(
(779, 315, "linksyswrt54g", "Linksys WRT54G series",
"Operating System :: Handheld/Embedded Operating Systems :: Linksys WRT54G series"))
self.create_trove_cat((781, 576, "medhealth", "Medical/Healthcare",
"Topic :: Office/Business :: Enterprise :: Medical/Healthcare"))
self.create_trove_cat(
(782, 45, "bined", "Binary editors", "Topic :: Software Development :: Binary editors"))
self.create_trove_cat(
(783, 99, "mmcatalog", "Cataloguing", "Topic :: Multimedia :: Cataloguing"))
self.create_trove_cat(
(784, 113, "composition", "Composition", "Topic :: Multimedia :: Sound/Audio :: Composition"))
self.create_trove_cat(
(785, 772, "cooking", "Cooking", "Topic :: Games/Entertainment :: Hobbies :: Cooking"))
self.create_trove_cat(
(786, 136, "cron", "Cron and scheduling", "Topic :: System :: Cron and scheduling"))
self.create_trove_cat(
(787, 638, "recovery", "Data recovery", "Topic :: System :: Storage :: Data recovery"))
self.create_trove_cat(
(788, 87, "otherfile", "Other file transfer protocol",
"Topic :: Internet :: Other file transfer protocol"))
self.create_trove_cat((789, 581, "digpreserve", "Digital preservation",
"Topic :: Education :: Library :: Digital preservation"))
self.create_trove_cat((790, 251, "directconnect", "Direct Connect",
"Topic :: Communications :: File Sharing :: Direct Connect"))
self.create_trove_cat(
(791, 129, "dtp", "Desktop Publishing", "Topic :: Office/Business :: Desktop Publishing"))
self.create_trove_cat(
(792, 580, "etl", "ETL", "Topic :: Office/Business :: Enterprise :: Data Warehousing :: ETL"))
self.create_trove_cat(
(793, 55, "fonts", "Fonts", "Topic :: Desktop Environment :: Fonts"))
self.create_trove_cat(
(794, 80, "gameframeworks", "Game development framework",
"Topic :: Games/Entertainment :: Game development framework"))
self.create_trove_cat((795, 100, "handrec", "Handwriting recognition",
"Topic :: Multimedia :: Graphics :: Handwriting recognition"))
self.create_trove_cat(
(796, 136, "homeauto", "Home Automation", "Topic :: System :: Home Automation"))
self.create_trove_cat(
(797, 63, "translation", "Computer Aided Translation (CAT)",
"Topic :: Text Editors :: Computer Aided Translation (CAT)"))
self.create_trove_cat(
(798, 136, "osdistro", "OS distribution", "Topic :: System :: OS distribution"))
self.create_trove_cat(
(799, 798, "livecd", "Live CD", "Topic :: System :: OS distribution :: Live CD"))
self.create_trove_cat((800, 497, "lotusnotes", "Lotus Notes/Domino",
"Database Environment :: Network-based DBMS :: Lotus Notes/Domino"))
self.create_trove_cat(
(801, 160, "lotusscript", "LotusScript", "Programming Language :: LotusScript"))
self.create_trove_cat((802, 133, "machinelearning", "Machine Learning",
"Topic :: Scientific/Engineering :: Artificial Intelligence :: Machine Learning"))
self.create_trove_cat((803, 106, "metadata", "Metadata editors",
"Topic :: Multimedia :: Graphics :: Editors :: Metadata editors"))
self.create_trove_cat(
(804, 236, "riscos", "RISC OS", "Operating System :: Other Operating Systems :: RISC OS"))
self.create_trove_cat(
(805, 282, "politics", "Politics", "Topic :: Social sciences :: Politics"))
self.create_trove_cat(
(806, 80, "sports", "Sports", "Topic :: Games/Entertainment :: Sports"))
self.create_trove_cat(
(807, 282, "psychology", "Psychology", "Topic :: Social sciences :: Psychology"))
self.create_trove_cat(
(808, 458, "ogre3d", "Ogre3D", "User Interface :: Toolkits/Libraries :: Ogre3D"))
self.create_trove_cat(
(809, 45, "orm", "ORM (Object-relational mapping)",
"Topic :: Software Development :: ORM (Object-relational mapping)"))
self.create_trove_cat((810, 575, "perftest", "Performance Testing",
"Topic :: Software Development :: Testing :: Performance Testing"))
self.create_trove_cat((811, 75, "personalfinance", "Personal finance",
"Topic :: Office/Business :: Financial :: Personal finance"))
self.create_trove_cat((812, 499, "pearmdb2", "PHP Pear::MDB2",
"Database Environment :: Database API :: PHP Pear::MDB2"))
self.create_trove_cat(
(813, 461, "intellij", "IntelliJ", "User Interface :: Plugins :: IntelliJ"))
self.create_trove_cat((814, 554, "postscript", "PostScript",
"Topic :: Formats and Protocols :: Data Formats :: PostScript"))
self.create_trove_cat(
(815, 100, "fractals", "Fractals and Procedural Generation",
"Topic :: Multimedia :: Graphics :: Fractals and Procedural Generation"))
self.create_trove_cat((816, 554, "w3cvoice", "W3C Voice",
"Topic :: Formats and Protocols :: Data Formats :: W3C Voice"))
self.create_trove_cat((817, 97, "quantumcomp", "Quantum Computing",
"Topic :: Scientific/Engineering :: Quantum Computing"))
self.create_trove_cat(
(818, 129, "reportgen", "Report Generators", "Topic :: Office/Business :: Report Generators"))
self.create_trove_cat(
(819, 581, "research", "Research", "Topic :: Education :: Library :: Research"))
self.create_trove_cat(
(820, 87, "ssh", "SSH (Secure SHell)", "Topic :: Internet :: SSH (Secure SHell)"))
self.create_trove_cat(
(821, 554, "semantic", "Semantic Web (RDF, OWL, etc.)",
"Topic :: Formats and Protocols :: Data Formats :: Semantic Web (RDF, OWL, etc.)"))
self.create_trove_cat(
(822, 90, "socialbookmarking", "Social Bookmarking",
"Topic :: Internet :: WWW/HTTP :: Social Bookmarking"))
self.create_trove_cat(
(823, 20, "synchronization", "Synchronization", "Topic :: Communications :: Synchronization"))
self.create_trove_cat(
(824, 45, "templates", "Templates", "Topic :: Software Development :: Templates"))
self.create_trove_cat((825, 97, "testmeasure", "Test and Measurement",
"Topic :: Scientific/Engineering :: Test and Measurement"))
self.create_trove_cat((826, 98, "statistics", "Statistics",
"Topic :: Scientific/Engineering :: Mathematics :: Statistics"))
self.create_trove_cat(
(827, 129, "knowledgemanagement", "Knowledge Management",
"Topic :: Office/Business :: Knowledge Management"))
self.create_trove_cat(
(828, 147, "unattended", "Unattended", "Topic :: System :: Installation/Setup :: Unattended"))
self.create_trove_cat(
(829, 457, "emailinterface", "Email-based interface",
"User Interface :: Textual :: Email-based interface"))
self.create_trove_cat(
(830, 282, "voting", "Voting", "Topic :: Social sciences :: Voting"))
self.create_trove_cat((831, 27, "webconferencing", "Web Conferencing",
"Topic :: Communications :: Conferencing :: Web Conferencing"))
self.create_trove_cat(
(832, 27, "videoconferencing", "Video Conferencing",
"Topic :: Communications :: Conferencing :: Video Conferencing"))
self.create_trove_cat(
(833, 160, "objectivec2", "Objective-C 2.0", "Programming Language :: Objective-C 2.0"))
self.create_trove_cat(
(834, 274, "georgian", "Georgian", "Translations :: Georgian"))
self.create_trove_cat(
(835, 499, "adonet", "ADO.NET", "Database Environment :: Database API :: ADO.NET"))
self.create_trove_cat(
(836, 554, "xbrl", "XBRL", "Topic :: Formats and Protocols :: Data Formats :: XBRL"))
self.create_trove_cat(
(837, 461, "excel", "Excel", "User Interface :: Plugins :: Excel"))
self.create_trove_cat(
(838, 160, "visualbasicforapplications", "Visual Basic for Applications (VBA)",
"Programming Language :: Visual Basic for Applications (VBA)"))
self.create_trove_cat(
(839, 160, "booprogramminglang", "Boo", "Programming Language :: Boo"))
self.create_trove_cat(
(840, 52, "git", "Git", "Topic :: Software Development :: Version Control :: Git"))
self.create_trove_cat((841, 52, "mercurial", "Mercurial",
"Topic :: Software Development :: Version Control :: Mercurial"))
self.create_trove_cat(
(842, 52, "bazaar", "Bazaar", "Topic :: Software Development :: Version Control :: Bazaar"))
self.create_trove_cat(
(843, 14, "eupublicense", "European Union Public License",
"License :: OSI-Approved Open Source :: European Union Public License"))
self.create_trove_cat((844, 14, "ipafontlicense", "IPA Font License",
"License :: OSI-Approved Open Source :: IPA Font License"))
self.create_trove_cat((845, 14, "miroslicense", "MirOS License",
"License :: OSI-Approved Open Source :: MirOS License"))
self.create_trove_cat(
(846, 14, "openfontlicense11", "Open Font License 1.1 (OFL 1.1)",
"License :: OSI-Approved Open Source :: Open Font License 1.1 (OFL 1.1)"))
self.create_trove_cat(
(847, 80, "realtimetactical", "Real Time Tactical",
"Topic :: Games/Entertainment :: Real Time Tactical"))
self.create_trove_cat(
(848, 160, "algol68", "ALGOL 68", "Programming Language :: ALGOL 68"))
self.create_trove_cat((849, 92, "groupware", "Groupware",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content :: Groupware"))
self.create_trove_cat(
(850, 576, "businesscontinuity", "Business Continuity",
"Topic :: Office/Business :: Enterprise :: Business Continuity"))
self.create_trove_cat(
(852, 554, "teiformat", "TEI", "Topic :: Formats and Protocols :: Data Formats :: TEI"))
self.create_trove_cat(
(853, 160, "clarion", "Clarion", "Programming Language :: Clarion"))
self.create_trove_cat(
(854, 576, "sales", "Sales", "Topic :: Office/Business :: Enterprise :: Sales"))
self.create_trove_cat((855, 97, "buildingauto", "Building Automation",
"Topic :: Scientific/Engineering :: Building Automation"))
self.create_trove_cat(
(856, 129, "businessmodelling", "Modelling", "Topic :: Office/Business :: Modelling"))
self.create_trove_cat(
(857, 150, "routing", "Routing", "Topic :: System :: Networking :: Routing"))
self.create_trove_cat((858, 97, "medicalphysics", "Medical Physics",
"Topic :: Scientific/Engineering :: Medical Physics"))
self.create_trove_cat(
(859, 71, "edlanguage", "Languages", "Topic :: Education :: Languages"))
self.create_trove_cat((860, 97, "molecularmech", "Molecular Mechanics",
"Topic :: Scientific/Engineering :: Molecular Mechanics"))
self.create_trove_cat(
(861, 148, "loganalysis", "Log Analysis", "Topic :: System :: Logging :: Log Analysis"))
def m__set_parent_only(self):
parent_only_ids = [1, 225, 274, 160, 496, 6, 13, 199, 18, 535, 534, 14,
611, 612, 432, 500, 426, 315, 418, 236, 457, 458, 456, 497, 499, 498]
troves = M.TroveCategory.query.find(
dict(trove_cat_id={'$in': parent_only_ids})).all()
for t in troves:
t.parent_only = True
def m__add_license(self):
self.update_trove_cat(
16, dict(
fullname="GNU Library or Lesser General Public License version 2.0 (LGPLv2)",
fullpath="License :: OSI-Approved Open Source :: GNU Library or Lesser General Public License version 2.0 (LGPLv2)")) # nopep8
self.update_trove_cat(
15, dict(fullname="GNU General Public License version 2.0 (GPLv2)",
fullpath="License :: OSI-Approved Open Source :: GNU General Public License version 2.0 (GPLv2)"))
self.update_trove_cat(
670, dict(trove_cat_id=628, fullname="Affero GNU Public License"))
self.create_trove_cat(
(868, 13, "ccal", "Creative Commons Attribution License",
"License :: Creative Commons Attribution License"))
self.create_trove_cat(
(869, 868, "ccaslv2", "Creative Commons Attribution ShareAlike License V2.0",
"License :: Creative Commons Attribution License :: Creative Commons Attribution ShareAlike License V2.0")) # nopep8
self.create_trove_cat(
(870, 868, "ccaslv3", "Creative Commons Attribution ShareAlike License V3.0",
"License :: Creative Commons Attribution License :: Creative Commons Attribution ShareAlike License V3.0")) # nopep8
self.create_trove_cat(
(871, 868, "ccanclv2", "Creative Commons Attribution Non-Commercial License V2.0",
"License :: Creative Commons Attribution License :: Creative Commons Attribution Non-Commercial License V2.0")) # nopep8
self.create_trove_cat(
(680, 14, "lgplv3", "GNU Library or Lesser General Public License version 3.0 (LGPLv3)",
"License :: OSI-Approved Open Source :: GNU Library or Lesser General Public License version 3.0 (LGPLv3)")) # nopep8
self.create_trove_cat(
(679, 14, "gplv3", "GNU General Public License version 3.0 (GPLv3)",
"License :: OSI-Approved Open Source :: GNU General Public License version 3.0 (GPLv3)"))
M.TroveCategory(trove_cat_id=905,
trove_parent_id=14,
shortname='mpl20',
fullname='Mozilla Public License 2.0 (MPL 2.0)',
fullpath='License :: OSI-Approved Open Source :: Mozilla Public License 2.0 (MPL 2.0)')
def m__set_show_as_skills(self):
categories_regex = '|'.join([
'Translations',
'Programming Language',
'User Interface',
'Database Environment',
'Operating System',
'Topic',
])
M.TroveCategory.query.update(
{'fullname': re.compile(r'^(%s)' % categories_regex)},
{'$set': {'show_as_skill': True}},
multi=True)
| {
"content_hash": "415817db6b6d373595ab881bb0c05d48",
"timestamp": "",
"source": "github",
"line_count": 2012,
"max_line_length": 150,
"avg_line_length": 66.45029821073558,
"alnum_prop": 0.5704946970036949,
"repo_name": "heiths/allura",
"id": "288117c1f8377c1e56ab925dbfd9f4dec6221f34",
"size": "134568",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Allura/allura/command/create_trove_categories.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "6142"
},
{
"name": "CSS",
"bytes": "173671"
},
{
"name": "HTML",
"bytes": "751039"
},
{
"name": "JavaScript",
"bytes": "1136845"
},
{
"name": "Makefile",
"bytes": "7788"
},
{
"name": "Puppet",
"bytes": "6872"
},
{
"name": "Python",
"bytes": "4238265"
},
{
"name": "RAML",
"bytes": "26153"
},
{
"name": "Ruby",
"bytes": "7006"
},
{
"name": "Shell",
"bytes": "131827"
},
{
"name": "XSLT",
"bytes": "3357"
}
],
"symlink_target": ""
} |
from flask import current_app
from werkzeug.local import LocalProxy
# Application Context
# ===================
#
# Readability proxies
def _get_workspace():
return current_app.cubes_workspace
def _get_logger():
return current_app.cubes_workspace.logger
workspace = LocalProxy(_get_workspace)
logger = LocalProxy(_get_logger)
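# --- Editor's addition: illustrative usage sketch, not part of the upstream module. ---
# Inside a Flask application/request context the proxies resolve lazily, so they can be
# imported at module load time before any app exists. The route name and the "sales"
# cube below are hypothetical examples.
#
#     from cubes.server.local import workspace, logger
#
#     @app.route("/cube-info")
#     def cube_info():
#         logger.debug("looking up cube")       # -> current_app.cubes_workspace.logger
#         browser = workspace.browser("sales")  # -> current_app.cubes_workspace
#         return browser.cube.name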
| {
"content_hash": "1bcd2b5ab97adf5c3449d73a025722ba",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 45,
"avg_line_length": 19.941176470588236,
"alnum_prop": 0.7168141592920354,
"repo_name": "she11c0de/cubes",
"id": "434f005eba0db12534e7bc9c3da4fbae79b029bf",
"size": "363",
"binary": false,
"copies": "10",
"ref": "refs/heads/unicode-fix",
"path": "cubes/server/local.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "38599"
},
{
"name": "HTML",
"bytes": "66157"
},
{
"name": "JavaScript",
"bytes": "362898"
},
{
"name": "Python",
"bytes": "795339"
},
{
"name": "VimL",
"bytes": "2215"
}
],
"symlink_target": ""
} |
"""Returns points that minimizes the maximum distance of any point to a center.
Implements the k-Center-Greedy method in
Ozan Sener and Silvio Savarese. A Geometric Approach to Active Learning for
Convolutional Neural Networks. https://arxiv.org/abs/1708.00489 2017
Distance metric defaults to l2 distance. Features used to calculate distance
are either the raw features or, if the model has a transform method, the output
of model.transform(X).
Can be extended to a robust k-centers algorithm that ignores a certain number of
outlier datapoints. The resulting centers are then the solution to a mixed integer program.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from sklearn.metrics import pairwise_distances
from sampling_methods.sampling_def import SamplingMethod
class kCenterGreedy(SamplingMethod):
def __init__(self, X, y, seed, metric='euclidean'):
self.X = X
self.y = y
self.flat_X = self.flatten_X()
self.name = 'kcenter'
self.features = self.flat_X
self.metric = metric
self.min_distances = None
self.n_obs = self.X.shape[0]
self.already_selected = []
def update_distances(self, cluster_centers, only_new=True, reset_dist=False):
"""Update min distances given cluster centers.
Args:
cluster_centers: indices of cluster centers
only_new: only calculate distance for newly selected points and update
min_distances.
      reset_dist: whether to reset min_distances.
"""
if reset_dist:
self.min_distances = None
if only_new:
cluster_centers = [d for d in cluster_centers
if d not in self.already_selected]
if cluster_centers:
# Update min_distances for all examples given new cluster center.
x = self.features[cluster_centers]
dist = pairwise_distances(self.features, x, metric=self.metric)
if self.min_distances is None:
self.min_distances = np.min(dist, axis=1).reshape(-1,1)
else:
self.min_distances = np.minimum(self.min_distances, dist)
def select_batch_(self, model, already_selected, N, **kwargs):
"""
Diversity promoting active learning method that greedily forms a batch
to minimize the maximum distance to a cluster center among all unlabeled
datapoints.
Args:
model: model with scikit-like API with decision_function implemented
already_selected: index of datapoints already selected
N: batch size
Returns:
indices of points selected to minimize distance to cluster centers
"""
try:
# Assumes that the transform function takes in original data and not
# flattened data.
print('Getting transformed features...')
self.features = model.transform(self.X)
print('Calculating distances...')
self.update_distances(already_selected, only_new=False, reset_dist=True)
except:
print('Using flat_X as features.')
self.update_distances(already_selected, only_new=True, reset_dist=False)
new_batch = []
for _ in range(N):
if self.already_selected is None:
# Initialize centers with a randomly selected datapoint
ind = np.random.choice(np.arange(self.n_obs))
else:
ind = np.argmax(self.min_distances)
# New examples should not be in already selected since those points
# should have min_distance of zero to a cluster center.
assert ind not in already_selected
self.update_distances([ind], only_new=True, reset_dist=False)
new_batch.append(ind)
print('Maximum distance from cluster centers is %0.2f'
% max(self.min_distances))
self.already_selected = already_selected
return new_batch
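# --- Editor's addition: minimal usage sketch, not part of the original module. ---
# Assumes SamplingMethod.flatten_X simply reshapes X to (n_obs, -1); passing model=None
# exercises the fallback path above that matches on the raw (flattened) features.
if __name__ == '__main__':
  rng = np.random.RandomState(0)
  X = rng.rand(100, 8)  # 100 synthetic unlabeled points with 8 features each
  sampler = kCenterGreedy(X, y=None, seed=0)
  batch = sampler.select_batch_(model=None, already_selected=[0], N=5)
  print('greedily selected indices:', batch)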
| {
"content_hash": "2ea461ceafdd9fb9256a033f75a43a1b",
"timestamp": "",
"source": "github",
"line_count": 109,
"max_line_length": 80,
"avg_line_length": 34.51376146788991,
"alnum_prop": 0.6972355130249868,
"repo_name": "google/active-learning",
"id": "ff7e548955921a46815f6354630b3fe1f1c5c09c",
"size": "4339",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sampling_methods/kcenter_greedy.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "126870"
}
],
"symlink_target": ""
} |
"""
CiscoTpTcCeSSH Class
Class to manage Cisco Telepresence endpoints on TC/CE software releases. Also works for
Cisco Expressway/VCS.
Written by Ahmad Barrin
"""
from __future__ import unicode_literals
import time
import re
from netmiko.cisco_base_connection import CiscoSSHConnection
class CiscoTpTcCeSSH(CiscoSSHConnection):
def __init__(self, *args, **kwargs):
default_enter = kwargs.get('default_enter')
kwargs['default_enter'] = '\r\n' if default_enter is None else default_enter
super(CiscoTpTcCeSSH, self).__init__(*args, **kwargs)
def disable_paging(self, *args, **kwargs):
"""Paging is disabled by default."""
return ""
def session_preparation(self):
"""
Prepare the session after the connection has been established
        This method handles some of the vagaries that occur between various devices
early on in the session.
In general, it should include:
self.set_base_prompt()
self.disable_paging()
self.set_terminal_width()
"""
self._test_channel_read()
self.set_base_prompt()
self.disable_paging()
self.set_terminal_width()
# Clear the read buffer
time.sleep(.3 * self.global_delay_factor)
self.clear_buffer()
def set_base_prompt(self, *args, **kwargs):
"""Use 'OK' as base_prompt."""
self.base_prompt = 'OK'
return self.base_prompt
def find_prompt(self, *args, **kwargs):
"""Use 'OK' as standard prompt."""
return 'OK'
def strip_prompt(self, a_string):
"""Strip the trailing router prompt from the output."""
expect_string = r'^(OK|ERROR|Command not recognized\.)$'
response_list = a_string.split(self.RESPONSE_RETURN)
last_line = response_list[-1]
if re.search(expect_string, last_line):
return self.RESPONSE_RETURN.join(response_list[:-1])
else:
return a_string
def send_command(self, *args, **kwargs):
'''
        Send command to network device and retrieve output until router_prompt or expect_string
By default this method will keep waiting to receive data until the network device prompt is
detected. The current network device prompt will be determined automatically.
command_string = command to execute
expect_string = pattern to search for uses re.search (use raw strings)
delay_factor = decrease the initial delay before we start looking for data
max_loops = number of iterations before we give up and raise an exception
strip_prompt = strip the trailing prompt from the output
strip_command = strip the leading command from the output
'''
if len(args) >= 2:
expect_string = args[1]
else:
expect_string = kwargs.get('expect_string')
if expect_string is None:
expect_string = r'(OK|ERROR|Command not recognized\.)'
expect_string = self.RETURN + expect_string + self.RETURN
kwargs.setdefault('expect_string', expect_string)
output = super(CiscoSSHConnection, self).send_command(*args, **kwargs)
return output
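# --- Editor's addition: illustrative usage sketch, not part of the upstream driver. ---
# Host and credentials are placeholders; the class is instantiated directly here rather
# than via ConnectHandler to avoid assuming the exact device_type string it is
# registered under in the dispatcher.
#
#     from netmiko.cisco.cisco_tp_tcce import CiscoTpTcCeSSH
#
#     conn = CiscoTpTcCeSSH(host='codec.example.com', username='admin', password='secret')
#     # expect_string defaults to OK/ERROR, so plain API commands work unmodified:
#     print(conn.send_command('xStatus SystemUnit Uptime'))
#     conn.disconnect()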
| {
"content_hash": "170062f72748936fc0f043071bf7e3f6",
"timestamp": "",
"source": "github",
"line_count": 87,
"max_line_length": 99,
"avg_line_length": 37.172413793103445,
"alnum_prop": 0.6366728509585653,
"repo_name": "isidroamv/netmiko",
"id": "0bd20714d527f4b5c713afb28e80962e21ad65ea",
"size": "3234",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "netmiko/cisco/cisco_tp_tcce.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "244012"
},
{
"name": "Shell",
"bytes": "10760"
}
],
"symlink_target": ""
} |
import numpy as np
import cv2
import cvui
WINDOW_NAME = 'Button shortcut'
def main():
frame = np.zeros((150, 650, 3), np.uint8)
# Init cvui and tell it to use a value of 20 for cv2.waitKey()
# because we want to enable keyboard shortcut for
# all components, e.g. button with label "&Quit".
# If cvui has a value for waitKey, it will call
# waitKey() automatically for us within cvui.update().
cvui.init(WINDOW_NAME, 20);
while (True):
# Fill the frame with a nice color
frame[:] = (49, 52, 49)
cvui.text(frame, 40, 40, 'To exit this app click the button below or press Q (shortcut for the button below).')
# Exit the application if the quit button was pressed.
# It can be pressed because of a mouse click or because
# the user pressed the "q" key on the keyboard, which is
# marked as a shortcut in the button label ("&Quit").
if cvui.button(frame, 300, 80, "&Quit"):
break
# Since cvui.init() received a param regarding waitKey,
# there is no need to call cv.waitKey() anymore. cvui.update()
# will do it automatically.
cvui.update()
cv2.imshow(WINDOW_NAME, frame)
if __name__ == '__main__':
main()
| {
"content_hash": "f518686f6beee0f9c943604b10da1f66",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 113,
"avg_line_length": 30.394736842105264,
"alnum_prop": 0.6805194805194805,
"repo_name": "Dovyski/cvui",
"id": "a9bb5b186409b7994bb100626bc0356c7a2cf655",
"size": "1402",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "example/src/button-shortcut/button-shortcut.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "191997"
},
{
"name": "CMake",
"bytes": "17560"
},
{
"name": "Python",
"bytes": "168294"
},
{
"name": "Shell",
"bytes": "2665"
}
],
"symlink_target": ""
} |
"""
https://leetcode.com/problems/basic-calculator-ii/
https://leetcode.com/submissions/detail/111848037/
"""
class Solution(object):
def calculate(self, s):
"""
:type s: str
:rtype: int
"""
array = []
index = 0
numbers = '0123456789'
number = ''
sign = '+'
while index < len(s) + 1:
if index < len(s) and s[index] in numbers:
number += s[index]
index += 1
continue
if number != '':
if sign == '+':
array.append(int(number))
elif sign == '-':
array.append(-int(number))
elif sign == '*':
array.append(array.pop() * int(number))
elif sign == '/':
prevNumber = array.pop()
if prevNumber < 0:
array.append(-int(-prevNumber / int(number)))
else:
array.append(int(prevNumber / int(number)))
number = ''
if index < len(s) and s[index] != ' ':
sign = s[index]
index += 1
return sum(array)
def calculate1(self, s):
"""
:type s: str
:rtype: int
"""
index = 0
numbers = '0123456789'
operators = '+-*/'
liftedOperators = '*/'
program = []
while index < len(s):
current = s[index]
if current == ' ':
index += 1
continue
if current in numbers:
number = ''
while index < len(s) and s[index] in numbers:
number += s[index]
index += 1
program.append({'type': 'number', 'value': int(number)})
continue
if current in operators:
program.append({'type': 'operator', 'value': current})
index += 1
continue
i = 0
while i < len(program):
if program[i].get('type') == 'number':
if i + 2 < len(program) and program[i + 1].get('value') in liftedOperators:
ope = program[i + 1].get('value')
if ope == '*':
token = {
'type': 'number',
'value': program[i].get('value') * program[i + 2].get('value')
}
else:
token = {
'type': 'number',
'value': int(program[i].get('value') / program[i + 2].get('value'))
}
program.pop(i)
program.pop(i)
program.pop(i)
program.insert(i, token)
continue
i += 1
while len(program) > 1:
if program[1].get('value') == '+':
token = {
'type': 'number',
'value': program[0].get('value') + program[2].get('value')
}
else:
token = {
'type': 'number',
'value': program[0].get('value') - program[2].get('value')
}
program.pop(0)
program.pop(0)
program.pop(0)
program.insert(0, token)
return program[0].get('value')
import unittest
class Test(unittest.TestCase):
def test(self):
solution = Solution()
self.assertEqual(solution.calculate('14-3/2'), 13)
self.assertEqual(solution.calculate('1-1+1'), 1)
self.assertEqual(solution.calculate(' 3/2 '), 1)
self.assertEqual(solution.calculate('0-2147483647'), -2147483647)
self.assertEqual(solution.calculate(' 30'), 30)
self.assertEqual(solution.calculate('3+2*2'), 7)
if __name__ == '__main__':
unittest.main()
| {
"content_hash": "1018f0c9db509cda25f8da9c05dd322d",
"timestamp": "",
"source": "github",
"line_count": 123,
"max_line_length": 95,
"avg_line_length": 32.89430894308943,
"alnum_prop": 0.4112703905091448,
"repo_name": "vivaxy/algorithms",
"id": "a6cd798f3fecf0cfe067c9d7d8af4dd381d84c54",
"size": "4046",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/problems/basic_calculator_ii.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "130225"
},
{
"name": "Python",
"bytes": "272982"
},
{
"name": "Shell",
"bytes": "439"
}
],
"symlink_target": ""
} |
import os
import time
import imath
import IECore
import Gaffer
import GafferScene
import GafferUI
import GafferSceneUI
scriptWindow = GafferUI.ScriptWindow.acquire( script )
viewer = scriptWindow.getLayout().editors( GafferUI.Viewer )[0]
graphEditor = scriptWindow.getLayout().editors( GafferUI.GraphEditor )[0]
hierarchyView = scriptWindow.getLayout().editors( GafferSceneUI.HierarchyView )[0]
# Delay for x seconds
def __delay( delay ) :
endtime = time.time() + delay
while time.time() < endtime :
GafferUI.EventLoop.waitForIdle( 1 )
# Default layout in main window
GafferUI.WidgetAlgo.grab( widget = scriptWindow, imagePath = "images/mainDefaultLayout.png" )
# Empty SceneReader node in main window
script["SceneReader"] = GafferScene.SceneReader()
readerNode = script["SceneReader"]
script.selection().add( readerNode )
__delay( 0.1 )
GafferUI.WidgetAlgo.grab( widget = scriptWindow, imagePath = "images/mainSceneReaderNode.png" )
script.setFocus( readerNode )
__delay( 0.1 )
viewer.view().viewportGadget().getPrimaryChild().waitForCompletion()
GafferUI.WidgetAlgo.grab( widget = scriptWindow, imagePath = "images/mainSceneReaderNodeFocussed.png" )
script["SceneReader"]["fileName"].setValue( "${GAFFER_ROOT}/resources/gafferBot/caches/gafferBot.scc" )
viewer.view().viewportGadget().frame( script["SceneReader"]["out"].bound( "/" ) )
viewer.view().viewportGadget().getPrimaryChild().waitForCompletion()
GafferUI.WidgetAlgo.grab( widget = scriptWindow, imagePath = "images/sceneReaderBound.png" )
# GafferBot bounding box in Viewer
readerNode["fileName"].setValue( "${GAFFER_ROOT}/resources/gafferBot/caches/gafferBot.scc" )
viewer.view().viewportGadget().frame( readerNode["out"].bound( "/" ) )
viewer.view().viewportGadget().getPrimaryChild().waitForCompletion()
GafferUI.WidgetAlgo.grab( widget = scriptWindow, imagePath = "images/viewerSceneReaderBounding.png" )
# GafferBot torso in Hierarchy View
GafferSceneUI.ContextAlgo.setExpandedPaths( script.context(), IECore.PathMatcher( [ "/GAFFERBOT", "/GAFFERBOT/C_torso_GRP" ] ) )
__delay( 0.1 )
GafferUI.WidgetAlgo.grab( widget = hierarchyView, imagePath = "images/hierarchyViewExpandedTwoLevels.png" )
# GafferBot head and left leg in main window
paths = IECore.PathMatcher( [ "/GAFFERBOT/C_torso_GRP/C_head_GRP", "/GAFFERBOT/C_torso_GRP/R_legUpper_GRP" ] )
GafferSceneUI.ContextAlgo.expand( script.context(), paths )
GafferSceneUI.ContextAlgo.expandDescendants( script.context(), paths, readerNode["out"] )
viewer.view().viewportGadget().getPrimaryChild().waitForCompletion()
GafferUI.WidgetAlgo.grab( widget = scriptWindow, imagePath = "images/mainHeadAndLeftLegExpanded.png" )
# GafferBot head and both legs in Viewer
paths = IECore.PathMatcher( [ "/GAFFERBOT/C_torso_GRP/L_legUpper_GRP" ] )
GafferSceneUI.ContextAlgo.expand( script.context(), paths )
GafferSceneUI.ContextAlgo.expandDescendants( script.context(), paths, readerNode["out"] )
GafferSceneUI.ContextAlgo.setSelectedPaths( script.context(), paths )
viewer.view().viewportGadget().getPrimaryChild().waitForCompletion()
GafferUI.WidgetAlgo.grab( widget = viewer, imagePath = "images/viewerHeadAndLegsExpanded.png" )
# Camera and SceneReader node in main window
script["Camera"] = GafferScene.Camera()
cameraNode = script["Camera"]
script.selection().clear()
script.selection().add( cameraNode )
script.setFocus( cameraNode )
# Approximate the default viewport position
viewer.view().viewportGadget().frame( imath.Box3f( imath.V3f( 0, -1.75, 0 ), imath.V3f( 5, 5, 5 ) ) )
viewer.view().viewportGadget().getPrimaryChild().waitForCompletion()
GafferUI.WidgetAlgo.grab( widget = scriptWindow, imagePath = "images/mainCameraNode.png" )
# Grouped nodes in Gaffer
script["Group"] = GafferScene.Group()
groupNode = script["Group"]
groupNode["in"][0].setInput( readerNode["out"] )
groupNode["in"][1].setInput( cameraNode["out"] )
script.selection().clear()
script.selection().add( groupNode )
script.setFocus( groupNode )
viewer.view()["minimumExpansionDepth"].setValue( 999 )
GafferSceneUI.ContextAlgo.clearExpansion( script.context() )
GafferSceneUI.ContextAlgo.expand( script.context(), IECore.PathMatcher( [ "/group" ] ) )
viewer.view().viewportGadget().getPrimaryChild().waitForCompletion()
GafferUI.WidgetAlgo.grab( widget = scriptWindow, imagePath = "images/mainGroupNode.png" )
# Camera repositioned, with translate tool on, in Viewer
cameraNode["transform"]["translate"].setValue( imath.V3f( 16, 13, 31 ) )
viewer.view().viewportGadget().frame( groupNode["out"].bound( "/group" ) )
GafferSceneUI.ContextAlgo.setSelectedPaths( script.context(), IECore.PathMatcher( [ "/group/camera" ] ) )
for i in viewer._Viewer__toolChooser.tools():
if type( i ) == GafferSceneUI.TranslateTool:
translateTool = i
translateTool["active"].setValue( True )
viewer.view().viewportGadget().getPrimaryChild().waitForCompletion()
GafferUI.WidgetAlgo.grab( widget = viewer, imagePath = "images/viewerCameraRepositioned.png" )
# Camera rotated, with rotate tool on, in Viewer
translateTool["active"].setValue( False )
cameraNode["transform"]["rotate"].setValue( imath.V3f( 0, 30, 0 ) )
for i in viewer._Viewer__toolChooser.tools():
if type( i ) == GafferSceneUI.RotateTool:
rotateTool = i
rotateTool["active"].setValue( True )
viewer.view().viewportGadget().getPrimaryChild().waitForCompletion()
GafferUI.WidgetAlgo.grab( widget = viewer, imagePath = "images/viewerCameraRotated.png" )
# Camera node in Node Editor window
nodeEditorWindow = GafferUI.NodeEditor.acquire( cameraNode, floating=True )
nodeEditorWindow._qtWidget().setFocus()
GafferUI.PlugValueWidget.acquire( cameraNode["transform"] )
GafferUI.WidgetAlgo.grab( widget = nodeEditorWindow, imagePath = "images/nodeEditorWindowCameraTransform.png" )
del nodeEditorWindow
del readerNode
del groupNode
del cameraNode
# Render settings graph in Graph Editor
script["fileName"].setValue( os.path.abspath( "scripts/renderSettings.gfr" ) )
script.load()
script.selection().clear()
script.selection().add( script["Catalogue"] )
script.setFocus( script["Catalogue"] )
graphEditor.frame( script.children( Gaffer.Node ) )
GafferUI.WidgetAlgo.grab( widget = graphEditor, imagePath = "images/graphEditorRenderSettings.png" )
# GafferBot render without lighting in main window
def __renderAndGrab( script, widget, imagePath, delay = 15 ) :
script["variables"]["imageCataloguePort"]["value"].setValue( script["Catalogue"].displayDriverServer().portNumber() )
script["InteractiveAppleseedRender"]["state"].setValue( script["InteractiveAppleseedRender"].State.Running )
__delay( delay )
viewport = scriptWindow.getLayout().editors( GafferUI.Viewer )[0].view().viewportGadget()
viewport.frame( viewport.getPrimaryChild().bound() )
GafferUI.EventLoop.waitForIdle()
GafferUI.WidgetAlgo.grab( widget = widget, imagePath = imagePath )
script["InteractiveAppleseedRender"]["state"].setValue( script["InteractiveAppleseedRender"].State.Stopped )
__renderAndGrab( script, scriptWindow, "images/mainRenderGrey.png", delay = 1 )
# Render settings with gap in main window
script["fileName"].setValue( os.path.abspath( "scripts/renderSettingsWithGap.gfr" ) )
script.load()
script.selection().add( [ script["StandardOptions"], script["AppleseedOptions"], script["Outputs"], script["InteractiveAppleseedRender"], script["Catalogue"] ] )
graphEditor.frame( script.children( Gaffer.Node ) )
GafferUI.WidgetAlgo.grab( widget = graphEditor, imagePath = "images/mainRenderSettingsWithGap.png" )
# First shader assignment in Graph Editor
script["fileName"].setValue( os.path.abspath( "scripts/firstShaderAssignment.gfr" ) )
script.load()
script.selection().add( script["ShaderAssignment"] )
script.setFocus( script["ShaderAssignment"] )
graphEditor.frame( script.children( Gaffer.Node ) )
GafferUI.WidgetAlgo.grab( widget = graphEditor, imagePath = "images/graphEditorFirstShaderNodes.png" )
# Environment light in Graph Editor
script["fileName"].setValue( os.path.abspath( "scripts/firstLight.gfr" ) )
script.load()
script.selection().add( script["hosek_environment_edf"] )
script.setFocus( script["hosek_environment_edf"] )
graphEditor.frame( Gaffer.StandardSet( [ script["Group"], script["hosek_environment_edf"] ] ) )
GafferUI.WidgetAlgo.grab( widget = graphEditor, imagePath = "images/graphEditorEnvironmentLightNode.png" )
# GafferBot render with lighting in Viewer
script.selection().clear()
script.selection().add( script["Catalogue"] )
script.setFocus( script["Catalogue"] )
__renderAndGrab( script, viewer, "images/viewerRenderOneShader.png" )
# GafferBot render with lighting and textures in Viewer
script["fileName"].setValue( os.path.abspath( "scripts/textures.gfr" ) )
script.load()
script.selection().add( script["Catalogue"] )
script.setFocus( script["Catalogue"] )
__renderAndGrab( script, viewer, "images/viewerRenderTextures.png" )
# Second shader assignment in Graph Editor
script["fileName"].setValue( os.path.abspath( "scripts/secondShaderAssignment.gfr" ) )
script.load()
script.selection().add( script["ShaderAssignment1"] )
script.setFocus( script["ShaderAssignment1"] )
graphEditor.frame( Gaffer.StandardSet( [ script["as_disney_material"], script["as_disney_material1"], script["ShaderAssignment"], script["ShaderAssignment1"] ] ) )
GafferUI.WidgetAlgo.grab( widget = graphEditor, imagePath = "images/graphEditorSecondShaderNodes.png" )
# GafferBot render with second shader assignment in main window
script.selection().clear()
script.selection().add( script["Catalogue"] )
script.setFocus( script["Catalogue"] )
graphEditor.frame( script.children( Gaffer.Node ) )
__renderAndGrab( script, scriptWindow, "images/mainRenderTwoShaders.png" )
# PathFilter node in Graph Editor
script["fileName"].setValue( os.path.abspath( "scripts/secondShaderAssignmentFiltered.gfr" ) )
script.load()
script.selection().add( script["PathFilter"] )
script.setFocus( script["PathFilter"] )
graphEditor.frame( Gaffer.StandardSet( [ script["PathFilter"], script["ShaderAssignment1"] ] ) )
GafferUI.WidgetAlgo.grab( widget = graphEditor, imagePath = "images/graphEditorPathFilterNode.png" )
# GafferBot node and mouth selection in Viewer
script.selection().clear()
script.selection().add( script["ShaderAssignment1"] )
script.setFocus( script["ShaderAssignment1"] )
paths = IECore.PathMatcher( [ "/group/GAFFERBOT/C_torso_GRP/C_head_GRP/C_head_CPT/C_browNose001_REN", "/group/GAFFERBOT/C_torso_GRP/C_head_GRP/C_head_CPT/C_mouthGrill001_REN" ] )
GafferSceneUI.ContextAlgo.setSelectedPaths( script.context(), paths )
GafferUI.EventLoop.waitForIdle()
paths = IECore.PathMatcher( [ "/group/GAFFERBOT/C_torso_GRP/C_head_GRP/C_head_CPT" ] )
viewer.view().frame( paths, direction = imath.V3f( -0.2, -0.2, -1 ) )
__delay( 0.1 )
viewer.view().viewportGadget().getPrimaryChild().waitForCompletion()
GafferUI.WidgetAlgo.grab( widget = viewer, imagePath = "images/viewerSelectionFace.png" )
# GafferBot final render in Viewer
script.selection().clear()
script.selection().add( script["Catalogue"] )
script.setFocus( script["Catalogue"] )
__renderAndGrab( script, viewer, "images/viewerRenderFinal.png" )
| {
"content_hash": "1531b876ba70192cd7524145606ab340",
"timestamp": "",
"source": "github",
"line_count": 230,
"max_line_length": 178,
"avg_line_length": 48.56956521739131,
"alnum_prop": 0.7662698057470235,
"repo_name": "GafferHQ/gaffer",
"id": "17bdcdd5f41c9c62ae141bc9d24cf6eec83645a4",
"size": "12351",
"binary": false,
"copies": "5",
"ref": "refs/heads/main",
"path": "doc/source/GettingStarted/TutorialAssemblingTheGafferBot/screengrab.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "5790"
},
{
"name": "C",
"bytes": "61993"
},
{
"name": "C++",
"bytes": "9572701"
},
{
"name": "CMake",
"bytes": "85201"
},
{
"name": "GLSL",
"bytes": "6208"
},
{
"name": "Python",
"bytes": "10280178"
},
{
"name": "Ruby",
"bytes": "419"
},
{
"name": "Shell",
"bytes": "14580"
}
],
"symlink_target": ""
} |
'''
Mac OS X implementations of various commands in the "desktop" interface
'''
# Define the module's virtual name
__virtualname__ = 'desktop'
def __virtual__():
'''
Only load on Mac systems
'''
if __grains__['os'] == 'MacOS':
return __virtualname__
return False
def get_output_volume():
'''
Get the output volume (range 0 to 100)
CLI Example:
.. code-block:: bash
salt '*' desktop.get_output_volume
'''
cmd = 'osascript -e "get output volume of (get volume settings)"'
return __salt__['cmd.run'](cmd)
def set_output_volume(volume):
'''
Set the volume of sound (range 0 to 100)
CLI Example:
.. code-block:: bash
salt '*' desktop.set_output_volume <volume>
'''
cmd = 'osascript -e "set volume output volume {0}"'.format(volume)
__salt__['cmd.run'](cmd)
return get_output_volume()
def screensaver():
'''
Launch the screensaver
CLI Example:
.. code-block:: bash
salt '*' desktop.screensaver
'''
cmd = 'open /System/Library/Frameworks/ScreenSaver.framework/Versions/A/Resources/ScreenSaverEngine.app'
return __salt__['cmd.run'](cmd)
def lock():
'''
Lock the desktop session
CLI Example:
.. code-block:: bash
salt '*' desktop.lock
'''
cmd = '/System/Library/CoreServices/Menu\\ Extras/User.menu/Contents/Resources/CGSession -suspend'
return __salt__['cmd.run'](cmd)
def say(*words):
'''
Say some words.
CLI Example:
.. code-block:: bash
salt '*' desktop.say <word0> <word1> ... <wordN>
'''
cmd = 'say {}'.format(' '.join(words))
return __salt__['cmd.run'](cmd)
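# --- Editor's addition: illustrative Python-side usage, not part of the upstream module. ---
# Besides the CLI examples in the docstrings above, these execution module functions can be
# invoked from Python; the '*' target and the argument values below are placeholders.
#
#     import salt.client
#
#     local = salt.client.LocalClient()
#     local.cmd('*', 'desktop.set_output_volume', [25])
#     local.cmd('*', 'desktop.say', ['backup', 'finished'])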
| {
"content_hash": "9f2b0795e450613f03c5fe8a8d278385",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 108,
"avg_line_length": 18.692307692307693,
"alnum_prop": 0.5843621399176955,
"repo_name": "victorywang80/Maintenance",
"id": "dd836165d2131d1db8f2ed17254256c903b61846",
"size": "1725",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "saltstack/src/salt/modules/osxdesktop.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "160954"
},
{
"name": "JavaScript",
"bytes": "1"
},
{
"name": "Python",
"bytes": "4522522"
},
{
"name": "Scheme",
"bytes": "7488"
},
{
"name": "Shell",
"bytes": "14653"
}
],
"symlink_target": ""
} |
import yaml
class AnInstance:
def __init__(self, foo, bar):
self.foo = foo
self.bar = bar
def __repr__(self):
try:
return "%s(foo=%r, bar=%r)" % (self.__class__.__name__,
self.foo, self.bar)
except RuntimeError:
return "%s(foo=..., bar=...)" % self.__class__.__name__
class AnInstanceWithState(AnInstance):
def __getstate__(self):
return {'attributes': [self.foo, self.bar]}
def __setstate__(self, state):
self.foo, self.bar = state['attributes']
def test_recursive(recursive_filename, verbose=False):
exec open(recursive_filename, 'rb').read()
value1 = value
output1 = None
value2 = None
output2 = None
try:
output1 = yaml.dump(value1)
value2 = yaml.load(output1, yaml.FullLoader)
output2 = yaml.dump(value2)
assert output1 == output2, (output1, output2)
finally:
if verbose:
#print "VALUE1:", value1
#print "VALUE2:", value2
print "OUTPUT1:"
print output1
print "OUTPUT2:"
print output2
test_recursive.unittest = ['.recursive']
if __name__ == '__main__':
import test_appliance
test_appliance.run(globals())
| {
"content_hash": "c4a070ae13ad0c056d203aca6a56ec34",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 67,
"avg_line_length": 26.020408163265305,
"alnum_prop": 0.5513725490196079,
"repo_name": "pexip/os-pyyaml",
"id": "312204ead76a334eb38e18ec5d34b84cbad12244",
"size": "1276",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/lib/test_recursive.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "476"
},
{
"name": "Cython",
"bytes": "73928"
},
{
"name": "Python",
"bytes": "598576"
}
],
"symlink_target": ""
} |
import os, sys, popen2
class Batch:
def __init__(self):
self.jobs = {}
self.outputs = {}
def add_job(self, id, input):
self.jobs[id] = input
def get_job(self, id):
return self.jobs[id]
def run(self):
        raise NotImplementedError('subclasses of Batch must implement run()')
class BatchCmd(Batch):
def __init__(self, command):
Batch.__init__(self)
self.command = command
def run(self):
if not self.jobs: return
output, input = popen2.popen2(self.command)
for id in self.jobs:
input.write(self.jobs[id] + '\n')
input.close()
for id in self.jobs:
self.outputs[id] = output.readline().rstrip()
output.close()
class BatchSBD(Batch):
def run(self):
if not self.jobs: return
## create a model
| {
"content_hash": "fd3f70589cfca7b39fdba32c48fbaa8e",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 57,
"avg_line_length": 21.025641025641026,
"alnum_prop": 0.5304878048780488,
"repo_name": "hectormartinez/rougexstem",
"id": "03306b4ca00705d8fef636d69135be35a5b50dc2",
"size": "820",
"binary": false,
"copies": "17",
"ref": "refs/heads/master",
"path": "taln2016/icsisumm-primary-sys34_v1/preprocess/batch.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "AMPL",
"bytes": "252646"
},
{
"name": "Batchfile",
"bytes": "2712"
},
{
"name": "C",
"bytes": "3446743"
},
{
"name": "C#",
"bytes": "3511"
},
{
"name": "CSS",
"bytes": "1240"
},
{
"name": "HTML",
"bytes": "315849"
},
{
"name": "M4",
"bytes": "4099"
},
{
"name": "Makefile",
"bytes": "199393"
},
{
"name": "Perl",
"bytes": "378641"
},
{
"name": "Perl6",
"bytes": "67212"
},
{
"name": "Python",
"bytes": "3712683"
},
{
"name": "Shell",
"bytes": "319340"
},
{
"name": "TeX",
"bytes": "536677"
},
{
"name": "XQuery",
"bytes": "5987"
},
{
"name": "XS",
"bytes": "45555"
}
],
"symlink_target": ""
} |
from __future__ import print_function
import os
from catkin_pkg.package_templates import create_package_files, PackageTemplate
# Exempt build directories
# See https://github.com/catkin/catkin_tools/issues/82
def prepare_arguments(parser):
# Workspace / profile args
# add_context_args(parser)
subparsers = parser.add_subparsers(dest='subcommand', help='sub-command help')
parser_pkg = subparsers.add_parser('pkg', help='Create a new catkin package.')
parser_pkg.description = (
"Create a new Catkin package. Note that while the "
"default options used by this command are sufficient for prototyping and "
"local usage, it is important that any publically-available packages have "
"a valid license and a valid maintainer e-mail address.")
add = parser_pkg.add_argument
add('name', metavar='PKG_NAME', nargs='+',
help='The name of one or more packages to create. This name should be '
             'completely lower-case with individual words separated by underscores.')
add('-p', '--path', action='store', default=os.getcwd(),
help='The path into which the package should be generated.')
# TODO: Make this possible
# add('--manifest-only', action='store_true', default=False,
# help='Only create a package.xml manifest file and do not generate a CMakeLists.txt')
# TODO: Make this possible
# add('--build-type', type=str, choices=['catkin', 'cmake'],
# nargs=1,
# default='catkin',
# help='The buildtool to use to build the package. (default: catkin)')
rosdistro_name = os.environ['ROS_DISTRO'] if 'ROS_DISTRO' in os.environ else None
add('--rosdistro', required=rosdistro_name is None, default=rosdistro_name,
help='The ROS distro (default: environment variable ROS_DISTRO if defined)')
basic_group = parser_pkg.add_argument_group('Package Metadata')
add = basic_group.add_argument
add('-v', '--version',
metavar='MAJOR.MINOR.PATCH',
action='store',
help='Initial package version. (default 0.0.0)')
add('-l', '--license',
action='append',
help='The software license under which the code is distributed, such as '
'BSD, MIT, GPLv3, or others. (default: "TODO")')
add('-m', '--maintainer',
metavar=('NAME', 'EMAIL'),
dest='maintainers',
action='append',
nargs=2,
help='A maintainer who is responsible for the package. (default: '
'[username, [email protected]]) (multiple allowed)')
add('-a', '--author',
metavar=('NAME', 'EMAIL'),
dest='authors',
action='append',
nargs=2,
help='An author who contributed to the package. (default: no additional '
'authors) (multiple allowed)')
add('-d', '--description',
action='store',
help='Description of the package. (default: empty)')
deps_group = parser_pkg.add_argument_group('Package Dependencies')
add = deps_group.add_argument
add('--catkin-deps', '-c', metavar='DEP', nargs="*",
help='The names of one or more Catkin dependencies. These are '
'Catkin-based packages which are either built as source or installed '
'by your system\'s package manager.')
add('--system-deps', '-s', metavar='DEP', nargs="*",
help='The names of one or more system dependencies. These are other '
'packages installed by your operating system\'s package manager.')
cpp_group = parser_pkg.add_argument_group('C++ Options')
add = cpp_group.add_argument
add('--boost-components',
metavar='COMP',
nargs='*',
help='One or more boost components used by the package.')
# py_group = parser_pkg.add_argument_group('Python Options')
# add('--python-setup', action='store_true', default=False,
# help='Add a default python setup file.')
return parser
def main(opts):
try:
# Get absolute path to directory containing package
package_dest_path = os.path.abspath(opts.path)
for package_name in opts.name:
print('Creating package "%s" in "%s"...' % (package_name, package_dest_path))
target_path = os.path.join(package_dest_path, package_name)
package_template = PackageTemplate._create_package_template(
package_name=package_name,
description=opts.description,
licenses=opts.license or [],
maintainer_names=[m[0] for m in opts.maintainers] if opts.maintainers else [],
author_names=[a[0] for a in opts.authors] if opts.authors else [],
version=opts.version,
catkin_deps=opts.catkin_deps,
system_deps=opts.system_deps,
boost_comps=opts.boost_components)
# Add maintainer and author e-mails
if opts.maintainers:
for (pm, om) in zip(package_template.maintainers, opts.maintainers):
pm.email = om[1]
if opts.authors:
for (pa, oa) in zip(package_template.authors, opts.authors):
pa.email = oa[1]
# Add build type export
# if opts.build_type and opts.build_type != 'catkin':
# build_type = Export('build_type', content=opts.build_type)
# package_template.exports.append(build_type)
create_package_files(target_path=target_path,
package_template=package_template,
rosdistro=opts.rosdistro,
newfiles={})
print('Successfully created package files in %s.' % target_path)
except ValueError as vae:
print(str(vae))
return 1
return 0
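# --- Editor's addition: illustrative invocation, not part of the upstream verb. ---
# Package name, dependencies, maintainer, and license below are placeholder values.
#
#     catkin create pkg my_robot_driver \
#         --rosdistro noetic \
#         --catkin-deps rospy std_msgs \
#         --system-deps libusb-1.0 \
#         -m "Jane Doe" [email protected] \
#         -l BSD
#
# This writes my_robot_driver/package.xml and CMakeLists.txt under the current directory
# (or under the directory given with --path/-p).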
| {
"content_hash": "1c8062a8018cde0bb9ef5f1ce48226ef",
"timestamp": "",
"source": "github",
"line_count": 142,
"max_line_length": 94,
"avg_line_length": 40.95070422535211,
"alnum_prop": 0.6113499570077386,
"repo_name": "iwanders/catkin_tools",
"id": "d389595f2a7ab01be0aa8d617d8e113f714a0ffc",
"size": "6417",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "catkin_tools/verbs/catkin_create/cli.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "17"
},
{
"name": "C++",
"bytes": "620"
},
{
"name": "CMake",
"bytes": "2854"
},
{
"name": "Objective-C",
"bytes": "3354"
},
{
"name": "Python",
"bytes": "362469"
},
{
"name": "Shell",
"bytes": "7269"
}
],
"symlink_target": ""
} |
from django import forms
from .models import FeriadoCalendarioAcademico
class CreateFeriadoForm(forms.ModelForm):
class Meta:
model = FeriadoCalendarioAcademico
fields = ['nome', 'data'] | {
"content_hash": "983cd6b58be34a00373a320cc0ac5fa8",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 46,
"avg_line_length": 21.555555555555557,
"alnum_prop": 0.788659793814433,
"repo_name": "bczmufrn/frequencia",
"id": "f3b6a77389c8d30ae920e1d733f99bc7eb05946e",
"size": "194",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "frequencia/calendario/forms.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "28546"
},
{
"name": "HTML",
"bytes": "182716"
},
{
"name": "JavaScript",
"bytes": "67928"
},
{
"name": "Python",
"bytes": "94322"
}
],
"symlink_target": ""
} |
from django.contrib import admin
from .models import Game, Question
from django.utils.translation import ugettext_lazy as _
class GameAdmin(admin.ModelAdmin):
pass
admin.site.register(Game, GameAdmin)
class QuestionAdmin(admin.ModelAdmin):
list_display = ('game', 'question', 'value')
fieldsets = (
(None, {
'fields': ('game', 'question', 'picture', 'value')
}),
(_('First Question'), {
'fields': ('answer_one', 'answer_one_correct',)
}),
(_('Second Question'), {
'fields': ('answer_two', 'answer_two_correct',)
}),
(_('Third Question'), {
'fields': ('answer_three', 'answer_three_correct',)
}),
(_('Fourth Question'), {
'fields': ('answer_four', 'answer_four_correct',)
}),
)
admin.site.register(Question, QuestionAdmin)
| {
"content_hash": "f7e47b397a453b1df70b3f96fb56b66c",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 63,
"avg_line_length": 28.483870967741936,
"alnum_prop": 0.5560588901472253,
"repo_name": "codefisher/web_games",
"id": "90cbfe41d125c6ec969c36e5b9083679b3df2c6e",
"size": "883",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "million/admin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3307"
},
{
"name": "HTML",
"bytes": "23489"
},
{
"name": "Python",
"bytes": "52373"
},
{
"name": "Shell",
"bytes": "126"
}
],
"symlink_target": ""
} |
from .exceptions import ToleoException
from .types import GenericSoftware, PypiSoftware, GithubSoftware, \
BitbucketSoftware, AurPackage, ArchPackage, YumPackage, Collection
from .utils import process
__all__ = [
'ToleoException',
'GenericSoftware',
'PypiSoftware',
'GithubSoftware',
'BitbucketSoftware',
'AurPackage',
'ArchPackage',
'YumPackage',
'Collection',
'process'
]
| {
"content_hash": "f4ea790638351bebdc9ea20f2f1decc6",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 70,
"avg_line_length": 23.333333333333332,
"alnum_prop": 0.6976190476190476,
"repo_name": "carlwgeorge/toleo-old",
"id": "bc52f3185065edff02f01acf5a46871dab1c7cf7",
"size": "420",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "toleo/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "15246"
}
],
"symlink_target": ""
} |
import threading
from PyQt4.QtCore import Qt, QAbstractListModel, QModelIndex
from PyQt4.QtGui import QVBoxLayout, QHBoxLayout, QDialogButtonBox, QLabel, QPushButton, QWidget, QTreeView, \
QSizePolicy, qApp
import hex.utils as utils
import hex.hexlineedit as hexlineedit
import hex.matchers as matchers
import hex.hexwidget as hexwidget
import hex.operations as operations
import hex.documents as documents
class SearchDialog(utils.Dialog):
def __init__(self, main_win, hex_widget):
utils.Dialog.__init__(self, main_win, name='search_dialog')
self.hexWidget = hex_widget
self.setWindowTitle(utils.tr('Search'))
self.m_layout = QVBoxLayout()
self.setLayout(self.m_layout)
self.descLabel = QLabel(self)
self.descLabel.setText(utils.tr('Enter hex values to search for:'))
self.m_layout.addWidget(self.descLabel)
self.hexInput = hexlineedit.HexLineEdit(self)
self.m_layout.addWidget(self.hexInput)
self.buttonBox = QDialogButtonBox(self)
self.buttonBox.addButton(QDialogButtonBox.Close)
self.searchButton = QPushButton(utils.tr('Search'), self)
self.searchButton.setIcon(utils.getIcon('edit-find'))
self.buttonBox.addButton(self.searchButton, QDialogButtonBox.AcceptRole)
self.buttonBox.accepted.connect(self.accept)
self.buttonBox.rejected.connect(self.reject)
self.m_layout.addWidget(self.buttonBox)
@property
def matcher(self):
return matchers.BinaryMatcher(self.hexWidget.document, self.hexInput.data)
class SearchResultsWidget(QWidget):
def __init__(self, parent, hex_widget=None, match_operation=None):
QWidget.__init__(self, parent)
self._matchOperation = match_operation
self.hexWidget = hex_widget
self.setLayout(QVBoxLayout())
self.layout().setContentsMargins(0, 0, 0, 0)
self.model = SearchResultsModel(self.hexWidget, None)
self.resultsView = QTreeView(self)
self.resultsView.setModel(self.model)
self.resultsView.clicked.connect(self._onResultClicked)
self.layout().addWidget(self.resultsView)
self.progressTextLabel = operations.OperationProgressTextLabel(self, self._matchOperation)
self.progressTextLabel.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Preferred)
self.searchCancelButton = operations.OperationCancelPushButton(self, self._matchOperation)
hl = QHBoxLayout()
hl.setContentsMargins(0, 0, 0, 0)
hl.addWidget(self.progressTextLabel)
hl.addWidget(self.searchCancelButton)
self.layout().addLayout(hl)
self.matchOperation = match_operation
@property
def matchOperation(self):
return self._matchOperation
@matchOperation.setter
def matchOperation(self, match_operation):
self._matchOperation = match_operation
self.model = SearchResultsModel(self.hexWidget, match_operation)
self.resultsView.setModel(self.model)
self.searchCancelButton.operation = match_operation
self.progressTextLabel.operation = match_operation
def _onResultClicked(self, index):
if index.isValid():
rng = index.data(SearchResultsModel.MatchRangeRole)
self.hexWidget.emphasize(hexwidget.EmphasizedRange(self.hexWidget, rng.startPosition, rng.size,
hexwidget.DataRange.UnitBytes))
self.hexWidget.selectionRanges = [hexwidget.SelectionRange(self.hexWidget, rng.startPosition, rng.size,
hexwidget.DataRange.UnitBytes)]
class SearchResultsModel(QAbstractListModel):
MatchRangeRole, MatchRole = Qt.UserRole, Qt.UserRole + 1
def __init__(self, hex_widget, match_operation):
QAbstractListModel.__init__(self)
self._lock = threading.RLock()
self._newResults = []
self._matchOperation = match_operation
self._hexWidget = hex_widget
self._results = []
if self._matchOperation is not None:
with self._matchOperation.lock:
self._matchOperation.newResults.connect(self._onNewMatches, Qt.DirectConnection)
self._matchOperation.finished.connect(self._onMatchFinished, Qt.QueuedConnection)
for match in self._matchOperation.state.results.values():
self._results.append((match, self._createRange(match)))
self.startTimer(400)
def rowCount(self, index=QModelIndex()):
return len(self._results) if not index.isValid() else 0
def data(self, index, role=Qt.DisplayRole):
if index.isValid() and (0 <= index.row() < len(self._results)) and index.column() == 0:
if role == Qt.DisplayRole or role == Qt.EditRole:
rng = self._results[index.row()][1]
return utils.tr('Matched {0:#x} bytes at position {1:#x}').format(rng.size, rng.startPosition)
elif role == self.MatchRangeRole:
return self._results[index.row()][1]
elif role == self.MatchRole:
return self._results[index.row()][0]
elif role == Qt.ForegroundRole and not self._results[index.row()][1].size:
return Qt.red
def headerData(self, section, orientation, role=Qt.DisplayRole):
return None
def _onNewMatches(self, results):
with self._lock:
self._newResults += (result[1] for result in results)
def timerEvent(self, event):
has_results = False
with self._lock:
if self._newResults:
self.beginInsertRows(QModelIndex(), len(self._results), len(self._results) + len(self._newResults) - 1)
self._results += ((match, self._createRange(match)) for match in self._newResults)
self._newResults = []
has_results = True
if has_results:
self.endInsertRows()
def _onMatchFinished(self, final_status):
pass
def _onRangeUpdated(self):
for row_index in range(len(self._results)):
if self._results[row_index][1] is self.sender():
index = self.index(row_index, 0)
self.dataChanged.emit(index, index)
break
def _createRange(self, match):
match_range = hexwidget.DataRange(self._hexWidget, match.position, match.length, hexwidget.DataRange.UnitBytes)
match_range.updated.connect(self._onRangeUpdated)
return match_range
| {
"content_hash": "ec606635ebba18e3c4a9f6eea9a482b5",
"timestamp": "",
"source": "github",
"line_count": 154,
"max_line_length": 119,
"avg_line_length": 42.935064935064936,
"alnum_prop": 0.6491228070175439,
"repo_name": "zenwarr/microhex",
"id": "777c4288758456f43d10d32f953339a3425dce6c",
"size": "6612",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/hex/search.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "147015"
},
{
"name": "HTML",
"bytes": "455"
},
{
"name": "Python",
"bytes": "392374"
},
{
"name": "QMake",
"bytes": "5050"
}
],
"symlink_target": ""
} |
import io
import itertools
import os
import re
import cherrypy
from cherrypy._cpreqbody import Part
from girder.api.rest import setResponseHeader, setContentDisposition
from girder.exceptions import GirderException, ValidationException, FilePathException
from girder.models.setting import Setting
from girder.settings import SettingKey
from girder.utility import progress, RequestBodyStream
class FileHandle:
"""
This is the base class that is returned for the file-like API into
Girder file objects. The ``open`` method of assetstore implementations
is responsible for returning an instance of this class or one of its
subclasses. This base class implementation is returned by the
abstract assetstore adapter, and does not leverage any details of the
assetstore implementations.
These file handles are stateful, and therefore not safe for concurrent
access. If used by multiple threads, mutexes should be used.
:param file: The file object to which this file-like object corresponds.
:type file: dict
:param adapter: The assetstore adapter corresponding to this file.
:type adapter: girder.utility.abstract_assetstore_adapter.AbstractAssetstoreAdapter
"""
def __init__(self, file, adapter):
self._file = file
self._adapter = adapter
self._pos = None
# If a read is requested that is longer than the specified size, raise
# an exception. This prevents unbounded memory use.
self._maximumReadSize = 16 * 1024 * 1024
self.seek(0)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
def read(self, size=None):
"""
Read *size* bytes from the file data.
:param size: The number of bytes to read from the current position. The
actual number returned could be less than this if the end of the
file is reached. An empty response indicates that the file has been
completely consumed. If None or negative, read to the end of the
file.
:type size: int
:rtype: bytes
"""
if size is None or size < 0:
size = self._file['size'] - self._pos
if size > self._maximumReadSize:
raise GirderException('Read exceeds maximum allowed size.')
data = io.BytesIO()
length = 0
for chunk in itertools.chain(self._prev, self._stream):
chunkLen = len(chunk)
if chunkLen == 0:
break
if length + chunkLen <= size:
data.write(chunk)
self._prev = []
length += chunkLen
if length == size:
break
else:
chunkLen = min(size - length, chunkLen)
data.write(chunk[:chunkLen])
self._prev = [chunk[chunkLen:]]
length += chunkLen
break
self._pos += length
return data.getvalue()
def tell(self):
return self._pos
def seek(self, offset, whence=os.SEEK_SET):
oldPos = self._pos
if whence == os.SEEK_SET:
self._pos = offset
elif whence == os.SEEK_CUR:
self._pos += offset
elif whence == os.SEEK_END:
self._pos = max(self._file['size'] + offset, 0)
if self._pos != oldPos:
self._prev = []
self._stream = self._adapter.downloadFile(self._file, offset=self._pos, headers=False)()
def close(self):
pass
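# --- Editor's addition: illustrative usage sketch, not part of the upstream module. ---
# As described in the class docstring, assetstore adapters hand these out from their
# open() method; `file_doc` and `adapter` below are assumed to be already in hand.
#
#     with FileHandle(file_doc, adapter) as fh:
#         header = fh.read(16)       # streams the first 16 bytes via downloadFile()
#         fh.seek(-4, os.SEEK_END)   # seeking discards the buffer and reopens the stream
#         trailer = fh.read(4)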
class AbstractAssetstoreAdapter:
"""
This defines the interface to be used by all assetstore adapters.
"""
def __init__(self, assetstore):
self.assetstore = assetstore
@staticmethod
def validateInfo(doc):
"""
Adapters may implement this if they need to perform any validation
steps whenever the assetstore info is saved to the database. It should
return the document with any necessary alterations in the success case,
or throw an exception if validation fails.
"""
return doc
@staticmethod
def fileIndexFields():
"""
Default behavior is that no additional file fields need to be indexed
within the database.
"""
return []
def capacityInfo(self):
"""
Assetstore types that are able to report how much free and/or total
capacity they have should override this method. Default behavior is to
report both quantities as unknown.
:returns: A dict with 'free' and 'total' keys whose values are
either bytes (ints) or None for an unknown quantity.
"""
return {
'free': None,
'total': None
}
def initUpload(self, upload):
"""
This must be called before any chunks are uploaded to do any
additional behavior and optionally augment the upload document. The
method must return the upload document. Default behavior is to
simply return the upload document unmodified.
:param upload: The upload document to optionally augment.
:type upload: dict
"""
return upload
def uploadChunk(self, upload, chunk):
"""
Call this method to process each chunk of an upload.
:param upload: The upload document to update.
:type upload: dict
:param chunk: The file object representing the chunk that was uploaded.
:type chunk: file
:returns: Must return the upload document with any optional changes.
"""
        raise NotImplementedError('Must override uploadChunk in %s.' %
self.__class__.__name__)
def finalizeUpload(self, upload, file):
"""
Call this once the last chunk has been processed. This method does not
need to delete the upload document as that will be deleted by the
caller afterward. This method may augment the File document, and must
return the File document.
:param upload: The upload document.
:type upload: dict
:param file: The file document that was created.
:type file: dict
:returns: The file document with optional modifications.
"""
return file
def requestOffset(self, upload):
"""
Request the offset for resuming an interrupted upload. Default behavior
simply returns the 'received' field of the upload document. This method
exists because in some cases, such as when the server crashes, it's
possible that the received field is not accurate, so adapters may
implement this to provide the actual next byte required.
"""
return upload['received']
def deleteFile(self, file):
"""
This is called when a File is deleted to allow the adapter to remove
the data from within the assetstore. This method should not modify
or delete the file object, as the caller will delete it afterward.
:param file: The File document about to be deleted.
:type file: dict
"""
raise NotImplementedError('Must override deleteFile in %s.' %
self.__class__.__name__)
def shouldImportFile(self, path, params):
"""
This is a helper used during the import process to determine if a file located at
the specified path should be imported, based on the request parameters. Exclusion
takes precedence over inclusion.
:param path: The path of the file.
:type path: str
:param params: The request parameters.
:type params: dict
:rtype: bool
"""
include = params.get('fileIncludeRegex')
exclude = params.get('fileExcludeRegex')
fname = os.path.basename(path)
if exclude and re.match(exclude, fname):
return False
if include:
            return bool(re.match(include, fname))
return True
def downloadFile(self, file, offset=0, headers=True, endByte=None,
contentDisposition=None, extraParameters=None, **kwargs):
"""
This method is in charge of returning a value to the RESTful endpoint
that can be used to download the file. This should either return a
generator function that yields the bytes of the file (which will stream
the file directly), or modify the response headers and raise a
`cherrypy.HTTPRedirect`.
:param file: The file document being downloaded.
:type file: dict
:param offset: Offset in bytes to start the download at.
:type offset: int
:param headers: Flag for whether headers should be sent on the response.
:type headers: bool
:param endByte: Final byte to download. If ``None``, downloads to the
end of the file.
:type endByte: int or None
:param contentDisposition: Value for Content-Disposition response
header disposition-type value.
:type contentDisposition: str or None
:type extraParameters: str or None
"""
raise NotImplementedError('Must override downloadFile in %s.' %
self.__class__.__name__)
def findInvalidFiles(self, progress=progress.noProgress, filters=None,
checkSize=True, **kwargs):
"""
Finds and yields any invalid files in the assetstore. It is left to
the caller to decide what to do with them.
:param progress: Pass a progress context to record progress.
:type progress: :py:class:`girder.utility.progress.ProgressContext`
:param filters: Additional query dictionary to restrict the search for
files. There is no need to set the ``assetstoreId`` in the filters,
since that is done automatically.
:type filters: dict or None
:param checkSize: Whether to make sure the size of the underlying
data matches the size of the file.
:type checkSize: bool
"""
raise NotImplementedError('Must override findInvalidFiles in %s.' %
self.__class__.__name__)
def copyFile(self, srcFile, destFile):
"""
This method copies the necessary fields and data so that the
destination file contains the same data as the source file.
:param srcFile: The original File document.
:type srcFile: dict
:param destFile: The File which should have the data copied to it.
:type destFile: dict
:returns: A dict with the destination file.
"""
return destFile
def getChunkSize(self, chunk):
"""
Given a chunk that is either a file-like object or a string, attempt to
determine its length. If it is a file-like object, then this relies on
being able to use fstat.
:param chunk: the chunk to get the size of
:type chunk: a file-like object or a string
:returns: the length of the chunk if known, or None.
"""
if isinstance(chunk, (io.BytesIO, RequestBodyStream, Part)):
return
elif hasattr(chunk, 'fileno'):
return os.fstat(chunk.fileno()).st_size
elif isinstance(chunk, str):
return len(chunk.encode('utf8'))
else:
return len(chunk)
def setContentHeaders(self, file, offset, endByte, contentDisposition=None):
"""
Sets the Content-Length, Content-Disposition, Content-Type, and also
the Content-Range header if this is a partial download.
:param file: The file being downloaded.
:param offset: The start byte of the download.
:type offset: int
:param endByte: The end byte of the download (non-inclusive).
:type endByte: int
:param contentDisposition: Content-Disposition response header
disposition-type value, if None, Content-Disposition will
be set to 'attachment; filename=$filename'.
:type contentDisposition: str or None
"""
isRangeRequest = cherrypy.request.headers.get('Range')
setResponseHeader(
'Content-Type',
file.get('mimeType') or 'application/octet-stream')
setContentDisposition(file['name'], contentDisposition or 'attachment')
setResponseHeader('Content-Length', max(endByte - offset, 0))
if (offset or endByte < file['size'] or isRangeRequest) and file['size']:
setResponseHeader(
'Content-Range',
'bytes %d-%d/%d' % (offset, endByte - 1, file['size']))
def checkUploadSize(self, upload, chunkSize):
"""
Check if the upload is valid based on the chunk size. If this
raises an exception, then the caller should clean up and reraise the
exception.
:param upload: the dictionary of upload information. The received and
size values are used.
:param chunkSize: the chunk size that needs to be validated.
:type chunkSize: a non-negative integer or None if unknown.
"""
if 'received' not in upload or 'size' not in upload:
return
if chunkSize is None:
return
if upload['received'] + chunkSize > upload['size']:
raise ValidationException('Received too many bytes.')
if (upload['received'] + chunkSize != upload['size']
and chunkSize < Setting().get(SettingKey.UPLOAD_MINIMUM_CHUNK_SIZE)):
raise ValidationException('Chunk is smaller than the minimum size.')
def cancelUpload(self, upload):
"""
This is called when an upload has been begun and it should be
abandoned. It must clean up temporary files, chunks, or whatever other
information the assetstore contains.
"""
raise NotImplementedError('Must override cancelUpload in %s.' %
self.__class__.__name__)
def untrackedUploads(self, knownUploads=(), delete=False):
"""
List and optionally discard uploads that are in the assetstore but not
in the known list.
:param knownUploads: a list of upload dictionaries of all known
incomplete uploads.
:type knownUploads: list
:param delete: if True, delete any unknown uploads.
:type delete: bool
:returns: a list of unknown uploads.
"""
return ()
def importData(self, parent, parentType, params, progress, user, **kwargs):
"""
Assetstores that are capable of importing pre-existing data from the
underlying storage medium can implement this method.
:param parent: The parent object to import into.
:param parentType: The model type of the parent object (folder, user,
or collection).
:type parentType: str
:param params: Additional parameters required for the import process.
Typically includes an importPath field representing a root path
on the underlying storage medium.
:type params: dict
:param progress: Object on which to record progress if possible.
:type progress: :py:class:`girder.utility.progress.ProgressContext`
:param user: The Girder user performing the import.
:type user: dict or None
"""
raise NotImplementedError(
'The %s assetstore type does not support importing existing data.'
% self.__class__.__name__)
def fileUpdated(self, file):
"""
This is called when the file document has been changed. Any assetstore
implementation that needs to do anything when the file document changes
should override this method.
:param file: The updated file document.
:type file: dict
"""
pass
def open(self, file):
"""
Exposes a Girder file as a python file-like object. At the
moment, this is a read-only interface, the equivalent of opening a
system file with 'rb' mode.
:param file: A Girder file document.
:type file: dict
:return: A file-like object containing the bytes of the file.
:rtype: FileHandle
"""
return FileHandle(file, self)
def getLocalFilePath(self, file):
"""
If an assetstore adapter supports it, return a path to the file on the
local file system. Otherwise, raise an exception.
:param file: The file document.
:type file: dict
:returns: a local path to the file.
:rtype: str
"""
raise FilePathException('This assetstore does not expose file paths')
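# --- Hedged illustration (added for this write-up; not part of the original
# Girder source). A minimal in-memory adapter sketch showing how a concrete
# subclass satisfies the abstract interface above. The class name and its
# `_data` dict are hypothetical and exist only to make the contract concrete;
# a real adapter would persist bytes to disk, S3, GridFS, etc., and would call
# setContentHeaders() when streaming downloads with headers enabled.
class InMemoryAssetstoreAdapterSketch(AbstractAssetstoreAdapter):
    def __init__(self, assetstore):
        AbstractAssetstoreAdapter.__init__(self, assetstore)
        self._data = {}  # maps upload/file id -> bytes
    def uploadChunk(self, upload, chunk):
        # Accept either raw bytes or a file-like chunk object.
        data = chunk.read() if hasattr(chunk, 'read') else chunk
        buf = self._data.get(upload['_id'], b'') + data
        self._data[upload['_id']] = buf
        upload['received'] = len(buf)
        return upload
    def finalizeUpload(self, upload, file):
        # Move the accumulated bytes from the upload id over to the file id.
        self._data[file['_id']] = self._data.pop(upload['_id'], b'')
        return file
    def downloadFile(self, file, offset=0, headers=True, endByte=None,
                     contentDisposition=None, extraParameters=None, **kwargs):
        end = file['size'] if endByte is None else endByte
        data = self._data.get(file['_id'], b'')[offset:end]
        def stream():
            yield data
        return stream
    def deleteFile(self, file):
        self._data.pop(file['_id'], None)
    def cancelUpload(self, upload):
        self._data.pop(upload['_id'], None)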
| {
"content_hash": "5819332b96dc43052395d2ea5fb0ecf5",
"timestamp": "",
"source": "github",
"line_count": 443,
"max_line_length": 100,
"avg_line_length": 38.135440180586905,
"alnum_prop": 0.6202793891322363,
"repo_name": "RafaelPalomar/girder",
"id": "b8b43140fca79989e421cf1cbf075beaa4b1675b",
"size": "16894",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "girder/utility/abstract_assetstore_adapter.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CMake",
"bytes": "57029"
},
{
"name": "CSS",
"bytes": "53651"
},
{
"name": "HTML",
"bytes": "149014"
},
{
"name": "JavaScript",
"bytes": "1207526"
},
{
"name": "Mako",
"bytes": "8247"
},
{
"name": "Python",
"bytes": "2003341"
},
{
"name": "Roff",
"bytes": "17"
},
{
"name": "Ruby",
"bytes": "10595"
},
{
"name": "Shell",
"bytes": "10823"
}
],
"symlink_target": ""
} |
"""
This module contains functions to handle markers. Used by both the
marker functionality of `~matplotlib.axes.Axes.plot` and
`~matplotlib.axes.Axes.scatter`.
All possible markers are defined here:
============================== ===============================================
marker description
============================== ===============================================
"." point
"," pixel
"o" circle
"v" triangle_down
"^" triangle_up
"<" triangle_left
">" triangle_right
"1" tri_down
"2" tri_up
"3" tri_left
"4" tri_right
"8" octagon
"s" square
"p" pentagon
"*" star
"h" hexagon1
"H" hexagon2
"+" plus
"x" x
"D" diamond
"d" thin_diamond
"|" vline
"_" hline
TICKLEFT tickleft
TICKRIGHT tickright
TICKUP tickup
TICKDOWN tickdown
CARETLEFT caretleft
CARETRIGHT caretright
CARETUP caretup
CARETDOWN caretdown
"None" nothing
None nothing
" " nothing
"" nothing
``'$...$'`` render the string using mathtext.
`verts` a list of (x, y) pairs used for Path vertices.
path a `~matplotlib.path.Path` instance.
(`numsides`, `style`, `angle`) see below
============================== ===============================================
The marker can also be a tuple (`numsides`, `style`, `angle`), which
will create a custom, regular symbol.
`numsides`:
the number of sides
`style`:
the style of the regular symbol:
===== =============================================
Value Description
===== =============================================
0 a regular polygon
1 a star-like symbol
2 an asterisk
3 a circle (`numsides` and `angle` is ignored)
===== =============================================
`angle`:
the angle of rotation of the symbol, in degrees
For backward compatibility, the form (`verts`, 0) is also accepted,
but it is equivalent to just `verts` for giving a raw set of vertices
that define the shape.
"""
import numpy as np
from .cbook import is_math_text, is_string_like, is_numlike, iterable
from matplotlib import rcParams
from .path import Path
from .transforms import IdentityTransform, Affine2D
# special-purpose marker identifiers:
(TICKLEFT, TICKRIGHT, TICKUP, TICKDOWN,
CARETLEFT, CARETRIGHT, CARETUP, CARETDOWN) = range(8)
class MarkerStyle(object):
markers = {
'.': 'point',
',': 'pixel',
'o': 'circle',
'v': 'triangle_down',
'^': 'triangle_up',
'<': 'triangle_left',
'>': 'triangle_right',
'1': 'tri_down',
'2': 'tri_up',
'3': 'tri_left',
'4': 'tri_right',
'8': 'octagon',
's': 'square',
'p': 'pentagon',
'*': 'star',
'h': 'hexagon1',
'H': 'hexagon2',
'+': 'plus',
'x': 'x',
'D': 'diamond',
'd': 'thin_diamond',
'|': 'vline',
'_': 'hline',
TICKLEFT: 'tickleft',
TICKRIGHT: 'tickright',
TICKUP: 'tickup',
TICKDOWN: 'tickdown',
CARETLEFT: 'caretleft',
CARETRIGHT: 'caretright',
CARETUP: 'caretup',
CARETDOWN: 'caretdown',
"None": 'nothing',
None: 'nothing',
' ': 'nothing',
'': 'nothing'
}
# Just used for informational purposes. is_filled()
# is calculated in the _set_* functions.
filled_markers = (
'o', 'v', '^', '<', '>', '8', 's', 'p', '*', 'h', 'H', 'D', 'd')
fillstyles = ('full', 'left', 'right', 'bottom', 'top', 'none')
_half_fillstyles = ('left', 'right', 'bottom', 'top')
# TODO: Is this ever used as a non-constant?
_point_size_reduction = 0.5
def __init__(self, marker=None, fillstyle='full'):
"""
MarkerStyle
Attributes
----------
        markers : list of known markers
fillstyles : list of known fillstyles
filled_markers : list of known filled markers.
Parameters
----------
marker : string or array_like, optional, default: None
See the descriptions of possible markers in the module docstring.
fillstyle : string, optional, default: 'full'
'full', 'left", 'right', 'bottom', 'top', 'none'
"""
self._fillstyle = fillstyle
self.set_marker(marker)
self.set_fillstyle(fillstyle)
def __getstate__(self):
d = self.__dict__.copy()
d.pop('_marker_function')
return d
def __setstate__(self, statedict):
self.__dict__ = statedict
self.set_marker(self._marker)
self._recache()
def _recache(self):
self._path = Path(np.empty((0, 2)))
self._transform = IdentityTransform()
self._alt_path = None
self._alt_transform = None
self._snap_threshold = None
self._joinstyle = 'round'
self._capstyle = 'butt'
self._filled = True
self._marker_function()
def __nonzero__(self):
return bool(len(self._path.vertices))
def is_filled(self):
return self._filled
def get_fillstyle(self):
return self._fillstyle
def set_fillstyle(self, fillstyle):
"""
Sets fillstyle
Parameters
----------
fillstyle : string amongst known fillstyles
"""
if fillstyle not in self.fillstyles:
raise ValueError("Unrecognized fillstyle %s"
% ' '.join(self.fillstyles))
self._fillstyle = fillstyle
self._recache()
def get_joinstyle(self):
return self._joinstyle
def get_capstyle(self):
return self._capstyle
def get_marker(self):
return self._marker
def set_marker(self, marker):
if (iterable(marker) and len(marker) in (2, 3) and
marker[1] in (0, 1, 2, 3)):
self._marker_function = self._set_tuple_marker
elif isinstance(marker, np.ndarray):
self._marker_function = self._set_vertices
elif not isinstance(marker, list) and marker in self.markers:
self._marker_function = getattr(
self, '_set_' + self.markers[marker])
elif is_string_like(marker) and is_math_text(marker):
self._marker_function = self._set_mathtext_path
elif isinstance(marker, Path):
self._marker_function = self._set_path_marker
else:
try:
Path(marker)
self._marker_function = self._set_vertices
except ValueError:
raise ValueError('Unrecognized marker style {}'.format(marker))
self._marker = marker
self._recache()
def get_path(self):
return self._path
def get_transform(self):
return self._transform.frozen()
def get_alt_path(self):
return self._alt_path
def get_alt_transform(self):
return self._alt_transform.frozen()
def get_snap_threshold(self):
return self._snap_threshold
def _set_nothing(self):
self._filled = False
def _set_custom_marker(self, path):
verts = path.vertices
rescale = max(np.max(np.abs(verts[:, 0])),
np.max(np.abs(verts[:, 1])))
self._transform = Affine2D().scale(1.0 / rescale)
self._path = path
def _set_path_marker(self):
self._set_custom_marker(self._marker)
def _set_vertices(self):
verts = self._marker
marker = Path(verts)
self._set_custom_marker(marker)
def _set_tuple_marker(self):
marker = self._marker
if is_numlike(marker[0]):
if len(marker) == 2:
numsides, rotation = marker[0], 0.0
elif len(marker) == 3:
numsides, rotation = marker[0], marker[2]
symstyle = marker[1]
if symstyle == 0:
self._path = Path.unit_regular_polygon(numsides)
self._joinstyle = 'miter'
elif symstyle == 1:
self._path = Path.unit_regular_star(numsides)
self._joinstyle = 'bevel'
elif symstyle == 2:
self._path = Path.unit_regular_asterisk(numsides)
self._filled = False
self._joinstyle = 'bevel'
elif symstyle == 3:
self._path = Path.unit_circle()
self._transform = Affine2D().scale(0.5).rotate_deg(rotation)
else:
verts = np.asarray(marker[0])
path = Path(verts)
self._set_custom_marker(path)
def _set_mathtext_path(self):
"""
Draws mathtext markers '$...$' using TextPath object.
Submitted by tcb
"""
from matplotlib.text import TextPath
from matplotlib.font_manager import FontProperties
# again, the properties could be initialised just once outside
# this function
# Font size is irrelevant here, it will be rescaled based on
# the drawn size later
props = FontProperties(size=1.0)
text = TextPath(xy=(0, 0), s=self.get_marker(), fontproperties=props,
usetex=rcParams['text.usetex'])
if len(text.vertices) == 0:
return
xmin, ymin = text.vertices.min(axis=0)
xmax, ymax = text.vertices.max(axis=0)
width = xmax - xmin
height = ymax - ymin
max_dim = max(width, height)
self._transform = Affine2D() \
.translate(-xmin + 0.5 * -width, -ymin + 0.5 * -height) \
.scale(1.0 / max_dim)
self._path = text
self._snap = False
def _half_fill(self):
fs = self.get_fillstyle()
result = fs in self._half_fillstyles
return result
def _set_circle(self, reduction=1.0):
self._transform = Affine2D().scale(0.5 * reduction)
self._snap_threshold = 6.0
fs = self.get_fillstyle()
if not self._half_fill():
self._path = Path.unit_circle()
else:
# build a right-half circle
if fs == 'bottom':
rotate = 270.
elif fs == 'top':
rotate = 90.
elif fs == 'left':
rotate = 180.
else:
rotate = 0.
self._path = self._alt_path = Path.unit_circle_righthalf()
self._transform.rotate_deg(rotate)
self._alt_transform = self._transform.frozen().rotate_deg(180.)
def _set_pixel(self):
self._path = Path.unit_rectangle()
# Ideally, you'd want -0.5, -0.5 here, but then the snapping
# algorithm in the Agg backend will round this to a 2x2
# rectangle from (-1, -1) to (1, 1). By offsetting it
# slightly, we can force it to be (0, 0) to (1, 1), which both
# makes it only be a single pixel and places it correctly
# aligned to 1-width stroking (i.e. the ticks). This hack is
# the best of a number of bad alternatives, mainly because the
# backends are not aware of what marker is actually being used
# beyond just its path data.
self._transform = Affine2D().translate(-0.49999, -0.49999)
self._snap_threshold = None
def _set_point(self):
self._set_circle(reduction=self._point_size_reduction)
_triangle_path = Path(
[[0.0, 1.0], [-1.0, -1.0], [1.0, -1.0], [0.0, 1.0]],
[Path.MOVETO, Path.LINETO, Path.LINETO, Path.CLOSEPOLY])
    # Going down halfway looks too small. Golden ratio is too far.
_triangle_path_u = Path(
[[0.0, 1.0], [-3 / 5., -1 / 5.], [3 / 5., -1 / 5.], [0.0, 1.0]],
[Path.MOVETO, Path.LINETO, Path.LINETO, Path.CLOSEPOLY])
_triangle_path_d = Path(
[[-3 / 5., -1 / 5.], [3 / 5., -1 / 5.], [1.0, -1.0], [-1.0, -1.0],
[-3 / 5., -1 / 5.]],
[Path.MOVETO, Path.LINETO, Path.LINETO, Path.LINETO, Path.CLOSEPOLY])
_triangle_path_l = Path(
[[0.0, 1.0], [0.0, -1.0], [-1.0, -1.0], [0.0, 1.0]],
[Path.MOVETO, Path.LINETO, Path.LINETO, Path.CLOSEPOLY])
_triangle_path_r = Path(
[[0.0, 1.0], [0.0, -1.0], [1.0, -1.0], [0.0, 1.0]],
[Path.MOVETO, Path.LINETO, Path.LINETO, Path.CLOSEPOLY])
def _set_triangle(self, rot, skip):
self._transform = Affine2D().scale(0.5, 0.5).rotate_deg(rot)
self._snap_threshold = 5.0
fs = self.get_fillstyle()
if not self._half_fill():
self._path = self._triangle_path
else:
mpaths = [self._triangle_path_u,
self._triangle_path_l,
self._triangle_path_d,
self._triangle_path_r]
if fs == 'top':
self._path = mpaths[(0 + skip) % 4]
self._alt_path = mpaths[(2 + skip) % 4]
elif fs == 'bottom':
self._path = mpaths[(2 + skip) % 4]
self._alt_path = mpaths[(0 + skip) % 4]
elif fs == 'left':
self._path = mpaths[(1 + skip) % 4]
self._alt_path = mpaths[(3 + skip) % 4]
else:
self._path = mpaths[(3 + skip) % 4]
self._alt_path = mpaths[(1 + skip) % 4]
self._alt_transform = self._transform
self._joinstyle = 'miter'
def _set_triangle_up(self):
return self._set_triangle(0.0, 0)
def _set_triangle_down(self):
return self._set_triangle(180.0, 2)
def _set_triangle_left(self):
return self._set_triangle(90.0, 3)
def _set_triangle_right(self):
return self._set_triangle(270.0, 1)
def _set_square(self):
self._transform = Affine2D().translate(-0.5, -0.5)
self._snap_threshold = 2.0
fs = self.get_fillstyle()
if not self._half_fill():
self._path = Path.unit_rectangle()
else:
# build a bottom filled square out of two rectangles, one
# filled. Use the rotation to support left, right, bottom
# or top
if fs == 'bottom':
rotate = 0.
elif fs == 'top':
rotate = 180.
elif fs == 'left':
rotate = 270.
else:
rotate = 90.
self._path = Path([[0.0, 0.0], [1.0, 0.0], [1.0, 0.5],
[0.0, 0.5], [0.0, 0.0]])
self._alt_path = Path([[0.0, 0.5], [1.0, 0.5], [1.0, 1.0],
[0.0, 1.0], [0.0, 0.5]])
self._transform.rotate_deg(rotate)
self._alt_transform = self._transform
self._joinstyle = 'miter'
def _set_diamond(self):
self._transform = Affine2D().translate(-0.5, -0.5).rotate_deg(45)
self._snap_threshold = 5.0
fs = self.get_fillstyle()
if not self._half_fill():
self._path = Path.unit_rectangle()
else:
self._path = Path([[0.0, 0.0], [1.0, 0.0], [1.0, 1.0], [0.0, 0.0]])
self._alt_path = Path([[0.0, 0.0], [0.0, 1.0],
[1.0, 1.0], [0.0, 0.0]])
if fs == 'bottom':
rotate = 270.
elif fs == 'top':
rotate = 90.
elif fs == 'left':
rotate = 180.
else:
rotate = 0.
self._transform.rotate_deg(rotate)
self._alt_transform = self._transform
self._joinstyle = 'miter'
def _set_thin_diamond(self):
self._set_diamond()
self._transform.scale(0.6, 1.0)
def _set_pentagon(self):
self._transform = Affine2D().scale(0.5)
self._snap_threshold = 5.0
polypath = Path.unit_regular_polygon(5)
fs = self.get_fillstyle()
if not self._half_fill():
self._path = polypath
else:
verts = polypath.vertices
y = (1 + np.sqrt(5)) / 4.
top = Path([verts[0], verts[1], verts[4], verts[0]])
bottom = Path([verts[1], verts[2], verts[3], verts[4], verts[1]])
left = Path([verts[0], verts[1], verts[2], [0, -y], verts[0]])
right = Path([verts[0], verts[4], verts[3], [0, -y], verts[0]])
if fs == 'top':
mpath, mpath_alt = top, bottom
elif fs == 'bottom':
mpath, mpath_alt = bottom, top
elif fs == 'left':
mpath, mpath_alt = left, right
else:
mpath, mpath_alt = right, left
self._path = mpath
self._alt_path = mpath_alt
self._alt_transform = self._transform
self._joinstyle = 'miter'
def _set_star(self):
self._transform = Affine2D().scale(0.5)
self._snap_threshold = 5.0
fs = self.get_fillstyle()
polypath = Path.unit_regular_star(5, innerCircle=0.381966)
if not self._half_fill():
self._path = polypath
else:
verts = polypath.vertices
top = Path(np.vstack((verts[0:4, :], verts[7:10, :], verts[0])))
bottom = Path(np.vstack((verts[3:8, :], verts[3])))
left = Path(np.vstack((verts[0:6, :], verts[0])))
right = Path(np.vstack((verts[0], verts[5:10, :], verts[0])))
if fs == 'top':
mpath, mpath_alt = top, bottom
elif fs == 'bottom':
mpath, mpath_alt = bottom, top
elif fs == 'left':
mpath, mpath_alt = left, right
else:
mpath, mpath_alt = right, left
self._path = mpath
self._alt_path = mpath_alt
self._alt_transform = self._transform
self._joinstyle = 'bevel'
def _set_hexagon1(self):
self._transform = Affine2D().scale(0.5)
self._snap_threshold = None
fs = self.get_fillstyle()
polypath = Path.unit_regular_polygon(6)
if not self._half_fill():
self._path = polypath
else:
verts = polypath.vertices
# not drawing inside lines
x = np.abs(np.cos(5 * np.pi / 6.))
top = Path(np.vstack(([-x, 0], verts[(1, 0, 5), :], [x, 0])))
bottom = Path(np.vstack(([-x, 0], verts[2:5, :], [x, 0])))
left = Path(verts[(0, 1, 2, 3), :])
right = Path(verts[(0, 5, 4, 3), :])
if fs == 'top':
mpath, mpath_alt = top, bottom
elif fs == 'bottom':
mpath, mpath_alt = bottom, top
elif fs == 'left':
mpath, mpath_alt = left, right
else:
mpath, mpath_alt = right, left
self._path = mpath
self._alt_path = mpath_alt
self._alt_transform = self._transform
self._joinstyle = 'miter'
def _set_hexagon2(self):
self._transform = Affine2D().scale(0.5).rotate_deg(30)
self._snap_threshold = None
fs = self.get_fillstyle()
polypath = Path.unit_regular_polygon(6)
if not self._half_fill():
self._path = polypath
else:
verts = polypath.vertices
# not drawing inside lines
x, y = np.sqrt(3) / 4, 3 / 4.
top = Path(verts[(1, 0, 5, 4, 1), :])
bottom = Path(verts[(1, 2, 3, 4), :])
left = Path(np.vstack(([x, y], verts[(0, 1, 2), :],
[-x, -y], [x, y])))
right = Path(np.vstack(([x, y], verts[(5, 4, 3), :], [-x, -y])))
if fs == 'top':
mpath, mpath_alt = top, bottom
elif fs == 'bottom':
mpath, mpath_alt = bottom, top
elif fs == 'left':
mpath, mpath_alt = left, right
else:
mpath, mpath_alt = right, left
self._path = mpath
self._alt_path = mpath_alt
self._alt_transform = self._transform
self._joinstyle = 'miter'
def _set_octagon(self):
self._transform = Affine2D().scale(0.5)
self._snap_threshold = 5.0
fs = self.get_fillstyle()
polypath = Path.unit_regular_polygon(8)
if not self._half_fill():
self._transform.rotate_deg(22.5)
self._path = polypath
else:
x = np.sqrt(2.) / 4.
half = Path([[0, -1], [0, 1], [-x, 1], [-1, x],
[-1, -x], [-x, -1], [0, -1]])
if fs == 'bottom':
rotate = 90.
elif fs == 'top':
rotate = 270.
elif fs == 'right':
rotate = 180.
else:
rotate = 0.
self._transform.rotate_deg(rotate)
self._path = self._alt_path = half
self._alt_transform = self._transform.frozen().rotate_deg(180.0)
self._joinstyle = 'miter'
_line_marker_path = Path([[0.0, -1.0], [0.0, 1.0]])
def _set_vline(self):
self._transform = Affine2D().scale(0.5)
self._snap_threshold = 1.0
self._filled = False
self._path = self._line_marker_path
def _set_hline(self):
self._transform = Affine2D().scale(0.5).rotate_deg(90)
self._snap_threshold = 1.0
self._filled = False
self._path = self._line_marker_path
_tickhoriz_path = Path([[0.0, 0.0], [1.0, 0.0]])
def _set_tickleft(self):
self._transform = Affine2D().scale(-1.0, 1.0)
self._snap_threshold = 1.0
self._filled = False
self._path = self._tickhoriz_path
def _set_tickright(self):
self._transform = Affine2D().scale(1.0, 1.0)
self._snap_threshold = 1.0
self._filled = False
self._path = self._tickhoriz_path
_tickvert_path = Path([[-0.0, 0.0], [-0.0, 1.0]])
def _set_tickup(self):
self._transform = Affine2D().scale(1.0, 1.0)
self._snap_threshold = 1.0
self._filled = False
self._path = self._tickvert_path
def _set_tickdown(self):
self._transform = Affine2D().scale(1.0, -1.0)
self._snap_threshold = 1.0
self._filled = False
self._path = self._tickvert_path
_plus_path = Path([[-1.0, 0.0], [1.0, 0.0],
[0.0, -1.0], [0.0, 1.0]],
[Path.MOVETO, Path.LINETO,
Path.MOVETO, Path.LINETO])
def _set_plus(self):
self._transform = Affine2D().scale(0.5)
self._snap_threshold = 1.0
self._filled = False
self._path = self._plus_path
_tri_path = Path([[0.0, 0.0], [0.0, -1.0],
[0.0, 0.0], [0.8, 0.5],
[0.0, 0.0], [-0.8, 0.5]],
[Path.MOVETO, Path.LINETO,
Path.MOVETO, Path.LINETO,
Path.MOVETO, Path.LINETO])
def _set_tri_down(self):
self._transform = Affine2D().scale(0.5)
self._snap_threshold = 5.0
self._filled = False
self._path = self._tri_path
def _set_tri_up(self):
self._transform = Affine2D().scale(0.5).rotate_deg(90)
self._snap_threshold = 5.0
self._filled = False
self._path = self._tri_path
def _set_tri_left(self):
self._transform = Affine2D().scale(0.5).rotate_deg(270)
self._snap_threshold = 5.0
self._filled = False
self._path = self._tri_path
def _set_tri_right(self):
self._transform = Affine2D().scale(0.5).rotate_deg(180)
self._snap_threshold = 5.0
self._filled = False
self._path = self._tri_path
_caret_path = Path([[-1.0, 1.5], [0.0, 0.0], [1.0, 1.5]])
def _set_caretdown(self):
self._transform = Affine2D().scale(0.5)
self._snap_threshold = 3.0
self._filled = False
self._path = self._caret_path
self._joinstyle = 'miter'
def _set_caretup(self):
self._transform = Affine2D().scale(0.5).rotate_deg(180)
self._snap_threshold = 3.0
self._filled = False
self._path = self._caret_path
self._joinstyle = 'miter'
def _set_caretleft(self):
self._transform = Affine2D().scale(0.5).rotate_deg(270)
self._snap_threshold = 3.0
self._filled = False
self._path = self._caret_path
self._joinstyle = 'miter'
def _set_caretright(self):
self._transform = Affine2D().scale(0.5).rotate_deg(90)
self._snap_threshold = 3.0
self._filled = False
self._path = self._caret_path
self._joinstyle = 'miter'
_x_path = Path([[-1.0, -1.0], [1.0, 1.0],
[-1.0, 1.0], [1.0, -1.0]],
[Path.MOVETO, Path.LINETO,
Path.MOVETO, Path.LINETO])
def _set_x(self):
self._transform = Affine2D().scale(0.5)
self._snap_threshold = 3.0
self._filled = False
self._path = self._x_path
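# --- Hedged usage sketch (added for illustration; not part of the original
# module). MarkerStyle resolves any of the marker specs documented in the
# module docstring into a Path plus an affine transform; renderers combine
# the two to draw the symbol. The marker values below are arbitrary examples.
if __name__ == '__main__':
    style = MarkerStyle('^', fillstyle='top')   # half-filled triangle_up
    print(style.is_filled())                    # True
    print(style.get_path())                     # primary (top) half of the marker
    print(style.get_alt_path())                 # alternate (bottom) half
    star = MarkerStyle((5, 1, 0.0))             # tuple spec: 5-pointed star symbol
    print(star.get_transform())                 # affine transform paired with the path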
| {
"content_hash": "7f544a17708222dc7ec66896b6207e90",
"timestamp": "",
"source": "github",
"line_count": 775,
"max_line_length": 79,
"avg_line_length": 33.44129032258064,
"alnum_prop": 0.48493266967627424,
"repo_name": "Solid-Mechanics/matplotlib-4-abaqus",
"id": "e1b5c9174f9839c74df506b13a6399d436cc0d1a",
"size": "25917",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "matplotlib/markers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "5320"
},
{
"name": "HTML",
"bytes": "4474"
},
{
"name": "JavaScript",
"bytes": "13718"
},
{
"name": "Python",
"bytes": "4498306"
}
],
"symlink_target": ""
} |
import unittest
from Tests.UnitTests._BasePlot_UnitTest import _BasePlotTestCase
from Tests.UnitTests.DatasetTools_UnitTest import DatasetsToolsTestCase
from Tests.UnitTests.ModelUtils_UnitTest import ModelUtilsTestCase
from Tests.UnitTests.DataFrameManipulation_UnitTest import DataFrameManipulationTestCase
def test_suite():
"""run all unittests at once"""
suite = unittest.TestSuite()
result = unittest.TestResult()
suite.addTest(unittest.makeSuite(_BasePlotTestCase))
suite.addTest(unittest.makeSuite(DatasetsToolsTestCase))
suite.addTest(unittest.makeSuite(ModelUtilsTestCase))
suite.addTest(unittest.makeSuite(DataFrameManipulationTestCase))
runner = unittest.TextTestRunner()
print(runner.run(suite))
return suite
if __name__ == '__main__':
test_suite()
| {
"content_hash": "9a7f22ceb662b1257d2d7bd701bda1d5",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 88,
"avg_line_length": 35.21739130434783,
"alnum_prop": 0.7814814814814814,
"repo_name": "sagivba/MachineLearningUtils",
"id": "23e05a85dac0054f2e0c0f5e0306af34ee59c2a9",
"size": "810",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Tests/UnitTests/__testsuite.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "64886"
}
],
"symlink_target": ""
} |
import scrapy
from urllib import unquote
from urlparse import urljoin
from dspbooks.items import BookItem
class DSPBooksSpider(scrapy.Spider):
name = 'dspbooks'
base_url = 'http://serv.yanchick.org/Books/dsp_books/'
start_urls = ['http://serv.yanchick.org/Books/dsp_books/']
def parse(self, response):
quoted_links = response.xpath('//ul/li/a/@href').extract()[1:]
        # Join against the URL of the page being parsed so that links found
        # in subdirectory listings resolve correctly.
        rel_links = [urljoin(response.url, str(unquote(x)))
                     for x in quoted_links]
for link in rel_links:
if link.endswith('/'):
yield scrapy.Request(url=link, callback=self.parse)
else:
yield BookItem(file_urls=[link,])
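# --- Hedged usage sketch (added for illustration; not part of the original
# spider). Running the crawl programmatically; this assumes the standard
# Scrapy project layout (scrapy.cfg plus a settings module with the files
# pipeline configured for BookItem.file_urls).
if __name__ == '__main__':
    from scrapy.crawler import CrawlerProcess
    from scrapy.utils.project import get_project_settings
    process = CrawlerProcess(get_project_settings())
    process.crawl(DSPBooksSpider)
    process.start()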
| {
"content_hash": "fe25b57a2c07c047e530a7d2b6386af6",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 70,
"avg_line_length": 36.526315789473685,
"alnum_prop": 0.6210374639769453,
"repo_name": "gpalsingh/dspbooksspider",
"id": "dc80176e18698aaedf5f223b76a884d96575a453",
"size": "721",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dspbooks/spiders/booksspider.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "7517"
}
],
"symlink_target": ""
} |
import os
import struct
from unittest import TestCase
import numpy as np
import torch
os.environ['KMP_DUPLICATE_LIB_OK'] = 'True'
import tensorflow as tf
from pose_format.utils.reader import BufferReader, ConstStructs
class TestBufferReader(TestCase):
def test_bytes_left(self):
reader = BufferReader(bytes(range(6)))
reader.unpack_f("f")
bytes_left = reader.bytes_left()
self.assertEqual(bytes_left, 2)
def test_advance(self):
reader = BufferReader(bytes())
reader.advance(ConstStructs.float, 10)
self.assertEqual(reader.read_offset, 40)
def test_unpack(self):
buffer = struct.pack("<f", 5.5)
reader = BufferReader(buffer)
unpacked_f = reader.unpack(ConstStructs.float)
self.assertEqual(unpacked_f, 5.5)
def test_unpack_f(self):
buffer = struct.pack("<fh", 5.5, 3)
reader = BufferReader(buffer)
unpacked_f, unpacked_short = reader.unpack_f("fh")
self.assertEqual(unpacked_f, 5.5)
self.assertEqual(unpacked_short, 3)
def test_unpack_str(self):
s = "hello"
buffer = struct.pack("<H%ds" % len(s), len(s), bytes(s, 'utf8'))
reader = BufferReader(buffer)
unpacked_s = reader.unpack_str()
self.assertEqual(unpacked_s, s)
def test_unpack_numpy(self):
buffer = struct.pack("<ffff", 1., 2.5, 3.5, 4.5)
reader = BufferReader(buffer)
arr = reader.unpack_numpy(ConstStructs.float, (2, 2))
res = np.array([[1., 2.5], [3.5, 4.5]])
self.assertTrue(np.all(arr == res), msg="Numpy unpacked array is not equal to expected array")
def test_unpack_numpy_writeable(self):
buffer = struct.pack("<ffff", 1., 2.5, 3.5, 4.5)
reader = BufferReader(buffer)
arr = reader.unpack_numpy(ConstStructs.float, (2, 2))
# if array is read-only, this will raise a ValueError
arr -= 0.1
def test_unpack_torch(self):
buffer = struct.pack("<ffff", 1., 2.5, 3.5, 4.5)
reader = BufferReader(buffer)
arr = reader.unpack_torch(ConstStructs.float, (2, 2))
res = torch.tensor([[1., 2.5], [3.5, 4.5]])
self.assertTrue(torch.all(arr == res), msg="Torch unpacked array is not equal to expected array")
def test_unpack_tensorflow(self):
buffer = struct.pack("<ffff", 1., 2.5, 3.5, 4.5)
reader = BufferReader(buffer)
arr = reader.unpack_tensorflow(ConstStructs.float, (2, 2))
res = tf.constant([[1., 2.5], [3.5, 4.5]])
self.assertTrue(tf.reduce_all(tf.equal(arr, res)),
msg="Tensorflow unpacked array is not equal to expected array")
| {
"content_hash": "242f13ad7b961e701a1e84d44b203803",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 105,
"avg_line_length": 33.24691358024691,
"alnum_prop": 0.6104715930189379,
"repo_name": "AmitMY/pose-format",
"id": "de6abfcbabe60833ce81fab14915449ebd39e174",
"size": "2693",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pose_format/utils/reader_test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "294"
},
{
"name": "HTML",
"bytes": "3186"
},
{
"name": "Python",
"bytes": "167290"
},
{
"name": "Starlark",
"bytes": "10118"
},
{
"name": "TypeScript",
"bytes": "22828"
}
],
"symlink_target": ""
} |
import msgpack
try:
import ujson as json
except ImportError:
import json
class JSONSerializer:
@staticmethod
def pack(data):
return json.dumps(data).encode()
@staticmethod
def unpack(data):
decoded = data.decode() if isinstance(data, bytes) else data
return json.loads(decoded)
class MessagePackSerializer:
@staticmethod
def pack(data):
return msgpack.packb(data, use_bin_type=True)
@staticmethod
def unpack(data):
return msgpack.unpackb(data, use_list=True, encoding='utf-8')
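# --- Hedged usage sketch (added for illustration; not part of the original
# module). Both classes expose the same pack()/unpack() pair, so either can
# sit behind a single serializer setting; the sample payload is arbitrary.
if __name__ == '__main__':
    entry = {'term': 3, 'command': {'key': 'x', 'value': 1}}
    for serializer in (JSONSerializer, MessagePackSerializer):
        packed = serializer.pack(entry)            # bytes on the wire
        assert serializer.unpack(packed) == entry  # lossless round trip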
| {
"content_hash": "d966cbcae2f624bced91c5b7c1939535",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 69,
"avg_line_length": 20.88888888888889,
"alnum_prop": 0.6684397163120568,
"repo_name": "zhebrak/raftos",
"id": "57453759bcd42ce1e3eaea6f55d0dbbbb2be0cde",
"size": "564",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "raftos/serializers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "40021"
}
],
"symlink_target": ""
} |
def get_number_of_app_ports(app):
"""
Get the number of ports for the given app JSON. This roughly follows the
logic in marathon-lb for finding app IPs/ports, although we are only
interested in the quantity of ports an app should have and don't consider
the specific IPs/ports of individual tasks:
https://github.com/mesosphere/marathon-lb/blob/v1.10.3/utils.py#L393-L415
:param app: The app JSON from the Marathon API.
:return: The number of ports for the app.
"""
mode = _get_networking_mode(app)
ports_list = None
if mode == 'host':
ports_list = _get_port_definitions(app)
elif mode == 'container/bridge':
ports_list = _get_port_definitions(app)
if ports_list is None:
ports_list = _get_container_port_mappings(app)
elif mode == 'container':
ports_list = _get_ip_address_discovery_ports(app)
# Marathon 1.5+: the ipAddress field is missing -> ports_list is None
# Marathon <1.5: the ipAddress field can be present, but ports_list can
# still be empty while the container port mapping is not :-/
if not ports_list:
ports_list = _get_container_port_mappings(app)
else:
raise RuntimeError(
"Unknown Marathon networking mode '{}'".format(mode))
return len(ports_list)
def _get_networking_mode(app):
"""
Get the Marathon networking mode for the app.
"""
# Marathon 1.5+: there is a `networks` field
networks = app.get('networks')
if networks:
# Modes cannot be mixed, so assigning the last mode is fine
return networks[-1].get('mode', 'container')
# Older Marathon: determine equivalent network mode
container = app.get('container')
if container is not None and 'docker' in container:
docker_network = container['docker'].get('network')
if docker_network == 'USER':
return 'container'
elif docker_network == 'BRIDGE':
return 'container/bridge'
return 'container' if _is_legacy_ip_per_task(app) else 'host'
def _get_container_port_mappings(app):
"""
Get the ``portMappings`` field for the app container.
"""
container = app['container']
# Marathon 1.5+: container.portMappings field
port_mappings = container.get('portMappings')
# Older Marathon: container.docker.portMappings field
if port_mappings is None and 'docker' in container:
port_mappings = container['docker'].get('portMappings')
return port_mappings
def _get_port_definitions(app):
"""
Get the ``portDefinitions`` field for the app if present.
"""
if 'portDefinitions' in app:
return app['portDefinitions']
# In the worst case try use the old `ports` array
# Only useful on very old Marathons
if 'ports' in app:
return app['ports']
return None
def _get_ip_address_discovery_ports(app):
"""
Get the ports from the ``ipAddress`` field for the app if present.
"""
if not _is_legacy_ip_per_task(app):
return None
return app['ipAddress']['discovery']['ports']
def _is_legacy_ip_per_task(app):
"""
Return whether the application is using IP-per-task on Marathon < 1.5.
:param app: The application to check.
:return: True if using IP per task, False otherwise.
"""
return app.get('ipAddress') is not None
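# --- Hedged usage sketch (added for illustration; not part of the original
# module). Two minimal app documents covering the Marathon 1.5+ bridge mode
# and the older host mode; field values are arbitrary examples.
if __name__ == '__main__':
    bridge_app = {
        'networks': [{'mode': 'container/bridge'}],
        'container': {
            'portMappings': [
                {'containerPort': 80, 'hostPort': 0},
                {'containerPort': 443, 'hostPort': 0},
            ],
        },
    }
    host_app = {'portDefinitions': [{'port': 10008}, {'port': 10009}]}
    assert get_number_of_app_ports(bridge_app) == 2
    assert get_number_of_app_ports(host_app) == 2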
| {
"content_hash": "d571d2c7ddd11fcb01da9ab056a61efa",
"timestamp": "",
"source": "github",
"line_count": 102,
"max_line_length": 79,
"avg_line_length": 33.294117647058826,
"alnum_prop": 0.6454652532391049,
"repo_name": "praekeltfoundation/certbot",
"id": "36b257c8245b66c894f350eb2eb7e24786b0ff7b",
"size": "3396",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "marathon_acme/marathon_util.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "598"
},
{
"name": "Python",
"bytes": "271690"
},
{
"name": "Shell",
"bytes": "561"
}
],
"symlink_target": ""
} |
import pytest
import numpy as np
from datetime import date, timedelta, time, datetime
import dateutil
import pandas as pd
import pandas.util.testing as tm
from pandas.compat import lrange
from pandas.compat.numpy import np_datetime64_compat
from pandas import (DatetimeIndex, Index, date_range, DataFrame,
Timestamp, offsets)
from pandas.util.testing import assert_almost_equal
randn = np.random.randn
class TestDatetimeIndexLikeTimestamp(object):
# Tests for DatetimeIndex behaving like a vectorized Timestamp
def test_dti_date_out_of_range(self):
# see gh-1475
pytest.raises(ValueError, DatetimeIndex, ['1400-01-01'])
pytest.raises(ValueError, DatetimeIndex, [datetime(1400, 1, 1)])
def test_timestamp_fields(self):
# extra fields from DatetimeIndex like quarter and week
idx = tm.makeDateIndex(100)
fields = ['dayofweek', 'dayofyear', 'week', 'weekofyear', 'quarter',
'days_in_month', 'is_month_start', 'is_month_end',
'is_quarter_start', 'is_quarter_end', 'is_year_start',
'is_year_end', 'weekday_name']
for f in fields:
expected = getattr(idx, f)[-1]
result = getattr(Timestamp(idx[-1]), f)
assert result == expected
assert idx.freq == Timestamp(idx[-1], idx.freq).freq
assert idx.freqstr == Timestamp(idx[-1], idx.freq).freqstr
class TestDatetimeIndex(object):
def test_get_loc(self):
idx = pd.date_range('2000-01-01', periods=3)
for method in [None, 'pad', 'backfill', 'nearest']:
assert idx.get_loc(idx[1], method) == 1
assert idx.get_loc(idx[1].to_pydatetime(), method) == 1
assert idx.get_loc(str(idx[1]), method) == 1
if method is not None:
assert idx.get_loc(idx[1], method,
tolerance=pd.Timedelta('0 days')) == 1
assert idx.get_loc('2000-01-01', method='nearest') == 0
assert idx.get_loc('2000-01-01T12', method='nearest') == 1
assert idx.get_loc('2000-01-01T12', method='nearest',
tolerance='1 day') == 1
assert idx.get_loc('2000-01-01T12', method='nearest',
tolerance=pd.Timedelta('1D')) == 1
assert idx.get_loc('2000-01-01T12', method='nearest',
tolerance=np.timedelta64(1, 'D')) == 1
assert idx.get_loc('2000-01-01T12', method='nearest',
tolerance=timedelta(1)) == 1
with tm.assert_raises_regex(ValueError,
'unit abbreviation w/o a number'):
idx.get_loc('2000-01-01T12', method='nearest', tolerance='foo')
with pytest.raises(KeyError):
idx.get_loc('2000-01-01T03', method='nearest', tolerance='2 hours')
with pytest.raises(
ValueError,
match='tolerance size must match target index size'):
idx.get_loc('2000-01-01', method='nearest',
tolerance=[pd.Timedelta('1day').to_timedelta64(),
pd.Timedelta('1day').to_timedelta64()])
assert idx.get_loc('2000', method='nearest') == slice(0, 3)
assert idx.get_loc('2000-01', method='nearest') == slice(0, 3)
assert idx.get_loc('1999', method='nearest') == 0
assert idx.get_loc('2001', method='nearest') == 2
with pytest.raises(KeyError):
idx.get_loc('1999', method='pad')
with pytest.raises(KeyError):
idx.get_loc('2001', method='backfill')
with pytest.raises(KeyError):
idx.get_loc('foobar')
with pytest.raises(TypeError):
idx.get_loc(slice(2))
idx = pd.to_datetime(['2000-01-01', '2000-01-04'])
assert idx.get_loc('2000-01-02', method='nearest') == 0
assert idx.get_loc('2000-01-03', method='nearest') == 1
assert idx.get_loc('2000-01', method='nearest') == slice(0, 2)
# time indexing
idx = pd.date_range('2000-01-01', periods=24, freq='H')
tm.assert_numpy_array_equal(idx.get_loc(time(12)),
np.array([12]), check_dtype=False)
tm.assert_numpy_array_equal(idx.get_loc(time(12, 30)),
np.array([]), check_dtype=False)
with pytest.raises(NotImplementedError):
idx.get_loc(time(12, 30), method='pad')
def test_get_indexer(self):
idx = pd.date_range('2000-01-01', periods=3)
exp = np.array([0, 1, 2], dtype=np.intp)
tm.assert_numpy_array_equal(idx.get_indexer(idx), exp)
target = idx[0] + pd.to_timedelta(['-1 hour', '12 hours',
'1 day 1 hour'])
tm.assert_numpy_array_equal(idx.get_indexer(target, 'pad'),
np.array([-1, 0, 1], dtype=np.intp))
tm.assert_numpy_array_equal(idx.get_indexer(target, 'backfill'),
np.array([0, 1, 2], dtype=np.intp))
tm.assert_numpy_array_equal(idx.get_indexer(target, 'nearest'),
np.array([0, 1, 1], dtype=np.intp))
tm.assert_numpy_array_equal(
idx.get_indexer(target, 'nearest',
tolerance=pd.Timedelta('1 hour')),
np.array([0, -1, 1], dtype=np.intp))
tol_raw = [pd.Timedelta('1 hour'),
pd.Timedelta('1 hour'),
pd.Timedelta('1 hour').to_timedelta64(), ]
tm.assert_numpy_array_equal(
idx.get_indexer(target, 'nearest',
tolerance=[np.timedelta64(x) for x in tol_raw]),
np.array([0, -1, 1], dtype=np.intp))
tol_bad = [pd.Timedelta('2 hour').to_timedelta64(),
pd.Timedelta('1 hour').to_timedelta64(),
'foo', ]
with pytest.raises(
ValueError, match='abbreviation w/o a number'):
idx.get_indexer(target, 'nearest', tolerance=tol_bad)
with pytest.raises(ValueError):
idx.get_indexer(idx[[0]], method='nearest', tolerance='foo')
def test_reasonable_keyerror(self):
# GH #1062
index = DatetimeIndex(['1/3/2000'])
try:
index.get_loc('1/1/2000')
except KeyError as e:
assert '2000' in str(e)
def test_roundtrip_pickle_with_tz(self):
# GH 8367
# round-trip of timezone
index = date_range('20130101', periods=3, tz='US/Eastern', name='foo')
unpickled = tm.round_trip_pickle(index)
tm.assert_index_equal(index, unpickled)
def test_reindex_preserves_tz_if_target_is_empty_list_or_array(self):
# GH7774
index = date_range('20130101', periods=3, tz='US/Eastern')
assert str(index.reindex([])[0].tz) == 'US/Eastern'
assert str(index.reindex(np.array([]))[0].tz) == 'US/Eastern'
def test_time_loc(self): # GH8667
from datetime import time
from pandas._libs.index import _SIZE_CUTOFF
ns = _SIZE_CUTOFF + np.array([-100, 100], dtype=np.int64)
key = time(15, 11, 30)
start = key.hour * 3600 + key.minute * 60 + key.second
step = 24 * 3600
for n in ns:
idx = pd.date_range('2014-11-26', periods=n, freq='S')
ts = pd.Series(np.random.randn(n), index=idx)
i = np.arange(start, n, step)
tm.assert_numpy_array_equal(ts.index.get_loc(key), i,
check_dtype=False)
tm.assert_series_equal(ts[key], ts.iloc[i])
left, right = ts.copy(), ts.copy()
left[key] *= -10
right.iloc[i] *= -10
tm.assert_series_equal(left, right)
def test_time_overflow_for_32bit_machines(self):
# GH8943. On some machines NumPy defaults to np.int32 (for example,
# 32-bit Linux machines). In the function _generate_regular_range
# found in tseries/index.py, `periods` gets multiplied by `strides`
# (which has value 1e9) and since the max value for np.int32 is ~2e9,
# and since those machines won't promote np.int32 to np.int64, we get
# overflow.
periods = np.int_(1000)
idx1 = pd.date_range(start='2000', periods=periods, freq='S')
assert len(idx1) == periods
idx2 = pd.date_range(end='2000', periods=periods, freq='S')
assert len(idx2) == periods
def test_nat(self):
assert DatetimeIndex([np.nan])[0] is pd.NaT
def test_week_of_month_frequency(self):
# GH 5348: "ValueError: Could not evaluate WOM-1SUN" shouldn't raise
d1 = date(2002, 9, 1)
d2 = date(2013, 10, 27)
d3 = date(2012, 9, 30)
idx1 = DatetimeIndex([d1, d2])
idx2 = DatetimeIndex([d3])
result_append = idx1.append(idx2)
expected = DatetimeIndex([d1, d2, d3])
tm.assert_index_equal(result_append, expected)
result_union = idx1.union(idx2)
expected = DatetimeIndex([d1, d3, d2])
tm.assert_index_equal(result_union, expected)
# GH 5115
result = date_range("2013-1-1", periods=4, freq='WOM-1SAT')
dates = ['2013-01-05', '2013-02-02', '2013-03-02', '2013-04-06']
expected = DatetimeIndex(dates, freq='WOM-1SAT')
tm.assert_index_equal(result, expected)
def test_hash_error(self):
index = date_range('20010101', periods=10)
with tm.assert_raises_regex(TypeError, "unhashable type: %r" %
type(index).__name__):
hash(index)
def test_stringified_slice_with_tz(self):
# GH2658
import datetime
start = datetime.datetime.now()
idx = DatetimeIndex(start=start, freq="1d", periods=10)
df = DataFrame(lrange(10), index=idx)
df["2013-01-14 23:44:34.437768-05:00":] # no exception here
def test_append_join_nondatetimeindex(self):
rng = date_range('1/1/2000', periods=10)
idx = Index(['a', 'b', 'c', 'd'])
result = rng.append(idx)
assert isinstance(result[0], Timestamp)
# it works
rng.join(idx, how='outer')
def test_comparisons_coverage(self):
rng = date_range('1/1/2000', periods=10)
# raise TypeError for now
pytest.raises(TypeError, rng.__lt__, rng[3].value)
result = rng == list(rng)
exp = rng == rng
tm.assert_numpy_array_equal(result, exp)
def test_comparisons_nat(self):
fidx1 = pd.Index([1.0, np.nan, 3.0, np.nan, 5.0, 7.0])
fidx2 = pd.Index([2.0, 3.0, np.nan, np.nan, 6.0, 7.0])
didx1 = pd.DatetimeIndex(['2014-01-01', pd.NaT, '2014-03-01', pd.NaT,
'2014-05-01', '2014-07-01'])
didx2 = pd.DatetimeIndex(['2014-02-01', '2014-03-01', pd.NaT, pd.NaT,
'2014-06-01', '2014-07-01'])
darr = np.array([np_datetime64_compat('2014-02-01 00:00Z'),
np_datetime64_compat('2014-03-01 00:00Z'),
np_datetime64_compat('nat'), np.datetime64('nat'),
np_datetime64_compat('2014-06-01 00:00Z'),
np_datetime64_compat('2014-07-01 00:00Z')])
cases = [(fidx1, fidx2), (didx1, didx2), (didx1, darr)]
        # Check pd.NaT is handled the same as np.nan
with tm.assert_produces_warning(None):
for idx1, idx2 in cases:
result = idx1 < idx2
expected = np.array([True, False, False, False, True, False])
tm.assert_numpy_array_equal(result, expected)
result = idx2 > idx1
expected = np.array([True, False, False, False, True, False])
tm.assert_numpy_array_equal(result, expected)
result = idx1 <= idx2
expected = np.array([True, False, False, False, True, True])
tm.assert_numpy_array_equal(result, expected)
result = idx2 >= idx1
expected = np.array([True, False, False, False, True, True])
tm.assert_numpy_array_equal(result, expected)
result = idx1 == idx2
expected = np.array([False, False, False, False, False, True])
tm.assert_numpy_array_equal(result, expected)
result = idx1 != idx2
expected = np.array([True, True, True, True, True, False])
tm.assert_numpy_array_equal(result, expected)
with tm.assert_produces_warning(None):
for idx1, val in [(fidx1, np.nan), (didx1, pd.NaT)]:
result = idx1 < val
expected = np.array([False, False, False, False, False, False])
tm.assert_numpy_array_equal(result, expected)
result = idx1 > val
tm.assert_numpy_array_equal(result, expected)
result = idx1 <= val
tm.assert_numpy_array_equal(result, expected)
result = idx1 >= val
tm.assert_numpy_array_equal(result, expected)
result = idx1 == val
tm.assert_numpy_array_equal(result, expected)
result = idx1 != val
expected = np.array([True, True, True, True, True, True])
tm.assert_numpy_array_equal(result, expected)
        # Check pd.NaT is handled the same as np.nan
with tm.assert_produces_warning(None):
for idx1, val in [(fidx1, 3), (didx1, datetime(2014, 3, 1))]:
result = idx1 < val
expected = np.array([True, False, False, False, False, False])
tm.assert_numpy_array_equal(result, expected)
result = idx1 > val
expected = np.array([False, False, False, False, True, True])
tm.assert_numpy_array_equal(result, expected)
result = idx1 <= val
expected = np.array([True, False, True, False, False, False])
tm.assert_numpy_array_equal(result, expected)
result = idx1 >= val
expected = np.array([False, False, True, False, True, True])
tm.assert_numpy_array_equal(result, expected)
result = idx1 == val
expected = np.array([False, False, True, False, False, False])
tm.assert_numpy_array_equal(result, expected)
result = idx1 != val
expected = np.array([True, True, False, True, True, True])
tm.assert_numpy_array_equal(result, expected)
def test_map(self):
rng = date_range('1/1/2000', periods=10)
f = lambda x: x.strftime('%Y%m%d')
result = rng.map(f)
exp = Index([f(x) for x in rng], dtype='<U8')
tm.assert_index_equal(result, exp)
def test_iteration_preserves_tz(self):
# see gh-8890
index = date_range("2012-01-01", periods=3, freq='H', tz='US/Eastern')
for i, ts in enumerate(index):
result = ts
expected = index[i]
assert result == expected
index = date_range("2012-01-01", periods=3, freq='H',
tz=dateutil.tz.tzoffset(None, -28800))
for i, ts in enumerate(index):
result = ts
expected = index[i]
assert result._repr_base == expected._repr_base
assert result == expected
# 9100
index = pd.DatetimeIndex(['2014-12-01 03:32:39.987000-08:00',
'2014-12-01 04:12:34.987000-08:00'])
for i, ts in enumerate(index):
result = ts
expected = index[i]
assert result._repr_base == expected._repr_base
assert result == expected
def test_misc_coverage(self):
rng = date_range('1/1/2000', periods=5)
result = rng.groupby(rng.day)
assert isinstance(list(result.values())[0][0], Timestamp)
idx = DatetimeIndex(['2000-01-03', '2000-01-01', '2000-01-02'])
assert not idx.equals(list(idx))
non_datetime = Index(list('abc'))
assert not idx.equals(list(non_datetime))
def test_string_index_series_name_converted(self):
# #1644
df = DataFrame(np.random.randn(10, 4),
index=date_range('1/1/2000', periods=10))
result = df.loc['1/3/2000']
assert result.name == df.index[2]
result = df.T['1/3/2000']
assert result.name == df.index[2]
def test_get_duplicates(self):
idx = DatetimeIndex(['2000-01-01', '2000-01-02', '2000-01-02',
'2000-01-03', '2000-01-03', '2000-01-04'])
result = idx.get_duplicates()
ex = DatetimeIndex(['2000-01-02', '2000-01-03'])
tm.assert_index_equal(result, ex)
def test_argmin_argmax(self):
idx = DatetimeIndex(['2000-01-04', '2000-01-01', '2000-01-02'])
assert idx.argmin() == 1
assert idx.argmax() == 0
def test_sort_values(self):
idx = DatetimeIndex(['2000-01-04', '2000-01-01', '2000-01-02'])
ordered = idx.sort_values()
assert ordered.is_monotonic
ordered = idx.sort_values(ascending=False)
assert ordered[::-1].is_monotonic
ordered, dexer = idx.sort_values(return_indexer=True)
assert ordered.is_monotonic
tm.assert_numpy_array_equal(dexer, np.array([1, 2, 0], dtype=np.intp))
ordered, dexer = idx.sort_values(return_indexer=True, ascending=False)
assert ordered[::-1].is_monotonic
tm.assert_numpy_array_equal(dexer, np.array([0, 2, 1], dtype=np.intp))
def test_map_bug_1677(self):
index = DatetimeIndex(['2012-04-25 09:30:00.393000'])
f = index.asof
result = index.map(f)
expected = Index([f(index[0])])
tm.assert_index_equal(result, expected)
def test_groupby_function_tuple_1677(self):
df = DataFrame(np.random.rand(100),
index=date_range("1/1/2000", periods=100))
monthly_group = df.groupby(lambda x: (x.year, x.month))
result = monthly_group.mean()
assert isinstance(result.index[0], tuple)
def test_append_numpy_bug_1681(self):
# another datetime64 bug
dr = date_range('2011/1/1', '2012/1/1', freq='W-FRI')
a = DataFrame()
c = DataFrame({'A': 'foo', 'B': dr}, index=dr)
result = a.append(c)
assert (result['B'] == dr).all()
def test_isin(self):
index = tm.makeDateIndex(4)
result = index.isin(index)
assert result.all()
result = index.isin(list(index))
assert result.all()
assert_almost_equal(index.isin([index[2], 5]),
np.array([False, False, True, False]))
def test_time(self):
rng = pd.date_range('1/1/2000', freq='12min', periods=10)
result = pd.Index(rng).time
expected = [t.time() for t in rng]
assert (result == expected).all()
def test_date(self):
rng = pd.date_range('1/1/2000', freq='12H', periods=10)
result = pd.Index(rng).date
expected = [t.date() for t in rng]
assert (result == expected).all()
def test_does_not_convert_mixed_integer(self):
df = tm.makeCustomDataframe(10, 10,
data_gen_f=lambda *args, **kwargs: randn(),
r_idx_type='i', c_idx_type='dt')
cols = df.columns.join(df.index, how='outer')
joined = cols.join(df.columns)
assert cols.dtype == np.dtype('O')
assert cols.dtype == joined.dtype
tm.assert_numpy_array_equal(cols.values, joined.values)
def test_join_self(self):
index = date_range('1/1/2000', periods=10)
kinds = 'outer', 'inner', 'left', 'right'
for kind in kinds:
joined = index.join(index, how=kind)
assert index is joined
def assert_index_parameters(self, index):
assert index.freq == '40960N'
assert index.inferred_freq == '40960N'
def test_ns_index(self):
nsamples = 400
ns = int(1e9 / 24414)
dtstart = np.datetime64('2012-09-20T00:00:00')
dt = dtstart + np.arange(nsamples) * np.timedelta64(ns, 'ns')
freq = ns * offsets.Nano()
index = pd.DatetimeIndex(dt, freq=freq, name='time')
self.assert_index_parameters(index)
new_index = pd.DatetimeIndex(start=index[0], end=index[-1],
freq=index.freq)
self.assert_index_parameters(new_index)
def test_join_with_period_index(self):
df = tm.makeCustomDataframe(
10, 10, data_gen_f=lambda *args: np.random.randint(2),
c_idx_type='p', r_idx_type='dt')
s = df.iloc[:5, 0]
joins = 'left', 'right', 'inner', 'outer'
for join in joins:
with tm.assert_raises_regex(ValueError,
'can only call with other '
'PeriodIndex-ed objects'):
df.columns.join(s.index, how=join)
def test_factorize(self):
idx1 = DatetimeIndex(['2014-01', '2014-01', '2014-02', '2014-02',
'2014-03', '2014-03'])
exp_arr = np.array([0, 0, 1, 1, 2, 2], dtype=np.intp)
exp_idx = DatetimeIndex(['2014-01', '2014-02', '2014-03'])
arr, idx = idx1.factorize()
tm.assert_numpy_array_equal(arr, exp_arr)
tm.assert_index_equal(idx, exp_idx)
arr, idx = idx1.factorize(sort=True)
tm.assert_numpy_array_equal(arr, exp_arr)
tm.assert_index_equal(idx, exp_idx)
# tz must be preserved
idx1 = idx1.tz_localize('Asia/Tokyo')
exp_idx = exp_idx.tz_localize('Asia/Tokyo')
arr, idx = idx1.factorize()
tm.assert_numpy_array_equal(arr, exp_arr)
tm.assert_index_equal(idx, exp_idx)
idx2 = pd.DatetimeIndex(['2014-03', '2014-03', '2014-02', '2014-01',
'2014-03', '2014-01'])
exp_arr = np.array([2, 2, 1, 0, 2, 0], dtype=np.intp)
exp_idx = DatetimeIndex(['2014-01', '2014-02', '2014-03'])
arr, idx = idx2.factorize(sort=True)
tm.assert_numpy_array_equal(arr, exp_arr)
tm.assert_index_equal(idx, exp_idx)
exp_arr = np.array([0, 0, 1, 2, 0, 2], dtype=np.intp)
exp_idx = DatetimeIndex(['2014-03', '2014-02', '2014-01'])
arr, idx = idx2.factorize()
tm.assert_numpy_array_equal(arr, exp_arr)
tm.assert_index_equal(idx, exp_idx)
# freq must be preserved
idx3 = date_range('2000-01', periods=4, freq='M', tz='Asia/Tokyo')
exp_arr = np.array([0, 1, 2, 3], dtype=np.intp)
arr, idx = idx3.factorize()
tm.assert_numpy_array_equal(arr, exp_arr)
tm.assert_index_equal(idx, idx3)
def test_factorize_tz(self):
# GH 13750
for tz in [None, 'UTC', 'US/Eastern', 'Asia/Tokyo']:
base = pd.date_range('2016-11-05', freq='H', periods=100, tz=tz)
idx = base.repeat(5)
exp_arr = np.arange(100, dtype=np.intp).repeat(5)
for obj in [idx, pd.Series(idx)]:
arr, res = obj.factorize()
tm.assert_numpy_array_equal(arr, exp_arr)
tm.assert_index_equal(res, base)
def test_factorize_dst(self):
# GH 13750
idx = pd.date_range('2016-11-06', freq='H', periods=12,
tz='US/Eastern')
for obj in [idx, pd.Series(idx)]:
arr, res = obj.factorize()
tm.assert_numpy_array_equal(arr, np.arange(12, dtype=np.intp))
tm.assert_index_equal(res, idx)
idx = pd.date_range('2016-06-13', freq='H', periods=12,
tz='US/Eastern')
for obj in [idx, pd.Series(idx)]:
arr, res = obj.factorize()
tm.assert_numpy_array_equal(arr, np.arange(12, dtype=np.intp))
tm.assert_index_equal(res, idx)
| {
"content_hash": "483b27eb70a50c3721f30e3fa4c484dd",
"timestamp": "",
"source": "github",
"line_count": 608,
"max_line_length": 79,
"avg_line_length": 39.90296052631579,
"alnum_prop": 0.5459791434813075,
"repo_name": "winklerand/pandas",
"id": "076c3d6f25a896141a5647c6eb7a3fe1716f876c",
"size": "24261",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "pandas/tests/indexes/datetimes/test_datetime.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "4071"
},
{
"name": "C",
"bytes": "493226"
},
{
"name": "C++",
"bytes": "17353"
},
{
"name": "HTML",
"bytes": "551706"
},
{
"name": "Makefile",
"bytes": "907"
},
{
"name": "PowerShell",
"bytes": "2972"
},
{
"name": "Python",
"bytes": "12249109"
},
{
"name": "R",
"bytes": "1177"
},
{
"name": "Shell",
"bytes": "23114"
},
{
"name": "Smarty",
"bytes": "2045"
}
],
"symlink_target": ""
} |
from pyface.ui.wx.clipboard import *
| {
"content_hash": "f36722a135837fce5dbb3114ee5f5a22",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 36,
"avg_line_length": 37,
"alnum_prop": 0.7837837837837838,
"repo_name": "enthought/etsproxy",
"id": "7055821b29626ddac512bf89de2eb2a056a01152",
"size": "52",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "enthought/pyface/ui/wx/clipboard.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "363714"
}
],
"symlink_target": ""
} |
import random
import application.models as Models
def generator_user_id():
    # Keep drawing random ids until one is found that no existing User uses;
    # retry on collision instead of falling out of the loop.
    while True:
        id = random.randint(100000, 99999999)
        user = Models.User.objects.filter(id=id).first()
        if not user:
            return id
def generator_post_id():
    # Keep drawing random ids until one is found that no existing Post uses;
    # retry on collision instead of falling out of the loop.
    while True:
        id = random.randint(100000, 99999999)
        post = Models.Post.objects.filter(id=id).first()
        if not post:
            return id
| {
"content_hash": "267d2fee5122a2b3a98ddf1a4b33d5a4",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 56,
"avg_line_length": 22.333333333333332,
"alnum_prop": 0.603411513859275,
"repo_name": "luke0922/mdpress",
"id": "216ba1b251f710688e856e95008c127b19428b55",
"size": "509",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "application/utils/generator.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "338714"
},
{
"name": "HTML",
"bytes": "437885"
},
{
"name": "JavaScript",
"bytes": "963919"
},
{
"name": "Nginx",
"bytes": "1767"
},
{
"name": "PHP",
"bytes": "15921"
},
{
"name": "Python",
"bytes": "69804"
},
{
"name": "Shell",
"bytes": "23"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import, print_function, unicode_literals
import os
import subprocess
import time
import unittest
from .util import ShellProcess, ShellComponentTestCaseMixin, get_message
_multiprocess_can_split_ = True
class BoltTest(ShellComponentTestCaseMixin, unittest.TestCase):
component = "dummy_bolt.py"
def test_echo_tuple(self):
msg = get_message()
self.shell_proc.write_message(msg)
res = self.shell_proc.read_message()
# DummyBolt should simply echo any tuple sent in to it
self.assertEqual(res["command"], "emit")
self.assertEqual(msg["tuple"], res["tuple"])
self.shell_proc.write_message([1]) # send fake task id
def test_ack_tuple(self):
msg = get_message(id="ack_me")
self.shell_proc.write_message(msg)
res = self.shell_proc.read_message()
self.assertEqual(res["command"], "emit")
self.assertEqual(msg["tuple"], res["tuple"])
self.shell_proc.write_message([1]) # send fake task id
res = self.shell_proc.read_message()
self.assertEqual(res, {"command": "ack", "id": msg["id"]})
def test_fail_tuple(self):
msg = get_message(id="fail_me")
self.shell_proc.write_message(msg)
res = self.shell_proc.read_message()
self.assertEqual(res, {"command": "fail", "id": msg["id"]})
def test_emit_stream(self):
msg = get_message(id="stream-words")
self.shell_proc.write_message(msg)
res = self.shell_proc.read_message()
self.assertEqual(res["command"], "emit")
self.assertEqual(res["stream"], "words")
self.assertEqual(res["tuple"], msg["tuple"])
self.shell_proc.write_message([1]) # send fake task id
def test_emit_anchoring(self):
msg = get_message(id="anchor|1,2,3")
self.shell_proc.write_message(msg)
res = self.shell_proc.read_message()
self.assertEqual(res["command"], "emit")
self.assertEqual(res["anchors"], ["1", "2", "3"])
self.assertEqual(res["tuple"], msg["tuple"])
self.shell_proc.write_message([1]) # send fake task id
def test_emit_direct_task(self):
msg = get_message(id="direct_task|12")
self.shell_proc.write_message(msg)
res = self.shell_proc.read_message()
self.assertEqual(res["command"], "emit")
self.assertEqual(res["task"], 12)
self.assertEqual(res["tuple"], msg["tuple"])
def test_emit_many(self):
msg = get_message(id="emit_many")
self.shell_proc.write_message(msg)
for i in range(5):
res = self.shell_proc.read_message()
self.assertEqual(res["tuple"], msg["tuple"])
self.shell_proc.write_message([1]) # send fake task id
# TODO: test emit_many for stream, anchoring, direct_task
class BoltExceptionTest(ShellComponentTestCaseMixin, unittest.TestCase):
component = "dummy_bolt.py"
def test_exception(self):
"""Ensure that exceptions raised in the bolt send proper log messages
before exiting. In a separate test case as the process immediately
exits after an exception is raised.
"""
msg = get_message(id="exception")
self.shell_proc.write_message(msg)
res = self.shell_proc.read_message()
self.assertEqual(res["command"], "error")
self.assertIn("Exception: ", res["msg"])
res = self.shell_proc.read_message()
self.assertEqual(res["command"], "sync")
# Ensure exit code of 1 from bolt
time.sleep(0.5)
self.assertEqual(self.proc.poll(), 1)
class BoltAutoAckTest(ShellComponentTestCaseMixin, unittest.TestCase):
component = "dummy_bolt_auto_ack.py"
def test_emit_auto_ack(self):
msg = get_message()
self.shell_proc.write_message(msg)
res = self.shell_proc.read_message()
self.assertEqual(res["command"], "emit")
self.assertEqual(msg["tuple"], res["tuple"])
self.shell_proc.write_message([1]) # send fake task id
res = self.shell_proc.read_message()
self.assertEqual(res["command"], "ack")
self.assertEqual(res["id"], msg["id"])
def test_emit_many_auto_ack(self):
msg = get_message(id="emit_many")
self.shell_proc.write_message(msg)
for i in range(5):
res = self.shell_proc.read_message()
self.assertEqual(res["command"], "emit")
self.assertEqual(res["tuple"], msg["tuple"])
self.shell_proc.write_message([1]) # send fake task id
res = self.shell_proc.read_message()
self.assertEqual(res["command"], "ack")
self.assertEqual(res["id"], msg["id"])
class BoltAutoAnchorTest(ShellComponentTestCaseMixin, unittest.TestCase):
component = "dummy_bolt_auto_anchor.py"
def test_emit_auto_anchor(self):
msg = get_message()
self.shell_proc.write_message(msg)
res = self.shell_proc.read_message()
# DummyBolt should simply echo any tuple sent in to it
self.assertEqual(res["command"], "emit")
self.assertEqual(msg["tuple"], res["tuple"])
self.assertEqual(res["anchors"], [msg["id"]])
self.shell_proc.write_message([1]) # send fake task id
def test_emit_many_auto_anchor(self):
msg = get_message(id="emit_many")
self.shell_proc.write_message(msg)
for i in range(5):
res = self.shell_proc.read_message()
self.assertEqual(res["command"], "emit")
self.assertEqual(msg["tuple"], res["tuple"])
self.assertEqual(res["anchors"], [msg["id"]])
self.shell_proc.write_message([1]) # send fake task id
class BoltAutoFailTest(ShellComponentTestCaseMixin, unittest.TestCase):
component = "dummy_bolt_auto_fail.py"
def test_auto_fail(self):
msg = get_message()
self.shell_proc.write_message(msg)
res = self.shell_proc.read_message()
self.assertEqual(res["command"], "error")
self.assertIn("Exception: ", res["msg"])
res = self.shell_proc.read_message()
self.assertEqual(res["command"], "sync")
res = self.shell_proc.read_message()
self.assertEqual(res["command"], "fail")
self.assertEqual(res["id"], msg["id"])
time.sleep(0.5)
self.assertEqual(self.proc.poll(), 1)
if __name__ == '__main__':
unittest.main()
| {
"content_hash": "e75aa8e6e769efa41bb0b92323edf8e1",
"timestamp": "",
"source": "github",
"line_count": 199,
"max_line_length": 77,
"avg_line_length": 32.472361809045225,
"alnum_prop": 0.6138966264314454,
"repo_name": "scrapinghub/streamparse",
"id": "eba227e9059d9efccdc9b69ca212195eedfa139f",
"size": "6462",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/ipc/test_bolt.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Clojure",
"bytes": "42723"
},
{
"name": "HTML",
"bytes": "82354"
},
{
"name": "Java",
"bytes": "2190"
},
{
"name": "Python",
"bytes": "1764918"
},
{
"name": "Ruby",
"bytes": "263877"
},
{
"name": "Shell",
"bytes": "12272"
}
],
"symlink_target": ""
} |
from django.test import TestCase
from django.core.urlresolvers import resolve
from lists.views import home_page
from django.http import HttpRequest
from django.template.loader import render_to_string
from lists.models import Item, List
class HomePageTest(TestCase):
def test_root_url_resolves_to_home_page_view(self):
found = resolve('/')
self.assertEqual(found.func, home_page)
def test_home_page_returns_correct_html(self):
request = HttpRequest()
response = home_page(request)
expected_html = render_to_string('home.html')
self.assertEqual(response.content.decode(), expected_html)
class ListAndItemModelTest(TestCase):
def test_saving_and_retrieving_items(self):
list_ = List()
list_.save()
first_item = Item()
first_item.text = 'The first (ever) list item'
first_item.list = list_
first_item.save()
saved_list = List.objects.first()
self.assertEqual(saved_list, list_)
second_item = Item()
second_item.text = 'Item the second'
second_item.list = list_
second_item.save()
saved_items = Item.objects.all()
self.assertEqual(saved_items.count(), 2)
first_saved_item = saved_items[0]
second_saved_item = saved_items[1]
self.assertEqual(first_saved_item.text, 'The first (ever) list item')
self.assertEqual(first_saved_item.list, list_)
self.assertEqual(second_saved_item.text, 'Item the second')
self.assertEqual(second_saved_item.list, list_)
class ListViewTest(TestCase):
def test_uses_list_template(self):
list_ = List.objects.create()
response = self.client.get('/lists/%d/' % (list_.id,))
self.assertTemplateUsed(response, 'list.html')
def test_displays_only_items_for_that_list(self):
correct_list = List.objects.create()
Item.objects.create(text='itemey 1', list=correct_list)
Item.objects.create(text='itemey 2', list=correct_list)
other_list = List.objects.create()
Item.objects.create(text='other list item 1', list=other_list)
Item.objects.create(text='other list item 2', list=other_list)
response = self.client.get('/lists/%d/' % (correct_list.id,))
self.assertContains(response, 'itemey 1')
self.assertContains(response, 'itemey 2')
self.assertNotContains(response, 'other list item 1')
self.assertNotContains(response, 'other list item 2')
def test_passes_correct_list_to_template(self):
other_list = List.objects.create()
correct_list = List.objects.create()
response = self.client.get('/lists/%d/' % (correct_list.id,))
self.assertEqual(response.context['list'], correct_list)
class NewListTest(TestCase):
def test_can_save_a_post_request_to_an_existing_list(self):
other_list = List.objects.create()
correct_list = List.objects.create()
self.client.post(
'/lists/%d/add_item' % (correct_list.id,),
data={'item_text': 'A new item for an existing list'}
)
self.assertEqual(Item.objects.count(), 1)
new_item = Item.objects.first()
self.assertEqual(new_item.text, 'A new item for an existing list')
self.assertEqual(new_item.list, correct_list)
def test_redirects_to_list_view(self):
other_list = List.objects.create()
correct_list = List.objects.create()
response = self.client.post(
'/lists/%d/add_item' % (correct_list.id,),
data={'item_text': 'A new item for an existing list'}
)
self.assertRedirects(response, '/lists/%d/' % (correct_list.id,))
def test_saving_a_post_request(self):
self.client.post(
'/lists/new',
data={'item_text': 'A new list item'}
)
self.assertEqual(Item.objects.count(), 1)
new_item = Item.objects.first()
self.assertEqual(new_item.text, 'A new list item')
def test_redirects_after_post(self):
response = self.client.post(
'/lists/new',
data={'item_text': 'A new list item'}
)
new_list = List.objects.first()
        self.assertRedirects(response, '/lists/%d/' % (new_list.id,))
| {
"content_hash": "5d61178f65caebbb2674bdb3901f4dbd",
"timestamp": "",
"source": "github",
"line_count": 124,
"max_line_length": 77,
"avg_line_length": 34.733870967741936,
"alnum_prop": 0.6264221035523566,
"repo_name": "carlosgoce/superlists",
"id": "552defd86771d546c35b75d687dd79f2b2b103d4",
"size": "4307",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lists/tests.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "37"
},
{
"name": "Python",
"bytes": "14810"
}
],
"symlink_target": ""
} |
"""Tests the graph freezing tool."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import re
from tensorflow.core.example import example_pb2
from tensorflow.core.framework import graph_pb2
from tensorflow.core.protobuf import saver_pb2
from tensorflow.python.client import session
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import graph_io
from tensorflow.python.framework import importer
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import parsing_ops
from tensorflow.python.ops import partitioned_variables
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.saved_model import builder as saved_model_builder
from tensorflow.python.saved_model import signature_constants
from tensorflow.python.saved_model import signature_def_utils
from tensorflow.python.saved_model import tag_constants
from tensorflow.python.tools import freeze_graph
from tensorflow.python.training import saver as saver_lib
class FreezeGraphTest(test_util.TensorFlowTestCase):
def _testFreezeGraph(self, saver_write_version):
checkpoint_prefix = os.path.join(self.get_temp_dir(), "saved_checkpoint")
checkpoint_state_name = "checkpoint_state"
input_graph_name = "input_graph.pb"
output_graph_name = "output_graph.pb"
# We'll create an input graph that has a single variable containing 1.0,
# and that then multiplies it by 2.
with ops.Graph().as_default():
variable_node = variables.VariableV1(1.0, name="variable_node")
output_node = math_ops.multiply(variable_node, 2.0, name="output_node")
sess = session.Session()
init = variables.global_variables_initializer()
sess.run(init)
output = sess.run(output_node)
self.assertNear(2.0, output, 0.00001)
saver = saver_lib.Saver(write_version=saver_write_version)
checkpoint_path = saver.save(
sess,
checkpoint_prefix,
global_step=0,
latest_filename=checkpoint_state_name)
graph_io.write_graph(sess.graph, self.get_temp_dir(), input_graph_name)
# We save out the graph to disk, and then call the const conversion
# routine.
input_graph_path = os.path.join(self.get_temp_dir(), input_graph_name)
input_saver_def_path = ""
input_binary = False
output_node_names = "output_node"
restore_op_name = "save/restore_all"
filename_tensor_name = "save/Const:0"
output_graph_path = os.path.join(self.get_temp_dir(), output_graph_name)
clear_devices = False
freeze_graph.freeze_graph(
input_graph_path,
input_saver_def_path,
input_binary,
checkpoint_path,
output_node_names,
restore_op_name,
filename_tensor_name,
output_graph_path,
clear_devices,
"",
"",
"",
checkpoint_version=saver_write_version)
# Now we make sure the variable is now a constant, and that the graph still
# produces the expected result.
with ops.Graph().as_default():
output_graph_def = graph_pb2.GraphDef()
with open(output_graph_path, "rb") as f:
output_graph_def.ParseFromString(f.read())
_ = importer.import_graph_def(output_graph_def, name="")
self.assertEqual(4, len(output_graph_def.node))
for node in output_graph_def.node:
self.assertNotEqual("VariableV2", node.op)
self.assertNotEqual("Variable", node.op)
with session.Session() as sess:
output_node = sess.graph.get_tensor_by_name("output_node:0")
output = sess.run(output_node)
self.assertNear(2.0, output, 0.00001)
def _createTFExampleString(self, feature_name, feature_value):
"""Create a serialized tensorflow example."""
example = example_pb2.Example()
example.features.feature[feature_name].float_list.value.extend([
feature_value])
return example.SerializeToString()
def _writeDummySavedModel(self, path, feature_name):
"""Writes a classifier with two input features to the given path."""
with ops.Graph().as_default():
examples = array_ops.placeholder(dtypes.string, name="input_node")
feature_configs = {
feature_name: parsing_ops.FixedLenFeature(shape=[],
dtype=dtypes.float32),
}
features = parsing_ops.parse_example(examples, feature_configs)
feature = features[feature_name]
variable_node = variables.VariableV1(1.0, name="variable_node")
scores = math_ops.multiply(variable_node, feature, name="output_node")
class_feature = array_ops.fill(array_ops.shape(feature),
"class_%s" % feature_name)
classes = array_ops.transpose(class_feature)
with session.Session() as sess:
sess.run(variables.global_variables_initializer())
signature = (
signature_def_utils.classification_signature_def(
examples=examples,
classes=classes,
scores=scores,))
builder = saved_model_builder.SavedModelBuilder(path)
builder.add_meta_graph_and_variables(
sess,
[tag_constants.SERVING],
signature_def_map={
signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
signature,
},)
builder.save(as_text=True)
@test_util.run_v1_only("b/120545219")
def testFreezeGraphV1(self):
self._testFreezeGraph(saver_pb2.SaverDef.V1)
@test_util.run_v1_only("b/120545219")
def testFreezeGraphV2(self):
self._testFreezeGraph(saver_pb2.SaverDef.V2)
def testFreezeMetaGraph(self):
tmp_dir = self.get_temp_dir()
checkpoint_prefix = os.path.join(tmp_dir, "meta_graph_checkpoint")
checkpoint_state_name = "checkpoint_state"
output_graph_filename = os.path.join(tmp_dir, "output_graph.pb")
with ops.Graph().as_default():
variable_node = variables.VariableV1(1.0, name="variable_node")
output_node = math_ops.multiply(variable_node, 2.0, name="output_node")
sess = session.Session()
init = variables.global_variables_initializer()
sess.run(init)
output = sess.run(output_node)
self.assertNear(2.0, output, 0.00001)
saver = saver_lib.Saver()
checkpoint_path = saver.save(
sess,
checkpoint_prefix,
global_step=0,
latest_filename=checkpoint_state_name)
input_saver_def_path = ""
input_binary = True
output_node_names = "output_node"
restore_op_name = "save/restore_all"
filename_tensor_name = "save/Const:0"
clear_devices = False
input_meta_graph = checkpoint_path + ".meta"
freeze_graph.freeze_graph(
"", input_saver_def_path, input_binary, checkpoint_path,
output_node_names, restore_op_name, filename_tensor_name,
output_graph_filename, clear_devices, "", "", "", input_meta_graph)
# Now we make sure the variable is now a constant, and that the graph still
# produces the expected result.
with ops.Graph().as_default():
output_graph_def = graph_pb2.GraphDef()
with open(output_graph_filename, "rb") as f:
output_graph_def.ParseFromString(f.read())
_ = importer.import_graph_def(output_graph_def, name="")
self.assertEqual(4, len(output_graph_def.node))
for node in output_graph_def.node:
self.assertNotEqual("VariableV2", node.op)
self.assertNotEqual("Variable", node.op)
with session.Session() as sess:
output_node = sess.graph.get_tensor_by_name("output_node:0")
output = sess.run(output_node)
self.assertNear(2.0, output, 0.00001)
def testFreezeSavedModel(self):
tmp_dir = self.get_temp_dir()
saved_model_dir = os.path.join(tmp_dir, "saved_model_dir")
feature_name = "feature"
self._writeDummySavedModel(saved_model_dir, feature_name)
output_graph_filename = os.path.join(tmp_dir, "output_graph.pb")
input_saved_model_dir = saved_model_dir
output_node_names = "output_node"
input_binary = False
input_saver_def_path = False
restore_op_name = None
filename_tensor_name = None
clear_devices = False
input_meta_graph = False
checkpoint_path = None
input_graph_filename = None
saved_model_tags = tag_constants.SERVING
freeze_graph.freeze_graph(input_graph_filename, input_saver_def_path,
input_binary, checkpoint_path, output_node_names,
restore_op_name, filename_tensor_name,
output_graph_filename, clear_devices, "", "", "",
input_meta_graph, input_saved_model_dir,
saved_model_tags)
# Now we make sure the variable is now a constant, and that the graph still
# produces the expected result.
with ops.Graph().as_default():
output_graph_def = graph_pb2.GraphDef()
with open(output_graph_filename, "rb") as f:
output_graph_def.ParseFromString(f.read())
_ = importer.import_graph_def(output_graph_def, name="")
if any(u"ParseExampleV2" in node.name for node in output_graph_def.node):
expected_node_count = 10
else:
expected_node_count = 8
self.assertEqual(expected_node_count, len(output_graph_def.node))
for node in output_graph_def.node:
self.assertNotEqual("VariableV2", node.op)
self.assertNotEqual("Variable", node.op)
feature_value = 2.0
example = self._createTFExampleString(feature_name, feature_value)
with session.Session() as sess:
input_node = sess.graph.get_tensor_by_name("input_node:0")
output_node = sess.graph.get_tensor_by_name("output_node:0")
output = sess.run(output_node, feed_dict={input_node: [example]})
self.assertNear(feature_value, output, 0.00001)
def testSinglePartitionedVariable(self):
"""Ensures partitioned variables fail cleanly with freeze graph."""
checkpoint_prefix = os.path.join(self.get_temp_dir(), "saved_checkpoint")
checkpoint_state_name = "checkpoint_state"
input_graph_name = "input_graph.pb"
output_graph_name = "output_graph.pb"
# Create a graph with partition variables. When weights are partitioned into
    # a single partition, the weights variable is followed by an identity ->
# identity (an additional identity node).
partitioner = partitioned_variables.fixed_size_partitioner(1)
with ops.Graph().as_default():
with variable_scope.variable_scope("part", partitioner=partitioner):
batch_size, height, width, depth = 5, 128, 128, 3
input1 = array_ops.zeros(
(batch_size, height, width, depth), name="input1")
input2 = array_ops.zeros(
(batch_size, height, width, depth), name="input2")
num_nodes = depth
filter1 = variable_scope.get_variable("filter", [num_nodes, num_nodes])
filter2 = array_ops.reshape(filter1, [1, 1, num_nodes, num_nodes])
conv = nn.conv2d(
input=input1, filter=filter2, strides=[1, 1, 1, 1], padding="SAME")
node = math_ops.add(conv, input2, name="test/add")
node = nn.relu6(node, name="test/relu6")
# Save graph and checkpoints.
sess = session.Session()
sess.run(variables.global_variables_initializer())
saver = saver_lib.Saver()
checkpoint_path = saver.save(
sess,
checkpoint_prefix,
global_step=0,
latest_filename=checkpoint_state_name)
graph_io.write_graph(sess.graph, self.get_temp_dir(), input_graph_name)
# Ensure this graph has partition variables.
self.assertTrue([
tensor.name.split(":")[0]
for op in sess.graph.get_operations()
for tensor in op.values()
if re.search(r"/part_\d+/", tensor.name)
])
# Test freezing graph doesn't make it crash.
output_node_names = "save/restore_all"
output_graph_path = os.path.join(self.get_temp_dir(), output_graph_name)
with self.assertRaises(ValueError):
freeze_graph.freeze_graph_with_def_protos(
input_graph_def=sess.graph_def,
input_saver_def=None,
input_checkpoint=checkpoint_path,
output_node_names=output_node_names,
restore_op_name="save/restore_all", # default value
filename_tensor_name="save/Const:0", # default value
output_graph=output_graph_path,
clear_devices=False,
initializer_nodes="")
if __name__ == "__main__":
test.main()
| {
"content_hash": "3f32595c960daada96f347345d66c5d6",
"timestamp": "",
"source": "github",
"line_count": 323,
"max_line_length": 80,
"avg_line_length": 40.037151702786375,
"alnum_prop": 0.6575162387875039,
"repo_name": "adit-chandra/tensorflow",
"id": "a27058655ade57008efe4cbe41f6458b8d472dc3",
"size": "13621",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "tensorflow/python/tools/freeze_graph_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "5003"
},
{
"name": "Batchfile",
"bytes": "45988"
},
{
"name": "C",
"bytes": "773694"
},
{
"name": "C#",
"bytes": "8562"
},
{
"name": "C++",
"bytes": "76734263"
},
{
"name": "CMake",
"bytes": "6545"
},
{
"name": "Dockerfile",
"bytes": "81136"
},
{
"name": "Go",
"bytes": "1679107"
},
{
"name": "HTML",
"bytes": "4686483"
},
{
"name": "Java",
"bytes": "952944"
},
{
"name": "Jupyter Notebook",
"bytes": "567243"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "MLIR",
"bytes": "1299322"
},
{
"name": "Makefile",
"bytes": "61397"
},
{
"name": "Objective-C",
"bytes": "104706"
},
{
"name": "Objective-C++",
"bytes": "297753"
},
{
"name": "PHP",
"bytes": "24055"
},
{
"name": "Pascal",
"bytes": "3752"
},
{
"name": "Pawn",
"bytes": "17546"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "Python",
"bytes": "38764318"
},
{
"name": "RobotFramework",
"bytes": "891"
},
{
"name": "Ruby",
"bytes": "7459"
},
{
"name": "Shell",
"bytes": "643787"
},
{
"name": "Smarty",
"bytes": "34727"
},
{
"name": "Swift",
"bytes": "62814"
}
],
"symlink_target": ""
} |
import functools
from . import DEFAULT_ADDR, DEFAULT_BASE
from .. import multipart, http
def returns_single_item(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
result = func(*args, **kwargs)
if isinstance(result, list):
if len(result) != 1:
print(result)
assert len(result) == 1, ("Called IPFS HTTP-Client function should "
"only ever return one item")
return result[0]
assert kwargs.get("stream", False), ("Called IPFS HTTP-Client function "
"should only ever return a list, "
"when not streaming a response")
return result
return wrapper
def returns_no_item(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
result = func(*args, **kwargs)
if isinstance(result, (list, bytes)):
assert len(result) == 0, ("Called IPFS HTTP-Client function should "
"never return an item")
return
assert kwargs.get("stream", False), ("Called IPFS HTTP-Client function "
"should only ever return a list "
"or bytes, when not streaming a "
"response")
return result
return wrapper
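# A minimal usage sketch (not part of the original module, added for
# illustration): the decorators above are meant to wrap client methods whose
# HTTP layer always yields a list, so that non-streaming calls are unwrapped
# transparently. Assuming a section class with a ``_client.request`` helper:
#
#   class ExampleSection(SectionBase):
#       @returns_single_item
#       def lookup(self, name, **kwargs):
#           return self._client.request('/lookup', (name,), decoder='json',
#                                       **kwargs)
#
# The endpoint name, argument layout and the exact ``request()`` signature are
# assumptions made for this sketch only.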
class SectionProperty:
def __init__(self, cls):
self.__prop_cls__ = cls
def __get__(self, client_object, type=None):
if client_object is not None: # We are invoked on object
try:
return client_object.__prop_objs__[self]
except AttributeError:
client_object.__prop_objs__ = {
self: self.__prop_cls__(client_object)
}
return client_object.__prop_objs__[self]
except KeyError:
client_object.__prop_objs__[self] = self.__prop_cls__(client_object)
return client_object.__prop_objs__[self]
else: # We are invoked on class
return self.__prop_cls__
class SectionBase:
# Accept parent object from property descriptor
def __init__(self, parent):
self.__parent = parent
# Proxy the parent's properties
@property
def _client(self):
return self.__parent._client
@property
def chunk_size(self):
return self.__parent.chunk_size
@chunk_size.setter
def chunk_size(self, value):
self.__parent.chunk_size = value
class ClientBase:
"""
Parameters
----------
addr : Union[bytes, str, multiaddr.Multiaddr]
The `Multiaddr <dweb:/ipns/multiformats.io/multiaddr/>`_ describing the
API daemon location, as used in the *API* key of `go-ipfs Addresses
section
<https://github.com/ipfs/go-ipfs/blob/master/docs/config.md#addresses>`_
Supported addressing patterns are currently:
* ``/{dns,dns4,dns6,ip4,ip6}/<host>/tcp/<port>`` (HTTP)
* ``/{dns,dns4,dns6,ip4,ip6}/<host>/tcp/<port>/http`` (HTTP)
* ``/{dns,dns4,dns6,ip4,ip6}/<host>/tcp/<port>/https`` (HTTPS)
Additional forms (proxying) may be supported in the future.
base : str
The HTTP URL path prefix (or “base”) at which the API is exposed on the
API daemon
username : str
HTTP basic authentication username to send to the API daemon
password : str
HTTP basic authentication password to send to the API daemon
chunk_size : int
The size of the chunks to break uploaded files and text content into
session : bool
Create this :class:`~ipfshttpclient.Client` instance with a session
already open? (Useful for long-running client objects.)
"""
_clientfactory = http.HTTPClient
def __init__(self, addr=DEFAULT_ADDR, base=DEFAULT_BASE, *,
username=None, password=None,
chunk_size=multipart.default_chunk_size,
session=False, **defaults):
"""Connects to the API port of an IPFS node."""
self.chunk_size = chunk_size
self._client = self._clientfactory(
addr, base, username=username, password=password, **defaults
)
if session:
self._client.open_session()
self._workarounds = self._client.workarounds
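# A minimal usage sketch (not part of the original module, added for
# illustration): concrete clients built on ClientBase are constructed from a
# multiaddr plus the API base path described in the docstring above, e.g.:
#
#   client = SomeClient("/ip4/127.0.0.1/tcp/5001/http", base="api/v0",
#                       session=True)
#
# ``SomeClient`` is a placeholder name; only the addressing patterns listed in
# the class docstring come from the original documentation.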
| {
"content_hash": "c504cd08b65ba728ffaf57e79f4d4ec2",
"timestamp": "",
"source": "github",
"line_count": 127,
"max_line_length": 74,
"avg_line_length": 30.763779527559056,
"alnum_prop": 0.6414128487330433,
"repo_name": "alexander255/py-ipfs-api",
"id": "0e8c6dd6e6cdf9d43f407c70863b38aa6c425a0a",
"size": "3911",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ipfshttpclient/client/base.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "222072"
},
{
"name": "Shell",
"bytes": "452"
}
],
"symlink_target": ""
} |
"""Finds desktop mandoline browsers that can be controlled by telemetry."""
import os
import logging
import sys
from telemetry.core import exceptions
from telemetry.core import platform as platform_module
from telemetry.internal.backends.mandoline import desktop_mandoline_backend
from telemetry.internal.browser import browser
from telemetry.internal.browser import possible_browser
from telemetry.internal.platform import desktop_device
from telemetry.internal.util import path
class PossibleDesktopMandolineBrowser(possible_browser.PossibleBrowser):
"""A desktop mandoline browser that can be controlled."""
def __init__(self, browser_type, finder_options, executable,
browser_directory):
target_os = sys.platform.lower()
super(PossibleDesktopMandolineBrowser, self).__init__(
browser_type, target_os, supports_tab_control=False)
assert browser_type in FindAllBrowserTypes(finder_options), (
'Please add %s to desktop_mandoline_finder.FindAllBrowserTypes' %
browser_type)
self._local_executable = executable
self._browser_directory = browser_directory
def __repr__(self):
return 'PossibleDesktopMandolineBrowser(type=%s, executable=%s)' % (
self.browser_type, self._local_executable)
def _InitPlatformIfNeeded(self):
if self._platform:
return
self._platform = platform_module.GetHostPlatform()
# pylint: disable=protected-access
self._platform_backend = self._platform._platform_backend
def Create(self, finder_options):
self._InitPlatformIfNeeded()
mandoline_backend = desktop_mandoline_backend.DesktopMandolineBackend(
self._platform_backend, finder_options.browser_options,
self._local_executable, self._browser_directory)
return browser.Browser(
mandoline_backend, self._platform_backend, self._credentials_path)
def SupportsOptions(self, finder_options):
if len(finder_options.extensions_to_load) != 0:
return False
return True
def UpdateExecutableIfNeeded(self):
pass
def last_modification_time(self):
if os.path.exists(self._local_executable):
return os.path.getmtime(self._local_executable)
return -1
def SelectDefaultBrowser(possible_browsers):
if not possible_browsers:
return None
return max(possible_browsers, key=lambda b: b.last_modification_time())
def CanFindAvailableBrowsers():
os_name = platform_module.GetHostPlatform().GetOSName()
return os_name == 'win' or os_name == 'linux'
def CanPossiblyHandlePath(target_path):
_, extension = os.path.splitext(target_path.lower())
if sys.platform.startswith('linux'):
return not extension
elif sys.platform.startswith('win'):
return extension == '.exe'
return False
def FindAllBrowserTypes(_):
return [
'exact',
'mandoline-debug',
'mandoline-debug_x64',
'mandoline-default',
'mandoline-release',
'mandoline-release_x64',]
def FindAllAvailableBrowsers(finder_options, device):
"""Finds all the desktop mandoline browsers available on this machine."""
if not isinstance(device, desktop_device.DesktopDevice):
return []
browsers = []
if not CanFindAvailableBrowsers():
return []
if sys.platform.startswith('linux'):
mandoline_app_name = 'mandoline'
elif sys.platform.startswith('win'):
mandoline_app_name = 'mandoline.exe'
else:
raise Exception('Platform not recognized')
# Add the explicit browser executable if given and we can handle it.
if (finder_options.browser_executable and
CanPossiblyHandlePath(finder_options.browser_executable)):
app_name = os.path.basename(finder_options.browser_executable)
    # It is okay if the executable name doesn't match the known mandoline
    # executable, since it may belong to a different browser (say, chrome).
if app_name == mandoline_app_name:
normalized_executable = os.path.expanduser(
finder_options.browser_executable)
if path.IsExecutable(normalized_executable):
browser_directory = os.path.dirname(finder_options.browser_executable)
browsers.append(PossibleDesktopMandolineBrowser('exact', finder_options,
normalized_executable,
browser_directory))
else:
raise exceptions.PathMissingError(
'%s specified by --browser-executable does not exist',
normalized_executable)
if not finder_options.chrome_root:
    logging.warning('Chrome build directory is not specified. Skip looking '
                    'for mandoline build in the chrome build directories.')
return browsers
def AddIfFound(browser_type, build_dir, type_dir, app_name):
browser_directory = os.path.join(
finder_options.chrome_root, build_dir, type_dir)
app = os.path.join(browser_directory, app_name)
if path.IsExecutable(app):
browsers.append(PossibleDesktopMandolineBrowser(
browser_type, finder_options, app, browser_directory))
return True
return False
# Add local builds.
for build_dir, build_type in path.GetBuildDirectories():
AddIfFound('mandoline-' + build_type.lower(), build_dir, build_type,
mandoline_app_name)
return browsers
| {
"content_hash": "5442c5fd6e7d7915d786103443d73754",
"timestamp": "",
"source": "github",
"line_count": 149,
"max_line_length": 80,
"avg_line_length": 35.691275167785236,
"alnum_prop": 0.7038360285821738,
"repo_name": "XiaosongWei/chromium-crosswalk",
"id": "0a7daa9352aad3b7cbc778bfd42869b3b7dda6db",
"size": "5480",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "tools/telemetry/telemetry/internal/backends/mandoline/desktop_mandoline_finder.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
"""Unit tests for the instance module."""
import os
import time
from unittest import mock
import ddt
import netaddr
from oslo_config import cfg
from oslo_utils import importutils
from manila import exception
from manila.share import configuration
from manila.share import driver # noqa
from manila.share.drivers import service_instance
from manila import test
from manila.tests import fake_compute
from manila.tests import fake_network
from manila.tests import utils as test_utils
CONF = cfg.CONF
def fake_get_config_option(key):
if key == 'driver_handles_share_servers':
return True
elif key == 'service_instance_password':
return None
elif key == 'service_instance_user':
return 'fake_user'
elif key == 'service_network_name':
return 'fake_service_network_name'
elif key == 'service_instance_flavor_id':
return '100'
elif key == 'service_instance_name_template':
return 'fake_manila_service_instance_%s'
elif key == 'service_image_name':
return 'fake_service_image_name'
elif key == 'manila_service_keypair_name':
return 'fake_manila_service_keypair_name'
elif key == 'path_to_private_key':
return 'fake_path_to_private_key'
elif key == 'path_to_public_key':
return 'fake_path_to_public_key'
elif key == 'max_time_to_build_instance':
return 500
elif key == 'connect_share_server_to_tenant_network':
return False
elif key == 'service_network_cidr':
return '99.254.0.0/24'
elif key == 'service_network_division_mask':
return 27
elif key == 'service_network_name':
return 'fake_service_network_name'
elif key == 'interface_driver':
return 'i.am.fake.VifDriver'
elif key == 'admin_network_id':
return None
elif key == 'admin_subnet_id':
return None
elif key == 'backend_availability_zone':
return None
else:
return mock.Mock()
class FakeServiceInstance(object):
def __init__(self, driver_config=None):
super(FakeServiceInstance, self).__init__()
self.compute_api = service_instance.compute.API()
self.admin_context = service_instance.context.get_admin_context()
self.driver_config = driver_config
def get_config_option(self, key):
return fake_get_config_option(key)
class FakeNetworkHelper(service_instance.BaseNetworkhelper):
@property
def NAME(self):
return service_instance.NEUTRON_NAME
@property
def neutron_api(self):
if not hasattr(self, '_neutron_api'):
self._neutron_api = mock.Mock()
return self._neutron_api
def __init__(self, service_instance_manager):
self.get_config_option = service_instance_manager.get_config_option
def get_network_name(self, network_info):
"""Return name of network."""
return 'fake_network_name'
def setup_connectivity_with_service_instances(self):
"""Nothing to do in fake network helper."""
def setup_network(self, network_info):
"""Combine fake network data."""
return dict()
def teardown_network(self, server_details):
"""Nothing to do in fake network helper."""
@ddt.ddt
class ServiceInstanceManagerTestCase(test.TestCase):
"""Test suite for service instance manager."""
def setUp(self):
super(ServiceInstanceManagerTestCase, self).setUp()
self.instance_id = 'fake_instance_id'
self.config = configuration.Configuration(None)
self.config.safe_get = mock.Mock(side_effect=fake_get_config_option)
self.mock_object(service_instance.compute, 'API', fake_compute.API)
self.mock_object(
service_instance.os.path, 'exists', mock.Mock(return_value=True))
self.mock_object(service_instance, 'NeutronNetworkHelper',
mock.Mock(side_effect=FakeNetworkHelper))
self._manager = service_instance.ServiceInstanceManager(self.config)
self._manager._execute = mock.Mock(return_value=('', ''))
self.mock_object(time, 'sleep')
def test_get_config_option_from_driver_config(self):
username1 = 'fake_username_1_%s' % self.id()
username2 = 'fake_username_2_%s' % self.id()
config_data = dict(
DEFAULT=dict(service_instance_user=username1),
CUSTOM=dict(service_instance_user=username2))
with test_utils.create_temp_config_with_opts(config_data):
self.config = configuration.Configuration(
service_instance.common_opts, config_group='CUSTOM')
self._manager = service_instance.ServiceInstanceManager(
self.config)
result = self._manager.get_config_option('service_instance_user')
self.assertEqual(username2, result)
def test_get_config_option_from_common_config(self):
username = 'fake_username_%s' % self.id()
config_data = dict(DEFAULT=dict(service_instance_user=username))
with test_utils.create_temp_config_with_opts(config_data):
self._manager = service_instance.ServiceInstanceManager()
result = self._manager.get_config_option('service_instance_user')
self.assertEqual(username, result)
def test_get_neutron_network_helper(self):
# Mock it again, because it was called in setUp method.
self.mock_object(service_instance, 'NeutronNetworkHelper')
config_data = dict(DEFAULT=dict(service_instance_user='fake_username',
driver_handles_share_servers=True))
with test_utils.create_temp_config_with_opts(config_data):
self._manager = service_instance.ServiceInstanceManager()
self._manager.network_helper
service_instance.NeutronNetworkHelper.assert_called_once_with(
self._manager)
def test_init_with_driver_config_and_handling_of_share_servers(self):
self.mock_object(service_instance, 'NeutronNetworkHelper')
config_data = dict(CUSTOM=dict(
driver_handles_share_servers=True,
service_instance_user='fake_user'))
opts = service_instance.common_opts + driver.share_opts
with test_utils.create_temp_config_with_opts(config_data):
self.config = configuration.Configuration(opts, 'CUSTOM')
self._manager = service_instance.ServiceInstanceManager(
self.config)
self.assertTrue(
self._manager.get_config_option("driver_handles_share_servers"))
self.assertIsNotNone(self._manager.driver_config)
self.assertTrue(hasattr(self._manager, 'network_helper'))
self.assertTrue(service_instance.NeutronNetworkHelper.called)
def test_init_with_driver_config_and_wo_handling_of_share_servers(self):
self.mock_object(service_instance, 'NeutronNetworkHelper')
config_data = dict(CUSTOM=dict(
driver_handles_share_servers=False,
service_instance_user='fake_user'))
opts = service_instance.common_opts + driver.share_opts
with test_utils.create_temp_config_with_opts(config_data):
self.config = configuration.Configuration(opts, 'CUSTOM')
self._manager = service_instance.ServiceInstanceManager(
self.config)
self.assertIsNotNone(self._manager.driver_config)
self.assertFalse(hasattr(self._manager, 'network_helper'))
self.assertFalse(service_instance.NeutronNetworkHelper.called)
def test_init_with_common_config_and_handling_of_share_servers(self):
self.mock_object(service_instance, 'NeutronNetworkHelper')
config_data = dict(DEFAULT=dict(
service_instance_user='fake_username',
driver_handles_share_servers=True))
with test_utils.create_temp_config_with_opts(config_data):
self._manager = service_instance.ServiceInstanceManager()
self.assertTrue(
self._manager.get_config_option("driver_handles_share_servers"))
self.assertIsNone(self._manager.driver_config)
self.assertTrue(hasattr(self._manager, 'network_helper'))
self.assertTrue(service_instance.NeutronNetworkHelper.called)
def test_init_with_common_config_and_wo_handling_of_share_servers(self):
self.mock_object(service_instance, 'NeutronNetworkHelper')
config_data = dict(DEFAULT=dict(
service_instance_user='fake_username',
driver_handles_share_servers=False))
with test_utils.create_temp_config_with_opts(config_data):
self._manager = service_instance.ServiceInstanceManager()
self.assertEqual(
False,
self._manager.get_config_option("driver_handles_share_servers"))
self.assertIsNone(self._manager.driver_config)
self.assertFalse(hasattr(self._manager, 'network_helper'))
self.assertFalse(service_instance.NeutronNetworkHelper.called)
def test_no_service_user_defined(self):
group_name = 'GROUP_%s' % self.id()
config_data = {group_name: dict()}
with test_utils.create_temp_config_with_opts(config_data):
config = configuration.Configuration(
service_instance.common_opts, config_group=group_name)
self.assertRaises(
exception.ServiceInstanceException,
service_instance.ServiceInstanceManager, config)
def test_get_service_instance_name_using_driver_config(self):
fake_server_id = 'fake_share_server_id_%s' % self.id()
self.mock_object(service_instance, 'NeutronNetworkHelper')
config_data = dict(CUSTOM=dict(
driver_handles_share_servers=True,
service_instance_user='fake_user'))
opts = service_instance.common_opts + driver.share_opts
with test_utils.create_temp_config_with_opts(config_data):
self.config = configuration.Configuration(opts, 'CUSTOM')
self._manager = service_instance.ServiceInstanceManager(
self.config)
result = self._manager._get_service_instance_name(fake_server_id)
self.assertIsNotNone(self._manager.driver_config)
self.assertEqual(
self._manager.get_config_option(
"service_instance_name_template") % "%s_%s" % (
self._manager.driver_config.config_group, fake_server_id),
result)
self.assertTrue(
self._manager.get_config_option("driver_handles_share_servers"))
self.assertTrue(hasattr(self._manager, 'network_helper'))
self.assertTrue(service_instance.NeutronNetworkHelper.called)
def test_get_service_instance_name_using_default_config(self):
fake_server_id = 'fake_share_server_id_%s' % self.id()
config_data = dict(CUSTOM=dict(
service_instance_user='fake_user'))
with test_utils.create_temp_config_with_opts(config_data):
self._manager = service_instance.ServiceInstanceManager()
result = self._manager._get_service_instance_name(fake_server_id)
self.assertIsNone(self._manager.driver_config)
self.assertEqual(
self._manager.get_config_option(
"service_instance_name_template") % fake_server_id, result)
def test__check_server_availability_available_from_start(self):
fake_server = dict(id='fake_server', ip='127.0.0.1')
self.mock_object(service_instance.socket.socket, 'connect')
self.mock_object(service_instance.time, 'sleep')
self.mock_object(service_instance.time, 'time',
mock.Mock(return_value=0))
result = self._manager._check_server_availability(fake_server)
self.assertTrue(result)
service_instance.socket.socket.connect.assert_called_once_with(
(fake_server['ip'], 22))
service_instance.time.time.assert_has_calls([
mock.call(), mock.call()])
service_instance.time.time.assert_has_calls([])
@ddt.data(True, False)
def test__check_server_availability_with_recall(self, is_ok):
fake_server = dict(id='fake_server', ip='fake_ip_address')
self.fake_time = 0
def fake_connect(addr):
if not(is_ok and self.fake_time > 1):
raise service_instance.socket.error
def fake_time():
return self.fake_time
def fake_sleep(time):
self.fake_time += 5
self.mock_object(service_instance.time, 'sleep',
mock.Mock(side_effect=fake_sleep))
self.mock_object(service_instance.socket.socket, 'connect',
mock.Mock(side_effect=fake_connect))
self.mock_object(service_instance.time, 'time',
mock.Mock(side_effect=fake_time))
self._manager.max_time_to_build_instance = 6
result = self._manager._check_server_availability(fake_server)
if is_ok:
self.assertTrue(result)
else:
self.assertFalse(result)
service_instance.socket.socket.connect.assert_has_calls([
mock.call((fake_server['ip'], 22)),
mock.call((fake_server['ip'], 22))])
service_instance.time.time.assert_has_calls([
mock.call(), mock.call(), mock.call()])
service_instance.time.time.assert_has_calls([mock.call()])
def test_get_server_ip_found_in_networks_section(self):
ip = '10.0.0.1'
net_name = self._manager.get_config_option('service_network_name')
fake_server = dict(networks={net_name: [ip]})
result = self._manager._get_server_ip(fake_server, net_name)
self.assertEqual(ip, result)
def test_get_server_ip_found_in_addresses_section(self):
ip = '10.0.0.1'
net_name = self._manager.get_config_option('service_network_name')
fake_server = dict(addresses={net_name: [dict(addr=ip, version=4)]})
result = self._manager._get_server_ip(fake_server, net_name)
self.assertEqual(ip, result)
@ddt.data(
{},
{'networks': {fake_get_config_option('service_network_name'): []}},
{'addresses': {fake_get_config_option('service_network_name'): []}})
def test_get_server_ip_not_found(self, data):
self.assertRaises(
exception.ManilaException,
self._manager._get_server_ip, data,
fake_get_config_option('service_network_name'))
def test_security_group_name_not_specified(self):
self.mock_object(self._manager, 'get_config_option',
mock.Mock(return_value=None))
result = self._manager._get_or_create_security_groups(
self._manager.admin_context)
self.assertIsNone(result)
self._manager.get_config_option.assert_called_once_with(
'service_instance_security_group')
def test_security_group_name_from_config_and_sg_exist(self):
name = "fake_sg_name_from_config"
desc = "fake_sg_description"
fake_secgroup = {'id': 'fake_sg_id', 'name': name, 'description': desc}
self.mock_object(self._manager, 'get_config_option',
mock.Mock(return_value=name))
neutron_api = self._manager.network_helper.neutron_api
neutron_api.security_group_list.return_value = {
'security_groups': [fake_secgroup]}
result = self._manager._get_or_create_security_groups(
self._manager.admin_context)
self.assertEqual([fake_secgroup, ], result)
self._manager.get_config_option.assert_called_once_with(
'service_instance_security_group')
neutron_api.security_group_list.assert_called_once_with({"name": name})
@ddt.data(None, 'fake_name')
def test_security_group_creation_with_name_from_config(self, name):
config_name = "fake_sg_name_from_config"
desc = "fake_sg_description"
fake_secgroup = {'id': 'fake_sg_id', 'name': name, 'description': desc}
self.mock_object(self._manager, 'get_config_option',
mock.Mock(return_value=name or config_name))
neutron_api = self._manager.network_helper.neutron_api
neutron_api.security_group_list.return_value = {'security_groups': []}
neutron_api.security_group_create.return_value = {
'security_group': fake_secgroup,
}
result = self._manager._get_or_create_security_groups(
context=self._manager.admin_context,
name=name,
description=desc,
)
self.assertEqual([fake_secgroup, ], result)
if not name:
self._manager.get_config_option.assert_called_once_with(
'service_instance_security_group')
neutron_api.security_group_list.assert_called_once_with(
{"name": name or config_name})
neutron_api.security_group_create.assert_called_once_with(
name or config_name, desc)
@ddt.data(None, 'fake_name')
def test_security_group_creation_with_name_from_conf_allow_ssh(self, name):
def fake_secgroup(*args, **kwargs):
return {'security_group': {'id': 'fake_sg_id', 'name': args[0],
'description': args[1]}}
config_name = "fake_sg_name_from_config"
desc = "fake_sg_description"
self.mock_object(self._manager, 'get_config_option',
mock.Mock(return_value=name or config_name))
neutron_api = self._manager.network_helper.neutron_api
neutron_api.security_group_list.return_value = {'security_groups': []}
self.mock_object(neutron_api, 'security_group_create',
mock.Mock(side_effect=fake_secgroup))
fake_ssh_allow_subnet = dict(cidr="10.254.0.1/24",
id='allow_subnet_id')
ssh_sg_name = 'manila-service-subnet-{}'.format(
fake_ssh_allow_subnet['id'])
result = self._manager._get_or_create_security_groups(
context=self._manager.admin_context,
name=name,
description=desc,
allow_ssh_subnet=fake_ssh_allow_subnet
)
self.assertEqual([fake_secgroup(name if name else config_name,
desc)['security_group'],
fake_secgroup(ssh_sg_name, desc)['security_group']],
result)
if not name:
self._manager.get_config_option.assert_called_with(
'service_instance_security_group')
neutron_api.security_group_list.assert_has_calls([
mock.call({"name": name or config_name}),
mock.call({"name": ssh_sg_name})])
neutron_api.security_group_create.assert_has_calls([
mock.call(name or config_name, desc),
mock.call(ssh_sg_name, desc)])
def test_security_group_limit_ssh_invalid_subnet(self):
def fake_secgroup(*args, **kwargs):
return {'security_group': {'id': 'fake_sg_id', 'name': args[0],
'description': args[1]}}
config_name = "fake_sg_name_from_config"
desc = "fake_sg_description"
self.mock_object(self._manager, 'get_config_option',
mock.Mock(config_name))
neutron_api = self._manager.network_helper.neutron_api
neutron_api.security_group_list.return_value = {'security_groups': []}
self.mock_object(neutron_api, 'security_group_create',
mock.Mock(side_effect=fake_secgroup))
fake_ssh_allow_subnet = dict(id='allow_subnet_id')
self.assertRaises(exception.ManilaException,
self._manager._get_or_create_security_groups,
context=self._manager.admin_context,
name=None,
description=desc,
allow_ssh_subnet=fake_ssh_allow_subnet)
def test_security_group_two_sg_in_list(self):
name = "fake_name"
fake_secgroup1 = {'id': 'fake_sg_id1', 'name': name}
fake_secgroup2 = {'id': 'fake_sg_id2', 'name': name}
neutron_api = self._manager.network_helper.neutron_api
neutron_api.security_group_list.return_value = {
'security_groups': [fake_secgroup1, fake_secgroup2]}
self.assertRaises(exception.ServiceInstanceException,
self._manager._get_or_create_security_groups,
self._manager.admin_context,
name)
neutron_api.security_group_list.assert_called_once_with(
{"name": name})
@ddt.data(
dict(),
dict(service_port_id='fake_service_port_id'),
dict(public_port_id='fake_public_port_id'),
dict(service_port_id='fake_service_port_id',
public_port_id='fake_public_port_id'),
)
def test_set_up_service_instance(self, update_data):
fake_network_info = {'foo': 'bar', 'server_id': 'fake_server_id'}
fake_server = {
'id': 'fake', 'ip': '1.2.3.4', 'public_address': '1.2.3.4',
'pk_path': None, 'subnet_id': 'fake-subnet-id',
'router_id': 'fake-router-id',
'username': self._manager.get_config_option(
'service_instance_user'),
'admin_ip': 'admin_ip'}
fake_server.update(update_data)
expected_details = fake_server.copy()
expected_details.pop('pk_path')
expected_details['instance_id'] = expected_details.pop('id')
expected_instance_name = self._manager._get_service_instance_name(
fake_network_info['server_id'])
self.mock_object(self._manager, '_create_service_instance',
mock.Mock(return_value=fake_server))
self.mock_object(self._manager, '_check_server_availability')
result = self._manager.set_up_service_instance(
self._manager.admin_context, fake_network_info)
self._manager._create_service_instance.assert_called_once_with(
self._manager.admin_context,
expected_instance_name, fake_network_info)
self._manager._check_server_availability.assert_called_once_with(
expected_details)
self.assertEqual(expected_details, result)
def test_set_up_service_instance_not_available(self):
fake_network_info = {'foo': 'bar', 'server_id': 'fake_server_id'}
fake_server = {
'id': 'fake', 'ip': '1.2.3.4', 'public_address': '1.2.3.4',
'pk_path': None, 'subnet_id': 'fake-subnet-id',
'router_id': 'fake-router-id',
'username': self._manager.get_config_option(
'service_instance_user'),
'admin_ip': 'admin_ip'}
expected_details = fake_server.copy()
expected_details.pop('pk_path')
expected_details['instance_id'] = expected_details.pop('id')
expected_instance_name = self._manager._get_service_instance_name(
fake_network_info['server_id'])
self.mock_object(self._manager, '_create_service_instance',
mock.Mock(return_value=fake_server))
self.mock_object(self._manager, '_check_server_availability',
mock.Mock(return_value=False))
result = self.assertRaises(
exception.ServiceInstanceException,
self._manager.set_up_service_instance,
self._manager.admin_context, fake_network_info)
self.assertTrue(hasattr(result, 'detail_data'))
self.assertEqual(
{'server_details': expected_details}, result.detail_data)
self._manager._create_service_instance.assert_called_once_with(
self._manager.admin_context,
expected_instance_name, fake_network_info)
self._manager._check_server_availability.assert_called_once_with(
expected_details)
def test_ensure_server(self):
server_details = {'instance_id': 'fake_inst_id', 'ip': '1.2.3.4'}
fake_server = fake_compute.FakeServer()
self.mock_object(self._manager, '_check_server_availability',
mock.Mock(return_value=True))
self.mock_object(self._manager.compute_api, 'server_get',
mock.Mock(return_value=fake_server))
result = self._manager.ensure_service_instance(
self._manager.admin_context, server_details)
self._manager.compute_api.server_get.assert_called_once_with(
self._manager.admin_context, server_details['instance_id'])
self._manager._check_server_availability.assert_called_once_with(
server_details)
self.assertTrue(result)
def test_ensure_server_not_exists(self):
server_details = {'instance_id': 'fake_inst_id', 'ip': '1.2.3.4'}
self.mock_object(self._manager, '_check_server_availability',
mock.Mock(return_value=True))
self.mock_object(self._manager.compute_api, 'server_get',
mock.Mock(side_effect=exception.InstanceNotFound(
instance_id=server_details['instance_id'])))
result = self._manager.ensure_service_instance(
self._manager.admin_context, server_details)
self._manager.compute_api.server_get.assert_called_once_with(
self._manager.admin_context, server_details['instance_id'])
self.assertFalse(self._manager._check_server_availability.called)
self.assertFalse(result)
def test_ensure_server_exception(self):
server_details = {'instance_id': 'fake_inst_id', 'ip': '1.2.3.4'}
self.mock_object(self._manager, '_check_server_availability',
mock.Mock(return_value=True))
self.mock_object(self._manager.compute_api, 'server_get',
mock.Mock(side_effect=exception.ManilaException))
self.assertRaises(exception.ManilaException,
self._manager.ensure_service_instance,
self._manager.admin_context,
server_details)
self._manager.compute_api.server_get.assert_called_once_with(
self._manager.admin_context, server_details['instance_id'])
self.assertFalse(self._manager._check_server_availability.called)
def test_ensure_server_non_active(self):
server_details = {'instance_id': 'fake_inst_id', 'ip': '1.2.3.4'}
fake_server = fake_compute.FakeServer(status='ERROR')
self.mock_object(self._manager.compute_api, 'server_get',
mock.Mock(return_value=fake_server))
self.mock_object(self._manager, '_check_server_availability',
mock.Mock(return_value=True))
result = self._manager.ensure_service_instance(
self._manager.admin_context, server_details)
self.assertFalse(self._manager._check_server_availability.called)
self.assertFalse(result)
def test_ensure_server_no_instance_id(self):
        # Tests that we avoid a KeyError if the server details dict doesn't
        # have an 'instance_id' key set (so we can't look up the service
        # instance).
self.assertFalse(self._manager.ensure_service_instance(
self._manager.admin_context, {'ip': '1.2.3.4'}))
def test_get_key_create_new(self):
keypair_name = self._manager.get_config_option(
'manila_service_keypair_name')
fake_keypair = fake_compute.FakeKeypair(name=keypair_name)
self.mock_object(self._manager.compute_api, 'keypair_list',
mock.Mock(return_value=[]))
self.mock_object(self._manager.compute_api, 'keypair_import',
mock.Mock(return_value=fake_keypair))
result = self._manager._get_key(self._manager.admin_context)
self.assertEqual(
(fake_keypair.name,
os.path.expanduser(self._manager.get_config_option(
'path_to_private_key'))),
result)
self._manager.compute_api.keypair_list.assert_called_once_with(
self._manager.admin_context)
self._manager.compute_api.keypair_import.assert_called_once_with(
self._manager.admin_context, keypair_name, '')
def test_get_key_exists(self):
fake_keypair = fake_compute.FakeKeypair(
name=self._manager.get_config_option(
'manila_service_keypair_name'),
public_key='fake_public_key')
self.mock_object(self._manager.compute_api, 'keypair_list',
mock.Mock(return_value=[fake_keypair]))
self.mock_object(self._manager.compute_api, 'keypair_import',
mock.Mock(return_value=fake_keypair))
self.mock_object(self._manager, '_execute',
mock.Mock(return_value=('fake_public_key', '')))
result = self._manager._get_key(self._manager.admin_context)
self._manager.compute_api.keypair_list.assert_called_once_with(
self._manager.admin_context)
self.assertFalse(self._manager.compute_api.keypair_import.called)
self.assertEqual(
(fake_keypair.name,
os.path.expanduser(self._manager.get_config_option(
'path_to_private_key'))),
result)
def test_get_key_exists_recreate(self):
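        # The local public key ('fake_public_key2') no longer matches the
        # stored keypair, so the old keypair must be deleted and re-imported
        # with the new key.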
fake_keypair = fake_compute.FakeKeypair(
name=self._manager.get_config_option(
'manila_service_keypair_name'),
public_key='fake_public_key1')
self.mock_object(self._manager.compute_api, 'keypair_list',
mock.Mock(return_value=[fake_keypair]))
self.mock_object(self._manager.compute_api, 'keypair_import',
mock.Mock(return_value=fake_keypair))
self.mock_object(self._manager.compute_api, 'keypair_delete')
self.mock_object(self._manager, '_execute',
mock.Mock(return_value=('fake_public_key2', '')))
result = self._manager._get_key(self._manager.admin_context)
self._manager.compute_api.keypair_list.assert_called_once_with(
self._manager.admin_context)
self._manager.compute_api.keypair_delete.assert_called_once_with(
self._manager.admin_context, fake_keypair.id)
self._manager.compute_api.keypair_import.assert_called_once_with(
self._manager.admin_context, fake_keypair.name, 'fake_public_key2')
self.assertEqual(
(fake_keypair.name,
os.path.expanduser(self._manager.get_config_option(
'path_to_private_key'))),
result)
def test_get_key_more_than_one_exist(self):
fake_keypair = fake_compute.FakeKeypair(
name=self._manager.get_config_option(
'manila_service_keypair_name'),
public_key='fake_public_key1')
self.mock_object(self._manager.compute_api, 'keypair_list',
mock.Mock(return_value=[fake_keypair, fake_keypair]))
self.assertRaises(
exception.ServiceInstanceException,
self._manager._get_key, self._manager.admin_context)
self._manager.compute_api.keypair_list.assert_called_once_with(
self._manager.admin_context)
def test_get_key_keypath_to_public_not_set(self):
self._manager.path_to_public_key = None
result = self._manager._get_key(self._manager.admin_context)
self.assertEqual((None, None), result)
def test_get_key_keypath_to_private_not_set(self):
self._manager.path_to_private_key = None
result = self._manager._get_key(self._manager.admin_context)
self.assertEqual((None, None), result)
def test_get_key_incorrect_keypath_to_public(self):
def exists_side_effect(path):
            return path != 'fake_path'
self._manager.path_to_public_key = 'fake_path'
os_path_exists_mock = mock.Mock(side_effect=exists_side_effect)
with mock.patch.object(os.path, 'exists', os_path_exists_mock):
with mock.patch.object(os.path, 'expanduser',
mock.Mock(side_effect=lambda value: value)):
result = self._manager._get_key(self._manager.admin_context)
self.assertEqual((None, None), result)
def test_get_key_incorrect_keypath_to_private(self):
def exists_side_effect(path):
            return path != 'fake_path'
self._manager.path_to_private_key = 'fake_path'
os_path_exists_mock = mock.Mock(side_effect=exists_side_effect)
with mock.patch.object(os.path, 'exists', os_path_exists_mock):
with mock.patch.object(os.path, 'expanduser',
mock.Mock(side_effect=lambda value: value)):
result = self._manager._get_key(self._manager.admin_context)
self.assertEqual((None, None), result)
def test_get_service_image(self):
fake_image1 = fake_compute.FakeImage(
name=self._manager.get_config_option('service_image_name'),
status='active')
fake_image2 = fake_compute.FakeImage(
name='service_image_name',
status='error')
fake_image3 = fake_compute.FakeImage(
name='another-image',
status='active')
self.mock_object(self._manager.image_api, 'image_list',
mock.Mock(return_value=[fake_image1,
fake_image2,
fake_image3]))
result = self._manager._get_service_image(self._manager.admin_context)
self.assertEqual(fake_image1.id, result)
def test_get_service_image_not_found(self):
self.mock_object(self._manager.image_api, 'image_list',
mock.Mock(return_value=[]))
self.assertRaises(
exception.ServiceInstanceException,
self._manager._get_service_image, self._manager.admin_context)
def test_get_service_image_not_active(self):
fake_error_image = fake_compute.FakeImage(
name='service_image_name',
status='error')
self.mock_object(self._manager.image_api, 'image_list',
mock.Mock(return_value=[fake_error_image]))
self.assertRaises(
exception.ServiceInstanceException,
self._manager._get_service_image, self._manager.admin_context)
def test_get_service_image_ambiguous(self):
fake_image = fake_compute.FakeImage(
name=fake_get_config_option('service_image_name'),
status='active')
fake_images = [fake_image, fake_image]
self.mock_object(self._manager.image_api, 'image_list',
mock.Mock(return_value=fake_images))
self.assertRaises(
exception.ServiceInstanceException,
self._manager._get_service_image, self._manager.admin_context)
def test__delete_server_not_found(self):
self.mock_object(self._manager.compute_api, 'server_delete')
self.mock_object(
self._manager.compute_api, 'server_get',
mock.Mock(side_effect=exception.InstanceNotFound(
instance_id=self.instance_id)))
self._manager._delete_server(
self._manager.admin_context, self.instance_id)
self.assertFalse(self._manager.compute_api.server_delete.called)
self._manager.compute_api.server_get.assert_called_once_with(
self._manager.admin_context, self.instance_id)
def test__delete_server(self):
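        # server_get succeeds on the first call and raises InstanceNotFound on
        # the second, emulating an instance that disappears once deleted.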
def fake_server_get(*args, **kwargs):
ctx = args[0]
if not hasattr(ctx, 'called'):
ctx.called = True
return
else:
raise exception.InstanceNotFound(instance_id=self.instance_id)
self.mock_object(self._manager.compute_api, 'server_delete')
self.mock_object(self._manager.compute_api, 'server_get',
mock.Mock(side_effect=fake_server_get))
self._manager._delete_server(
self._manager.admin_context, self.instance_id)
self._manager.compute_api.server_delete.assert_called_once_with(
self._manager.admin_context, self.instance_id)
self._manager.compute_api.server_get.assert_has_calls([
mock.call(self._manager.admin_context, self.instance_id),
mock.call(self._manager.admin_context, self.instance_id)])
def test__delete_server_found_always(self):
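        # Drive the polling loop with a fake clock so it exceeds
        # max_time_to_build_instance while server_get keeps returning the
        # instance; this must end in ServiceInstanceException.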
self.fake_time = 0
def fake_time():
return self.fake_time
def fake_sleep(time):
self.fake_time += 1
server_details = {'instance_id': 'fake_inst_id', 'status': 'ACTIVE'}
self.mock_object(self._manager.compute_api, 'server_delete')
self.mock_object(self._manager.compute_api, 'server_get',
mock.Mock(return_value=server_details))
self.mock_object(service_instance, 'time')
self.mock_object(
service_instance.time, 'time', mock.Mock(side_effect=fake_time))
self.mock_object(
service_instance.time, 'sleep', mock.Mock(side_effect=fake_sleep))
self.mock_object(self._manager, 'max_time_to_build_instance', 2)
self.assertRaises(
exception.ServiceInstanceException, self._manager._delete_server,
self._manager.admin_context, self.instance_id)
self._manager.compute_api.server_delete.assert_called_once_with(
self._manager.admin_context, self.instance_id)
service_instance.time.sleep.assert_has_calls(
[mock.call(mock.ANY) for i in range(2)])
service_instance.time.time.assert_has_calls(
[mock.call() for i in range(4)])
self._manager.compute_api.server_get.assert_has_calls(
[mock.call(self._manager.admin_context,
self.instance_id) for i in range(3)])
def test_delete_server_soft_deleted(self):
server_details = {'instance_id': 'fake_inst_id',
'status': 'SOFT_DELETED'}
self.mock_object(self._manager.compute_api, 'server_delete')
self.mock_object(self._manager.compute_api, 'server_get',
mock.Mock(return_value=server_details))
self._manager._delete_server(
self._manager.admin_context, self.instance_id)
self._manager.compute_api.server_delete.assert_called_once_with(
self._manager.admin_context, self.instance_id)
self._manager.compute_api.server_get.assert_has_calls([
mock.call(self._manager.admin_context, self.instance_id),
mock.call(self._manager.admin_context, self.instance_id)])
def test_delete_service_instance(self):
fake_server_details = dict(
router_id='foo', subnet_id='bar', instance_id='quuz')
self.mock_object(self._manager, '_delete_server')
self.mock_object(self._manager.network_helper, 'teardown_network')
self._manager.delete_service_instance(
self._manager.admin_context, fake_server_details)
self._manager._delete_server.assert_called_once_with(
self._manager.admin_context, fake_server_details['instance_id'])
self._manager.network_helper.teardown_network.assert_called_once_with(
fake_server_details)
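    # Each case pairs service/tenant config values (a network name or a
    # literal IP) with a server payload in either 'networks' or 'addresses'
    # form; every combination should resolve to the expected addresses.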
@ddt.data(
*[{'service_config': service_config,
'tenant_config': tenant_config,
'server': server}
for service_config, tenant_config in (
('fake_net_s', 'fake_net_t'),
('fake_net_s', '12.34.56.78'),
('98.76.54.123', 'fake_net_t'),
('98.76.54.123', '12.34.56.78'))
for server in (
{'networks': {
'fake_net_s': ['foo', '98.76.54.123', 'bar'],
'fake_net_t': ['baar', '12.34.56.78', 'quuz']}},
{'addresses': {
'fake_net_s': [
{'addr': 'fake1'},
{'addr': '98.76.54.123'},
{'addr': 'fake2'}],
'fake_net_t': [
{'addr': 'fake3'},
{'addr': '12.34.56.78'},
{'addr': 'fake4'}],
}})])
@ddt.unpack
def test_get_common_server_valid_cases(self, service_config,
tenant_config, server):
self._get_common_server(service_config, tenant_config, server,
'98.76.54.123', '12.34.56.78', True)
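    # Unknown network names and malformed IPv4/IPv6 literals must make
    # get_common_server fail.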
@ddt.data(
*[{'service_config': service_config,
'tenant_config': tenant_config,
'server': server}
for service_config, tenant_config in (
('fake_net_s', 'fake'),
('fake', 'fake_net_t'),
('fake', 'fake'),
('98.76.54.123', '12.12.12.1212'),
('12.12.12.1212', '12.34.56.78'),
('12.12.12.1212', '12.12.12.1212'),
('1001::1001', '1001::100G'),
('1001::10G1', '1001::1001'),
)
for server in (
{'networks': {
'fake_net_s': ['foo', '98.76.54.123', 'bar'],
'fake_net_t': ['baar', '12.34.56.78', 'quuz']}},
{'addresses': {
'fake_net_s': [
{'addr': 'fake1'},
{'addr': '98.76.54.123'},
{'addr': 'fake2'}],
'fake_net_t': [
{'addr': 'fake3'},
{'addr': '12.34.56.78'},
{'addr': 'fake4'}],
}})])
@ddt.unpack
def test_get_common_server_invalid_cases(self, service_config,
tenant_config, server):
self._get_common_server(service_config, tenant_config, server,
'98.76.54.123', '12.34.56.78', False)
@ddt.data(
*[{'service_config': service_config,
'tenant_config': tenant_config,
'server': server}
for service_config, tenant_config in (
('fake_net_s', '1001::1002'),
('1001::1001', 'fake_net_t'),
('1001::1001', '1001::1002'))
for server in (
{'networks': {
'fake_net_s': ['foo', '1001::1001'],
'fake_net_t': ['bar', '1001::1002']}},
{'addresses': {
'fake_net_s': [{'addr': 'foo'}, {'addr': '1001::1001'}],
'fake_net_t': [{'addr': 'bar'}, {'addr': '1001::1002'}]}})])
@ddt.unpack
def test_get_common_server_valid_ipv6_address(self, service_config,
tenant_config, server):
self._get_common_server(service_config, tenant_config, server,
'1001::1001', '1001::1002', True)
def _get_common_server(self, service_config, tenant_config,
server, service_address, network_address,
is_valid=True):
fake_instance_id = 'fake_instance_id'
fake_user = 'fake_user'
fake_pass = 'fake_pass'
fake_server = {'id': fake_instance_id}
fake_server.update(server)
expected = {
'backend_details': {
'username': fake_user,
'password': fake_pass,
'pk_path': self._manager.path_to_private_key,
'ip': service_address,
'public_address': network_address,
'instance_id': fake_instance_id,
}
}
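        # Stub config lookups so only the address-resolution logic is
        # exercised; unexpected options fail the test loudly.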
def fake_get_config_option(attr):
if attr == 'service_net_name_or_ip':
return service_config
elif attr == 'tenant_net_name_or_ip':
return tenant_config
elif attr == 'service_instance_name_or_id':
return fake_instance_id
elif attr == 'service_instance_user':
return fake_user
elif attr == 'service_instance_password':
return fake_pass
else:
raise exception.ManilaException("Wrong test data provided.")
self.mock_object(
self._manager.compute_api, 'server_get_by_name_or_id',
mock.Mock(return_value=fake_server))
self.mock_object(
self._manager, 'get_config_option',
mock.Mock(side_effect=fake_get_config_option))
if is_valid:
actual = self._manager.get_common_server()
self.assertEqual(expected, actual)
else:
self.assertRaises(
exception.ManilaException,
self._manager.get_common_server)
self.assertTrue(
self._manager.compute_api.server_get_by_name_or_id.called)
def test___create_service_instance_with_sg_success(self):
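        # Happy path with limit_ssh_access enabled: network setup, image/key
        # lookup, security group creation, server boot and the assembled
        # server details are all verified.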
self.mock_object(service_instance, 'NeutronNetworkHelper',
mock.Mock(side_effect=FakeNetworkHelper))
config_data = dict(DEFAULT=dict(
driver_handles_share_servers=True,
service_instance_user='fake_user',
limit_ssh_access=True))
with test_utils.create_temp_config_with_opts(config_data):
self._manager = service_instance.ServiceInstanceManager()
server_create = dict(id='fakeid', status='CREATING', networks=dict())
net_name = self._manager.get_config_option("service_network_name")
sg = [{'id': 'fakeid', 'name': 'fakename'}, ]
ip_address = 'fake_ip_address'
service_image_id = 'fake_service_image_id'
key_data = 'fake_key_name', 'fake_key_path'
instance_name = 'fake_instance_name'
network_info = dict()
network_data = {'nics': ['fake_nic1', 'fake_nic2']}
network_data['router'] = dict(id='fake_router_id')
server_get = dict(
id='fakeid', status='ACTIVE', networks={net_name: [ip_address]})
network_data.update(dict(
router_id='fake_router_id', subnet_id='fake_subnet_id',
public_port=dict(id='fake_public_port',
fixed_ips=[dict(ip_address=ip_address)]),
service_port=dict(id='fake_service_port',
fixed_ips=[{'ip_address': ip_address}]),
admin_port={'id': 'fake_admin_port',
'fixed_ips': [{'ip_address': ip_address}]},
service_subnet={'id': 'fake_subnet_id',
'cidr': '10.254.0.0/28'})
)
self.mock_object(service_instance.time, 'time',
mock.Mock(return_value=5))
self.mock_object(self._manager.network_helper, 'setup_network',
mock.Mock(return_value=network_data))
self.mock_object(self._manager.network_helper, 'get_network_name',
mock.Mock(return_value=net_name))
self.mock_object(self._manager, '_get_service_image',
mock.Mock(return_value=service_image_id))
self.mock_object(self._manager, '_get_key',
mock.Mock(return_value=key_data))
self.mock_object(self._manager, '_get_or_create_security_groups',
mock.Mock(return_value=sg))
self.mock_object(self._manager.compute_api, 'server_create',
mock.Mock(return_value=server_create))
self.mock_object(self._manager.compute_api, 'server_get',
mock.Mock(return_value=server_get))
self.mock_object(self._manager.compute_api,
'add_security_group_to_server')
expected = {
'id': server_get['id'],
'status': server_get['status'],
'pk_path': key_data[1],
'public_address': ip_address,
'router_id': network_data.get('router_id'),
'subnet_id': network_data.get('subnet_id'),
'instance_id': server_get['id'],
'ip': ip_address,
'networks': server_get['networks'],
'public_port_id': 'fake_public_port',
'service_port_id': 'fake_service_port',
'admin_port_id': 'fake_admin_port',
'admin_ip': 'fake_ip_address',
}
result = self._manager._create_service_instance(
self._manager.admin_context, instance_name, network_info)
self.assertEqual(expected, result)
self.assertTrue(service_instance.time.time.called)
self._manager.network_helper.setup_network.assert_called_once_with(
network_info)
self._manager._get_service_image.assert_called_once_with(
self._manager.admin_context)
self._manager._get_key.assert_called_once_with(
self._manager.admin_context)
self._manager._get_or_create_security_groups.assert_called_once_with(
self._manager.admin_context,
allow_ssh_subnet=network_data['service_subnet'])
self._manager.compute_api.server_create.assert_called_once_with(
self._manager.admin_context, name=instance_name,
image=service_image_id, flavor='100',
key_name=key_data[0], nics=network_data['nics'],
availability_zone=service_instance.CONF.storage_availability_zone)
self._manager.compute_api.server_get.assert_called_once_with(
self._manager.admin_context, server_create['id'])
(self._manager.compute_api.add_security_group_to_server.
assert_called_once_with(
self._manager.admin_context,
server_get['id'],
sg[0]['id']))
self._manager.network_helper.get_network_name.assert_has_calls([])
def test___create_service_instance_neutron_no_admin_ip(self):
self.mock_object(service_instance, 'NeutronNetworkHelper',
mock.Mock(side_effect=FakeNetworkHelper))
config_data = {'DEFAULT': {
'driver_handles_share_servers': True,
'service_instance_user': 'fake_user',
'limit_ssh_access': True}}
with test_utils.create_temp_config_with_opts(config_data):
self._manager = service_instance.ServiceInstanceManager()
server_create = {'id': 'fakeid', 'status': 'CREATING', 'networks': {}}
net_name = self._manager.get_config_option("service_network_name")
sg = {'id': 'fakeid', 'name': 'fakename'}
ip_address = 'fake_ip_address'
service_image_id = 'fake_service_image_id'
key_data = 'fake_key_name', 'fake_key_path'
instance_name = 'fake_instance_name'
network_info = {}
network_data = {
'nics': ['fake_nic1', 'fake_nic2'],
'router_id': 'fake_router_id', 'subnet_id': 'fake_subnet_id',
'public_port': {'id': 'fake_public_port',
'fixed_ips': [{'ip_address': ip_address}]},
'service_port': {'id': 'fake_service_port',
'fixed_ips': [{'ip_address': ip_address}]},
'admin_port': {'id': 'fake_admin_port',
'fixed_ips': []},
'router': {'id': 'fake_router_id'},
'service_subnet': {'id': 'fake_id',
'cidr': '10.254.0.0/28'}
}
server_get = {
'id': 'fakeid', 'status': 'ACTIVE', 'networks':
{net_name: [ip_address]}}
self.mock_object(service_instance.time, 'time',
mock.Mock(return_value=5))
self.mock_object(self._manager.network_helper, 'setup_network',
mock.Mock(return_value=network_data))
self.mock_object(self._manager.network_helper, 'get_network_name',
mock.Mock(return_value=net_name))
self.mock_object(self._manager, '_get_service_image',
mock.Mock(return_value=service_image_id))
self.mock_object(self._manager, '_get_key',
mock.Mock(return_value=key_data))
self.mock_object(self._manager, '_get_or_create_security_groups',
mock.Mock(return_value=[sg, ]))
self.mock_object(self._manager.compute_api, 'server_create',
mock.Mock(return_value=server_create))
self.mock_object(self._manager.compute_api, 'server_get',
mock.Mock(return_value=server_get))
self.mock_object(self._manager.compute_api,
'add_security_group_to_server')
self.assertRaises(
exception.AdminIPNotFound, self._manager._create_service_instance,
self._manager.admin_context, instance_name, network_info)
self.assertTrue(service_instance.time.time.called)
self._manager.network_helper.setup_network.assert_called_once_with(
network_info)
self._manager._get_service_image.assert_called_once_with(
self._manager.admin_context)
self._manager._get_key.assert_called_once_with(
self._manager.admin_context)
self._manager._get_or_create_security_groups.assert_called_once_with(
self._manager.admin_context,
allow_ssh_subnet=network_data['service_subnet'])
self._manager.compute_api.server_create.assert_called_once_with(
self._manager.admin_context, name=instance_name,
image=service_image_id, flavor='100',
key_name=key_data[0], nics=network_data['nics'],
availability_zone=service_instance.CONF.storage_availability_zone)
self._manager.compute_api.server_get.assert_called_once_with(
self._manager.admin_context, server_create['id'])
(self._manager.compute_api.add_security_group_to_server.
assert_called_once_with(
self._manager.admin_context, server_get['id'], sg['id']))
self._manager.network_helper.get_network_name.assert_has_calls([])
@ddt.data(
dict(
instance_id_included=False,
mockobj=mock.Mock(side_effect=exception.ServiceInstanceException)),
dict(
instance_id_included=True,
mockobj=mock.Mock(return_value=dict(id='fakeid', status='ERROR'))))
@ddt.unpack
def test___create_service_instance_failed_to_create(
self, instance_id_included, mockobj):
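        # Two failure modes: server_create raises immediately, or the instance
        # is created but never becomes active. Either way detail_data must
        # carry enough information for cleanup.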
service_image_id = 'fake_service_image_id'
key_data = 'fake_key_name', 'fake_key_path'
instance_name = 'fake_instance_name'
network_info = dict()
network_data = dict(
nics=['fake_nic1', 'fake_nic2'],
router_id='fake_router_id', subnet_id='fake_subnet_id')
self.mock_object(self._manager.network_helper, 'setup_network',
mock.Mock(return_value=network_data))
self.mock_object(self._manager, '_get_service_image',
mock.Mock(return_value=service_image_id))
self.mock_object(self._manager, '_get_key',
mock.Mock(return_value=key_data))
self.mock_object(
self._manager.compute_api, 'server_create', mockobj)
self.mock_object(
self._manager, 'wait_for_instance_to_be_active',
mock.Mock(side_effect=exception.ServiceInstanceException))
try:
self._manager._create_service_instance(
self._manager.admin_context, instance_name, network_info)
except exception.ServiceInstanceException as e:
expected = dict(server_details=dict(
subnet_id=network_data['subnet_id'],
router_id=network_data['router_id']))
if instance_id_included:
expected['server_details']['instance_id'] = 'fakeid'
self.assertEqual(expected, e.detail_data)
else:
raise exception.ManilaException('Expected error was not raised.')
self._manager.network_helper.setup_network.assert_called_once_with(
network_info)
self._manager._get_service_image.assert_called_once_with(
self._manager.admin_context)
self._manager._get_key.assert_called_once_with(
self._manager.admin_context)
self._manager.compute_api.server_create.assert_called_once_with(
self._manager.admin_context, name=instance_name,
image=service_image_id, flavor='100',
key_name=key_data[0], nics=network_data['nics'],
availability_zone=service_instance.CONF.storage_availability_zone)
def test___create_service_instance_limit_ssh_no_service_subnet(self):
self.mock_object(service_instance, 'NeutronNetworkHelper',
mock.Mock(side_effect=FakeNetworkHelper))
config_data = dict(DEFAULT=dict(
driver_handles_share_servers=True,
service_instance_user='fake_user',
limit_ssh_access=True))
with test_utils.create_temp_config_with_opts(config_data):
self._manager = service_instance.ServiceInstanceManager()
server_create = dict(id='fakeid', status='CREATING', networks=dict())
net_name = self._manager.get_config_option("service_network_name")
ip_address = 'fake_ip_address'
service_image_id = 'fake_service_image_id'
key_data = 'fake_key_name', 'fake_key_path'
instance_name = 'fake_instance_name'
network_info = dict()
network_data = {'nics': ['fake_nic1', 'fake_nic2']}
network_data['router'] = dict(id='fake_router_id')
server_get = dict(
id='fakeid', status='ACTIVE', networks={net_name: [ip_address]})
network_data.update(dict(
router_id='fake_router_id', subnet_id='fake_subnet_id',
public_port=dict(id='fake_public_port',
fixed_ips=[dict(ip_address=ip_address)]),
service_port=dict(id='fake_service_port',
fixed_ips=[{'ip_address': ip_address}]),
admin_port={'id': 'fake_admin_port',
'fixed_ips': [{'ip_address': ip_address}]},)
)
self.mock_object(service_instance.time, 'time',
mock.Mock(return_value=5))
self.mock_object(self._manager.network_helper, 'setup_network',
mock.Mock(return_value=network_data))
self.mock_object(self._manager.network_helper, 'get_network_name',
mock.Mock(return_value=net_name))
self.mock_object(self._manager, '_get_service_image',
mock.Mock(return_value=service_image_id))
self.mock_object(self._manager, '_get_key',
mock.Mock(return_value=key_data))
self.mock_object(self._manager.compute_api, 'server_create',
mock.Mock(return_value=server_create))
self.mock_object(self._manager.compute_api, 'server_get',
mock.Mock(return_value=server_get))
self.assertRaises(exception.ManilaException,
self._manager._create_service_instance,
self._manager.admin_context, instance_name,
network_info)
def test___create_service_instance_failed_to_build(self):
server_create = dict(id='fakeid', status='CREATING', networks=dict())
service_image_id = 'fake_service_image_id'
key_data = 'fake_key_name', 'fake_key_path'
instance_name = 'fake_instance_name'
network_info = dict()
network_data = dict(
nics=['fake_nic1', 'fake_nic2'],
router_id='fake_router_id', subnet_id='fake_subnet_id')
self.mock_object(self._manager.network_helper, 'setup_network',
mock.Mock(return_value=network_data))
self.mock_object(self._manager, '_get_service_image',
mock.Mock(return_value=service_image_id))
self.mock_object(self._manager, '_get_key',
mock.Mock(return_value=key_data))
self.mock_object(self._manager.compute_api, 'server_create',
mock.Mock(return_value=server_create))
self.mock_object(
self._manager, 'wait_for_instance_to_be_active',
mock.Mock(side_effect=exception.ServiceInstanceException))
try:
self._manager._create_service_instance(
self._manager.admin_context, instance_name, network_info)
except exception.ServiceInstanceException as e:
self.assertEqual(
dict(server_details=dict(subnet_id=network_data['subnet_id'],
router_id=network_data['router_id'],
instance_id=server_create['id'])),
e.detail_data)
else:
raise exception.ManilaException('Expected error was not raised.')
self._manager.network_helper.setup_network.assert_called_once_with(
network_info)
self._manager._get_service_image.assert_called_once_with(
self._manager.admin_context)
self._manager._get_key.assert_called_once_with(
self._manager.admin_context)
self._manager.compute_api.server_create.assert_called_once_with(
self._manager.admin_context, name=instance_name,
image=service_image_id, flavor='100',
key_name=key_data[0], nics=network_data['nics'],
availability_zone=service_instance.CONF.storage_availability_zone)
@ddt.data(
dict(name=None, path=None),
dict(name=None, path='/tmp'))
@ddt.unpack
def test__create_service_instance_no_key_and_no_path(self, name, path):
key_data = name, path
self.mock_object(self._manager, '_get_service_image')
self.mock_object(self._manager, '_get_key',
mock.Mock(return_value=key_data))
self.assertRaises(
exception.ServiceInstanceException,
self._manager._create_service_instance,
self._manager.admin_context, 'fake_instance_name', dict())
self._manager._get_service_image.assert_called_once_with(
self._manager.admin_context)
self._manager._get_key.assert_called_once_with(
self._manager.admin_context)
@mock.patch('time.sleep')
@mock.patch('time.time')
def _test_wait_for_instance(self, mock_time, mock_sleep,
server_get_side_eff=None,
expected_try_count=1,
expected_sleep_count=0,
expected_ret_val=None,
expected_exc=None):
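        # Shared helper: feed wait_for_instance_to_be_active a scripted
        # sequence of server_get results under a fake clock, then verify the
        # number of polls/sleeps and the outcome.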
mock_server_get = mock.Mock(side_effect=server_get_side_eff)
self.mock_object(self._manager.compute_api, 'server_get',
mock_server_get)
self.fake_time = 0
def fake_time():
return self.fake_time
def fake_sleep(sleep_time):
self.fake_time += sleep_time
# Note(lpetrut): LOG methods can call time.time
mock_time.side_effect = fake_time
mock_sleep.side_effect = fake_sleep
timeout = 3
if expected_exc:
self.assertRaises(
expected_exc,
self._manager.wait_for_instance_to_be_active,
instance_id=mock.sentinel.instance_id,
timeout=timeout)
else:
instance = self._manager.wait_for_instance_to_be_active(
instance_id=mock.sentinel.instance_id,
timeout=timeout)
self.assertEqual(expected_ret_val, instance)
mock_server_get.assert_has_calls(
[mock.call(self._manager.admin_context,
mock.sentinel.instance_id)] * expected_try_count)
mock_sleep.assert_has_calls([mock.call(1)] * expected_sleep_count)
def test_wait_for_instance_timeout(self):
server_get_side_eff = [
exception.InstanceNotFound(
instance_id=mock.sentinel.instance_id),
{'status': 'BUILDING'},
{'status': 'ACTIVE'}]
# Note that in this case, although the status is active, the
# 'networks' field is missing.
self._test_wait_for_instance( # pylint: disable=no-value-for-parameter
server_get_side_eff=server_get_side_eff,
expected_exc=exception.ServiceInstanceException,
expected_try_count=3,
expected_sleep_count=3)
def test_wait_for_instance_error_state(self):
mock_instance = {'status': 'ERROR'}
self._test_wait_for_instance( # pylint: disable=no-value-for-parameter
server_get_side_eff=[mock_instance],
expected_exc=exception.ServiceInstanceException,
expected_try_count=1)
def test_wait_for_instance_available(self):
mock_instance = {'status': 'ACTIVE',
'networks': mock.sentinel.networks}
self._test_wait_for_instance( # pylint: disable=no-value-for-parameter
server_get_side_eff=[mock_instance],
expected_try_count=1,
expected_ret_val=mock_instance)
def test_reboot_server(self):
fake_server = {'instance_id': mock.sentinel.instance_id}
soft_reboot = True
mock_reboot = mock.Mock()
self.mock_object(self._manager.compute_api, 'server_reboot',
mock_reboot)
self._manager.reboot_server(fake_server, soft_reboot)
mock_reboot.assert_called_once_with(self._manager.admin_context,
fake_server['instance_id'],
soft_reboot)
class BaseNetworkHelperTestCase(test.TestCase):
"""Tests Base network helper for service instance."""
def test_instantiate_valid(self):
class FakeNetworkHelper(service_instance.BaseNetworkhelper):
@property
def NAME(self):
return 'fake_NAME'
def __init__(self, service_instance_manager):
self.fake_init = 'fake_init_value'
def get_network_name(self, network_info):
return 'fake_network_name'
def setup_connectivity_with_service_instances(self):
return 'fake_setup_connectivity_with_service_instances'
def setup_network(self, network_info):
return 'fake_setup_network'
def teardown_network(self, server_details):
return 'fake_teardown_network'
instance = FakeNetworkHelper('fake')
attrs = [
'fake_init', 'NAME', 'get_network_name', 'teardown_network',
'setup_connectivity_with_service_instances', 'setup_network',
]
for attr in attrs:
self.assertTrue(hasattr(instance, attr))
self.assertEqual('fake_init_value', instance.fake_init)
self.assertEqual('fake_NAME', instance.NAME)
self.assertEqual(
'fake_network_name', instance.get_network_name('fake'))
self.assertEqual(
'fake_setup_connectivity_with_service_instances',
instance.setup_connectivity_with_service_instances())
self.assertEqual('fake_setup_network', instance.setup_network('fake'))
self.assertEqual(
'fake_teardown_network', instance.teardown_network('fake'))
def test_instantiate_invalid(self):
self.assertRaises(
TypeError, service_instance.BaseNetworkhelper, 'fake')
@ddt.ddt
class NeutronNetworkHelperTestCase(test.TestCase):
"""Tests Neutron network helper for service instance."""
def setUp(self):
super(NeutronNetworkHelperTestCase, self).setUp()
self.mock_object(importutils, 'import_class')
self.fake_manager = FakeServiceInstance()
def _init_neutron_network_plugin(self):
self.mock_object(
service_instance.NeutronNetworkHelper, '_get_service_network_id',
mock.Mock(return_value='fake_service_network_id'))
return service_instance.NeutronNetworkHelper(self.fake_manager)
def test_init_neutron_network_plugin(self):
instance = self._init_neutron_network_plugin()
self.assertEqual(service_instance.NEUTRON_NAME, instance.NAME)
attrs = [
'neutron_api', 'vif_driver', 'service_network_id',
'connect_share_server_to_tenant_network', 'get_config_option']
for attr in attrs:
self.assertTrue(hasattr(instance, attr), "No attr '%s'" % attr)
(service_instance.NeutronNetworkHelper._get_service_network_id.
assert_called_once_with())
self.assertEqual('DEFAULT', instance.neutron_api.config_group_name)
def test_init_neutron_network_plugin_with_driver_config_group(self):
self.fake_manager.driver_config = mock.Mock()
self.fake_manager.driver_config.config_group = (
'fake_config_group')
self.fake_manager.driver_config.network_config_group = None
instance = self._init_neutron_network_plugin()
self.assertEqual('fake_config_group',
instance.neutron_api.config_group_name)
def test_init_neutron_network_plugin_with_network_config_group(self):
self.fake_manager.driver_config = mock.Mock()
self.fake_manager.driver_config.config_group = (
"fake_config_group")
self.fake_manager.driver_config.network_config_group = (
"fake_network_config_group")
instance = self._init_neutron_network_plugin()
self.assertEqual('fake_network_config_group',
instance.neutron_api.config_group_name)
def test_admin_project_id(self):
instance = self._init_neutron_network_plugin()
admin_project_id = 'fake_admin_project_id'
self.mock_class('manila.network.neutron.api.API', mock.Mock())
instance.neutron_api.admin_project_id = admin_project_id
self.assertEqual(admin_project_id, instance.admin_project_id)
def test_get_network_name(self):
network_info = dict(neutron_net_id='fake_neutron_net_id')
network = dict(name='fake_network_name')
instance = self._init_neutron_network_plugin()
self.mock_object(
instance.neutron_api, 'get_network',
mock.Mock(return_value=network))
result = instance.get_network_name(network_info)
self.assertEqual(network['name'], result)
instance.neutron_api.get_network.assert_called_once_with(
network_info['neutron_net_id'])
def test_get_service_network_id_none_exist(self):
service_network_name = fake_get_config_option('service_network_name')
network = dict(id='fake_network_id')
admin_project_id = 'fake_admin_project_id'
self.mock_object(
service_instance.neutron.API, 'get_all_admin_project_networks',
mock.Mock(return_value=[]))
self.mock_object(
service_instance.neutron.API, 'admin_project_id',
mock.Mock(return_value=admin_project_id))
self.mock_object(
service_instance.neutron.API, 'network_create',
mock.Mock(return_value=network))
instance = service_instance.NeutronNetworkHelper(self.fake_manager)
result = instance._get_service_network_id()
self.assertEqual(network['id'], result)
self.assertTrue(service_instance.neutron.API.
get_all_admin_project_networks.called)
service_instance.neutron.API.network_create.assert_has_calls([
mock.call(instance.admin_project_id, service_network_name)])
def test_get_service_network_id_one_exist(self):
service_network_name = fake_get_config_option('service_network_name')
network = dict(id='fake_network_id', name=service_network_name)
admin_project_id = 'fake_admin_project_id'
self.mock_object(
service_instance.neutron.API, 'get_all_admin_project_networks',
mock.Mock(return_value=[network]))
self.mock_object(
service_instance.neutron.API, 'admin_project_id',
mock.Mock(return_value=admin_project_id))
instance = service_instance.NeutronNetworkHelper(self.fake_manager)
result = instance._get_service_network_id()
self.assertEqual(network['id'], result)
self.assertTrue(service_instance.neutron.API.
get_all_admin_project_networks.called)
def test_get_service_network_id_two_exist(self):
service_network_name = fake_get_config_option('service_network_name')
network = dict(id='fake_network_id', name=service_network_name)
self.mock_object(
service_instance.neutron.API, 'get_all_admin_project_networks',
mock.Mock(return_value=[network, network]))
helper = service_instance.NeutronNetworkHelper(self.fake_manager)
self.assertRaises(exception.ManilaException,
lambda: helper.service_network_id)
(service_instance.neutron.API.get_all_admin_project_networks.
assert_has_calls([mock.call()]))
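    # Teardown should not touch the router unless both subnet_id and
    # router_id are present in the server details.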
@ddt.data(dict(), dict(subnet_id='foo'), dict(router_id='bar'))
def test_teardown_network_no_service_data(self, server_details):
instance = self._init_neutron_network_plugin()
self.mock_object(
service_instance.neutron.API, 'router_remove_interface')
instance.teardown_network(server_details)
self.assertFalse(
service_instance.neutron.API.router_remove_interface.called)
@ddt.data(
*[dict(server_details=sd, fail=f) for f in (True, False)
for sd in (dict(service_port_id='fake_service_port_id'),
dict(public_port_id='fake_public_port_id'),
dict(service_port_id='fake_service_port_id',
public_port_id='fake_public_port_id'))]
)
@ddt.unpack
def test_teardown_network_with_ports(self, server_details, fail):
instance = self._init_neutron_network_plugin()
self.mock_object(
service_instance.neutron.API, 'router_remove_interface')
if fail:
delete_port_mock = mock.Mock(
side_effect=exception.NetworkException(code=404))
else:
delete_port_mock = mock.Mock()
self.mock_object(instance.neutron_api, 'delete_port', delete_port_mock)
self.mock_object(service_instance.LOG, 'debug')
instance.teardown_network(server_details)
self.assertFalse(instance.neutron_api.router_remove_interface.called)
self.assertEqual(
len(server_details),
len(instance.neutron_api.delete_port.mock_calls))
for k, v in server_details.items():
self.assertIn(
mock.call(v), instance.neutron_api.delete_port.mock_calls)
if fail:
service_instance.LOG.debug.assert_has_calls([
mock.call(mock.ANY, mock.ANY) for sd in server_details
])
else:
service_instance.LOG.debug.assert_has_calls([])
@ddt.data(
dict(service_port_id='fake_service_port_id'),
dict(public_port_id='fake_public_port_id'),
dict(service_port_id='fake_service_port_id',
public_port_id='fake_public_port_id'),
)
def test_teardown_network_with_ports_unhandled_exception(self,
server_details):
instance = self._init_neutron_network_plugin()
self.mock_object(
service_instance.neutron.API, 'router_remove_interface')
delete_port_mock = mock.Mock(
side_effect=exception.NetworkException(code=500))
self.mock_object(
service_instance.neutron.API, 'delete_port', delete_port_mock)
self.mock_object(service_instance.LOG, 'debug')
self.assertRaises(
exception.NetworkException,
instance.teardown_network,
server_details,
)
self.assertFalse(
service_instance.neutron.API.router_remove_interface.called)
service_instance.neutron.API.delete_port.assert_called_once_with(
mock.ANY)
service_instance.LOG.debug.assert_has_calls([])
def test_teardown_network_with_wrong_ports(self):
instance = self._init_neutron_network_plugin()
self.mock_object(
service_instance.neutron.API, 'router_remove_interface')
self.mock_object(
service_instance.neutron.API, 'delete_port')
self.mock_object(service_instance.LOG, 'debug')
instance.teardown_network(dict(foo_id='fake_service_port_id'))
service_instance.neutron.API.router_remove_interface.assert_has_calls(
[])
service_instance.neutron.API.delete_port.assert_has_calls([])
service_instance.LOG.debug.assert_has_calls([])
def test_teardown_network_subnet_is_used(self):
server_details = dict(subnet_id='foo', router_id='bar')
fake_ports = [
{'device_id': 'fake_device_id',
'device_owner': 'compute:foo'},
]
instance = self._init_neutron_network_plugin()
self.mock_object(
service_instance.neutron.API, 'router_remove_interface')
self.mock_object(
service_instance.neutron.API, 'update_subnet')
self.mock_object(
service_instance.neutron.API, 'list_ports',
mock.Mock(return_value=fake_ports))
instance.teardown_network(server_details)
self.assertFalse(
service_instance.neutron.API.router_remove_interface.called)
self.assertFalse(service_instance.neutron.API.update_subnet.called)
service_instance.neutron.API.list_ports.assert_called_once_with(
fields=['device_id', 'device_owner'], fixed_ips=['subnet_id=foo'])
def test_teardown_network_subnet_not_used(self):
server_details = dict(subnet_id='foo', router_id='bar')
fake_ports = [
{'device_id': 'fake_device_id',
'device_owner': 'network:router_interface'},
{'device_id': 'fake_device_id',
'device_owner': 'compute'},
{'device_id': '',
'device_owner': 'compute'},
]
instance = self._init_neutron_network_plugin()
self.mock_object(
service_instance.neutron.API, 'router_remove_interface')
self.mock_object(
service_instance.neutron.API, 'update_subnet')
self.mock_object(
service_instance.neutron.API, 'list_ports',
mock.Mock(return_value=fake_ports))
instance.teardown_network(server_details)
(service_instance.neutron.API.router_remove_interface.
assert_called_once_with('bar', 'foo'))
(service_instance.neutron.API.update_subnet.
assert_called_once_with('foo', ''))
service_instance.neutron.API.list_ports.assert_called_once_with(
fields=['device_id', 'device_owner'], fixed_ips=['subnet_id=foo'])
def test_teardown_network_subnet_not_used_and_get_error_404(self):
server_details = dict(subnet_id='foo', router_id='bar')
fake_ports = [
{'device_id': 'fake_device_id',
'device_owner': 'fake'},
]
instance = self._init_neutron_network_plugin()
self.mock_object(
service_instance.neutron.API, 'router_remove_interface',
mock.Mock(side_effect=exception.NetworkException(code=404)))
self.mock_object(
service_instance.neutron.API, 'update_subnet')
self.mock_object(
service_instance.neutron.API, 'list_ports',
mock.Mock(return_value=fake_ports))
instance.teardown_network(server_details)
(service_instance.neutron.API.router_remove_interface.
assert_called_once_with('bar', 'foo'))
(service_instance.neutron.API.update_subnet.
assert_called_once_with('foo', ''))
service_instance.neutron.API.list_ports.assert_called_once_with(
fields=['device_id', 'device_owner'], fixed_ips=['subnet_id=foo'])
def test_teardown_network_subnet_not_used_get_unhandled_error(self):
server_details = dict(subnet_id='foo', router_id='bar')
fake_ports = [
{'device_id': 'fake_device_id',
'device_owner': 'fake'},
]
instance = self._init_neutron_network_plugin()
self.mock_object(
service_instance.neutron.API, 'router_remove_interface',
mock.Mock(side_effect=exception.NetworkException(code=500)))
self.mock_object(
service_instance.neutron.API, 'update_subnet')
self.mock_object(
service_instance.neutron.API, 'list_ports',
mock.Mock(return_value=fake_ports))
self.assertRaises(
exception.NetworkException,
instance.teardown_network, server_details)
(service_instance.neutron.API.router_remove_interface.
assert_called_once_with('bar', 'foo'))
self.assertFalse(service_instance.neutron.API.update_subnet.called)
service_instance.neutron.API.list_ports.assert_called_once_with(
fields=['device_id', 'device_owner'], fixed_ips=['subnet_id=foo'])
def test_setup_network_and_connect_share_server_to_tenant_net(self):
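        # create_port is invoked once per network; dispatch on the network id
        # so each call returns the matching fake port.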
def fake_create_port(*aargs, **kwargs):
if aargs[1] == 'fake_service_network_id':
return self.service_port
elif aargs[1] == 'fake_tenant_network_id':
return self.public_port
else:
raise exception.ManilaException('Got unexpected data')
admin_project_id = 'fake_admin_project_id'
network_info = dict(
neutron_net_id='fake_tenant_network_id',
neutron_subnet_id='fake_tenant_subnet_id')
cidr = '13.0.0.0/24'
self.service_port = dict(
id='fake_service_port_id',
fixed_ips=[dict(ip_address='fake_service_port_ip_address')])
self.public_port = dict(
id='fake_tenant_port_id',
fixed_ips=[dict(ip_address='fake_public_port_ip_address')])
service_subnet = dict(id='fake_service_subnet')
instance = self._init_neutron_network_plugin()
instance.connect_share_server_to_tenant_network = True
self.mock_object(instance, '_get_service_network_id',
mock.Mock(return_value='fake_service_network_id'))
self.mock_object(
service_instance.neutron.API, 'admin_project_id',
mock.Mock(return_value=admin_project_id))
self.mock_object(
service_instance.neutron.API, 'create_port',
mock.Mock(side_effect=fake_create_port))
self.mock_object(
service_instance.neutron.API, 'subnet_create',
mock.Mock(return_value=service_subnet))
self.mock_object(
instance, 'setup_connectivity_with_service_instances',
mock.Mock(return_value=service_subnet))
self.mock_object(
instance, '_get_cidr_for_subnet', mock.Mock(return_value=cidr))
self.mock_object(
instance, '_get_service_subnet', mock.Mock(return_value=None))
expected = {
'ip_address': self.public_port['fixed_ips'][0]['ip_address'],
'public_port': self.public_port,
'service_port': self.service_port,
'service_subnet': service_subnet,
'ports': [self.public_port, self.service_port],
'nics': [{'port-id': self.public_port['id']},
{'port-id': self.service_port['id']}]}
result = instance.setup_network(network_info)
self.assertEqual(expected, result)
(instance.setup_connectivity_with_service_instances.
assert_called_once_with())
instance._get_service_subnet.assert_called_once_with(mock.ANY)
instance._get_cidr_for_subnet.assert_called_once_with()
self.assertTrue(service_instance.neutron.API.subnet_create.called)
self.assertTrue(service_instance.neutron.API.create_port.called)
def test_setup_network_and_connect_share_server_to_tenant_net_admin(self):
def fake_create_port(*aargs, **kwargs):
if aargs[1] == 'fake_admin_network_id':
return self.admin_port
elif aargs[1] == 'fake_tenant_network_id':
return self.public_port
else:
raise exception.ManilaException('Got unexpected data')
admin_project_id = 'fake_admin_project_id'
network_info = {
'neutron_net_id': 'fake_tenant_network_id',
'neutron_subnet_id': 'fake_tenant_subnet_id'}
self.admin_port = {
'id': 'fake_admin_port_id',
'fixed_ips': [{'ip_address': 'fake_admin_port_ip_address'}]}
self.public_port = {
'id': 'fake_tenant_port_id',
'fixed_ips': [{'ip_address': 'fake_public_port_ip_address'}]}
instance = self._init_neutron_network_plugin()
instance.use_admin_port = True
instance.use_service_network = False
instance.admin_network_id = 'fake_admin_network_id'
instance.admin_subnet_id = 'fake_admin_subnet_id'
instance.connect_share_server_to_tenant_network = True
self.mock_object(
service_instance.neutron.API, 'admin_project_id',
mock.Mock(return_value=admin_project_id))
self.mock_object(
service_instance.neutron.API, 'create_port',
mock.Mock(side_effect=fake_create_port))
self.mock_object(
instance, 'setup_connectivity_with_service_instances')
expected = {
'ip_address': self.public_port['fixed_ips'][0]['ip_address'],
'public_port': self.public_port,
'admin_port': self.admin_port,
'ports': [self.public_port, self.admin_port],
'nics': [{'port-id': self.public_port['id']},
{'port-id': self.admin_port['id']}]}
result = instance.setup_network(network_info)
self.assertEqual(expected, result)
(instance.setup_connectivity_with_service_instances.
assert_called_once_with())
self.assertTrue(service_instance.neutron.API.create_port.called)
@ddt.data(None, exception.NetworkException(code=400))
def test_setup_network_using_router_success(self, return_obj):
admin_project_id = 'fake_admin_project_id'
network_info = dict(
neutron_net_id='fake_tenant_network_id',
neutron_subnet_id='fake_tenant_subnet_id')
cidr = '13.0.0.0/24'
self.admin_port = {
'id': 'fake_admin_port_id',
'fixed_ips': [{'ip_address': 'fake_admin_port_ip_address'}]}
self.service_port = dict(
id='fake_service_port_id',
fixed_ips=[dict(ip_address='fake_service_port_ip_address')])
service_subnet = dict(id='fake_service_subnet')
instance = self._init_neutron_network_plugin()
instance.use_admin_port = True
instance.admin_network_id = 'fake_admin_network_id'
instance.admin_subnet_id = 'fake_admin_subnet_id'
instance.connect_share_server_to_tenant_network = False
self.mock_object(instance, '_get_service_network_id',
mock.Mock(return_value='fake_service_network_id'))
router = dict(id='fake_router_id')
self.mock_object(
service_instance.neutron.API, 'admin_project_id',
mock.Mock(return_value=admin_project_id))
self.mock_object(
service_instance.neutron.API, 'create_port',
mock.Mock(side_effect=[self.service_port, self.admin_port]))
self.mock_object(
service_instance.neutron.API, 'subnet_create',
mock.Mock(return_value=service_subnet))
self.mock_object(
instance, '_get_private_router', mock.Mock(return_value=router))
self.mock_object(
service_instance.neutron.API, 'router_add_interface',
mock.Mock(side_effect=return_obj))
self.mock_object(instance, 'setup_connectivity_with_service_instances')
self.mock_object(
instance, '_get_cidr_for_subnet', mock.Mock(return_value=cidr))
self.mock_object(
instance, '_get_service_subnet', mock.Mock(return_value=None))
expected = {
'ip_address': self.service_port['fixed_ips'][0]['ip_address'],
'service_port': self.service_port,
'service_subnet': service_subnet,
'admin_port': self.admin_port, 'router': router,
'ports': [self.service_port, self.admin_port],
'nics': [{'port-id': self.service_port['id']},
{'port-id': self.admin_port['id']}]}
result = instance.setup_network(network_info)
self.assertEqual(expected, result)
(instance.setup_connectivity_with_service_instances.
assert_called_once_with())
instance._get_service_subnet.assert_called_once_with(mock.ANY)
instance._get_cidr_for_subnet.assert_called_once_with()
self.assertTrue(service_instance.neutron.API.subnet_create.called)
self.assertTrue(service_instance.neutron.API.create_port.called)
instance._get_private_router.assert_called_once_with(
network_info['neutron_net_id'], network_info['neutron_subnet_id'])
(service_instance.neutron.API.router_add_interface.
assert_called_once_with(router['id'], service_subnet['id']))
def test_setup_network_using_router_addon_of_interface_failed(self):
network_info = dict(
neutron_net_id='fake_tenant_network_id',
neutron_subnet_id='fake_tenant_subnet_id')
service_subnet = dict(id='fake_service_subnet')
instance = self._init_neutron_network_plugin()
instance.connect_share_server_to_tenant_network = False
self.mock_object(instance, '_get_service_network_id',
mock.Mock(return_value='fake_service_network_id'))
router = dict(id='fake_router_id')
self.mock_object(
instance, '_get_private_router', mock.Mock(return_value=router))
self.mock_object(
service_instance.neutron.API, 'router_add_interface',
mock.Mock(side_effect=exception.NetworkException(code=500)))
self.mock_object(
instance, '_get_service_subnet',
mock.Mock(return_value=service_subnet))
self.assertRaises(
exception.NetworkException,
instance.setup_network, network_info)
instance._get_service_subnet.assert_called_once_with(mock.ANY)
instance._get_private_router.assert_called_once_with(
network_info['neutron_net_id'], network_info['neutron_subnet_id'])
(service_instance.neutron.API.router_add_interface.
assert_called_once_with(router['id'], service_subnet['id']))
def test_setup_network_using_router_connectivity_verification_fail(self):
admin_project_id = 'fake_admin_project_id'
network_info = dict(
neutron_net_id='fake_tenant_network_id',
neutron_subnet_id='fake_tenant_subnet_id')
cidr = '13.0.0.0/24'
self.service_port = dict(
id='fake_service_port_id',
fixed_ips=[dict(ip_address='fake_service_port_ip_address')])
service_subnet = dict(id='fake_service_subnet')
instance = self._init_neutron_network_plugin()
instance.connect_share_server_to_tenant_network = False
self.mock_object(instance, '_get_service_network_id',
mock.Mock(return_value='fake_service_network_id'))
router = dict(id='fake_router_id')
self.mock_object(
service_instance.neutron.API, 'admin_project_id',
mock.Mock(return_value=admin_project_id))
self.mock_object(
service_instance.neutron.API, 'create_port',
mock.Mock(return_value=self.service_port))
self.mock_object(
service_instance.neutron.API, 'subnet_create',
mock.Mock(return_value=service_subnet))
self.mock_object(service_instance.neutron.API, 'delete_port')
self.mock_object(
instance, '_get_private_router', mock.Mock(return_value=router))
self.mock_object(
service_instance.neutron.API, 'router_add_interface')
self.mock_object(
instance, 'setup_connectivity_with_service_instances',
mock.Mock(side_effect=exception.ManilaException('Fake')))
self.mock_object(
instance, '_get_cidr_for_subnet', mock.Mock(return_value=cidr))
self.mock_object(
instance, '_get_service_subnet', mock.Mock(return_value=None))
self.assertRaises(
exception.ManilaException, instance.setup_network, network_info)
(instance.setup_connectivity_with_service_instances.
assert_called_once_with())
instance._get_service_subnet.assert_called_once_with(mock.ANY)
instance._get_cidr_for_subnet.assert_called_once_with()
self.assertTrue(service_instance.neutron.API.subnet_create.called)
self.assertTrue(service_instance.neutron.API.create_port.called)
instance._get_private_router.assert_called_once_with(
network_info['neutron_net_id'], network_info['neutron_subnet_id'])
(service_instance.neutron.API.router_add_interface.
assert_called_once_with(router['id'], service_subnet['id']))
service_instance.neutron.API.delete_port.assert_has_calls([
mock.call(self.service_port['id'])])
def test__get_cidr_for_subnet_success(self):
expected = (
fake_get_config_option('service_network_cidr').split('/')[0] +
'/' + str(
fake_get_config_option('service_network_division_mask')))
instance = self._init_neutron_network_plugin()
self.mock_object(
instance, '_get_all_service_subnets', mock.Mock(return_value=[]))
result = instance._get_cidr_for_subnet()
self.assertEqual(expected, result)
instance._get_all_service_subnets.assert_called_once_with()
def test__get_cidr_for_subnet_failure(self):
subnets = []
serv_cidr = netaddr.IPNetwork(
fake_get_config_option('service_network_cidr'))
division_mask = fake_get_config_option('service_network_division_mask')
for subnet in serv_cidr.subnet(division_mask):
subnets.append(dict(cidr=str(subnet.cidr)))
instance = self._init_neutron_network_plugin()
self.mock_object(
instance, '_get_all_service_subnets',
mock.Mock(return_value=subnets))
self.assertRaises(
exception.ServiceInstanceException,
instance._get_cidr_for_subnet)
instance._get_all_service_subnets.assert_called_once_with()
def test_setup_connectivity_with_service_instances(self):
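        # Both the service and admin ports should be created, plugged through
        # the VIF driver and given the expected L3 addresses.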
instance = self._init_neutron_network_plugin()
instance.use_admin_port = True
instance.admin_network_id = 'fake_admin_network_id'
instance.admin_subnet_id = 'fake_admin_subnet_id'
interface_name_service = 'fake_interface_name_service'
interface_name_admin = 'fake_interface_name_admin'
fake_division_mask = fake_get_config_option(
'service_network_division_mask')
fake_subnet_service = fake_network.FakeSubnet(
cidr='10.254.0.0/%s' % fake_division_mask)
fake_subnet_admin = fake_network.FakeSubnet(id='fake_admin_subnet_id',
cidr='10.0.0.0/24')
fake_service_port = fake_network.FakePort(fixed_ips=[
{'subnet_id': fake_subnet_service['id'],
'ip_address': '10.254.0.2'}], mac_address='fake_mac_address')
fake_admin_port = fake_network.FakePort(fixed_ips=[
{'subnet_id': fake_subnet_admin['id'], 'ip_address': '10.0.0.4'}],
mac_address='fake_mac_address')
self.mock_object(instance, '_get_service_port',
mock.Mock(side_effect=[fake_service_port,
fake_admin_port]))
self.mock_object(instance, '_add_fixed_ips_to_service_port',
mock.Mock(return_value=fake_service_port))
self.mock_object(instance.vif_driver, 'get_device_name',
mock.Mock(side_effect=[interface_name_service,
interface_name_admin]))
self.mock_object(instance.neutron_api, 'get_subnet',
mock.Mock(side_effect=[fake_subnet_service,
fake_subnet_admin,
fake_subnet_admin]))
self.mock_object(instance.vif_driver, 'plug')
device_mock = mock.Mock()
self.mock_object(service_instance.ip_lib, 'IPDevice',
mock.Mock(return_value=device_mock))
instance.setup_connectivity_with_service_instances()
instance._get_service_port.assert_has_calls([
mock.call(instance.service_network_id, None, 'manila-share'),
mock.call('fake_admin_network_id', 'fake_admin_subnet_id',
'manila-admin-share')])
instance.vif_driver.get_device_name.assert_has_calls([
mock.call(fake_service_port), mock.call(fake_admin_port)])
instance.vif_driver.plug.assert_has_calls([
mock.call(interface_name_service, fake_service_port['id'],
fake_service_port['mac_address']),
mock.call(interface_name_admin, fake_admin_port['id'],
fake_admin_port['mac_address'])])
instance.neutron_api.get_subnet.assert_has_calls([
mock.call(fake_subnet_service['id']),
mock.call(fake_subnet_admin['id'])])
instance.vif_driver.init_l3.assert_has_calls([
mock.call(interface_name_service,
['10.254.0.2/%s' % fake_division_mask],
clear_cidrs=[]),
mock.call(interface_name_admin, ['10.0.0.4/24'],
clear_cidrs=[fake_subnet_admin['cidr']])])
service_instance.ip_lib.IPDevice.assert_has_calls([
mock.call(interface_name_service),
mock.call(interface_name_admin)])
def test__get_service_port_none_exist(self):
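        # No matching port exists on this host, so a new one must be created
        # and no fixed-IP update should happen.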
instance = self._init_neutron_network_plugin()
admin_project_id = 'fake_admin_project_id'
fake_port_values = {'device_id': 'manila-share',
'binding:host_id': 'fake_host'}
self.mock_object(
service_instance.neutron.API, 'admin_project_id',
mock.Mock(return_value=admin_project_id))
fake_service_port = fake_network.FakePort(device_id='manila-share')
self.mock_object(instance.neutron_api, 'list_ports',
mock.Mock(return_value=[]))
self.mock_object(service_instance.socket, 'gethostname',
mock.Mock(return_value='fake_host'))
self.mock_object(instance.neutron_api, 'create_port',
mock.Mock(return_value=fake_service_port))
self.mock_object(instance.neutron_api, 'update_port_fixed_ips',
mock.Mock(return_value=fake_service_port))
result = instance._get_service_port(instance.service_network_id,
None, 'manila-share')
instance.neutron_api.list_ports.assert_called_once_with(
**fake_port_values)
instance.neutron_api.create_port.assert_called_once_with(
instance.admin_project_id, instance.service_network_id,
device_id='manila-share', device_owner='manila:share',
host_id='fake_host', subnet_id=None, port_security_enabled=False)
service_instance.socket.gethostname.assert_called_once_with()
self.assertFalse(instance.neutron_api.update_port_fixed_ips.called)
self.assertEqual(fake_service_port, result)
def test__get_service_port_one_exist_on_same_host(self):
instance = self._init_neutron_network_plugin()
fake_port_values = {'device_id': 'manila-share',
'binding:host_id': 'fake_host'}
fake_service_port = fake_network.FakePort(**fake_port_values)
self.mock_object(service_instance.socket, 'gethostname',
mock.Mock(return_value='fake_host'))
self.mock_object(instance.neutron_api, 'list_ports',
mock.Mock(return_value=[fake_service_port]))
self.mock_object(instance.neutron_api, 'create_port',
mock.Mock(return_value=fake_service_port))
self.mock_object(instance.neutron_api, 'update_port_fixed_ips',
mock.Mock(return_value=fake_service_port))
result = instance._get_service_port(instance.service_network_id,
None, 'manila-share')
instance.neutron_api.list_ports.assert_called_once_with(
**fake_port_values)
self.assertFalse(instance.neutron_api.create_port.called)
self.assertFalse(instance.neutron_api.update_port_fixed_ips.called)
self.assertEqual(fake_service_port, result)
def test__get_service_port_one_exist_on_different_host(self):
instance = self._init_neutron_network_plugin()
admin_project_id = 'fake_admin_project_id'
fake_port = {'device_id': 'manila-share',
'binding:host_id': 'fake_host'}
self.mock_object(
service_instance.neutron.API, 'admin_project_id',
mock.Mock(return_value=admin_project_id))
fake_service_port = fake_network.FakePort(**fake_port)
self.mock_object(instance.neutron_api, 'list_ports',
mock.Mock(return_value=[]))
self.mock_object(service_instance.socket, 'gethostname',
mock.Mock(return_value='fake_host'))
self.mock_object(instance.neutron_api, 'create_port',
mock.Mock(return_value=fake_service_port))
self.mock_object(instance.neutron_api, 'update_port_fixed_ips',
mock.Mock(return_value=fake_service_port))
result = instance._get_service_port(instance.service_network_id,
None, 'manila-share')
instance.neutron_api.list_ports.assert_called_once_with(
**fake_port)
instance.neutron_api.create_port.assert_called_once_with(
instance.admin_project_id, instance.service_network_id,
device_id='manila-share', device_owner='manila:share',
host_id='fake_host', subnet_id=None, port_security_enabled=False)
service_instance.socket.gethostname.assert_called_once_with()
self.assertFalse(instance.neutron_api.update_port_fixed_ips.called)
self.assertEqual(fake_service_port, result)
def test__get_service_port_two_exist_on_same_host(self):
instance = self._init_neutron_network_plugin()
fake_service_port = fake_network.FakePort(**{
'device_id': 'manila-share', 'binding:host_id': 'fake_host'})
self.mock_object(
instance.neutron_api, 'list_ports',
mock.Mock(return_value=[fake_service_port, fake_service_port]))
self.mock_object(service_instance.socket, 'gethostname',
mock.Mock(return_value='fake_host'))
self.mock_object(instance.neutron_api, 'create_port',
mock.Mock(return_value=fake_service_port))
self.assertRaises(
exception.ServiceInstanceException, instance._get_service_port,
instance.service_network_id, None, 'manila-share')
self.assertFalse(instance.neutron_api.create_port.called)
def test__add_fixed_ips_to_service_port(self):
ip_address1 = '13.0.0.13'
subnet_id1 = 'fake_subnet_id1'
subnet_id2 = 'fake_subnet_id2'
port = dict(id='fooport', fixed_ips=[dict(
subnet_id=subnet_id1, ip_address=ip_address1)])
expected = mock.Mock()
network = dict(subnets=[subnet_id1, subnet_id2])
instance = self._init_neutron_network_plugin()
self.mock_object(instance.neutron_api, 'get_network',
mock.Mock(return_value=network))
self.mock_object(instance.neutron_api, 'update_port_fixed_ips',
mock.Mock(return_value=expected))
result = instance._add_fixed_ips_to_service_port(port)
self.assertEqual(expected, result)
instance.neutron_api.get_network.assert_called_once_with(
instance.service_network_id)
instance.neutron_api.update_port_fixed_ips.assert_called_once_with(
port['id'], dict(fixed_ips=[
dict(subnet_id=subnet_id1, ip_address=ip_address1),
dict(subnet_id=subnet_id2)]))
def test__get_private_router_success(self):
instance = self._init_neutron_network_plugin()
network = fake_network.FakeNetwork()
subnet = fake_network.FakeSubnet(gateway_ip='fake_ip')
router = fake_network.FakeRouter(id='fake_router_id')
port = fake_network.FakePort(fixed_ips=[
dict(subnet_id=subnet['id'],
ip_address=subnet['gateway_ip'])],
device_id=router['id'])
self.mock_object(instance.neutron_api, 'get_subnet',
mock.Mock(return_value=subnet))
self.mock_object(instance.neutron_api, 'list_ports',
mock.Mock(return_value=[port]))
self.mock_object(instance.neutron_api, 'show_router',
mock.Mock(return_value=router))
result = instance._get_private_router(network['id'], subnet['id'])
self.assertEqual(router, result)
instance.neutron_api.get_subnet.assert_called_once_with(subnet['id'])
instance.neutron_api.list_ports.assert_called_once_with(
network_id=network['id'])
instance.neutron_api.show_router.assert_called_once_with(router['id'])
def test__get_private_router_no_gateway(self):
instance = self._init_neutron_network_plugin()
subnet = fake_network.FakeSubnet(gateway_ip='')
self.mock_object(instance.neutron_api, 'get_subnet',
mock.Mock(return_value=subnet))
self.assertRaises(
exception.ServiceInstanceException,
instance._get_private_router, 'fake_network_id', subnet['id'])
instance.neutron_api.get_subnet.assert_called_once_with(
subnet['id'])
def test__get_private_router_subnet_is_not_attached_to_the_router(self):
instance = self._init_neutron_network_plugin()
network_id = 'fake_network_id'
subnet = fake_network.FakeSubnet(gateway_ip='fake_ip')
self.mock_object(instance.neutron_api, 'get_subnet',
mock.Mock(return_value=subnet))
self.mock_object(instance.neutron_api, 'list_ports',
mock.Mock(return_value=[]))
self.assertRaises(
exception.ServiceInstanceException,
instance._get_private_router, network_id, subnet['id'])
instance.neutron_api.get_subnet.assert_called_once_with(
subnet['id'])
instance.neutron_api.list_ports.assert_called_once_with(
network_id=network_id)
def test__get_service_subnet_none_found(self):
subnet_name = 'fake_subnet_name'
instance = self._init_neutron_network_plugin()
self.mock_object(instance, '_get_all_service_subnets',
mock.Mock(return_value=[]))
result = instance._get_service_subnet(subnet_name)
self.assertIsNone(result)
instance._get_all_service_subnets.assert_called_once_with()
def test__get_service_subnet_unused_found(self):
subnet_name = 'fake_subnet_name'
subnets = [fake_network.FakeSubnet(id='foo', name=''),
fake_network.FakeSubnet(id='bar', name='quuz')]
instance = self._init_neutron_network_plugin()
self.mock_object(instance.neutron_api, 'update_subnet')
self.mock_object(instance, '_get_all_service_subnets',
mock.Mock(return_value=subnets))
result = instance._get_service_subnet(subnet_name)
self.assertEqual(subnets[0], result)
instance._get_all_service_subnets.assert_called_once_with()
instance.neutron_api.update_subnet.assert_called_once_with(
subnets[0]['id'], subnet_name)
def test__get_service_subnet_one_found(self):
subnet_name = 'fake_subnet_name'
subnets = [fake_network.FakeSubnet(id='foo', name='quuz'),
fake_network.FakeSubnet(id='bar', name=subnet_name)]
instance = self._init_neutron_network_plugin()
self.mock_object(instance, '_get_all_service_subnets',
mock.Mock(return_value=subnets))
result = instance._get_service_subnet(subnet_name)
self.assertEqual(subnets[1], result)
instance._get_all_service_subnets.assert_called_once_with()
def test__get_service_subnet_two_found(self):
subnet_name = 'fake_subnet_name'
subnets = [fake_network.FakeSubnet(id='foo', name=subnet_name),
fake_network.FakeSubnet(id='bar', name=subnet_name)]
instance = self._init_neutron_network_plugin()
self.mock_object(instance, '_get_all_service_subnets',
mock.Mock(return_value=subnets))
self.assertRaises(
exception.ServiceInstanceException,
instance._get_service_subnet, subnet_name)
instance._get_all_service_subnets.assert_called_once_with()
def test__get_all_service_subnets(self):
subnet_id1 = 'fake_subnet_id1'
subnet_id2 = 'fake_subnet_id2'
instance = self._init_neutron_network_plugin()
network = dict(subnets=[subnet_id1, subnet_id2])
self.mock_object(instance.neutron_api, 'get_subnet',
mock.Mock(side_effect=lambda s_id: dict(id=s_id)))
self.mock_object(instance.neutron_api, 'get_network',
mock.Mock(return_value=network))
result = instance._get_all_service_subnets()
self.assertEqual([dict(id=subnet_id1), dict(id=subnet_id2)], result)
instance.neutron_api.get_network.assert_called_once_with(
instance.service_network_id)
instance.neutron_api.get_subnet.assert_has_calls([
mock.call(subnet_id1), mock.call(subnet_id2)])
| {
"content_hash": "51f2ae015ddee294a0dc783895e6e81d",
"timestamp": "",
"source": "github",
"line_count": 2393,
"max_line_length": 79,
"avg_line_length": 46.69410781445884,
"alnum_prop": 0.5971505025103142,
"repo_name": "openstack/manila",
"id": "fcd500466eb7aeb2fe3c8d2d25e6e638e6b637c4",
"size": "112407",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "manila/tests/share/drivers/test_service_instance.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "953"
},
{
"name": "Python",
"bytes": "12728998"
},
{
"name": "Shell",
"bytes": "107601"
}
],
"symlink_target": ""
} |
from odoo import fields, models
class ProductTemplate(models.Model):
_inherit = "product.template"
pos_notes = fields.Text("Auto-applied Note for Kitchen", translate=True)
class PosProductNotes(models.Model):
_name = "pos.product_notes"
_description = "POS Product Notes"
sequence = fields.Integer(string="Sequence")
name = fields.Char(string="Note")
pos_category_ids = fields.Many2many(
"pos.category",
string="Point of Sale Categories",
help="The note will be available for this group of POS categories. "
"Leave the field empty so that the note is available for all POS categories.",
)
| {
"content_hash": "ea1897fb51874b6ec20e68502129dd7a",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 86,
"avg_line_length": 32.9,
"alnum_prop": 0.6854103343465046,
"repo_name": "it-projects-llc/pos-addons",
"id": "d6b17bf792a955827f49ea710da2422247d7cca0",
"size": "658",
"binary": false,
"copies": "1",
"ref": "refs/heads/13.0",
"path": "pos_order_note/models/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "131667"
},
{
"name": "Dockerfile",
"bytes": "330"
},
{
"name": "HTML",
"bytes": "240053"
},
{
"name": "JavaScript",
"bytes": "1277518"
},
{
"name": "Python",
"bytes": "362916"
}
],
"symlink_target": ""
} |
__all__ = ['client', 'drench', 'listener', 'peer', 'reactor',
'setup', 'switchboard', 'tparser', 'visualizer']
| {
"content_hash": "9f6bf4696d9b71055c3b4f568eb39b06",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 61,
"avg_line_length": 61,
"alnum_prop": 0.5491803278688525,
"repo_name": "jefflovejapan/drench",
"id": "a80676b70ae3d272dccb8579532703dac3b2ebbd",
"size": "122",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "drench/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "42524"
}
],
"symlink_target": ""
} |
from __future__ import with_statement
import uuid
from django.conf import settings
from django.contrib.sites.models import Site
from django.core import management
from django.core.management import CommandError
from django.test.utils import override_settings
try:
from django.test.utils import override_system_checks
except ImportError:
override_system_checks = None
from django.utils.six.moves import StringIO
from cms.api import create_page, add_plugin, create_title
from cms.management.commands import cms
from cms.management.commands.subcommands.list import plugin_report
from cms.models import Page, StaticPlaceholder
from cms.models.placeholdermodel import Placeholder
from cms.models.pluginmodel import CMSPlugin
from cms.test_utils.fixtures.navextenders import NavextendersFixture
from cms.test_utils.testcases import CMSTestCase
from djangocms_text_ckeditor.cms_plugins import TextPlugin
APPHOOK = "SampleApp"
PLUGIN = "TextPlugin"
TEST_INSTALLED_APPS = [
"django.contrib.auth",
"cms",
"menus",
"sekizai",
"cms.test_utils.project.sampleapp",
"treebeard",
]
if settings.AUTH_USER_MODEL == "emailuserapp.EmailUser":
TEST_INSTALLED_APPS.append("cms.test_utils.project.emailuserapp")
if settings.AUTH_USER_MODEL == "customuserapp.User":
TEST_INSTALLED_APPS.append("cms.test_utils.project.customuserapp")
class ManagementTestCase(CMSTestCase):
@override_settings(INSTALLED_APPS=TEST_INSTALLED_APPS)
def test_list_apphooks(self):
out = StringIO()
create_page('Hello Title', "nav_playground.html", "en", apphook=APPHOOK)
self.assertEqual(Page.objects.filter(application_urls=APPHOOK).count(), 1)
command = cms.Command()
command.stdout = out
command.handle("list", "apphooks", interactive=False)
self.assertEqual(out.getvalue(), "SampleApp (draft)\n")
def test_uninstall_apphooks_without_apphook(self):
out = StringIO()
command = cms.Command()
command.stdout = out
command.handle("uninstall", "apphooks", APPHOOK, interactive=False)
self.assertEqual(out.getvalue(), "no 'SampleApp' apphooks found\n")
def test_fix_tree(self):
create_page("home", "nav_playground.html", "en")
page1 = create_page("page", "nav_playground.html", "en")
page1.depth = 3
page1.numchild = 4
page1.path = "00100010"
page1.save()
out = StringIO()
command = cms.Command()
command.stdout = out
command.handle("fix-tree", interactive=False)
self.assertEqual(out.getvalue(), 'fixing page treefixing plugin treeall done')
page1 = page1.reload()
self.assertEqual(page1.path, "0002")
self.assertEqual(page1.depth, 1)
self.assertEqual(page1.numchild, 0)
@override_settings(INSTALLED_APPS=TEST_INSTALLED_APPS)
def test_uninstall_apphooks_with_apphook(self):
out = StringIO()
create_page('Hello Title', "nav_playground.html", "en", apphook=APPHOOK)
self.assertEqual(Page.objects.filter(application_urls=APPHOOK).count(), 1)
command = cms.Command()
command.stdout = out
command.handle("uninstall", "apphooks", APPHOOK, interactive=False)
self.assertEqual(out.getvalue(), "1 'SampleApp' apphooks uninstalled\n")
self.assertEqual(Page.objects.filter(application_urls=APPHOOK).count(), 0)
@override_settings(INSTALLED_APPS=TEST_INSTALLED_APPS)
def test_list_plugins(self):
placeholder = Placeholder.objects.create(slot="test")
add_plugin(placeholder, TextPlugin, "en", body="en body")
add_plugin(placeholder, TextPlugin, "en", body="en body")
link_plugin = add_plugin(placeholder, "LinkPlugin", "en",
name="A Link", url="https://www.django-cms.org")
self.assertEqual(
CMSPlugin.objects.filter(plugin_type=PLUGIN).count(),
2)
self.assertEqual(
CMSPlugin.objects.filter(plugin_type="LinkPlugin").count(),
1)
# create a CMSPlugin with an unsaved instance
instanceless_plugin = CMSPlugin(language="en", plugin_type="TextPlugin")
instanceless_plugin.save()
# create a bogus CMSPlugin to simulate one which used to exist but
# is no longer installed
bogus_plugin = CMSPlugin(language="en", plugin_type="BogusPlugin")
bogus_plugin.save()
report = plugin_report()
# there should be reports for three plugin types
self.assertEqual(
len(report),
3)
# check the bogus plugin
bogus_plugins_report = report[0]
self.assertEqual(
bogus_plugins_report["model"],
None)
self.assertEqual(
bogus_plugins_report["type"],
u'BogusPlugin')
self.assertEqual(
bogus_plugins_report["instances"][0],
bogus_plugin)
# check the link plugin
link_plugins_report = report[1]
self.assertEqual(
link_plugins_report["model"],
link_plugin.__class__)
self.assertEqual(
link_plugins_report["type"],
u'LinkPlugin')
self.assertEqual(
link_plugins_report["instances"][0].get_plugin_instance()[0],
link_plugin)
# check the text plugins
text_plugins_report = report[2]
self.assertEqual(
text_plugins_report["model"],
TextPlugin.model)
self.assertEqual(
text_plugins_report["type"],
u'TextPlugin')
self.assertEqual(
len(text_plugins_report["instances"]),
3)
self.assertEqual(
text_plugins_report["instances"][2],
instanceless_plugin)
self.assertEqual(
text_plugins_report["unsaved_instances"],
[instanceless_plugin])
@override_settings(INSTALLED_APPS=TEST_INSTALLED_APPS)
def test_delete_orphaned_plugins(self):
placeholder = Placeholder.objects.create(slot="test")
add_plugin(placeholder, TextPlugin, "en", body="en body")
add_plugin(placeholder, TextPlugin, "en", body="en body")
add_plugin(placeholder, "LinkPlugin", "en",
name="A Link", url="https://www.django-cms.org")
instanceless_plugin = CMSPlugin(
language="en", plugin_type="TextPlugin")
instanceless_plugin.save()
# create a bogus CMSPlugin to simulate one which used to exist but
# is no longer installed
bogus_plugin = CMSPlugin(language="en", plugin_type="BogusPlugin")
bogus_plugin.save()
report = plugin_report()
# there should be reports for three plugin types
self.assertEqual(
len(report),
3)
# check the bogus plugin
bogus_plugins_report = report[0]
self.assertEqual(
len(bogus_plugins_report["instances"]),
1)
# check the link plugin
link_plugins_report = report[1]
self.assertEqual(
len(link_plugins_report["instances"]),
1)
# check the text plugins
text_plugins_report = report[2]
self.assertEqual(
len(text_plugins_report["instances"]),
3)
self.assertEqual(
len(text_plugins_report["unsaved_instances"]),
1)
management.call_command(
'cms', 'delete_orphaned_plugins',
stdout=StringIO(), interactive=False)
report = plugin_report()
# there should be reports for two plugin types (one should have been deleted)
self.assertEqual(
len(report),
2)
# check the link plugin
link_plugins_report = report[0]
self.assertEqual(
len(link_plugins_report["instances"]),
1)
# check the text plugins
text_plugins_report = report[1]
self.assertEqual(
len(text_plugins_report["instances"]),
2)
self.assertEqual(
len(text_plugins_report["unsaved_instances"]),
0)
# in Django 1.7 (but not in 1.8), call_command() runs system checks. This
# can be removed when support for 1.7 is dropped
if override_system_checks:
test_delete_orphaned_plugins = override_system_checks([])(test_delete_orphaned_plugins)
def test_uninstall_plugins_without_plugin(self):
out = StringIO()
command = cms.Command()
command.stdout = out
command.handle("uninstall", "plugins", PLUGIN, interactive=False)
self.assertEqual(out.getvalue(), "no 'TextPlugin' plugins found\n")
@override_settings(INSTALLED_APPS=TEST_INSTALLED_APPS)
def test_uninstall_plugins_with_plugin(self):
out = StringIO()
placeholder = Placeholder.objects.create(slot="test")
add_plugin(placeholder, TextPlugin, "en", body="en body")
self.assertEqual(CMSPlugin.objects.filter(plugin_type=PLUGIN).count(), 1)
command = cms.Command()
command.stdout = out
command.handle("uninstall", "plugins", PLUGIN, interactive=False)
self.assertEqual(out.getvalue(), "1 'TextPlugin' plugins uninstalled\n")
self.assertEqual(CMSPlugin.objects.filter(plugin_type=PLUGIN).count(), 0)
class PageFixtureManagementTestCase(NavextendersFixture, CMSTestCase):
def _fill_page_body(self, page, lang):
ph_en = page.placeholders.get(slot="body")
# add misc plugins
mcol1 = add_plugin(ph_en, "MultiColumnPlugin", lang, position="first-child")
add_plugin(ph_en, "ColumnPlugin", lang, position="first-child", target=mcol1)
col2 = add_plugin(ph_en, "ColumnPlugin", lang, position="first-child", target=mcol1)
mcol2 = add_plugin(ph_en, "MultiColumnPlugin", lang, position="first-child", target=col2)
add_plugin(ph_en, "ColumnPlugin", lang, position="first-child", target=mcol2)
col4 = add_plugin(ph_en, "ColumnPlugin", lang, position="first-child", target=mcol2)
# add a *nested* link plugin
add_plugin(ph_en, "LinkPlugin", lang, target=col4,
name="A Link", url="https://www.django-cms.org")
static_placeholder = StaticPlaceholder(code=str(uuid.uuid4()), site_id=1)
static_placeholder.save()
add_plugin(static_placeholder.draft, "TextPlugin", lang, body="example content")
def setUp(self):
pages = Page.objects.drafts()
for page in pages:
self._fill_page_body(page, "en")
def test_copy_langs(self):
"""
Various checks here:
        * plugins are exactly doubled, half per language with no orphaned plugins
        * the bottom-most plugins in the nesting chain maintain the same position and the same content
        * the top-most plugins are of the same type
"""
site = 1
number_start_plugins = CMSPlugin.objects.all().count()
out = StringIO()
command = cms.Command()
command.stdout = out
command.handle("copy-lang", "en", "de")
pages = Page.objects.on_site(site).drafts()
for page in pages:
self.assertEqual(set((u'en', u'de')), set(page.get_languages()))
        # This asserts that no orphaned plugins exist
self.assertEqual(CMSPlugin.objects.all().count(), number_start_plugins*2)
self.assertEqual(CMSPlugin.objects.filter(language='en').count(), number_start_plugins)
self.assertEqual(CMSPlugin.objects.filter(language='de').count(), number_start_plugins)
root_page = Page.objects.on_site(site).get_home()
root_plugins = CMSPlugin.objects.filter(placeholder=root_page.placeholders.get(slot="body"))
first_plugin_en, _ = root_plugins.get(language='en', parent=None).get_plugin_instance()
first_plugin_de, _ = root_plugins.get(language='de', parent=None).get_plugin_instance()
self.assertEqual(first_plugin_en.plugin_type, first_plugin_de.plugin_type)
link_en, _ = root_plugins.get(language='en', plugin_type='LinkPlugin').get_plugin_instance()
link_de, _ = root_plugins.get(language='de', plugin_type='LinkPlugin').get_plugin_instance()
self.assertEqual(link_en.url, link_de.url)
self.assertEqual(link_en.get_position_in_placeholder(), link_de.get_position_in_placeholder())
stack_plugins = CMSPlugin.objects.filter(placeholder=StaticPlaceholder.objects.order_by('?')[0].draft)
stack_text_en, _ = stack_plugins.get(language='en', plugin_type='TextPlugin').get_plugin_instance()
stack_text_de, _ = stack_plugins.get(language='de', plugin_type='TextPlugin').get_plugin_instance()
self.assertEqual(stack_text_en.plugin_type, stack_text_de.plugin_type)
self.assertEqual(stack_text_en.body, stack_text_de.body)
def test_copy_sites(self):
"""
Various checks here:
        * plugins are exactly doubled, half per site with no orphaned plugins
        * the bottom-most plugins in the nesting chain maintain the same position and the same content
        * the top-most plugins are of the same type
"""
site_1_pk = 1
site_2 = Site.objects.create(name='site 2')
site_2_pk = site_2.pk
phs = []
for page in Page.objects.on_site(site_1_pk).drafts():
phs.extend(page.placeholders.values_list('pk', flat=True))
number_start_plugins = CMSPlugin.objects.filter(placeholder__in=phs).count()
out = StringIO()
command = cms.Command()
command.stdout = out
command.handle("copy-site", site_1_pk, site_2_pk)
for page in Page.objects.on_site(site_1_pk).drafts():
page.publish('en')
for page in Page.objects.on_site(site_2_pk).drafts():
page.publish('en')
pages_1 = list(Page.objects.on_site(site_1_pk).drafts())
pages_2 = list(Page.objects.on_site(site_2_pk).drafts())
for index, page in enumerate(pages_1):
self.assertEqual(page.get_title('en'), pages_2[index].get_title('en'))
self.assertEqual(page.depth, pages_2[index].depth)
phs_1 = []
phs_2 = []
for page in Page.objects.on_site(site_1_pk).drafts():
phs_1.extend(page.placeholders.values_list('pk', flat=True))
for page in Page.objects.on_site(site_2_pk).drafts():
phs_2.extend(page.placeholders.values_list('pk', flat=True))
        # This asserts that no orphaned plugins exist
self.assertEqual(CMSPlugin.objects.filter(placeholder__in=phs_1).count(), number_start_plugins)
self.assertEqual(CMSPlugin.objects.filter(placeholder__in=phs_2).count(), number_start_plugins)
root_page_1 = Page.objects.on_site(site_1_pk).get_home(site_1_pk)
root_page_2 = Page.objects.on_site(site_2_pk).get_home(site_2_pk)
root_plugins_1 = CMSPlugin.objects.filter(placeholder=root_page_1.placeholders.get(slot="body"))
root_plugins_2 = CMSPlugin.objects.filter(placeholder=root_page_2.placeholders.get(slot="body"))
first_plugin_1, _ = root_plugins_1.get(language='en', parent=None).get_plugin_instance()
first_plugin_2, _ = root_plugins_2.get(language='en', parent=None).get_plugin_instance()
self.assertEqual(first_plugin_1.plugin_type, first_plugin_2.plugin_type)
link_1, _ = root_plugins_1.get(language='en', plugin_type='LinkPlugin').get_plugin_instance()
link_2, _ = root_plugins_2.get(language='en', plugin_type='LinkPlugin').get_plugin_instance()
self.assertEqual(link_1.url, link_2.url)
self.assertEqual(link_1.get_position_in_placeholder(), link_2.get_position_in_placeholder())
def test_copy_existing_title(self):
"""
        Even if a title already exists the copy is successful; the original
        title remains untouched
"""
site = 1
number_start_plugins = CMSPlugin.objects.all().count()
# create an empty title language
root_page = Page.objects.on_site(site).get_home()
create_title("de", "root page de", root_page)
out = StringIO()
command = cms.Command()
command.stdout = out
command.handle("copy-lang", "en", "de")
pages = Page.objects.on_site(site).drafts()
for page in pages:
self.assertEqual(set((u'en', u'de')), set(page.get_languages()))
# Original Title untouched
self.assertEqual("root page de", Page.objects.on_site(site).get_home().get_title("de"))
# Plugins still copied
self.assertEqual(CMSPlugin.objects.all().count(), number_start_plugins*2)
self.assertEqual(CMSPlugin.objects.filter(language='en').count(), number_start_plugins)
self.assertEqual(CMSPlugin.objects.filter(language='de').count(), number_start_plugins)
def test_copy_filled_placeholder(self):
"""
If an existing title in the target language has plugins in a placeholder
that placeholder is skipped
"""
site = 1
number_start_plugins = CMSPlugin.objects.all().count()
# create an empty title language
root_page = Page.objects.on_site(site).get_home()
create_title("de", "root page de", root_page)
ph = root_page.placeholders.get(slot="body")
add_plugin(ph, "TextPlugin", "de", body="Hello World")
out = StringIO()
command = cms.Command()
command.stdout = out
command.handle("copy-lang", "en", "de")
self.assertEqual(CMSPlugin.objects.filter(language='en').count(), number_start_plugins)
# one placeholder (with 7 plugins) is skipped, so the difference must be 6
self.assertEqual(CMSPlugin.objects.filter(language='de').count(), number_start_plugins-6)
def test_copy_filled_placeholder_force_copy(self):
"""
If an existing title in the target language has plugins in a placeholder
and the command is called with *force-copy*, the plugins are copied on
top of the existing one
"""
site = 1
number_start_plugins = CMSPlugin.objects.all().count()
# create an empty title language
root_page = Page.objects.on_site(site).get_home()
create_title("de", "root page de", root_page)
ph = root_page.placeholders.get(slot="body")
add_plugin(ph, "TextPlugin", "de", body="Hello World")
root_plugins = CMSPlugin.objects.filter(placeholder=ph)
text_de_orig, _ = root_plugins.get(language='de', plugin_type='TextPlugin').get_plugin_instance()
out = StringIO()
command = cms.Command()
command.stdout = out
command.handle("copy-lang", "en", "de", "force-copy")
CMSPlugin.objects.filter(placeholder=root_page.placeholders.get(slot="body"))
self.assertEqual(CMSPlugin.objects.filter(language='en').count(), number_start_plugins)
# we have an existing plugin in one placeholder, so we have one more
self.assertEqual(CMSPlugin.objects.filter(language='de').count(), number_start_plugins+1)
def test_copy_from_non_existing_lang(self):
"""
        If the source language is not defined on a page, that page is skipped
        and a verbose message is reported for it
"""
site = 1
out = StringIO()
command = cms.Command()
command.stdout = out
command.handle("copy-lang", "de", "fr", "verbose")
text = out.getvalue()
page_count = Page.objects.on_site(site).drafts().count() + 1
for idx in range(1, page_count):
self.assertTrue(text.find("Skipping page page%d, language de not defined" % idx) > -1)
def test_copy_site_safe(self):
"""
Check that copy of languages on one site does not interfere with other
sites
"""
site_other = 1
site_active = 2
origina_site1_langs = {}
number_start_plugins = CMSPlugin.objects.all().count()
site_obj = Site.objects.create(domain="sample2.com", name="sample2.com", pk=site_active)
for page in Page.objects.on_site(1).drafts():
origina_site1_langs[page.pk] = set(page.get_languages())
p1 = create_page('page1', published=True, in_navigation=True, language='de', template='nav_playground.html', site=site_obj)
create_page('page4', published=True, in_navigation=True, language='de', template='nav_playground.html', site=site_obj)
create_page('page2', published=True, in_navigation=True, parent=p1, language='de', template='nav_playground.html', site=site_obj)
for page in Page.objects.on_site(site_active).drafts():
self._fill_page_body(page, 'de')
number_site2_plugins = CMSPlugin.objects.all().count() - number_start_plugins
out = StringIO()
command = cms.Command()
command.stdout = out
command.handle("copy-lang", "de", "fr", "site=%s" % site_active)
for page in Page.objects.on_site(site_other).drafts():
self.assertEqual(origina_site1_langs[page.pk], set(page.get_languages()))
for page in Page.objects.on_site(site_active).drafts():
self.assertEqual(set(('de', 'fr')), set(page.get_languages()))
# plugins for site 1
self.assertEqual(CMSPlugin.objects.filter(language='en').count(), number_start_plugins)
# plugins for site 2 de
self.assertEqual(CMSPlugin.objects.filter(language='de').count(), number_site2_plugins)
# plugins for site 2 fr
self.assertEqual(CMSPlugin.objects.filter(language='fr').count(), number_site2_plugins)
# global number of plugins
self.assertEqual(CMSPlugin.objects.all().count(), number_start_plugins + number_site2_plugins*2)
def test_copy_bad_languages(self):
out = StringIO()
command = cms.Command()
command.stdout = out
with self.assertRaises(CommandError) as command_error:
command.handle("copy-lang", "it", "fr")
self.assertEqual(str(command_error.exception), 'Both languages have to be present in settings.LANGUAGES and settings.CMS_LANGUAGES')
| {
"content_hash": "ba48bfbb00a7923809757e350bbc0794",
"timestamp": "",
"source": "github",
"line_count": 538,
"max_line_length": 140,
"avg_line_length": 41.53903345724907,
"alnum_prop": 0.6347771612672275,
"repo_name": "dhorelik/django-cms",
"id": "abb0bdcb944d75a4ed7d457c161bee40d98d75ab",
"size": "22372",
"binary": false,
"copies": "3",
"ref": "refs/heads/develop",
"path": "cms/tests/test_management.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "127976"
},
{
"name": "HTML",
"bytes": "104044"
},
{
"name": "JavaScript",
"bytes": "664079"
},
{
"name": "Logos",
"bytes": "10461"
},
{
"name": "Python",
"bytes": "3588457"
},
{
"name": "XSLT",
"bytes": "5917"
}
],
"symlink_target": ""
} |
import cv2
import numpy as np
# NOTE: four_point_transform is called in the __main__ block below but is never defined
# or imported in this file; it is assumed here to come from the imutils package (adjust
# the import if a local helper module provides the function instead).
from imutils.perspective import four_point_transform
def rectify(h):
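    # Order the four corner points as top-left, top-right, bottom-right, bottom-left.
    # This matches the destination points passed to cv2.getPerspectiveTransform below:
    # the top-left corner has the smallest x + y sum, the bottom-right the largest,
    # the top-right the smallest y - x difference and the bottom-left the largest.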
h = h.reshape((4,2))
hnew = np.zeros((4,2),dtype = np.float32)
add = h.sum(1)
hnew[0] = h[np.argmin(add)]
hnew[2] = h[np.argmax(add)]
diff = np.diff(h,axis = 1)
hnew[1] = h[np.argmin(diff)]
hnew[3] = h[np.argmax(diff)]
return hnew
def check_include(centre_list, x_centre, y_centre):
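    # Return False when (x_centre, y_centre) lies within 10 pixels (in both axes) of a
    # centre that is already recorded, i.e. it is a duplicate; return True otherwise.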
for point in centre_list:
x_difference = point[0] - x_centre
y_difference = point[1] - y_centre
if abs(x_difference) < 10 and abs(y_difference) < 10:
return False
return True
def find_centre(cnts):
    # For each contour (a list of points describing one rectangle), collect its x and y
    # coordinates and take the midpoint of the extremes as the rectangle's centre.
centre_list = []
for cnt in cnts:
x_axis = []
y_axis = []
for point in cnt:
x_axis.append(point[0][0])
y_axis.append(point[0][1])
# print cnts[0][0][0][0]
x_axis = sorted(x_axis)
y_axis = sorted(y_axis)
x_centre = int((x_axis[0] + x_axis[-1]) / 2)
y_centre = int((y_axis[0] + y_axis[-1]) / 2)
# print "The smallest x coordinate is",x_axis[0]
# print "The smallest y coordinate is",y_axis[0]
# print "The biggest x coordinate is",x_axis[-1]
# print "The biggest y coordinate is",y_axis[-1]
# print "The centre of this rectangle is (%d,%d)" %(x_centre, y_centre)
if (check_include(centre_list, x_centre, y_centre)):
centre_list.append((x_centre, y_centre))
# print "The centre of this rectangle is (%d,%d)" %(x_centre, y_centre)
return centre_list
def process_centre_list(centre_list):
    # This function groups the answer-area rectangles that lie on the same row into one
    # list, and collects all of the rows in an outer list (so the result is a 2D list).
    # centre_list is ordered by y-coordinate from small to large.
    # On this particular sheet every row holds three questions and each question has
    # four rectangles. Within one row the y-coordinates are almost identical, so the
    # difference between consecutive y-coordinates tells us whether two rectangles
    # belong to the same row.
    # current_total_delta is the total y-difference accumulated within one row.
    # current_total_delta_copy stores the previous total inside the for loop.
    # current_average_number is the number of rectangles used for the running average.
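    # Simplified illustration with hypothetical coordinates (three rectangles per row
    # instead of twelve), not taken from a real sheet:
    #   centre_list sorted by y: [(100, 50), (160, 52), (220, 51), (100, 210), (160, 212), ...]
    #   expected row_list:       [[(100, 50), (160, 52), (220, 51)], [(100, 210), (160, 212), ...]]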
current_total_delta = 0
current_total_delta_copy = 0
current_average_number = 1
# current_average_delta = current_total_delta/current_average_number
# current_average_delta_copy tries to store the old data.
current_average_delta = 0
current_average_delta_copy = 0
# row_list is a list of column_list
# column_list is a list of point of every line of answer area
row_list = []
column_list = []
for i in range(len(centre_list) - 1):
delta_y1 = (centre_list[i + 1][1] - centre_list[i][1])
# print delta_y1
current_total_delta_copy = current_total_delta
current_total_delta += delta_y1
current_average_delta = 1.0 * current_total_delta / current_average_number
current_average_number += 1
if current_average_delta > current_average_delta_copy * 3 and current_average_delta_copy != 0:
# print "this is average number ",current_average_number
# print "This is current_average_delta " , current_average_delta
# print "This is current_average_delta_copy " , current_average_delta_copy
current_total_delta = current_total_delta_copy # restore total delta from copy
column_list.append(centre_list[i])
row_list.append(column_list)
column_list = []
current_total_delta = 0
current_total_delta_copy = 0
current_average_number = 1
continue
column_list.append(centre_list[i])
current_average_delta_copy = current_average_delta
return row_list
# This function finds the answers the student chose.
# centre_list: list of (x, y) centre coordinates of the rectangles.
# thresh1: the thresholded (binary) image.
def find_answer(centre_list, thresh1):
    # Each point is the centre of a rectangle. We inspect an 80*80 square around it to
    # detect whether it contains dark (handwriting) pixels.
for point in centre_list:
px = 0
x_start, x_end = point[0] - 40, point[0] + 40
y_start, y_end = point[1] - 40, point[1] + 40
for x in range(x_start, x_end):
for y in range(y_start, y_end):
px += thresh1[y, x]
# print "this is pixel " , px
        # 1532000 is a threshold: a pixel sum below it means the student has handwriting
        # in this region.
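        # For reference (simple arithmetic, not from the original code): a fully white
        # 80*80 window of 8-bit pixels sums to 80 * 80 * 255 = 1,632,000, so a sum below
        # 1,532,000 means roughly 6% or more of the window is dark.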
if px < 1532000:
cv2.circle(thresh1, (x - 40, y - 40), 40, (0, 0, 0))
# This function reconstructs the answer rectangles that findContours failed to detect,
# so that every row ends up with the same number of centre points.
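# Illustrative example with hypothetical coordinates (not taken from a real sheet):
#   base_list (longest, complete row): [(100, 50), (160, 50), (220, 50)]
#   row_list with one missing centre:  [(100, 210), (220, 211)]
#   reconstructed row:                 [(100, 210), (160, 210), (220, 211)]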
def find_missing_rectangle(centre_list, centre_list_col, x_uncertainty, y_uncertainty):
row_list = []
total_list = []
# print centre_list_col
base = centre_list_col[0][1] # use column point as the base
y_max = base + y_uncertainty # add base and y_uncertainty
for i in range(len(centre_list_col)):
if centre_list_col[i][1] < y_max:
row_list.append(centre_list_col[i])
else:
# in this case, we end up one line, and change to another line
# so I set a new base.
y_max = centre_list_col[i][1] + y_uncertainty
total_list.append(row_list)
row_list = [] # renew the row_list
# add the first element of next line into new row_list
row_list.append(centre_list_col[i])
# add final row list into total list.
total_list.append(row_list)
# ============================================================
# for test
# ============================================================
# sum = 0
# for i in range(len(total_list)):
# # pass
# print sorted(total_list[i])
# print "length is ", len(total_list[i])
# sum += len(total_list[i])
# print("\n")
# # print "\n"
# # print(total_list)
# print sum
# ============================================================
# end test
# ============================================================
# to get the max_length of a row of question.
# and then get a base_list of row_list
max_length = len(total_list[0])
base_list = []
for row_list in total_list:
if len(row_list) > max_length:
max_length = len(row_list)
base_list = row_list
# print "length of half rectangle is ", x_uncertainty
total_list_copy = []
# sort base list
base_list = sorted(base_list)
for row_list in total_list:
# print "this is row_list" , row_list
# print '\n'
row_list = sorted(row_list)
if len(row_list) == max_length:
total_list_copy.append(row_list)
continue
for i in range(max_length):
try:
base = base_list[i][0] - x_uncertainty
if row_list[i][0] > base:
x_axis = base_list[i][0]
y_axis = row_list[0][1]
row_list.insert(i, (x_axis, y_axis))
centre_list.append((x_axis, y_axis))
print "length of row list is ", len(row_list)
if len(row_list) == max_length:
total_list_copy.append(row_list)
break
except:
x_axis = base_list[i][0]
y_axis = row_list[0][1]
row_list.insert(i, (x_axis, y_axis))
centre_list.append((x_axis, y_axis))
if len(row_list) == max_length:
total_list_copy.append(row_list)
break
return total_list_copy
# answer_list is a list with one element per row of the answer sheet; each element is
# itself a list of the centre points of the rectangles in that row.
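# Illustrative shape, assuming three questions per row and four choices per question
# (hypothetical values, 12 centre points per row):
#   answer_list = [
#       [(x0, y0), (x1, y0), ..., (x11, y0)],   # row 0
#       [(x0, y1), (x1, y1), ..., (x11, y1)],   # row 1
#   ]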
def find_answer2(answer_list,number_of_choice,thresh1,pixel=40, number_of_question=40):
column = len(answer_list[0])/number_of_choice
assert(column == 3)
answer = ""
number_of_question = 0
for i in range(column):
for j in range(len(answer_list)):
boundary = 1532000
number_of_answer = 0
while(True):
# print boundary
# print number_of_answer
# print "i j k" , i ,j
for k in range(i*4,i*4+number_of_choice):
point = answer_list[j][k]
px = 0
x_start, x_end = point[0] - pixel, point[0] + pixel
y_start, y_end = point[1] - pixel, point[1] + pixel
for x in range(x_start, x_end):
for y in range(y_start, y_end):
px += thresh1[y, x]
# print "this is pixel " , px
# 1532000 is a threshold. The value under the 1532000 means student has handwriting
# in this region.
# print px
if px < boundary:
cv2.circle(thresh1, (x - pixel, y - pixel), 40, (0, 0, 0))
number_of_answer += 1
choice = str(k)
if number_of_answer == 1:
number_of_question += 1
answer += choice
break
if number_of_question==40:
break
if number_of_answer == 0:
boundary = boundary * (1.01)
number_of_answer = 0
else:
boundary = boundary / 1.01
number_of_answer = 0
if number_of_question==40:
break
return answer
if __name__ == '__main__':
image = cv2.imread("sheet.jpg")
# ratio = 1000.0 / image.shape[1]
# # new dimension for image
# dim = (1000, int(image.shape[0] * ratio))
# # perform the actual resizing of the image and show it
# # interpolation = cv2.INTER_AREA this is the algorithm we used. Do worry now
# image = cv2.resize(image, dim, interpolation = cv2.INTER_AREA)
ratio = image.shape[0] / 500.0
#orig = image.copy()
res = cv2.resize(image,None,fx=0.4, fy=0.4, interpolation = cv2.INTER_LANCZOS4)
# res = cv2.resize(image, dst, interpolation=CV_INTER_LINEAR)
# convert image to grayscale
gray = cv2.cvtColor(res, cv2.COLOR_BGR2GRAY)
# blur the image slightly to remove noise.
#gray = cv2.bilateralFilter(gray, 11, 17, 17)
gray = cv2.GaussianBlur(gray, (5, 5), 0) #is an alternative way to blur the image
# canny edge detection
edged = cv2.Canny(gray, 30, 200)
# two threshold method.
# The first one is normal threshold method
# The second one is use Gaussian method which has better effect.
# ret,thresh1 = cv2.threshold(gray,150,150,cv2.THRESH_BINARY)
# thresh1= cv2.adaptiveThreshold(gray,255,cv2.ADAPTIVE_THRESH_GAUSSIAN_C,cv2.THRESH_BINARY,11,2)
cv2.imshow("Outline", res)
(_, cnts, _) =cv2.findContours(edged.copy(), cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
#Now we're only trying to get the largest contour so we only keep the first 10 elements
cnts = sorted(cnts, key = cv2.contourArea,reverse=True)[:10]
for c in cnts:
# approximate the contour
peri = cv2.arcLength(c, True)
approx = cv2.approxPolyDP(c, 0.005* peri, True)
#break when we find the first rectangle
if len(approx) == 4:
screenCnt = approx
break
#draw out the contour
cv2.drawContours(res, [screenCnt], -1, (0, 255, 0), 2)
cv2.imshow("Contours",res)
#warped = four_point_transform(res, screenCnt.reshape(4, 2) * ratio)
lel = rectify(screenCnt)
pts2 = np.float32([[0,0],[840,0],[840,1164],[0,1164]])
M = cv2.getPerspectiveTransform(lel,pts2)
dst = cv2.warpPerspective(res,M,(840,1164))
crop_img = dst[440:945,130:700]
#dst = cv2.resize(dst, (1050, 1455))
cv2.imshow("Warped",dst)
#print len(screenCnt)
# convert the warped image to grayscale, then threshold it
# to give it that 'black and white' paper effect
gray2=cv2.cvtColor(dst, cv2.COLOR_BGR2GRAY)
cv2.imshow("Answer area",gray2)
cv2.imshow("Answer area",crop_img)
#reset the image to the answer area and redo the whole contour detecting process
image = crop_img
orig = image.copy()
# convert image to grayscale
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
# blur the image slightly to remove noise.
gray = cv2.bilateralFilter(gray, 11, 17, 17)
# gray = cv2.GaussianBlur(gray, (5, 5), 0) is an alternative way to blur the image
# canny edge detection
edged = cv2.Canny(gray, 30, 200)
# two threshold method.
# The first one is normal threshold method
# The second one is use Gaussian method which has better effect.
# ret,thresh1 = cv2.threshold(gray,150,150,cv2.THRESH_BINARY)
thresh1 = cv2.adaptiveThreshold(gray, 255, cv2.ADAPTIVE_THRESH_GAUSSIAN_C, cv2.THRESH_BINARY, 11, 2)
# find contours in the edged image, keep only the largest ones, and initialize
# our screen contour
# findContours takes three parameter:
# First parameter: the image we want to find counter. Need to copy since this method will
# destroy the image.
# Second parameter: cv2.RETR_TREE tells OpenCV to compute the hierarchy (relationship)
# between contours
# Third parameter: compress the contours to save space using cv2.CV_CHAIN_APPROX_SIMPLE
try:
(cnts, _) = cv2.findContours(edged.copy(), cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
except:
(_, cnts, _) = cv2.findContours(edged.copy(), cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
# (cnts, _) = cv2.findContours(edged.copy(), cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
# the number of returned parameter is different depending on the version of openCV
# for 2.x it is (cnts, _) = cv2.findContours(edged.copy(), cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
# for 3.x it is (_, cnts, _) = cv2.findContours(edged.copy(), cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
    # Sort the contours by contour area. Since we are trying to get all the boxes in
    # the answer area we keep 1000 elements in the list so we don't miss any possible boxes.
cnts = sorted(cnts, key=cv2.contourArea, reverse=True)[:1000]
# a new list to store all the rectangle counter
cnts_rect = []
# initialize the screenCnt.
screenCnt = None
# loop over our contours
for c in cnts:
# approximate the contour
peri = cv2.arcLength(c, True)
# This function gives the number of vertices of the figure
# For example, approx returns 4 if the shape is rectangle and 5 if the shape is pentagon
# k is constant, it can be changing from 0.005 to 0.1
# k = 0.005
k = 0.005
approx = cv2.approxPolyDP(c, k * peri, True)
# if our approximated contour has four points, then
# we can assume that we have found our screen
if len(approx) == 4 and cv2.contourArea(c) > 15000:
screenCnt = approx
cnts_rect.append(approx)
# print "this is coutour area ", cv2.contourArea(c)
# the print is for test
# print screenCnt[0][0]
# to draw the contours in the original image.
# print len(cnts_rect)
cv2.drawContours(image, cnts_rect, -1, (0, 255, 0), 3)
# to find height and length of the rectangle
height = cnts_rect[0][2][0][1] - cnts_rect[0][0][0][1]
length = cnts_rect[0][2][0][0] - cnts_rect[0][0][0][0]
# x_axis is a list, store all the x_axis data of one contour
# y_axis is a list, store all the y_axis data of same contour
# cnts[0] is a list of point, which is one rectangle
centre_list = find_centre(cnts_rect)
# print len(centre_list)
# print "this length of centre_list is ", len(centre_list)
centre_list_col = sorted(centre_list, key=lambda point: point[1])
# answer_list is a list. It contains x elements, x is rows of the answer sheet. It is also list
# every row_list contains also list which are centre points of rectangle.
answer_list = find_missing_rectangle(centre_list, centre_list_col, length // 2, height // 2)
# ============================================================
# for test print point in centre list
# ============================================================
# print len(answer_list)
# for list1 in answer_list:
# print("the length of list1 is ", len(list1))
# for element in list1:
# print element
# print len(answer_list)
# ============================================================
# end test
# ============================================================
number_of_choice = 4
answer = find_answer2(answer_list,number_of_choice,thresh1,pixel=40,number_of_question=40)
print answer
# i = 0
# print len(centre_list_col)
# for i in range(150):
# print centre_list_col[i]
centre_list = sorted(centre_list, key=lambda point: point[0])
# print "The number of centre point " , len(centre_list)
# # for test.
# i = 0
# print len(centre_list)
# for i in range(138):
# print centre_list[i]
# cv2.circle(image,centre_list[i],20,(0,0,0))
# row_list = process_centre_list(centre_list)
# find_answer(centre_list, thresh1)
# cv2.imshow("Game Boy Screen", image)
# cv2.imshow("gray image", thresh1)
cv2.imwrite('contours.png', image)
cv2.imwrite('thresh1.png',thresh1)
# cv2.waitKey(15000)
# apply the four point transform to obtain a top-down
# view of the original image
    warped = four_point_transform(orig, screenCnt.reshape(4, 2) * ratio)
warped = cv2.cvtColor(warped, cv2.COLOR_BGR2GRAY)
ret, thresh1 = cv2.threshold(warped, 80, 85, cv2.THRESH_BINARY)
# cv2.imshow("Binary",thresh1 )
warped = warped.astype("uint8") * 255
cv2.waitKey(10000)
cv2.imwrite('messigray.png', image)
| {
"content_hash": "5118dad9cc4abc136658c86560cf41fd",
"timestamp": "",
"source": "github",
"line_count": 461,
"max_line_length": 105,
"avg_line_length": 40.13882863340564,
"alnum_prop": 0.5837656722870731,
"repo_name": "HuimingCheng/AutoGrading",
"id": "9117fb8689c910dd34ce233c8abb0df5f4919881",
"size": "18708",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "learning/edge_detection.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1232"
},
{
"name": "C",
"bytes": "400177"
},
{
"name": "C++",
"bytes": "197133"
},
{
"name": "CMake",
"bytes": "14482"
},
{
"name": "CSS",
"bytes": "10474"
},
{
"name": "HTML",
"bytes": "26684"
},
{
"name": "JavaScript",
"bytes": "6748"
},
{
"name": "Makefile",
"bytes": "13303"
},
{
"name": "PowerShell",
"bytes": "8175"
},
{
"name": "Python",
"bytes": "5769059"
},
{
"name": "Tcl",
"bytes": "1295070"
}
],
"symlink_target": ""
} |
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
# Create your views here.
@login_required
def index(request):
return render(request, 'chess/index.html', {})
| {
"content_hash": "a965dd2b164330e72404ab538695a885",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 57,
"avg_line_length": 25.75,
"alnum_prop": 0.7815533980582524,
"repo_name": "manumartin/examples",
"id": "57af5110427346039cec3b14578f97974e861b54",
"size": "206",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ml/mouse_recognition/webapp/chess/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "4881"
},
{
"name": "C++",
"bytes": "141422"
},
{
"name": "CMake",
"bytes": "2278"
},
{
"name": "CSS",
"bytes": "28620"
},
{
"name": "GLSL",
"bytes": "2586"
},
{
"name": "HTML",
"bytes": "81647"
},
{
"name": "Java",
"bytes": "96981"
},
{
"name": "JavaScript",
"bytes": "768612"
},
{
"name": "Jupyter Notebook",
"bytes": "2212214"
},
{
"name": "Python",
"bytes": "42778"
},
{
"name": "Shell",
"bytes": "476"
},
{
"name": "TypeScript",
"bytes": "59903"
},
{
"name": "Vue",
"bytes": "4524"
}
],
"symlink_target": ""
} |
import unittest
from rx.observable import Observable
from rx.testing import TestScheduler, ReactiveTest, is_prime
on_next = ReactiveTest.on_next
on_completed = ReactiveTest.on_completed
on_error = ReactiveTest.on_error
subscribe = ReactiveTest.subscribe
subscribed = ReactiveTest.subscribed
disposed = ReactiveTest.disposed
created = ReactiveTest.created
class TestTakeWithTime(unittest.TestCase):
def test_take_zero(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(
on_next(210, 1),
on_next(220, 2),
on_completed(230))
def create():
return xs.take_with_time(0, scheduler)
res = scheduler.start(create)
res.messages.assert_equal(on_completed(201))
xs.subscriptions.assert_equal(subscribe(200, 201))
def test_take_some(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(on_next(210, 1), on_next(220, 2), on_next(230, 3), on_completed(240))
def create():
return xs.take_with_time(25, scheduler)
res = scheduler.start(create)
res.messages.assert_equal(on_next(210, 1), on_next(220, 2), on_completed(225))
xs.subscriptions.assert_equal(subscribe(200, 225))
def test_take_late(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(on_next(210, 1), on_next(220, 2), on_completed(230))
def create():
return xs.take_with_time(50, scheduler)
res = scheduler.start(create)
res.messages.assert_equal(on_next(210, 1), on_next(220, 2), on_completed(230))
xs.subscriptions.assert_equal(subscribe(200, 230))
def test_take_Error(self):
scheduler = TestScheduler()
ex = 'ex'
xs = scheduler.create_hot_observable(on_error(210, ex))
def create():
return xs.take_with_time(50, scheduler)
res = scheduler.start(create)
res.messages.assert_equal(on_error(210, ex))
xs.subscriptions.assert_equal(subscribe(200, 210))
def test_take_never(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable()
def create():
return xs.take_with_time(50, scheduler)
res = scheduler.start(create)
res.messages.assert_equal(on_completed(250))
xs.subscriptions.assert_equal(subscribe(200, 250))
def test_take_twice1(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(on_next(210, 1), on_next(220, 2), on_next(230, 3), on_next(240, 4), on_next(250, 5), on_next(260, 6), on_completed(270))
def create():
return xs.take_with_time(55, scheduler).take_with_time(35, scheduler)
res = scheduler.start(create)
res.messages.assert_equal(on_next(210, 1), on_next(220, 2), on_next(230, 3), on_completed(235))
xs.subscriptions.assert_equal(subscribe(200, 235))
def test_take_twice2(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(on_next(210, 1), on_next(220, 2), on_next(230, 3), on_next(240, 4), on_next(250, 5), on_next(260, 6), on_completed(270))
def create():
return xs.take_with_time(35, scheduler).take_with_time(55, scheduler)
res = scheduler.start(create)
res.messages.assert_equal(on_next(210, 1), on_next(220, 2), on_next(230, 3), on_completed(235))
xs.subscriptions.assert_equal(subscribe(200, 235))
| {
"content_hash": "7ac33f3ac2e2b61f269097a717267151",
"timestamp": "",
"source": "github",
"line_count": 98,
"max_line_length": 165,
"avg_line_length": 35.704081632653065,
"alnum_prop": 0.639039725635896,
"repo_name": "dbrattli/RxPY",
"id": "785b26235c0fea04a17966218fad9a159ff9c193",
"size": "3499",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_observable/test_takewithtime.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1334787"
}
],
"symlink_target": ""
} |
import json
from urlparse import urlparse
from django.core.urlresolvers import reverse
from django.test.client import RequestFactory
from mock import patch
from nose.tools import eq_, ok_
import mkt
from mkt.api.tests import BaseAPI
from mkt.api.tests.test_oauth import RestOAuth
from mkt.constants.base import STATUS_PUBLIC
from mkt.extensions.models import Extension, ExtensionPopularity
from mkt.fireplace.serializers import (FireplaceAppSerializer,
FireplaceWebsiteSerializer)
from mkt.search.forms import COLOMBIA_WEBSITE
from mkt.site.fixtures import fixture
from mkt.site.tests import app_factory, ESTestCase, TestCase
from mkt.tags.models import Tag
from mkt.webapps.indexers import HomescreenIndexer
from mkt.webapps.models import AddonUser, Installed, Webapp
from mkt.websites.models import Website, WebsitePopularity
from mkt.websites.utils import website_factory
# https://bugzilla.mozilla.org/show_bug.cgi?id=958608#c1 and #c2.
FIREPLACE_APP_EXCLUDED_FIELDS = (
'absolute_url', 'app_type', 'created', 'default_locale', 'payment_account',
'regions', 'resource_uri', 'supported_locales', 'upsold', 'versions')
FIREPLACE_WEBSITE_EXCLUDED_FIELDS = ('title', 'tv_url')
def assert_fireplace_app(data):
for field in FIREPLACE_APP_EXCLUDED_FIELDS:
ok_(field not in data, field)
for field in FireplaceAppSerializer.Meta.fields:
ok_(field in data, field)
def assert_fireplace_website(data):
for field in FIREPLACE_WEBSITE_EXCLUDED_FIELDS:
ok_(field not in data, field)
for field in FireplaceWebsiteSerializer.Meta.fields:
ok_(field in data, field)
class TestAppDetail(BaseAPI):
fixtures = fixture('webapp_337141')
def setUp(self):
super(TestAppDetail, self).setUp()
self.url = reverse('fireplace-app-detail', kwargs={'pk': 337141})
def test_get(self):
res = self.client.get(self.url)
data = json.loads(res.content)
eq_(data['id'], 337141)
assert_fireplace_app(data)
def test_get_slug(self):
Webapp.objects.get(pk=337141).update(app_slug='foo')
res = self.client.get(reverse('fireplace-app-detail',
kwargs={'pk': 'foo'}))
data = json.loads(res.content)
eq_(data['id'], 337141)
def test_others(self):
url = reverse('fireplace-app-list')
self._allowed_verbs(self.url, ['get'])
self._allowed_verbs(url, [])
def test_file_size(self):
self.app = Webapp.objects.get(pk=337141)
res = self.client.get(self.url)
data = json.loads(res.content)
eq_(data['file_size'], u'379.0\xa0KB')
file_ = self.app.current_version.all_files[0]
file_.update(size=1024 * 1024 * 1.1)
res = self.client.get(self.url)
data = json.loads(res.content)
eq_(data['file_size'], u'1.1\xa0MB')
file_.update(size=0)
res = self.client.get(self.url)
data = json.loads(res.content)
eq_(data['file_size'], None)
class TestFeaturedSearchView(RestOAuth, ESTestCase):
fixtures = fixture('user_2519', 'webapp_337141')
def setUp(self):
super(TestFeaturedSearchView, self).setUp()
self.webapp = Webapp.objects.get(pk=337141)
self.reindex(Webapp)
self.url = reverse('fireplace-featured-search-api')
def test_get(self):
res = self.client.get(self.url)
eq_(res.status_code, 200)
objects = res.json['objects']
eq_(len(objects), 1)
data = objects[0]
eq_(data['id'], 337141)
assert_fireplace_app(data)
# fireplace-featured-search-api is only kept for yogafire, which does
# not care about collection data, so we don't even need to add empty
# arrays for backwards-compatibility.
ok_('collections' not in res.json)
ok_('featured' not in res.json)
ok_('operator' not in res.json)
class TestSearchView(RestOAuth, ESTestCase):
fixtures = fixture('user_2519', 'webapp_337141')
def setUp(self):
super(TestSearchView, self).setUp()
self.webapp = Webapp.objects.get(pk=337141)
self.reindex(Webapp)
self.url = reverse('fireplace-search-api')
def test_get(self):
res = self.client.get(self.url)
eq_(res.status_code, 200)
objects = res.json['objects']
eq_(len(objects), 1)
data = objects[0]
eq_(data['id'], 337141)
assert_fireplace_app(data)
ok_('featured' not in res.json)
ok_('collections' not in res.json)
ok_('operator' not in res.json)
def test_anonymous_user(self):
res = self.anon.get(self.url)
eq_(res.status_code, 200)
data = res.json['objects'][0]
eq_(data['user'], None)
res = self.client.get(self.url)
eq_(res.status_code, 200)
data = res.json['objects'][0]
eq_(data['user'], None)
def test_icons(self):
res = self.client.get(self.url)
eq_(res.status_code, 200)
objects = res.json['objects']
data = objects[0]['icons']
eq_(len(data), 2)
eq_(urlparse(data['64'])[0:3],
urlparse(self.webapp.get_icon_url(64))[0:3])
eq_(urlparse(data['128'])[0:3],
urlparse(self.webapp.get_icon_url(128))[0:3])
class TestMultiSearchView(RestOAuth, ESTestCase):
fixtures = fixture('user_2519', 'webapp_337141')
def setUp(self):
super(TestMultiSearchView, self).setUp()
self.url = reverse('fireplace-multi-search-api')
self.website = website_factory()
self.website.popularity.add(WebsitePopularity(region=0, value=666))
self.website.save()
self.webapp = Webapp.objects.get(pk=337141)
self.webapp.save()
self.extension = Extension.objects.create(name='test-ext-lol')
self.extension.versions.create(status=STATUS_PUBLIC)
self.extension.popularity.add(ExtensionPopularity(region=0, value=999))
self.extension.save()
self.refresh(doctypes=('extension', 'webapp', 'website',
'homescreen'))
def tearDown(self):
for o in Webapp.objects.all():
o.delete()
for o in Website.objects.all():
o.delete()
for o in Extension.objects.all():
o.delete()
super(TestMultiSearchView, self).tearDown()
# Make sure to delete and unindex *all* things. Normally we wouldn't
# care about stray deleted content staying in the index, but they can
# have an impact on relevancy scoring so we need to make sure. This
# needs to happen after super() has been called since it'll process the
# indexing tasks that should happen post_request, and we need to wait
# for ES to have done everything before continuing.
Webapp.get_indexer().unindexer(_all=True)
Website.get_indexer().unindexer(_all=True)
Extension.get_indexer().unindexer(_all=True)
HomescreenIndexer.unindexer(_all=True)
self.refresh(('webapp', 'website', 'extension', 'homescreen'))
def make_homescreen(self):
self.homescreen = app_factory(name=u'Elegant Waffle',
description=u'homescreen runner',
created=self.days_ago(5),
manifest_url='http://h.testmanifest.com')
Tag(tag_text='homescreen').save_tag(self.homescreen)
self.homescreen.addondevicetype_set.create(
device_type=mkt.DEVICE_GAIA.id)
self.homescreen.update(categories=['health-fitness', 'productivity'])
self.homescreen.update_version()
HomescreenIndexer.index_ids([self.homescreen.pk], no_delay=True)
self.refresh(('webapp', 'website', 'extension', 'homescreen'))
return self.homescreen
def _add_co_tag(self, website):
co = Tag.objects.get_or_create(tag_text=COLOMBIA_WEBSITE)[0]
website.keywords.add(co)
self.reindex(Website)
def test_get_multi(self):
res = self.client.get(self.url)
objects = res.json['objects']
eq_(len(objects), 2) # By default we don't include extensions for now.
eq_(objects[0]['doc_type'], 'website')
assert_fireplace_website(objects[0])
eq_(objects[0]['slug'], '{website-%d}' % self.website.pk)
eq_(objects[1]['doc_type'], 'webapp')
assert_fireplace_app(objects[1])
def test_multi_with_extensions(self):
res = self.client.get(self.url + '?doc_type=webapp,extension,website')
objects = res.json['objects']
eq_(len(objects), 3)
eq_(objects[0]['doc_type'], 'extension')
eq_(objects[0]['slug'], self.extension.slug)
eq_(objects[1]['doc_type'], 'website')
assert_fireplace_website(objects[1])
eq_(objects[1]['slug'], '{website-%d}' % self.website.pk)
eq_(objects[2]['doc_type'], 'webapp')
assert_fireplace_app(objects[2])
def test_multi_with_homescreen(self):
h = self.make_homescreen()
res = self.client.get(self.url + '?doc_type=webapp,homescreen,website')
objects = res.json['objects']
eq_(len(objects), 3)
eq_(objects[0]['doc_type'], 'website')
assert_fireplace_website(objects[0])
eq_(objects[0]['slug'], '{website-%d}' % self.website.pk)
eq_(objects[1]['doc_type'], 'webapp')
assert_fireplace_app(objects[1])
eq_(objects[2]['doc_type'], 'homescreen')
eq_(objects[2]['slug'], h.app_slug)
def test_get_multi_colombia(self):
self._add_co_tag(self.website)
res = self.client.get(self.url, {'doc_type': 'website',
'region': 'mx'})
eq_(res.json['meta']['total_count'], 0)
res_co = self.client.get(self.url, {'doc_type': 'website',
'region': 'co'})
eq_(res_co.json['meta']['total_count'], 1)
ok_(COLOMBIA_WEBSITE in res_co.json['objects'][0]['keywords'])
assert_fireplace_website(res_co.json['objects'][0])
def test_icons(self):
res = self.client.get(self.url)
eq_(res.status_code, 200)
objects = res.json['objects']
eq_(objects[0]['doc_type'], 'website')
data = objects[0]['icons']
eq_(len(data), 2)
eq_(urlparse(data['64'])[0:3],
urlparse(self.website.get_icon_url(64))[0:3])
eq_(urlparse(data['128'])[0:3],
urlparse(self.website.get_icon_url(128))[0:3])
eq_(objects[1]['doc_type'], 'webapp')
data = objects[1]['icons']
eq_(len(data), 2)
eq_(urlparse(data['64'])[0:3],
urlparse(self.webapp.get_icon_url(64))[0:3])
eq_(urlparse(data['128'])[0:3],
urlparse(self.webapp.get_icon_url(128))[0:3])
class TestConsumerInfoView(RestOAuth, TestCase):
fixtures = fixture('user_2519')
def setUp(self):
super(TestConsumerInfoView, self).setUp()
self.request = RequestFactory().get('/')
self.url = reverse('fireplace-consumer-info')
@patch('mkt.regions.middleware.GeoIP.lookup')
def test_geoip_called_api_v1(self, mock_lookup):
# When we increment settings.API_CURRENT_VERSION, we'll need to update
# this test to make sure it's still only using v1.
self.url = reverse('fireplace-consumer-info')
ok_('/api/v1/' in self.url)
mock_lookup.return_value = mkt.regions.GBR
res = self.anon.get(self.url)
data = json.loads(res.content)
eq_(data['region'], 'uk')
eq_(mock_lookup.call_count, 1)
@patch('mkt.regions.middleware.GeoIP.lookup')
def test_geoip_called_api_v2(self, mock_lookup):
self.url = reverse('api-v2:fireplace-consumer-info')
mock_lookup.return_value = mkt.regions.GBR
res = self.anon.get(self.url)
data = json.loads(res.content)
eq_(data['region'], 'uk')
eq_(mock_lookup.call_count, 1)
@patch('mkt.regions.middleware.RegionMiddleware.region_from_request')
def test_no_user_just_region(self, region_from_request):
region_from_request.return_value = mkt.regions.GBR
res = self.anon.get(self.url)
data = json.loads(res.content)
eq_(len(data.keys()), 1)
eq_(data['region'], 'uk')
@patch('mkt.regions.middleware.RegionMiddleware.region_from_request')
def test_recommendation_opt_out(self, region_from_request):
region_from_request.return_value = mkt.regions.BRA
for opt in (True, False):
self.user.update(enable_recommendations=opt)
res = self.client.get(self.url)
data = json.loads(res.content)
eq_(data['enable_recommendations'], opt)
@patch('mkt.regions.middleware.RegionMiddleware.region_from_request')
def test_with_user_developed(self, region_from_request):
region_from_request.return_value = mkt.regions.BRA
developed_app = app_factory()
AddonUser.objects.create(user=self.user, addon=developed_app)
res = self.client.get(self.url)
data = json.loads(res.content)
eq_(data['region'], 'br')
eq_(data['apps']['installed'], [])
eq_(data['apps']['developed'], [developed_app.pk])
eq_(data['apps']['purchased'], [])
@patch('mkt.regions.middleware.RegionMiddleware.region_from_request')
def test_with_user_installed(self, region_from_request):
region_from_request.return_value = mkt.regions.BRA
installed_app = app_factory()
Installed.objects.create(user=self.user, addon=installed_app)
res = self.client.get(self.url)
data = json.loads(res.content)
eq_(data['region'], 'br')
eq_(data['apps']['installed'], [installed_app.pk])
eq_(data['apps']['developed'], [])
eq_(data['apps']['purchased'], [])
@patch('mkt.users.models.UserProfile.purchase_ids')
@patch('mkt.regions.middleware.RegionMiddleware.region_from_request')
def test_with_user_purchased(self, region_from_request, purchase_ids):
region_from_request.return_value = mkt.regions.BRA
purchased_app = app_factory()
purchase_ids.return_value = [purchased_app.pk]
res = self.client.get(self.url)
data = json.loads(res.content)
eq_(data['region'], 'br')
eq_(data['apps']['installed'], [])
eq_(data['apps']['developed'], [])
eq_(data['apps']['purchased'], [purchased_app.pk])
class TestRocketFuelRedirect(TestCase):
def setUp(self):
super(TestRocketFuelRedirect, self).setUp()
self.url = '/api/v1/fireplace/collection/tarako-featured/'
self.target_url = '/api/v2/fireplace/feed/collections/tarako-featured/'
def test_redirect(self):
response = self.client.get(self.url)
self.assertCORS(response, 'GET')
self.assert3xx(response, self.target_url,
status_code=301)
def test_redirect_with_query_params(self):
self.url += u'?foo=bar&re=diré'
self.target_url += '?foo=bar&re=dir%C3%A9'
self.test_redirect()
| {
"content_hash": "bbd483545fea9ab13c33227f19dbff47",
"timestamp": "",
"source": "github",
"line_count": 394,
"max_line_length": 79,
"avg_line_length": 38.67258883248731,
"alnum_prop": 0.6132440769180285,
"repo_name": "washort/zamboni",
"id": "014ad61f1d54076cf0b88a82accf93c50065d00d",
"size": "15262",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "mkt/fireplace/tests/test_views.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "354243"
},
{
"name": "HTML",
"bytes": "2383319"
},
{
"name": "JavaScript",
"bytes": "532109"
},
{
"name": "Makefile",
"bytes": "4313"
},
{
"name": "Python",
"bytes": "4735484"
},
{
"name": "Shell",
"bytes": "11135"
},
{
"name": "Smarty",
"bytes": "1159"
}
],
"symlink_target": ""
} |
"""Tensor summaries for exporting information about a model.
See the @{$python/summary} guide.
@@FileWriter
@@FileWriterCache
@@tensor_summary
@@scalar
@@histogram
@@audio
@@image
@@text
@@merge
@@merge_all
@@get_summary_description
@@PluginAsset
@@get_plugin_asset
@@get_all_plugin_assets
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from google.protobuf import json_format as _json_format
# exports Summary, SummaryDescription, Event, TaggedRunMetadata, SessionLog
# pylint: disable=unused-import
from tensorflow.core.framework.summary_pb2 import Summary
from tensorflow.core.framework.summary_pb2 import SummaryDescription
from tensorflow.core.util.event_pb2 import Event
from tensorflow.core.util.event_pb2 import SessionLog
from tensorflow.core.util.event_pb2 import TaggedRunMetadata
# pylint: enable=unused-import
from tensorflow.python.eager import context as _context
from tensorflow.python.framework import dtypes as _dtypes
from tensorflow.python.framework import ops as _ops
from tensorflow.python.ops import gen_logging_ops as _gen_logging_ops
from tensorflow.python.ops import summary_op_util as _summary_op_util
# exports tensor-related summaries
# pylint: disable=unused-import
from tensorflow.python.ops.summary_ops import tensor_summary
# pylint: enable=unused-import
# exports text
# pylint: disable=unused-import
from tensorflow.python.summary.text_summary import text_summary as text
# pylint: enable=unused-import
# exports FileWriter, FileWriterCache
# pylint: disable=unused-import
from tensorflow.python.summary.writer.writer import FileWriter
from tensorflow.python.summary.writer.writer_cache import FileWriterCache
# pylint: enable=unused-import
from tensorflow.python.util import compat as _compat
from tensorflow.python.util.all_util import remove_undocumented
def scalar(name, tensor, collections=None, family=None):
"""Outputs a `Summary` protocol buffer containing a single scalar value.
The generated Summary has a Tensor.proto containing the input Tensor.
Args:
name: A name for the generated node. Will also serve as the series name in
TensorBoard.
tensor: A real numeric Tensor containing a single value.
collections: Optional list of graph collections keys. The new summary op is
added to these collections. Defaults to `[GraphKeys.SUMMARIES]`.
family: Optional; if provided, used as the prefix of the summary tag name,
which controls the tab name used for display on Tensorboard.
Returns:
A scalar `Tensor` of type `string`. Which contains a `Summary` protobuf.
Raises:
ValueError: If tensor has the wrong shape or type.
"""
with _summary_op_util.summary_scope(
name, family, values=[tensor]) as (tag, scope):
# pylint: disable=protected-access
val = _gen_logging_ops._scalar_summary(tags=tag, values=tensor, name=scope)
_summary_op_util.collect(val, collections, [_ops.GraphKeys.SUMMARIES])
return val
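# --- Illustrative sketch, not part of the original module ---
# A minimal graph-mode use of scalar() above; the log directory and the
# 'loss' tag are hypothetical. The helper is defined but never called, so
# importing this module is unaffected.
def _example_scalar_usage():
  import tensorflow as tf
  loss = tf.constant(0.25)
  summ = tf.summary.scalar('loss', loss)  # serialized Summary, dtype=string
  writer = tf.summary.FileWriter('/tmp/example_logdir')
  with tf.Session() as sess:
    writer.add_summary(sess.run(summ), global_step=0)
  writer.close()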
def image(name, tensor, max_outputs=3, collections=None, family=None):
"""Outputs a `Summary` protocol buffer with images.
The summary has up to `max_outputs` summary values containing images. The
images are built from `tensor` which must be 4-D with shape `[batch_size,
height, width, channels]` and where `channels` can be:
* 1: `tensor` is interpreted as Grayscale.
* 3: `tensor` is interpreted as RGB.
* 4: `tensor` is interpreted as RGBA.
The images have the same number of channels as the input tensor. For float
input, the values are normalized one image at a time to fit in the range
`[0, 255]`. `uint8` values are unchanged. The op uses two different
normalization algorithms:
* If the input values are all positive, they are rescaled so the largest one
is 255.
* If any input value is negative, the values are shifted so input value 0.0
is at 127. They are then rescaled so that either the smallest value is 0,
or the largest one is 255.
The `tag` in the outputted Summary.Value protobufs is generated based on the
name, with a suffix depending on the max_outputs setting:
* If `max_outputs` is 1, the summary value tag is '*name*/image'.
* If `max_outputs` is greater than 1, the summary value tags are
generated sequentially as '*name*/image/0', '*name*/image/1', etc.
Args:
name: A name for the generated node. Will also serve as a series name in
TensorBoard.
tensor: A 4-D `uint8` or `float32` `Tensor` of shape `[batch_size, height,
width, channels]` where `channels` is 1, 3, or 4.
max_outputs: Max number of batch elements to generate images for.
collections: Optional list of ops.GraphKeys. The collections to add the
summary to. Defaults to [_ops.GraphKeys.SUMMARIES]
family: Optional; if provided, used as the prefix of the summary tag name,
which controls the tab name used for display on Tensorboard.
Returns:
A scalar `Tensor` of type `string`. The serialized `Summary` protocol
buffer.
"""
with _summary_op_util.summary_scope(
name, family, values=[tensor]) as (tag, scope):
# pylint: disable=protected-access
val = _gen_logging_ops._image_summary(
tag=tag, tensor=tensor, max_images=max_outputs, name=scope)
_summary_op_util.collect(val, collections, [_ops.GraphKeys.SUMMARIES])
return val
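# --- Illustrative sketch, not part of the original module ---
# Example use of image() above with a random grayscale batch; per-image
# rescaling of float values to [0, 255] happens inside the op as described in
# the docstring. Shapes, tag and values are hypothetical; never called.
def _example_image_usage():
  import numpy as np
  import tensorflow as tf
  batch = tf.constant(np.random.rand(4, 28, 28, 1).astype(np.float32))
  summ = tf.summary.image('digits', batch, max_outputs=2)
  with tf.Session() as sess:
    serialized = sess.run(summ)  # Summary proto bytes holding two images
  return serialized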
def histogram(name, values, collections=None, family=None):
# pylint: disable=line-too-long
"""Outputs a `Summary` protocol buffer with a histogram.
Adding a histogram summary makes it possible to visualize your data's
distribution in TensorBoard. You can see a detailed explanation of the
TensorBoard histogram dashboard
[here](https://www.tensorflow.org/get_started/tensorboard_histograms).
The generated
[`Summary`](https://www.tensorflow.org/code/tensorflow/core/framework/summary.proto)
has one summary value containing a histogram for `values`.
This op reports an `InvalidArgument` error if any value is not finite.
Args:
name: A name for the generated node. Will also serve as a series name in
TensorBoard.
values: A real numeric `Tensor`. Any shape. Values to use to
build the histogram.
collections: Optional list of graph collections keys. The new summary op is
added to these collections. Defaults to `[GraphKeys.SUMMARIES]`.
family: Optional; if provided, used as the prefix of the summary tag name,
which controls the tab name used for display on Tensorboard.
Returns:
A scalar `Tensor` of type `string`. The serialized `Summary` protocol
buffer.
"""
with _summary_op_util.summary_scope(
name, family, values=[values],
default_name='HistogramSummary') as (tag, scope):
# pylint: disable=protected-access
val = _gen_logging_ops._histogram_summary(
tag=tag, values=values, name=scope)
_summary_op_util.collect(val, collections, [_ops.GraphKeys.SUMMARIES])
return val
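# --- Illustrative sketch, not part of the original module ---
# Example use of histogram() above on a synthetic activation tensor; the
# 'activations' tag is hypothetical. Defined but never called.
def _example_histogram_usage():
  import tensorflow as tf
  activations = tf.random_normal([1000])
  summ = tf.summary.histogram('activations', activations)
  with tf.Session() as sess:
    serialized = sess.run(summ)  # one Summary value holding the histogram
  return serialized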
def audio(name, tensor, sample_rate, max_outputs=3, collections=None,
family=None):
# pylint: disable=line-too-long
"""Outputs a `Summary` protocol buffer with audio.
The summary has up to `max_outputs` summary values containing audio. The
audio is built from `tensor` which must be 3-D with shape `[batch_size,
frames, channels]` or 2-D with shape `[batch_size, frames]`. The values are
assumed to be in the range of `[-1.0, 1.0]` with a sample rate of
`sample_rate`.
The `tag` in the outputted Summary.Value protobufs is generated based on the
name, with a suffix depending on the max_outputs setting:
* If `max_outputs` is 1, the summary value tag is '*name*/audio'.
* If `max_outputs` is greater than 1, the summary value tags are
generated sequentially as '*name*/audio/0', '*name*/audio/1', etc
Args:
name: A name for the generated node. Will also serve as a series name in
TensorBoard.
tensor: A 3-D `float32` `Tensor` of shape `[batch_size, frames, channels]`
or a 2-D `float32` `Tensor` of shape `[batch_size, frames]`.
sample_rate: A Scalar `float32` `Tensor` indicating the sample rate of the
signal in hertz.
max_outputs: Max number of batch elements to generate audio for.
collections: Optional list of ops.GraphKeys. The collections to add the
summary to. Defaults to [_ops.GraphKeys.SUMMARIES]
family: Optional; if provided, used as the prefix of the summary tag name,
which controls the tab name used for display on Tensorboard.
Returns:
A scalar `Tensor` of type `string`. The serialized `Summary` protocol
buffer.
"""
with _summary_op_util.summary_scope(
name, family=family, values=[tensor]) as (tag, scope):
# pylint: disable=protected-access
sample_rate = _ops.convert_to_tensor(
sample_rate, dtype=_dtypes.float32, name='sample_rate')
val = _gen_logging_ops._audio_summary_v2(
tag=tag, tensor=tensor, max_outputs=max_outputs,
sample_rate=sample_rate, name=scope)
_summary_op_util.collect(val, collections, [_ops.GraphKeys.SUMMARIES])
return val
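# --- Illustrative sketch, not part of the original module ---
# Example use of audio() above: one second of a 440 Hz sine wave at 16 kHz,
# shaped [batch_size, frames, channels]. Tag and values are hypothetical;
# defined but never called.
def _example_audio_usage():
  import numpy as np
  import tensorflow as tf
  rate = 16000
  t = np.arange(rate, dtype=np.float32) / rate
  wave = np.sin(2.0 * np.pi * 440.0 * t).astype(np.float32)
  tensor = tf.constant(wave.reshape(1, rate, 1))
  summ = tf.summary.audio('tone', tensor, sample_rate=rate, max_outputs=1)
  with tf.Session() as sess:
    serialized = sess.run(summ)
  return serialized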
def merge(inputs, collections=None, name=None):
# pylint: disable=line-too-long
"""Merges summaries.
This op creates a
[`Summary`](https://www.tensorflow.org/code/tensorflow/core/framework/summary.proto)
protocol buffer that contains the union of all the values in the input
summaries.
When the Op is run, it reports an `InvalidArgument` error if multiple values
in the summaries to merge use the same tag.
Args:
inputs: A list of `string` `Tensor` objects containing serialized `Summary`
protocol buffers.
collections: Optional list of graph collections keys. The new summary op is
added to these collections. Defaults to `[]`.
name: A name for the operation (optional).
Returns:
A scalar `Tensor` of type `string`. The serialized `Summary` protocol
buffer resulting from the merging.
Raises:
RuntimeError: If called with eager mode enabled.
@compatibility(eager)
Not compatible with eager execution. To write TensorBoard
summaries under eager execution, use `tf.contrib.summary` instead.
  @end_compatibility
"""
# pylint: enable=line-too-long
if _context.in_eager_mode():
raise RuntimeError(
'Merging tf.summary.* ops is not compatible with eager execution. '
'Use tf.contrib.summary instead.')
name = _summary_op_util.clean_tag(name)
with _ops.name_scope(name, 'Merge', inputs):
# pylint: disable=protected-access
val = _gen_logging_ops._merge_summary(inputs=inputs, name=name)
_summary_op_util.collect(val, collections, [])
return val
def merge_all(key=_ops.GraphKeys.SUMMARIES, scope=None):
"""Merges all summaries collected in the default graph.
Args:
key: `GraphKey` used to collect the summaries. Defaults to
`GraphKeys.SUMMARIES`.
scope: Optional scope used to filter the summary ops, using `re.match`
Returns:
If no summaries were collected, returns None. Otherwise returns a scalar
`Tensor` of type `string` containing the serialized `Summary` protocol
buffer resulting from the merging.
Raises:
RuntimeError: If called with eager execution enabled.
@compatibility(eager)
Not compatible with eager execution. To write TensorBoard
summaries under eager execution, use `tf.contrib.summary` instead.
  @end_compatibility
"""
if _context.in_eager_mode():
raise RuntimeError(
'Merging tf.summary.* ops is not compatible with eager execution. '
'Use tf.contrib.summary instead.')
summary_ops = _ops.get_collection(key, scope=scope)
if not summary_ops:
return None
else:
return merge(summary_ops)
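# --- Illustrative sketch, not part of the original module ---
# Typical pattern around merge_all() above: build individual summaries, merge
# them into one op, and write the serialized result once per step. Tags and
# the log directory are hypothetical; defined but never called.
def _example_merge_all_usage():
  import tensorflow as tf
  tf.summary.scalar('loss', tf.constant(1.0))
  tf.summary.scalar('accuracy', tf.constant(0.5))
  merged = tf.summary.merge_all()
  writer = tf.summary.FileWriter('/tmp/example_logdir')
  with tf.Session() as sess:
    writer.add_summary(sess.run(merged), global_step=0)
  writer.close()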
def get_summary_description(node_def):
"""Given a TensorSummary node_def, retrieve its SummaryDescription.
When a Summary op is instantiated, a SummaryDescription of associated
metadata is stored in its NodeDef. This method retrieves the description.
Args:
node_def: the node_def_pb2.NodeDef of a TensorSummary op
Returns:
a summary_pb2.SummaryDescription
Raises:
ValueError: if the node is not a summary op.
@compatibility(eager)
Not compatible with eager execution. To write TensorBoard
summaries under eager execution, use `tf.contrib.summary` instead.
  @end_compatibility
"""
if node_def.op != 'TensorSummary':
raise ValueError("Can't get_summary_description on %s" % node_def.op)
description_str = _compat.as_str_any(node_def.attr['description'].s)
summary_description = SummaryDescription()
_json_format.Parse(description_str, summary_description)
return summary_description
_allowed_symbols = [
'Summary', 'SummaryDescription', 'Event', 'TaggedRunMetadata', 'SessionLog',
]
remove_undocumented(__name__, _allowed_symbols)
| {
"content_hash": "29553eac4a616e013ceff6f6d5d3aa1a",
"timestamp": "",
"source": "github",
"line_count": 339,
"max_line_length": 86,
"avg_line_length": 37.77581120943953,
"alnum_prop": 0.7268467905669218,
"repo_name": "av8ramit/tensorflow",
"id": "92c1fcadd29c7858da1d31375c209bf1b21f3103",
"size": "13496",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "tensorflow/python/summary/summary.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "9096"
},
{
"name": "C",
"bytes": "332331"
},
{
"name": "C++",
"bytes": "37144977"
},
{
"name": "CMake",
"bytes": "193247"
},
{
"name": "Go",
"bytes": "1061627"
},
{
"name": "HTML",
"bytes": "4680032"
},
{
"name": "Java",
"bytes": "544069"
},
{
"name": "Jupyter Notebook",
"bytes": "1940884"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "48122"
},
{
"name": "Objective-C",
"bytes": "12456"
},
{
"name": "Objective-C++",
"bytes": "94385"
},
{
"name": "PHP",
"bytes": "1487"
},
{
"name": "Perl",
"bytes": "6179"
},
{
"name": "Perl 6",
"bytes": "1357"
},
{
"name": "PureBasic",
"bytes": "24932"
},
{
"name": "Python",
"bytes": "32711532"
},
{
"name": "Ruby",
"bytes": "547"
},
{
"name": "Shell",
"bytes": "422931"
}
],
"symlink_target": ""
} |
class FileBackend(file): # noqa
def __init__(self, path, mode='ab+'):
try:
super(FileBackend, self).__init__(path, mode)
except IOError:
# File not found
super(FileBackend, self).__init__(path, 'wb+')
| {
"content_hash": "7e7419d302b1a06420ac1dcbc0a072eb",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 58,
"avg_line_length": 36.857142857142854,
"alnum_prop": 0.5232558139534884,
"repo_name": "muraliselva10/cloudkitty",
"id": "6608ed091803c879322782ec0fd2bb3a8a374faf",
"size": "922",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "cloudkitty/backend/file.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "2060"
},
{
"name": "Python",
"bytes": "526205"
},
{
"name": "Shell",
"bytes": "12562"
}
],
"symlink_target": ""
} |
"""
Management class for operations related to remote display.
"""
import re
import threading
from oslo_config import cfg
from oslo_log import log as logging
from nova.console import type as console_type
from nova import exception
from nova import i18n
from nova.virt.virtualbox import constants
from nova.virt.virtualbox import exception as vbox_exc
from nova.virt.virtualbox import hostutils
from nova.virt.virtualbox import manage
REMOTE_DISPLAY = [
cfg.BoolOpt(
'remote_display', default=False,
help='Enable or disable the VRDE Server.'),
cfg.BoolOpt(
'vrde_unique_port', default=False,
help='Whether to use an unique port for each instance.'),
cfg.StrOpt(
'vrde_module', default='Oracle VM VirtualBox Extension Pack',
help='The module used by VRDE Server'),
cfg.IntOpt(
'vrde_password_length', default=None,
help='VRDE maximum length for password.'),
cfg.StrOpt(
'vrde_port', default='3389',
help='A port or a range of ports the VRDE server can bind to.'),
cfg.BoolOpt(
'vrde_require_instance_uuid_as_password', default=False,
help="Use the instance uuid as password for the VRDE server."),
]
ENCRYPTED_RDP = [
cfg.BoolOpt(
'encrypted_rdp', default=False,
help='Enable or disable the rdp encryption.'),
cfg.StrOpt(
'security_method', default=constants.VRDE_SECURITY_RDP,
help='The security method used for encryption. (RDP, TLS, Negotiate)'
),
cfg.StrOpt(
'server_certificate', default=None,
help='The Server Certificate.'),
cfg.StrOpt(
'server_private_key', default=None,
help='The Server Private Key.'),
cfg.StrOpt(
'server_ca', default=None,
help='The Certificate Authority (CA) Certificate.'),
]
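# Illustrative sketch, not part of the original driver: a hypothetical
# nova.conf fragment exercising the [virtualbox] and [rdp] options defined
# above (all values are examples only).
#
#   [virtualbox]
#   remote_display = True
#   vrde_module = VNC
#   vrde_port = 5900-5999,6000
#   vrde_unique_port = True
#
#   [rdp]
#   encrypted_rdp = True
#   security_method = TLS
#   server_certificate = /etc/nova/vrde/server_cert.pem
#   server_private_key = /etc/nova/vrde/server_key.pem
#   server_ca = /etc/nova/vrde/ca.pem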
CONF = cfg.CONF
CONF.register_opts(REMOTE_DISPLAY, 'virtualbox')
CONF.register_opts(ENCRYPTED_RDP, 'rdp')
LOG = logging.getLogger(__name__)
def _get_ports():
"""Process infromation regarding ports from config and return a list of
unique ports.
"""
ports = []
for group in CONF.virtualbox.vrde_port.split(','):
if '-' not in group:
try:
ports.append(int(group))
except ValueError:
continue
else:
start, stop = group.split('-', 1)
try:
ports.extend(range(int(start), int(stop) + 1))
except ValueError:
continue
return sorted(set(ports), reverse=True)
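# Illustrative sketch, not part of the original driver: a self-contained
# re-run of the parsing above for a hypothetical option value; defined but
# never called.
def _example_port_range_parsing():
    sample = '3389,5000-5002,bogus'
    ports = []
    for group in sample.split(','):
        if '-' not in group:
            try:
                ports.append(int(group))
            except ValueError:
                continue
        else:
            start, stop = group.split('-', 1)
            try:
                ports.extend(range(int(start), int(stop) + 1))
            except ValueError:
                continue
    return sorted(set(ports), reverse=True)  # -> [5002, 5001, 5000, 3389]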
class ConsoleOps(object):
"""Management class for operations related to remote display."""
_EXTPACK_NAME_REGEXP = r'Pack no. \d+:\s+(?P<name>.+)'
def __init__(self):
self._vbox_manage = manage.VBoxManage()
self._ext_packs = []
self._ports = {
'available': [], 'free': [], 'used': {},
'unique': CONF.virtualbox.vrde_unique_port
}
self._lock = threading.Lock()
self._remote_display = CONF.virtualbox.remote_display
self._vrde_module = CONF.virtualbox.vrde_module
self._load()
@property
def remote_display(self):
"""VirtualBox remote desktop extension (VRDE) server status."""
return self._remote_display
@property
def vrde_module(self):
"""Library witch implements the VRDE."""
return self._vrde_module
def _load(self):
"""Process information from hypervisor and config file."""
if self._remote_display:
self._get_ext_packs()
if self._ports['unique']:
self._ports['free'] = _get_ports()
else:
self._ports['available'] = _get_ports()
def _get_ext_packs(self):
"""Get package name for each extension pack installed."""
pack_name = re.compile(self._EXTPACK_NAME_REGEXP)
extpacks_output = self._vbox_manage.list(constants.EXTPACKS)
for line in extpacks_output.splitlines():
extpack = pack_name.search(line)
if not extpack:
continue
self._ext_packs.append(extpack.group('name').strip())
def _get_available_port(self):
"""Return first available port found."""
with self._lock:
if not self._ports['free']:
if self._ports['unique']:
return None
if not self._ports['available']:
return None
self._ports['free'] = list(self._ports['available'])
return self._ports['free'].pop()
def _get_vrde_port(self, instance):
"""Return the VRDE port for the received instance."""
port = self._ports['used'].get(instance.name, None)
if not port:
try:
instance_info = self._vbox_manage.show_vm_info(instance)
port = int(instance_info[constants.VM_VRDE_PORT])
except (ValueError, KeyError) as exc:
LOG.debug("Failed to get port for instance: %(reason)s",
{"reason": exc}, instance=instance)
except (exception.InstanceNotFound, vbox_exc.VBoxException) as exc:
LOG.debug("Failed to get information regarding "
"instance: %(reason)s",
{"reason": exc}, instance=instance)
return port
def _setup_rdp(self, instance):
"""Setup the RDP VRDE module."""
if not CONF.rdp.encrypted_rdp:
return
security_method = CONF.rdp.security_method
self._vbox_manage.modify_vrde(
instance=instance, field=constants.FIELD_VRDE_PROPERTY,
value=constants.VRDE_SECURITY_METHOD %
{"method": security_method})
if security_method in (constants.VRDE_SECURITY_TLS,
constants.VRDE_SECURITY_NEGOTIATE):
# NOTE(alexandrucoman): If the Security/Method property is set to
# either Negotiate or TLS, the TLS protocol will be automatically
# used by the server, if the client supports TLS.
# However, in order to use TLS the server must possess
# the Server Certificate, the Server Private Key and
# the Certificate Authority (CA) Certificate.
self._vbox_manage.modify_vrde(
instance=instance, field=constants.FIELD_VRDE_PROPERTY,
value=constants.VRDE_SECURITY_CA %
{"path": CONF.rdp.server_ca})
self._vbox_manage.modify_vrde(
instance=instance, field=constants.FIELD_VRDE_PROPERTY,
value=constants.VRDE_SECURITY_SERVER_CERT %
{"path": CONF.rdp.server_certificate})
self._vbox_manage.modify_vrde(
instance=instance, field=constants.FIELD_VRDE_PROPERTY,
value=constants.VRDE_SERCURITY_SERVER_PRIVATE_KEY %
{"path": CONF.rdp.server_private_key})
def _setup_vnc(self, instance):
"""Setup the VNC VRDE module."""
password = instance.uuid
if CONF.virtualbox.vrde_password_length:
password = password[:CONF.virtualbox.vrde_password_length]
self._vbox_manage.modify_vrde(
instance=instance, field=constants.FIELD_VRDE_PROPERTY,
value=constants.PROPERTY_VNC_PASSWORD %
{"password": password})
def setup_host(self):
"""Setup VirtualBox to use the received VirtualBox Remote
Desktop Extension if `remote_display` is enabled.
"""
if not self.remote_display:
LOG.debug("VRDE server is disabled.")
return
if self.vrde_module not in self._ext_packs:
LOG.warning(
i18n._LW("The `%(vrde_module)s` VRDE Module is not "
"available."),
{"vrde_module": self.vrde_module})
return
try:
self._vbox_manage.set_property(constants.VBOX_VRDE_EXTPACK,
self.vrde_module)
except vbox_exc.VBoxManageError as exc:
LOG.warning(
i18n._LW("Failed to set VRDE Module `%(vrde_module)s`: "
"%(reason)s"),
{"vrde_module": self.vrde_module, "reason": exc})
return False
LOG.info(i18n._LI("The VRDE Module used is %(vrde_module)s"),
{"vrde_module": self.vrde_module})
return True
def _enable_vrde(self, instance):
port = self._get_available_port()
if not port:
raise vbox_exc.VBoxException(
i18n._LE("No available port was found."))
self._ports['used'][instance.name] = port
self._vbox_manage.modify_vrde(instance=instance,
field=constants.FIELD_VRDE_SERVER,
value=constants.ON)
self._vbox_manage.modify_vrde(instance=instance,
field=constants.FIELD_VRDE_PORT,
value=port)
if self.vrde_module == constants.EXTPACK_VNC:
self._setup_vnc(instance)
elif self.vrde_module == constants.EXTPACK_RDP:
self._setup_rdp(instance)
def prepare_instance(self, instance):
"""Modify the instance settings in order to properly work remote
display.
"""
if self.remote_display:
# Enable VRDE Server
LOG.debug("Try to enable the VRDE Server.")
try:
self._enable_vrde(instance)
except vbox_exc.VBoxException as error:
LOG.warning(i18n._LW("Enabling VRDE Server failed: %(error)s"),
{"error": error})
else:
return
# Disable VRDE Server
LOG.debug("Try to disable the VRDE Server.")
try:
self._vbox_manage.modify_vrde(instance=instance,
field=constants.FIELD_VRDE_SERVER,
value=constants.OFF)
except vbox_exc.VBoxManageError as error:
LOG.warning(i18n._LW("Disabling VRDE Server failed: %(error)s"),
{"error": error})
def cleanup(self, instance):
"""Clean up the resources allocated for the instance."""
LOG.debug("cleanup called", instance=instance)
with self._lock:
self._ports['used'].pop(instance.name, None)
port = self._get_vrde_port(instance)
if port and self._ports['unique']:
self._ports['free'].append(port)
def get_vnc_console(self, instance):
"""Get connection info for a vnc console."""
LOG.debug("get_vnc_console called", instance=instance)
if self.remote_display and self.vrde_module == constants.EXTPACK_VNC:
host = hostutils.get_ip()
port = self._get_vrde_port(instance)
if port:
LOG.debug("VNC console: %(host)s:%(port)s",
{"host": host, "port": port})
return console_type.ConsoleVNC(host=host, port=port)
else:
LOG.warning(i18n._LW("VNC port not found!"), instance=instance)
else:
LOG.warning(i18n._LW("VNC console is not available for this"
" instance."),
instance=instance)
raise exception.ConsoleTypeUnavailable(console_type='vnc')
def get_rdp_console(self, instance):
"""Get connection info for a rdp console."""
LOG.debug("get_rdp_console called", instance=instance)
if self.remote_display and self.vrde_module == constants.EXTPACK_RDP:
host = hostutils.get_ip()
access_path = None if self._ports['unique'] else instance.name
port = self._get_vrde_port(instance)
if port:
LOG.debug("RDP console: %(host)s:%(port)s, %(path)s",
{"host": host, "port": port, "path": access_path})
return console_type.ConsoleRDP(
host=host, port=port, internal_access_path=access_path)
else:
LOG.warning(i18n._LW("RDP port not found."), instance=instance)
else:
LOG.warning(i18n._LW("VNC console is not available for this "
"instance."),
instance=instance)
raise exception.ConsoleTypeUnavailable(console_type='rdp')
| {
"content_hash": "b5e2bb8fe66c971f6ab8d0926be024db",
"timestamp": "",
"source": "github",
"line_count": 323,
"max_line_length": 79,
"avg_line_length": 39.06501547987616,
"alnum_prop": 0.5656205420827389,
"repo_name": "alexandrucoman/vbox-nova-driver",
"id": "e2fff7f6635a661925540a69128ae09ee5818341",
"size": "13257",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "nova/virt/virtualbox/consoleops.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "16479479"
},
{
"name": "Shell",
"bytes": "20716"
},
{
"name": "Smarty",
"bytes": "406109"
}
],
"symlink_target": ""
} |
from firefox.webdriver import WebDriver as Firefox
from firefox.firefox_profile import FirefoxProfile
from chrome.webdriver import WebDriver as Chrome
from chrome.options import Options as ChromeOptions
from ie.webdriver import WebDriver as Ie
from opera.webdriver import WebDriver as Opera
from remote.webdriver import WebDriver as Remote
from common.desired_capabilities import DesiredCapabilities
from common.action_chains import ActionChains
from common.touch_actions import TouchActions
from common.proxy import Proxy
__version__ = '2.25.0'
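# Illustrative sketch, not part of the original package: typical use of the
# names re-exported above (the URL and element id are hypothetical).
#
#   from selenium import webdriver
#   driver = webdriver.Firefox()
#   driver.get('http://example.com')
#   driver.find_element_by_id('q').send_keys('hello')
#   driver.quit()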
| {
"content_hash": "cf1eb8c297ae58ad6669abc826daddd7",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 59,
"avg_line_length": 42.07692307692308,
"alnum_prop": 0.8464351005484461,
"repo_name": "virajs/selenium-1",
"id": "1860aca8a06346dc293785724dc3c83b372eb83e",
"size": "1196",
"binary": false,
"copies": "2",
"ref": "refs/heads/trunk",
"path": "py/selenium/webdriver/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ASP",
"bytes": "853"
},
{
"name": "AppleScript",
"bytes": "2614"
},
{
"name": "Batchfile",
"bytes": "146"
},
{
"name": "C",
"bytes": "49553"
},
{
"name": "C#",
"bytes": "2121309"
},
{
"name": "C++",
"bytes": "1378200"
},
{
"name": "CSS",
"bytes": "23536"
},
{
"name": "HTML",
"bytes": "1941562"
},
{
"name": "Java",
"bytes": "7891822"
},
{
"name": "JavaScript",
"bytes": "3974471"
},
{
"name": "Makefile",
"bytes": "4655"
},
{
"name": "Objective-C",
"bytes": "383340"
},
{
"name": "Objective-C++",
"bytes": "21844"
},
{
"name": "Python",
"bytes": "587982"
},
{
"name": "Ragel in Ruby Host",
"bytes": "3086"
},
{
"name": "Ruby",
"bytes": "760140"
},
{
"name": "Shell",
"bytes": "6393"
},
{
"name": "XSLT",
"bytes": "1083"
}
],
"symlink_target": ""
} |
import pickle
import re
import nltk
from itertools import islice,izip
def get_words_in_tweet(tweet):
pairs = []
#for (words) in tweet:
# all_words.extend(words)
#all_words=re.sub("[^\w]", " ", tweet).split()
tokens=nltk.word_tokenize(tweet)
#pairs = [ " ".join(pair) for pair in nltk.bigrams(tokens)]
pairs=nltk.bigrams(tokens)
#print pairs
return pairs
def get_word_features(wordlist):
# print wordlist
wordlist = nltk.FreqDist(wordlist)
# print wordlist
word_features = wordlist.keys()
# print word_features
return word_features
def extract_features(document):
document_words = set(document)
features = {}
for word in word_features:
features['contains(%s %s)' % (word)] = (word in document_words)
# print features
return features
f = open('bigram_classifier.pickle', 'rb')
classifier = pickle.load(f)
open('classified.txt', 'w').close()
open('hello.txt', 'w').close()
execfile('pre_processing.py')
File=open("twitDB46.csv")
File1=open("pre_processing.txt")
N=50
for i in range(N):
original_line=File.next().strip()
original_tweet=original_line
processed_line=File1.next().strip()
processed_tweet=processed_line
# print "ello"
word_features = get_word_features(get_words_in_tweet(processed_tweet))
#print word_features
classified_Tweet=classifier.classify(extract_features(processed_tweet.split()))+ " "+original_tweet
saveFile=open('classified.txt','a')
saveFile.write(classified_Tweet)
saveFile.write('\n')
f.close()
File.close()
File1.close()
saveFile.close()
| {
"content_hash": "10b7db65ed99e515a98051b43c399f86",
"timestamp": "",
"source": "github",
"line_count": 63,
"max_line_length": 103,
"avg_line_length": 25.714285714285715,
"alnum_prop": 0.6697530864197531,
"repo_name": "anuragaroraaa/Sentiment-Analysis-of-Online-Reviews",
"id": "bb2adaf100f06773ed7f8b19f48b24548e1cc4ea",
"size": "1620",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "analysisbigram.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "132140"
},
{
"name": "JavaScript",
"bytes": "5678"
},
{
"name": "Python",
"bytes": "14255"
},
{
"name": "Tcl",
"bytes": "1237789"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('base', '0002_initial_data'),
]
operations = [
migrations.AlterField(
model_name='userprofile',
name='user',
field=models.OneToOneField(related_name='profile', to=settings.AUTH_USER_MODEL),
),
]
| {
"content_hash": "3a98d9f08b668ee7c2cc15676ef1c782",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 92,
"avg_line_length": 23.526315789473685,
"alnum_prop": 0.6263982102908278,
"repo_name": "yfdyh000/pontoon",
"id": "d0dc866c200e724bdff6b3029b67f967941d860c",
"size": "471",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "pontoon/base/migrations/0003_auto_user_profile_related_name.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "118157"
},
{
"name": "HTML",
"bytes": "65167"
},
{
"name": "JavaScript",
"bytes": "122349"
},
{
"name": "Python",
"bytes": "491835"
},
{
"name": "Shell",
"bytes": "126"
}
],
"symlink_target": ""
} |
"""Tests for tf.strings.to_bytes op."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
from tensorflow.python.eager import def_function
from tensorflow.python.framework import tensor_spec
from tensorflow.python.framework import test_util
from tensorflow.python.ops.ragged import ragged_factory_ops
from tensorflow.python.ops.ragged import ragged_string_ops
from tensorflow.python.platform import test
class StringsToBytesOpTest(test_util.TensorFlowTestCase,
parameterized.TestCase):
@parameterized.parameters(
# Scalar input -> vector output
(b'hello', [b'h', b'e', b'l', b'l', b'o']),
# Vector input -> 2D ragged output
([b'hello', b'123'],
[[b'h', b'e', b'l', b'l', b'o'], [b'1', b'2', b'3']]),
# 2D tensor input -> 3D ragged output
([[b'abc', b'de'], [b'fgh', b'']],
[[[b'a', b'b', b'c'], [b'd', b'e']], [[b'f', b'g', b'h'], []]]),
# 2D ragged input -> 3D ragged output
(ragged_factory_ops.constant_value([[b'abc', b'de'], [b'f']]),
[[[b'a', b'b', b'c'], [b'd', b'e']], [[b'f']]]),
# 3D input -> 4D ragged output
(ragged_factory_ops.constant_value(
[[[b'big', b'small'], [b'red']], [[b'cat', b'dog'], [b'ox']]]),
[[[[b'b', b'i', b'g'], [b's', b'm', b'a', b'l', b'l']],
[[b'r', b'e', b'd']]],
[[[b'c', b'a', b't'], [b'd', b'o', b'g']],
[[b'o', b'x']]]]),
# Empty string
(b'', []),
# Null byte
(b'\x00', [b'\x00']),
# Unicode
(u'仅今年前'.encode('utf-8'),
[b'\xe4', b'\xbb', b'\x85', b'\xe4', b'\xbb', b'\x8a', b'\xe5',
b'\xb9', b'\xb4', b'\xe5', b'\x89', b'\x8d']),
)
def testStringToBytes(self, source, expected):
expected = ragged_factory_ops.constant_value(expected, dtype=object)
result = ragged_string_ops.string_bytes_split(source)
self.assertAllEqual(expected, result)
def testUnknownInputRankError(self):
# Use a tf.function that erases shape information.
@def_function.function(input_signature=[tensor_spec.TensorSpec(None)])
def f(v):
return ragged_string_ops.string_bytes_split(v)
with self.assertRaisesRegexp(ValueError,
'input must have a statically-known rank'):
f(['foo'])
if __name__ == '__main__':
test.main()
| {
"content_hash": "8432799a7625fe1989526d5deac36549",
"timestamp": "",
"source": "github",
"line_count": 66,
"max_line_length": 76,
"avg_line_length": 36.696969696969695,
"alnum_prop": 0.5664739884393064,
"repo_name": "gunan/tensorflow",
"id": "8a4f5edc5193fd33dbffa9b68ea5d4e3fc69a9c1",
"size": "3143",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tensorflow/python/kernel_tests/string_bytes_split_op_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "5003"
},
{
"name": "Batchfile",
"bytes": "45924"
},
{
"name": "C",
"bytes": "774953"
},
{
"name": "C#",
"bytes": "8562"
},
{
"name": "C++",
"bytes": "77908225"
},
{
"name": "CMake",
"bytes": "6500"
},
{
"name": "Dockerfile",
"bytes": "104215"
},
{
"name": "Go",
"bytes": "1841471"
},
{
"name": "HTML",
"bytes": "4686483"
},
{
"name": "Java",
"bytes": "962443"
},
{
"name": "Jupyter Notebook",
"bytes": "556650"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "MLIR",
"bytes": "1479029"
},
{
"name": "Makefile",
"bytes": "58603"
},
{
"name": "Objective-C",
"bytes": "104667"
},
{
"name": "Objective-C++",
"bytes": "297830"
},
{
"name": "PHP",
"bytes": "23994"
},
{
"name": "Pascal",
"bytes": "3739"
},
{
"name": "Pawn",
"bytes": "17039"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "Python",
"bytes": "39476740"
},
{
"name": "RobotFramework",
"bytes": "891"
},
{
"name": "Roff",
"bytes": "2472"
},
{
"name": "Ruby",
"bytes": "7459"
},
{
"name": "Shell",
"bytes": "650007"
},
{
"name": "Smarty",
"bytes": "34649"
},
{
"name": "Swift",
"bytes": "62814"
},
{
"name": "Vim Snippet",
"bytes": "58"
}
],
"symlink_target": ""
} |
"""Implementation of JSONEncoder
"""
import re
from decimal import Decimal
def _import_speedups():
try:
from simplejson import _speedups
return _speedups.encode_basestring_ascii, _speedups.make_encoder
except ImportError:
return None, None
c_encode_basestring_ascii, c_make_encoder = _import_speedups()
from simplejson.decoder import PosInf
ESCAPE = re.compile(r'[\x00-\x1f\\"\b\f\n\r\t]')
ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
HAS_UTF8 = re.compile(r'[\x80-\xff]')
ESCAPE_DCT = {
'\\': '\\\\',
'"': '\\"',
'\b': '\\b',
'\f': '\\f',
'\n': '\\n',
'\r': '\\r',
'\t': '\\t',
}
for i in range(0x20):
#ESCAPE_DCT.setdefault(chr(i), '\\u{0:04x}'.format(i))
ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))
FLOAT_REPR = repr
def encode_basestring(s):
"""Return a JSON representation of a Python string
"""
if isinstance(s, str) and HAS_UTF8.search(s) is not None:
s = s.decode('utf-8')
def replace(match):
return ESCAPE_DCT[match.group(0)]
return u'"' + ESCAPE.sub(replace, s) + u'"'
def py_encode_basestring_ascii(s):
"""Return an ASCII-only JSON representation of a Python string
"""
if isinstance(s, str) and HAS_UTF8.search(s) is not None:
s = s.decode('utf-8')
def replace(match):
s = match.group(0)
try:
return ESCAPE_DCT[s]
except KeyError:
n = ord(s)
if n < 0x10000:
#return '\\u{0:04x}'.format(n)
return '\\u%04x' % (n,)
else:
# surrogate pair
n -= 0x10000
s1 = 0xd800 | ((n >> 10) & 0x3ff)
s2 = 0xdc00 | (n & 0x3ff)
#return '\\u{0:04x}\\u{1:04x}'.format(s1, s2)
return '\\u%04x\\u%04x' % (s1, s2)
return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'
encode_basestring_ascii = (
c_encode_basestring_ascii or py_encode_basestring_ascii)
class JSONEncoder(object):
"""Extensible JSON <http://json.org> encoder for Python data structures.
Supports the following objects and types by default:
+-------------------+---------------+
| Python | JSON |
+===================+===============+
| dict | object |
+-------------------+---------------+
| list, tuple | array |
+-------------------+---------------+
| str, unicode | string |
+-------------------+---------------+
| int, long, float | number |
+-------------------+---------------+
| True | true |
+-------------------+---------------+
| False | false |
+-------------------+---------------+
| None | null |
+-------------------+---------------+
To extend this to recognize other objects, subclass and implement a
``.default()`` method with another method that returns a serializable
object for ``o`` if possible, otherwise it should call the superclass
implementation (to raise ``TypeError``).
"""
item_separator = ', '
key_separator = ': '
def __init__(self, skipkeys=False, ensure_ascii=True,
check_circular=True, allow_nan=True, sort_keys=False,
indent=None, separators=None, encoding='utf-8', default=None,
use_decimal=False):
"""Constructor for JSONEncoder, with sensible defaults.
If skipkeys is false, then it is a TypeError to attempt
encoding of keys that are not str, int, long, float or None. If
skipkeys is True, such items are simply skipped.
If ensure_ascii is true, the output is guaranteed to be str
objects with all incoming unicode characters escaped. If
ensure_ascii is false, the output will be unicode object.
If check_circular is true, then lists, dicts, and custom encoded
objects will be checked for circular references during encoding to
prevent an infinite recursion (which would cause an OverflowError).
Otherwise, no such check takes place.
If allow_nan is true, then NaN, Infinity, and -Infinity will be
encoded as such. This behavior is not JSON specification compliant,
but is consistent with most JavaScript based encoders and decoders.
Otherwise, it will be a ValueError to encode such floats.
If sort_keys is true, then the output of dictionaries will be
sorted by key; this is useful for regression tests to ensure
that JSON serializations can be compared on a day-to-day basis.
If indent is a string, then JSON array elements and object members
will be pretty-printed with a newline followed by that string repeated
for each level of nesting. ``None`` (the default) selects the most compact
representation without any newlines. For backwards compatibility with
versions of simplejson earlier than 2.1.0, an integer is also accepted
and is converted to a string with that many spaces.
If specified, separators should be a (item_separator, key_separator)
tuple. The default is (', ', ': '). To get the most compact JSON
representation you should specify (',', ':') to eliminate whitespace.
If specified, default is a function that gets called for objects
that can't otherwise be serialized. It should return a JSON encodable
version of the object or raise a ``TypeError``.
If encoding is not None, then all input strings will be
transformed into unicode using that encoding prior to JSON-encoding.
The default is UTF-8.
If use_decimal is true (not the default), ``decimal.Decimal`` will
be supported directly by the encoder. For the inverse, decode JSON
with ``parse_float=decimal.Decimal``.
"""
self.skipkeys = skipkeys
self.ensure_ascii = ensure_ascii
self.check_circular = check_circular
self.allow_nan = allow_nan
self.sort_keys = sort_keys
self.use_decimal = use_decimal
if isinstance(indent, (int, long)):
indent = ' ' * indent
self.indent = indent
if separators is not None:
self.item_separator, self.key_separator = separators
if default is not None:
self.default = default
self.encoding = encoding
def default(self, o):
"""Implement this method in a subclass such that it returns
a serializable object for ``o``, or calls the base implementation
(to raise a ``TypeError``).
For example, to support arbitrary iterators, you could
implement default like this::
def default(self, o):
try:
iterable = iter(o)
except TypeError:
pass
else:
return list(iterable)
return JSONEncoder.default(self, o)
"""
raise TypeError(repr(o) + " is not JSON serializable")
def encode(self, o):
"""Return a JSON string representation of a Python data structure.
>>> from simplejson import JSONEncoder
>>> JSONEncoder().encode({"foo": ["bar", "baz"]})
'{"foo": ["bar", "baz"]}'
"""
# This is for extremely simple cases and benchmarks.
if isinstance(o, basestring):
if isinstance(o, str):
_encoding = self.encoding
if (_encoding is not None
and not (_encoding == 'utf-8')):
o = o.decode(_encoding)
if self.ensure_ascii:
return encode_basestring_ascii(o)
else:
return encode_basestring(o)
# This doesn't pass the iterator directly to ''.join() because the
# exceptions aren't as detailed. The list call should be roughly
# equivalent to the PySequence_Fast that ''.join() would do.
chunks = self.iterencode(o, _one_shot=True)
if not isinstance(chunks, (list, tuple)):
chunks = list(chunks)
if self.ensure_ascii:
return ''.join(chunks)
else:
return u''.join(chunks)
def iterencode(self, o, _one_shot=False):
"""Encode the given object and yield each string
representation as available.
For example::
for chunk in JSONEncoder().iterencode(bigobject):
mysocket.write(chunk)
"""
if self.check_circular:
markers = {}
else:
markers = None
if self.ensure_ascii:
_encoder = encode_basestring_ascii
else:
_encoder = encode_basestring
if self.encoding != 'utf-8':
def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding):
if isinstance(o, str):
o = o.decode(_encoding)
return _orig_encoder(o)
def floatstr(o, allow_nan=self.allow_nan,
_repr=FLOAT_REPR, _inf=PosInf, _neginf=-PosInf):
# Check for specials. Note that this type of test is processor
# and/or platform-specific, so do tests which don't depend on
# the internals.
if o != o:
text = 'NaN'
elif o == _inf:
text = 'Infinity'
elif o == _neginf:
text = '-Infinity'
else:
return _repr(o)
if not allow_nan:
raise ValueError(
"Out of range float values are not JSON compliant: " +
repr(o))
return text
key_memo = {}
if (_one_shot and c_make_encoder is not None
and not self.indent and not self.sort_keys):
_iterencode = c_make_encoder(
markers, self.default, _encoder, self.indent,
self.key_separator, self.item_separator, self.sort_keys,
self.skipkeys, self.allow_nan, key_memo, self.use_decimal)
else:
_iterencode = _make_iterencode(
markers, self.default, _encoder, self.indent, floatstr,
self.key_separator, self.item_separator, self.sort_keys,
self.skipkeys, _one_shot, self.use_decimal)
try:
return _iterencode(o, 0)
finally:
key_memo.clear()
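# Illustrative sketch, not part of the original module: a subclass that
# serializes sets as lists, following the pattern described in the default()
# docstring above. The class name is hypothetical.
class _ExampleSetEncoder(JSONEncoder):
    def default(self, o):
        if isinstance(o, (set, frozenset)):
            return list(o)
        return JSONEncoder.default(self, o)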
class JSONEncoderForHTML(JSONEncoder):
"""An encoder that produces JSON safe to embed in HTML.
To embed JSON content in, say, a script tag on a web page, the
characters &, < and > should be escaped. They cannot be escaped
    with the usual entities (e.g. ``&amp;``) because they are not expanded
within <script> tags.
"""
def encode(self, o):
# Override JSONEncoder.encode because it has hacks for
# performance that make things more complicated.
chunks = self.iterencode(o, True)
if self.ensure_ascii:
return ''.join(chunks)
else:
return u''.join(chunks)
def iterencode(self, o, _one_shot=False):
chunks = super(JSONEncoderForHTML, self).iterencode(o, _one_shot)
for chunk in chunks:
chunk = chunk.replace('&', '\\u0026')
chunk = chunk.replace('<', '\\u003c')
chunk = chunk.replace('>', '\\u003e')
yield chunk
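# Illustrative sketch, not part of the original module: the HTML-safe encoder
# above escapes &, < and > so its output can be embedded inside a <script>
# tag. Defined but never called; the sample payload is hypothetical.
def _example_html_safe_encoding():
    payload = {'snippet': '<p>Tom & Jerry</p>'}
    plain = JSONEncoder().encode(payload)
    safe = JSONEncoderForHTML().encode(payload)
    # `safe` contains \u003c, \u003e and \u0026 where `plain` keeps the raw
    # characters.
    return plain, safe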
def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
_key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot,
_use_decimal,
## HACK: hand-optimized bytecode; turn globals into locals
False=False,
True=True,
ValueError=ValueError,
basestring=basestring,
Decimal=Decimal,
dict=dict,
float=float,
id=id,
int=int,
isinstance=isinstance,
list=list,
long=long,
str=str,
tuple=tuple,
):
def _iterencode_list(lst, _current_indent_level):
if not lst:
yield '[]'
return
if markers is not None:
markerid = id(lst)
if markerid in markers:
raise ValueError("Circular reference detected")
markers[markerid] = lst
buf = '['
if _indent is not None:
_current_indent_level += 1
newline_indent = '\n' + (_indent * _current_indent_level)
separator = _item_separator + newline_indent
buf += newline_indent
else:
newline_indent = None
separator = _item_separator
first = True
for value in lst:
if first:
first = False
else:
buf = separator
if isinstance(value, basestring):
yield buf + _encoder(value)
elif value is None:
yield buf + 'null'
elif value is True:
yield buf + 'true'
elif value is False:
yield buf + 'false'
elif isinstance(value, (int, long)):
yield buf + str(value)
elif isinstance(value, float):
yield buf + _floatstr(value)
elif _use_decimal and isinstance(value, Decimal):
yield buf + str(value)
else:
yield buf
if isinstance(value, (list, tuple)):
chunks = _iterencode_list(value, _current_indent_level)
elif isinstance(value, dict):
chunks = _iterencode_dict(value, _current_indent_level)
else:
chunks = _iterencode(value, _current_indent_level)
for chunk in chunks:
yield chunk
if newline_indent is not None:
_current_indent_level -= 1
yield '\n' + (_indent * _current_indent_level)
yield ']'
if markers is not None:
del markers[markerid]
def _iterencode_dict(dct, _current_indent_level):
if not dct:
yield '{}'
return
if markers is not None:
markerid = id(dct)
if markerid in markers:
raise ValueError("Circular reference detected")
markers[markerid] = dct
yield '{'
if _indent is not None:
_current_indent_level += 1
newline_indent = '\n' + (_indent * _current_indent_level)
item_separator = _item_separator + newline_indent
yield newline_indent
else:
newline_indent = None
item_separator = _item_separator
first = True
if _sort_keys:
items = dct.items()
items.sort(key=lambda kv: kv[0])
else:
items = dct.iteritems()
for key, value in items:
if isinstance(key, basestring):
pass
# JavaScript is weakly typed for these, so it makes sense to
# also allow them. Many encoders seem to do something like this.
elif isinstance(key, float):
key = _floatstr(key)
elif key is True:
key = 'true'
elif key is False:
key = 'false'
elif key is None:
key = 'null'
elif isinstance(key, (int, long)):
key = str(key)
elif _skipkeys:
continue
else:
raise TypeError("key " + repr(key) + " is not a string")
if first:
first = False
else:
yield item_separator
yield _encoder(key)
yield _key_separator
if isinstance(value, basestring):
yield _encoder(value)
elif value is None:
yield 'null'
elif value is True:
yield 'true'
elif value is False:
yield 'false'
elif isinstance(value, (int, long)):
yield str(value)
elif isinstance(value, float):
yield _floatstr(value)
elif _use_decimal and isinstance(value, Decimal):
yield str(value)
else:
if isinstance(value, (list, tuple)):
chunks = _iterencode_list(value, _current_indent_level)
elif isinstance(value, dict):
chunks = _iterencode_dict(value, _current_indent_level)
else:
chunks = _iterencode(value, _current_indent_level)
for chunk in chunks:
yield chunk
if newline_indent is not None:
_current_indent_level -= 1
yield '\n' + (_indent * _current_indent_level)
yield '}'
if markers is not None:
del markers[markerid]
def _iterencode(o, _current_indent_level):
if isinstance(o, basestring):
yield _encoder(o)
elif o is None:
yield 'null'
elif o is True:
yield 'true'
elif o is False:
yield 'false'
elif isinstance(o, (int, long)):
yield str(o)
elif isinstance(o, float):
yield _floatstr(o)
elif isinstance(o, (list, tuple)):
for chunk in _iterencode_list(o, _current_indent_level):
yield chunk
elif isinstance(o, dict):
for chunk in _iterencode_dict(o, _current_indent_level):
yield chunk
elif _use_decimal and isinstance(o, Decimal):
yield str(o)
else:
if markers is not None:
markerid = id(o)
if markerid in markers:
raise ValueError("Circular reference detected")
markers[markerid] = o
o = _default(o)
for chunk in _iterencode(o, _current_indent_level):
yield chunk
if markers is not None:
del markers[markerid]
return _iterencode
| {
"content_hash": "43a588b43e1b6fa7194ab57ab3540e8e",
"timestamp": "",
"source": "github",
"line_count": 501,
"max_line_length": 82,
"avg_line_length": 37.355289421157686,
"alnum_prop": 0.5177130643868555,
"repo_name": "sunner/buzz2weibo",
"id": "2d7b8eb7444f0342ed01c6cd66192252c821b4cd",
"size": "18715",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "simplejson/encoder.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "183135"
}
],
"symlink_target": ""
} |
"""
__init__.py
~~~~~~~~~~~
no description available
:copyright: (c) 2015 by diamondman.
:license: see LICENSE for more details.
"""
from __future__ import print_function
__title__ = 's3streamer'
__version__ = '0.0.1'
__author__ = 'diamondman'
from .streamer import S3Streamer
| {
"content_hash": "bf54ef4c62dfe38c76b51138550fb868",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 43,
"avg_line_length": 18.8125,
"alnum_prop": 0.6079734219269103,
"repo_name": "diamondman/pys3streamer",
"id": "3f1100d1a7e4e8465714e85803c436685454c635",
"size": "348",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "s3streamer/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "10348"
}
],
"symlink_target": ""
} |
from lib.common import helpers
class Module:
def __init__(self, mainMenu, params=[]):
# Metadata info about the module, not modified during runtime
self.info = {
# Name for the module that will appear in module menus
'Name': 'Invoke-DropboxUpload',
# List of one or more authors for the module
'Author': ['[email protected]','Laurent Kempe'],
# More verbose multi-line description of the module
'Description': ('Upload a file to dropbox '),
# True if the module needs to run in the background
'Background': False,
# File extension to save the file as
'OutputExtension': None,
# True if the module needs admin rights to run
'NeedsAdmin': False,
# True if the method doesn't touch disk/is reasonably opsec safe
'OpsecSafe': True,
# The language for this module
'Language': 'powershell',
# The minimum PowerShell version needed for the module to run
'MinLanguageVersion': '2',
# List of any references/other comments
'Comments': [
'Uploads specified file to dropbox ',
'Ported to powershell2 from script by Laurent Kempe: http://laurentkempe.com/2016/04/07/Upload-files-to-DropBox-from-PowerShell/',
'Use forward slashes for the TargetFilePath'
]
}
# Any options needed by the module, settable during runtime
self.options = {
# Format:
# value_name : {description, required, default_value}
'Agent': {
# The 'Agent' option is the only one that MUST be in a module
'Description': 'Agent to use',
'Required' : True,
'Value' : ''
},
'SourceFilePath': {
'Description': '/path/to/file',
'Required' : True,
'Value' : ''
},
'TargetFilePath': {
'Description': '/path/to/dropbox/file',
'Required': True,
'Value': ''
},
'ApiKey': {
'Description': 'Your dropbox api key',
'Required': True,
'Value': ''
}
}
# Save off a copy of the mainMenu object to access external
# functionality like listeners/agent handlers/etc.
self.mainMenu = mainMenu
# During instantiation, any settable option parameters are passed as
# an object set to the module and the options dictionary is
# automatically set. This is mostly in case options are passed on
# the command line.
if params:
for param in params:
# Parameter format is [Name, Value]
option, value = param
if option in self.options:
self.options[option]['Value'] = value
def generate(self):
script = """
function Invoke-DropboxUpload {
Param(
[Parameter(Mandatory=$true)]
[string]$SourceFilePath,
[Parameter(Mandatory=$true)]
[string]$TargetFilePath,
[Parameter(mandatory=$true)]
[string]$ApiKey
)
$url = "https://content.dropboxapi.com/2/files/upload"
$file = [IO.File]::ReadAllBytes($SourceFilePath)
[net.httpWebRequest] $req = [net.webRequest]::create($url)
$arg = '{ "path": "' + $TargetFilePath + '", "mode": "add", "autorename": true, "mute": false }'
$authorization = "Bearer " + $ApiKey
$req.method = "POST"
$req.Headers.Add("Authorization", $authorization)
$req.Headers.Add("Dropbox-API-Arg", $arg)
$req.ContentType = 'application/octet-stream'
$req.ContentLength = $file.length
$req.TimeOut = 50000
$req.KeepAlive = $true
$req.Headers.Add("Keep-Alive: 300");
$reqst = $req.getRequestStream()
$reqst.write($file, 0, $file.length)
$reqst.flush()
$reqst.close()
[net.httpWebResponse] $res = $req.getResponse()
$resst = $res.getResponseStream()
$sr = new-object IO.StreamReader($resst)
$result = $sr.ReadToEnd()
$result
$res.close()
}
Invoke-DropboxUpload """
# Add any arguments to the end execution of the script
for option, values in self.options.iteritems():
if option.lower() != "agent":
if values['Value'] and values['Value'] != '':
if values['Value'].lower() == "true":
# if we're just adding a switch
script += " -" + str(option)
else:
script += " -" + str(option) + " " + str(values['Value'])
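        # For illustration only (paths and key are hypothetical, and option
        # order follows dict iteration), the generated call ends up like:
        #   Invoke-DropboxUpload -SourceFilePath /tmp/report.zip -TargetFilePath /reports/report.zip -ApiKey <token>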
return script
| {
"content_hash": "bac492abbba4598acd746ecf5a5eb4bd",
"timestamp": "",
"source": "github",
"line_count": 141,
"max_line_length": 146,
"avg_line_length": 33.57446808510638,
"alnum_prop": 0.5521757498943811,
"repo_name": "frohoff/Empire",
"id": "a6b70b01a45cf98d216010c0add1e58a89e3474c",
"size": "4734",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "lib/modules/powershell/exfiltration/exfil_dropbox.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "1966"
},
{
"name": "Java",
"bytes": "496"
},
{
"name": "Objective-C",
"bytes": "2664"
},
{
"name": "PHP",
"bytes": "2041"
},
{
"name": "PowerShell",
"bytes": "16082240"
},
{
"name": "Python",
"bytes": "2601583"
},
{
"name": "Shell",
"bytes": "7945"
}
],
"symlink_target": ""
} |
import codecs
import os
import re
from setuptools import setup, find_packages
##################################################################
NAME = 'auacm'
PACKAGES = find_packages(where='src')
META_PATH = os.path.join('src', 'auacm', '__init__.py')
KEYWORDS = ['competitive', 'icpc', 'auacm', 'auburn']
CLASSIFIERS = [
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.4'
]
INSTALL_REQUIRES = ['requests']
##################################################################
HERE = os.path.abspath(os.path.dirname(__file__))
def read(*parts):
"""
Build an absolute file path from *parts* and return the contents
    of the resulting file. Assume UTF-8 encoding.
"""
try:
with codecs.open(os.path.join(HERE, *parts), "rb", "utf-8") as fil:
return fil.read()
except:
pass
META_FILE = read(META_PATH)
def find_meta(meta):
"""Extract __*meta*__ from META_FILE"""
meta_match = re.search(
r"^__{meta}__ = ['\"]([^'\"]*)['\"]".format(meta=meta),
META_FILE, re.M
)
if meta_match:
return meta_match.group(1)
raise RuntimeError('Unable to find __{meta}__ string.'.format(meta=meta))
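# Illustrative sketch (the version string below is hypothetical): if
# src/auacm/__init__.py contains a line such as __version__ = '1.2.3',
# then find_meta('version') returns '1.2.3'.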
if __name__ == '__main__':
setup(
name=NAME,
description=find_meta('description'),
license=find_meta('license'),
url=find_meta('uri'),
download_url=find_meta('uri') + '/tarball/' + find_meta('version'),
version=find_meta('version'),
author=find_meta('author'),
author_email=find_meta('email'),
maintainer=find_meta('author'),
maintainer_email=find_meta('email'),
keywords=KEYWORDS,
long_description=read('README.md'),
packages=PACKAGES,
package_dir={'': 'src'},
scripts=['bin/auacm'],
zip_safe=False,
classifiers=CLASSIFIERS,
install_requires=INSTALL_REQUIRES,
)
| {
"content_hash": "4f217d53f29fc238eba21d7246f79275",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 77,
"avg_line_length": 29.573529411764707,
"alnum_prop": 0.5494778717056191,
"repo_name": "BrandonLMorris/auacm-cli",
"id": "53da2bfd5f4665c100745db837b4e631c72a8870",
"size": "2011",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "37369"
}
],
"symlink_target": ""
} |
"""
Tts API
Description # noqa: E501
The version of the OpenAPI document: 2.0.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
from setuptools import setup, find_packages # noqa: H301
NAME = "telestream-cloud-tts"
VERSION = "2.1.0"
# To install the library, run the following
#
# python setup.py install
#
# prerequisite: setuptools
# http://pypi.python.org/pypi/setuptools
REQUIRES = ["urllib3 >= 1.15", "six >= 1.10", "certifi", "python-dateutil"]
setup(
name=NAME,
version=VERSION,
description="Tts API",
author="Telestream, LLC.",
author_email="[email protected]",
url="https://github.com/telestream/telestream-cloud-python-sdk",
keywords=["Telestream"],
install_requires=REQUIRES,
packages=find_packages(exclude=["test", "tests"]),
include_package_data=True,
license="Apache 2.0",
long_description="""\
Description # noqa: E501
"""
)
| {
"content_hash": "5f8bac9414aeb23d834e6f52c031c8cc",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 75,
"avg_line_length": 24.35,
"alnum_prop": 0.6704312114989733,
"repo_name": "Telestream/telestream-cloud-python-sdk",
"id": "e5aa6f244bd7a00968c326ff14b20f0eaf586b35",
"size": "991",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "telestream_cloud_tts_sdk/setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1339719"
},
{
"name": "Shell",
"bytes": "6712"
}
],
"symlink_target": ""
} |
from aiohttp import web
from pinkerton.app import app
from pinkerton.settings import BIND_HOST, BIND_PORT
if __name__ == '__main__':
web.run_app(app, host=BIND_HOST, port=BIND_PORT)
| {
"content_hash": "db643a965b118959eed4948320512e3f",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 52,
"avg_line_length": 26.857142857142858,
"alnum_prop": 0.7127659574468085,
"repo_name": "bureaucratic-labs/pinkerton",
"id": "0b90aa21a6c003471a2438cb28bbbcbf42678036",
"size": "188",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pinkerton/__main__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "24150"
}
],
"symlink_target": ""
} |
"""Tests for distutils.command.install."""
import os
import os.path
import sys
import unittest
import site
from distutils.command.install import install
from distutils.command import install as install_module
from distutils.command.install import INSTALL_SCHEMES
from distutils.core import Distribution
from distutils.errors import DistutilsOptionError
from distutils.tests import support
class InstallTestCase(support.TempdirManager,
support.LoggingSilencer,
unittest.TestCase):
def test_home_installation_scheme(self):
        # This ensures two things:
# - that --home generates the desired set of directory names
# - test --home is supported on all platforms
builddir = self.mkdtemp()
destination = os.path.join(builddir, "installation")
dist = Distribution({"name": "foopkg"})
        # script_name need not exist, it just needs to be initialized
dist.script_name = os.path.join(builddir, "setup.py")
dist.command_obj["build"] = support.DummyCommand(
build_base=builddir,
build_lib=os.path.join(builddir, "lib"),
)
cmd = install(dist)
cmd.home = destination
cmd.ensure_finalized()
self.assertEqual(cmd.install_base, destination)
self.assertEqual(cmd.install_platbase, destination)
def check_path(got, expected):
got = os.path.normpath(got)
expected = os.path.normpath(expected)
self.assertEqual(got, expected)
libdir = os.path.join(destination, "lib", "python")
check_path(cmd.install_lib, libdir)
check_path(cmd.install_platlib, libdir)
check_path(cmd.install_purelib, libdir)
check_path(cmd.install_headers,
os.path.join(destination, "include", "python", "foopkg"))
check_path(cmd.install_scripts, os.path.join(destination, "bin"))
check_path(cmd.install_data, destination)
def test_user_site(self):
# site.USER_SITE was introduced in 2.6
if sys.version < '2.6':
return
        # preparing the environment for the test
self.old_user_base = site.USER_BASE
self.old_user_site = site.USER_SITE
self.tmpdir = self.mkdtemp()
self.user_base = os.path.join(self.tmpdir, 'B')
self.user_site = os.path.join(self.tmpdir, 'S')
site.USER_BASE = self.user_base
site.USER_SITE = self.user_site
install_module.USER_BASE = self.user_base
install_module.USER_SITE = self.user_site
def _expanduser(path):
return self.tmpdir
self.old_expand = os.path.expanduser
os.path.expanduser = _expanduser
try:
# this is the actual test
self._test_user_site()
finally:
site.USER_BASE = self.old_user_base
site.USER_SITE = self.old_user_site
install_module.USER_BASE = self.old_user_base
install_module.USER_SITE = self.old_user_site
os.path.expanduser = self.old_expand
def _test_user_site(self):
for key in ('nt_user', 'unix_user', 'os2_home'):
self.assert_(key in INSTALL_SCHEMES)
dist = Distribution({'name': 'xx'})
cmd = install(dist)
# making sure the user option is there
        options = [name for name, short, label in
cmd.user_options]
self.assert_('user' in options)
# setting a value
cmd.user = 1
# user base and site shouldn't be created yet
self.assert_(not os.path.exists(self.user_base))
self.assert_(not os.path.exists(self.user_site))
# let's run finalize
cmd.ensure_finalized()
# now they should
self.assert_(os.path.exists(self.user_base))
self.assert_(os.path.exists(self.user_site))
self.assert_('userbase' in cmd.config_vars)
self.assert_('usersite' in cmd.config_vars)
def test_handle_extra_path(self):
dist = Distribution({'name': 'xx', 'extra_path': 'path,dirs'})
cmd = install(dist)
# two elements
cmd.handle_extra_path()
self.assertEquals(cmd.extra_path, ['path', 'dirs'])
self.assertEquals(cmd.extra_dirs, 'dirs')
self.assertEquals(cmd.path_file, 'path')
# one element
cmd.extra_path = ['path']
cmd.handle_extra_path()
self.assertEquals(cmd.extra_path, ['path'])
self.assertEquals(cmd.extra_dirs, 'path')
self.assertEquals(cmd.path_file, 'path')
# none
dist.extra_path = cmd.extra_path = None
cmd.handle_extra_path()
self.assertEquals(cmd.extra_path, None)
self.assertEquals(cmd.extra_dirs, '')
self.assertEquals(cmd.path_file, None)
# three elements (no way !)
cmd.extra_path = 'path,dirs,again'
self.assertRaises(DistutilsOptionError, cmd.handle_extra_path)
def test_finalize_options(self):
dist = Distribution({'name': 'xx'})
cmd = install(dist)
# must supply either prefix/exec-prefix/home or
# install-base/install-platbase -- not both
cmd.prefix = 'prefix'
cmd.install_base = 'base'
self.assertRaises(DistutilsOptionError, cmd.finalize_options)
# must supply either home or prefix/exec-prefix -- not both
cmd.install_base = None
cmd.home = 'home'
self.assertRaises(DistutilsOptionError, cmd.finalize_options)
# can't combine user with with prefix/exec_prefix/home or
# install_(plat)base
cmd.prefix = None
cmd.user = 'user'
self.assertRaises(DistutilsOptionError, cmd.finalize_options)
def test_record(self):
install_dir = self.mkdtemp()
pkgdir, dist = self.create_dist()
dist = Distribution()
cmd = install(dist)
dist.command_obj['install'] = cmd
cmd.root = install_dir
cmd.record = os.path.join(pkgdir, 'RECORD')
cmd.ensure_finalized()
cmd.run()
# let's check the RECORD file was created with one
# line (the egg info file)
with open(cmd.record) as f:
self.assertEquals(len(f.readlines()), 1)
def test_suite():
return unittest.makeSuite(InstallTestCase)
if __name__ == "__main__":
unittest.main(defaultTest="test_suite")
| {
"content_hash": "6cd7e84182a097aa1c9594052fc43955",
"timestamp": "",
"source": "github",
"line_count": 190,
"max_line_length": 76,
"avg_line_length": 33.76315789473684,
"alnum_prop": 0.612626656274357,
"repo_name": "MalloyPower/parsing-python",
"id": "8d7e97227c3a3a9d637c0fa30ce7225c68231349",
"size": "6415",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "front-end/testsuite-python-lib/Python-3.1/Lib/distutils/tests/test_install.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "1963"
},
{
"name": "Lex",
"bytes": "238458"
},
{
"name": "Makefile",
"bytes": "4513"
},
{
"name": "OCaml",
"bytes": "412695"
},
{
"name": "Python",
"bytes": "17319"
},
{
"name": "Rascal",
"bytes": "523063"
},
{
"name": "Yacc",
"bytes": "429659"
}
],
"symlink_target": ""
} |
"""
VMware vCloud driver.
"""
import copy
import sys
import re
import base64
import os
from libcloud.utils.py3 import httplib
from libcloud.utils.py3 import urlencode
from libcloud.utils.py3 import urlparse
from libcloud.utils.py3 import b
from libcloud.utils.py3 import next
urlparse = urlparse.urlparse
import time
try:
from lxml import etree as ET
except ImportError:
from xml.etree import ElementTree as ET
from xml.parsers.expat import ExpatError
from libcloud.common.base import XmlResponse, ConnectionUserAndKey
from libcloud.common.types import InvalidCredsError, LibcloudError
from libcloud.compute.providers import Provider
from libcloud.compute.types import NodeState
from libcloud.compute.base import Node, NodeDriver, NodeLocation
from libcloud.compute.base import NodeSize, NodeImage
"""
From vcloud api "The VirtualQuantity element defines the number of MB
of memory. This should be either 512 or a multiple of 1024 (1 GB)."
"""
VIRTUAL_MEMORY_VALS = [512] + [1024 * i for i in range(1, 9)]
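# For reference, the expression above evaluates to:
# [512, 1024, 2048, 3072, 4096, 5120, 6144, 7168, 8192]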
# Default timeout (in seconds) for long running tasks
DEFAULT_TASK_COMPLETION_TIMEOUT = 600
DEFAULT_API_VERSION = '0.8'
"""
Valid vCloud API v1.5 input values.
"""
VIRTUAL_CPU_VALS_1_5 = [i for i in range(1, 9)]
FENCE_MODE_VALS_1_5 = ['bridged', 'isolated', 'natRouted']
IP_MODE_VALS_1_5 = ['POOL', 'DHCP', 'MANUAL', 'NONE']
def fixxpath(root, xpath):
"""ElementTree wants namespaces in its xpaths, so here we add them."""
namespace, root_tag = root.tag[1:].split("}", 1)
fixed_xpath = "/".join(["{%s}%s" % (namespace, e)
for e in xpath.split("/")])
return fixed_xpath
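# Illustrative sketch (the namespace below is just an example): for a root
# element tagged '{http://www.vmware.com/vcloud/v1.5}VApp',
# fixxpath(root, 'Tasks/Task') yields
# '{http://www.vmware.com/vcloud/v1.5}Tasks/{http://www.vmware.com/vcloud/v1.5}Task'.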
def get_url_path(url):
return urlparse(url.strip()).path
class Vdc(object):
"""
Virtual datacenter (vDC) representation
"""
def __init__(self, id, name, driver, allocation_model=None, cpu=None,
memory=None, storage=None):
self.id = id
self.name = name
self.driver = driver
self.allocation_model = allocation_model
self.cpu = cpu
self.memory = memory
self.storage = storage
def __repr__(self):
return ('<Vdc: id=%s, name=%s, driver=%s ...>'
% (self.id, self.name, self.driver.name))
class Capacity(object):
"""
Represents CPU, Memory or Storage capacity of vDC.
"""
def __init__(self, limit, used, units):
self.limit = limit
self.used = used
self.units = units
def __repr__(self):
return ('<Capacity: limit=%s, used=%s, units=%s>'
% (self.limit, self.used, self.units))
class ControlAccess(object):
"""
Represents control access settings of a node
"""
class AccessLevel(object):
READ_ONLY = 'ReadOnly'
CHANGE = 'Change'
FULL_CONTROL = 'FullControl'
def __init__(self, node, everyone_access_level, subjects=None):
self.node = node
self.everyone_access_level = everyone_access_level
if not subjects:
subjects = []
self.subjects = subjects
def __repr__(self):
return ('<ControlAccess: node=%s, everyone_access_level=%s, '
'subjects=%s>'
% (self.node, self.everyone_access_level, self.subjects))
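# Illustrative sketch (names are hypothetical): a vApp shared read-only with
# everyone, plus full control for one group, could be modelled as
#   ControlAccess(node, 'ReadOnly',
#                 subjects=[Subject('group', 'admins', 'FullControl')])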
class Subject(object):
"""
User or group subject
"""
def __init__(self, type, name, access_level, id=None):
self.type = type
self.name = name
self.access_level = access_level
self.id = id
def __repr__(self):
return ('<Subject: type=%s, name=%s, access_level=%s>'
% (self.type, self.name, self.access_level))
class InstantiateVAppXML(object):
def __init__(self, name, template, net_href, cpus, memory,
password=None, row=None, group=None):
self.name = name
self.template = template
self.net_href = net_href
self.cpus = cpus
self.memory = memory
self.password = password
self.row = row
self.group = group
self._build_xmltree()
def tostring(self):
return ET.tostring(self.root)
def _build_xmltree(self):
self.root = self._make_instantiation_root()
self._add_vapp_template(self.root)
instantiation_params = ET.SubElement(self.root,
"InstantiationParams")
# product and virtual hardware
self._make_product_section(instantiation_params)
self._make_virtual_hardware(instantiation_params)
network_config_section = ET.SubElement(instantiation_params,
"NetworkConfigSection")
network_config = ET.SubElement(network_config_section,
"NetworkConfig")
self._add_network_association(network_config)
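    # The tree assembled above serialises to roughly the following shape
    # (abbreviated; attribute values elided):
    #   <InstantiateVAppTemplateParams name="...">
    #     <VAppTemplate href="..."/>
    #     <InstantiationParams>
    #       <ProductSection .../>
    #       <VirtualHardwareSection>...</VirtualHardwareSection>
    #       <NetworkConfigSection>
    #         <NetworkConfig><NetworkAssociation href="..."/></NetworkConfig>
    #       </NetworkConfigSection>
    #     </InstantiationParams>
    #   </InstantiateVAppTemplateParams>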
def _make_instantiation_root(self):
return ET.Element(
"InstantiateVAppTemplateParams",
{'name': self.name,
'xml:lang': 'en',
'xmlns': "http://www.vmware.com/vcloud/v0.8",
'xmlns:xsi': "http://www.w3.org/2001/XMLSchema-instance"}
)
def _add_vapp_template(self, parent):
return ET.SubElement(
parent,
"VAppTemplate",
{'href': self.template}
)
def _make_product_section(self, parent):
prod_section = ET.SubElement(
parent,
"ProductSection",
{'xmlns:q1': "http://www.vmware.com/vcloud/v0.8",
'xmlns:ovf': "http://schemas.dmtf.org/ovf/envelope/1"}
)
if self.password:
self._add_property(prod_section, 'password', self.password)
if self.row:
self._add_property(prod_section, 'row', self.row)
if self.group:
self._add_property(prod_section, 'group', self.group)
return prod_section
def _add_property(self, parent, ovfkey, ovfvalue):
return ET.SubElement(
parent,
"Property",
{'xmlns': 'http://schemas.dmtf.org/ovf/envelope/1',
'ovf:key': ovfkey,
'ovf:value': ovfvalue}
)
def _make_virtual_hardware(self, parent):
vh = ET.SubElement(
parent,
"VirtualHardwareSection",
{'xmlns:q1': "http://www.vmware.com/vcloud/v0.8"}
)
self._add_cpu(vh)
self._add_memory(vh)
return vh
def _add_cpu(self, parent):
cpu_item = ET.SubElement(
parent,
"Item",
{'xmlns': "http://schemas.dmtf.org/ovf/envelope/1"}
)
self._add_instance_id(cpu_item, '1')
self._add_resource_type(cpu_item, '3')
self._add_virtual_quantity(cpu_item, self.cpus)
return cpu_item
def _add_memory(self, parent):
mem_item = ET.SubElement(
parent,
'Item',
{'xmlns': "http://schemas.dmtf.org/ovf/envelope/1"}
)
self._add_instance_id(mem_item, '2')
self._add_resource_type(mem_item, '4')
self._add_virtual_quantity(mem_item, self.memory)
return mem_item
def _add_instance_id(self, parent, id):
elm = ET.SubElement(
parent,
'InstanceID',
{'xmlns': 'http://schemas.dmtf.org/wbem/wscim/1/cim-schema/2/'
'CIM_ResourceAllocationSettingData'}
)
elm.text = id
return elm
def _add_resource_type(self, parent, type):
elm = ET.SubElement(
parent,
'ResourceType',
{'xmlns': 'http://schemas.dmtf.org/wbem/wscim/1/cim-schema/2/'
'CIM_ResourceAllocationSettingData'}
)
elm.text = type
return elm
def _add_virtual_quantity(self, parent, amount):
elm = ET.SubElement(
parent,
'VirtualQuantity',
{'xmlns': 'http://schemas.dmtf.org/wbem/wscim/1/cim-schema/2/'
'CIM_ResourceAllocationSettingData'}
)
elm.text = amount
return elm
def _add_network_association(self, parent):
return ET.SubElement(
parent,
'NetworkAssociation',
{'href': self.net_href}
)
class VCloudResponse(XmlResponse):
def success(self):
return self.status in (httplib.OK, httplib.CREATED,
httplib.NO_CONTENT, httplib.ACCEPTED)
class VCloudConnection(ConnectionUserAndKey):
"""
Connection class for the vCloud driver
"""
responseCls = VCloudResponse
token = None
host = None
def request(self, *args, **kwargs):
self._get_auth_token()
return super(VCloudConnection, self).request(*args, **kwargs)
def check_org(self):
# the only way to get our org is by logging in.
self._get_auth_token()
def _get_auth_headers(self):
"""Some providers need different headers than others"""
return {
'Authorization': "Basic %s" % base64.b64encode(
b('%s:%s' % (self.user_id, self.key))).decode('utf-8'),
'Content-Length': '0',
'Accept': 'application/*+xml'
}
def _get_auth_token(self):
if not self.token:
self.connection.request(method='POST', url='/api/v0.8/login',
headers=self._get_auth_headers())
resp = self.connection.getresponse()
headers = dict(resp.getheaders())
body = ET.XML(resp.read())
try:
self.token = headers['set-cookie']
except KeyError:
raise InvalidCredsError()
self.driver.org = get_url_path(
body.find(fixxpath(body, 'Org')).get('href')
)
def add_default_headers(self, headers):
headers['Cookie'] = self.token
headers['Accept'] = 'application/*+xml'
return headers
class VCloudNodeDriver(NodeDriver):
"""
vCloud node driver
"""
type = Provider.VCLOUD
name = 'vCloud'
website = 'http://www.vmware.com/products/vcloud/'
connectionCls = VCloudConnection
org = None
_vdcs = None
NODE_STATE_MAP = {'0': NodeState.PENDING,
'1': NodeState.PENDING,
'2': NodeState.PENDING,
'3': NodeState.PENDING,
'4': NodeState.RUNNING}
features = {'create_node': ['password']}
def __new__(cls, key, secret=None, secure=True, host=None, port=None,
api_version=DEFAULT_API_VERSION, **kwargs):
if cls is VCloudNodeDriver:
if api_version == '0.8':
cls = VCloudNodeDriver
elif api_version == '1.5':
cls = VCloud_1_5_NodeDriver
elif api_version == '5.1':
cls = VCloud_5_1_NodeDriver
else:
raise NotImplementedError(
"No VCloudNodeDriver found for API version %s" %
(api_version))
return super(VCloudNodeDriver, cls).__new__(cls)
@property
def vdcs(self):
"""
vCloud virtual data centers (vDCs).
:return: list of vDC objects
:rtype: ``list`` of :class:`Vdc`
"""
if not self._vdcs:
self.connection.check_org() # make sure the org is set.
res = self.connection.request(self.org)
self._vdcs = [
self._to_vdc(
self.connection.request(get_url_path(i.get('href'))).object
)
for i in res.object.findall(fixxpath(res.object, "Link"))
if i.get('type') == 'application/vnd.vmware.vcloud.vdc+xml'
]
return self._vdcs
def _to_vdc(self, vdc_elm):
return Vdc(vdc_elm.get('href'), vdc_elm.get('name'), self)
def _get_vdc(self, vdc_name):
vdc = None
if not vdc_name:
# Return the first organisation VDC found
vdc = self.vdcs[0]
else:
for v in self.vdcs:
if v.name == vdc_name:
vdc = v
if vdc is None:
            raise ValueError('%s virtual data centre could not be found'
                             % vdc_name)
return vdc
@property
def networks(self):
networks = []
for vdc in self.vdcs:
res = self.connection.request(get_url_path(vdc.id)).object
networks.extend(
[network
for network in res.findall(
fixxpath(res, 'AvailableNetworks/Network')
)]
)
return networks
def _to_image(self, image):
image = NodeImage(id=image.get('href'),
name=image.get('name'),
driver=self.connection.driver)
return image
def _to_node(self, elm):
state = self.NODE_STATE_MAP[elm.get('status')]
name = elm.get('name')
public_ips = []
private_ips = []
# Following code to find private IPs works for Terremark
connections = elm.findall('%s/%s' % (
'{http://schemas.dmtf.org/ovf/envelope/1}NetworkConnectionSection',
fixxpath(elm, 'NetworkConnection'))
)
if not connections:
connections = elm.findall(
fixxpath(
elm,
'Children/Vm/NetworkConnectionSection/NetworkConnection'))
for connection in connections:
ips = [ip.text
for ip
in connection.findall(fixxpath(elm, "IpAddress"))]
if connection.get('Network') == 'Internal':
private_ips.extend(ips)
else:
public_ips.extend(ips)
node = Node(id=elm.get('href'),
name=name,
state=state,
public_ips=public_ips,
private_ips=private_ips,
driver=self.connection.driver)
return node
def _get_catalog_hrefs(self):
res = self.connection.request(self.org)
catalogs = [
i.get('href')
for i in res.object.findall(fixxpath(res.object, "Link"))
if i.get('type') == 'application/vnd.vmware.vcloud.catalog+xml'
]
return catalogs
def _wait_for_task_completion(self, task_href,
timeout=DEFAULT_TASK_COMPLETION_TIMEOUT):
start_time = time.time()
res = self.connection.request(get_url_path(task_href))
status = res.object.get('status')
while status != 'success':
if status == 'error':
# Get error reason from the response body
error_elem = res.object.find(fixxpath(res.object, 'Error'))
error_msg = "Unknown error"
if error_elem is not None:
error_msg = error_elem.get('message')
raise Exception("Error status returned by task %s.: %s"
% (task_href, error_msg))
if status == 'canceled':
raise Exception("Canceled status returned by task %s."
% task_href)
if (time.time() - start_time >= timeout):
raise Exception("Timeout (%s sec) while waiting for task %s."
% (timeout, task_href))
time.sleep(5)
res = self.connection.request(get_url_path(task_href))
status = res.object.get('status')
def destroy_node(self, node):
node_path = get_url_path(node.id)
# blindly poweroff node, it will throw an exception if already off
try:
res = self.connection.request('%s/power/action/poweroff'
% node_path,
method='POST')
self._wait_for_task_completion(res.object.get('href'))
except Exception:
pass
try:
res = self.connection.request('%s/action/undeploy' % node_path,
method='POST')
self._wait_for_task_completion(res.object.get('href'))
except ExpatError:
# The undeploy response is malformed XML atm.
            # We can remove this when the providers fix the problem.
pass
except Exception:
# Some vendors don't implement undeploy at all yet,
# so catch this and move on.
pass
res = self.connection.request(node_path, method='DELETE')
return res.status == httplib.ACCEPTED
def reboot_node(self, node):
res = self.connection.request('%s/power/action/reset'
% get_url_path(node.id),
method='POST')
return res.status in [httplib.ACCEPTED, httplib.NO_CONTENT]
def list_nodes(self):
return self.ex_list_nodes()
def ex_list_nodes(self, vdcs=None):
"""
List all nodes across all vDCs. Using 'vdcs' you can specify which vDCs
should be queried.
:param vdcs: None, vDC or a list of vDCs to query. If None all vDCs
will be queried.
:type vdcs: :class:`Vdc`
:rtype: ``list`` of :class:`Node`
"""
if not vdcs:
vdcs = self.vdcs
if not isinstance(vdcs, (list, tuple)):
vdcs = [vdcs]
nodes = []
for vdc in vdcs:
res = self.connection.request(get_url_path(vdc.id))
elms = res.object.findall(fixxpath(
res.object, "ResourceEntities/ResourceEntity")
)
vapps = [
(i.get('name'), i.get('href'))
for i in elms if
i.get('type') == 'application/vnd.vmware.vcloud.vApp+xml' and
i.get('name')
]
for vapp_name, vapp_href in vapps:
try:
res = self.connection.request(
get_url_path(vapp_href),
headers={'Content-Type':
'application/vnd.vmware.vcloud.vApp+xml'}
)
nodes.append(self._to_node(res.object))
except Exception:
# The vApp was probably removed since the previous vDC
# query, ignore
e = sys.exc_info()[1]
if not (e.args[0].tag.endswith('Error') and
e.args[0].get('minorErrorCode') ==
'ACCESS_TO_RESOURCE_IS_FORBIDDEN'):
raise
return nodes
def _to_size(self, ram):
ns = NodeSize(
id=None,
name="%s Ram" % ram,
ram=ram,
disk=None,
bandwidth=None,
price=None,
driver=self.connection.driver
)
return ns
def list_sizes(self, location=None):
sizes = [self._to_size(i) for i in VIRTUAL_MEMORY_VALS]
return sizes
def _get_catalogitems_hrefs(self, catalog):
"""Given a catalog href returns contained catalog item hrefs"""
res = self.connection.request(
get_url_path(catalog),
headers={
'Content-Type': 'application/vnd.vmware.vcloud.catalog+xml'
}
).object
cat_items = res.findall(fixxpath(res, "CatalogItems/CatalogItem"))
cat_item_hrefs = [i.get('href')
for i in cat_items
if i.get('type') ==
'application/vnd.vmware.vcloud.catalogItem+xml']
return cat_item_hrefs
def _get_catalogitem(self, catalog_item):
"""Given a catalog item href returns elementree"""
res = self.connection.request(
get_url_path(catalog_item),
headers={
'Content-Type': 'application/vnd.vmware.vcloud.catalogItem+xml'
}
).object
return res
def list_images(self, location=None):
images = []
for vdc in self.vdcs:
res = self.connection.request(get_url_path(vdc.id)).object
res_ents = res.findall(fixxpath(
res, "ResourceEntities/ResourceEntity")
)
images += [
self._to_image(i)
for i in res_ents
if i.get('type') ==
'application/vnd.vmware.vcloud.vAppTemplate+xml'
]
for catalog in self._get_catalog_hrefs():
for cat_item in self._get_catalogitems_hrefs(catalog):
res = self._get_catalogitem(cat_item)
res_ents = res.findall(fixxpath(res, 'Entity'))
images += [
self._to_image(i)
for i in res_ents
if i.get('type') ==
'application/vnd.vmware.vcloud.vAppTemplate+xml'
]
def idfun(image):
return image.id
return self._uniquer(images, idfun)
def _uniquer(self, seq, idfun=None):
if idfun is None:
def idfun(x):
return x
seen = {}
result = []
for item in seq:
marker = idfun(item)
if marker in seen:
continue
seen[marker] = 1
result.append(item)
return result
def create_node(self, **kwargs):
"""
Creates and returns node.
:keyword ex_network: link to a "Network" e.g.,
``https://services.vcloudexpress...``
:type ex_network: ``str``
:keyword ex_vdc: Name of organisation's virtual data
center where vApp VMs will be deployed.
:type ex_vdc: ``str``
:keyword ex_cpus: number of virtual cpus (limit depends on provider)
:type ex_cpus: ``int``
:type ex_row: ``str``
:type ex_group: ``str``
"""
name = kwargs['name']
image = kwargs['image']
size = kwargs['size']
# Some providers don't require a network link
try:
network = kwargs.get('ex_network', self.networks[0].get('href'))
except IndexError:
network = ''
password = None
auth = self._get_and_check_auth(kwargs.get('auth'))
password = auth.password
instantiate_xml = InstantiateVAppXML(
name=name,
template=image.id,
net_href=network,
cpus=str(kwargs.get('ex_cpus', 1)),
memory=str(size.ram),
password=password,
row=kwargs.get('ex_row', None),
group=kwargs.get('ex_group', None)
)
vdc = self._get_vdc(kwargs.get('ex_vdc', None))
# Instantiate VM and get identifier.
content_type = \
'application/vnd.vmware.vcloud.instantiateVAppTemplateParams+xml'
res = self.connection.request(
'%s/action/instantiateVAppTemplate' % get_url_path(vdc.id),
data=instantiate_xml.tostring(),
method='POST',
headers={'Content-Type': content_type}
)
vapp_path = get_url_path(res.object.get('href'))
# Deploy the VM from the identifier.
res = self.connection.request('%s/action/deploy' % vapp_path,
method='POST')
self._wait_for_task_completion(res.object.get('href'))
# Power on the VM.
res = self.connection.request('%s/power/action/powerOn' % vapp_path,
method='POST')
res = self.connection.request(vapp_path)
node = self._to_node(res.object)
if getattr(auth, "generated", False):
node.extra['password'] = auth.password
return node
class HostingComConnection(VCloudConnection):
"""
vCloud connection subclass for Hosting.com
"""
host = "vcloud.safesecureweb.com"
def _get_auth_headers(self):
"""hosting.com doesn't follow the standard vCloud authentication API"""
return {
'Authentication': base64.b64encode(b('%s:%s' % (self.user_id,
self.key))),
'Content-Length': '0'
}
class HostingComDriver(VCloudNodeDriver):
"""
vCloud node driver for Hosting.com
"""
connectionCls = HostingComConnection
class TerremarkConnection(VCloudConnection):
"""
vCloud connection subclass for Terremark
"""
host = "services.vcloudexpress.terremark.com"
class TerremarkDriver(VCloudNodeDriver):
"""
vCloud node driver for Terremark
"""
connectionCls = TerremarkConnection
def list_locations(self):
return [NodeLocation(0, "Terremark Texas", 'US', self)]
class VCloud_1_5_Connection(VCloudConnection):
def _get_auth_headers(self):
"""Compatibility for using v1.5 API under vCloud Director 5.1"""
return {
'Authorization': "Basic %s" % base64.b64encode(
b('%s:%s' % (self.user_id, self.key))).decode('utf-8'),
'Content-Length': '0',
'Accept': 'application/*+xml;version=1.5'
}
def _get_auth_token(self):
if not self.token:
# Log In
self.connection.request(method='POST', url='/api/sessions',
headers=self._get_auth_headers())
resp = self.connection.getresponse()
headers = dict(resp.getheaders())
# Set authorization token
try:
self.token = headers['x-vcloud-authorization']
except KeyError:
raise InvalidCredsError()
# Get the URL of the Organization
body = ET.XML(resp.read())
self.org_name = body.get('org')
org_list_url = get_url_path(
next((link for link in body.findall(fixxpath(body, 'Link'))
if link.get('type') ==
'application/vnd.vmware.vcloud.orgList+xml')).get('href')
)
self.connection.set_http_proxy(self.proxy_url)
self.connection.request(method='GET', url=org_list_url,
headers=self.add_default_headers({}))
body = ET.XML(self.connection.getresponse().read())
self.driver.org = get_url_path(
next((org for org in body.findall(fixxpath(body, 'Org'))
if org.get('name') == self.org_name)).get('href')
)
def add_default_headers(self, headers):
headers['Accept'] = 'application/*+xml;version=1.5'
headers['x-vcloud-authorization'] = self.token
return headers
class Instantiate_1_5_VAppXML(object):
def __init__(self, name, template, network, vm_network=None,
vm_fence=None):
self.name = name
self.template = template
self.network = network
self.vm_network = vm_network
self.vm_fence = vm_fence
self._build_xmltree()
def tostring(self):
return ET.tostring(self.root)
def _build_xmltree(self):
self.root = self._make_instantiation_root()
if self.network is not None:
            instantiation_params = ET.SubElement(self.root,
                                                 'InstantiationParams')
            network_config_section = ET.SubElement(instantiation_params,
                                                   'NetworkConfigSection')
ET.SubElement(
network_config_section,
'Info',
{'xmlns': 'http://schemas.dmtf.org/ovf/envelope/1'}
)
network_config = ET.SubElement(network_config_section,
'NetworkConfig')
self._add_network_association(network_config)
self._add_vapp_template(self.root)
def _make_instantiation_root(self):
return ET.Element(
'InstantiateVAppTemplateParams',
{'name': self.name,
'deploy': 'false',
'powerOn': 'false',
'xml:lang': 'en',
'xmlns': 'http://www.vmware.com/vcloud/v1.5',
'xmlns:xsi': 'http://www.w3.org/2001/XMLSchema-instance'}
)
def _add_vapp_template(self, parent):
return ET.SubElement(
parent,
'Source',
{'href': self.template}
)
def _add_network_association(self, parent):
if self.vm_network is None:
# Don't set a custom vApp VM network name
parent.set('networkName', self.network.get('name'))
else:
# Set a custom vApp VM network name
parent.set('networkName', self.vm_network)
configuration = ET.SubElement(parent, 'Configuration')
ET.SubElement(configuration, 'ParentNetwork',
{'href': self.network.get('href')})
if self.vm_fence is None:
fencemode = self.network.find(fixxpath(self.network,
'Configuration/FenceMode')).text
else:
fencemode = self.vm_fence
ET.SubElement(configuration, 'FenceMode').text = fencemode
class VCloud_1_5_NodeDriver(VCloudNodeDriver):
connectionCls = VCloud_1_5_Connection
# Based on
# http://pubs.vmware.com/vcloud-api-1-5/api_prog/
# GUID-843BE3AD-5EF6-4442-B864-BCAE44A51867.html
NODE_STATE_MAP = {'-1': NodeState.UNKNOWN,
'0': NodeState.PENDING,
'1': NodeState.PENDING,
'2': NodeState.PENDING,
'3': NodeState.PENDING,
'4': NodeState.RUNNING,
'5': NodeState.RUNNING,
'6': NodeState.UNKNOWN,
'7': NodeState.UNKNOWN,
'8': NodeState.STOPPED,
'9': NodeState.UNKNOWN,
'10': NodeState.UNKNOWN}
def list_locations(self):
return [NodeLocation(id=self.connection.host,
name=self.connection.host, country="N/A", driver=self)]
def ex_find_node(self, node_name, vdcs=None):
"""
        Searches for a node across the specified vDCs. This is more efficient
        than querying all nodes to find a single instance.
:param node_name: The name of the node to search for
:type node_name: ``str``
:param vdcs: None, vDC or a list of vDCs to search in. If None all vDCs
will be searched.
:type vdcs: :class:`Vdc`
:return: node instance or None if not found
:rtype: :class:`Node` or ``None``
"""
if not vdcs:
vdcs = self.vdcs
if not getattr(vdcs, '__iter__', False):
vdcs = [vdcs]
for vdc in vdcs:
res = self.connection.request(get_url_path(vdc.id))
xpath = fixxpath(res.object, "ResourceEntities/ResourceEntity")
entity_elems = res.object.findall(xpath)
for entity_elem in entity_elems:
if entity_elem.get('type') == \
'application/vnd.vmware.vcloud.vApp+xml' and \
entity_elem.get('name') == node_name:
path = get_url_path(entity_elem.get('href'))
headers = {'Content-Type':
'application/vnd.vmware.vcloud.vApp+xml'}
res = self.connection.request(path,
headers=headers)
return self._to_node(res.object)
return None
def destroy_node(self, node):
try:
self.ex_undeploy_node(node)
except Exception:
# Some vendors don't implement undeploy at all yet,
# so catch this and move on.
pass
res = self.connection.request(get_url_path(node.id), method='DELETE')
return res.status == httplib.ACCEPTED
def reboot_node(self, node):
res = self.connection.request('%s/power/action/reset'
% get_url_path(node.id),
method='POST')
if res.status in [httplib.ACCEPTED, httplib.NO_CONTENT]:
self._wait_for_task_completion(res.object.get('href'))
return True
else:
return False
def ex_deploy_node(self, node):
"""
Deploys existing node. Equal to vApp "start" operation.
:param node: The node to be deployed
:type node: :class:`Node`
:rtype: :class:`Node`
"""
data = {'powerOn': 'true',
'xmlns': 'http://www.vmware.com/vcloud/v1.5'}
deploy_xml = ET.Element('DeployVAppParams', data)
path = get_url_path(node.id)
headers = {
'Content-Type':
'application/vnd.vmware.vcloud.deployVAppParams+xml'
}
res = self.connection.request('%s/action/deploy' % path,
data=ET.tostring(deploy_xml),
method='POST',
headers=headers)
self._wait_for_task_completion(res.object.get('href'))
res = self.connection.request(get_url_path(node.id))
return self._to_node(res.object)
def ex_undeploy_node(self, node):
"""
Undeploys existing node. Equal to vApp "stop" operation.
:param node: The node to be deployed
:type node: :class:`Node`
:rtype: :class:`Node`
"""
data = {'xmlns': 'http://www.vmware.com/vcloud/v1.5'}
undeploy_xml = ET.Element('UndeployVAppParams', data)
undeploy_power_action_xml = ET.SubElement(undeploy_xml,
'UndeployPowerAction')
undeploy_power_action_xml.text = 'shutdown'
headers = {
'Content-Type':
'application/vnd.vmware.vcloud.undeployVAppParams+xml'
}
try:
res = self.connection.request(
'%s/action/undeploy' % get_url_path(node.id),
data=ET.tostring(undeploy_xml),
method='POST',
headers=headers)
self._wait_for_task_completion(res.object.get('href'))
except Exception:
undeploy_power_action_xml.text = 'powerOff'
res = self.connection.request(
'%s/action/undeploy' % get_url_path(node.id),
data=ET.tostring(undeploy_xml),
method='POST',
headers=headers)
self._wait_for_task_completion(res.object.get('href'))
res = self.connection.request(get_url_path(node.id))
return self._to_node(res.object)
def ex_power_off_node(self, node):
"""
        Powers off all VMs under specified node. This operation
        is allowed only when the vApp/VM is powered on.
:param node: The node to be powered off
:type node: :class:`Node`
:rtype: :class:`Node`
"""
return self._perform_power_operation(node, 'powerOff')
def ex_power_on_node(self, node):
"""
Powers on all VMs under specified node. This operation is allowed
only when the vApp/VM is powered off or suspended.
:param node: The node to be powered on
:type node: :class:`Node`
:rtype: :class:`Node`
"""
return self._perform_power_operation(node, 'powerOn')
def ex_shutdown_node(self, node):
"""
        Shuts down all VMs under specified node. This operation is allowed only
when the vApp/VM is powered on.
:param node: The node to be shut down
:type node: :class:`Node`
:rtype: :class:`Node`
"""
return self._perform_power_operation(node, 'shutdown')
def ex_suspend_node(self, node):
"""
Suspends all VMs under specified node. This operation is allowed only
when the vApp/VM is powered on.
:param node: The node to be suspended
:type node: :class:`Node`
:rtype: :class:`Node`
"""
return self._perform_power_operation(node, 'suspend')
def _perform_power_operation(self, node, operation):
res = self.connection.request(
'%s/power/action/%s' % (get_url_path(node.id), operation),
method='POST')
self._wait_for_task_completion(res.object.get('href'))
res = self.connection.request(get_url_path(node.id))
return self._to_node(res.object)
def ex_get_control_access(self, node):
"""
Returns the control access settings for specified node.
:param node: node to get the control access for
:type node: :class:`Node`
:rtype: :class:`ControlAccess`
"""
res = self.connection.request(
'%s/controlAccess' % get_url_path(node.id))
everyone_access_level = None
is_shared_elem = res.object.find(
fixxpath(res.object, "IsSharedToEveryone"))
if is_shared_elem is not None and is_shared_elem.text == 'true':
everyone_access_level = res.object.find(
fixxpath(res.object, "EveryoneAccessLevel")).text
# Parse all subjects
subjects = []
xpath = fixxpath(res.object, "AccessSettings/AccessSetting")
for elem in res.object.findall(xpath):
access_level = elem.find(fixxpath(res.object, "AccessLevel")).text
subject_elem = elem.find(fixxpath(res.object, "Subject"))
if subject_elem.get('type') == \
'application/vnd.vmware.admin.group+xml':
subj_type = 'group'
else:
subj_type = 'user'
path = get_url_path(subject_elem.get('href'))
res = self.connection.request(path)
name = res.object.get('name')
subject = Subject(type=subj_type,
name=name,
access_level=access_level,
id=subject_elem.get('href'))
subjects.append(subject)
return ControlAccess(node, everyone_access_level, subjects)
def ex_set_control_access(self, node, control_access):
"""
Sets control access for the specified node.
:param node: node
:type node: :class:`Node`
:param control_access: control access settings
:type control_access: :class:`ControlAccess`
:rtype: ``None``
"""
xml = ET.Element('ControlAccessParams',
{'xmlns': 'http://www.vmware.com/vcloud/v1.5'})
shared_to_everyone = ET.SubElement(xml, 'IsSharedToEveryone')
if control_access.everyone_access_level:
shared_to_everyone.text = 'true'
everyone_access_level = ET.SubElement(xml, 'EveryoneAccessLevel')
everyone_access_level.text = control_access.everyone_access_level
else:
shared_to_everyone.text = 'false'
# Set subjects
if control_access.subjects:
access_settings_elem = ET.SubElement(xml, 'AccessSettings')
for subject in control_access.subjects:
setting = ET.SubElement(access_settings_elem, 'AccessSetting')
if subject.id:
href = subject.id
else:
res = self.ex_query(type=subject.type, filter='name==' +
subject.name)
if not res:
raise LibcloudError('Specified subject "%s %s" not found '
% (subject.type, subject.name))
href = res[0]['href']
ET.SubElement(setting, 'Subject', {'href': href})
ET.SubElement(setting, 'AccessLevel').text = subject.access_level
headers = {
'Content-Type': 'application/vnd.vmware.vcloud.controlAccess+xml'
}
self.connection.request(
'%s/action/controlAccess' % get_url_path(node.id),
data=ET.tostring(xml),
headers=headers,
method='POST')
def ex_get_metadata(self, node):
"""
:param node: node
:type node: :class:`Node`
:return: dictionary mapping metadata keys to metadata values
:rtype: dictionary mapping ``str`` to ``str``
"""
res = self.connection.request('%s/metadata' % (get_url_path(node.id)))
xpath = fixxpath(res.object, 'MetadataEntry')
metadata_entries = res.object.findall(xpath)
res_dict = {}
for entry in metadata_entries:
key = entry.findtext(fixxpath(res.object, 'Key'))
value = entry.findtext(fixxpath(res.object, 'Value'))
res_dict[key] = value
return res_dict
def ex_set_metadata_entry(self, node, key, value):
"""
:param node: node
:type node: :class:`Node`
:param key: metadata key to be set
:type key: ``str``
:param value: metadata value to be set
:type value: ``str``
:rtype: ``None``
"""
metadata_elem = ET.Element(
'Metadata',
{'xmlns': "http://www.vmware.com/vcloud/v1.5",
'xmlns:xsi': "http://www.w3.org/2001/XMLSchema-instance"}
)
entry = ET.SubElement(metadata_elem, 'MetadataEntry')
key_elem = ET.SubElement(entry, 'Key')
key_elem.text = key
value_elem = ET.SubElement(entry, 'Value')
value_elem.text = value
# send it back to the server
res = self.connection.request(
'%s/metadata' % get_url_path(node.id),
data=ET.tostring(metadata_elem),
headers={
'Content-Type': 'application/vnd.vmware.vcloud.metadata+xml'
},
method='POST')
self._wait_for_task_completion(res.object.get('href'))
def ex_query(self, type, filter=None, page=1, page_size=100, sort_asc=None,
sort_desc=None):
"""
Queries vCloud for specified type. See
http://www.vmware.com/pdf/vcd_15_api_guide.pdf for details. Each
element of the returned list is a dictionary with all attributes from
the record.
        :param type: type to query (e.g. user, group, vApp etc.)
:type type: ``str``
:param filter: filter expression (see documentation for syntax)
:type filter: ``str``
:param page: page number
:type page: ``int``
:param page_size: page size
:type page_size: ``int``
:param sort_asc: sort in ascending order by specified field
:type sort_asc: ``str``
:param sort_desc: sort in descending order by specified field
:type sort_desc: ``str``
:rtype: ``list`` of dict
"""
# This is a workaround for filter parameter encoding
# the urllib encodes (name==Developers%20Only) into
# %28name%3D%3DDevelopers%20Only%29) which is not accepted by vCloud
params = {
'type': type,
'pageSize': page_size,
'page': page,
}
if sort_asc:
params['sortAsc'] = sort_asc
if sort_desc:
params['sortDesc'] = sort_desc
url = '/api/query?' + urlencode(params)
if filter:
if not filter.startswith('('):
filter = '(' + filter + ')'
url += '&filter=' + filter.replace(' ', '+')
results = []
res = self.connection.request(url)
for elem in res.object:
if not elem.tag.endswith('Link'):
result = elem.attrib
result['type'] = elem.tag.split('}')[1]
results.append(result)
return results
def create_node(self, **kwargs):
"""
Creates and returns node. If the source image is:
- vApp template - a new vApp is instantiated from template
          - existing vApp - a new vApp is cloned from the source vApp. It is
                            not possible to clone more vApps in parallel,
                            otherwise a resource busy error is raised.
@inherits: :class:`NodeDriver.create_node`
:keyword image: OS Image to boot on node. (required). Can be a
NodeImage or existing Node that will be cloned.
:type image: :class:`NodeImage` or :class:`Node`
:keyword ex_network: Organisation's network name for attaching vApp
VMs to.
:type ex_network: ``str``
:keyword ex_vdc: Name of organisation's virtual data center where
vApp VMs will be deployed.
:type ex_vdc: ``str``
:keyword ex_vm_names: list of names to be used as a VM and computer
name. The name must be max. 15 characters
long and follow the host name requirements.
:type ex_vm_names: ``list`` of ``str``
:keyword ex_vm_cpu: number of virtual CPUs/cores to allocate for
each vApp VM.
:type ex_vm_cpu: ``int``
:keyword ex_vm_memory: amount of memory in MB to allocate for each
vApp VM.
:type ex_vm_memory: ``int``
:keyword ex_vm_script: full path to file containing guest
customisation script for each vApp VM.
Useful for creating users & pushing out
public SSH keys etc.
:type ex_vm_script: ``str``
:keyword ex_vm_network: Override default vApp VM network name.
Useful for when you've imported an OVF
originating from outside of the vCloud.
:type ex_vm_network: ``str``
:keyword ex_vm_fence: Fence mode for connecting the vApp VM network
(ex_vm_network) to the parent
organisation network (ex_network).
:type ex_vm_fence: ``str``
:keyword ex_vm_ipmode: IP address allocation mode for all vApp VM
network connections.
:type ex_vm_ipmode: ``str``
:keyword ex_deploy: set to False if the node shouldn't be deployed
(started) after creation
:type ex_deploy: ``bool``
:keyword ex_clone_timeout: timeout in seconds for clone/instantiate
VM operation.
Cloning might be a time consuming
operation especially when linked clones
are disabled or VMs are created on
different datastores.
Overrides the default task completion
value.
:type ex_clone_timeout: ``int``
"""
name = kwargs['name']
image = kwargs['image']
ex_vm_names = kwargs.get('ex_vm_names')
ex_vm_cpu = kwargs.get('ex_vm_cpu')
ex_vm_memory = kwargs.get('ex_vm_memory')
ex_vm_script = kwargs.get('ex_vm_script')
ex_vm_fence = kwargs.get('ex_vm_fence', None)
ex_network = kwargs.get('ex_network', None)
ex_vm_network = kwargs.get('ex_vm_network', None)
ex_vm_ipmode = kwargs.get('ex_vm_ipmode', None)
ex_deploy = kwargs.get('ex_deploy', True)
ex_vdc = kwargs.get('ex_vdc', None)
ex_clone_timeout = kwargs.get('ex_clone_timeout',
DEFAULT_TASK_COMPLETION_TIMEOUT)
self._validate_vm_names(ex_vm_names)
self._validate_vm_cpu(ex_vm_cpu)
self._validate_vm_memory(ex_vm_memory)
self._validate_vm_fence(ex_vm_fence)
self._validate_vm_ipmode(ex_vm_ipmode)
ex_vm_script = self._validate_vm_script(ex_vm_script)
# Some providers don't require a network link
if ex_network:
network_href = self._get_network_href(ex_network)
network_elem = self.connection.request(
get_url_path(network_href)).object
else:
network_elem = None
vdc = self._get_vdc(ex_vdc)
if self._is_node(image):
vapp_name, vapp_href = self._clone_node(name,
image,
vdc,
ex_clone_timeout)
else:
vapp_name, vapp_href = self._instantiate_node(name, image,
network_elem,
vdc, ex_vm_network,
ex_vm_fence,
ex_clone_timeout)
self._change_vm_names(vapp_href, ex_vm_names)
self._change_vm_cpu(vapp_href, ex_vm_cpu)
self._change_vm_memory(vapp_href, ex_vm_memory)
self._change_vm_script(vapp_href, ex_vm_script)
self._change_vm_ipmode(vapp_href, ex_vm_ipmode)
# Power on the VM.
if ex_deploy:
# Retry 3 times: when instantiating large number of VMs at the same
# time some may fail on resource allocation
retry = 3
while True:
try:
res = self.connection.request(
'%s/power/action/powerOn' % get_url_path(vapp_href),
method='POST')
self._wait_for_task_completion(res.object.get('href'))
break
except Exception:
if retry <= 0:
raise
retry -= 1
time.sleep(10)
res = self.connection.request(get_url_path(vapp_href))
node = self._to_node(res.object)
return node
def _instantiate_node(self, name, image, network_elem, vdc, vm_network,
vm_fence, instantiate_timeout):
instantiate_xml = Instantiate_1_5_VAppXML(
name=name,
template=image.id,
network=network_elem,
vm_network=vm_network,
vm_fence=vm_fence
)
# Instantiate VM and get identifier.
headers = {
'Content-Type':
'application/vnd.vmware.vcloud.instantiateVAppTemplateParams+xml'
}
res = self.connection.request(
'%s/action/instantiateVAppTemplate' % get_url_path(vdc.id),
data=instantiate_xml.tostring(),
method='POST',
headers=headers
)
vapp_name = res.object.get('name')
vapp_href = res.object.get('href')
task_href = res.object.find(fixxpath(res.object, "Tasks/Task")).get(
'href')
self._wait_for_task_completion(task_href, instantiate_timeout)
return vapp_name, vapp_href
def _clone_node(self, name, sourceNode, vdc, clone_timeout):
clone_xml = ET.Element(
"CloneVAppParams",
{'name': name, 'deploy': 'false', 'powerOn': 'false',
'xmlns': "http://www.vmware.com/vcloud/v1.5",
'xmlns:xsi': "http://www.w3.org/2001/XMLSchema-instance"}
)
ET.SubElement(clone_xml,
'Description').text = 'Clone of ' + sourceNode.name
ET.SubElement(clone_xml, 'Source', {'href': sourceNode.id})
headers = {
'Content-Type': 'application/vnd.vmware.vcloud.cloneVAppParams+xml'
}
res = self.connection.request(
'%s/action/cloneVApp' % get_url_path(vdc.id),
data=ET.tostring(clone_xml),
method='POST',
headers=headers
)
vapp_name = res.object.get('name')
vapp_href = res.object.get('href')
task_href = res.object.find(
fixxpath(res.object, "Tasks/Task")).get('href')
self._wait_for_task_completion(task_href, clone_timeout)
res = self.connection.request(get_url_path(vapp_href))
vms = res.object.findall(fixxpath(res.object, "Children/Vm"))
# Fix the networking for VMs
for i, vm in enumerate(vms):
# Remove network
network_xml = ET.Element("NetworkConnectionSection", {
'ovf:required': 'false',
'xmlns': "http://www.vmware.com/vcloud/v1.5",
'xmlns:ovf': 'http://schemas.dmtf.org/ovf/envelope/1'})
ET.SubElement(network_xml, "ovf:Info").text = \
'Specifies the available VM network connections'
headers = {
'Content-Type':
'application/vnd.vmware.vcloud.networkConnectionSection+xml'
}
res = self.connection.request(
'%s/networkConnectionSection' % get_url_path(vm.get('href')),
data=ET.tostring(network_xml),
method='PUT',
headers=headers
)
self._wait_for_task_completion(res.object.get('href'))
# Re-add network
network_xml = vm.find(fixxpath(vm, 'NetworkConnectionSection'))
network_conn_xml = network_xml.find(
fixxpath(network_xml, 'NetworkConnection'))
network_conn_xml.set('needsCustomization', 'true')
network_conn_xml.remove(
network_conn_xml.find(fixxpath(network_xml, 'IpAddress')))
network_conn_xml.remove(
network_conn_xml.find(fixxpath(network_xml, 'MACAddress')))
headers = {
'Content-Type':
'application/vnd.vmware.vcloud.networkConnectionSection+xml'
}
res = self.connection.request(
'%s/networkConnectionSection' % get_url_path(vm.get('href')),
data=ET.tostring(network_xml),
method='PUT',
headers=headers
)
self._wait_for_task_completion(res.object.get('href'))
return vapp_name, vapp_href
def ex_set_vm_cpu(self, vapp_or_vm_id, vm_cpu):
"""
Sets the number of virtual CPUs for the specified VM or VMs under
        the vApp. If the vapp_or_vm_id param represents a link to a vApp,
        all VMs that are attached to this vApp will be modified.
        Please ensure that hot-adding a virtual CPU is enabled for the
        powered-on virtual machines. Otherwise, use this method on an
        undeployed vApp.
:keyword vapp_or_vm_id: vApp or VM ID that will be modified. If
a vApp ID is used here all attached VMs
will be modified
:type vapp_or_vm_id: ``str``
:keyword vm_cpu: number of virtual CPUs/cores to allocate for
specified VMs
:type vm_cpu: ``int``
:rtype: ``None``
"""
self._validate_vm_cpu(vm_cpu)
self._change_vm_cpu(vapp_or_vm_id, vm_cpu)
def ex_set_vm_memory(self, vapp_or_vm_id, vm_memory):
"""
Sets the virtual memory in MB to allocate for the specified VM or
VMs under the vApp. If the vapp_or_vm_id param represents a link
        to a vApp, all VMs that are attached to this vApp will be modified.
        Please ensure that hot-change of virtual memory is enabled for the
        powered-on virtual machines. Otherwise, use this method on an
        undeployed vApp.
:keyword vapp_or_vm_id: vApp or VM ID that will be modified. If
a vApp ID is used here all attached VMs
will be modified
:type vapp_or_vm_id: ``str``
:keyword vm_memory: virtual memory in MB to allocate for the
specified VM or VMs
:type vm_memory: ``int``
:rtype: ``None``
"""
self._validate_vm_memory(vm_memory)
self._change_vm_memory(vapp_or_vm_id, vm_memory)
def ex_add_vm_disk(self, vapp_or_vm_id, vm_disk_size):
"""
Adds a virtual disk to the specified VM or VMs under the vApp. If the
        vapp_or_vm_id param represents a link to a vApp, all VMs that are
attached to this vApp will be modified.
:keyword vapp_or_vm_id: vApp or VM ID that will be modified. If a
vApp ID is used here all attached VMs
will be modified
:type vapp_or_vm_id: ``str``
:keyword vm_disk_size: the disk capacity in GB that will be added
to the specified VM or VMs
:type vm_disk_size: ``int``
:rtype: ``None``
"""
self._validate_vm_disk_size(vm_disk_size)
self._add_vm_disk(vapp_or_vm_id, vm_disk_size)
@staticmethod
def _validate_vm_names(names):
if names is None:
return
hname_re = re.compile(
'^(([a-zA-Z]|[a-zA-Z][a-zA-Z0-9]*)[\-])*([A-Za-z]|[A-Za-z][A-Za-z0-9]*[A-Za-z0-9])$') # NOQA
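        # For illustration: names such as "web01" or "app-server1" satisfy
        # this pattern, while names containing underscores ("web_01"),
        # starting with a digit ("1web"), ending with a hyphen ("web-") or
        # longer than 15 characters are rejected below.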
for name in names:
if len(name) > 15:
raise ValueError(
'The VM name "' + name + '" is too long for the computer '
'name (max 15 chars allowed).')
if not hname_re.match(name):
raise ValueError('The VM name "' + name + '" can not be '
'used. "' + name + '" is not a valid '
'computer name for the VM.')
@staticmethod
def _validate_vm_memory(vm_memory):
if vm_memory is None:
return
elif vm_memory not in VIRTUAL_MEMORY_VALS:
raise ValueError(
'%s is not a valid vApp VM memory value' % vm_memory)
@staticmethod
def _validate_vm_cpu(vm_cpu):
if vm_cpu is None:
return
elif vm_cpu not in VIRTUAL_CPU_VALS_1_5:
raise ValueError('%s is not a valid vApp VM CPU value' % vm_cpu)
@staticmethod
def _validate_vm_disk_size(vm_disk):
if vm_disk is None:
return
elif int(vm_disk) < 0:
            raise ValueError(
                '%s is not a valid vApp VM disk space value' % vm_disk)
@staticmethod
def _validate_vm_script(vm_script):
if vm_script is None:
return
# Try to locate the script file
if not os.path.isabs(vm_script):
vm_script = os.path.expanduser(vm_script)
vm_script = os.path.abspath(vm_script)
if not os.path.isfile(vm_script):
            raise LibcloudError(
                "The VM script file %s does not exist" % vm_script)
        # Verify the script file is readable before accepting it; any
        # IOError is propagated to the caller.
        with open(vm_script) as fp:
            fp.read()
return vm_script
@staticmethod
def _validate_vm_fence(vm_fence):
if vm_fence is None:
return
elif vm_fence not in FENCE_MODE_VALS_1_5:
raise ValueError('%s is not a valid fencing mode value' % vm_fence)
@staticmethod
def _validate_vm_ipmode(vm_ipmode):
if vm_ipmode is None:
return
elif vm_ipmode == 'MANUAL':
raise NotImplementedError(
'MANUAL IP mode: The interface for supplying '
'IPAddress does not exist yet')
elif vm_ipmode not in IP_MODE_VALS_1_5:
raise ValueError(
'%s is not a valid IP address allocation mode value'
% vm_ipmode)
def _change_vm_names(self, vapp_or_vm_id, vm_names):
if vm_names is None:
return
vms = self._get_vm_elements(vapp_or_vm_id)
for i, vm in enumerate(vms):
if len(vm_names) <= i:
return
# Get GuestCustomizationSection
res = self.connection.request(
'%s/guestCustomizationSection' % get_url_path(vm.get('href')))
# Update GuestCustomizationSection
res.object.find(
fixxpath(res.object, 'ComputerName')).text = vm_names[i]
# Remove AdminPassword from customization section
admin_pass = res.object.find(fixxpath(res.object, 'AdminPassword'))
if admin_pass is not None:
res.object.remove(admin_pass)
headers = {
'Content-Type':
'application/vnd.vmware.vcloud.guestCustomizationSection+xml'
}
res = self.connection.request(
'%s/guestCustomizationSection' % get_url_path(vm.get('href')),
data=ET.tostring(res.object),
method='PUT',
headers=headers
)
self._wait_for_task_completion(res.object.get('href'))
# Update Vm name
req_xml = ET.Element("Vm", {
'name': vm_names[i],
'xmlns': "http://www.vmware.com/vcloud/v1.5"})
res = self.connection.request(
get_url_path(vm.get('href')),
data=ET.tostring(req_xml),
method='PUT',
headers={
'Content-Type': 'application/vnd.vmware.vcloud.vm+xml'}
)
self._wait_for_task_completion(res.object.get('href'))
def _change_vm_cpu(self, vapp_or_vm_id, vm_cpu):
if vm_cpu is None:
return
vms = self._get_vm_elements(vapp_or_vm_id)
for vm in vms:
# Get virtualHardwareSection/cpu section
res = self.connection.request(
'%s/virtualHardwareSection/cpu' % get_url_path(vm.get('href')))
# Update VirtualQuantity field
xpath = ('{http://schemas.dmtf.org/wbem/wscim/1/cim-schema/2/'
'CIM_ResourceAllocationSettingData}VirtualQuantity')
res.object.find(xpath).text = str(vm_cpu)
headers = {
'Content-Type': 'application/vnd.vmware.vcloud.rasdItem+xml'
}
res = self.connection.request(
'%s/virtualHardwareSection/cpu' % get_url_path(vm.get('href')),
data=ET.tostring(res.object),
method='PUT',
headers=headers
)
self._wait_for_task_completion(res.object.get('href'))
def _change_vm_memory(self, vapp_or_vm_id, vm_memory):
if vm_memory is None:
return
vms = self._get_vm_elements(vapp_or_vm_id)
for vm in vms:
# Get virtualHardwareSection/memory section
res = self.connection.request(
'%s/virtualHardwareSection/memory' %
get_url_path(vm.get('href')))
# Update VirtualQuantity field
xpath = ('{http://schemas.dmtf.org/wbem/wscim/1/cim-schema/2/'
'CIM_ResourceAllocationSettingData}VirtualQuantity')
res.object.find(xpath).text = str(vm_memory)
headers = {
'Content-Type': 'application/vnd.vmware.vcloud.rasdItem+xml'
}
res = self.connection.request(
'%s/virtualHardwareSection/memory' % get_url_path(
vm.get('href')),
data=ET.tostring(res.object),
method='PUT',
headers=headers
)
self._wait_for_task_completion(res.object.get('href'))
def _add_vm_disk(self, vapp_or_vm_id, vm_disk):
if vm_disk is None:
return
rasd_ns = ('{http://schemas.dmtf.org/wbem/wscim/1/cim-schema/2/'
'CIM_ResourceAllocationSettingData}')
vms = self._get_vm_elements(vapp_or_vm_id)
for vm in vms:
# Get virtualHardwareSection/disks section
res = self.connection.request(
'%s/virtualHardwareSection/disks' %
get_url_path(vm.get('href')))
existing_ids = []
new_disk = None
for item in res.object.findall(fixxpath(res.object, 'Item')):
# Clean Items from unnecessary stuff
for elem in item:
if elem.tag == '%sInstanceID' % rasd_ns:
existing_ids.append(int(elem.text))
if elem.tag in ['%sAddressOnParent' % rasd_ns,
'%sParent' % rasd_ns]:
item.remove(elem)
if item.find('%sHostResource' % rasd_ns) is not None:
new_disk = item
new_disk = copy.deepcopy(new_disk)
disk_id = max(existing_ids) + 1
new_disk.find('%sInstanceID' % rasd_ns).text = str(disk_id)
new_disk.find('%sElementName' %
rasd_ns).text = 'Hard Disk ' + str(disk_id)
new_disk.find('%sHostResource' % rasd_ns).set(
fixxpath(new_disk, 'capacity'), str(int(vm_disk) * 1024))
res.object.append(new_disk)
headers = {
'Content-Type':
'application/vnd.vmware.vcloud.rasditemslist+xml'
}
res = self.connection.request(
'%s/virtualHardwareSection/disks' % get_url_path(
vm.get('href')),
data=ET.tostring(res.object),
method='PUT',
headers=headers
)
self._wait_for_task_completion(res.object.get('href'))
def _change_vm_script(self, vapp_or_vm_id, vm_script):
if vm_script is None:
return
vms = self._get_vm_elements(vapp_or_vm_id)
        try:
            with open(vm_script) as fp:
                script = fp.read()
        except IOError:
            # The script file cannot be read; leave the VMs untouched.
            return
# ElementTree escapes script characters automatically. Escape
# requirements:
# http://www.vmware.com/support/vcd/doc/rest-api-doc-1.5-html/types/
# GuestCustomizationSectionType.html
for vm in vms:
# Get GuestCustomizationSection
res = self.connection.request(
'%s/guestCustomizationSection' % get_url_path(vm.get('href')))
# Attempt to update any existing CustomizationScript element
try:
res.object.find(
fixxpath(res.object, 'CustomizationScript')).text = script
            except AttributeError:
# CustomizationScript section does not exist, insert it just
# before ComputerName
for i, e in enumerate(res.object):
if e.tag == \
'{http://www.vmware.com/vcloud/v1.5}ComputerName':
break
e = ET.Element(
'{http://www.vmware.com/vcloud/v1.5}CustomizationScript')
e.text = script
res.object.insert(i, e)
# Remove AdminPassword from customization section due to an API
# quirk
admin_pass = res.object.find(fixxpath(res.object, 'AdminPassword'))
if admin_pass is not None:
res.object.remove(admin_pass)
# Update VM's GuestCustomizationSection
headers = {
'Content-Type':
'application/vnd.vmware.vcloud.guestCustomizationSection+xml'
}
res = self.connection.request(
'%s/guestCustomizationSection' % get_url_path(vm.get('href')),
data=ET.tostring(res.object),
method='PUT',
headers=headers
)
self._wait_for_task_completion(res.object.get('href'))
def _change_vm_ipmode(self, vapp_or_vm_id, vm_ipmode):
if vm_ipmode is None:
return
vms = self._get_vm_elements(vapp_or_vm_id)
for vm in vms:
res = self.connection.request(
'%s/networkConnectionSection' % get_url_path(vm.get('href')))
net_conns = res.object.findall(
fixxpath(res.object, 'NetworkConnection'))
for c in net_conns:
c.find(fixxpath(c, 'IpAddressAllocationMode')).text = vm_ipmode
headers = {
'Content-Type':
'application/vnd.vmware.vcloud.networkConnectionSection+xml'
}
res = self.connection.request(
'%s/networkConnectionSection' % get_url_path(vm.get('href')),
data=ET.tostring(res.object),
method='PUT',
headers=headers
)
self._wait_for_task_completion(res.object.get('href'))
def _get_network_href(self, network_name):
network_href = None
# Find the organisation's network href
res = self.connection.request(self.org)
links = res.object.findall(fixxpath(res.object, 'Link'))
for l in links:
if l.attrib['type'] == \
'application/vnd.vmware.vcloud.orgNetwork+xml' \
and l.attrib['name'] == network_name:
network_href = l.attrib['href']
if network_href is None:
raise ValueError(
'%s is not a valid organisation network name' % network_name)
else:
return network_href
def _get_vm_elements(self, vapp_or_vm_id):
res = self.connection.request(get_url_path(vapp_or_vm_id))
if res.object.tag.endswith('VApp'):
vms = res.object.findall(fixxpath(res.object, 'Children/Vm'))
elif res.object.tag.endswith('Vm'):
vms = [res.object]
else:
raise ValueError(
'Specified ID value is not a valid VApp or Vm identifier.')
return vms
def _is_node(self, node_or_image):
return isinstance(node_or_image, Node)
def _to_node(self, node_elm):
# Parse VMs as extra field
vms = []
for vm_elem in node_elm.findall(fixxpath(node_elm, 'Children/Vm')):
public_ips = []
private_ips = []
xpath = fixxpath(vm_elem,
'NetworkConnectionSection/NetworkConnection')
for connection in vm_elem.findall(xpath):
ip = connection.find(fixxpath(connection, "IpAddress"))
if ip is not None:
private_ips.append(ip.text)
external_ip = connection.find(
fixxpath(connection, "ExternalIpAddress"))
if external_ip is not None:
public_ips.append(external_ip.text)
elif ip is not None:
public_ips.append(ip.text)
xpath = ('{http://schemas.dmtf.org/ovf/envelope/1}'
'OperatingSystemSection')
os_type_elem = vm_elem.find(xpath)
if os_type_elem is not None:
os_type = os_type_elem.get(
'{http://www.vmware.com/schema/ovf}osType')
else:
os_type = None
vm = {
'id': vm_elem.get('href'),
'name': vm_elem.get('name'),
'state': self.NODE_STATE_MAP[vm_elem.get('status')],
'public_ips': public_ips,
'private_ips': private_ips,
'os_type': os_type
}
vms.append(vm)
# Take the node IP addresses from all VMs
public_ips = []
private_ips = []
for vm in vms:
public_ips.extend(vm['public_ips'])
private_ips.extend(vm['private_ips'])
# Find vDC
vdc_id = next(link.get('href') for link
in node_elm.findall(fixxpath(node_elm, 'Link'))
if link.get('type') ==
'application/vnd.vmware.vcloud.vdc+xml')
vdc = next(vdc for vdc in self.vdcs if vdc.id == vdc_id)
node = Node(id=node_elm.get('href'),
name=node_elm.get('name'),
state=self.NODE_STATE_MAP[node_elm.get('status')],
public_ips=public_ips,
private_ips=private_ips,
driver=self.connection.driver,
extra={'vdc': vdc.name, 'vms': vms})
return node
def _to_vdc(self, vdc_elm):
def get_capacity_values(capacity_elm):
if capacity_elm is None:
return None
limit = int(capacity_elm.findtext(fixxpath(capacity_elm, 'Limit')))
used = int(capacity_elm.findtext(fixxpath(capacity_elm, 'Used')))
units = capacity_elm.findtext(fixxpath(capacity_elm, 'Units'))
return Capacity(limit, used, units)
cpu = get_capacity_values(
vdc_elm.find(fixxpath(vdc_elm, 'ComputeCapacity/Cpu')))
memory = get_capacity_values(
vdc_elm.find(fixxpath(vdc_elm, 'ComputeCapacity/Memory')))
storage = get_capacity_values(
vdc_elm.find(fixxpath(vdc_elm, 'StorageCapacity')))
return Vdc(id=vdc_elm.get('href'),
name=vdc_elm.get('name'),
driver=self,
allocation_model=vdc_elm.findtext(
fixxpath(vdc_elm, 'AllocationModel')),
cpu=cpu,
memory=memory,
storage=storage)
class VCloud_5_1_NodeDriver(VCloud_1_5_NodeDriver):
@staticmethod
def _validate_vm_memory(vm_memory):
if vm_memory is None:
return None
elif (vm_memory % 4) != 0:
# The vcd 5.1 virtual machine memory size must be a multiple of 4
# MB
raise ValueError(
'%s is not a valid vApp VM memory value' % (vm_memory))
| {
"content_hash": "c96951cdcab46280f81e7ad785a14406",
"timestamp": "",
"source": "github",
"line_count": 2073,
"max_line_length": 105,
"avg_line_length": 36.00096478533526,
"alnum_prop": 0.5277100361784806,
"repo_name": "jimbobhickville/libcloud",
"id": "efe33e41d24649cad4fd3e4140a10840dc9ff53a",
"size": "75411",
"binary": false,
"copies": "7",
"ref": "refs/heads/trunk",
"path": "libcloud/compute/drivers/vcloud.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "2545"
},
{
"name": "Python",
"bytes": "4397714"
},
{
"name": "Shell",
"bytes": "13868"
}
],
"symlink_target": ""
} |
import functools
import itertools
import socket
import ssl
import time
import uuid
import eventlet
import greenlet
import kombu
import kombu.connection
import kombu.entity
import kombu.messaging
from oslo.config import cfg
from trove_guestagent.openstack.common import excutils
from trove_guestagent.openstack.common.gettextutils import _ # noqa
from trove_guestagent.openstack.common import network_utils
from trove_guestagent.openstack.common.rpc import amqp as rpc_amqp
from trove_guestagent.openstack.common.rpc import common as rpc_common
from trove_guestagent.openstack.common import sslutils
kombu_opts = [
cfg.StrOpt('kombu_ssl_version',
default='',
help='SSL version to use (valid only if SSL enabled). '
'valid values are TLSv1, SSLv23 and SSLv3. SSLv2 may '
'be available on some distributions'
),
cfg.StrOpt('kombu_ssl_keyfile',
default='',
help='SSL key file (valid only if SSL enabled)'),
cfg.StrOpt('kombu_ssl_certfile',
default='',
help='SSL cert file (valid only if SSL enabled)'),
cfg.StrOpt('kombu_ssl_ca_certs',
default='',
help=('SSL certification authority file '
'(valid only if SSL enabled)')),
cfg.StrOpt('rabbit_host',
default='localhost',
help='The RabbitMQ broker address where a single node is used'),
cfg.IntOpt('rabbit_port',
default=5672,
help='The RabbitMQ broker port where a single node is used'),
cfg.ListOpt('rabbit_hosts',
default=['$rabbit_host:$rabbit_port'],
help='RabbitMQ HA cluster host:port pairs'),
cfg.BoolOpt('rabbit_use_ssl',
default=False,
help='connect over SSL for RabbitMQ'),
cfg.StrOpt('rabbit_userid',
default='guest',
help='the RabbitMQ userid'),
cfg.StrOpt('rabbit_password',
default='guest',
help='the RabbitMQ password',
secret=True),
cfg.StrOpt('rabbit_virtual_host',
default='/',
help='the RabbitMQ virtual host'),
cfg.IntOpt('rabbit_retry_interval',
default=1,
help='how frequently to retry connecting with RabbitMQ'),
cfg.IntOpt('rabbit_retry_backoff',
default=2,
help='how long to backoff for between retries when connecting '
'to RabbitMQ'),
cfg.IntOpt('rabbit_max_retries',
default=0,
help='maximum retries with trying to connect to RabbitMQ '
'(the default of 0 implies an infinite retry count)'),
cfg.BoolOpt('rabbit_ha_queues',
default=False,
help='use H/A queues in RabbitMQ (x-ha-policy: all).'
'You need to wipe RabbitMQ database when '
'changing this option.'),
]
cfg.CONF.register_opts(kombu_opts)
LOG = rpc_common.LOG
def _get_queue_arguments(conf):
"""Construct the arguments for declaring a queue.
If the rabbit_ha_queues option is set, we declare a mirrored queue
as described here:
http://www.rabbitmq.com/ha.html
Setting x-ha-policy to all means that the queue will be mirrored
to all nodes in the cluster.
"""
return {'x-ha-policy': 'all'} if conf.rabbit_ha_queues else {}
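# For illustration: when rabbit_ha_queues is enabled, the dict returned by
# _get_queue_arguments() is passed through to kombu.entity.Queue() as
# queue_arguments={'x-ha-policy': 'all'} by the consumer classes below,
# which asks RabbitMQ to mirror the queue across every node in the cluster.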
class ConsumerBase(object):
"""Consumer base class."""
def __init__(self, channel, callback, tag, **kwargs):
"""Declare a queue on an amqp channel.
'channel' is the amqp channel to use
'callback' is the callback to call when messages are received
'tag' is a unique ID for the consumer on the channel
queue name, exchange name, and other kombu options are
passed in here as a dictionary.
"""
self.callback = callback
self.tag = str(tag)
self.kwargs = kwargs
self.queue = None
self.ack_on_error = kwargs.get('ack_on_error', True)
self.reconnect(channel)
def reconnect(self, channel):
"""Re-declare the queue after a rabbit reconnect."""
self.channel = channel
self.kwargs['channel'] = channel
self.queue = kombu.entity.Queue(**self.kwargs)
self.queue.declare()
def _callback_handler(self, message, callback):
"""Call callback with deserialized message.
Messages that are processed without exception are ack'ed.
If the message processing generates an exception, it will be
ack'ed if ack_on_error=True. Otherwise it will be .requeue()'ed.
"""
try:
msg = rpc_common.deserialize_msg(message.payload)
callback(msg)
except Exception:
if self.ack_on_error:
LOG.exception(_("Failed to process message"
" ... skipping it."))
message.ack()
else:
LOG.exception(_("Failed to process message"
" ... will requeue."))
message.requeue()
else:
message.ack()
def consume(self, *args, **kwargs):
"""Actually declare the consumer on the amqp channel. This will
start the flow of messages from the queue. Using the
Connection.iterconsume() iterator will process the messages,
calling the appropriate callback.
If a callback is specified in kwargs, use that. Otherwise,
use the callback passed during __init__()
If kwargs['nowait'] is True, then this call will block until
a message is read.
"""
options = {'consumer_tag': self.tag}
options['nowait'] = kwargs.get('nowait', False)
callback = kwargs.get('callback', self.callback)
if not callback:
raise ValueError("No callback defined")
def _callback(raw_message):
message = self.channel.message_to_python(raw_message)
self._callback_handler(message, callback)
self.queue.consume(*args, callback=_callback, **options)
def cancel(self):
"""Cancel the consuming from the queue, if it has started."""
try:
self.queue.cancel(self.tag)
except KeyError as e:
# NOTE(comstud): Kludge to get around a amqplib bug
if str(e) != "u'%s'" % self.tag:
raise
self.queue = None
class DirectConsumer(ConsumerBase):
"""Queue/consumer class for 'direct'."""
def __init__(self, conf, channel, msg_id, callback, tag, **kwargs):
"""Init a 'direct' queue.
'channel' is the amqp channel to use
'msg_id' is the msg_id to listen on
'callback' is the callback to call when messages are received
'tag' is a unique ID for the consumer on the channel
Other kombu options may be passed
"""
# Default options
options = {'durable': False,
'queue_arguments': _get_queue_arguments(conf),
'auto_delete': True,
'exclusive': False}
options.update(kwargs)
exchange = kombu.entity.Exchange(name=msg_id,
type='direct',
durable=options['durable'],
auto_delete=options['auto_delete'])
super(DirectConsumer, self).__init__(channel,
callback,
tag,
name=msg_id,
exchange=exchange,
routing_key=msg_id,
**options)
class TopicConsumer(ConsumerBase):
"""Consumer class for 'topic'."""
def __init__(self, conf, channel, topic, callback, tag, name=None,
exchange_name=None, **kwargs):
"""Init a 'topic' queue.
:param channel: the amqp channel to use
:param topic: the topic to listen on
:paramtype topic: str
:param callback: the callback to call when messages are received
:param tag: a unique ID for the consumer on the channel
:param name: optional queue name, defaults to topic
:paramtype name: str
Other kombu options may be passed as keyword arguments
"""
# Default options
options = {'durable': conf.amqp_durable_queues,
'queue_arguments': _get_queue_arguments(conf),
'auto_delete': conf.amqp_auto_delete,
'exclusive': False}
options.update(kwargs)
exchange_name = exchange_name or rpc_amqp.get_control_exchange(conf)
exchange = kombu.entity.Exchange(name=exchange_name,
type='topic',
durable=options['durable'],
auto_delete=options['auto_delete'])
super(TopicConsumer, self).__init__(channel,
callback,
tag,
name=name or topic,
exchange=exchange,
routing_key=topic,
**options)
class FanoutConsumer(ConsumerBase):
"""Consumer class for 'fanout'."""
def __init__(self, conf, channel, topic, callback, tag, **kwargs):
"""Init a 'fanout' queue.
'channel' is the amqp channel to use
'topic' is the topic to listen on
'callback' is the callback to call when messages are received
'tag' is a unique ID for the consumer on the channel
Other kombu options may be passed
"""
unique = uuid.uuid4().hex
exchange_name = '%s_fanout' % topic
queue_name = '%s_fanout_%s' % (topic, unique)
# Default options
options = {'durable': False,
'queue_arguments': _get_queue_arguments(conf),
'auto_delete': True,
'exclusive': False}
options.update(kwargs)
exchange = kombu.entity.Exchange(name=exchange_name, type='fanout',
durable=options['durable'],
auto_delete=options['auto_delete'])
super(FanoutConsumer, self).__init__(channel, callback, tag,
name=queue_name,
exchange=exchange,
routing_key=topic,
**options)
class Publisher(object):
"""Base Publisher class."""
def __init__(self, channel, exchange_name, routing_key, **kwargs):
"""Init the Publisher class with the exchange_name, routing_key,
and other options
"""
self.exchange_name = exchange_name
self.routing_key = routing_key
self.kwargs = kwargs
self.reconnect(channel)
def reconnect(self, channel):
"""Re-establish the Producer after a rabbit reconnection."""
self.exchange = kombu.entity.Exchange(name=self.exchange_name,
**self.kwargs)
self.producer = kombu.messaging.Producer(exchange=self.exchange,
channel=channel,
routing_key=self.routing_key)
def send(self, msg, timeout=None):
"""Send a message."""
if timeout:
#
# AMQP TTL is in milliseconds when set in the header.
#
self.producer.publish(msg, headers={'ttl': (timeout * 1000)})
else:
self.producer.publish(msg)
class DirectPublisher(Publisher):
"""Publisher class for 'direct'."""
def __init__(self, conf, channel, msg_id, **kwargs):
"""init a 'direct' publisher.
Kombu options may be passed as keyword args to override defaults
"""
options = {'durable': False,
'auto_delete': True,
'exclusive': False}
options.update(kwargs)
super(DirectPublisher, self).__init__(channel, msg_id, msg_id,
type='direct', **options)
class TopicPublisher(Publisher):
"""Publisher class for 'topic'."""
def __init__(self, conf, channel, topic, **kwargs):
"""init a 'topic' publisher.
Kombu options may be passed as keyword args to override defaults
"""
options = {'durable': conf.amqp_durable_queues,
'auto_delete': conf.amqp_auto_delete,
'exclusive': False}
options.update(kwargs)
exchange_name = rpc_amqp.get_control_exchange(conf)
super(TopicPublisher, self).__init__(channel,
exchange_name,
topic,
type='topic',
**options)
class FanoutPublisher(Publisher):
"""Publisher class for 'fanout'."""
def __init__(self, conf, channel, topic, **kwargs):
"""init a 'fanout' publisher.
Kombu options may be passed as keyword args to override defaults
"""
options = {'durable': False,
'auto_delete': True,
'exclusive': False}
options.update(kwargs)
super(FanoutPublisher, self).__init__(channel, '%s_fanout' % topic,
None, type='fanout', **options)
class NotifyPublisher(TopicPublisher):
"""Publisher class for 'notify'."""
def __init__(self, conf, channel, topic, **kwargs):
self.durable = kwargs.pop('durable', conf.amqp_durable_queues)
self.queue_arguments = _get_queue_arguments(conf)
super(NotifyPublisher, self).__init__(conf, channel, topic, **kwargs)
def reconnect(self, channel):
super(NotifyPublisher, self).reconnect(channel)
# NOTE(jerdfelt): Normally the consumer would create the queue, but
# we do this to ensure that messages don't get dropped if the
# consumer is started after we do
queue = kombu.entity.Queue(channel=channel,
exchange=self.exchange,
durable=self.durable,
name=self.routing_key,
routing_key=self.routing_key,
queue_arguments=self.queue_arguments)
queue.declare()
class Connection(object):
"""Connection object."""
pool = None
def __init__(self, conf, server_params=None):
self.consumers = []
self.consumer_thread = None
self.proxy_callbacks = []
self.conf = conf
self.max_retries = self.conf.rabbit_max_retries
# Try forever?
if self.max_retries <= 0:
self.max_retries = None
self.interval_start = self.conf.rabbit_retry_interval
self.interval_stepping = self.conf.rabbit_retry_backoff
# max retry-interval = 30 seconds
self.interval_max = 30
self.memory_transport = False
if server_params is None:
server_params = {}
# Keys to translate from server_params to kombu params
server_params_to_kombu_params = {'username': 'userid'}
ssl_params = self._fetch_ssl_params()
params_list = []
for adr in self.conf.rabbit_hosts:
hostname, port = network_utils.parse_host_port(
adr, default_port=self.conf.rabbit_port)
params = {
'hostname': hostname,
'port': port,
'userid': self.conf.rabbit_userid,
'password': self.conf.rabbit_password,
'virtual_host': self.conf.rabbit_virtual_host,
}
for sp_key, value in server_params.iteritems():
p_key = server_params_to_kombu_params.get(sp_key, sp_key)
params[p_key] = value
if self.conf.fake_rabbit:
params['transport'] = 'memory'
if self.conf.rabbit_use_ssl:
params['ssl'] = ssl_params
params_list.append(params)
self.params_list = params_list
self.memory_transport = self.conf.fake_rabbit
self.connection = None
self.reconnect()
def _fetch_ssl_params(self):
"""Handles fetching what ssl params should be used for the connection
(if any).
"""
ssl_params = dict()
# http://docs.python.org/library/ssl.html - ssl.wrap_socket
if self.conf.kombu_ssl_version:
ssl_params['ssl_version'] = sslutils.validate_ssl_version(
self.conf.kombu_ssl_version)
if self.conf.kombu_ssl_keyfile:
ssl_params['keyfile'] = self.conf.kombu_ssl_keyfile
if self.conf.kombu_ssl_certfile:
ssl_params['certfile'] = self.conf.kombu_ssl_certfile
if self.conf.kombu_ssl_ca_certs:
ssl_params['ca_certs'] = self.conf.kombu_ssl_ca_certs
# We might want to allow variations in the
# future with this?
ssl_params['cert_reqs'] = ssl.CERT_REQUIRED
# Return the extended behavior or just have the default behavior
return ssl_params or True
def _connect(self, params):
"""Connect to rabbit. Re-establish any queues that may have
been declared before if we are reconnecting. Exceptions should
be handled by the caller.
"""
if self.connection:
LOG.info(_("Reconnecting to AMQP server on "
"%(hostname)s:%(port)d") % params)
try:
self.connection.release()
except self.connection_errors:
pass
# Setting this in case the next statement fails, though
# it shouldn't be doing any network operations, yet.
self.connection = None
self.connection = kombu.connection.BrokerConnection(**params)
self.connection_errors = self.connection.connection_errors
if self.memory_transport:
# Kludge to speed up tests.
self.connection.transport.polling_interval = 0.0
self.consumer_num = itertools.count(1)
self.connection.connect()
self.channel = self.connection.channel()
# work around 'memory' transport bug in 1.1.3
if self.memory_transport:
self.channel._new_queue('ae.undeliver')
for consumer in self.consumers:
consumer.reconnect(self.channel)
LOG.info(_('Connected to AMQP server on %(hostname)s:%(port)d') %
params)
def reconnect(self):
"""Handles reconnecting and re-establishing queues.
Will retry up to self.max_retries number of times.
self.max_retries = 0 means to retry forever.
Sleep between tries, starting at self.interval_start
seconds, backing off self.interval_stepping number of seconds
each attempt.
"""
attempt = 0
while True:
params = self.params_list[attempt % len(self.params_list)]
attempt += 1
try:
self._connect(params)
return
except (IOError, self.connection_errors) as e:
pass
except Exception as e:
# NOTE(comstud): Unfortunately it's possible for amqplib
# to return an error not covered by its transport
# connection_errors in the case of a timeout waiting for
# a protocol response. (See paste link in LP888621)
# So, we check all exceptions for 'timeout' in them
# and try to reconnect in this case.
if 'timeout' not in str(e):
raise
log_info = {}
log_info['err_str'] = str(e)
log_info['max_retries'] = self.max_retries
log_info.update(params)
if self.max_retries and attempt == self.max_retries:
msg = _('Unable to connect to AMQP server on '
'%(hostname)s:%(port)d after %(max_retries)d '
'tries: %(err_str)s') % log_info
LOG.error(msg)
raise rpc_common.RPCException(msg)
if attempt == 1:
sleep_time = self.interval_start or 1
elif attempt > 1:
sleep_time += self.interval_stepping
if self.interval_max:
sleep_time = min(sleep_time, self.interval_max)
log_info['sleep_time'] = sleep_time
LOG.error(_('AMQP server on %(hostname)s:%(port)d is '
'unreachable: %(err_str)s. Trying again in '
'%(sleep_time)d seconds.') % log_info)
time.sleep(sleep_time)
def ensure(self, error_callback, method, *args, **kwargs):
while True:
try:
return method(*args, **kwargs)
except (self.connection_errors, socket.timeout, IOError) as e:
if error_callback:
error_callback(e)
except Exception as e:
# NOTE(comstud): Unfortunately it's possible for amqplib
# to return an error not covered by its transport
# connection_errors in the case of a timeout waiting for
# a protocol response. (See paste link in LP888621)
# So, we check all exceptions for 'timeout' in them
# and try to reconnect in this case.
if 'timeout' not in str(e):
raise
if error_callback:
error_callback(e)
self.reconnect()
def get_channel(self):
"""Convenience call for bin/clear_rabbit_queues."""
return self.channel
def close(self):
"""Close/release this connection."""
self.cancel_consumer_thread()
self.wait_on_proxy_callbacks()
self.connection.release()
self.connection = None
def reset(self):
"""Reset a connection so it can be used again."""
self.cancel_consumer_thread()
self.wait_on_proxy_callbacks()
self.channel.close()
self.channel = self.connection.channel()
# work around 'memory' transport bug in 1.1.3
if self.memory_transport:
self.channel._new_queue('ae.undeliver')
self.consumers = []
def declare_consumer(self, consumer_cls, topic, callback):
"""Create a Consumer using the class that was passed in and
add it to our list of consumers
"""
def _connect_error(exc):
log_info = {'topic': topic, 'err_str': str(exc)}
LOG.error(_("Failed to declare consumer for topic '%(topic)s': "
"%(err_str)s") % log_info)
def _declare_consumer():
consumer = consumer_cls(self.conf, self.channel, topic, callback,
self.consumer_num.next())
self.consumers.append(consumer)
return consumer
return self.ensure(_connect_error, _declare_consumer)
def iterconsume(self, limit=None, timeout=None):
"""Return an iterator that will consume from all queues/consumers."""
info = {'do_consume': True}
def _error_callback(exc):
if isinstance(exc, socket.timeout):
LOG.debug(_('Timed out waiting for RPC response: %s') %
str(exc))
raise rpc_common.Timeout()
else:
LOG.exception(_('Failed to consume message from queue: %s') %
str(exc))
info['do_consume'] = True
def _consume():
if info['do_consume']:
queues_head = self.consumers[:-1] # not fanout.
queues_tail = self.consumers[-1] # fanout
for queue in queues_head:
queue.consume(nowait=True)
queues_tail.consume(nowait=False)
info['do_consume'] = False
return self.connection.drain_events(timeout=timeout)
for iteration in itertools.count(0):
if limit and iteration >= limit:
raise StopIteration
yield self.ensure(_error_callback, _consume)
def cancel_consumer_thread(self):
"""Cancel a consumer thread."""
if self.consumer_thread is not None:
self.consumer_thread.kill()
try:
self.consumer_thread.wait()
except greenlet.GreenletExit:
pass
self.consumer_thread = None
def wait_on_proxy_callbacks(self):
"""Wait for all proxy callback threads to exit."""
for proxy_cb in self.proxy_callbacks:
proxy_cb.wait()
def publisher_send(self, cls, topic, msg, timeout=None, **kwargs):
"""Send to a publisher based on the publisher class."""
def _error_callback(exc):
log_info = {'topic': topic, 'err_str': str(exc)}
LOG.exception(_("Failed to publish message to topic "
"'%(topic)s': %(err_str)s") % log_info)
def _publish():
publisher = cls(self.conf, self.channel, topic, **kwargs)
publisher.send(msg, timeout)
self.ensure(_error_callback, _publish)
def declare_direct_consumer(self, topic, callback):
"""Create a 'direct' queue.
In nova's use, this is generally a msg_id queue used for
responses for call/multicall
"""
self.declare_consumer(DirectConsumer, topic, callback)
def declare_topic_consumer(self, topic, callback=None, queue_name=None,
exchange_name=None, ack_on_error=True):
"""Create a 'topic' consumer."""
self.declare_consumer(functools.partial(TopicConsumer,
name=queue_name,
exchange_name=exchange_name,
ack_on_error=ack_on_error,
),
topic, callback)
def declare_fanout_consumer(self, topic, callback):
"""Create a 'fanout' consumer."""
self.declare_consumer(FanoutConsumer, topic, callback)
def direct_send(self, msg_id, msg):
"""Send a 'direct' message."""
self.publisher_send(DirectPublisher, msg_id, msg)
def topic_send(self, topic, msg, timeout=None):
"""Send a 'topic' message."""
self.publisher_send(TopicPublisher, topic, msg, timeout)
def fanout_send(self, topic, msg):
"""Send a 'fanout' message."""
self.publisher_send(FanoutPublisher, topic, msg)
def notify_send(self, topic, msg, **kwargs):
"""Send a notify message on a topic."""
self.publisher_send(NotifyPublisher, topic, msg, None, **kwargs)
def consume(self, limit=None):
"""Consume from all queues/consumers."""
it = self.iterconsume(limit=limit)
while True:
try:
it.next()
except StopIteration:
return
def consume_in_thread(self):
"""Consumer from all queues/consumers in a greenthread."""
@excutils.forever_retry_uncaught_exceptions
def _consumer_thread():
try:
self.consume()
except greenlet.GreenletExit:
return
if self.consumer_thread is None:
self.consumer_thread = eventlet.spawn(_consumer_thread)
return self.consumer_thread
def create_consumer(self, topic, proxy, fanout=False):
"""Create a consumer that calls a method in a proxy object."""
proxy_cb = rpc_amqp.ProxyCallback(
self.conf, proxy,
rpc_amqp.get_connection_pool(self.conf, Connection))
self.proxy_callbacks.append(proxy_cb)
if fanout:
self.declare_fanout_consumer(topic, proxy_cb)
else:
self.declare_topic_consumer(topic, proxy_cb)
def create_worker(self, topic, proxy, pool_name):
"""Create a worker that calls a method in a proxy object."""
proxy_cb = rpc_amqp.ProxyCallback(
self.conf, proxy,
rpc_amqp.get_connection_pool(self.conf, Connection))
self.proxy_callbacks.append(proxy_cb)
self.declare_topic_consumer(topic, proxy_cb, pool_name)
def join_consumer_pool(self, callback, pool_name, topic,
exchange_name=None, ack_on_error=True):
"""Register as a member of a group of consumers for a given topic from
the specified exchange.
Exactly one member of a given pool will receive each message.
A message will be delivered to multiple pools, if more than
one is created.
"""
callback_wrapper = rpc_amqp.CallbackWrapper(
conf=self.conf,
callback=callback,
connection_pool=rpc_amqp.get_connection_pool(self.conf,
Connection),
)
self.proxy_callbacks.append(callback_wrapper)
self.declare_topic_consumer(
queue_name=pool_name,
topic=topic,
exchange_name=exchange_name,
callback=callback_wrapper,
ack_on_error=ack_on_error,
)
def create_connection(conf, new=True):
"""Create a connection."""
return rpc_amqp.create_connection(
conf, new,
rpc_amqp.get_connection_pool(conf, Connection))
def multicall(conf, context, topic, msg, timeout=None):
"""Make a call that returns multiple times."""
return rpc_amqp.multicall(
conf, context, topic, msg, timeout,
rpc_amqp.get_connection_pool(conf, Connection))
def call(conf, context, topic, msg, timeout=None):
"""Sends a message on a topic and wait for a response."""
return rpc_amqp.call(
conf, context, topic, msg, timeout,
rpc_amqp.get_connection_pool(conf, Connection))
def cast(conf, context, topic, msg):
"""Sends a message on a topic without waiting for a response."""
return rpc_amqp.cast(
conf, context, topic, msg,
rpc_amqp.get_connection_pool(conf, Connection))
def fanout_cast(conf, context, topic, msg):
"""Sends a message on a fanout exchange without waiting for a response."""
return rpc_amqp.fanout_cast(
conf, context, topic, msg,
rpc_amqp.get_connection_pool(conf, Connection))
def cast_to_server(conf, context, server_params, topic, msg):
"""Sends a message on a topic to a specific server."""
return rpc_amqp.cast_to_server(
conf, context, server_params, topic, msg,
rpc_amqp.get_connection_pool(conf, Connection))
def fanout_cast_to_server(conf, context, server_params, topic, msg):
"""Sends a message on a fanout exchange to a specific server."""
return rpc_amqp.fanout_cast_to_server(
conf, context, server_params, topic, msg,
rpc_amqp.get_connection_pool(conf, Connection))
def notify(conf, context, topic, msg, envelope):
"""Sends a notification event on a topic."""
return rpc_amqp.notify(
conf, context, topic, msg,
rpc_amqp.get_connection_pool(conf, Connection),
envelope)
def cleanup():
return rpc_amqp.cleanup(Connection.pool)
| {
"content_hash": "6155847f39f66f4fedc9341c0a0dc1f9",
"timestamp": "",
"source": "github",
"line_count": 839,
"max_line_length": 79,
"avg_line_length": 38.24791418355185,
"alnum_prop": 0.5549703957619196,
"repo_name": "denismakogon/trove-guestagent",
"id": "7057c0b1a5a4c773096b4610ec531e2275e1a0fb",
"size": "32751",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "trove_guestagent/openstack/common/rpc/impl_kombu.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "19900"
},
{
"name": "JavaScript",
"bytes": "7403"
},
{
"name": "Python",
"bytes": "1023022"
}
],
"symlink_target": ""
} |
__all__ = ["System"]
import os
import sys
from functools import wraps
from subprocess import Popen, PIPE
def lazy_property(undecorated):
name = '_' + undecorated.__name__
@property
@wraps(undecorated)
def decorated(self):
try:
return getattr(self, name)
except AttributeError:
v = undecorated(self)
setattr(self, name, v)
return v
return decorated
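# Behaviour of lazy_property, for illustration: the first access of e.g.
# System().os below runs the decorated function and stores the result on the
# instance as "_os"; subsequent accesses return the cached value without
# re-running the probe.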
class System(object):
@lazy_property
def os(self):
platform = sys.platform
if platform.startswith('linux'):
return "linux"
elif platform == "darwin":
return "darwin"
else:
return "unknown"
def unquote(self, val):
if val[0] == '"':
val = val[1:-1]
return val
@lazy_property
def arch(self):
machine = self.machine
if machine in ("i386", "i486", "i686"):
return "x86_32"
return machine
@lazy_property
def machine(self):
p = Popen(["/bin/uname", "-m"], stdout=PIPE, stderr=PIPE)
return p.communicate()[0].strip()
@lazy_property
def lsb(self):
if os.path.exists("/etc/lsb-release"):
with open("/etc/lsb-release", "rb") as fp:
lsb = (x.split('=') for x in fp.read().strip().split('\n'))
return dict((k.split('_', 1)[-1].lower(), self.unquote(v)) for k, v in lsb)
elif os.path.exists("/usr/bin/lsb_release"):
p = Popen(["/usr/bin/lsb_release","-a"], stdout=PIPE, stderr=PIPE)
lsb = {}
for l in p.communicate()[0].split('\n'):
v = l.split(':', 1)
if len(v) != 2:
continue
lsb[v[0].strip().lower()] = self.unquote(v[1].strip().lower())
lsb['id'] = lsb.pop('distributor id')
return lsb
@lazy_property
def platform(self):
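        # Examples of what this resolves to: an Ubuntu host reports
        # lsb['id'] == 'Ubuntu', giving "ubuntu"; on OS X the value comes
        # from sw_vers, e.g. "Mac OS X" -> "mac_os_x".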
operatingsystem = self.os
if operatingsystem == "linux":
lsb = self.lsb
if not lsb:
if os.path.exists("/etc/redhat-release"):
return "redhat"
if os.path.exists("/etc/fedora-release"):
return "fedora"
if os.path.exists("/etc/debian_version"):
return "debian"
if os.path.exists("/etc/gentoo-release"):
return "gentoo"
if os.path.exists("/etc/system-release"):
with open("/etc/system-release", "rb") as fp:
release = fp.read()
if "Amazon Linux" in release:
return "amazon"
return "unknown"
return lsb['id'].lower()
elif operatingsystem == "darwin":
out = Popen("/usr/bin/sw_vers", stdout=PIPE).communicate()[0]
sw_vers = dict([y.strip() for y in x.split(':', 1)] for x in out.strip().split('\n'))
# ProductName, ProductVersion, BuildVersion
return sw_vers['ProductName'].lower().replace(' ', '_')
else:
return "unknown"
@lazy_property
def locales(self):
p = Popen("locale -a", shell=True, stdout=PIPE)
out = p.communicate()[0]
return out.strip().split("\n")
@lazy_property
def ec2(self):
if not os.path.exists("/proc/xen"):
return False
if os.path.exists("/etc/ec2_version"):
return True
return False
@lazy_property
def vm(self):
if os.path.exists("/usr/bin/VBoxControl"):
return "vbox"
elif os.path.exists("/usr/bin/vmware-toolbox-cmd") or os.path.exists("/usr/sbin/vmware-toolbox-cmd"):
return "vmware"
elif os.path.exists("/proc/xen"):
return "xen"
return None
@classmethod
def get_instance(cls):
try:
return cls._instance
except AttributeError:
cls._instance = cls()
return cls._instance
| {
"content_hash": "34fa1edea36264304026b227ad008867",
"timestamp": "",
"source": "github",
"line_count": 125,
"max_line_length": 109,
"avg_line_length": 32.256,
"alnum_prop": 0.5086805555555556,
"repo_name": "samuel/kokki",
"id": "f06bf80366eed646c6fa920a805e0695c8bc78b4",
"size": "4033",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kokki/system.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "211476"
}
],
"symlink_target": ""
} |
import unittest
class TestAlwaysTrue(unittest.TestCase):
def test_assertTrue(self):
"""
always_true returns a truthy value
"""
result = True
self.assertTrue(result)
| {
"content_hash": "bdda286f21b5181984a486f121522e95",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 42,
"avg_line_length": 17.75,
"alnum_prop": 0.6056338028169014,
"repo_name": "federico123579/Trading212-API",
"id": "48e9abdb32533b422f225440b7a36ee0c5cfc05d",
"size": "213",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "489"
},
{
"name": "Python",
"bytes": "41298"
},
{
"name": "Shell",
"bytes": "1245"
}
],
"symlink_target": ""
} |
# setuptools (rather than distutils) is required for install_requires to
# take effect when installing this package.
from setuptools import setup
setup(
name = 'wl_parsers',
packages = ['wl_parsers'],
install_requires = [
'requests>=2.10.0',
],
version = '0.1.2',
description = 'a library to parse the Warlight.net site',
author = 'knyte',
author_email = '[email protected]',
url = 'https://github.com/knyte/wl_parsers',
download_url = 'https://github.com/knyte/wl_parsers/tarball/0.1.2',
keywords = ['warlight', 'parser', 'scraping'],
classifiers = [],
)
| {
"content_hash": "c6631c902051297a65417f76fdbf8084",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 71,
"avg_line_length": 31.5,
"alnum_prop": 0.6091269841269841,
"repo_name": "knyte/wl_parsers",
"id": "4d55dbf19068514a8480689a3c9a62273ba092c9",
"size": "504",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "102556"
}
],
"symlink_target": ""
} |
"""
APIs for dealing with input and output definitions for Software Configurations.
"""
import collections
import copy
import six
from heat.common.i18n import _
from heat.common import exception
from heat.engine import constraints
from heat.engine import parameters
from heat.engine import properties
(
IO_NAME, DESCRIPTION, TYPE,
DEFAULT, REPLACE_ON_CHANGE, VALUE,
ERROR_OUTPUT,
) = (
'name', 'description', 'type',
'default', 'replace_on_change', 'value',
'error_output',
)
TYPES = (
STRING_TYPE, NUMBER_TYPE, LIST_TYPE, JSON_TYPE, BOOLEAN_TYPE,
) = (
'String', 'Number', 'CommaDelimitedList', 'Json', 'Boolean',
)
input_config_schema = {
IO_NAME: properties.Schema(
properties.Schema.STRING,
_('Name of the input.'),
required=True
),
DESCRIPTION: properties.Schema(
properties.Schema.STRING,
_('Description of the input.')
),
TYPE: properties.Schema(
properties.Schema.STRING,
_('Type of the value of the input.'),
default=STRING_TYPE,
constraints=[constraints.AllowedValues(TYPES)]
),
DEFAULT: properties.Schema(
properties.Schema.ANY,
_('Default value for the input if none is specified.'),
),
REPLACE_ON_CHANGE: properties.Schema(
properties.Schema.BOOLEAN,
_('Replace the deployment instead of updating it when the input '
'value changes.'),
default=False,
),
}
output_config_schema = {
IO_NAME: properties.Schema(
properties.Schema.STRING,
_('Name of the output.'),
required=True
),
DESCRIPTION: properties.Schema(
properties.Schema.STRING,
_('Description of the output.')
),
TYPE: properties.Schema(
properties.Schema.STRING,
_('Type of the value of the output.'),
default=STRING_TYPE,
constraints=[constraints.AllowedValues(TYPES)]
),
ERROR_OUTPUT: properties.Schema(
properties.Schema.BOOLEAN,
_('Denotes that the deployment is in an error state if this '
'output has a value.'),
default=False
)
}
class IOConfig(object):
"""Base class for the configuration data for a single input or output."""
def __init__(self, **config):
self._props = properties.Properties(self.schema, config)
try:
self._props.validate()
except exception.StackValidationFailed as exc:
raise ValueError(six.text_type(exc))
def name(self):
"""Return the name of the input or output."""
return self._props[IO_NAME]
def as_dict(self):
"""Return a dict representation suitable for persisting."""
return {k: v for k, v in self._props.items() if v is not None}
def __repr__(self):
return '%s(%s)' % (type(self).__name__,
', '.join('%s=%s' % (k, repr(v))
for k, v in self.as_dict().items()))
_no_value = object()
class InputConfig(IOConfig):
"""Class representing the configuration data for a single input."""
schema = input_config_schema
def __init__(self, value=_no_value, **config):
if TYPE in config and DEFAULT in config:
if config[DEFAULT] == '' and config[TYPE] != STRING_TYPE:
# This is a legacy path, because default used to be of string
# type, so we need to skip schema validation in this case.
pass
else:
self.schema = copy.deepcopy(self.schema)
config_param = parameters.Schema.from_dict(
'config', {'Type': config[TYPE]})
self.schema[DEFAULT] = properties.Schema.from_parameter(
config_param)
super(InputConfig, self).__init__(**config)
self._value = value
def default(self):
"""Return the default value of the input."""
return self._props[DEFAULT]
def replace_on_change(self):
return self._props[REPLACE_ON_CHANGE]
def as_dict(self):
"""Return a dict representation suitable for persisting."""
d = super(InputConfig, self).as_dict()
if not self._props[REPLACE_ON_CHANGE]:
del d[REPLACE_ON_CHANGE]
if self._value is not _no_value:
d[VALUE] = self._value
return d
def input_data(self):
"""Return a name, value pair for the input."""
value = self._value if self._value is not _no_value else None
return self.name(), value
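# Minimal InputConfig usage sketch (illustrative values only):
#   inp = InputConfig(name='port', type='Number', default=8080, value=8080)
#   inp.input_data()  # -> ('port', 8080)
#   inp.as_dict()     # includes 'value' because an explicit value was given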
class OutputConfig(IOConfig):
"""Class representing the configuration data for a single output."""
schema = output_config_schema
def error_output(self):
"""Return True if the presence of the output indicates an error."""
return self._props[ERROR_OUTPUT]
def check_io_schema_list(io_configs):
"""Check that an input or output schema list is of the correct type.
Raises TypeError if the list itself is not a list, or if any of the
members are not dicts.
"""
if (not isinstance(io_configs, collections.Sequence) or
isinstance(io_configs, collections.Mapping) or
isinstance(io_configs, six.string_types)):
raise TypeError('Software Config I/O Schema must be in a list')
if not all(isinstance(conf, collections.Mapping) for conf in io_configs):
raise TypeError('Software Config I/O Schema must be a dict')
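# For illustration, a well-formed schema list is a list of dicts, e.g.
#   [{'name': 'foo', 'type': 'String'}, {'name': 'bar', 'type': 'Number'}]
# whereas a bare dict, a string, or a list containing non-dict entries
# raises TypeError above.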
| {
"content_hash": "76af35d9662271db5048237abb9b5255",
"timestamp": "",
"source": "github",
"line_count": 175,
"max_line_length": 79,
"avg_line_length": 31.205714285714286,
"alnum_prop": 0.6105108954403955,
"repo_name": "noironetworks/heat",
"id": "8ca2d28f7b935691bfa954cd9ccec2878cf0a80b",
"size": "6036",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "heat/engine/software_config_io.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "8804896"
},
{
"name": "Shell",
"bytes": "64533"
}
],
"symlink_target": ""
} |
import unittest
import numpy
import chainer
from chainer import cuda
from chainer import functions
from chainer import gradient_check
from chainer import links
from chainer import testing
from chainer.testing import attr
@testing.parameterize(
{'in_size': 10, 'out_size': 10},
{'in_size': 10, 'out_size': 40},
)
class TestLSTM(unittest.TestCase):
def setUp(self):
self.link = links.LSTM(self.in_size, self.out_size)
upward = self.link.upward.W.data
upward[...] = numpy.random.uniform(-1, 1, upward.shape)
lateral = self.link.lateral.W.data
lateral[...] = numpy.random.uniform(-1, 1, lateral.shape)
self.link.zerograds()
self.upward = upward.copy() # fixed on CPU
self.lateral = lateral.copy() # fixed on CPU
x_shape = (4, self.in_size)
self.x = numpy.random.uniform(-1, 1, x_shape).astype(numpy.float32)
def check_forward(self, x_data):
xp = self.link.xp
x = chainer.Variable(x_data)
h1 = self.link(x)
c0 = chainer.Variable(xp.zeros((len(self.x), self.out_size),
dtype=self.x.dtype))
c1_expect, h1_expect = functions.lstm(c0, self.link.upward(x))
gradient_check.assert_allclose(h1.data, h1_expect.data)
gradient_check.assert_allclose(self.link.h.data, h1_expect.data)
gradient_check.assert_allclose(self.link.c.data, c1_expect.data)
h2 = self.link(x)
c2_expect, h2_expect = \
functions.lstm(c1_expect,
self.link.upward(x) + self.link.lateral(h1))
gradient_check.assert_allclose(h2.data, h2_expect.data)
def test_forward_cpu(self):
self.check_forward(self.x)
@attr.gpu
def test_forward_gpu(self):
self.link.to_gpu()
self.check_forward(cuda.to_gpu(self.x))
class TestLSTMResetState(unittest.TestCase):
def setUp(self):
self.link = links.LSTM(5, 7)
self.x = chainer.Variable(
numpy.random.uniform(-1, 1, (3, 5)).astype(numpy.float32))
def check_state(self):
self.assertIsNone(self.link.c)
self.assertIsNone(self.link.h)
self.link(self.x)
self.assertIsNotNone(self.link.c)
self.assertIsNotNone(self.link.h)
def test_state_cpu(self):
self.check_state()
@attr.gpu
def test_state_gpu(self):
self.link.to_gpu()
self.x.to_gpu()
self.check_state()
def check_reset_state(self):
self.link(self.x)
self.link.reset_state()
self.assertIsNone(self.link.c)
self.assertIsNone(self.link.h)
def test_reset_state_cpu(self):
self.check_reset_state()
@attr.gpu
def test_reset_state_gpu(self):
self.link.to_gpu()
self.x.to_gpu()
self.check_reset_state()
class TestLSTMToCPUToGPU(unittest.TestCase):
def setUp(self):
self.link = links.LSTM(5, 7)
self.x = chainer.Variable(
numpy.random.uniform(-1, 1, (3, 5)).astype(numpy.float32))
def check_to_cpu(self, s):
self.link.to_cpu()
self.assertIsInstance(s.data, self.link.xp.ndarray)
self.link.to_cpu()
self.assertIsInstance(s.data, self.link.xp.ndarray)
def test_to_cpu_cpu(self):
self.link(self.x)
self.check_to_cpu(self.link.c)
self.check_to_cpu(self.link.h)
@attr.gpu
def test_to_cpu_gpu(self):
self.link.to_gpu()
self.x.to_gpu()
self.link(self.x)
self.check_to_cpu(self.link.c)
self.check_to_cpu(self.link.h)
def check_to_cpu_to_gpu(self, s):
self.link.to_gpu()
self.assertIsInstance(s.data, self.link.xp.ndarray)
self.link.to_gpu()
self.assertIsInstance(s.data, self.link.xp.ndarray)
self.link.to_cpu()
self.assertIsInstance(s.data, self.link.xp.ndarray)
self.link.to_gpu()
self.assertIsInstance(s.data, self.link.xp.ndarray)
@attr.gpu
def test_to_cpu_to_gpu_cpu(self):
self.link(self.x)
self.check_to_cpu_to_gpu(self.link.c)
self.check_to_cpu_to_gpu(self.link.h)
@attr.gpu
def test_to_cpu_to_gpu_gpu(self):
self.link.to_gpu()
self.x.to_gpu()
self.link(self.x)
self.check_to_cpu_to_gpu(self.link.c)
self.check_to_cpu_to_gpu(self.link.h)
@testing.parameterize(
{'in_size': 10, 'out_size': 10},
{'in_size': 10, 'out_size': 40},
)
class TestStatelessLSTM(unittest.TestCase):
def setUp(self):
self.link = links.StatelessLSTM(self.in_size, self.out_size)
upward = self.link.upward.W.data
upward[...] = numpy.random.uniform(-1, 1, upward.shape)
lateral = self.link.lateral.W.data
lateral[...] = numpy.random.uniform(-1, 1, lateral.shape)
self.link.zerograds()
self.upward = upward.copy() # fixed on CPU
self.lateral = lateral.copy() # fixed on CPU
x_shape = (4, self.in_size)
self.x = numpy.random.uniform(-1, 1, x_shape).astype(numpy.float32)
def check_forward(self, x_data):
xp = self.link.xp
x = chainer.Variable(x_data)
c1, h1 = self.link(None, None, x)
c0 = chainer.Variable(xp.zeros((len(self.x), self.out_size),
dtype=self.x.dtype))
c1_expect, h1_expect = functions.lstm(c0, self.link.upward(x))
gradient_check.assert_allclose(h1.data, h1_expect.data)
gradient_check.assert_allclose(c1.data, c1_expect.data)
c2, h2 = self.link(c1, h1, x)
c2_expect, h2_expect = \
functions.lstm(c1_expect,
self.link.upward(x) + self.link.lateral(h1))
gradient_check.assert_allclose(h2.data, h2_expect.data)
gradient_check.assert_allclose(c2.data, c2_expect.data)
def test_forward_cpu(self):
self.check_forward(self.x)
@attr.gpu
def test_forward_gpu(self):
self.link.to_gpu()
self.check_forward(cuda.to_gpu(self.x))
testing.run_module(__name__, __file__)
| {
"content_hash": "580a04b58fc432df91f7c4db0a419cfe",
"timestamp": "",
"source": "github",
"line_count": 196,
"max_line_length": 75,
"avg_line_length": 31.107142857142858,
"alnum_prop": 0.598983106445793,
"repo_name": "benob/chainer",
"id": "88b09903c8d1a5281e642cae1b01115832bdb1a2",
"size": "6097",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/chainer_tests/links_tests/connection_tests/test_lstm.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "3366"
},
{
"name": "C",
"bytes": "29678"
},
{
"name": "Cuda",
"bytes": "6634"
},
{
"name": "PowerShell",
"bytes": "7195"
},
{
"name": "Python",
"bytes": "1690503"
}
],
"symlink_target": ""
} |
from ..excel_comparsion_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
"""
Test file created by XlsxWriter against a file created by Excel.
"""
def setUp(self):
self.maxDiff = None
filename = 'chart_gradient01.xlsx'
test_dir = 'xlsxwriter/test/comparison/'
self.got_filename = test_dir + '_test_' + filename
self.exp_filename = test_dir + 'xlsx_files/' + filename
self.ignore_files = []
self.ignore_elements = {}
def test_create_file(self):
"""Test the creation of a simple XlsxWriter file."""
workbook = Workbook(self.got_filename)
worksheet = workbook.add_worksheet()
chart = workbook.add_chart({'type': 'column'})
chart.axis_ids = [61365248, 64275200]
data = [
[1, 2, 3, 4, 5],
[2, 4, 6, 8, 10],
[3, 6, 9, 12, 15],
]
worksheet.write_column('A1', data[0])
worksheet.write_column('B1', data[1])
worksheet.write_column('C1', data[2])
chart.add_series({
'values': '=Sheet1!$A$1:$A$5',
'gradient': {'colors': ['#DDEBCF', '#9CB86E', '#156B13']}
})
chart.add_series({'values': '=Sheet1!$B$1:$B$5'})
chart.add_series({'values': '=Sheet1!$C$1:$C$5'})
worksheet.insert_chart('E9', chart)
workbook.close()
self.assertExcelEqual()
| {
"content_hash": "6da9524e336f7c23bd3ad3ab81caaa77",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 69,
"avg_line_length": 26.428571428571427,
"alnum_prop": 0.5574324324324325,
"repo_name": "jkyeung/XlsxWriter",
"id": "51eaf8f5e441829321d0258f6250f9d144235695",
"size": "1653",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "xlsxwriter/test/comparison/test_chart_gradient01.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Batchfile",
"bytes": "5113"
},
{
"name": "CSS",
"bytes": "16544"
},
{
"name": "HTML",
"bytes": "13100"
},
{
"name": "Makefile",
"bytes": "7819"
},
{
"name": "Perl",
"bytes": "3504"
},
{
"name": "Python",
"bytes": "2430294"
},
{
"name": "Shell",
"bytes": "6064"
}
],
"symlink_target": ""
} |
"""!wiki <topic> returns a wiki link for <topic>"""
import json
import re
from urllib import quote
import sys
import requests
from bs4 import BeautifulSoup
def wiki(searchterm):
"""return the top wiki search result for the term"""
searchterm = quote(searchterm)
url = "https://en.wikipedia.org/w/api.php?action=query&list=search&srsearch={0}&format=json"
url = url.format(searchterm)
result = requests.get(url).json()
pages = result["query"]["search"]
# try to reject disambiguation pages
    pages = [p for p in pages if 'may refer to' not in p["snippet"]]
if not pages:
return ""
page = quote(pages[0]["title"].encode("utf8"))
link = "http://en.wikipedia.org/wiki/{0}".format(page)
r = requests.get("http://en.wikipedia.org/w/api.php?format=json&action=parse&page={0}".format(page)).json()
    soup = BeautifulSoup(r["parse"]["text"]["*"], "html.parser")
p = soup.find('p').get_text()
p = p[:8000]
return u"{0}\n{1}".format(p, link)
def on_message(msg, server):
text = msg.get("text", "")
match = re.findall(r"!wiki (.*)", text)
if not match: return
searchterm = match[0]
return wiki(searchterm)
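# Example flow: a message payload such as {"text": "!wiki Python"}
# (illustrative) makes on_message() call wiki("Python"), which returns the
# article's first paragraph followed by its Wikipedia URL.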
| {
"content_hash": "c25c39f43848471198540717b67c6fff",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 111,
"avg_line_length": 27.41860465116279,
"alnum_prop": 0.636980491942324,
"repo_name": "kepler-/ragebot",
"id": "44e8c3f8d01dcf175c4784deb23ef7db9d0c191c",
"size": "1179",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "plugins/wiki.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "74950"
}
],
"symlink_target": ""
} |
"""Implementation of Fisher-BRAC from pixels."""
import typing
from dm_env import specs as dm_env_specs
import numpy as np
from seed_rl.agents.policy_gradient.modules import popart
from seed_rl.agents.policy_gradient.modules import running_statistics
import tensorflow as tf
import tensorflow_probability as tfp
from tf_agents.specs.tensor_spec import TensorSpec
from representation_batch_rl.batch_rl import critic as criticCL
from representation_batch_rl.batch_rl.encoders import ConvStack
from representation_batch_rl.batch_rl.encoders import ImageEncoder
from representation_batch_rl.batch_rl.encoders import make_impala_cnn_network
from representation_batch_rl.representation_batch_rl import tf_utils
tfd = tfp.distributions
# Lower-bound on possible returns for 200 easy levels of ProcGen based on PPO.
# See https://arxiv.org/abs/1912.01588 appendix.
reward_lowerbound_procgen = {
'bigfish': 1.,
'bossfight': 0.5,
'caveflyer': 3.5,
'chaser': 0.5,
'climber': 2.,
'coinrun': 5.,
'dodgeball': 1.5,
'fruitbot': -1.5,
'heist': 3.5,
'jumper': 3.,
'leaper': 10.,
'maze': 5.,
'miner': 1.5,
'ninja': 3.5,
'plunder': 4.5,
'starpilot': 2.5
}
# Upper-bound on possible returns for 200 easy levels of ProcGen based on PPO.
# See https://arxiv.org/abs/1912.01588 appendix.
reward_upperbound_procgen = {
'bigfish': 40.,
'bossfight': 13.,
'caveflyer': 12.,
'chaser': 13,
'climber': 12.6,
'coinrun': 10.,
'dodgeball': 19.,
'fruitbot': 32.4,
'heist': 10,
'jumper': 10.,
'leaper': 10.,
'maze': 10.,
'miner': 13.,
'ninja': 10.,
'plunder': 30.,
'starpilot': 64.
}
class OURS(object):
"""Class performing F-BRAC + SSL training."""
def __init__(self,
observation_spec,
action_spec,
actor_lr = 3e-4,
critic_lr = 3e-4,
alpha_lr = 3e-4,
discount = 0.99,
tau = 0.005,
target_entropy = 0.0,
f_reg = 1.0,
reward_bonus = 5.0,
num_augmentations = 1,
rep_learn_keywords = 'outer',
env_name = '',
batch_size = 256,
n_quantiles = 5,
temp = 0.1,
num_training_levels = 200,
latent_dim = 256,
n_levels_nce = 5,
popart_norm_beta = 0.1):
"""Creates networks.
Args:
observation_spec: environment observation spec.
action_spec: Action spec.
actor_lr: Actor learning rate.
critic_lr: Critic learning rate.
alpha_lr: Temperature learning rate.
discount: MDP discount.
tau: Soft target update parameter.
target_entropy: Target entropy.
f_reg: Critic regularization weight.
reward_bonus: Bonus added to the rewards.
num_augmentations: Number of DrQ augmentations (crops)
rep_learn_keywords: Representation learning loss to add (see below)
env_name: Env name
batch_size: Batch size
n_quantiles: Number of GVF quantiles
temp: Temperature of NCE softmax
num_training_levels: Number of training MDPs (Procgen=200)
latent_dim: Latent dimensions of auxiliary MLPs
n_levels_nce: Number of MDPs to use contrastive loss on
popart_norm_beta: PopArt normalization constant
For `rep_learn_keywords`, pick from:
      stop_grad_FQI: whether to stop gradients through encoder features for TD/FQI critic updates
linear_Q: use a linear critic?
successor_features: uses ||SF|| as cumulant
gvf_termination: uses +1 if done else 0 as cumulant
gvf_action_count: uses state-cond. action counts as cumulant
nce: uses the multi-class dot-product InfoNCE objective
cce: uses MoCo Categorical CrossEntropy objective
energy: uses SimCLR + pairwise GVF distance (not fully tested)
If no cumulant is specified, the reward will be taken as default one.
"""
del actor_lr, critic_lr, alpha_lr, target_entropy
self.action_spec = action_spec
self.num_augmentations = num_augmentations
self.rep_learn_keywords = rep_learn_keywords.split('__')
self.batch_size = batch_size
self.env_name = env_name
self.stop_grad_fqi = 'stop_grad_FQI' in self.rep_learn_keywords
critic_kwargs = {'hidden_dims': (1024, 1024)}
self.latent_dim = latent_dim
self.n_levels_nce = n_levels_nce
hidden_dims = hidden_dims_per_level = (self.latent_dim, self.latent_dim)
self.num_training_levels = int(num_training_levels)
self.n_quantiles = n_quantiles
self.temp = temp
# Make 2 sets of weights:
# - Critic
# - Critic (target)
# Optionally, make a 3rd set for per-level critics
if observation_spec.shape == (64, 64, 3):
# IMPALA for Procgen
def conv_stack():
return make_impala_cnn_network(
depths=[16, 32, 32], use_batch_norm=False, dropout_rate=0.)
state_dim = 256
else:
# Reduced architecture for DMC
def conv_stack():
return ConvStack(observation_spec.shape)
state_dim = 50
conv_stack_critic = conv_stack()
conv_target_stack_critic = conv_stack()
if observation_spec.shape == (64, 64, 3):
conv_stack_critic.output_size = state_dim
conv_target_stack_critic.output_size = state_dim
critic_kwargs['encoder'] = ImageEncoder(
conv_stack_critic, feature_dim=state_dim, bprop_conv_stack=True)
# Note: the target critic does not share any weights.
critic_kwargs['encoder_target'] = ImageEncoder(
conv_target_stack_critic, feature_dim=state_dim, bprop_conv_stack=True)
conv_stack_critic_per_level = conv_stack()
conv_target_stack_critic_per_level = conv_stack()
if observation_spec.shape == (64, 64, 3):
conv_stack_critic_per_level.output_size = state_dim
conv_target_stack_critic_per_level.output_size = state_dim
self.encoder_per_level = ImageEncoder(
conv_stack_critic_per_level,
feature_dim=state_dim,
bprop_conv_stack=True)
self.encoder_per_level_target = ImageEncoder(
conv_target_stack_critic_per_level,
feature_dim=state_dim,
bprop_conv_stack=True)
criticCL.soft_update(
self.encoder_per_level, self.encoder_per_level_target, tau=1.0)
if self.num_augmentations == 0:
dummy_state = tf.constant(np.zeros([1] + list(observation_spec.shape)))
else: # account for padding of +4 everywhere and then cropping out 68
dummy_state = tf.constant(np.zeros(shape=[1, 68, 68, 3]))
dummy_enc = critic_kwargs['encoder'](dummy_state)
@tf.function
def init_models():
"""This function initializes all auxiliary networks (state and action encoders) with dummy input (Procgen-specific, 68x68x3, 15 actions).
"""
critic_kwargs['encoder'](dummy_state)
critic_kwargs['encoder_target'](dummy_state)
self.encoder_per_level(dummy_state)
self.encoder_per_level_target(dummy_state)
init_models()
action_dim = action_spec.maximum.item() + 1
self.action_dim = action_dim
self.discount = discount
self.tau = tau
self.reg = f_reg
self.reward_bonus = reward_bonus
self.critic = criticCL.Critic(
state_dim,
action_dim,
hidden_dims=hidden_dims,
encoder=critic_kwargs['encoder'],
discrete_actions=True,
linear='linear_Q' in self.rep_learn_keywords)
self.critic_target = criticCL.Critic(
state_dim,
action_dim,
hidden_dims=hidden_dims,
encoder=critic_kwargs['encoder_target'],
discrete_actions=True,
linear='linear_Q' in self.rep_learn_keywords)
self.critic_optimizer = tf.keras.optimizers.Adam(learning_rate=3e-4)
self.task_critic_optimizer = tf.keras.optimizers.Adam(learning_rate=3e-4)
self.br_optimizer = tf.keras.optimizers.Adam(learning_rate=3e-4)
if 'cce' in self.rep_learn_keywords:
self.classifier = tf.keras.Sequential(
[
tf.keras.layers.Dense(self.latent_dim, use_bias=True),
tf.keras.layers.ReLU(),
tf.keras.layers.Dense(self.n_quantiles, use_bias=True)
],
name='classifier')
elif 'nce' in self.rep_learn_keywords:
self.embedding = tf.keras.Sequential(
[
tf.keras.layers.Dense(self.latent_dim, use_bias=True),
tf.keras.layers.ReLU(),
tf.keras.layers.Dense(self.latent_dim, use_bias=True)
],
name='embedding')
    # This snippet initializes all auxiliary networks (state and action encoders)
    # with dummy input (Procgen-specific, 68x68x3, 15 actions).
dummy_state = tf.zeros((1, 68, 68, 3), dtype=tf.float32)
phi_s = self.critic.encoder(dummy_state)
phi_a = tf.eye(action_dim, dtype=tf.float32)
if 'linear_Q' in self.rep_learn_keywords:
_ = self.critic.critic1.state_encoder(phi_s)
_ = self.critic.critic2.state_encoder(phi_s)
_ = self.critic.critic1.action_encoder(phi_a)
_ = self.critic.critic2.action_encoder(phi_a)
_ = self.critic_target.critic1.state_encoder(phi_s)
_ = self.critic_target.critic2.state_encoder(phi_s)
_ = self.critic_target.critic1.action_encoder(phi_a)
_ = self.critic_target.critic2.action_encoder(phi_a)
if 'cce' in self.rep_learn_keywords:
self.classifier(phi_s)
elif 'nce' in self.rep_learn_keywords:
self.embedding(phi_s)
self.target_critic_to_use = self.critic_target
self.critic_to_use = self.critic
criticCL.soft_update(self.critic, self.critic_target, tau=1.0)
self.cce = tf.keras.losses.SparseCategoricalCrossentropy(
reduction=tf.keras.losses.Reduction.NONE, from_logits=True)
self.bc = None
if 'successor_features' in self.rep_learn_keywords:
self.output_dim_level = self.latent_dim
elif 'gvf_termination' in self.rep_learn_keywords:
self.output_dim_level = 1
elif 'gvf_action_count' in self.rep_learn_keywords:
self.output_dim_level = action_dim
else:
self.output_dim_level = action_dim
self.task_critic_one = criticCL.Critic(
state_dim,
self.output_dim_level * self.num_training_levels,
hidden_dims=hidden_dims_per_level,
encoder=None, # critic_kwargs['encoder'],
discrete_actions=True,
cross_norm=False)
self.task_critic_target_one = criticCL.Critic(
state_dim,
self.output_dim_level * 200,
hidden_dims=hidden_dims_per_level,
encoder=None, # critic_kwargs['encoder'],
discrete_actions=True,
cross_norm=False)
self.task_critic_one(
dummy_enc,
actions=None,
training=False,
return_features=False,
stop_grad_features=False)
self.task_critic_target_one(
dummy_enc,
actions=None,
training=False,
return_features=False,
stop_grad_features=False)
criticCL.soft_update(
self.task_critic_one, self.task_critic_target_one, tau=1.0)
# Normalization constant beta, set to best default value as per PopArt paper
self.reward_normalizer = popart.PopArt(
running_statistics.EMAMeanStd(popart_norm_beta))
self.reward_normalizer.init()
if 'CLIP' in self.rep_learn_keywords or 'clip' in self.rep_learn_keywords:
self.loss_temp = tf.Variable(
tf.constant(0.0, dtype=tf.float32), name='loss_temp', trainable=True)
self.model_dict = {
'critic': self.critic,
'critic_target': self.critic_target,
'critic_optimizer': self.critic_optimizer,
'br_optimizer': self.br_optimizer
}
self.model_dict['encoder_perLevel'] = self.encoder_per_level
self.model_dict['encoder_perLevel_target'] = self.encoder_per_level_target
self.model_dict['task_critic'] = self.task_critic_one
self.model_dict['task_critic_target'] = self.task_critic_target_one
@tf.function
def infonce_by_class(self,
features,
classes,
target_features=None,
temp=1.,
n_batch=None):
"""InfoNCE between features of a given class vs other clases.
Args:
features: n_batch x n_features
classes: n_batch x n_features
target_features: optional target features for dot product
temp: temperature parameter for softmax
n_batch: int, optional dimension param
Returns:
nce_scores
"""
if n_batch is None:
n_batch = self.batch_size
# \sum_ij A_i:A_:j
# Picks elements of A which are the same class as A_i
class_mapping = tf.einsum('ik,jk->ij', classes, classes)
# outer_prod: n_batch x n_batch
if target_features is None:
outer_prod = tf.einsum('ik,jk->ij', features, features)
else:
outer_prod = tf.einsum('ik,jk->ij', features, target_features)
scores = tf.nn.softmax(outer_prod / temp, -1)
    # Pool the scores of all instances with class=i into the numerator by averaging over axis 1
scores = tf.reduce_mean(class_mapping * scores, -1)
# Apply log after softmax
scores = tf.math.log(scores)
return tf.reduce_mean(scores)
@tf.function
def infonce_by_class_level(self,
features,
classes,
target_features=None,
levels=None,
temp=1.,
n_batch=None):
"""InfoNCE between features of a given class vs other classes.
Args:
features: n_batch x n_features
classes: n_batch x n_features
target_features: optional target features for dot product
levels: n_batch x n_levels, optional level ids
temp: temperature parameter
n_batch: int, optional dimension param
Returns:
nce_scores
"""
assert temp > 0.
if levels is None:
return self.infonce_by_class(features, classes, target_features, temp,
n_batch)
if n_batch is None:
n_batch = self.batch_size
# \sum_ij A_i:A_:j
# Picks elements of A which are the same class as A_i
class_mapping = tf.einsum('ik,jk->ij', classes, classes)
# outer_prod: n_batch x n_batch
if target_features is None:
outer_prod = tf.einsum('ik,jk->ij', features, features)
else:
outer_prod = tf.einsum('ik,jk->ij', features, target_features)
level_mapping = tf.einsum('ik,jk->ij', (1.-levels), levels)
scores = tf.nn.softmax(outer_prod / temp, -1)
    # Pool the scores of same-class instances from other levels (masked by level_mapping) into the numerator by averaging over axis 1
scores = tf.reduce_mean(level_mapping * class_mapping * scores, -1)
# Apply log after softmax
scores = tf.math.log(scores)
return tf.reduce_mean(scores)
@tf.function
def fit_critic(self, states, actions,
next_states, next_actions, rewards,
discounts):
"""Updates critic parameters.
Args:
states: Batch of states.
actions: Batch of actions.
next_states: Batch of next states.
next_actions: Batch of next actions from training policy.
rewards: Batch of rewards.
discounts: Batch of masks indicating the end of the episodes.
Returns:
Dictionary with information to track.
"""
action_indices = tf.stack(
[tf.range(tf.shape(actions)[0], dtype=tf.int64), actions], axis=-1)
next_action_indices = tf.stack(
[tf.range(tf.shape(next_actions)[0], dtype=tf.int64), next_actions],
axis=-1)
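    # DrQ-style target: with several augmentations the TD target is averaged
    # over the augmented copies of the next state; with a single augmentation
    # (the default) it is used directly.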
if self.num_augmentations > 1:
target_q = 0.
for i in range(self.num_augmentations):
next_q1_i, next_q2_i = self.critic_target(
next_states[i], actions=None, stop_grad_features=self.stop_grad_fqi)
target_q_i = tf.expand_dims(
rewards, 1) + self.discount * tf.expand_dims(
discounts, 1) * tf.minimum(next_q1_i, next_q2_i)
target_q += target_q_i
target_q /= self.num_augmentations
elif self.num_augmentations == 1:
next_q1, next_q2 = self.critic_target(
next_states[0], actions=None, stop_grad_features=self.stop_grad_fqi)
target_q = tf.expand_dims(
rewards, 1) + self.discount * tf.expand_dims(
discounts, 1) * tf.minimum(next_q1, next_q2)
else:
next_q1, next_q2 = self.target_critic_to_use(
next_states, actions=None, stop_grad_features=self.stop_grad_fqi)
target_q = tf.expand_dims(rewards, 1) + self.discount * tf.expand_dims(
discounts, 1) * tf.minimum(next_q1, next_q2)
target_q = tf.gather_nd(target_q, indices=next_action_indices)
trainable_variables = self.critic.trainable_variables
with tf.GradientTape(watch_accessed_variables=False) as tape:
tape.watch(trainable_variables)
if self.num_augmentations > 1:
critic_loss = 0.
q1 = 0.
q2 = 0.
for i in range(self.num_augmentations):
q1_i, q2_i = self.critic_to_use(
states[i], actions=None, stop_grad_features=self.stop_grad_fqi)
critic_loss_i = (
tf.losses.mean_squared_error(
target_q, tf.gather_nd(q1_i, indices=action_indices)) +
tf.losses.mean_squared_error(
target_q, tf.gather_nd(q2_i, indices=action_indices)))
q1 += q1_i
q2 += q2_i
critic_loss += critic_loss_i
        q1 /= self.num_augmentations
        q2 /= self.num_augmentations
        critic_loss /= self.num_augmentations
        # mirror the single-augmentation branches so the CQL term below has q
        q = tf.minimum(q1, q2)
elif self.num_augmentations == 1:
q1, q2 = self.critic_to_use(
states[0], actions=None, stop_grad_features=self.stop_grad_fqi)
q = tf.minimum(q1, q2)
critic_loss = (
tf.losses.mean_squared_error(
target_q, tf.gather_nd(q1, indices=action_indices)) +
tf.losses.mean_squared_error(
target_q, tf.gather_nd(q2, indices=action_indices)))
else:
q1, q2 = self.critic_to_use(
states, actions=None, stop_grad_features=self.stop_grad_fqi)
q = tf.minimum(q1, q2)
critic_loss = (
tf.losses.mean_squared_error(
target_q, tf.gather_nd(q1, indices=action_indices)) +
tf.losses.mean_squared_error(
target_q, tf.gather_nd(q2, indices=action_indices)))
# LSE from CQL
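      # Conservative Q-Learning regularizer: E[logsumexp_a Q(s, a) - Q(s, a_data)],
      # which pushes down the Q-values of actions not present in the dataset.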
cql_logsumexp = tf.reduce_logsumexp(q, 1)
cql_loss = tf.reduce_mean(cql_logsumexp -
tf.gather_nd(q, indices=action_indices))
# Jointly optimize both losses
critic_loss = critic_loss + cql_loss
critic_grads = tape.gradient(critic_loss,
trainable_variables)
self.critic_optimizer.apply_gradients(
zip(critic_grads, trainable_variables))
criticCL.soft_update(
self.critic, self.critic_target, tau=self.tau)
gn = tf.reduce_mean(
[tf.linalg.norm(v) for v in critic_grads if v is not None])
return {
'q1': tf.reduce_mean(q1),
'q2': tf.reduce_mean(q2),
'critic_loss': critic_loss,
'cql_loss': cql_loss,
'critic_grad_norm': gn
}
@tf.function
def fit_embedding(self, states, actions,
next_states, next_actions, rewards,
discounts, level_ids):
"""Fit embedding using contrastive objectives.
Args:
states: batch of states
actions: batch of actions
next_states: batch of next states
next_actions: batch of next actions
rewards: batch of next rewards
discounts: batch of discounts
level_ids: batch of level ids
Returns:
Dictionary with losses
"""
del next_actions, discounts, next_states, rewards
ssl_variables = self.critic.trainable_variables
# Number of MDPs for which to compute quantiles
n_levels = self.n_levels_nce
if 'cce' in self.rep_learn_keywords or 'clip' in self.rep_learn_keywords:
ssl_variables = ssl_variables + self.classifier.trainable_variables
if 'nce' in self.rep_learn_keywords:
ssl_variables = ssl_variables + self.embedding.trainable_variables
# Track whether need to backprop over representation
with tf.GradientTape(watch_accessed_variables=False) as tape:
tape.watch(ssl_variables)
# Compute Q(s,a) as well as phi(s)
q1, q2 = self.critic(
states[0],
actions=None,
return_features=False,
stop_grad_features=self.stop_grad_fqi)
q = tf.minimum(q1, q2)
rep_loss = tf.constant(0., dtype=tf.float32)
n_quantiles = self.n_quantiles
states_enc = [self.critic.encoder(states[0])]
# Use encoder_per_level (NOT CQL encoder)
states_level_enc = [tf.stop_gradient(self.encoder_per_level(states[0]))]
q_level = tf.minimum(
*self.task_critic_one(states_level_enc[0], actions=None))
actions_argmax = tf.argmax(q, 1)
action_indices = tf.stack([
tf.range(tf.shape(actions)[0],
dtype=tf.int32), tf.cast(actions_argmax, dtype=tf.int32)
],
axis=-1)
level_indices = tf.stack([
tf.range(tf.shape(actions)[0],
dtype=tf.int32), tf.cast(level_ids, dtype=tf.int32)
],
axis=-1)
q_level = tf.gather_nd(
tf.reshape(q_level, (-1, 200, self.output_dim_level)),
indices=level_indices)
if ('successor_features' in self.rep_learn_keywords or
'gvf_termination' in self.rep_learn_keywords):
q_level_gathered = tf.norm(q_level, ord=1, axis=1)
else:
q_level_gathered = tf.gather_nd(q_level, indices=action_indices)
if 'cce' in self.rep_learn_keywords:
states_psi = states_enc[0]
states_psi_target = tf.stop_gradient(states_enc[0])
elif 'nce' in self.rep_learn_keywords:
states_psi = self.embedding(states_enc[0])
states_psi_target = tf.stop_gradient(
self.embedding(states_level_enc[0]))
uniques, _, counts = tf.unique_with_counts(level_ids)
uniques = tf.cast(uniques, dtype=tf.int32)
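      # The helpers below bucket each level's Q-values into n_quantiles one-hot
      # bins and gather the matching (target) features; the bins serve as class
      # labels for the contrastive objectives.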
def compute_quantile_bins(level):
idx = tf.math.equal(level_ids, level)
quantiles_q_level = tfp.stats.quantiles(q_level_gathered[idx],
n_quantiles)
quantile_labels = tf.cast(
tf.one_hot(
tf.cast(
tfp.stats.find_bins(q_level_gathered[idx],
quantiles_q_level),
dtype=tf.int32),
depth=n_quantiles), tf.float32)
return quantile_labels
def compute_quantile_features(level):
idx = tf.math.equal(level_ids, level)
return states_psi[idx]
def compute_target_quantile_features(level):
idx = tf.math.equal(level_ids, level)
return states_psi_target[idx]
def rec_compute_quantile_levels(levels, ctr):
if ctr <= 0:
return tf.reshape(tf.one_hot(0, depth=200), (1, -1))
else:
return tf.concat([
tf.one_hot(
level_ids[tf.math.equal(level_ids, levels[0])], depth=200),
rec_compute_quantile_levels(levels[1:], ctr - 1)
], 0)
def rec_compute_quantile_bins(levels, ctr):
if ctr <= 0:
return tf.zeros(shape=(1, n_quantiles))
else:
return tf.concat([
compute_quantile_bins(levels[0]),
rec_compute_quantile_bins(levels[1:], ctr - 1)
], 0)
def rec_compute_quantile_features(levels, ctr):
if ctr <= 0:
return tf.zeros(shape=(1, self.latent_dim))
else:
return tf.concat([
compute_quantile_features(levels[0]),
rec_compute_quantile_features(levels[1:], ctr - 1)
], 0)
def rec_compute_target_quantile_features(levels, ctr):
if ctr <= 0:
return tf.zeros(shape=(1, self.latent_dim))
else:
return tf.concat([
compute_target_quantile_features(levels[0]),
rec_compute_target_quantile_features(levels[1:], ctr - 1)
], 0)
sorted_unique_levels = tf.gather(
uniques, tf.argsort(counts, direction='DESCENDING'))
quantile_bins = rec_compute_quantile_bins(sorted_unique_levels,
n_levels)[:-1]
quantile_features = rec_compute_quantile_features(
sorted_unique_levels, n_levels)[:-1]
quantile_levels = None
if 'nce' in self.rep_learn_keywords:
quantile_target_features = rec_compute_target_quantile_features(
sorted_unique_levels, n_levels)[:-1]
quantile_features = tf.linalg.l2_normalize(quantile_features, 1)
quantile_target_features = tf.linalg.l2_normalize(
quantile_target_features, 1)
rep_loss += -self.infonce_by_class_level(
features=quantile_features,
target_features=quantile_target_features,
classes=quantile_bins,
levels=quantile_levels,
temp=self.temp,
n_batch=tf.shape(quantile_bins)[0])
elif 'cce' in self.rep_learn_keywords:
quantile_features = quantile_features / self.temp
logits = self.classifier(quantile_features)
rep_loss += tf.reduce_mean(
self.cce(tf.argmax(quantile_bins, 1), logits))
elif 'energy' in self.rep_learn_keywords:
energy = tf.exp(
-tf.reduce_sum(tf.abs(tf.expand_dims(q_level, 1) - q_level), -1))
outer_prod = tf.einsum('ik,jk->ij', states_enc[0],
states_level_enc[0])
scores = tf.nn.log_softmax(outer_prod / self.temp, -1)
rep_loss += -tf.reduce_mean(tf.reduce_mean(energy * scores, -1))
embedding_loss = self.reg * (rep_loss)
br_grads = tape.gradient(embedding_loss, ssl_variables)
self.br_optimizer.apply_gradients(zip(br_grads, ssl_variables))
gn = tf.reduce_mean([tf.linalg.norm(v)
for v in br_grads if v is not None])
metrics_dict = {
'embedding_loss': embedding_loss,
'embedding_grad_norm': gn
}
return metrics_dict
@tf.function
def fit_task_critics(self, mb_states, mb_actions,
mb_next_states, mb_next_actions,
mb_rewards, mb_discounts,
level_ids):
"""Updates per-level critic parameters.
Args:
mb_states: Batch of states.
mb_actions: Batch of actions.
mb_next_states: Batch of next states.
mb_next_actions: Batch of next actions from training policy.
mb_rewards: Batch of rewards.
mb_discounts: Batch of masks indicating the end of the episodes.
level_ids: Batch of level ids
Returns:
Dictionary with information to track.
"""
if 'popart' in self.rep_learn_keywords:
# The PopArt normalization normalizes the GVF's cumulant signal so that
# it's not affected by the difference in scales across MDPs.
mb_rewards = self.reward_normalizer.normalize_target(mb_rewards)
trainable_variables = self.encoder_per_level.trainable_variables + self.task_critic_one.trainable_variables
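    # The per-level critic packs all levels into a single output head of size
    # output_dim_level * num_levels, so entries are addressed with flat indices
    # of the form level_id * output_dim_level + action.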
next_action_indices = tf.stack([
tf.range(tf.shape(mb_next_actions)[0],
dtype=tf.int32), level_ids * self.output_dim_level +
tf.cast(mb_next_actions, dtype=tf.int32)
],
axis=-1)
action_indices = tf.stack([
tf.range(tf.shape(mb_actions)[0], dtype=tf.int32),
level_ids * self.output_dim_level + tf.cast(mb_actions, dtype=tf.int32)
],
axis=-1)
level_ids = tf.stack([
tf.range(tf.shape(mb_next_actions)[0],
dtype=tf.int32), tf.cast(level_ids, dtype=tf.int32)
],
axis=-1)
next_states = [self.encoder_per_level_target(mb_next_states[0])]
next_q1, next_q2 = self.task_critic_target_one(
next_states[0], actions=None)
# Learn d-dimensional successor features
if 'successor_features' in self.rep_learn_keywords:
target_q = tf.concat(
[next_states[0]] * 200, 1) + self.discount * tf.expand_dims(
mb_discounts, 1) * tf.minimum(next_q1, next_q2)
# Learn discounted episode termination
elif 'gvf_termination' in self.rep_learn_keywords:
target_q = tf.expand_dims(
mb_discounts, 1) + self.discount * tf.expand_dims(
mb_discounts, 1) * tf.minimum(next_q1, next_q2)
# Learn discounted future action counts
elif 'gvf_action_count' in self.rep_learn_keywords:
target_q = tf.concat(
[tf.one_hot(mb_actions, depth=self.action_dim)] * 200,
1) + self.discount * tf.expand_dims(mb_discounts, 1) * tf.minimum(
next_q1, next_q2)
else:
target_q = tf.expand_dims(
mb_rewards, 1) + self.discount * tf.expand_dims(
mb_discounts, 1) * tf.minimum(next_q1, next_q2)
if ('successor_features' in self.rep_learn_keywords or
'gvf_termination' in self.rep_learn_keywords or
'gvf_action_count' in self.rep_learn_keywords):
target_q = tf.reshape(target_q, (-1, 200, self.output_dim_level))
target_q = tf.gather_nd(target_q, indices=level_ids)
else:
target_q = tf.gather_nd(target_q, indices=next_action_indices)
with tf.GradientTape(watch_accessed_variables=False) as tape:
tape.watch(trainable_variables)
states = [self.encoder_per_level(mb_states[0])]
q1_all, q2_all = self.task_critic_one(states[0], actions=None)
q = tf.minimum(q1_all, q2_all)
if ('successor_features' in self.rep_learn_keywords or
'gvf_termination' in self.rep_learn_keywords or
'gvf_action_count' in self.rep_learn_keywords):
q1_all = tf.reshape(q1_all, (-1, 200, self.output_dim_level))
q2_all = tf.reshape(q2_all, (-1, 200, self.output_dim_level))
critic_loss = (
tf.losses.mean_squared_error(
target_q, tf.gather_nd(q1_all, indices=level_ids)) +
tf.losses.mean_squared_error(
target_q, tf.gather_nd(q2_all, indices=level_ids)))
else:
critic_loss = (
tf.losses.mean_squared_error(
target_q, tf.gather_nd(q1_all, indices=action_indices)) +
tf.losses.mean_squared_error(
target_q, tf.gather_nd(q2_all, indices=action_indices)))
critic_grads = tape.gradient(critic_loss, trainable_variables)
self.task_critic_optimizer.apply_gradients(zip(critic_grads,
trainable_variables))
criticCL.soft_update(
self.encoder_per_level, self.encoder_per_level_target, tau=self.tau)
criticCL.soft_update(
self.task_critic_one, self.task_critic_target_one, tau=self.tau)
gn = tf.reduce_mean(
[tf.linalg.norm(v) for v in critic_grads if v is not None])
return {
'avg_level_critic_loss': tf.reduce_mean(critic_loss),
'avg_q': tf.reduce_mean(q),
'level_critic_grad_norm': gn
}
@tf.function
def update_step(self, replay_buffer_iter, train_target='both'):
"""Performs a single training step for critic and embedding.
Args:
      replay_buffer_iter: A tensorflow graph iterable object.
      train_target: string specifying which updates to run: 'rl', 'encoder', or
        'both'.
Returns:
Dictionary with losses to track.
"""
transition = next(replay_buffer_iter)
numpy_dataset = isinstance(replay_buffer_iter, np.ndarray)
# observation: n_batch x n_timesteps x 1 x H*W*3*n_frames x 1
# -> n_batch x H x W x 3*n_frames
if not numpy_dataset:
states = transition.observation[:, 0]
next_states = transition.observation[:, 1]
actions = transition.action[:, 0]
rewards = transition.reward
level_ids = transition.policy_info[:, 0]
if tf.shape(transition.reward)[1] > 2:
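        # fold the multi-step reward sequence into a single discounted n-step return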
rewards = tf.einsum(
'ij,j->i', rewards,
self.discount**tf.range(
0, tf.shape(transition.reward)[1], dtype=tf.float32))
self.n_step_rewards = tf.shape(transition.reward)[1]
else:
rewards = transition.reward[:, 0]
self.n_step_rewards = 1
discounts = transition.discount[:, 0]
if transition.observation.dtype == tf.uint8:
states = tf.cast(states, tf.float32) / 255.
next_states = tf.cast(next_states, tf.float32) / 255.
else:
states, actions, rewards, next_states, discounts = transition
self.reward_normalizer.update_normalization_statistics(rewards)
if self.num_augmentations > 0:
states, next_states = tf_utils.image_aug(
states,
next_states,
img_pad=4,
num_augmentations=self.num_augmentations,
obs_dim=64,
channels=3,
cropped_shape=[self.batch_size, 68, 68, 3])
next_actions_pi = self.act(next_states, data_aug=True)
next_actions_mu = transition.action[:, 1]
next_actions_pi_per_level = next_actions_mu
states_b1 = states
next_states_b1 = next_states
actions_b1 = actions
next_actions_b1 = next_actions_pi
rewards_b1 = rewards
discounts_b1 = discounts
level_ids_b1 = level_ids
states_b2 = states
next_states_b2 = next_states
actions_b2 = actions
next_actions_b2 = next_actions_pi
rewards_b2 = rewards
discounts_b2 = discounts
if train_target == 'both':
critic_dict = self.fit_critic(states_b2, actions_b2, next_states_b2,
next_actions_b2, rewards_b2, discounts_b2)
print('Updating per-task critics')
ssl_dict = {}
critic_distillation_dict = self.fit_task_critics(
states_b1, actions_b1, next_states_b1, next_actions_pi_per_level,
rewards_b1,
discounts_b1, level_ids_b1)
print('Done updating per-task critics')
return {**ssl_dict, **critic_dict, **critic_distillation_dict}
elif train_target == 'encoder':
print('Updating per-task critics')
critic_distillation_dict = self.fit_task_critics(
states_b1, actions_b1, next_states_b1, next_actions_pi_per_level,
rewards_b1,
discounts_b1, level_ids_b1)
print('Done updating per-task critics')
ssl_dict = {}
critic_dict = {}
return {**ssl_dict, **critic_distillation_dict}
elif train_target == 'rl':
critic_distillation_dict = {}
critic_dict = self.fit_critic(states_b2, actions_b2, next_states_b2,
next_actions_b2, rewards_b2, discounts_b2)
ssl_dict = self.fit_embedding(states_b1, actions_b1, next_states_b1,
next_actions_b1, rewards_b1, discounts_b1,
level_ids)
return {**ssl_dict, **critic_dict, **critic_distillation_dict}
@tf.function
def act(self, states, data_aug=False):
"""Act from a batch of states.
Args:
states: batch of states
data_aug: optional flag
Returns:
actions
"""
if data_aug and self.num_augmentations > 0:
states = states[0]
if self.num_augmentations > 0:
# use pad of 2 to bump 64 to 68 with 2 + 64 + 2 on each side
img_pad = 2
paddings = tf.constant(
[[0, 0], [img_pad, img_pad], [img_pad, img_pad], [0, 0]],
dtype=tf.int32)
states = tf.cast(
tf.pad(tf.cast(states * 255., tf.int32), paddings, 'SYMMETRIC'),
tf.float32) / 255.
q1, q2 = self.critic_to_use(states, actions=None)
q = tf.minimum(q1, q2)
actions = tf.argmax(q, -1)
return actions
@tf.function
def act_per_level(self, states, level_ids, data_aug=False):
"""Act from a batch of states, but with per-level critics.
Args:
states: batch of states
level_ids: batch of level ids
data_aug: optional flag
Returns:
actions
"""
if data_aug and self.num_augmentations > 0:
states = states[0]
if self.num_augmentations > 0:
# use pad of 2 to bump 64 to 68 with 2 + 64 + 2 on each side
img_pad = 2
paddings = tf.constant(
[[0, 0], [img_pad, img_pad], [img_pad, img_pad], [0, 0]],
dtype=tf.int32)
states = tf.cast(
tf.pad(tf.cast(states * 255., tf.int32), paddings, 'SYMMETRIC'),
tf.float32) / 255.
features = self.encoder_per_level(states)
# n_batch x 200 x 15
q1, q2 = self.task_critic_one(features, actions=None)
# n_batch x 200 x 15
q = tf.minimum(q1, q2)
# n_batch x 15
level_ids = tf.stack([
tf.range(tf.shape(q)[0], dtype=tf.int32),
tf.cast(level_ids, dtype=tf.int32)
],
axis=-1)
actions = tf.gather_nd(
tf.argmax(tf.reshape(q, (-1, 200, self.action_dim)), -1), level_ids)
return actions
def save(self, path, step, overwrite_latest=True):
"""Saves all submodels into pre-defined directory.
Args:
path: str specifying model save path
step: which iteration to save from
      overwrite_latest: deprecated, now handled via tf Checkpoints
Returns:
None
"""
del overwrite_latest
dir_list = tf.io.gfile.glob(path)
if dir_list:
for file in dir_list:
# Delete all files from previous epoch
tf.io.gfile.remove(file)
for model_name, model in self.model_dict.items():
model.save_weights(path + '/%s_%d' % (model_name, step))
print('[Step %d] Saved model to %s' % (step, path))
| {
"content_hash": "fbe63e021ce980c3a58814060c5aa168",
"timestamp": "",
"source": "github",
"line_count": 1024,
"max_line_length": 143,
"avg_line_length": 36.9345703125,
"alnum_prop": 0.6078897966738056,
"repo_name": "google-research/google-research",
"id": "81a35596d3f5caea0a61dc33c2f9ab73151dacc1",
"size": "38429",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "representation_batch_rl/representation_batch_rl/ours.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "9817"
},
{
"name": "C++",
"bytes": "4166670"
},
{
"name": "CMake",
"bytes": "6412"
},
{
"name": "CSS",
"bytes": "27092"
},
{
"name": "Cuda",
"bytes": "1431"
},
{
"name": "Dockerfile",
"bytes": "7145"
},
{
"name": "Gnuplot",
"bytes": "11125"
},
{
"name": "HTML",
"bytes": "77599"
},
{
"name": "ImageJ Macro",
"bytes": "50488"
},
{
"name": "Java",
"bytes": "487585"
},
{
"name": "JavaScript",
"bytes": "896512"
},
{
"name": "Julia",
"bytes": "67986"
},
{
"name": "Jupyter Notebook",
"bytes": "71290299"
},
{
"name": "Lua",
"bytes": "29905"
},
{
"name": "MATLAB",
"bytes": "103813"
},
{
"name": "Makefile",
"bytes": "5636"
},
{
"name": "NASL",
"bytes": "63883"
},
{
"name": "Perl",
"bytes": "8590"
},
{
"name": "Python",
"bytes": "53790200"
},
{
"name": "R",
"bytes": "101058"
},
{
"name": "Roff",
"bytes": "1208"
},
{
"name": "Rust",
"bytes": "2389"
},
{
"name": "Shell",
"bytes": "730444"
},
{
"name": "Smarty",
"bytes": "5966"
},
{
"name": "Starlark",
"bytes": "245038"
}
],
"symlink_target": ""
} |
"""Functions to construct sparse matrices
"""
from __future__ import division, print_function, absolute_import
__docformat__ = "restructuredtext en"
__all__ = ['spdiags', 'eye', 'identity', 'kron', 'kronsum',
'hstack', 'vstack', 'bmat', 'rand', 'random', 'diags', 'block_diag']
import numpy as np
from scipy._lib.six import xrange
from .sputils import upcast, get_index_dtype
from .csr import csr_matrix
from .csc import csc_matrix
from .bsr import bsr_matrix
from .coo import coo_matrix
from .dia import dia_matrix
from .base import issparse
def spdiags(data, diags, m, n, format=None):
"""
Return a sparse matrix from diagonals.
Parameters
----------
data : array_like
matrix diagonals stored row-wise
diags : diagonals to set
- k = 0 the main diagonal
- k > 0 the k-th upper diagonal
- k < 0 the k-th lower diagonal
m, n : int
shape of the result
format : str, optional
Format of the result. By default (format=None) an appropriate sparse
matrix format is returned. This choice is subject to change.
See Also
--------
diags : more convenient form of this function
dia_matrix : the sparse DIAgonal format.
Examples
--------
>>> data = np.array([[1, 2, 3, 4], [1, 2, 3, 4], [1, 2, 3, 4]])
>>> diags = np.array([0, -1, 2])
>>> spdiags(data, diags, 4, 4).toarray()
array([[1, 0, 3, 0],
[1, 2, 0, 4],
[0, 2, 3, 0],
[0, 0, 3, 4]])
"""
return dia_matrix((data, diags), shape=(m,n)).asformat(format)
def diags(diagonals, offsets, shape=None, format=None, dtype=None):
"""
Construct a sparse matrix from diagonals.
Parameters
----------
diagonals : sequence of array_like
Sequence of arrays containing the matrix diagonals,
corresponding to `offsets`.
offsets : sequence of int
Diagonals to set:
- k = 0 the main diagonal
- k > 0 the k-th upper diagonal
- k < 0 the k-th lower diagonal
shape : tuple of int, optional
Shape of the result. If omitted, a square matrix large enough
to contain the diagonals is returned.
format : {"dia", "csr", "csc", "lil", ...}, optional
Matrix format of the result. By default (format=None) an
appropriate sparse matrix format is returned. This choice is
subject to change.
dtype : dtype, optional
Data type of the matrix.
See Also
--------
spdiags : construct matrix from diagonals
Notes
-----
This function differs from `spdiags` in the way it handles
off-diagonals.
The result from `diags` is the sparse equivalent of::
np.diag(diagonals[0], offsets[0])
+ ...
+ np.diag(diagonals[k], offsets[k])
Repeated diagonal offsets are disallowed.
.. versionadded:: 0.11
Examples
--------
>>> diagonals = [[1, 2, 3, 4], [1, 2, 3], [1, 2]]
>>> diags(diagonals, [0, -1, 2]).toarray()
array([[1, 0, 1, 0],
[1, 2, 0, 2],
[0, 2, 3, 0],
[0, 0, 3, 4]])
Broadcasting of scalars is supported (but shape needs to be
specified):
>>> diags([1, -2, 1], [-1, 0, 1], shape=(4, 4)).toarray()
array([[-2., 1., 0., 0.],
[ 1., -2., 1., 0.],
[ 0., 1., -2., 1.],
[ 0., 0., 1., -2.]])
If only one diagonal is wanted (as in `numpy.diag`), the following
works as well:
>>> diags([1, 2, 3], 1).toarray()
array([[ 0., 1., 0., 0.],
[ 0., 0., 2., 0.],
[ 0., 0., 0., 3.],
[ 0., 0., 0., 0.]])
"""
# if offsets is not a sequence, assume that there's only one diagonal
try:
iter(offsets)
except TypeError:
# now check that there's actually only one diagonal
try:
iter(diagonals[0])
except TypeError:
diagonals = [np.atleast_1d(diagonals)]
else:
raise ValueError("Different number of diagonals and offsets.")
else:
diagonals = list(map(np.atleast_1d, diagonals))
offsets = np.atleast_1d(offsets)
# Basic check
if len(diagonals) != len(offsets):
raise ValueError("Different number of diagonals and offsets.")
# Determine shape, if omitted
if shape is None:
m = len(diagonals[0]) + abs(int(offsets[0]))
shape = (m, m)
# Determine data type, if omitted
if dtype is None:
dtype = np.common_type(*diagonals)
# Construct data array
m, n = shape
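    # Each diagonal is stored padded to a common length M, as required by the
    # DIA format.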
M = max([min(m + offset, n - offset) + max(0, offset)
for offset in offsets])
M = max(0, M)
data_arr = np.zeros((len(offsets), M), dtype=dtype)
for j, diagonal in enumerate(diagonals):
offset = offsets[j]
k = max(0, offset)
length = min(m + offset, n - offset)
if length <= 0:
raise ValueError("Offset %d (index %d) out of bounds" % (offset, j))
try:
data_arr[j, k:k+length] = diagonal
except ValueError:
if len(diagonal) != length and len(diagonal) != 1:
raise ValueError(
"Diagonal length (index %d: %d at offset %d) does not "
"agree with matrix size (%d, %d)." % (
j, len(diagonal), offset, m, n))
raise
return dia_matrix((data_arr, offsets), shape=(m, n)).asformat(format)
def identity(n, dtype='d', format=None):
"""Identity matrix in sparse format
Returns an identity matrix with shape (n,n) using a given
sparse format and dtype.
Parameters
----------
n : int
Shape of the identity matrix.
dtype : dtype, optional
Data type of the matrix
format : str, optional
Sparse format of the result, e.g. format="csr", etc.
Examples
--------
>>> identity(3).toarray()
array([[ 1., 0., 0.],
[ 0., 1., 0.],
[ 0., 0., 1.]])
>>> identity(3, dtype='int8', format='dia')
<3x3 sparse matrix of type '<type 'numpy.int8'>'
with 3 stored elements (1 diagonals) in DIAgonal format>
"""
return eye(n, n, dtype=dtype, format=format)
def eye(m, n=None, k=0, dtype=float, format=None):
"""Sparse matrix with ones on diagonal
Returns a sparse (m x n) matrix where the k-th diagonal
is all ones and everything else is zeros.
Parameters
----------
m : int
Number of rows in the matrix.
n : int, optional
Number of columns. Default: `m`.
k : int, optional
Diagonal to place ones on. Default: 0 (main diagonal).
dtype : dtype, optional
Data type of the matrix.
format : str, optional
Sparse format of the result, e.g. format="csr", etc.
Examples
--------
>>> from scipy import sparse
>>> sparse.eye(3).toarray()
array([[ 1., 0., 0.],
[ 0., 1., 0.],
[ 0., 0., 1.]])
>>> sparse.eye(3, dtype=np.int8)
<3x3 sparse matrix of type '<type 'numpy.int8'>'
with 3 stored elements (1 diagonals) in DIAgonal format>
"""
if n is None:
n = m
m,n = int(m),int(n)
if m == n and k == 0:
# fast branch for special formats
if format in ['csr', 'csc']:
idx_dtype = get_index_dtype(maxval=n)
indptr = np.arange(n+1, dtype=idx_dtype)
indices = np.arange(n, dtype=idx_dtype)
data = np.ones(n, dtype=dtype)
cls = {'csr': csr_matrix, 'csc': csc_matrix}[format]
return cls((data,indices,indptr),(n,n))
elif format == 'coo':
idx_dtype = get_index_dtype(maxval=n)
row = np.arange(n, dtype=idx_dtype)
col = np.arange(n, dtype=idx_dtype)
data = np.ones(n, dtype=dtype)
return coo_matrix((data,(row,col)),(n,n))
diags = np.ones((1, max(0, min(m + k, n))), dtype=dtype)
return spdiags(diags, k, m, n).asformat(format)
def kron(A, B, format=None):
"""kronecker product of sparse matrices A and B
Parameters
----------
A : sparse or dense matrix
first matrix of the product
B : sparse or dense matrix
second matrix of the product
format : str, optional
format of the result (e.g. "csr")
Returns
-------
kronecker product in a sparse matrix format
Examples
--------
>>> from scipy import sparse
>>> A = sparse.csr_matrix(np.array([[0, 2], [5, 0]]))
>>> B = sparse.csr_matrix(np.array([[1, 2], [3, 4]]))
>>> sparse.kron(A, B).toarray()
array([[ 0, 0, 2, 4],
[ 0, 0, 6, 8],
[ 5, 10, 0, 0],
[15, 20, 0, 0]])
>>> sparse.kron(A, [[1, 2], [3, 4]]).toarray()
array([[ 0, 0, 2, 4],
[ 0, 0, 6, 8],
[ 5, 10, 0, 0],
[15, 20, 0, 0]])
"""
B = coo_matrix(B)
if (format is None or format == "bsr") and 2*B.nnz >= B.shape[0] * B.shape[1]:
# B is fairly dense, use BSR
A = csr_matrix(A,copy=True)
output_shape = (A.shape[0]*B.shape[0], A.shape[1]*B.shape[1])
if A.nnz == 0 or B.nnz == 0:
# kronecker product is the zero matrix
return coo_matrix(output_shape)
B = B.toarray()
data = A.data.repeat(B.size).reshape(-1,B.shape[0],B.shape[1])
data = data * B
return bsr_matrix((data,A.indices,A.indptr), shape=output_shape)
else:
# use COO
A = coo_matrix(A)
output_shape = (A.shape[0]*B.shape[0], A.shape[1]*B.shape[1])
if A.nnz == 0 or B.nnz == 0:
# kronecker product is the zero matrix
return coo_matrix(output_shape)
# expand entries of a into blocks
row = A.row.repeat(B.nnz)
col = A.col.repeat(B.nnz)
data = A.data.repeat(B.nnz)
row *= B.shape[0]
col *= B.shape[1]
# increment block indices
row,col = row.reshape(-1,B.nnz),col.reshape(-1,B.nnz)
row += B.row
col += B.col
row,col = row.reshape(-1),col.reshape(-1)
# compute block entries
data = data.reshape(-1,B.nnz) * B.data
data = data.reshape(-1)
return coo_matrix((data,(row,col)), shape=output_shape).asformat(format)
def kronsum(A, B, format=None):
"""kronecker sum of sparse matrices A and B
Kronecker sum of two sparse matrices is a sum of two Kronecker
products kron(I_n,A) + kron(B,I_m) where A has shape (m,m)
and B has shape (n,n) and I_m and I_n are identity matrices
of shape (m,m) and (n,n) respectively.
Parameters
----------
A
square matrix
B
square matrix
format : str
format of the result (e.g. "csr")
Returns
-------
kronecker sum in a sparse matrix format
Examples
--------
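    A small sketch with two 2x2 matrices (the dense result is shown for
    illustration):
    >>> from scipy import sparse
    >>> A = sparse.csr_matrix(np.array([[0, 2], [5, 0]]))
    >>> B = sparse.csr_matrix(np.array([[1, 2], [3, 4]]))
    >>> sparse.kronsum(A, B).toarray()
    array([[1, 2, 2, 0],
           [5, 1, 0, 2],
           [3, 0, 4, 2],
           [0, 3, 5, 4]])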
"""
A = coo_matrix(A)
B = coo_matrix(B)
if A.shape[0] != A.shape[1]:
raise ValueError('A is not square')
if B.shape[0] != B.shape[1]:
raise ValueError('B is not square')
dtype = upcast(A.dtype, B.dtype)
L = kron(eye(B.shape[0],dtype=dtype), A, format=format)
R = kron(B, eye(A.shape[0],dtype=dtype), format=format)
return (L+R).asformat(format) # since L + R is not always same format
def _compressed_sparse_stack(blocks, axis):
"""
Stacking fast path for CSR/CSC matrices
(i) vstack for CSR, (ii) hstack for CSC.
"""
other_axis = 1 if axis == 0 else 0
data = np.concatenate([b.data for b in blocks])
indices = np.concatenate([b.indices for b in blocks])
indptr = []
last_indptr = 0
constant_dim = blocks[0].shape[other_axis]
sum_dim = 0
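    # build the stacked indptr by shifting each block's pointers by the running
    # count of stored entries accumulated so far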
for b in blocks:
if b.shape[other_axis] != constant_dim:
raise ValueError('incompatible dimensions for axis %d' % other_axis)
sum_dim += b.shape[axis]
indptr.append(b.indptr[:-1] + last_indptr)
last_indptr += b.indptr[-1]
indptr.append([last_indptr])
indptr = np.concatenate(indptr)
if axis == 0:
return csr_matrix((data, indices, indptr),
shape=(sum_dim, constant_dim))
else:
return csc_matrix((data, indices, indptr),
shape=(constant_dim, sum_dim))
def hstack(blocks, format=None, dtype=None):
"""
Stack sparse matrices horizontally (column wise)
Parameters
----------
blocks
sequence of sparse matrices with compatible shapes
format : str
sparse format of the result (e.g. "csr")
by default an appropriate sparse matrix format is returned.
This choice is subject to change.
dtype : dtype, optional
The data-type of the output matrix. If not given, the dtype is
determined from that of `blocks`.
See Also
--------
vstack : stack sparse matrices vertically (row wise)
Examples
--------
>>> from scipy.sparse import coo_matrix, hstack
>>> A = coo_matrix([[1, 2], [3, 4]])
>>> B = coo_matrix([[5], [6]])
>>> hstack([A,B]).toarray()
array([[1, 2, 5],
[3, 4, 6]])
"""
return bmat([blocks], format=format, dtype=dtype)
def vstack(blocks, format=None, dtype=None):
"""
Stack sparse matrices vertically (row wise)
Parameters
----------
blocks
sequence of sparse matrices with compatible shapes
format : str, optional
sparse format of the result (e.g. "csr")
by default an appropriate sparse matrix format is returned.
This choice is subject to change.
dtype : dtype, optional
The data-type of the output matrix. If not given, the dtype is
determined from that of `blocks`.
See Also
--------
hstack : stack sparse matrices horizontally (column wise)
Examples
--------
>>> from scipy.sparse import coo_matrix, vstack
>>> A = coo_matrix([[1, 2], [3, 4]])
>>> B = coo_matrix([[5, 6]])
>>> vstack([A, B]).toarray()
array([[1, 2],
[3, 4],
[5, 6]])
"""
return bmat([[b] for b in blocks], format=format, dtype=dtype)
def bmat(blocks, format=None, dtype=None):
"""
Build a sparse matrix from sparse sub-blocks
Parameters
----------
blocks : array_like
Grid of sparse matrices with compatible shapes.
An entry of None implies an all-zero matrix.
format : {'bsr', 'coo', 'csc', 'csr', 'dia', 'dok', 'lil'}, optional
The sparse format of the result (e.g. "csr"). By default an
appropriate sparse matrix format is returned.
This choice is subject to change.
dtype : dtype, optional
The data-type of the output matrix. If not given, the dtype is
determined from that of `blocks`.
Returns
-------
bmat : sparse matrix
See Also
--------
block_diag, diags
Examples
--------
>>> from scipy.sparse import coo_matrix, bmat
>>> A = coo_matrix([[1, 2], [3, 4]])
>>> B = coo_matrix([[5], [6]])
>>> C = coo_matrix([[7]])
>>> bmat([[A, B], [None, C]]).toarray()
array([[1, 2, 5],
[3, 4, 6],
[0, 0, 7]])
>>> bmat([[A, None], [None, C]]).toarray()
array([[1, 2, 0],
[3, 4, 0],
[0, 0, 7]])
"""
blocks = np.asarray(blocks, dtype='object')
if blocks.ndim != 2:
raise ValueError('blocks must be 2-D')
M,N = blocks.shape
# check for fast path cases
if (N == 1 and format in (None, 'csr') and all(isinstance(b, csr_matrix)
for b in blocks.flat)):
A = _compressed_sparse_stack(blocks[:,0], 0)
if dtype is not None:
A = A.astype(dtype)
return A
elif (M == 1 and format in (None, 'csc')
and all(isinstance(b, csc_matrix) for b in blocks.flat)):
A = _compressed_sparse_stack(blocks[0,:], 1)
if dtype is not None:
A = A.astype(dtype)
return A
block_mask = np.zeros(blocks.shape, dtype=np.bool)
brow_lengths = np.zeros(M, dtype=np.int64)
bcol_lengths = np.zeros(N, dtype=np.int64)
# convert everything to COO format
for i in range(M):
for j in range(N):
if blocks[i,j] is not None:
A = coo_matrix(blocks[i,j])
blocks[i,j] = A
block_mask[i,j] = True
if brow_lengths[i] == 0:
brow_lengths[i] = A.shape[0]
else:
if brow_lengths[i] != A.shape[0]:
raise ValueError('blocks[%d,:] has incompatible row dimensions' % i)
if bcol_lengths[j] == 0:
bcol_lengths[j] = A.shape[1]
else:
if bcol_lengths[j] != A.shape[1]:
raise ValueError('blocks[:,%d] has incompatible column dimensions' % j)
# ensure that at least one value in each row and col is not None
if brow_lengths.min() == 0:
raise ValueError('blocks[%d,:] is all None' % brow_lengths.argmin())
if bcol_lengths.min() == 0:
raise ValueError('blocks[:,%d] is all None' % bcol_lengths.argmin())
nnz = sum([block.nnz for block in blocks[block_mask]])
if dtype is None:
dtype = upcast(*tuple([blk.dtype for blk in blocks[block_mask]]))
row_offsets = np.concatenate(([0], np.cumsum(brow_lengths)))
col_offsets = np.concatenate(([0], np.cumsum(bcol_lengths)))
shape = (np.sum(brow_lengths), np.sum(bcol_lengths))
data = np.empty(nnz, dtype=dtype)
idx_dtype = get_index_dtype(maxval=max(shape))
row = np.empty(nnz, dtype=idx_dtype)
col = np.empty(nnz, dtype=idx_dtype)
nnz = 0
for i in range(M):
for j in range(N):
if blocks[i,j] is not None:
B = blocks[i,j]
data[nnz:nnz + B.nnz] = B.data
row[nnz:nnz + B.nnz] = B.row
col[nnz:nnz + B.nnz] = B.col
row[nnz:nnz + B.nnz] += row_offsets[i]
col[nnz:nnz + B.nnz] += col_offsets[j]
nnz += B.nnz
return coo_matrix((data, (row, col)), shape=shape).asformat(format)
def block_diag(mats, format=None, dtype=None):
"""
Build a block diagonal sparse matrix from provided matrices.
Parameters
----------
mats : sequence of matrices
Input matrices.
format : str, optional
The sparse format of the result (e.g. "csr"). If not given, the matrix
is returned in "coo" format.
dtype : dtype specifier, optional
The data-type of the output matrix. If not given, the dtype is
determined from that of `blocks`.
Returns
-------
res : sparse matrix
Notes
-----
.. versionadded:: 0.11.0
See Also
--------
bmat, diags
Examples
--------
>>> A = coo_matrix([[1, 2], [3, 4]])
>>> B = coo_matrix([[5], [6]])
>>> C = coo_matrix([[7]])
>>> block_diag((A, B, C)).toarray()
array([[1, 2, 0, 0],
[3, 4, 0, 0],
[0, 0, 5, 0],
[0, 0, 6, 0],
[0, 0, 0, 7]])
"""
nmat = len(mats)
rows = []
for ia, a in enumerate(mats):
row = [None]*nmat
if issparse(a):
row[ia] = a
else:
row[ia] = coo_matrix(a)
rows.append(row)
return bmat(rows, format=format, dtype=dtype)
def random(m, n, density=0.01, format='coo', dtype=None,
random_state=None, data_rvs=None):
"""Generate a sparse matrix of the given shape and density with randomly
distributed values.
Parameters
----------
m, n : int
shape of the matrix
density : real, optional
density of the generated matrix: density equal to one means a full
matrix, density of 0 means a matrix with no non-zero items.
format : str, optional
sparse matrix format.
dtype : dtype, optional
type of the returned matrix values.
random_state : {numpy.random.RandomState, int}, optional
Random number generator or random seed. If not given, the singleton
numpy.random will be used. This random state will be used
for sampling the sparsity structure, but not necessarily for sampling
the values of the structurally nonzero entries of the matrix.
data_rvs : callable, optional
Samples a requested number of random values.
This function should take a single argument specifying the length
of the ndarray that it will return. The structurally nonzero entries
of the sparse random matrix will be taken from the array sampled
by this function. By default, uniform [0, 1) random values will be
sampled using the same random state as is used for sampling
the sparsity structure.
Examples
--------
>>> from scipy.sparse import construct
>>> from scipy import stats
>>> class CustomRandomState(object):
... def randint(self, k):
... i = np.random.randint(k)
... return i - i % 2
>>> rs = CustomRandomState()
>>> rvs = stats.poisson(25, loc=10).rvs
>>> S = construct.random(3, 4, density=0.25, random_state=rs, data_rvs=rvs)
>>> S.A
array([[ 36., 0., 33., 0.],
[ 0., 0., 0., 0.],
[ 0., 0., 36., 0.]])
Notes
-----
Only float types are supported for now.
"""
if density < 0 or density > 1:
raise ValueError("density expected to be 0 <= density <= 1")
if dtype and (dtype not in [np.float32, np.float64, np.longdouble]):
raise NotImplementedError("type %s not supported" % dtype)
mn = m * n
tp = np.intc
if mn > np.iinfo(tp).max:
tp = np.int64
if mn > np.iinfo(tp).max:
msg = """\
Trying to generate a random sparse matrix such that the product of dimensions is
greater than %d - this is not supported on this machine
"""
raise ValueError(msg % np.iinfo(tp).max)
# Number of non zero values
k = int(density * m * n)
if random_state is None:
random_state = np.random
elif isinstance(random_state, (int, np.integer)):
random_state = np.random.RandomState(random_state)
if data_rvs is None:
data_rvs = random_state.rand
# Use the algorithm from python's random.sample for k < mn/3.
if mn < 3*k:
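        # many nonzeros requested: permuting all mn flat indices is cheaper than
        # rejection sampling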
# We should use this line, but choice is only available in numpy >= 1.7
# ind = random_state.choice(mn, size=k, replace=False)
ind = random_state.permutation(mn)[:k]
else:
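        # few nonzeros requested: rejection-sample distinct flat indices, as
        # python's random.sample does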
ind = np.empty(k, dtype=tp)
selected = set()
for i in xrange(k):
j = random_state.randint(mn)
while j in selected:
j = random_state.randint(mn)
selected.add(j)
ind[i] = j
j = np.floor(ind * 1. / m).astype(tp)
i = (ind - j * m).astype(tp)
vals = data_rvs(k).astype(dtype)
return coo_matrix((vals, (i, j)), shape=(m, n)).asformat(format)
def rand(m, n, density=0.01, format="coo", dtype=None, random_state=None):
"""Generate a sparse matrix of the given shape and density with uniformly
distributed values.
Parameters
----------
m, n : int
shape of the matrix
density : real, optional
density of the generated matrix: density equal to one means a full
matrix, density of 0 means a matrix with no non-zero items.
format : str, optional
sparse matrix format.
dtype : dtype, optional
type of the returned matrix values.
random_state : {numpy.random.RandomState, int}, optional
Random number generator or random seed. If not given, the singleton
numpy.random will be used.
Notes
-----
Only float types are supported for now.
"""
return random(m, n, density, format, dtype, random_state)
| {
"content_hash": "6d9ba269a1d14ddc1778dc75add366e1",
"timestamp": "",
"source": "github",
"line_count": 794,
"max_line_length": 95,
"avg_line_length": 30.197732997481108,
"alnum_prop": 0.5532802268840973,
"repo_name": "aman-iitj/scipy",
"id": "4ab2162508036a63678e901d1e79f64bd22a3e95",
"size": "23977",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "scipy/sparse/construct.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "4433773"
},
{
"name": "C++",
"bytes": "3618764"
},
{
"name": "FORTRAN",
"bytes": "5586152"
},
{
"name": "HTML",
"bytes": "124328"
},
{
"name": "Makefile",
"bytes": "4903"
},
{
"name": "Matlab",
"bytes": "4346"
},
{
"name": "Python",
"bytes": "9351653"
},
{
"name": "Shell",
"bytes": "3172"
},
{
"name": "TeX",
"bytes": "52106"
}
],
"symlink_target": ""
} |
import autonetkit
import autonetkit.load.graphml as graphml
import os
import gzip
import json
import unittest
import subprocess
import shutil
def gzip_to_json(filename):
with gzip.open(filename, "r") as json_fh:
return json.load(json_fh)
def json_to_gzip(data, filename):
with gzip.open(filename, "wb") as json_fh:
return json.dump(data, json_fh)
automated = True # whether to open ksdiff, log to file...
if __name__ == "__main__":
automated = False
dirname, filename = os.path.split(os.path.abspath(__file__))
anm = autonetkit.ANM()
input_file = os.path.join(dirname, "small_internet.graphml")
input_graph = graphml.load_graphml(input_file)
import autonetkit.build_network as build_network
anm = build_network.initialise(input_graph)
anm = build_network.apply_design_rules(anm)
import autonetkit.console_script as console_script
render_hostname = "localhost"
nidb = console_script.create_nidb(anm)
import autonetkit.compilers.platform.netkit as pl_netkit
nk_compiler = pl_netkit.NetkitCompiler(nidb, anm, render_hostname)
nk_compiler.compile()
import autonetkit.render
autonetkit.render.render(nidb)
dst_folder = nidb.topology['localhost'].render_dst_folder
# test folder structure
dir_structure = {}
for path, dirs, files in os.walk(dst_folder):
dir_structure[path] = list(files)
# record folder structure
structure_filename = os.path.join(dirname, "dir_structure_expected.tar.gz")
json_to_gzip(dir_structure, structure_filename)
dir_structure_expected = gzip_to_json(structure_filename)
assert dir_structure == dir_structure_expected
routernames = ["as1r1", "as20r3"]
config_files = ["bgpd.conf", "ospfd.conf", "zebra.conf"]
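# compare the rendered Quagga daemon configs of selected routers against the
# expected outputs stored next to this test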
for routername in routernames:
router = nidb.node(routername)
zebra_dir = os.path.join(router.render.base_dst_folder, "etc", "zebra")
for conf_file in config_files:
expected_filename = os.path.join(dirname, "%s_%s" % (routername, conf_file))
with open(expected_filename, "r") as fh:
expected_result = fh.read()
actual_filename = os.path.join(zebra_dir, conf_file)
with open(actual_filename, "r") as fh:
actual_result = fh.read()
if expected_result != actual_result:
if automated:
#TODO: use difflib
print "Expected"
print expected_result
print "Actual"
print actual_result
raise AssertionError("Invalid result")
else:
cmd = ["ksdiff", expected_filename, actual_filename]
child = subprocess.Popen(cmd)
answer = raw_input("Merge (answer yes to merge): ")
if answer == "yes":
print "Replacing expected with output"
shutil.move(actual_filename, expected_filename)
| {
"content_hash": "6ceb40582e4c52d682ca5964eaa0ca12",
"timestamp": "",
"source": "github",
"line_count": 87,
"max_line_length": 84,
"avg_line_length": 32.735632183908045,
"alnum_prop": 0.6667837078651685,
"repo_name": "sysbot/autonetkit",
"id": "8c7f8f0bd8bfde848c4806e040c8b0d6bcde628f",
"size": "2848",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/small_internet/test_small_internet.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
"""
Preprocessing of the Monroe Plan Corpus for copct:
- Converts from lisp data to Python tuples
- Extracts intermediate states for every plan in the corpus
- Reformats intentions into (state, task, parameters) form
"""
from monroe_static import locs, watercos, powercos, poslocs, sleaders, gens, food, pcrews
from monroe_utils import unify, single_unify
def parse_monroe(infilename='monroe5000.txt', outfilename='monroe5000.py'):
"""
Rewrite the Monroe corpus lisp data file as a tuple in a python script.
All symbols are converted to strings.
Inputs:
infilename: filename from which the lisp data is read
outfilename: filename to which the python script is written
"""
infile = open(infilename,"r")
outfile = open(outfilename,"w")
outfile.write("corpus = (\n")
syntax_chars = "() \t\n" # lisp syntax
previous_char = " "
for line in infile:
for char in line:
if (previous_char in syntax_chars) != (char in syntax_chars):
# just changed between syntax and symbol, quote symbol for python
outfile.write("\"")
# separate symbols with commas for python
if char in syntax_chars: outfile.write(",")
# separate sub-lists with commas for python
if previous_char == ")": outfile.write(",")
# write current character and advance
outfile.write(char)
previous_char = char
outfile.write(")")
infile.close()
outfile.close()
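# Illustrative sketch (not taken from the corpus itself): parse_monroe rewrites
# each lisp symbol as a quoted Python string and each sub-list as a nested tuple,
# so a fragment such as
#   (GET-TO MAN1 PARK5)
# comes out roughly as
#   ("GET-TO", "MAN1", "PARK5",),
# with the exact spacing/commas depending on the layout of monroe5000.txt.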
def populate_states_from_op(pre_states, op):
"""
Infers additional facts that must have been true in the previous states for the op to be applied successfully.
Returns the states with the additional facts added.
This implementation has a separate case for every operator in the Monroe domain.
Inputs:
op: a grounded operator of the form (name, arg1, arg2, ...)
pre_states: the states leading up to the application of the operator.
pre_states[i] is the i^{th} state, of the form (objs, facts).
objs is a list of possible parameter values, facts is a list of relations over those objects.
Outputs:
states[i]: the states with additional facts added.
if op is a primitive action, the last element is a new state after op was applied.
"""
objs = pre_states[-1][0]
# facts true before and after operator is applied (may be altered)
pre_facts = set(pre_states[-1][1])
task = op[0]
"""
(:operator (!navegate-vehicle ?person ?veh ?loc)
((person ?person) (vehicle ?veh) (atloc ?veh ?vehloc)
(atloc ?person ?vehloc) (can-drive ?person ?veh)
(not (wrecked-car ?veh)))
((atloc ?veh ?vehloc) (atloc ?person ?vehloc))
((atloc ?veh ?loc) (atloc ?person ?loc)))
"""
if task == '!NAVEGATE-VEHICLE':
person, veh, loc = op[1:]
for s in range(len(pre_states)):
pre_states[s] = (objs, tuple(set(pre_states[s][1]) | set((('PERSON', person), ('VEHICLE', veh)))))
post_facts = pre_facts | set((('ATLOC', veh, loc), ('ATLOC', person, loc)))
vehloc, = single_unify(pre_facts, ('ATLOC', veh, None), ('ATLOC', person, None))
if vehloc is not None:
pre_facts |= set((('ATLOC', veh, vehloc), ('ATLOC', person, vehloc)))
post_facts -= set((('ATLOC', veh, vehloc), ('ATLOC', person, vehloc)))
pre_states[-1] = (objs, tuple(pre_facts))
post_state = (objs, tuple(post_facts))
return pre_states + [post_state]
"""
(:operator (!climb-in ?obj ?veh)
((atloc ?obj ?objloc) (atloc ?veh ?objloc) (fit-in ?obj ?veh))
((atloc ?obj ?objloc))
((atloc ?obj ?veh)))
"""
if task == '!CLIMB-IN':
obj, veh = op[1:]
post_facts = pre_facts | set((('ATLOC', obj, veh),))
objloc, = single_unify(pre_facts, ('ATLOC', obj, None), ('ATLOC', veh, None))
if objloc is not None:
pre_facts.add(('ATLOC', obj, objloc))
post_facts.discard(('ATLOC', obj, objloc))
pre_states[-1] = (objs, tuple(pre_facts))
post_state = (objs, tuple(post_facts))
return pre_states + [post_state]
"""
(:operator (!climb-out ?obj ?veh)
((atloc ?obj ?veh) (atloc ?veh ?vehloc))
((atloc ?obj ?veh))
((atloc ?obj ?vehloc)))
"""
if task == '!CLIMB-OUT':
obj, veh = op[1:]
pre_facts.add(('ATLOC', obj, veh))
post_facts = pre_facts - set((('ATLOC', obj, veh),))
vehloc, = single_unify(pre_facts, ('ATLOC', veh, None))
if vehloc is not None:
post_facts.add(('ATLOC', obj, vehloc))
pre_states[-1] = (objs, tuple(pre_facts))
post_state = (objs, tuple(post_facts))
return pre_states + [post_state]
"""
(:operator (!load ?person ?obj ?veh)
((atloc ?obj ?objloc)
(atloc ?veh ?objloc)
(atloc ?person ?objloc)
(fit-in ?obj ?veh))
((atloc ?obj ?objloc))
((atloc ?obj ?veh)))
"""
if task == '!LOAD':
person, obj, veh = op[1:]
for s in range(len(pre_states)):
pre_states[s] = (objs, tuple(set(pre_states[s][1]) | set((('FIT-IN', obj, veh),))))
post_facts = set(pre_facts) | set((('ATLOC', obj, veh),))
objloc, = single_unify(pre_facts, *[('ATLOC', param, None) for param in op[1:]])
if objloc is not None:
pre_facts |= set(tuple(('ATLOC', param, objloc) for param in op[1:]))
post_facts.discard(('ATLOC', obj, objloc))
pre_states[-1] = (objs, tuple(pre_facts))
post_state = (objs, tuple(post_facts))
return pre_states + [post_state]
"""
(:operator (!unload ?person ?obj ?veh)
((atloc ?obj ?veh) (atloc ?veh ?vehloc) (atloc ?person ?vehloc))
((atloc ?obj ?veh))
((atloc ?obj ?vehloc)))
"""
if task == '!UNLOAD':
person, obj, veh = op[1:]
pre_facts |= set((('ATLOC', obj, veh),))
post_facts = set(pre_facts) - set((('ATLOC', obj, veh),))
vehloc, = single_unify(pre_facts, *[('ATLOC', param, None) for param in [veh, person]])
if vehloc is not None:
pre_facts |= set(tuple(('ATLOC', param, vehloc) for param in [veh, person]))
post_facts.add(('ATLOC', obj, vehloc))
pre_states[-1] = (objs, tuple(pre_facts))
post_state = (objs, tuple(post_facts))
return pre_states + [post_state]
"""
(:operator (!treat ?emt ?person)
((atloc ?person ?ploc) (atloc ?emt ?ploc))
()
())
"""
if task == '!TREAT':
emt, person = op[1:]
ploc, = single_unify(pre_facts, *[('ATLOC', param, None) for param in [emt, person]])
if ploc is not None:
pre_facts |= set(tuple(('ATLOC', param, ploc) for param in [emt, person]))
post_facts = set(pre_facts)
pre_states[-1] = (objs, tuple(pre_facts))
post_state = (objs, tuple(post_facts))
return pre_states + [post_state]
"""
(:operator (!treat-in-hospital ?person ?hospital)
((atloc ?person ?hospital))
()
())
"""
    if task == '!TREAT-IN-HOSPITAL':
        # primitive operator, so the task name carries the '!' prefix like '!TREAT'
        pre_facts |= set((('ATLOC', op[1], op[2]),))
post_facts = set(pre_facts)
pre_states[-1] = (objs, tuple(pre_facts))
post_state = (objs, tuple(post_facts))
return pre_states + [post_state]
"""
;;set-up-shelter sets up a shelter at a certain location
(:method (set-up-shelter ?loc)
normal
((shelter-leader ?leader)
(not (assigned-to-shelter ?leader ?other-shelter))
(food ?food))
((get-electricity ?loc) (get-to ?leader ?loc) (get-to ?food ?loc)))
"""
if task == 'SET-UP-SHELTER': return pre_states # could do better with tree?
"""
;;fix-water-main
(:method (fix-water-main ?from ?to)
normal
()
((shut-off-water ?from ?to)
(repair-pipe ?from ?to)
(turn-on-water ?from ?to)))
"""
if task == 'FIX-WATER-MAIN': return pre_states # no information
"""
;; clear-road-hazard - cleans up a hazardous spill
(:method (clear-road-hazard ?from ?to)
normal
()
((block-road ?from ?to)
(clean-up-hazard ?from ?to)
(unblock-road ?from ?to)))
"""
if task == 'CLEAR-ROAD-HAZARD': return pre_states # no information
"""
;; clear-road-wreck - gets a wreck out of the road
(:method (clear-road-wreck ?from ?to)
normal
()
((set-up-cones ?from ?to)
(clear-wreck ?from ?to)
(take-down-cones ?from ?to)))
"""
if task == 'CLEAR-ROAD-WRECK': return pre_states # no information
"""
;; clear-road-tree
(:method (clear-road-tree ?from ?to) ;; clears a tree that's in the road
normal
((tree-blocking-road ?from ?to ?tree))
((set-up-cones ?from ?to)
(clear-tree ?tree)
(take-down-cones ?from ?to)))
"""
if task == 'CLEAR-ROAD-TREE': return pre_states # no information not already in subs
"""
;; plow-road
(:method (plow-road ?from ?to)
plow
((road-snowy ?from ?to)
(snowplow ?plow)
(atloc ?plow ?plowloc)
(plowdriver ?driver)
)
((get-to ?driver ?plowloc)
(!navegate-snowplow ?driver ?plow ?from) ;; must use nav-snowplow
;; since regular cars can't drive if snowy
(!engage-plow ?driver ?plow)
(!navegate-snowplow ?driver ?plow ?to)
(!disengage-plow ?driver ?plow)))
"""
if task == 'PLOW-ROAD': return pre_states # road-snowy worth it?
"""
;;quell-riot
(:method (quell-riot ?loc)
with-police
((in-town ?loc ?town)
(police-unit ?p1) (police-unit ?p2) (not (equal ?p1 ?p2)))
((declare-curfew ?town) (get-to ?p1 ?loc) (get-to ?p2 ?loc)
(!set-up-barricades ?p1) (!set-up-barricades ?p2)))
"""
if task == 'QUELL-RIOT': return pre_states #
"""
;;provide-temp-heat
(:method (provide-temp-heat ?person)
to-shelter
((person ?person) (shelter ?shelter))
((get-to ?person ?shelter)))
(:method (provide-temp-heat ?person)
local-electricity
((person ?person) (atloc ?person ?ploc))
((generate-temp-electricity ?ploc) (!turn-on-heat ?ploc)))
"""
if task == 'PROVIDE-TEMP-HEAT': return pre_states #
"""
;;fix-power-line
(:method (fix-power-line ?lineloc)
normal
((power-crew ?crew) (power-van ?van))
((get-to ?crew ?lineloc) (get-to ?van ?lineloc)
(repair-line ?crew ?lineloc)))
"""
if task == 'FIX-POWER-LINE': return pre_states #
"""
;;provide-medical-attention
(:method (provide-medical-attention ?person)
in-hospital
((hospital ?hosp) (has-condition ?person ?cond)
(not (hospital-doesnt-treat ?hosp ?cond)))
((get-to ?person ?hosp) (!treat-in-hospital ?person ?hosp)))
(:method (provide-medical-attention ?person)
simple-on-site
((has-condition ?person ?cond) (not (serious-condition ?cond)))
((emt-treat ?person)))
"""
if task == 'PROVIDE-MEDICAL-ATTENTION': return pre_states
"""
;;;;;;;;;;;;;;;;;;; subgoals ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; clean-up-hazard
(:method (clean-up-hazard ?from ?to)
very-hazardous ;; just call the feds
((hazard-seriousness ?from ?to very-hazardous))
((!call fema))
normal ;; we can take care of it
((hazard-team ?ht))
((get-to ?ht ?from) (!clean-hazard ?ht ?from ?to)))
"""
if task == 'CLEAN-UP-HAZARD':
# kludge: should only add if child is call fema (needs tree not just op)
fromloc, toloc = op[1:]
pre_states[-1] = (objs, tuple(set(pre_states[-1][1]) | set((('HAZARD-SERIOUSNESS', fromloc, toloc, 'VERY-HAZARDOUS'),))))
return pre_states
"""
;; block-road - blocks off a road
(:method (block-road ?from ?to)
normal
((police-unit ?police))
(:unordered (set-up-cones ?from ?to)
(get-to ?police ?from)))
"""
if task == 'BLOCK-ROAD': return pre_states #
"""
;; unblock-road - unblocks a road
(:method (unblock-road ?from ?to)
normal
()
((take-down-cones ?from ?to)))
"""
if task == 'UNBLOCK-ROAD': return pre_states #
"""
;; get-electricity provides electricity to a site (if not already there)
(:method (get-electricity ?loc)
already-has-electricity ;; do nothing
((not (no-electricity ?loc)))
()
no-electricity
()
((generate-temp-electricity ?loc))
)
"""
if task == 'GET-ELECTRICITY': return pre_states #
"""
;; repair-pipe
(:method (repair-pipe ?from ?to) ;; repairs a pipe at location
normal
((water-crew ?crew))
((get-to ?crew ?from)
(set-up-cones ?from ?to)
(open-hole ?from ?to)
(!replace-pipe ?crew ?from ?to)
(close-hole ?from ?to)
(take-down-cones ?from ?to)))
"""
if task == 'REPAIR-PIPE': return pre_states
"""
;; open-hole
(:method (open-hole ?from ?to) ;; opens a hole in the street
normal
((backhoe ?backhoe))
((get-to ?backhoe ?from)
(!dig ?backhoe ?from)))
"""
if task == 'OPEN-HOLE': return pre_states # want toloc but no way to get it
"""
;; close-hole
(:method (close-hole ?from ?to) ;; opens a hole in the street
normal
((backhoe ?backhoe))
((get-to ?backhoe ?from)
(!fill-in ?backhoe ?from)))
"""
if task == 'CLOSE-HOLE': return pre_states # want toloc but no way to get it
"""
;; set-up-cones
(:method (set-up-cones ?from ?to) ;; sets up orange cones at road
normal
((work-crew ?crew))
((get-to ?crew ?from) (!place-cones ?crew)))
"""
if task == 'SET-UP-CONES': return pre_states # want toloc but no way to get it
"""
;; take-down-cones
(:method (take-down-cones ?from ?to) ;; takes down cones
normal
((work-crew ?crew))
((get-to ?crew ?from) (!pickup-cones ?crew)))
"""
if task == 'TAKE-DOWN-CONES': return pre_states # want toloc but no way to get it
"""
;; clear-wreck
(:method (clear-wreck ?from ?to) ;; gets rid of a wreck in any loc
normal
((wrecked-vehicle ?from ?to ?veh) (garbage-dump ?dump))
((tow-to ?veh ?dump)))
"""
if task == 'CLEAR-WRECK':
# kludge - can't get ?veh, use None as placeholder (it's never used by causes function)
fromloc, toloc = op[1:]
pre_states[-1] = (objs, tuple(pre_facts | set((('WRECKED-VEHICLE', fromloc, toloc, None),))))
return pre_states
"""
;; tow-to - tows a vehicle somewhere
(:method (tow-to ?veh ?to)
normal
((tow-truck ?ttruck) (vehicle ?veh) (atloc ?veh ?vehloc))
((get-to ?ttruck ?vehloc)
(!hook-to-tow-truck ?ttruck ?veh)
(get-to ?ttruck ?to)
(!unhook-from-tow-truck ?ttruck ?veh)))
"""
if task == 'TOW-TO': return pre_states #
"""
;; clear-tree
(:method (clear-tree ?tree) ;; this gets rid of a tree in any loc
normal
((tree-crew ?tcrew) (tree ?tree)
(atloc ?tree ?treeloc))
((get-to ?tcrew ?treeloc) (!cut-tree ?tcrew ?tree)
(remove-blockage ?tree)))
"""
if task == 'CLEAR-TREE': return pre_states #
"""
;; remove-blockage
(:method (remove-blockage ?stuff)
move-to-side-of-street
((work-crew ?crew) (atloc ?stuff ?loc))
((get-to ?crew ?loc)
(!carry-blockage-out-of-way ?crew ?stuff)))
(:method (remove-blockage ?stuff)
carry-away
((garbage-dump ?dump))
((get-to ?stuff ?dump)))
"""
if task == 'REMOVE-BLOCKAGE': return pre_states #
"""
;; declare-curfew
(:method (declare-curfew ?town)
normal
()
(:unordered (!call EBS) (!call police-chief)))
"""
    if task == 'DECLARE-CURFEW': return pre_states
"""
;; generate-temp-electricity
(:method (generate-temp-electricity ?loc)
with-generator
((generator ?gen))
((make-full-fuel ?gen) (get-to ?gen ?loc) (!hook-up ?gen ?loc)
(!turn-on ?gen)))
"""
if task == 'GENERATE-TEMP-ELECTRICITY': return pre_states #
"""
;; make-full-fuel - makes sure arg1 is full of fuel
(:method (make-full-fuel ?gen)
with-gas-can
((gas-can ?gc) (atloc ?gen ?genloc) (service-station ?ss))
((get-to ?gc ?ss) (add-fuel ?ss ?gc) (get-to ?gc ?genloc)
(!pour-into ?gc ?gen)))
(:method (make-full-fuel ?gen)
at-service-station
((service-station ?ss))
((get-to ?gen ?ss) (add-fuel ?ss ?gen)))
"""
if task == 'MAKE-FULL-FUEL': return pre_states #
"""
;; add-fuel (at service-station)
(:method (add-fuel ?ss ?obj)
normal
()
(:unordered (!pay ?ss) (!pump-gas-into ?ss ?obj)))
"""
if task == 'ADD-FUEL': return pre_states
"""
;; repair-line
(:method (repair-line ?crew ?lineloc)
with-tree
((tree ?tree) (atloc ?tree ?lineloc)
(atloc ?crew ?lineloc))
((shut-off-power ?crew ?lineloc)
(:unordered (clear-tree ?tree)
(!remove-wire ?crew ?lineloc))
(!string-wire ?crew ?lineloc) (turn-on-power ?crew ?lineloc))
without-tree
((atloc ?crew ?lineloc))
((shut-off-power ?crew ?lineloc)
(!remove-wire ?crew ?lineloc)
(!string-wire ?crew ?lineloc) (turn-on-power ?crew ?lineloc)))
"""
if task == 'REPAIR-LINE': return pre_states #
"""
;; shut-off-power
(:method (shut-off-power ?crew ?loc)
normal
((in-town ?loc ?town) (powerco-of ?town ?powerco))
(!call ?powerco))
"""
if task == 'SHUT-OFF-POWER': return pre_states # narrow loc to town through fixed state in causes
"""
;; turn-on-power
(:method (turn-on-power ?crew ?loc)
normal
((in-town ?loc ?town) (powerco-of ?town ?powerco))
(!call ?powerco))
"""
if task == 'TURN-ON-POWER': return pre_states # narrow loc to town through fixed state in causes
"""
;; shut-off-water
(:method (shut-off-water ?from ?to)
normal
((in-town ?from ?town) (waterco-of ?town ?waterco))
((!call ?waterco)))
"""
if task == 'SHUT-OFF-WATER': return pre_states # narrow loc to town through fixed state in causes
"""
;; turn-on-water
(:method (turn-on-water ?from ?to)
normal
((in-town ?from ?town) (waterco-of ?town ?waterco))
((!call ?waterco)))
"""
if task == 'TURN-ON-WATER': return pre_states # narrow loc to town through fixed state in causes
"""
;; emt-treat
(:method (emt-treat ?person)
emt
((emt-crew ?emt) (atloc ?person ?personloc))
((get-to ?emt ?personloc) (!treat ?emt ?person)))
"""
if task == 'EMT-TREAT': return pre_states
"""
;; stabilize
(:method (stabilize ?person)
emt
()
((emt-treat ?person)))
"""
if task == 'STABILIZE': return pre_states
"""
;; get-to
(:method (get-to ?obj ?place)
already-there
((atloc ?obj ?place))
())
(:method (get-to ?person ?place)
person-drives-themself
((not (atloc ?person ?place))
(person ?person) (vehicle ?veh) (atloc ?veh ?vehloc)
(atloc ?person ?vehloc))
((drive-to ?person ?veh ?place)))
(:method (get-to ?veh ?place)
vehicle-gets-driven
((not (atloc ?veh ?place))
(person ?person)
(vehicle ?veh) (atloc ?veh ?vehloc)
(atloc ?person ?vehloc)
)
((drive-to ?person ?veh ?place)))
(:method (get-to ?obj ?place)
as-cargo
((not (atloc ?obj ?place))
(vehicle ?veh)
(atloc ?obj ?objloc) (fit-in ?obj ?veh)
(not (non-ambulatory ?obj)))
((get-to ?veh ?objloc) (get-in ?obj ?veh) (get-to ?veh ?place)
(get-out ?obj ?veh))
with-ambulance ;; same as above, just with ambulance
((not (atloc ?obj ?place))
(atloc ?obj ?objloc) (ambulance ?veh) (fit-in ?obj ?veh)
)
((get-to ?veh ?objloc) (stabilize ?obj) (get-in ?obj ?veh)
(get-to ?veh ?place) (get-out ?obj ?veh))
)
"""
if task == 'GET-TO': return pre_states # all info in subs except for nop case
"""
(:method (drive-to ?person ?veh ?loc)
normal
((person ?person) (vehicle ?veh) (atloc ?veh ?vehloc)
(atloc ?person ?vehloc) (can-drive ?person ?veh))
((!navegate-vehicle ?person ?veh ?loc)))
"""
if task == 'DRIVE-TO': return pre_states # all info in subs
"""
(:method (get-in ?obj ?veh)
ambulatory-person
((atloc ?obj ?objloc) (atloc ?veh ?objloc)
(person ?obj) (not (non-ambulatory ?obj)))
(!climb-in ?obj ?veh)
load-in
((atloc ?obj ?objloc) (atloc ?veh ?objloc)
(person ?person) (can-lift ?person ?obj))
((get-to ?person ?objloc) (!load ?person ?obj ?veh)))
"""
if task == 'GET-IN': return pre_states # all info in subs
"""
(:method (get-out ?obj ?veh)
ambulatory-person
((person ?obj) (not (non-ambulatory ?obj)))
(!climb-out ?obj ?veh)
unload
((atloc ?veh ?vehloc) (person ?person) (can-lift ?person ?obj))
((get-to ?person ?vehloc) (!unload ?person ?obj ?veh)))
"""
if task == 'GET-OUT': return pre_states # all info in subs
# remaining operators (all primitive, empty preconds/adds/deletes)
return pre_states + pre_states[-1:]
def extract_leaves(tree):
"""
Extract the leaves of a plan decomposition tree in the Monroe corpus.
Inputs:
tree: the plan tree, of the form (node, subtree1, subtree2, ...)
node is a grounded operator of the form (name, arg1, arg2, ...)
Outputs:
leaves[i]: The i^{th} leaf, also a grounded operator of the form (name, arg1, arg2, ...)
"""
if type(tree[0])==str: # base case, "tree" is a node
return (tree,)
else: # recursive case, tree is a tree, recurse on subtrees
return reduce(lambda x,y: x+y, map(extract_leaves, tree[1:]))
def extract_objects(tree):
"""
Extract all "objects," the arguments occurring in any operator in a plan decomposition tree.
This omits static objects always present in every plan of the corpus (locations, etc)
Inputs:
tree: the plan tree, as in extract_leaves
Outputs:
objs: the set of all distinct objects occurring in the tree
"""
objs = set()
if type(tree[0])==str: # base case, "tree" is a node
objs |= set(tree[1:])
else: # recursive case
objs |= set(tree[0][1:])
for sub in tree[1:]:
objs |= extract_objects(sub)
objs -= set(locs) | set(watercos) | set(powercos) # remove static objects
return objs
def extract_children(tree):
"""
Extract the immediate child nodes of a tree root
Inputs:
tree: a plan decomposition tree
Outputs:
children: the immediate child nodes of root (with their own subtrees omitted)
"""
return tuple(child if type(child[0])==str else child[0] for child in tree[1:])
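# Hedged example of the three helpers above on a hypothetical two-level tree
# (the node and argument names are made up for illustration, not corpus entries):
#   tree = (("GET-TO", "MAN1", "PARK5"),
#           (("DRIVE-TO", "MAN1", "TRUCK1", "PARK5"),
#            ("!NAVEGATE-VEHICLE", "MAN1", "TRUCK1", "PARK5")))
#   extract_leaves(tree)   -> (("!NAVEGATE-VEHICLE", "MAN1", "TRUCK1", "PARK5"),)
#   extract_children(tree) -> (("DRIVE-TO", "MAN1", "TRUCK1", "PARK5"),)
#   extract_objects(tree)  -> {"MAN1", "TRUCK1", "PARK5"} minus any static Monroe objects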
# def search_tree(tree):
# # used to rule out empty-case of get-to
# if type(tree[0]) != str:
# if tree[0][0]=='GET-TO' and len(tree)==1: return True
# return any([search_tree(sub) for sub in tree[1:]])
# return False
def populate_tree_states(leading_states, next_tree):
"""
Uses populate_states_from_op on every operator in a plan tree.
Implementation is recursive; should be called at the top level with:
leading_states = [(objs, ())]
next_tree = the full plan tree
Inputs:
leading_states: a list of states leading up to next_tree
next_tree: the next plan tree of operators being applied
Outputs:
states: leading states with new facts added, and new states resulting from the next_tree
"""
if type(next_tree[0])==str: # base case, "tree" is primitive operator
states = populate_states_from_op(leading_states, next_tree) # = pre_states + [post_state]
else: # recursive case, process each op in next_tree, starting with root
states = populate_states_from_op(leading_states, next_tree[0]) # = pre_states
for sub in next_tree[1:]:
states = populate_tree_states(states, sub) # = pre_states + post_states
return states
def preprocess_plan(plan_tree):
"""
Preprocess a single plan tree from the corpus, populating intermediate states.
The returned sequences contain elements of the form (state, task_name, (arg1, arg2, ...))
Inputs:
plan_tree: a plan tree from the monroe corpus, in python tuple format (as written by parse_monroe).
Outputs:
u: the top-level ground-truth (singleton) sequence
v: the immediate child sequence of u (ground-truth for modified Monroe experiments)
w: the bottom-level observed actions
"""
# pull out data
root = plan_tree[0]
children = extract_children(plan_tree)
objs = extract_objects(plan_tree)
actions = extract_leaves(plan_tree)
states = populate_tree_states([(tuple(objs), ())], plan_tree)
# recover the action indices covered by each child, so that the correct intermediate states are associated
indices = [0]
for subtree in plan_tree[1:]:
indices.append(indices[-1] + len(extract_leaves(subtree)))
# convert to (state, task, args) format
u = ((states[0], root[0], root[1:]),)
v = tuple((states[indices[k]], children[k][0], children[k][1:]) for k in range(len(children)))
w = tuple((states[i], actions[i][0], actions[i][1:]) for i in range(len(actions)))
return u, v, w
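# Sketch of the returned shapes (placeholder names, not real corpus entries),
# assuming a plan whose root has two children and n observed actions:
#   u == ((s0, root_name, root_args),)
#   v == ((s0, child1_name, child1_args), (s_j, child2_name, child2_args))
#   w == ((s0, a1_name, a1_args), ..., (s_{n-1}, an_name, an_args))
# where each s_k is an (objs, facts) state produced by populate_tree_states and
# s_j is the state just before the first action covered by the second child.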
if __name__ == "__main__":
# Parse Monroe lisp to python
print('Parsing lisp...')
parse_monroe()
# preprocess each plan tree
print('Preprocessing plan trees...')
from monroe5000 import corpus
corpus = tuple(preprocess_plan(plan_tree) for plan_tree in corpus)
# Write preprocessed corpus to file
print('Writing to file...')
corpus_file = open('monroe_corpus.py','w')
corpus_file.write('corpus = [')
for example in corpus:
corpus_file.write('%s,\n'%str(example))
corpus_file.write(']\n')
corpus_file.close()
| {
"content_hash": "4c656938b4743fcc3ede437d6078b5c1",
"timestamp": "",
"source": "github",
"line_count": 710,
"max_line_length": 129,
"avg_line_length": 36.36338028169014,
"alnum_prop": 0.5833526996668991,
"repo_name": "jhomble/electron435",
"id": "4116b0725714601af8ce591ed5866558fd5129e1",
"size": "25818",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "copct-master/monroe_corpus/monroe_preprocessing.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "774"
},
{
"name": "CSS",
"bytes": "465554"
},
{
"name": "HTML",
"bytes": "284682"
},
{
"name": "JavaScript",
"bytes": "815770"
},
{
"name": "Matlab",
"bytes": "5019"
},
{
"name": "Python",
"bytes": "29692657"
},
{
"name": "Shell",
"bytes": "565"
}
],
"symlink_target": ""
} |
from django.core.management.base import BaseCommand, CommandError
from kstore.models import BasicConfiguration
class Command(BaseCommand):
    '''
    Sets up the initial configuration, which should later
    be changed to suit the company.
    '''
    def handle(self, *args, **options):
        try:
            config = BasicConfiguration.objects.all().first()
            self.stdout.write("Basic configuration already exists as " + config.company_name)
        except Exception:
            # .first() returns None when no configuration exists yet, so the
            # attribute access above raises and we fall through to create one.
            self.stdout.write("Setting up the initial configuration.")
            bc = BasicConfiguration(company_name="Company Name")
            bc.save()
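# Usage sketch (assumes the app is in INSTALLED_APPS; the command name comes
# from this file's location under management/commands/):
#   python manage.py basic_config
# Running it twice is safe: the second run only reports the existing company name.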
| {
"content_hash": "c7ddb4ea47a3f43783be56ca833cdcdf",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 89,
"avg_line_length": 36.77777777777778,
"alnum_prop": 0.6676737160120846,
"repo_name": "KeoH/django-keoh-kstore",
"id": "dca6aa01394177d619e9a72475b9a80f042c93eb",
"size": "678",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kstore/management/commands/basic_config.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "HTML",
"bytes": "14773"
},
{
"name": "Python",
"bytes": "24012"
}
],
"symlink_target": ""
} |
from metaworld.envs.mujoco.sawyer_xyz.v1.sawyer_assembly_peg import SawyerNutAssemblyEnv
from metaworld.envs.mujoco.sawyer_xyz.v1.sawyer_basketball import SawyerBasketballEnv
from metaworld.envs.mujoco.sawyer_xyz.v1.sawyer_bin_picking import SawyerBinPickingEnv
from metaworld.envs.mujoco.sawyer_xyz.v1.sawyer_box_close import SawyerBoxCloseEnv
from metaworld.envs.mujoco.sawyer_xyz.v1.sawyer_button_press import SawyerButtonPressEnv
from metaworld.envs.mujoco.sawyer_xyz.v1.sawyer_button_press_topdown import SawyerButtonPressTopdownEnv
from metaworld.envs.mujoco.sawyer_xyz.v1.sawyer_button_press_topdown_wall import SawyerButtonPressTopdownWallEnv
from metaworld.envs.mujoco.sawyer_xyz.v1.sawyer_button_press_wall import SawyerButtonPressWallEnv
from metaworld.envs.mujoco.sawyer_xyz.v1.sawyer_coffee_button import SawyerCoffeeButtonEnv
from metaworld.envs.mujoco.sawyer_xyz.v1.sawyer_coffee_pull import SawyerCoffeePullEnv
from metaworld.envs.mujoco.sawyer_xyz.v1.sawyer_coffee_push import SawyerCoffeePushEnv
from metaworld.envs.mujoco.sawyer_xyz.v1.sawyer_dial_turn import SawyerDialTurnEnv
from metaworld.envs.mujoco.sawyer_xyz.v1.sawyer_disassemble_peg import SawyerNutDisassembleEnv
from metaworld.envs.mujoco.sawyer_xyz.v1.sawyer_door import SawyerDoorEnv
from metaworld.envs.mujoco.sawyer_xyz.v1.sawyer_door_close import SawyerDoorCloseEnv
from metaworld.envs.mujoco.sawyer_xyz.v1.sawyer_door_lock import SawyerDoorLockEnv
from metaworld.envs.mujoco.sawyer_xyz.v1.sawyer_door_unlock import SawyerDoorUnlockEnv
from metaworld.envs.mujoco.sawyer_xyz.v1.sawyer_drawer_close import SawyerDrawerCloseEnv
from metaworld.envs.mujoco.sawyer_xyz.v1.sawyer_drawer_open import SawyerDrawerOpenEnv
from metaworld.envs.mujoco.sawyer_xyz.v1.sawyer_faucet_close import SawyerFaucetCloseEnv
from metaworld.envs.mujoco.sawyer_xyz.v1.sawyer_faucet_open import SawyerFaucetOpenEnv
from metaworld.envs.mujoco.sawyer_xyz.v1.sawyer_hammer import SawyerHammerEnv
from metaworld.envs.mujoco.sawyer_xyz.v1.sawyer_hand_insert import SawyerHandInsertEnv
from metaworld.envs.mujoco.sawyer_xyz.v1.sawyer_handle_press import SawyerHandlePressEnv
from metaworld.envs.mujoco.sawyer_xyz.v1.sawyer_handle_press_side import SawyerHandlePressSideEnv
from metaworld.envs.mujoco.sawyer_xyz.v1.sawyer_handle_pull import SawyerHandlePullEnv
from metaworld.envs.mujoco.sawyer_xyz.v1.sawyer_handle_pull_side import SawyerHandlePullSideEnv
from metaworld.envs.mujoco.sawyer_xyz.v1.sawyer_lever_pull import SawyerLeverPullEnv
from metaworld.envs.mujoco.sawyer_xyz.v1.sawyer_peg_insertion_side import SawyerPegInsertionSideEnv
from metaworld.envs.mujoco.sawyer_xyz.v1.sawyer_peg_unplug_side import SawyerPegUnplugSideEnv
from metaworld.envs.mujoco.sawyer_xyz.v1.sawyer_pick_out_of_hole import SawyerPickOutOfHoleEnv
from metaworld.envs.mujoco.sawyer_xyz.v1.sawyer_plate_slide import SawyerPlateSlideEnv
from metaworld.envs.mujoco.sawyer_xyz.v1.sawyer_plate_slide_back import SawyerPlateSlideBackEnv
from metaworld.envs.mujoco.sawyer_xyz.v1.sawyer_plate_slide_back_side import SawyerPlateSlideBackSideEnv
from metaworld.envs.mujoco.sawyer_xyz.v1.sawyer_plate_slide_side import SawyerPlateSlideSideEnv
from metaworld.envs.mujoco.sawyer_xyz.v1.sawyer_push_back import SawyerPushBackEnv
from metaworld.envs.mujoco.sawyer_xyz.v1.sawyer_reach_push_pick_place import SawyerReachPushPickPlaceEnv
from metaworld.envs.mujoco.sawyer_xyz.v1.sawyer_reach_push_pick_place_wall import SawyerReachPushPickPlaceWallEnv
from metaworld.envs.mujoco.sawyer_xyz.v1.sawyer_shelf_place import SawyerShelfPlaceEnv
from metaworld.envs.mujoco.sawyer_xyz.v1.sawyer_soccer import SawyerSoccerEnv
from metaworld.envs.mujoco.sawyer_xyz.v1.sawyer_stick_pull import SawyerStickPullEnv
from metaworld.envs.mujoco.sawyer_xyz.v1.sawyer_stick_push import SawyerStickPushEnv
from metaworld.envs.mujoco.sawyer_xyz.v1.sawyer_sweep import SawyerSweepEnv
from metaworld.envs.mujoco.sawyer_xyz.v1.sawyer_sweep_into_goal import SawyerSweepIntoGoalEnv
from metaworld.envs.mujoco.sawyer_xyz.v1.sawyer_window_close import SawyerWindowCloseEnv
from metaworld.envs.mujoco.sawyer_xyz.v1.sawyer_window_open import SawyerWindowOpenEnv
__all__ = [
'SawyerNutAssemblyEnv',
'SawyerBasketballEnv',
'SawyerBinPickingEnv',
'SawyerBoxCloseEnv',
'SawyerButtonPressEnv',
'SawyerButtonPressTopdownEnv',
'SawyerButtonPressTopdownWallEnv',
'SawyerButtonPressWallEnv',
'SawyerCoffeeButtonEnv',
'SawyerCoffeePullEnv',
'SawyerCoffeePushEnv',
'SawyerDialTurnEnv',
'SawyerNutDisassembleEnv',
'SawyerDoorEnv',
'SawyerDoorCloseEnv',
'SawyerDoorLockEnv',
'SawyerDoorUnlockEnv',
'SawyerDrawerCloseEnv',
'SawyerDrawerOpenEnv',
'SawyerFaucetCloseEnv',
'SawyerFaucetOpenEnv',
'SawyerHammerEnv',
'SawyerHandInsertEnv',
'SawyerHandlePressEnv',
'SawyerHandlePressSideEnv',
'SawyerHandlePullEnv',
'SawyerHandlePullSideEnv',
'SawyerLeverPullEnv',
'SawyerPegInsertionSideEnv',
'SawyerPegUnplugSideEnv',
'SawyerPickOutOfHoleEnv',
'SawyerPlateSlideEnv',
'SawyerPlateSlideBackEnv',
'SawyerPlateSlideBackSideEnv',
'SawyerPlateSlideSideEnv',
'SawyerPushBackEnv',
'SawyerReachPushPickPlaceEnv',
'SawyerReachPushPickPlaceWallEnv',
'SawyerShelfPlaceEnv',
'SawyerSoccerEnv',
'SawyerStickPullEnv',
'SawyerStickPushEnv',
'SawyerSweepEnv',
'SawyerSweepIntoGoalEnv',
'SawyerWindowCloseEnv',
'SawyerWindowOpenEnv',
]
| {
"content_hash": "98f4bcf2c4c639720e8c1dab1d7f39e2",
"timestamp": "",
"source": "github",
"line_count": 96,
"max_line_length": 113,
"avg_line_length": 57.010416666666664,
"alnum_prop": 0.8267860405627626,
"repo_name": "rlworkgroup/metaworld",
"id": "d39810f0d802b384701d741e74f9e65f7034301a",
"size": "5473",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "metaworld/envs/mujoco/sawyer_xyz/v1/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "379"
},
{
"name": "Jupyter Notebook",
"bytes": "340927"
},
{
"name": "Makefile",
"bytes": "1853"
},
{
"name": "Python",
"bytes": "820606"
},
{
"name": "Shell",
"bytes": "810"
}
],
"symlink_target": ""
} |
def extractBittercoffeetranslationsCom(item):
'''
Parser for 'bittercoffeetranslations.com'
'''
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or "preview" in item['title'].lower():
return None
tagmap = [
('PRC', 'PRC', 'translated'),
('Loiterous', 'Loiterous', 'oel'),
]
for tagname, name, tl_type in tagmap:
if tagname in item['tags']:
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False
| {
"content_hash": "b1a32121b39ef5c39a60f64d71183c1b",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 104,
"avg_line_length": 27.047619047619047,
"alnum_prop": 0.6426056338028169,
"repo_name": "fake-name/ReadableWebProxy",
"id": "b611587d0a0ae8051710b93fca74ceb4fa374e5b",
"size": "569",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "WebMirror/management/rss_parser_funcs/feed_parse_extractBittercoffeetranslationsCom.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "105811"
},
{
"name": "Dockerfile",
"bytes": "1178"
},
{
"name": "HTML",
"bytes": "119737"
},
{
"name": "JavaScript",
"bytes": "3006524"
},
{
"name": "Jupyter Notebook",
"bytes": "148075"
},
{
"name": "Mako",
"bytes": "1454"
},
{
"name": "Python",
"bytes": "5264346"
},
{
"name": "Shell",
"bytes": "1059"
}
],
"symlink_target": ""
} |
"""This example gets all LocationCriterion.
The LoadFromStorage method is pulling credentials and properties from a
"googleads.yaml" file. By default, it looks for this file in your home
directory. For more information, see the "Caching authentication information"
section of our README.
Tags: LocationCriterionService.get
"""
__author__ = ('[email protected] (Kevin Winter)'
'Joseph DiLallo')
from googleads import adwords
def GetLocationString(location):
return '%s (%s)' % (location['locationName'], location['displayType']
if 'displayType' in location else None)
def main(client):
# Initialize appropriate service.
location_criterion_service = client.GetService(
'LocationCriterionService', version='v201502')
location_names = ['Paris', 'Quebec', 'Spain', 'Deutchland']
# Create the selector.
selector = {
'fields': ['Id', 'LocationName', 'DisplayType', 'CanonicalName',
'ParentLocations', 'Reach', 'TargetingStatus'],
'predicates': [{
'field': 'LocationName',
'operator': 'IN',
'values': location_names
}, {
'field': 'Locale',
'operator': 'EQUALS',
'values': ['en']
}]
}
# Make the get request.
location_criteria = location_criterion_service.get(selector)
# Display the resulting location criteria.
for location_criterion in location_criteria:
parent_string = ''
if ('parentLocations' in location_criterion['location']
and location_criterion['location']['parentLocations']):
      parent_string = ', '.join([GetLocationString(parent) for parent in
location_criterion['location']
['parentLocations']])
print ('The search term \'%s\' returned the location \'%s\' of type \'%s\''
' with parent locations \'%s\', reach \'%s\' and id \'%s\' (%s)'
% (location_criterion['searchTerm'],
location_criterion['location']['locationName'],
location_criterion['location']['displayType'], parent_string,
location_criterion['reach']
if 'reach' in location_criterion else None,
location_criterion['location']['id'],
location_criterion['location']['targetingStatus']))
if __name__ == '__main__':
# Initialize client object.
adwords_client = adwords.AdWordsClient.LoadFromStorage()
main(adwords_client)
| {
"content_hash": "8417eb7b74acb31ed5c22f20e0c60f62",
"timestamp": "",
"source": "github",
"line_count": 70,
"max_line_length": 79,
"avg_line_length": 35.285714285714285,
"alnum_prop": 0.6222672064777328,
"repo_name": "coxmediagroup/googleads-python-lib",
"id": "761ddb1e2d1f611f6476f15fbde77bace29741e7",
"size": "3088",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "examples/adwords/v201502/targeting/lookup_location.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "492"
},
{
"name": "HTML",
"bytes": "8336"
},
{
"name": "JavaScript",
"bytes": "504"
},
{
"name": "Python",
"bytes": "2535137"
}
],
"symlink_target": ""
} |
import os
from jenkins_jobs.modules import publishers
from tests import base
class TestCaseModulePublishers(base.BaseScenariosTestCase):
fixtures_path = os.path.join(os.path.dirname(__file__), 'fixtures')
scenarios = base.get_scenarios(fixtures_path)
klass = publishers.Publishers
| {
"content_hash": "128602365f963bfa03492ebecc2efa19",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 71,
"avg_line_length": 29.6,
"alnum_prop": 0.7702702702702703,
"repo_name": "onshape/jenkins-job-builder",
"id": "e44a4dbaaa6bea04e58ce28248f9d13f6f682776",
"size": "985",
"binary": false,
"copies": "11",
"ref": "refs/heads/master",
"path": "tests/publishers/test_publishers.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "246"
},
{
"name": "C++",
"bytes": "197"
},
{
"name": "PHP",
"bytes": "1186"
},
{
"name": "Python",
"bytes": "1091900"
},
{
"name": "Shell",
"bytes": "1344"
},
{
"name": "SourcePawn",
"bytes": "16"
}
],
"symlink_target": ""
} |
import json
from twisted.trial import unittest
from twisted.internet import defer
from twisted.application import service
from ..servers.relay import RelayServer
from ..twisted.transcribe import SymmetricWormhole, UsageError
from ..twisted.util import allocate_ports
from .. import __version__
#from twisted.python import log
#import sys
#log.startLogging(sys.stdout)
class Basic(unittest.TestCase):
def setUp(self):
self.sp = service.MultiService()
self.sp.startService()
d = allocate_ports()
def _got_ports(ports):
relayport, transitport = ports
s = RelayServer("tcp:%d:interface=127.0.0.1" % relayport,
"tcp:%s:interface=127.0.0.1" % transitport,
__version__)
s.setServiceParent(self.sp)
self.relayurl = "http://127.0.0.1:%d/wormhole-relay/" % relayport
self.transit = "tcp:127.0.0.1:%d" % transitport
d.addCallback(_got_ports)
return d
def tearDown(self):
return self.sp.stopService()
def test_basic(self):
appid = "appid"
w1 = SymmetricWormhole(appid, self.relayurl)
w2 = SymmetricWormhole(appid, self.relayurl)
d = w1.get_code()
def _got_code(code):
w2.set_code(code)
d1 = w1.get_data("data1")
d2 = w2.get_data("data2")
return defer.DeferredList([d1,d2], fireOnOneErrback=False)
d.addCallback(_got_code)
def _done(dl):
((success1, dataX), (success2, dataY)) = dl
r1,r2 = dl
self.assertTrue(success1)
self.assertTrue(success2)
self.assertEqual(dataX, "data2")
self.assertEqual(dataY, "data1")
d.addCallback(_done)
return d
def test_fixed_code(self):
appid = "appid"
w1 = SymmetricWormhole(appid, self.relayurl)
w2 = SymmetricWormhole(appid, self.relayurl)
w1.set_code("123-purple-elephant")
w2.set_code("123-purple-elephant")
d1 = w1.get_data("data1")
d2 = w2.get_data("data2")
d = defer.DeferredList([d1,d2], fireOnOneErrback=False)
def _done(dl):
((success1, dataX), (success2, dataY)) = dl
r1,r2 = dl
self.assertTrue(success1)
self.assertTrue(success2)
self.assertEqual(dataX, "data2")
self.assertEqual(dataY, "data1")
d.addCallback(_done)
return d
def test_errors(self):
appid = "appid"
w1 = SymmetricWormhole(appid, self.relayurl)
self.assertRaises(UsageError, w1.get_verifier)
self.assertRaises(UsageError, w1.get_data, "data")
w1.set_code("123-purple-elephant")
self.assertRaises(UsageError, w1.set_code, "123-nope")
self.assertRaises(UsageError, w1.get_code)
w2 = SymmetricWormhole(appid, self.relayurl)
d = w2.get_code()
self.assertRaises(UsageError, w2.get_code)
return d
def test_serialize(self):
appid = "appid"
w1 = SymmetricWormhole(appid, self.relayurl)
self.assertRaises(UsageError, w1.serialize) # too early
w2 = SymmetricWormhole(appid, self.relayurl)
d = w1.get_code()
def _got_code(code):
self.assertRaises(UsageError, w2.serialize) # too early
w2.set_code(code)
w2.serialize() # ok
s = w1.serialize()
self.assertEqual(type(s), type(""))
unpacked = json.loads(s) # this is supposed to be JSON
self.assertEqual(type(unpacked), dict)
new_w1 = SymmetricWormhole.from_serialized(s)
d1 = new_w1.get_data("data1")
d2 = w2.get_data("data2")
return defer.DeferredList([d1,d2], fireOnOneErrback=False)
d.addCallback(_got_code)
def _done(dl):
((success1, dataX), (success2, dataY)) = dl
r1,r2 = dl
self.assertTrue(success1)
self.assertTrue(success2)
self.assertEqual(dataX, "data2")
self.assertEqual(dataY, "data1")
self.assertRaises(UsageError, w2.serialize) # too late
d.addCallback(_done)
return d
| {
"content_hash": "4222c170a9d8053f1dabb7f700b01ca6",
"timestamp": "",
"source": "github",
"line_count": 113,
"max_line_length": 77,
"avg_line_length": 37.64601769911504,
"alnum_prop": 0.5846262341325811,
"repo_name": "negativo/magic-wormhole",
"id": "fe016f4f948515d28e0f596b60020040e07d57e8",
"size": "4254",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "src/wormhole/test/test_twisted.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PLSQL",
"bytes": "602"
},
{
"name": "Python",
"bytes": "194834"
}
],
"symlink_target": ""
} |
from django.contrib import admin
# Register your models here.
from models import *
from dbtemplates.admin import TemplateAdmin, TemplateAdminForm
from dbtemplates.models import Template
from django_ace import AceWidget
class ProxyTemplateAdminForm(TemplateAdminForm):
def __init__(self, *args, **kwargs):
super(ProxyTemplateAdminForm, self).__init__(*args, **kwargs)
self.fields['content'].widget = AceWidget(mode='html', theme='monokai', width="1000px", height="500px")
class MyTemplateAdmin(TemplateAdmin):
form = ProxyTemplateAdminForm
admin.site.unregister(Template)
admin.site.register(Template, MyTemplateAdmin)
class ProductsURLsAdmin(admin.ModelAdmin):
list_display = ('url',)
search_fields = ('url',)
admin.site.register(ProductsURLs, ProductsURLsAdmin)
| {
"content_hash": "f6149249be3a1b161d70dd23eb8cf041",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 111,
"avg_line_length": 33.375,
"alnum_prop": 0.7578027465667915,
"repo_name": "arbin/aloeveraofforever",
"id": "6d6813b55665f6ad1a02d2fd006bdb6c02e8daa8",
"size": "801",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "aloeveraofforever/aloeveraofforever/flp/admin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "521782"
},
{
"name": "JavaScript",
"bytes": "8101552"
},
{
"name": "Makefile",
"bytes": "5612"
},
{
"name": "PHP",
"bytes": "2199"
},
{
"name": "Python",
"bytes": "134593"
},
{
"name": "Ruby",
"bytes": "990"
},
{
"name": "Shell",
"bytes": "5120"
},
{
"name": "XSLT",
"bytes": "5122"
}
],
"symlink_target": ""
} |
import logging
import os
from wsgiref import simple_server
from oslo.config import cfg
from oslo_log import log
from paste import deploy
import pecan
from cloudkitty.api import config as api_config
from cloudkitty.api import hooks
from cloudkitty.api import middleware
from cloudkitty import rpc
from cloudkitty import storage
LOG = log.getLogger(__name__)
auth_opts = [
cfg.StrOpt('api_paste_config',
default="api_paste.ini",
help="Configuration file for WSGI definition of API."
),
cfg.StrOpt('auth_strategy',
choices=['noauth', 'keystone'],
default='keystone',
help=("The strategy to use for auth. Supports noauth and "
"keystone")),
]
api_opts = [
cfg.StrOpt('host_ip',
default="0.0.0.0",
help="Host serving the API."
),
cfg.IntOpt('port',
default=8888,
help="Host port serving the API."
),
]
CONF = cfg.CONF
CONF.register_opts(auth_opts)
CONF.register_opts(api_opts, group='api')
def get_pecan_config():
# Set up the pecan configuration
filename = api_config.__file__.replace('.pyc', '.py')
return pecan.configuration.conf_from_file(filename)
def setup_app(pecan_config=None, extra_hooks=None):
app_conf = get_pecan_config()
client = rpc.get_client()
storage_backend = storage.get_storage()
app_hooks = [
hooks.RPCHook(client),
hooks.StorageHook(storage_backend),
]
if CONF.auth_strategy == 'keystone':
app_hooks.append(hooks.ContextHook())
app = pecan.make_app(
app_conf.app.root,
static_root=app_conf.app.static_root,
template_path=app_conf.app.template_path,
debug=CONF.debug,
force_canonical=getattr(app_conf.app, 'force_canonical', True),
hooks=app_hooks,
guess_content_type_from_ext=False
)
if CONF.auth_strategy == 'keystone':
return middleware.AuthTokenMiddleware(app, dict(CONF),
app_conf.app.acl_public_routes)
else:
return app
def setup_wsgi():
cfg_file = cfg.CONF.api_paste_config
if not os.path.exists(cfg_file):
raise Exception('api_paste_config file not found')
return deploy.loadapp("config:" + cfg_file)
def build_server():
# Create the WSGI server and start it
host = CONF.api.host_ip
port = CONF.api.port
LOG.info('Starting server in PID %s' % os.getpid())
LOG.info("Configuration:")
cfg.CONF.log_opt_values(LOG, logging.INFO)
if host == '0.0.0.0':
LOG.info('serving on 0.0.0.0:%(sport)s, view at http://127.0.0.1:%'
'(vport)s' % {'sport': port, 'vport': port})
else:
LOG.info("serving on http://%(host)s:%(port)s" %
{'host': host, 'port': port})
server_cls = simple_server.WSGIServer
handler_cls = simple_server.WSGIRequestHandler
app = setup_app()
srv = simple_server.make_server(
host,
port,
app,
server_cls,
handler_cls)
return srv
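# Minimal usage sketch (assumption: invoked from a console-script entry point
# such as a cloudkitty-api command, after oslo.config / oslo.log have been
# initialised); the wsgiref server returned above is simply run forever:
#
#     srv = build_server()
#     srv.serve_forever()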
| {
"content_hash": "e6584d45307fb0b11fdefcd73a8f734e",
"timestamp": "",
"source": "github",
"line_count": 120,
"max_line_length": 77,
"avg_line_length": 26.333333333333332,
"alnum_prop": 0.5965189873417721,
"repo_name": "FNST-OpenStack/cloudkitty",
"id": "76407f764cc91bc39bf2a97ec547af1137d6a709",
"size": "3822",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cloudkitty/api/app.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "1730"
},
{
"name": "Python",
"bytes": "535850"
},
{
"name": "Shell",
"bytes": "11169"
}
],
"symlink_target": ""
} |
import os

from pymatgen.core.structure import Structure
from pymatgen.io.vasp.inputs import Incar

# QUEUE_SYSTEM and (assumed to be exported alongside it) VASP_TWOD_BIN come from
# the package namespace; utils is aliased as `utl` since the job-writing helpers
# below are referenced under that name.
from mpinterfaces import QUEUE_SYSTEM, VASP_TWOD_BIN
from mpinterfaces import utils as utl
from mpinterfaces.utils import get_magmom_string
from mpinterfaces.mat2d.stability import INCAR_DICT
__author__ = "Michael Ashton"
__copyright__ = "Copyright 2017, Henniggroup"
__maintainer__ = "Michael Ashton"
__email__ = "[email protected]"
__status__ = "Production"
__date__ = "March 3, 2017"
def run_major_axis_anisotropy_calculations(submit=True):
"""
Perform static calculations with the magnetic axis along
100, 010, and 001.
Args:
submit (bool): Whether or not to submit the job.
"""
if not os.path.isdir('MAE'):
os.mkdir('MAE')
os.chdir('MAE')
for d in ['100', '010', '001']:
if not os.path.isdir(d):
os.mkdir(d)
os.chdir(d)
os.system('cp ../CONTCAR POSCAR')
os.system('cp ../POTCAR .')
axis = [float(char) for char in d]
# Small positive number, see vasp manual
if d in ['001', '010']:
axis[0] = 0.00000001
else:
axis[1] = 0.00000001
        saxis = ' '.join(str(x) for x in axis)  # join() needs strings, not floats
incar_dict = INCAR_DICT
incar_dict.update({'EDIFF': 1e-8,
'GGA_COMPAT': False,
'ISMEAR': -5,
'LORBIT': 11,
'LSORBIT': True,
'LWAVE': False,
'LCHARG': False,
'LAECHG': False,
'MAGMOM': get_magmom_string(
Structure.from_file('POSCAR')
),
'SAXIS': saxis})
Incar.from_dict(incar_dict).write_file('INCAR')
        if QUEUE_SYSTEM == 'pbs':
            # `d` (the axis label) doubles as the job name; `directory` was undefined
            utl.write_pbs_runjob(d, 1, 16, '800mb', '6:00:00',
                                 VASP_TWOD_BIN)
            submission_command = 'qsub runjob'
        elif QUEUE_SYSTEM == 'slurm':
            utl.write_slurm_runjob(d, 16, '800mb', '6:00:00',
                                   VASP_TWOD_BIN)
            submission_command = 'sbatch runjob'
        if submit:
            os.system(submission_command)

        # return to the MAE directory before handling the next axis
        os.chdir('../')
def run_xy_anisotropy_calculations(resolution=10, submit=True):
"""
Perform static calculations with the magnetic axis along
several directions between 100 and 010.
Args:
resolution (int): step size between axes. The total
number of calculations will be 90 / `resolution`.
submit (bool): Whether or not to submit the job.
"""
pass
def run_xz_anisotropy_calculations(resolution=10, submit=True):
"""
Perform static calculations with the magnetic axis along
several directions between 100 and 001.
Args:
resolution (int): step size between axes. The total
number of calculations will be 90 / `resolution`.
submit (bool): Whether or not to submit the job.
"""
pass
| {
"content_hash": "3cab3039cdfc7d19b82cdc2c7b0acc05",
"timestamp": "",
"source": "github",
"line_count": 95,
"max_line_length": 70,
"avg_line_length": 31.126315789473683,
"alnum_prop": 0.5461616503212715,
"repo_name": "henniggroup/MPInterfaces",
"id": "3d8b624f07f59397798ce41b9fa750c2c339e832",
"size": "2957",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "mpinterfaces/mat2d/magnetism/startup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "573231"
},
{
"name": "Shell",
"bytes": "1931"
}
],
"symlink_target": ""
} |
__all__ = ["interface","vimui"]
| {
"content_hash": "5672310375519f4e4bfcef6e119365a9",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 31,
"avg_line_length": 32,
"alnum_prop": 0.53125,
"repo_name": "AndBicScadMedia/vdebug",
"id": "2f00741bb557a181400ae814c1e1f231fac1cc5f",
"size": "32",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "plugin/python/vdebug/ui/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Cucumber",
"bytes": "3993"
},
{
"name": "Python",
"bytes": "136120"
},
{
"name": "Ruby",
"bytes": "18137"
},
{
"name": "Shell",
"bytes": "2715"
},
{
"name": "VimL",
"bytes": "11733"
}
],
"symlink_target": ""
} |
from bs4 import BeautifulSoup
import requests
def get_team_info(soup):
container = soup.find_all('div', 'games-univ-mod3')[0]
data = {
'name': list(container.find_all('h3', 'team-name')[0].children)[0],
'abbr': list(container.find_all('h3', 'team-name')
[0].children)[1].string[1:-1],
'league': list(container.children)[1].li.a.strong.string,
'owner': container.find_all('li', 'per-info')[0].string
}
record = soup.find_all('div', 'games-univ-mod4')[0]
data['position'] = list(record.find_all('em')[0])[0][1:-1]
data['record'] = list(record.find_all('h4')[0].children)[1].strip()
opponent = soup.find_all('div', 'games-univ-mod5')[0]
data['opponent'] = (list(opponent.find_all('li', 'games-firstlist')[0]
.children)[2].attrs['title'])
return data
def scrape(league_id, team_id, year):
kwargs = {
'league': league_id,
'team': team_id,
'year': year
}
url = ('http://games.espn.go.com/ffl/clubhouse?leagueId={league}'
'&teamId={team}&seasonId={year}'.format(**kwargs))
content = requests.get(url).content
    soup = BeautifulSoup(content, 'html.parser')
return get_team_info(soup)
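# Hedged usage sketch (the league/team ids are made up):
#   info = scrape(league_id=123456, team_id=3, year=2014)
#   # -> {'name': ..., 'abbr': ..., 'league': ..., 'owner': ...,
#   #     'position': ..., 'record': ..., 'opponent': ...}
# The keys mirror what get_team_info() assembles above; the scrape depends on
# the 2014-era ESPN clubhouse markup staying the same.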
| {
"content_hash": "2f3a898d35b7ab545368e0a9a8d0f5a0",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 75,
"avg_line_length": 33.62162162162162,
"alnum_prop": 0.5787781350482315,
"repo_name": "pcsforeducation/fantasy",
"id": "c1fa2e8f547ece3030cb36e75c4a3094fa5c8dbc",
"size": "1244",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fantasy/team.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "9233"
}
],
"symlink_target": ""
} |
import abc
import datetime
import functools
import pprint
import sys
from python_utils import converters
from python_utils import types
from . import base
from . import utils
if types.TYPE_CHECKING:
from .bar import ProgressBar
MAX_DATE = datetime.date.max
MAX_TIME = datetime.time.max
MAX_DATETIME = datetime.datetime.max
def string_or_lambda(input_):
if isinstance(input_, str):
def render_input(progress, data, width):
return input_ % data
return render_input
else:
return input_
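# Illustrative behaviour of string_or_lambda (hypothetical values):
#   render = string_or_lambda('Value: %(value)s')
#   render(progress, {'value': 5}, 10)  ->  'Value: 5'
# A callable passed in is returned unchanged.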
def create_wrapper(wrapper):
'''Convert a wrapper tuple or format string to a format string
>>> create_wrapper('')
>>> print(create_wrapper('a{}b'))
a{}b
>>> print(create_wrapper(('a', 'b')))
a{}b
'''
if isinstance(wrapper, tuple) and len(wrapper) == 2:
a, b = wrapper
wrapper = (a or '') + '{}' + (b or '')
elif not wrapper:
return
if isinstance(wrapper, str):
assert '{}' in wrapper, 'Expected string with {} for formatting'
else:
raise RuntimeError('Pass either a begin/end string as a tuple or a'
' template string with {}')
return wrapper
def wrapper(function, wrapper):
'''Wrap the output of a function in a template string or a tuple with
begin/end strings
'''
wrapper = create_wrapper(wrapper)
if not wrapper:
return function
@functools.wraps(function)
def wrap(*args, **kwargs):
return wrapper.format(function(*args, **kwargs))
return wrap
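# Illustrative behaviour of wrapper() (hypothetical values):
#   wrapped = wrapper(lambda: 'bar', ('<', '>'))
#   wrapped()  ->  '<bar>'
# i.e. the begin/end tuple is first normalised by create_wrapper into '<{}>'.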
def create_marker(marker, wrap=None):
def _marker(progress, data, width):
if progress.max_value is not base.UnknownLength \
and progress.max_value > 0:
length = int(progress.value / progress.max_value * width)
return (marker * length)
else:
return marker
if isinstance(marker, str):
marker = converters.to_unicode(marker)
assert utils.len_color(marker) == 1, \
'Markers are required to be 1 char'
return wrapper(_marker, wrap)
else:
return wrapper(marker, wrap)
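# Illustrative behaviour of create_marker() (hypothetical progress values):
# with marker '#', progress.value == 3, progress.max_value == 10 and width 20,
# the returned callable yields '#' * int(3 / 10 * 20) == '######'; with an
# unknown max_value it simply returns the bare marker.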
class FormatWidgetMixin(object):
'''Mixin to format widgets using a formatstring
Variables available:
- max_value: The maximum value (can be None with iterators)
- value: The current value
- total_seconds_elapsed: The seconds since the bar started
- seconds_elapsed: The seconds since the bar started modulo 60
- minutes_elapsed: The minutes since the bar started modulo 60
- hours_elapsed: The hours since the bar started modulo 24
    - days_elapsed: The days since the bar started
- time_elapsed: Shortcut for HH:MM:SS time since the bar started including
days
- percentage: Percentage as a float
'''
required_values = []
def __init__(self, format, new_style=False, **kwargs):
self.new_style = new_style
self.format = format
def get_format(self, progress, data, format=None):
return format or self.format
def __call__(self, progress, data, format=None):
'''Formats the widget into a string'''
format = self.get_format(progress, data, format)
try:
if self.new_style:
return format.format(**data)
else:
return format % data
except (TypeError, KeyError):
print('Error while formatting %r' % format, file=sys.stderr)
pprint.pprint(data, stream=sys.stderr)
raise
class WidthWidgetMixin(object):
    '''Mixin to make sure widgets are only visible if the screen is within a
    specified size range so the progressbar fits on both large and small
    screens.
Variables available:
- min_width: Only display the widget if at least `min_width` is left
- max_width: Only display the widget if at most `max_width` is left
>>> class Progress(object):
... term_width = 0
>>> WidthWidgetMixin(5, 10).check_size(Progress)
False
>>> Progress.term_width = 5
>>> WidthWidgetMixin(5, 10).check_size(Progress)
True
>>> Progress.term_width = 10
>>> WidthWidgetMixin(5, 10).check_size(Progress)
True
>>> Progress.term_width = 11
>>> WidthWidgetMixin(5, 10).check_size(Progress)
False
'''
def __init__(self, min_width=None, max_width=None, **kwargs):
self.min_width = min_width
self.max_width = max_width
def check_size(self, progress: 'ProgressBar'):
if self.min_width and self.min_width > progress.term_width:
return False
elif self.max_width and self.max_width < progress.term_width:
return False
else:
return True
class WidgetBase(WidthWidgetMixin):
__metaclass__ = abc.ABCMeta
'''The base class for all widgets
The ProgressBar will call the widget's update value when the widget should
be updated. The widget's size may change between calls, but the widget may
display incorrectly if the size changes drastically and repeatedly.
The boolean INTERVAL informs the ProgressBar that it should be
updated more often because it is time sensitive.
The widgets are only visible if the screen is within a
specified size range so the progressbar fits on both large and small
screens.
WARNING: Widgets can be shared between multiple progressbars so any state
information specific to a progressbar should be stored within the
progressbar instead of the widget.
Variables available:
- min_width: Only display the widget if at least `min_width` is left
- max_width: Only display the widget if at most `max_width` is left
    - weight: Widgets with a higher `weight` will be calculated before widgets
with a lower one
- copy: Copy this widget when initializing the progress bar so the
progressbar can be reused. Some widgets such as the FormatCustomText
require the shared state so this needs to be optional
'''
copy = True
@abc.abstractmethod
def __call__(self, progress, data):
'''Updates the widget.
progress - a reference to the calling ProgressBar
'''
class AutoWidthWidgetBase(WidgetBase):
'''The base class for all variable width widgets.
This widget is much like the \\hfill command in TeX, it will expand to
fill the line. You can use more than one in the same line, and they will
all have the same width, and together will fill the line.
'''
@abc.abstractmethod
def __call__(self, progress, data, width):
'''Updates the widget providing the total width the widget must fill.
progress - a reference to the calling ProgressBar
width - The total width the widget must fill
'''
class TimeSensitiveWidgetBase(WidgetBase):
'''The base class for all time sensitive widgets.
Some widgets like timers would become out of date unless updated at least
every `INTERVAL`
'''
INTERVAL = datetime.timedelta(milliseconds=100)
class FormatLabel(FormatWidgetMixin, WidgetBase):
'''Displays a formatted label
>>> label = FormatLabel('%(value)s', min_width=5, max_width=10)
>>> class Progress(object):
... pass
>>> label = FormatLabel('{value} :: {value:^6}', new_style=True)
>>> str(label(Progress, dict(value='test')))
'test :: test '
'''
mapping = {
'finished': ('end_time', None),
'last_update': ('last_update_time', None),
'max': ('max_value', None),
'seconds': ('seconds_elapsed', None),
'start': ('start_time', None),
'elapsed': ('total_seconds_elapsed', utils.format_time),
'value': ('value', None),
}
def __init__(self, format: str, **kwargs):
FormatWidgetMixin.__init__(self, format=format, **kwargs)
WidgetBase.__init__(self, **kwargs)
def __call__(self, progress, data, **kwargs):
for name, (key, transform) in self.mapping.items():
try:
if transform is None:
data[name] = data[key]
else:
data[name] = transform(data[key])
except (KeyError, ValueError, IndexError): # pragma: no cover
pass
return FormatWidgetMixin.__call__(self, progress, data, **kwargs)
class Timer(FormatLabel, TimeSensitiveWidgetBase):
'''WidgetBase which displays the elapsed seconds.'''
def __init__(self, format='Elapsed Time: %(elapsed)s', **kwargs):
if '%s' in format and '%(elapsed)s' not in format:
format = format.replace('%s', '%(elapsed)s')
FormatLabel.__init__(self, format=format, **kwargs)
TimeSensitiveWidgetBase.__init__(self, **kwargs)
# This is exposed as a static method for backwards compatibility
format_time = staticmethod(utils.format_time)
class SamplesMixin(TimeSensitiveWidgetBase):
'''
    Mixin for widgets that average multiple measurements
Note that samples can be either an integer or a timedelta to indicate a
certain amount of time
>>> class progress:
... last_update_time = datetime.datetime.now()
... value = 1
... extra = dict()
>>> samples = SamplesMixin(samples=2)
>>> samples(progress, None, True)
(None, None)
>>> progress.last_update_time += datetime.timedelta(seconds=1)
>>> samples(progress, None, True) == (datetime.timedelta(seconds=1), 0)
True
>>> progress.last_update_time += datetime.timedelta(seconds=1)
>>> samples(progress, None, True) == (datetime.timedelta(seconds=1), 0)
True
>>> samples = SamplesMixin(samples=datetime.timedelta(seconds=1))
>>> _, value = samples(progress, None)
>>> value
[1, 1]
>>> samples(progress, None, True) == (datetime.timedelta(seconds=1), 0)
True
'''
def __init__(self, samples=datetime.timedelta(seconds=2), key_prefix=None,
**kwargs):
self.samples = samples
self.key_prefix = (self.__class__.__name__ or key_prefix) + '_'
TimeSensitiveWidgetBase.__init__(self, **kwargs)
def get_sample_times(self, progress, data):
return progress.extra.setdefault(self.key_prefix + 'sample_times', [])
def get_sample_values(self, progress, data):
return progress.extra.setdefault(self.key_prefix + 'sample_values', [])
def __call__(self, progress, data, delta=False):
sample_times = self.get_sample_times(progress, data)
sample_values = self.get_sample_values(progress, data)
if sample_times:
sample_time = sample_times[-1]
else:
sample_time = datetime.datetime.min
if progress.last_update_time - sample_time > self.INTERVAL:
# Add a sample but limit the size to `num_samples`
sample_times.append(progress.last_update_time)
sample_values.append(progress.value)
if isinstance(self.samples, datetime.timedelta):
minimum_time = progress.last_update_time - self.samples
minimum_value = sample_values[-1]
while (sample_times[2:] and
minimum_time > sample_times[1] and
minimum_value > sample_values[1]):
sample_times.pop(0)
sample_values.pop(0)
else:
if len(sample_times) > self.samples:
sample_times.pop(0)
sample_values.pop(0)
if delta:
delta_time = sample_times[-1] - sample_times[0]
delta_value = sample_values[-1] - sample_values[0]
if delta_time:
return delta_time, delta_value
else:
return None, None
else:
return sample_times, sample_values
class ETA(Timer):
'''WidgetBase which attempts to estimate the time of arrival.'''
def __init__(
self,
format_not_started='ETA: --:--:--',
format_finished='Time: %(elapsed)8s',
format='ETA: %(eta)8s',
format_zero='ETA: 00:00:00',
format_NA='ETA: N/A',
**kwargs):
if '%s' in format and '%(eta)s' not in format:
format = format.replace('%s', '%(eta)s')
Timer.__init__(self, **kwargs)
self.format_not_started = format_not_started
self.format_finished = format_finished
self.format = format
self.format_zero = format_zero
self.format_NA = format_NA
def _calculate_eta(self, progress, data, value, elapsed):
'''Updates the widget to show the ETA or total time when finished.'''
if elapsed:
# The max() prevents zero division errors
per_item = elapsed.total_seconds() / max(value, 1e-6)
remaining = progress.max_value - data['value']
eta_seconds = remaining * per_item
else:
eta_seconds = 0
return eta_seconds
def __call__(self, progress, data, value=None, elapsed=None):
'''Updates the widget to show the ETA or total time when finished.'''
if value is None:
value = data['value']
if elapsed is None:
elapsed = data['time_elapsed']
ETA_NA = False
try:
data['eta_seconds'] = self._calculate_eta(
progress, data, value=value, elapsed=elapsed)
except TypeError:
data['eta_seconds'] = None
ETA_NA = True
data['eta'] = None
if data['eta_seconds']:
try:
data['eta'] = utils.format_time(data['eta_seconds'])
except (ValueError, OverflowError): # pragma: no cover
pass
if data['value'] == progress.min_value:
format = self.format_not_started
elif progress.end_time:
format = self.format_finished
elif data['eta']:
format = self.format
elif ETA_NA:
format = self.format_NA
else:
format = self.format_zero
return Timer.__call__(self, progress, data, format=format)
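# Worked example (illustrative only): with value=25 items done out of
# max_value=100 after elapsed=50s, _calculate_eta gives
# per_item = 50 / 25 = 2s and remaining = 100 - 25 = 75, so
# eta_seconds = 150. The max(value, 1e-6) guard only matters while the
# value is still zero.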
class AbsoluteETA(ETA):
'''Widget which attempts to estimate the absolute time of arrival.'''
def _calculate_eta(self, progress, data, value, elapsed):
eta_seconds = ETA._calculate_eta(self, progress, data, value, elapsed)
now = datetime.datetime.now()
try:
return now + datetime.timedelta(seconds=eta_seconds)
except OverflowError: # pragma: no cover
return datetime.datetime.max
def __init__(
self,
format_not_started='Estimated finish time: ----/--/-- --:--:--',
format_finished='Finished at: %(elapsed)s',
format='Estimated finish time: %(eta)s',
**kwargs):
ETA.__init__(self, format_not_started=format_not_started,
format_finished=format_finished, format=format, **kwargs)
class AdaptiveETA(ETA, SamplesMixin):
'''WidgetBase which attempts to estimate the time of arrival.
Uses a sampled average of the speed based on the 10 last updates.
Very convenient for resuming the progress halfway.
'''
def __init__(self, **kwargs):
ETA.__init__(self, **kwargs)
SamplesMixin.__init__(self, **kwargs)
def __call__(self, progress, data):
elapsed, value = SamplesMixin.__call__(self, progress, data,
delta=True)
if not elapsed:
value = None
elapsed = 0
return ETA.__call__(self, progress, data, value=value, elapsed=elapsed)
class DataSize(FormatWidgetMixin, WidgetBase):
'''
Widget for showing an amount of data transferred/processed.
Automatically formats the value (assumed to be a count of bytes) with an
appropriate sized unit, based on the IEC binary prefixes (powers of 1024).
'''
def __init__(
self, variable='value',
format='%(scaled)5.1f %(prefix)s%(unit)s', unit='B',
prefixes=('', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi', 'Yi'),
**kwargs):
self.variable = variable
self.unit = unit
self.prefixes = prefixes
FormatWidgetMixin.__init__(self, format=format, **kwargs)
WidgetBase.__init__(self, **kwargs)
def __call__(self, progress, data):
value = data[self.variable]
if value is not None:
scaled, power = utils.scale_1024(value, len(self.prefixes))
else:
scaled = power = 0
data['scaled'] = scaled
data['prefix'] = self.prefixes[power]
data['unit'] = self.unit
return FormatWidgetMixin.__call__(self, progress, data)
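# Worked example (illustrative only): a value of 1536 bytes scales through
# utils.scale_1024 to (scaled=1.5, power=1), which the default format
# renders as '  1.5 KiB'.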
class FileTransferSpeed(FormatWidgetMixin, TimeSensitiveWidgetBase):
'''
WidgetBase for showing the transfer speed (useful for file transfers).
'''
def __init__(
self, format='%(scaled)5.1f %(prefix)s%(unit)-s/s',
inverse_format='%(scaled)5.1f s/%(prefix)s%(unit)-s', unit='B',
prefixes=('', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi', 'Yi'),
**kwargs):
self.unit = unit
self.prefixes = prefixes
self.inverse_format = inverse_format
FormatWidgetMixin.__init__(self, format=format, **kwargs)
TimeSensitiveWidgetBase.__init__(self, **kwargs)
def _speed(self, value, elapsed):
speed = float(value) / elapsed
return utils.scale_1024(speed, len(self.prefixes))
def __call__(self, progress, data, value=None, total_seconds_elapsed=None):
'''Updates the widget with the current SI prefixed speed.'''
if value is None:
value = data['value']
elapsed = utils.deltas_to_seconds(
total_seconds_elapsed,
data['total_seconds_elapsed'])
if value is not None and elapsed is not None \
and elapsed > 2e-6 and value > 2e-6: # =~ 0
scaled, power = self._speed(value, elapsed)
else:
scaled = power = 0
data['unit'] = self.unit
if power == 0 and scaled < 0.1:
if scaled > 0:
scaled = 1 / scaled
data['scaled'] = scaled
data['prefix'] = self.prefixes[0]
return FormatWidgetMixin.__call__(self, progress, data,
self.inverse_format)
else:
data['scaled'] = scaled
data['prefix'] = self.prefixes[power]
return FormatWidgetMixin.__call__(self, progress, data)
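# Worked example (illustrative only): at value=1 byte after elapsed=20s the
# speed is 0.05 B/s, so power == 0 and scaled < 0.1; the widget then
# reports 1 / 0.05 = 20.0 through inverse_format, i.e. roughly '20.0 s/B'
# instead of '  0.1 B/s'.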
class AdaptiveTransferSpeed(FileTransferSpeed, SamplesMixin):
'''WidgetBase for showing the transfer speed, based on the last X samples
'''
def __init__(self, **kwargs):
FileTransferSpeed.__init__(self, **kwargs)
SamplesMixin.__init__(self, **kwargs)
def __call__(self, progress, data):
elapsed, value = SamplesMixin.__call__(self, progress, data,
delta=True)
return FileTransferSpeed.__call__(self, progress, data, value, elapsed)
class AnimatedMarker(TimeSensitiveWidgetBase):
'''An animated marker for the progress bar which defaults to appear as if
it were rotating.
'''
def __init__(self, markers='|/-\\', default=None, fill='',
marker_wrap=None, fill_wrap=None, **kwargs):
self.markers = markers
self.marker_wrap = create_wrapper(marker_wrap)
self.default = default or markers[0]
self.fill_wrap = create_wrapper(fill_wrap)
self.fill = create_marker(fill, self.fill_wrap) if fill else None
WidgetBase.__init__(self, **kwargs)
def __call__(self, progress, data, width=None):
'''Updates the widget to show the next marker or the first marker when
finished'''
if progress.end_time:
return self.default
marker = self.markers[data['updates'] % len(self.markers)]
if self.marker_wrap:
marker = self.marker_wrap.format(marker)
if self.fill:
# Cut the last character so we can replace it with our marker
fill = self.fill(progress, data, width - progress.custom_len(
marker))
else:
fill = ''
# Python 3 returns an int when indexing bytes
if isinstance(marker, int): # pragma: no cover
            marker = bytes([marker])
fill = fill.encode()
else:
# cast fill to the same type as marker
fill = type(marker)(fill)
return fill + marker
# Alias for backwards compatibility
RotatingMarker = AnimatedMarker
class Counter(FormatWidgetMixin, WidgetBase):
'''Displays the current count'''
def __init__(self, format='%(value)d', **kwargs):
FormatWidgetMixin.__init__(self, format=format, **kwargs)
WidgetBase.__init__(self, format=format, **kwargs)
def __call__(self, progress, data, format=None):
return FormatWidgetMixin.__call__(self, progress, data, format)
class Percentage(FormatWidgetMixin, WidgetBase):
'''Displays the current percentage as a number with a percent sign.'''
def __init__(self, format='%(percentage)3d%%', na='N/A%%', **kwargs):
self.na = na
FormatWidgetMixin.__init__(self, format=format, **kwargs)
WidgetBase.__init__(self, format=format, **kwargs)
def get_format(self, progress, data, format=None):
# If percentage is not available, display N/A%
percentage = data.get('percentage', base.Undefined)
if not percentage and percentage != 0:
return self.na
return FormatWidgetMixin.get_format(self, progress, data, format)
class SimpleProgress(FormatWidgetMixin, WidgetBase):
'''Returns progress as a count of the total (e.g.: "5 of 47")'''
DEFAULT_FORMAT = '%(value_s)s of %(max_value_s)s'
def __init__(self, format=DEFAULT_FORMAT, **kwargs):
FormatWidgetMixin.__init__(self, format=format, **kwargs)
WidgetBase.__init__(self, format=format, **kwargs)
self.max_width_cache = dict(default=self.max_width)
def __call__(self, progress, data, format=None):
# If max_value is not available, display N/A
if data.get('max_value'):
data['max_value_s'] = data.get('max_value')
else:
data['max_value_s'] = 'N/A'
# if value is not available it's the zeroth iteration
if data.get('value'):
data['value_s'] = data['value']
else:
data['value_s'] = 0
formatted = FormatWidgetMixin.__call__(self, progress, data,
format=format)
# Guess the maximum width from the min and max value
key = progress.min_value, progress.max_value
max_width = self.max_width_cache.get(key, self.max_width)
if not max_width:
temporary_data = data.copy()
for value in key:
if value is None: # pragma: no cover
continue
temporary_data['value'] = value
width = progress.custom_len(FormatWidgetMixin.__call__(
self, progress, temporary_data, format=format))
if width: # pragma: no branch
max_width = max(max_width or 0, width)
self.max_width_cache[key] = max_width
# Adjust the output to have a consistent size in all cases
if max_width: # pragma: no branch
formatted = formatted.rjust(max_width)
return formatted
class Bar(AutoWidthWidgetBase):
'''A progress bar which stretches to fill the line.'''
def __init__(self, marker='#', left='|', right='|', fill=' ',
fill_left=True, marker_wrap=None, **kwargs):
'''Creates a customizable progress bar.
The callable takes the same parameters as the `__call__` method
marker - string or callable object to use as a marker
left - string or callable object to use as a left border
right - string or callable object to use as a right border
fill - character to use for the empty part of the progress bar
fill_left - whether to fill from the left or the right
'''
self.marker = create_marker(marker, marker_wrap)
self.left = string_or_lambda(left)
self.right = string_or_lambda(right)
self.fill = string_or_lambda(fill)
self.fill_left = fill_left
AutoWidthWidgetBase.__init__(self, **kwargs)
def __call__(self, progress, data, width):
'''Updates the progress bar and its subcomponents'''
left = converters.to_unicode(self.left(progress, data, width))
right = converters.to_unicode(self.right(progress, data, width))
width -= progress.custom_len(left) + progress.custom_len(right)
marker = converters.to_unicode(self.marker(progress, data, width))
fill = converters.to_unicode(self.fill(progress, data, width))
# Make sure we ignore invisible characters when filling
width += len(marker) - progress.custom_len(marker)
if self.fill_left:
marker = marker.ljust(width, fill)
else:
marker = marker.rjust(width, fill)
return left + marker + right
class ReverseBar(Bar):
'''A bar which has a marker that goes from right to left'''
def __init__(self, marker='#', left='|', right='|', fill=' ',
fill_left=False, **kwargs):
'''Creates a customizable progress bar.
marker - string or updatable object to use as a marker
left - string or updatable object to use as a left border
right - string or updatable object to use as a right border
fill - character to use for the empty part of the progress bar
fill_left - whether to fill from the left or the right
'''
Bar.__init__(self, marker=marker, left=left, right=right, fill=fill,
fill_left=fill_left, **kwargs)
class BouncingBar(Bar, TimeSensitiveWidgetBase):
'''A bar which has a marker which bounces from side to side.'''
INTERVAL = datetime.timedelta(milliseconds=100)
def __call__(self, progress, data, width):
'''Updates the progress bar and its subcomponents'''
left = converters.to_unicode(self.left(progress, data, width))
right = converters.to_unicode(self.right(progress, data, width))
width -= progress.custom_len(left) + progress.custom_len(right)
marker = converters.to_unicode(self.marker(progress, data, width))
fill = converters.to_unicode(self.fill(progress, data, width))
if width: # pragma: no branch
value = int(
data['total_seconds_elapsed'] / self.INTERVAL.total_seconds())
a = value % width
b = width - a - 1
if value % (width * 2) >= width:
a, b = b, a
if self.fill_left:
marker = a * fill + marker + b * fill
else:
marker = b * fill + marker + a * fill
return left + marker + right
class FormatCustomText(FormatWidgetMixin, WidgetBase):
mapping = {}
copy = False
def __init__(self, format, mapping=mapping, **kwargs):
self.format = format
self.mapping = mapping
FormatWidgetMixin.__init__(self, format=format, **kwargs)
WidgetBase.__init__(self, **kwargs)
def update_mapping(self, **mapping):
self.mapping.update(mapping)
def __call__(self, progress, data):
return FormatWidgetMixin.__call__(
self, progress, self.mapping, self.format)
class VariableMixin(object):
'''Mixin to display a custom user variable '''
def __init__(self, name, **kwargs):
if not isinstance(name, str):
raise TypeError('Variable(): argument must be a string')
if len(name.split()) > 1:
raise ValueError('Variable(): argument must be single word')
self.name = name
class MultiRangeBar(Bar, VariableMixin):
'''
A bar with multiple sub-ranges, each represented by a different symbol
The various ranges are represented on a user-defined variable, formatted as
.. code-block:: python
[
['Symbol1', amount1],
['Symbol2', amount2],
...
]
'''
def __init__(self, name, markers, **kwargs):
VariableMixin.__init__(self, name)
Bar.__init__(self, **kwargs)
self.markers = [
string_or_lambda(marker)
for marker in markers
]
def get_values(self, progress, data):
return data['variables'][self.name] or []
def __call__(self, progress, data, width):
'''Updates the progress bar and its subcomponents'''
left = converters.to_unicode(self.left(progress, data, width))
right = converters.to_unicode(self.right(progress, data, width))
width -= progress.custom_len(left) + progress.custom_len(right)
values = self.get_values(progress, data)
values_sum = sum(values)
if width and values_sum:
middle = ''
values_accumulated = 0
width_accumulated = 0
for marker, value in zip(self.markers, values):
marker = converters.to_unicode(marker(progress, data, width))
assert progress.custom_len(marker) == 1
values_accumulated += value
item_width = int(values_accumulated / values_sum * width)
item_width -= width_accumulated
width_accumulated += item_width
middle += item_width * marker
else:
fill = converters.to_unicode(self.fill(progress, data, width))
assert progress.custom_len(fill) == 1
middle = fill * width
return left + middle + right
class MultiProgressBar(MultiRangeBar):
def __init__(self,
name,
# NOTE: the markers are not whitespace even though some
# terminals don't show the characters correctly!
markers=' ▁▂▃▄▅▆▇█',
**kwargs):
MultiRangeBar.__init__(self, name=name,
markers=list(reversed(markers)), **kwargs)
def get_values(self, progress, data):
ranges = [0] * len(self.markers)
for progress in data['variables'][self.name] or []:
if not isinstance(progress, (int, float)):
# Progress is (value, max)
progress_value, progress_max = progress
progress = float(progress_value) / float(progress_max)
if progress < 0 or progress > 1:
raise ValueError(
'Range value needs to be in the range [0..1], got %s' %
progress)
range_ = progress * (len(ranges) - 1)
pos = int(range_)
frac = range_ % 1
ranges[pos] += (1 - frac)
if (frac):
ranges[pos + 1] += (frac)
if self.fill_left:
ranges = list(reversed(ranges))
return ranges
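# Worked example (illustrative only): with the default 9 markers a
# sub-progress of 0.3 maps to range_ = 0.3 * 8 = 2.4, so ranges[2] += 0.6
# and ranges[3] += 0.4; the resulting list is reversed afterwards when
# fill_left is set.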
class GranularMarkers:
smooth = ' ▏▎▍▌▋▊▉█'
bar = ' ▁▂▃▄▅▆▇█'
snake = ' ▖▌▛█'
fade_in = ' ░▒▓█'
dots = ' ⡀⡄⡆⡇⣇⣧⣷⣿'
growing_circles = ' .oO'
class GranularBar(AutoWidthWidgetBase):
'''A progressbar that can display progress at a sub-character granularity
by using multiple marker characters.
Examples of markers:
- Smooth: ` ▏▎▍▌▋▊▉█` (default)
- Bar: ` ▁▂▃▄▅▆▇█`
- Snake: ` ▖▌▛█`
- Fade in: ` ░▒▓█`
- Dots: ` ⡀⡄⡆⡇⣇⣧⣷⣿`
- Growing circles: ` .oO`
    The markers can be accessed through GranularMarkers, for example
    GranularMarkers.dots.
'''
def __init__(self, markers=GranularMarkers.smooth, left='|', right='|',
**kwargs):
'''Creates a customizable progress bar.
markers - string of characters to use as granular progress markers. The
first character should represent 0% and the last 100%.
Ex: ` .oO`.
left - string or callable object to use as a left border
right - string or callable object to use as a right border
'''
self.markers = markers
self.left = string_or_lambda(left)
self.right = string_or_lambda(right)
AutoWidthWidgetBase.__init__(self, **kwargs)
def __call__(self, progress, data, width):
left = converters.to_unicode(self.left(progress, data, width))
right = converters.to_unicode(self.right(progress, data, width))
width -= progress.custom_len(left) + progress.custom_len(right)
if progress.max_value is not base.UnknownLength \
and progress.max_value > 0:
percent = progress.value / progress.max_value
else:
percent = 0
num_chars = percent * width
marker = self.markers[-1] * int(num_chars)
marker_idx = int((num_chars % 1) * (len(self.markers) - 1))
if marker_idx:
marker += self.markers[marker_idx]
marker = converters.to_unicode(marker)
# Make sure we ignore invisible characters when filling
width += len(marker) - progress.custom_len(marker)
marker = marker.ljust(width, self.markers[0])
return left + marker + right
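# Worked example (illustrative only): with the default 9 smooth markers,
# width=10 and percent=0.42, num_chars = 4.2, so four full markers are
# emitted and the partial cell uses markers[int(0.2 * 8)] == markers[1];
# the remainder is padded with markers[0] (a space).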
class FormatLabelBar(FormatLabel, Bar):
'''A bar which has a formatted label in the center.'''
def __init__(self, format, **kwargs):
FormatLabel.__init__(self, format, **kwargs)
Bar.__init__(self, **kwargs)
def __call__(self, progress, data, width, format=None):
center = FormatLabel.__call__(self, progress, data, format=format)
bar = Bar.__call__(self, progress, data, width)
# Aligns the center of the label to the center of the bar
center_len = progress.custom_len(center)
center_left = int((width - center_len) / 2)
center_right = center_left + center_len
return bar[:center_left] + center + bar[center_right:]
class PercentageLabelBar(Percentage, FormatLabelBar):
'''A bar which displays the current percentage in the center.'''
# %3d adds an extra space that makes it look off-center
# %2d keeps the label somewhat consistently in-place
def __init__(self, format='%(percentage)2d%%', na='N/A%%', **kwargs):
Percentage.__init__(self, format, na=na, **kwargs)
FormatLabelBar.__init__(self, format, **kwargs)
class Variable(FormatWidgetMixin, VariableMixin, WidgetBase):
'''Displays a custom variable.'''
def __init__(self, name, format='{name}: {formatted_value}',
width=6, precision=3, **kwargs):
'''Creates a Variable associated with the given name.'''
self.format = format
self.width = width
self.precision = precision
VariableMixin.__init__(self, name=name)
WidgetBase.__init__(self, **kwargs)
def __call__(self, progress, data):
value = data['variables'][self.name]
context = data.copy()
context['value'] = value
context['name'] = self.name
context['width'] = self.width
context['precision'] = self.precision
try:
# Make sure to try and cast the value first, otherwise the
# formatting will generate warnings/errors on newer Python releases
value = float(value)
fmt = '{value:{width}.{precision}}'
context['formatted_value'] = fmt.format(**context)
except (TypeError, ValueError):
if value:
context['formatted_value'] = '{value:{width}}'.format(
**context)
else:
context['formatted_value'] = '-' * self.width
return self.format.format(**context)
class DynamicMessage(Variable):
'''Kept for backwards compatibility, please use `Variable` instead.'''
pass
class CurrentTime(FormatWidgetMixin, TimeSensitiveWidgetBase):
'''Widget which displays the current (date)time with seconds resolution.'''
INTERVAL = datetime.timedelta(seconds=1)
def __init__(self, format='Current Time: %(current_time)s',
microseconds=False, **kwargs):
self.microseconds = microseconds
FormatWidgetMixin.__init__(self, format=format, **kwargs)
TimeSensitiveWidgetBase.__init__(self, **kwargs)
def __call__(self, progress, data):
data['current_time'] = self.current_time()
data['current_datetime'] = self.current_datetime()
return FormatWidgetMixin.__call__(self, progress, data)
def current_datetime(self):
now = datetime.datetime.now()
if not self.microseconds:
now = now.replace(microsecond=0)
return now
def current_time(self):
return self.current_datetime().time()
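if __name__ == '__main__':  # pragma: no cover
    # Illustrative usage sketch only: `progressbar.ProgressBar` is assumed to
    # be the package's public entry point; it is not defined in this module.
    import time
    import progressbar
    example_widgets = [
        progressbar.Percentage(), ' ',
        progressbar.Bar(marker='#'), ' ',
        progressbar.ETA(), ' ',
        progressbar.FileTransferSpeed(),
    ]
    with progressbar.ProgressBar(max_value=100,
                                 widgets=example_widgets) as example_bar:
        for i in range(100):
            time.sleep(0.01)
            example_bar.update(i)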
| {
"content_hash": "f8174402d864099e2e693e51853d9ff6",
"timestamp": "",
"source": "github",
"line_count": 1075,
"max_line_length": 79,
"avg_line_length": 34.50232558139535,
"alnum_prop": 0.5921811809112969,
"repo_name": "WoLpH/python-progressbar",
"id": "8d81bb6d7c566c00c595ad64c9f057c099867240",
"size": "37258",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "progressbar/widgets.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "160879"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
import unittest
from pyes.tests import ESTestCase
from pyes.query import Search, QueryStringQuery, HighLighter
class QuerySearchTestCase(ESTestCase):
def setUp(self):
super(QuerySearchTestCase, self).setUp()
mapping = {u'parsedtext': {'boost': 1.0,
'index': 'analyzed',
'store': 'yes',
'type': u'string',
"term_vector": "with_positions_offsets"},
u'name': {'boost': 1.0,
'index': 'analyzed',
'store': 'yes',
'type': u'string',
"term_vector": "with_positions_offsets"},
u'title': {'boost': 1.0,
'index': 'analyzed',
'store': 'yes',
'type': u'string',
"term_vector": "with_positions_offsets"},
u'pos': {'store': 'yes',
'type': u'integer'},
u'uuid': {'boost': 1.0,
'index': 'not_analyzed',
'store': 'yes',
'type': u'string'}}
self.conn.indices.create_index(self.index_name)
self.conn.indices.put_mapping(self.document_type, {'properties': mapping}, self.index_name)
self.conn.index({"name": "Joe Tester", "parsedtext": "Joe Testere nice guy", "uuid": "11111", "position": 1},
self.index_name, self.document_type, 1)
self.conn.index({"name": "Bill Baloney", "parsedtext": "Joe Testere nice guy", "uuid": "22222", "position": 2},
self.index_name, self.document_type, 2)
self.conn.index({"parsedtext": "Joe Testere nice guy", "uuid": "22222", "position": 2}, self.index_name,
self.document_type, 2)
self.conn.indices.refresh(self.index_name)
def test_QueryHighlight(self):
q = Search(QueryStringQuery("joe"))
q.add_highlight("parsedtext")
q.add_highlight("name")
resultset = self.conn.search(q, indices=self.index_name)
self.assertEqual(resultset.total, 2)
self.assertNotEqual(resultset[0]._meta.highlight, None)
self.assertEqual(resultset[0]._meta.highlight[u"parsedtext"][0].strip(),
u'<em>Joe</em> Testere nice guy')
def test_QueryHighlightWithHighLighter(self):
h = HighLighter(['<b>'], ['</b>'])
q = Search(QueryStringQuery("joe"), highlight=h)
q.add_highlight("parsedtext")
q.add_highlight("name")
resultset = self.conn.search(q, indices=self.index_name)
self.assertEqual(resultset.total, 2)
self.assertNotEqual(resultset[0]._meta.highlight, None)
self.assertEqual(resultset[0]._meta.highlight[u"parsedtext"][0].strip(),
u'<b>Joe</b> Testere nice guy')
if __name__ == "__main__":
unittest.main()
| {
"content_hash": "3b31783e767d826013f097c084dc5e54",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 119,
"avg_line_length": 47.9375,
"alnum_prop": 0.5153194263363755,
"repo_name": "mavarick/pyes",
"id": "52d79b2cc39b6453d6cb9844d7f8a58ec162cf50",
"size": "3092",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "tests/test_highlight.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "1143"
},
{
"name": "Python",
"bytes": "602954"
},
{
"name": "Shell",
"bytes": "1438"
}
],
"symlink_target": ""
} |
import pyman
class BaseTest:
    def setup(self):
self.cli = pyman.Main("PyMan - Test Interface")
| {
"content_hash": "8c343a9830f790d2fa61b16459d4d3cd",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 55,
"avg_line_length": 18.333333333333332,
"alnum_prop": 0.6363636363636364,
"repo_name": "MarkLark/pyman",
"id": "921896e5895289d173ed2f90173338ab7de4e35c",
"size": "110",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/BaseTest.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "10584"
}
],
"symlink_target": ""
} |
import binascii
import os
import random
import string
import uuid
def generate_key(length: int = 20):
"""
Generates a random key (used for access tokens).
    :param length: number of random bytes to draw; the returned hex string
        is twice this length.
    :return: a hex-encoded random string.
"""
return binascii.hexlify(os.urandom(length)).decode()
def generate_random_string(string_length: int = 6, upper_case: bool = False) -> str:
"""
Generates a random string of letters and digits.
"""
letter_and_digits = string.ascii_letters + string.digits
random_string = "".join(
random.choice(letter_and_digits) for i in range(string_length)
)
if upper_case:
return random_string.upper()
return random_string
def generate_random_uuid4() -> uuid.UUID:
"""
Generates a uuid4.
"""
return uuid.uuid4()
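if __name__ == "__main__":
    # Illustrative usage sketch only: a 20-byte key hex-encodes to 40
    # characters and the random string honours the requested length.
    print(generate_key(20))           # e.g. 'd1f0...' (40 hex characters)
    print(generate_random_string(8))  # e.g. 'a3Xk9qZw'
    print(generate_random_uuid4())    # e.g. UUID('9b1deb4d-...')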
| {
"content_hash": "3c0556013e93c1819f98db57ac35bfad",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 84,
"avg_line_length": 21.52777777777778,
"alnum_prop": 0.6464516129032258,
"repo_name": "rhazdon/django-sonic-screwdriver",
"id": "3c7b33af020da5059f220a60174229133311607d",
"size": "775",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "django_sonic_screwdriver/random.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "219"
},
{
"name": "Makefile",
"bytes": "683"
},
{
"name": "Python",
"bytes": "59036"
}
],
"symlink_target": ""
} |
import pygame
from utils import *
from ability import Ability
from characters.transformed import Transformed
from characters.directions import Directions
# Revert is called in switch; it is used to revert Biggie back to his normal size
class Revert(Ability):
def __init__(self):
Ability.__init__(self, "revert", pygame.K_r)
# Revert before switching so Biggie = AI
# Revert his rect/image and unlock him
def cast(self, c):
if c.AI.status == Transformed.Ladder:
c.AI.image = pygame.transform.scale(c.AI.image, (BIGGIE_WIDTH, BIGGIE_HEIGHT))
c.AI.rect = pygame.Rect(c.AI.rect.left, c.AI.rect.top - BIGGIE_HEIGHT, BIGGIE_WIDTH, BIGGIE_HEIGHT)
elif c.AI.status == Transformed.Bridge:
c.lvl_current.platform_list.remove(c.AI)
c.AI.image = pygame.transform.scale(c.AI.image, (BIGGIE_WIDTH, BIGGIE_HEIGHT))
if c.AI.heading == Directions.Left:
c.AI.rect = pygame.Rect(c.AI.rect.left - BIGGIE_WIDTH, c.AI.rect.top - BIGGIE_HEIGHT, BIGGIE_WIDTH, BIGGIE_HEIGHT)
elif c.AI.heading == Directions.Right:
c.AI.rect = pygame.Rect(c.AI.rect.right, c.AI.rect.top - BIGGIE_HEIGHT, BIGGIE_WIDTH, BIGGIE_HEIGHT)
else:
print "PROBLEM#"
elif c.AI.status == Transformed.Wall:
c.lvl_current.special_platforms.remove(c.AI)
c.AI.image = pygame.transform.scale(c.AI.image, (BIGGIE_WIDTH, BIGGIE_HEIGHT))
c.AI.rect = pygame.Rect(c.AI.rect.left, c.AI.rect.top + BIGGIE_HEIGHT, BIGGIE_WIDTH, BIGGIE_HEIGHT)
c.AI.rect.right = max(c.player.rect.left, BIGGIE_WIDTH + 1)
c.AI.rect.bottom = c.player.rect.bottom
c.AI.locked = False
c.AI.status = Transformed.Default
| {
"content_hash": "c4f63b5c484425a42ab10730145a88c7",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 118,
"avg_line_length": 44.30555555555556,
"alnum_prop": 0.7178683385579937,
"repo_name": "450W16/MODACT",
"id": "119f225a95d8cf738cfe0de9e64dbb4d8b7dad70",
"size": "1595",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/abilities/revert.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "63777"
}
],
"symlink_target": ""
} |
from .fetchers import NUMetadatasFetcher
from .fetchers import NUGlobalMetadatasFetcher
from bambou import NURESTObject
class NUDomainFIPAclTemplateEntry(NURESTObject):
""" Represents a DomainFIPAclTemplateEntry in the VSD
Notes:
Defines the template of Egress Domain ACL Template entries
"""
__rest_name__ = "egressdomainfloatingipaclentrytemplate"
__resource_name__ = "egressdomainfloatingipaclentrytemplates"
## Constants
CONST_NETWORK_TYPE_NETWORK_MACRO_GROUP = "NETWORK_MACRO_GROUP"
CONST_ACTION_DROP = "DROP"
CONST_LOCATION_TYPE_ZONE = "ZONE"
CONST_ACTION_REDIRECT = "REDIRECT"
CONST_ENTITY_SCOPE_ENTERPRISE = "ENTERPRISE"
CONST_NETWORK_TYPE_PUBLIC_NETWORK = "PUBLIC_NETWORK"
CONST_ACTION_FORWARD = "FORWARD"
CONST_NETWORK_TYPE_POLICYGROUP = "POLICYGROUP"
CONST_LOCATION_TYPE_ANY = "ANY"
CONST_NETWORK_TYPE_ENDPOINT_DOMAIN = "ENDPOINT_DOMAIN"
CONST_NETWORK_TYPE_ENTERPRISE_NETWORK = "ENTERPRISE_NETWORK"
CONST_NETWORK_TYPE_ANY = "ANY"
CONST_LOCATION_TYPE_POLICYGROUP = "POLICYGROUP"
CONST_NETWORK_TYPE_SUBNET = "SUBNET"
CONST_NETWORK_TYPE_ZONE = "ZONE"
CONST_ASSOCIATED_TRAFFIC_TYPE_L4_SERVICE_GROUP = "L4_SERVICE_GROUP"
CONST_NETWORK_TYPE_ENDPOINT_SUBNET = "ENDPOINT_SUBNET"
CONST_LOCATION_TYPE_VPORTTAG = "VPORTTAG"
CONST_LOCATION_TYPE_SUBNET = "SUBNET"
CONST_POLICY_STATE_DRAFT = "DRAFT"
CONST_ASSOCIATED_TRAFFIC_TYPE_L4_SERVICE = "L4_SERVICE"
CONST_WEB_FILTER_TYPE_WEB_DOMAIN_NAME = "WEB_DOMAIN_NAME"
CONST_LOCATION_TYPE_REDIRECTIONTARGET = "REDIRECTIONTARGET"
CONST_POLICY_STATE_LIVE = "LIVE"
CONST_ENTITY_SCOPE_GLOBAL = "GLOBAL"
CONST_WEB_FILTER_TYPE_WEB_CATEGORY = "WEB_CATEGORY"
CONST_NETWORK_TYPE_INTERNET_POLICYGROUP = "INTERNET_POLICYGROUP"
CONST_NETWORK_TYPE_ENDPOINT_ZONE = "ENDPOINT_ZONE"
def __init__(self, **kwargs):
""" Initializes a DomainFIPAclTemplateEntry instance
Notes:
You can specify all parameters while calling this methods.
A special argument named `data` will enable you to load the
object from a Python dictionary
Examples:
>>> domainfipacltemplateentry = NUDomainFIPAclTemplateEntry(id=u'xxxx-xxx-xxx-xxx', name=u'DomainFIPAclTemplateEntry')
>>> domainfipacltemplateentry = NUDomainFIPAclTemplateEntry(data=my_dict)
"""
super(NUDomainFIPAclTemplateEntry, self).__init__()
# Read/Write Attributes
self._acl_template_name = None
self._icmp_code = None
self._icmp_type = None
self._ipv6_address_override = None
self._dscp = None
self._last_updated_by = None
self._action = None
self._address_override = None
self._web_filter_id = None
self._web_filter_type = None
self._description = None
self._destination_port = None
self._network_id = None
self._network_type = None
self._mirror_destination_id = None
self._flow_logging_enabled = None
self._enterprise_name = None
self._entity_scope = None
self._location_id = None
self._location_type = None
self._policy_state = None
self._domain_name = None
self._source_port = None
self._priority = None
self._protocol = None
self._associated_live_entity_id = None
self._associated_live_template_id = None
self._associated_traffic_type = None
self._associated_traffic_type_id = None
self._stateful = None
self._stats_id = None
self._stats_logging_enabled = None
self._ether_type = None
self._external_id = None
self.expose_attribute(local_name="acl_template_name", remote_name="ACLTemplateName", attribute_type=str, is_required=True, is_unique=False)
self.expose_attribute(local_name="icmp_code", remote_name="ICMPCode", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="icmp_type", remote_name="ICMPType", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="ipv6_address_override", remote_name="IPv6AddressOverride", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="dscp", remote_name="DSCP", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="last_updated_by", remote_name="lastUpdatedBy", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="action", remote_name="action", attribute_type=str, is_required=False, is_unique=False, choices=[u'DROP', u'FORWARD', u'REDIRECT'])
self.expose_attribute(local_name="address_override", remote_name="addressOverride", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="web_filter_id", remote_name="webFilterID", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="web_filter_type", remote_name="webFilterType", attribute_type=str, is_required=False, is_unique=False, choices=[u'WEB_CATEGORY', u'WEB_DOMAIN_NAME'])
self.expose_attribute(local_name="description", remote_name="description", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="destination_port", remote_name="destinationPort", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="network_id", remote_name="networkID", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="network_type", remote_name="networkType", attribute_type=str, is_required=False, is_unique=False, choices=[u'ANY', u'ENDPOINT_DOMAIN', u'ENDPOINT_SUBNET', u'ENDPOINT_ZONE', u'ENTERPRISE_NETWORK', u'INTERNET_POLICYGROUP', u'NETWORK_MACRO_GROUP', u'POLICYGROUP', u'PUBLIC_NETWORK', u'SUBNET', u'ZONE'])
self.expose_attribute(local_name="mirror_destination_id", remote_name="mirrorDestinationID", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="flow_logging_enabled", remote_name="flowLoggingEnabled", attribute_type=bool, is_required=False, is_unique=False)
self.expose_attribute(local_name="enterprise_name", remote_name="enterpriseName", attribute_type=str, is_required=True, is_unique=False)
self.expose_attribute(local_name="entity_scope", remote_name="entityScope", attribute_type=str, is_required=False, is_unique=False, choices=[u'ENTERPRISE', u'GLOBAL'])
self.expose_attribute(local_name="location_id", remote_name="locationID", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="location_type", remote_name="locationType", attribute_type=str, is_required=False, is_unique=False, choices=[u'ANY', u'POLICYGROUP', u'REDIRECTIONTARGET', u'SUBNET', u'VPORTTAG', u'ZONE'])
self.expose_attribute(local_name="policy_state", remote_name="policyState", attribute_type=str, is_required=False, is_unique=False, choices=[u'DRAFT', u'LIVE'])
self.expose_attribute(local_name="domain_name", remote_name="domainName", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="source_port", remote_name="sourcePort", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="priority", remote_name="priority", attribute_type=int, is_required=False, is_unique=False)
self.expose_attribute(local_name="protocol", remote_name="protocol", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="associated_live_entity_id", remote_name="associatedLiveEntityID", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="associated_live_template_id", remote_name="associatedLiveTemplateID", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="associated_traffic_type", remote_name="associatedTrafficType", attribute_type=str, is_required=False, is_unique=False, choices=[u'L4_SERVICE', u'L4_SERVICE_GROUP'])
self.expose_attribute(local_name="associated_traffic_type_id", remote_name="associatedTrafficTypeID", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="stateful", remote_name="stateful", attribute_type=bool, is_required=False, is_unique=False)
self.expose_attribute(local_name="stats_id", remote_name="statsID", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="stats_logging_enabled", remote_name="statsLoggingEnabled", attribute_type=bool, is_required=False, is_unique=False)
self.expose_attribute(local_name="ether_type", remote_name="etherType", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="external_id", remote_name="externalID", attribute_type=str, is_required=False, is_unique=True)
# Fetchers
self.metadatas = NUMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
self.global_metadatas = NUGlobalMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
self._compute_args(**kwargs)
# Properties
@property
def acl_template_name(self):
""" Get acl_template_name value.
Notes:
The name of the parent Template for this acl entry
This attribute is named `ACLTemplateName` in VSD API.
"""
return self._acl_template_name
@acl_template_name.setter
def acl_template_name(self, value):
""" Set acl_template_name value.
Notes:
The name of the parent Template for this acl entry
This attribute is named `ACLTemplateName` in VSD API.
"""
self._acl_template_name = value
@property
def icmp_code(self):
""" Get icmp_code value.
Notes:
The ICMP Code when protocol selected is ICMP
This attribute is named `ICMPCode` in VSD API.
"""
return self._icmp_code
@icmp_code.setter
def icmp_code(self, value):
""" Set icmp_code value.
Notes:
The ICMP Code when protocol selected is ICMP
This attribute is named `ICMPCode` in VSD API.
"""
self._icmp_code = value
@property
def icmp_type(self):
""" Get icmp_type value.
Notes:
The ICMP Type when protocol selected is ICMP
This attribute is named `ICMPType` in VSD API.
"""
return self._icmp_type
@icmp_type.setter
def icmp_type(self, value):
""" Set icmp_type value.
Notes:
The ICMP Type when protocol selected is ICMP
This attribute is named `ICMPType` in VSD API.
"""
self._icmp_type = value
@property
def ipv6_address_override(self):
""" Get ipv6_address_override value.
Notes:
Overrides the source IPv6 for Ingress and destination IP for Egress, MAC entries will use this address as the match criteria.
This attribute is named `IPv6AddressOverride` in VSD API.
"""
return self._ipv6_address_override
@ipv6_address_override.setter
def ipv6_address_override(self, value):
""" Set ipv6_address_override value.
Notes:
Overrides the source IPv6 for Ingress and destination IP for Egress, MAC entries will use this address as the match criteria.
This attribute is named `IPv6AddressOverride` in VSD API.
"""
self._ipv6_address_override = value
@property
def dscp(self):
""" Get dscp value.
Notes:
DSCP match condition to be set in the rule. It is either * or from 0-63
This attribute is named `DSCP` in VSD API.
"""
return self._dscp
@dscp.setter
def dscp(self, value):
""" Set dscp value.
Notes:
DSCP match condition to be set in the rule. It is either * or from 0-63
This attribute is named `DSCP` in VSD API.
"""
self._dscp = value
@property
def last_updated_by(self):
""" Get last_updated_by value.
Notes:
ID of the user who last updated the object.
This attribute is named `lastUpdatedBy` in VSD API.
"""
return self._last_updated_by
@last_updated_by.setter
def last_updated_by(self, value):
""" Set last_updated_by value.
Notes:
ID of the user who last updated the object.
This attribute is named `lastUpdatedBy` in VSD API.
"""
self._last_updated_by = value
@property
def action(self):
""" Get action value.
Notes:
The action of the ACL entry DROP or FORWARD or REDIRECT. Action REDIRECT is allowed only for IngressAdvancedForwardingEntry
"""
return self._action
@action.setter
def action(self, value):
""" Set action value.
Notes:
The action of the ACL entry DROP or FORWARD or REDIRECT. Action REDIRECT is allowed only for IngressAdvancedForwardingEntry
"""
self._action = value
@property
def address_override(self):
""" Get address_override value.
Notes:
Overrides the source IP for Ingress and destination IP for Egress, MAC entries will use this address as the match criteria.
This attribute is named `addressOverride` in VSD API.
"""
return self._address_override
@address_override.setter
def address_override(self, value):
""" Set address_override value.
Notes:
Overrides the source IP for Ingress and destination IP for Egress, MAC entries will use this address as the match criteria.
This attribute is named `addressOverride` in VSD API.
"""
self._address_override = value
@property
def web_filter_id(self):
""" Get web_filter_id value.
Notes:
ID of web filter category or web domain name entity used
This attribute is named `webFilterID` in VSD API.
"""
return self._web_filter_id
@web_filter_id.setter
def web_filter_id(self, value):
""" Set web_filter_id value.
Notes:
ID of web filter category or web domain name entity used
This attribute is named `webFilterID` in VSD API.
"""
self._web_filter_id = value
@property
def web_filter_type(self):
""" Get web_filter_type value.
Notes:
Indicates type of web filter being set
This attribute is named `webFilterType` in VSD API.
"""
return self._web_filter_type
@web_filter_type.setter
def web_filter_type(self, value):
""" Set web_filter_type value.
Notes:
Indicates type of web filter being set
This attribute is named `webFilterType` in VSD API.
"""
self._web_filter_type = value
@property
def description(self):
""" Get description value.
Notes:
Description of the ACL entry
"""
return self._description
@description.setter
def description(self, value):
""" Set description value.
Notes:
Description of the ACL entry
"""
self._description = value
@property
def destination_port(self):
""" Get destination_port value.
Notes:
The destination port to be matched if protocol is UDP or TCP. Value should be either * or single port number or a port range
This attribute is named `destinationPort` in VSD API.
"""
return self._destination_port
@destination_port.setter
def destination_port(self, value):
""" Set destination_port value.
Notes:
The destination port to be matched if protocol is UDP or TCP. Value should be either * or single port number or a port range
This attribute is named `destinationPort` in VSD API.
"""
self._destination_port = value
@property
def network_id(self):
""" Get network_id value.
Notes:
The destination network entity that is referenced(subnet/zone/macro)
This attribute is named `networkID` in VSD API.
"""
return self._network_id
@network_id.setter
def network_id(self, value):
""" Set network_id value.
Notes:
The destination network entity that is referenced(subnet/zone/macro)
This attribute is named `networkID` in VSD API.
"""
self._network_id = value
@property
def network_type(self):
""" Get network_type value.
Notes:
Type of the source network - VM_SUBNET or VM_ZONE or VM_DOMAIN or SUBNET or ZONE or ENTERPRISE_NETWORK or PUBLIC_NETWORK or ANY
This attribute is named `networkType` in VSD API.
"""
return self._network_type
@network_type.setter
def network_type(self, value):
""" Set network_type value.
Notes:
Type of the source network - VM_SUBNET or VM_ZONE or VM_DOMAIN or SUBNET or ZONE or ENTERPRISE_NETWORK or PUBLIC_NETWORK or ANY
This attribute is named `networkType` in VSD API.
"""
self._network_type = value
@property
def mirror_destination_id(self):
""" Get mirror_destination_id value.
Notes:
This is the ID of the mirrorDestination entity associated with this entity
This attribute is named `mirrorDestinationID` in VSD API.
"""
return self._mirror_destination_id
@mirror_destination_id.setter
def mirror_destination_id(self, value):
""" Set mirror_destination_id value.
Notes:
This is the ID of the mirrorDestination entity associated with this entity
This attribute is named `mirrorDestinationID` in VSD API.
"""
self._mirror_destination_id = value
@property
def flow_logging_enabled(self):
""" Get flow_logging_enabled value.
Notes:
Is flow logging enabled for this particular template
This attribute is named `flowLoggingEnabled` in VSD API.
"""
return self._flow_logging_enabled
@flow_logging_enabled.setter
def flow_logging_enabled(self, value):
""" Set flow_logging_enabled value.
Notes:
Is flow logging enabled for this particular template
This attribute is named `flowLoggingEnabled` in VSD API.
"""
self._flow_logging_enabled = value
@property
def enterprise_name(self):
""" Get enterprise_name value.
Notes:
The name of the enterprise for the domains parent
This attribute is named `enterpriseName` in VSD API.
"""
return self._enterprise_name
@enterprise_name.setter
def enterprise_name(self, value):
""" Set enterprise_name value.
Notes:
The name of the enterprise for the domains parent
This attribute is named `enterpriseName` in VSD API.
"""
self._enterprise_name = value
@property
def entity_scope(self):
""" Get entity_scope value.
Notes:
Specify if scope of entity is Data center or Enterprise level
This attribute is named `entityScope` in VSD API.
"""
return self._entity_scope
@entity_scope.setter
def entity_scope(self, value):
""" Set entity_scope value.
Notes:
Specify if scope of entity is Data center or Enterprise level
This attribute is named `entityScope` in VSD API.
"""
self._entity_scope = value
@property
def location_id(self):
""" Get location_id value.
Notes:
The ID of the location entity (Subnet/Zone/VportTag)
This attribute is named `locationID` in VSD API.
"""
return self._location_id
@location_id.setter
def location_id(self, value):
""" Set location_id value.
Notes:
The ID of the location entity (Subnet/Zone/VportTag)
This attribute is named `locationID` in VSD API.
"""
self._location_id = value
@property
def location_type(self):
""" Get location_type value.
Notes:
Type of the location entity - ANY or SUBNET or ZONE or VPORTTAG
This attribute is named `locationType` in VSD API.
"""
return self._location_type
@location_type.setter
def location_type(self, value):
""" Set location_type value.
Notes:
Type of the location entity - ANY or SUBNET or ZONE or VPORTTAG
This attribute is named `locationType` in VSD API.
"""
self._location_type = value
@property
def policy_state(self):
""" Get policy_state value.
Notes:
State of the policy.
This attribute is named `policyState` in VSD API.
"""
return self._policy_state
@policy_state.setter
def policy_state(self, value):
""" Set policy_state value.
Notes:
State of the policy.
This attribute is named `policyState` in VSD API.
"""
self._policy_state = value
@property
def domain_name(self):
""" Get domain_name value.
Notes:
The name of the domain/domain template for the aclTemplateNames parent
This attribute is named `domainName` in VSD API.
"""
return self._domain_name
@domain_name.setter
def domain_name(self, value):
""" Set domain_name value.
Notes:
The name of the domain/domain template for the aclTemplateNames parent
This attribute is named `domainName` in VSD API.
"""
self._domain_name = value
@property
def source_port(self):
""" Get source_port value.
Notes:
Source port to be matched if protocol is UDP or TCP. Value can be either * or single port number or a port range
This attribute is named `sourcePort` in VSD API.
"""
return self._source_port
@source_port.setter
def source_port(self, value):
""" Set source_port value.
Notes:
Source port to be matched if protocol is UDP or TCP. Value can be either * or single port number or a port range
This attribute is named `sourcePort` in VSD API.
"""
self._source_port = value
@property
def priority(self):
""" Get priority value.
Notes:
The priority of the ACL entry that determines the order of entries
"""
return self._priority
@priority.setter
def priority(self, value):
""" Set priority value.
Notes:
The priority of the ACL entry that determines the order of entries
"""
self._priority = value
@property
def protocol(self):
""" Get protocol value.
Notes:
Protocol number that must be matched
"""
return self._protocol
@protocol.setter
def protocol(self, value):
""" Set protocol value.
Notes:
Protocol number that must be matched
"""
self._protocol = value
@property
def associated_live_entity_id(self):
""" Get associated_live_entity_id value.
Notes:
ID of the associated live entity
This attribute is named `associatedLiveEntityID` in VSD API.
"""
return self._associated_live_entity_id
@associated_live_entity_id.setter
def associated_live_entity_id(self, value):
""" Set associated_live_entity_id value.
Notes:
ID of the associated live entity
This attribute is named `associatedLiveEntityID` in VSD API.
"""
self._associated_live_entity_id = value
@property
def associated_live_template_id(self):
""" Get associated_live_template_id value.
Notes:
In the draft mode, the ACL entity refers to this live entity parent. In non-drafted mode, this is null
This attribute is named `associatedLiveTemplateID` in VSD API.
"""
return self._associated_live_template_id
@associated_live_template_id.setter
def associated_live_template_id(self, value):
""" Set associated_live_template_id value.
Notes:
In the draft mode, the ACL entity refers to this live entity parent. In non-drafted mode, this is null
This attribute is named `associatedLiveTemplateID` in VSD API.
"""
self._associated_live_template_id = value
@property
def associated_traffic_type(self):
""" Get associated_traffic_type value.
Notes:
The associated Traffic type. L4 Service / L4 Service Group
This attribute is named `associatedTrafficType` in VSD API.
"""
return self._associated_traffic_type
@associated_traffic_type.setter
def associated_traffic_type(self, value):
""" Set associated_traffic_type value.
Notes:
The associated Traffic type. L4 Service / L4 Service Group
This attribute is named `associatedTrafficType` in VSD API.
"""
self._associated_traffic_type = value
@property
def associated_traffic_type_id(self):
""" Get associated_traffic_type_id value.
Notes:
The associated Traffic Type ID
This attribute is named `associatedTrafficTypeID` in VSD API.
"""
return self._associated_traffic_type_id
@associated_traffic_type_id.setter
def associated_traffic_type_id(self, value):
""" Set associated_traffic_type_id value.
Notes:
The associated Traffic Type ID
This attribute is named `associatedTrafficTypeID` in VSD API.
"""
self._associated_traffic_type_id = value
@property
def stateful(self):
""" Get stateful value.
Notes:
True means that this ACL entry is stateful, so there will be a corresponding rule that will be created by OVS in the network. False means that there is no corresponding rule created by OVS in the network.
"""
return self._stateful
@stateful.setter
def stateful(self, value):
""" Set stateful value.
Notes:
True means that this ACL entry is stateful, so there will be a corresponding rule that will be created by OVS in the network. False means that there is no corresponding rule created by OVS in the network.
"""
self._stateful = value
@property
def stats_id(self):
""" Get stats_id value.
Notes:
The statsID that is created in the VSD and identifies this ACL Template Entry. This is auto-generated by VSD
This attribute is named `statsID` in VSD API.
"""
return self._stats_id
@stats_id.setter
def stats_id(self, value):
""" Set stats_id value.
Notes:
The statsID that is created in the VSD and identifies this ACL Template Entry. This is auto-generated by VSD
This attribute is named `statsID` in VSD API.
"""
self._stats_id = value
@property
def stats_logging_enabled(self):
""" Get stats_logging_enabled value.
Notes:
Is stats logging enabled for this particular template
This attribute is named `statsLoggingEnabled` in VSD API.
"""
return self._stats_logging_enabled
@stats_logging_enabled.setter
def stats_logging_enabled(self, value):
""" Set stats_logging_enabled value.
Notes:
Is stats logging enabled for this particular template
This attribute is named `statsLoggingEnabled` in VSD API.
"""
self._stats_logging_enabled = value
@property
def ether_type(self):
""" Get ether_type value.
Notes:
Ether type of the packet to be matched. etherType can be * or a valid hexadecimal value
This attribute is named `etherType` in VSD API.
"""
return self._ether_type
@ether_type.setter
def ether_type(self, value):
""" Set ether_type value.
Notes:
Ether type of the packet to be matched. etherType can be * or a valid hexadecimal value
This attribute is named `etherType` in VSD API.
"""
self._ether_type = value
@property
def external_id(self):
""" Get external_id value.
Notes:
External object ID. Used for integration with third party systems
This attribute is named `externalID` in VSD API.
"""
return self._external_id
@external_id.setter
def external_id(self, value):
""" Set external_id value.
Notes:
External object ID. Used for integration with third party systems
This attribute is named `externalID` in VSD API.
"""
self._external_id = value
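if __name__ == "__main__":
    # Illustrative usage sketch only: the expose_attribute() calls above map
    # the python names to VSD API names, so an entry can be built offline
    # from keyword arguments before being saved through a bambou/VSD session
    # (session handling is not shown; the attribute values here are made up).
    entry = NUDomainFIPAclTemplateEntry(
        acl_template_name="allow-https",
        enterprise_name="acme",
        action="FORWARD",
        protocol="6",
        destination_port="443",
        ether_type="0x0800",
    )
    print(entry.action, entry.protocol, entry.destination_port)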
| {
"content_hash": "9da768e8a7a1c09d56e9c935a263d561",
"timestamp": "",
"source": "github",
"line_count": 1080,
"max_line_length": 342,
"avg_line_length": 30.63425925925926,
"alnum_prop": 0.56820311319329,
"repo_name": "nuagenetworks/vspk-python",
"id": "44cb2569070bc19c3add6efe43fc688150c3f112",
"size": "34698",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vspk/v5_0/nudomainfipacltemplateentry.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "12909327"
}
],
"symlink_target": ""
} |
from oslo_config import cfg
compute_group = cfg.OptGroup('compute',
title='Compute Options')
# some config options here
driver_path = cfg.StrOpt(
'driver_path',
default='calplus.v1.compute.drivers',
help='Default path to compute drivers',
)
ALL_OPTS = [driver_path]
def register_opts(conf):
conf.register_group(compute_group)
conf.register_opts(ALL_OPTS, group=compute_group)
def list_opts():
return {compute_group: ALL_OPTS}
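if __name__ == "__main__":
    # Illustrative usage sketch only: register the group on a fresh
    # ConfigOpts instance and read the default back.
    conf = cfg.ConfigOpts()
    register_opts(conf)
    conf([])  # parse an empty command line so defaults are resolved
    print(conf.compute.driver_path)  # -> 'calplus.v1.compute.drivers'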
| {
"content_hash": "e6a28ec044109e2dff8cb914819128b4",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 53,
"avg_line_length": 20.416666666666668,
"alnum_prop": 0.6612244897959184,
"repo_name": "cloudcomputinghust/CAL",
"id": "0efc397471690fbbd2d18b6f869aedf3b299470c",
"size": "490",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "calplus/conf/compute.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "243156"
}
],
"symlink_target": ""
} |
import logging
import warnings
from collections import OrderedDict
import paddle
import paddle.distributed as dist
from paddle.fluid import core
from paddle.optimizer import Optimizer
from paddle.fluid.clip import ClipGradByGlobalNorm
from paddle.distributed import fleet, ParallelMode
HybridParallelClipGrad = (
fleet.meta_optimizers.dygraph_optimizer.hybrid_parallel_optimizer.HybridParallelClipGrad
)
from paddle.distributed.collective import (
_get_global_group,
new_group,
)
from .group_sharded_storage import ParamStorage, GradStorage
from .group_sharded_utils import Type, device_guard, GroupShardedClipGrad
# CUDA alignment 256 bytes, cpu alignment 4096 bytes
alignment = {"gpu": 256, "cpu": 4096}
align = {
Type.fp16.value: 2,
Type.bf16.value: 2,
Type.fp32.value: 4,
}
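# Worked example (illustrative only, one plausible reading of how the tables
# above are used): on GPU an fp16 tensor is padded to a multiple of
# alignment["gpu"] // align[Type.fp16.value] = 256 // 2 = 128 elements, so a
# 1000-element fp16 parameter occupies 1024 element slots in its rank buffer.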
class GroupShardedOptimizerStage2(Optimizer):
"""
A wrapper for Sharding Stage2 Optimizer in Dygraph.
.. warning: ShardingOptimizer encapsulates the optimization strategy and integrates it into the optimizer.
    .. ZeRO: https://arxiv.org/pdf/1910.02054.pdf
"""
# TODO (Baibaifan)
# Feature Notes:
# 1. Unified memory for parameters and parameters.grad to InternalStorage.
# 2. Support the segmentation of optimizer parameters and partial updating of parameters.
# 3. Dynamically adjust training parameters and models.
# 4. Support offload function.
# 5. Support the establishment of independent communication groups.
# 6. Broadcast_fp16 is not supported now.
def __init__(
self,
params,
optim,
group=None,
offload=False,
device="gpu",
pertrain_sync_models=True,
dp_group=None,
**kw
):
super().__init__(learning_rate=optim._learning_rate, parameters=params)
assert core.is_compiled_with_cuda(), "Only GPU is supported now"
# Segmentation information
self._dtype_rank_params = (
OrderedDict()
) # {dtype:[param1,param2]} device, rank, params
self._param2rank = {}
self.__segment_params = []
self._rank_buffer_size = {} # {dtype: {rank: numel+alignment}}
self._param2align = {} # {param.name: align}
# Default information
self._optim = optim
# sharing stage 2 comm overlap flag
self._reduce_overlap = False
# record the last task used for comm overlap for sharding stage 2
self._comm_task = None
assert hasattr(
self._optim, "_master_weights"
), "Must use optimizer with _master_weights attribute"
# Support parameter group and parameter list
self._local_params = []
if isinstance(params[0], dict):
for param_group in params:
self._local_params.extend(list(param_group["params"]))
else:
self._local_params.extend(list(params))
self._default_device = device
self._pfp16 = (
len(
list(
filter(
lambda x: x.trainable and x.dtype == Type.fp16.value,
self._local_params,
)
)
)
> 0
)
self._broadcast_overlap = False
self._forward_pre_hook_remove_helper = []
try:
# The fp32 params such as layer_norm_0.w_0 will be at the end of param_list.
            # Have to sort the params to make sure all params are in the order they are used in the forward pass.
self._broadcast_order_params = sorted(
self.local_params,
key=lambda x: int(x.name.split('.')[0].split('_')[-1]),
)
except ValueError:
self._broadcast_order_params = None
self._group = (
new_group(_get_global_group().ranks) if group is None else group
)
# only support to combine stage2 and dp hybrid parallel now.
self._dp_group = dp_group
self.world_size = self._group.nranks
self._rank = self._group.rank
self._global_root_rank = self._group.ranks[0]
        # Synchronize all ranks' models
if pertrain_sync_models:
self._sync_params_and_buffers()
self.param_storages = {} # {dtype: {rank: InternalStorage}}
if isinstance(self._optim._grad_clip, ClipGradByGlobalNorm):
logging.warning(
"While using ClipGradByGlobalNorm in GroupShardedOptimizerStage2, the grad clip of original optimizer will be changed."
)
hcg = fleet.fleet._hcg if hasattr(fleet.fleet, "_hcg") else None
if (
hcg
and hcg.get_parallel_mode() is not ParallelMode.DATA_PARALLEL
):
self._optim._grad_clip = HybridParallelClipGrad(
self._optim._grad_clip, hcg
)
else:
self._optim._grad_clip = GroupShardedClipGrad(
self._optim._grad_clip, paddle.get_device(), self._group
)
if self._optim._parameter_list and isinstance(
self._optim._parameter_list[0], dict
):
for item in self._optim._param_groups:
if "grad_clip" in item.keys():
item["grad_clip"] = self._optim._grad_clip
if offload:
assert (
self._pfp16
), "Only support offload strategy while using \'Adam\', \'AdamW\' and \'Momentum\' optimizer with AMP/Pure FP16"
self.offload = offload # Using for offload
self.offload_device = "cpu"
self.offload_buffer_size = 0
self.offload_param2align = {}
self.offload_params = None
self.offload_grads = None
self.dev_id = int(paddle.get_device().split(":")[1])
self._master_params = {}
# Update optimizer parameters and adjust parameter storage and use according to rank.
self._update_opt_status()
@paddle.autograd.no_grad()
def _sync_params_and_buffers(self):
"""
Sync all model states for all ranks
"""
for p in self._local_params:
dist.broadcast(
p, src=self._global_root_rank, group=self._group, sync_op=True
)
if self._dp_group:
dist.broadcast(
p,
src=self._dp_group.ranks[0],
group=self._dp_group,
sync_op=True,
)
def _update_task(self, task):
if self._reduce_overlap:
assert task is not None
# Only track of the last reduce task.
# Since all tasks are on the same stream, only need to wait the last one.
# After waiting for the last reduce task, all reduce tasks before have already finished.
self._comm_task = task
def _set_reduce_overlap(self, reduce_overlap):
# Enable gradients' reduces overlap with backward calculation.
self._reduce_overlap = reduce_overlap
def _set_broadcast_overlap(
self, broadcast_overlap, layers=None, num_groups=None
):
# Enable post optimizer broadcasts overlap with the forward calculation of next batch.
self._broadcast_overlap = broadcast_overlap
if self._broadcast_overlap:
assert (
layers is not None
), "To enable broadcast overlap forward, please pass the module to the function."
self._layers = layers
warnings.warn(
"Setting overlap broadcast means the `paddle.device.cuda.synchronize()` "
"must be called manually before calling `paddle.save()` and before and inference."
)
if self._broadcast_order_params is None:
                # Params' names should be like column_linear_32.w_0 pattern to get the best performance.
warnings.warn(
r"The param name passed to the optimizer doesn't follow .+_[0-9]+\..+ patter, "
"overlap broadcast may harm the performance."
)
self._broadcast_order_params = self._local_params
if num_groups is None or num_groups > len(self._broadcast_order_params):
warnings.warn(
"The num_groups for broadcast is larger than the number of params to be broadcast. "
"It will set to default value: 1 (use the default sharding group)."
)
num_groups = 1
assert (
isinstance(num_groups, int) and num_groups > 0
), "num_groups should be a positive integer"
self._number_of_broadcast_groups = num_groups
self._broadcast_groups = [
None for _ in range(self._number_of_broadcast_groups)
]
self._broadcast_groups[0] = self._group
ranks = self._group.ranks
for i in range(1, self._number_of_broadcast_groups):
self._broadcast_groups[i] = new_group(ranks)
def _generate_master_params(self, trainable_params):
if self.offload:
for param in trainable_params:
if param.name not in self._master_params.keys():
self._master_params[param.name] = core.eager.Tensor(
name=param.name,
value=param.cast(dtype=Type.fp32.value).numpy(),
place=core.CPUPlace(),
stop_gradient=param.stop_gradient,
)
else:
for param in trainable_params:
if param.dtype == Type.fp16.value:
master_tensor = paddle.cast(param, Type.fp32.value)
master_tensor.name = param.name
self._optim._master_weights[param.name] = master_tensor
def _update_opt_status(self):
"""Update optimizer status and parameter storage information, and special functions to be developed."""
# func 1
self._integration_params()
# Segement helpers
def _segment_params(self):
"""
        Divide all optimizer parameters as evenly as possible across the ranks.
"""
if len(self.__segment_params) == 0:
self.__segment_params, param_lists = [
[] for _ in range(self.world_size)
], [[] for _ in range(self.world_size)]
sizes = [0] * self.world_size
for param in self._local_params:
# Add this param to rank with smallest size.
rank = sizes.index(min(sizes))
param_lists[rank].append(param)
# Statistical real numels
sizes[rank] += param._numel() if param.trainable else 0
for rank, params in enumerate(param_lists):
self.__segment_params[rank].extend(params)
return self.__segment_params
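    # Illustrative example (not in the original source): with world_size=2 and
    # trainable param numels [30, 20, 10], the greedy loop above assigns
    # rank0 -> [30] and rank1 -> [20, 10]; each new param goes to whichever
    # rank currently holds the smallest total numel.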
@property
def local_params(self):
return self._local_params
@property
def param2rank(self):
"""Map the params to the rank which owns them"""
if len(self._param2rank) == 0:
for rank, params in enumerate(self._segment_params()):
for param in params:
self._param2rank[param.name] = rank
return self._param2rank
@property
def dtype_rank_params(self):
"""
Divide the parameters into groups according to rank and dtype.
"""
if len(self._dtype_rank_params) == 0:
# Assign the parameters of each rank according to the type
trainable_params = list(
filter(lambda x: x.trainable, self._local_params)
)
for param in trainable_params:
if param.dtype not in self._dtype_rank_params.keys():
self._dtype_rank_params[param.dtype] = [
[] for _ in range(self.world_size)
]
self._dtype_rank_params[param.dtype][
self.param2rank[param.name]
].append(param)
# Sort per rank params by size
for dtype in self._dtype_rank_params.keys():
for rank_params in self._dtype_rank_params[dtype]:
rank_params.sort(key=lambda x: x._numel())
return self._dtype_rank_params
@property
def rank_buffer_size(self):
"""
        Count, for each dtype, the buffer size (numel plus alignment padding) that each rank needs.
"""
# CUDA alignment 256 bytes
if len(self._rank_buffer_size) == 0:
for dtype in self.dtype_rank_params.keys():
if dtype not in self._rank_buffer_size.keys():
self._rank_buffer_size[dtype] = {}
for dst_rank, per_rank_params in enumerate(
self.dtype_rank_params[dtype]
):
if dst_rank not in self._rank_buffer_size[dtype].keys():
self._rank_buffer_size[dtype][dst_rank] = 0
for param in per_rank_params:
if not param.trainable:
continue
size = param._numel() * align[dtype]
remaining = size % alignment[self._default_device]
ali = (
0
if remaining == 0
else alignment[self._default_device] - remaining
)
align_ = ali // align[dtype]
self._rank_buffer_size[dtype][dst_rank] += (
param._numel() + align_
)
self._param2align[param.name] = align_
return self._rank_buffer_size
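    # Worked example (illustrative only): an fp16 param with 1000 elements takes
    # 2000 bytes; padding to the 256-byte GPU alignment adds 48 bytes, i.e. 24
    # fp16 elements, so this rank's buffer grows by 1000 + 24 = 1024 elements.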
def _integration_params(self):
"""
        Integrate the parameters into contiguous memory according to rank, and support the update of training parameters.
"""
for dtype, per_rank_params in self.dtype_rank_params.items():
if dtype not in self.param_storages.keys():
self.param_storages[dtype] = {}
for dst_rank, params in enumerate(per_rank_params):
if len(params) > 0:
# Merge all the trainable params in a single InternalStorage
trainable_params = list(
filter(lambda x: x.trainable, params)
)
if self._pfp16 and dst_rank == self._rank:
self._generate_master_params(trainable_params)
if trainable_params:
param_storage = ParamStorage(
size=self.rank_buffer_size[dtype][dst_rank],
dtype=dtype,
device=self._default_device,
)
param_storage.add_rank_params(
trainable_params, self._param2align
)
self.param_storages[dtype][dst_rank] = param_storage
# Clear the InternalStorage keys which are not in use anymore
dtype_in_use = list(self.dtype_rank_params.keys())
dtype_to_pop = list(
filter(lambda x: x not in dtype_in_use, self.param_storages.keys())
)
for d in dtype_to_pop:
self.param_storages.pop(d)
if self.offload:
self._optim._master_weights = self._master_params
cpu_master_params = [p for p in self._master_params.values()]
for param in cpu_master_params:
size = param._numel() * align[Type.fp32.value]
remaining = size % alignment[self.offload_device]
ali = (
0
if remaining == 0
else alignment[self.offload_device] - remaining
)
align_ = ali // align[Type.fp32.value]
self.offload_buffer_size += param._numel() + align_
self.offload_param2align[param.name] = align_
if cpu_master_params:
with device_guard(self._rank, self.offload_device):
self.offload_params = ParamStorage(
size=self.offload_buffer_size,
dtype=Type.fp32.value,
device=self.offload_device,
)
self.offload_params.buffer.name = "offload_buffer"
self.offload_params.add_rank_params(
cpu_master_params, self.offload_param2align, False
)
self.offload_params.buffer.stop_gradient = False
self.offload_grads = GradStorage(
size=self.offload_buffer_size,
dtype=Type.fp32.value,
device=self.offload_device,
destination=self._rank,
parm2align=self.offload_param2align,
convert_cpu=True,
)
for p in cpu_master_params:
self.offload_grads.add_grad(
p, self.offload_param2align[p.name]
)
self._optim._master_weights[
self.offload_params.buffer.name
] = self.offload_params.buffer
def _offload_acc_grad(self, param_name, grad_fp32_cpu):
"""accumulate grads with offload strategy"""
with device_guard(self._rank, self.offload_device):
if param_name in self._master_params.keys():
if self._master_params[param_name].grad is None:
self._master_params[param_name]._copy_gradient_from(
grad_fp32_cpu
)
else:
self._master_params[param_name].grad.add_(grad_fp32_cpu)
self.offload_params.buffer._copy_gradient_from(
self.offload_grads.buffer
)
def _offload_scale_grad(self, scale_size):
"""scale grads with offload strategy"""
with device_guard(self._rank, self.offload_device):
self.offload_grads.buffer.scale_(scale=scale_size)
def _offload_clear_grad(self):
"""clear grads with offload strategy"""
with device_guard(self._rank, self.offload_device):
self.offload_grads.buffer.zero_()
def _step(self):
if self._broadcast_overlap:
# Clear the pre forward hook in the optimizer step.
for hook_remove in self._forward_pre_hook_remove_helper:
hook_remove.remove()
self._forward_pre_hook_remove_helper = []
if self.offload:
params_list = [self.offload_params.buffer]
# TODO(Baibaifan): Offload will support param_groups later
if not isinstance(self._optim._param_groups[0], dict):
self._optim._parameter_list = params_list
self._optim._param_groups = params_list
        # Run the optimizer step on the current rank's shard
if self.offload:
with device_guard(device=self.offload_device):
self._optim.step()
for param in self._local_params:
if param.name in self._master_params.keys():
param.set_value(
self._master_params[param.name]
.cuda(self.dev_id)
.cast(dtype=param.dtype)
)
else:
self._optim.step()
# Synchronize all the updated shards in between the ranks
self._broadcast_params()
def step(self):
"""
A wrapper for Optimizer's step function to finish the update operation of the optimizer.
"""
# This method won't be called directly by opt.step()!
# The _redefine_opt_step() in class GroupShardedStage2 will wrap this function.
self._step()
def minimize(self):
raise RuntimeError(
"optimizer.minimize() not support now, please use optimizer.step()"
)
def set_state_dict(self, state_dict):
self._optim.set_state_dict(state_dict)
def state_dict(self):
return self._optim.state_dict()
def _clear_cache(self):
self.__segment_params.clear()
self._dtype_rank_params.clear()
self._param2rank.clear()
@paddle.autograd.no_grad()
def _broadcast_params(self):
"""Broadcast the parameters of the current rank to each rank"""
# Exchange all the shards with the other ranks
if self._broadcast_overlap:
self._broadcast_params_overlap_forward()
else:
for dtype_per_rank in self.param_storages.values():
for dst_rank, internal_storage in dtype_per_rank.items():
dist.broadcast(
tensor=internal_storage.buffer,
src=self._group.ranks[dst_rank],
group=self._group,
sync_op=True,
)
def _forward_pre_hook_function(self, tasks):
        # Since the layers will call the pre hook as `forward_pre_hook(self, inputs)`,
        # the helper function needs the x and y parameters to receive those arguments.
def __impl__(x, y):
for task in tasks:
# Wait for broadcast task before using the result of the broadcast.
task.wait()
return __impl__
@paddle.autograd.no_grad()
def _broadcast_params_overlap_forward(self):
# Exchange all the shards with the other ranks,
# but overlap the broadcast with next batch's calculation.
group_idx = 0
param2task = {}
for x in self._broadcast_order_params:
if x.trainable:
group = self._broadcast_groups[group_idx]
group_idx = (group_idx + 1) % self._number_of_broadcast_groups
task = dist.broadcast(
tensor=x,
src=group.ranks[self._param2rank[x.name]],
group=group,
sync_op=False,
)
assert x.name not in param2task
param2task[x.name] = task
for layer in self._layers.sublayers():
if len(layer.sublayers()) == 0:
                # Register the forward pre hook for leaf layers. This gives the best performance.
tasks = []
for param in layer.parameters():
if param.trainable:
if param.name in param2task:
tasks.append(param2task[param.name])
self._forward_pre_hook_remove_helper.append(
layer.register_forward_pre_hook(
self._forward_pre_hook_function(tasks)
)
)
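# A minimal usage sketch (illustrative only, not part of the original file).
# It assumes an initialized fleet/dygraph environment, a dygraph `model`, and
# an inner optimizer built over the same parameter list:
#
#     params = model.parameters()
#     inner_opt = paddle.optimizer.AdamW(learning_rate=1e-3, parameters=params)
#     opt = GroupShardedOptimizerStage2(params=params, optim=inner_opt)
#     # Each rank keeps optimizer state only for its own shard of the params;
#     # after opt.step() the updated shards are broadcast back to every rank.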
| {
"content_hash": "cf77079e00cbb618161d91c65e4d4d81",
"timestamp": "",
"source": "github",
"line_count": 593,
"max_line_length": 135,
"avg_line_length": 38.97976391231029,
"alnum_prop": 0.5435431537962362,
"repo_name": "luotao1/Paddle",
"id": "38b03225616ed4c9280f909b073da470c3df8275",
"size": "24177",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "python/paddle/distributed/fleet/meta_parallel/sharding/group_sharded_optimizer_stage2.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "58544"
},
{
"name": "C",
"bytes": "210300"
},
{
"name": "C++",
"bytes": "36771446"
},
{
"name": "CMake",
"bytes": "903079"
},
{
"name": "Cuda",
"bytes": "5200715"
},
{
"name": "Dockerfile",
"bytes": "4361"
},
{
"name": "Go",
"bytes": "49796"
},
{
"name": "Java",
"bytes": "16630"
},
{
"name": "Jinja",
"bytes": "23852"
},
{
"name": "MLIR",
"bytes": "39982"
},
{
"name": "Python",
"bytes": "36248258"
},
{
"name": "R",
"bytes": "1332"
},
{
"name": "Shell",
"bytes": "553175"
}
],
"symlink_target": ""
} |
'''
LEGO:
Classes and methods for the creation and manipulation of regular expression
objects and components.
* A regular expression is a "pattern" object.
* Each pattern alternates (with a pipe, "|") between zero or more "conc"
(concatenation) objects.
* Each conc is a concatenation of zero or more "mult" (multiplication)
objects.
* Each mult consists of a multiplicand and a multiplier. A multiplier consists
of a minimum and a maximum, e.g. min = 0, max = 1 indicates the "?"
multiplier. The multiplicand is either a nested pattern object, or a
charclass object.
* A charclass is a set of chars, such as "a", "[a-z]", "\\d", ".", with a
possible "negated" flag as in "[^a]".
* Since these can be combined together freely they are, in the absence of a
better metaphor, collectively referred to as lego pieces.
We also include methods for parsing a string into a pattern object,
serialising a pattern object out as a string (or "regular expression", if you
will), and for concatenating or alternating between arbitrary "pieces of
lego", using overloaded operators.
If the FSM module is available, call lego.fsm() on any lego piece to return
a finite state machine capable of accepting strings described by that piece.
Most important are the reduce() methods present in charclass, mult, conc and
pattern. While there is no such thing as a canonical form for a given regex
pattern, these procedures can drastically simplify a regex structure for
readability. They're also pretty easy to extend.
'''
# http://qntm.org/lego
# http://qntm.org/greenery
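# A worked decomposition (illustrative only, using the classes defined below):
# the regex "a{1,2}|bc" parses into
#
#     pattern(
#         conc(mult(charclass("a"), multiplier(bound(1), bound(2)))),
#         conc(mult(charclass("b"), one), mult(charclass("c"), one)),
#     )
#
# i.e. a pattern of two concs; the first holds a single mult with an explicit
# {1,2} multiplier, the second holds two singular mults.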
class nomatch(Exception):
'''Thrown when parsing fails. Almost always caught and almost never fatal'''
pass
def reduce_after(method):
'''reduce() the result of this method call (unless you already reduced it).'''
def new_method(self, *args, **kwargs):
result = method(self, *args, **kwargs)
if method.__name__ == "reduce" and result == self:
return result
return result.reduce()
return new_method
@reduce_after
def parse(string):
'''
Parse a full string and return a lego piece. Fail if the whole string
wasn't parsed
'''
p, i = pattern.match(string, 0)
if i != len(string):
raise Exception("Could not parse '" + string + "' beyond index " + str(i))
return p
def static(string, i, static):
j = i+len(static)
if string[i:j] == static:
return j
raise nomatch
class lego:
'''
Parent class for all lego pieces.
All lego pieces have some things in common. This parent class mainly
hosts documentation though.
'''
def __setattr__(self, name, value):
'''
Lego pieces are immutable. It caused some pretty serious problems when
I didn't have this.
'''
raise Exception("This object is immutable.")
def fsm(self, alphabet):
'''
Return the present lego piece in the form of a finite state machine,
as imported from the fsm module.
If no alphabet is explicitly supplied, which seems quite probable,
we use the lego.alphabet() method (later) to list all the characters
mentioned in self. However, if we intend to connect this FSM to another
one which uses different characters, we may need to supply an alphabet
which is a superset of both sets.
'''
raise Exception("Not implemented")
def __repr__(self):
'''
Return a string approximating the instantiation line
for the present lego piece.
'''
raise Exception("Not implemented")
def __str__(self):
'''
Render the present lego piece in the form of a regular expression.
Some lego pieces may be created which cannot be rendered in this way.
In particular: a pattern containing no concs; a multiplier of zero.
'''
raise Exception("Not implemented")
def match(cls, string, i):
'''
Start at index i in the supplied string and try to match one of the
present class. Elementary recursive descent parsing with very little
need for flair. The opposite of __str__(), above. (In most cases.)
Throws a nomatch in the event of failure.
'''
raise Exception("Not implemented")
@reduce_after
def reduce(self):
'''
The most important and algorithmically complex method. Takes the current
lego piece and simplifies it in every way possible, returning a simpler
lego piece which is quite probably not of the same class as the original.
Approaches vary by the class of the present lego piece.
It is critically important to (1) always call reduce() on whatever you're
returning before you return it and therefore (2) always return something
STRICTLY SIMPLER than the current object. Otherwise, infinite loops become
possible in reduce() calls.
'''
raise Exception("Not implemented")
@reduce_after
def __add__(self, other):
'''
Concatenate any two lego pieces, regardless of differing classes. Because
reduce() (above) is always called afterwards, the result is as simplified
as possible.
Call using "a = b + c"
'''
raise Exception("Not implemented")
@reduce_after
def __mul__(self, multiplier):
'''
Equivalent to repeated concatenation. Multiplier consists of a minimum
and a maximum; maximum may be infinite (for Kleene star closure).
Call using "a = b * qm"
Reduce() is always called afterwards.
'''
raise Exception("Not implemented")
@reduce_after
def __or__(self, other):
'''
Alternate between any two lego pieces, regardless of differing classes.
Again, reduce() is called afterwards, usually with excellent results.
Call using "a = b | c".
This method MUST NOT call the fsm() method, because this method is used
in turn when converting an FSM back to a regex.
'''
raise Exception("Not implemented")
@reduce_after
def __and__(self, other):
'''
Intersection function. Return a lego piece that can match any string
that both self and other can match. Fairly elementary results relating
to regular languages and finite state machines show that this is
possible, but implementation is a BEAST in many cases. Here, we convert
both lego pieces to FSMs (see fsm(), above) for the intersection, then
back to lego afterwards.
Call using "a = b & c"
'''
raise Exception("Not implemented")
def alphabet(self):
'''
Return a set of all unique characters used in this lego piece.
In theory this could be a static property, self.alphabet, not
a function, self.alphabet(), but in the vast majority of cases
this will never be queried so it's a waste of computation to
calculate it every time a lego piece is instantiated.
By convention, otherchars is always included in this result.
'''
raise Exception("Not implemented")
@reduce_after
def everythingbut(self):
'''
Return a lego object which will match any string not matched by self,
and which will not match any string matched by self.
Another task which is very difficult in general (and typically returns
utter garbage when actually printed), but becomes trivial to code
thanks to FSM routines.
'''
return self.fsm().everythingbut().lego()
def __reversed__(self):
'''
Return a lego object which will match any string which, when reversed,
self would match. E.g. if self matches "beer" then reversed(self) will
match "reeb".
'''
raise Exception("Not implemented")
def empty(self):
'''
Return False if there exists a string which the present lego piece
can match. Return True if no such string exists. Examples of empty
lego pieces are charclass() and pattern()
'''
raise Exception("Not implemented")
def strings(self, otherchar=None):
'''
Each time next() is called on this iterator, a new string is returned
			which the present lego piece can match. StopIteration is raised once
			all such strings have been returned, although a regex with a * in it may
match infinitely many strings.
'''
# In the case of a regex like "[^abc]", there are infinitely many (well, a
# very large finite number of) single characters which will match. It's not
# productive to iterate over all of these giving every single example.
# You must supply your own "otherchar" to stand in for all of these
# possibilities.
for string in self.fsm().strings():
# Have to represent "otherchars" somehow.
if otherchars in string:
if otherchar == None:
raise Exception("Please choose an 'otherchar'")
string = [
otherchar if char == otherchars else char
for char in string
]
yield "".join(string)
class charclass(lego):
'''
A charclass is basically a frozenset of symbols. The reason for the
charclass object instead of using frozenset directly is to allow us to
set a "negated" flag. A charclass with the negation flag set is assumed
to contain every symbol that is in the alphabet of all symbols but not
explicitly listed inside the frozenset. e.g. [^a]. This is very handy
if the full alphabet is extremely large, but also requires dedicated
combination functions.
'''
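	# For example (illustrative): charclass("ab") prints as "[ab]" and matches
	# only "a" or "b", while ~charclass("ab") prints as "[^ab]" and matches any
	# single character except "a" or "b".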
def __init__(self, chars=set(), negateMe=False):
# chars should consist only of chars
assert otherchars not in set(chars)
self.__dict__["chars"] = frozenset(chars)
self.__dict__["negated"] = negateMe
def __eq__(self, other):
try:
return self.chars == other.chars and self.negated == other.negated
except AttributeError:
return False
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return hash((self.chars, self.negated))
@reduce_after
def __mul__(self, ier):
# e.g. "a" * {0,1} = "a?"
if ier == one:
return self
return mult(self, ier)
# These are the characters carrying special meanings when they appear "outdoors"
# within a regular expression. To be interpreted literally, they must be
# escaped with a backslash.
allSpecial = set("\\[]|().?*+{}$^/")
# These are the characters carrying special meanings when they appear INSIDE a
# character class (delimited by square brackets) within a regular expression.
# To be interpreted literally, they must be escaped with a backslash.
# Notice how much smaller this class is than the one above; note also that the
# hyphen and caret do NOT appear above.
classSpecial = set("\\[]^-")
# Shorthand codes for use inside charclasses e.g. [abc\d]
w = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ_abcdefghijklmnopqrstuvwxyz"
d = "0123456789"
s = "\t\n\v\f\r "
shorthand = {
w : "\\w",
d : "\\d",
s : "\\s",
}
def __str__(self):
# e.g. \w
if self in shorthand.keys():
return shorthand[self]
# e.g. [^a]
if self.negated:
return "[^" + self.escape() + "]"
# single character, not contained inside square brackets.
if len(self.chars) == 1:
# Python lacks the Axiom of Choice
char = "".join(self.chars)
# e.g. if char is "\t", return "\\t"
if char in escapes.keys():
return escapes[char]
if char in charclass.allSpecial:
return "\\" + char
# If char is an ASCII control character, don't print it directly,
# return a hex escape sequence e.g. "\\x00". Note that this includes
# tab and other characters already handled above
if 0 <= ord(char) <= 0x1F or ord(char) == 0x7f:
return "\\x" + "{0:02x}".format(ord(char))
return char
# multiple characters (or possibly 0 characters)
return "[" + self.escape() + "]"
def escape(self):
def escapeChar(char):
if char in charclass.classSpecial:
return "\\" + char
if char in escapes.keys():
return escapes[char]
# If char is an ASCII control character, don't print it directly,
# return a hex escape sequence e.g. "\\x00". Note that this includes
# tab and other characters already handled above
if 0 <= ord(char) <= 0x1F or ord(char) == 0x7f:
return "\\x" + "{0:02x}".format(ord(char))
return char
def recordRange():
# there's no point in putting a range when the whole thing is
# 3 characters or fewer.
if len(currentRange) < 4:
return "".join(escapeChar(char) for char in currentRange)
else:
return escapeChar(currentRange[0]) + "-" + escapeChar(currentRange[-1])
output = ""
# use shorthand for known character ranges
# note the nested processing order. DO NOT process \d before processing
# \w. if more character class constants arise which do not nest nicely,
# a problem will arise because there is no clear ordering to use...
# look for ranges
currentRange = ""
for char in sorted(self.chars, key=ord):
# range is not empty: new char must fit after previous one
if len(currentRange) > 0:
i = ord(char)
# char doesn't fit old range: restart
if i != ord(currentRange[-1]) + 1:
output += recordRange()
currentRange = ""
currentRange += char
output += recordRange()
return output
def fsm(self, alphabet=None):
from greenery.fsm import fsm
if alphabet is None:
alphabet = self.alphabet()
# 0 is initial, 1 is final, 2 is oblivion
# If negated, make a singular FSM accepting any other characters
if self.negated:
map = {
0: dict([(symbol, 2 if symbol in self.chars else 1) for symbol in alphabet]),
1: dict([(symbol, 2) for symbol in alphabet]),
2: dict([(symbol, 2) for symbol in alphabet]),
}
# If normal, make a singular FSM accepting only these characters
else:
map = {
0: dict([(symbol, 1 if symbol in self.chars else 2) for symbol in alphabet]),
1: dict([(symbol, 2) for symbol in alphabet]),
2: dict([(symbol, 2) for symbol in alphabet]),
}
return fsm(
alphabet = alphabet,
states = set([0, 1, 2]),
initial = 0,
finals = set([1]),
map = map,
)
def __repr__(self):
string = ""
if self.negated is True:
string += "~"
string += "charclass("
if len(self.chars) > 0:
string += repr("".join(str(char) for char in sorted(self.chars, key=str)))
string += ")"
return string
@reduce_after
def reduce(self):
# Charclasses cannot be reduced().
return self
@reduce_after
def __add__(self, other):
return mult(self, one) + other
def alphabet(self):
return set([otherchars]) | self.chars
def empty(self):
return len(self.chars) == 0 and self.negated == False
@classmethod
def match(cls, string, i):
if i >= len(string):
raise nomatch
# Turn e.g. "\\x40" into "@". Exactly two hex digits
def unescapeHex(string, i):
hex_digits = "0123456789AaBbCcDdEeFf"
j = static(string, i, "\\x")
hex1 = string[j] # e.g. "4"
if not hex1 in hex_digits:
raise nomatch
j += len(hex1)
hex2 = string[j] # e.g. "0"
if not hex2 in hex_digits:
raise nomatch
j += len(hex2)
codepoint = int(hex1 + hex2, 16) # e.g. 64
char = chr(codepoint) # "@"
return char, j
def matchInternalChar(string, i):
# e.g. if we see "\\t", return "\t"
for key in escapes.keys():
try:
return key, static(string, i, escapes[key])
except nomatch:
pass
# special chars e.g. "\\-" returns "-"
for char in charclass.classSpecial:
try:
return char, static(string, i, "\\" + char)
except nomatch:
pass
# hex escape e.g. "\\x40" returns "@"
try:
return unescapeHex(string, i)
except nomatch:
pass
# single non-special character, not contained
# inside square brackets
char, j = string[i], i+1
if char in charclass.classSpecial:
raise nomatch
return char, j
def matchClassInterior1(string, i):
# Attempt 1: shorthand e.g. "\w"
for key in charclass.shorthand:
try:
return key, static(string, i, charclass.shorthand[key])
except nomatch:
pass
# Attempt 2: a range e.g. "d-h"
try:
first, j = matchInternalChar(string, i) # `first` is "d"
k = static(string, j, "-")
last, k = matchInternalChar(string, k) # `last` is "h"
firstIndex = ord(first) # 100
lastIndex = ord(last) # 104
# Be strict here, "d-d" is not allowed
if firstIndex >= lastIndex:
raise nomatch("Range '" + first + "' to '" + last + "' not allowed")
chars = "".join([
chr(i) for i in range(firstIndex, lastIndex + 1)
])
return chars, k
except nomatch:
pass
# Attempt 3: just a character on its own
return matchInternalChar(string, i)
def matchClassInterior(string, i):
internals = ""
try:
while True:
internal, i = matchClassInterior1(string, i)
internals += internal
except nomatch:
pass
return internals, i
# wildcard ".", "\\w", "\\d", etc.
for key in shorthand.keys():
try:
return key, static(string, i, shorthand[key])
except nomatch:
pass
# "[^dsgsdg]"
try:
j = static(string, i, "[^")
chars, j = matchClassInterior(string, j)
j = static(string, j, "]")
return ~charclass(chars), j
except nomatch:
pass
# "[sdfsf]"
try:
j = static(string, i, "[")
chars, j = matchClassInterior(string, j)
j = static(string, j, "]")
return charclass(chars), j
except nomatch:
pass
# e.g. if seeing "\\t", return "\t"
for key in escapes.keys():
try:
return charclass(key), static(string, i, escapes[key])
except nomatch:
pass
# e.g. if seeing "\\{", return "{"
for char in charclass.allSpecial:
try:
return charclass(char), static(string, i, "\\" + char)
except nomatch:
pass
# e.g. if seeing "\\x40", return "@"
try:
char, j = unescapeHex(string, i)
return charclass(char), j
except nomatch:
pass
# single non-special character, not contained inside square brackets
char, i = string[i], i+1
if char in charclass.allSpecial:
raise nomatch
return charclass(char), i
# set operations
def __invert__(self):
'''
Negate the current charclass. e.g. [ab] becomes [^ab]. Call
using "charclass2 = ~charclass1"
'''
return charclass(self.chars, negateMe=not self.negated)
@reduce_after
def __or__(self, other):
try:
# ¬A OR ¬B = ¬(A AND B)
# ¬A OR B = ¬(A - B)
# A OR ¬B = ¬(B - A)
# A OR B
if self.negated:
if other.negated:
return ~charclass(self.chars & other.chars)
return ~charclass(self.chars - other.chars)
if other.negated:
return ~charclass(other.chars - self.chars)
return charclass(self.chars | other.chars)
# "other" lacks attribute "negated" or "chars"
# "other" is not a charclass
# Never mind!
except AttributeError:
return mult(self, one) | other
@reduce_after
def __and__(self, other):
try:
# ¬A AND ¬B = ¬(A OR B)
# ¬A AND B = B - A
# A AND ¬B = A - B
# A AND B
if self.negated:
if other.negated:
return ~charclass(self.chars | other.chars)
return charclass(other.chars - self.chars)
if other.negated:
return charclass(self.chars - other.chars)
return charclass(self.chars & other.chars)
# "other" lacks attribute "negated" or "chars"
# "other" is not a charclass
# Never mind!
except AttributeError:
return mult(self, one) & other
def __reversed__(self):
return self
class bound:
'''An integer but sometimes also possibly infinite (None)'''
def __init__(self, v):
assert v is None or v >= 0
self.__dict__['v'] = v
def __repr__(self):
if self == inf:
return "inf"
return repr(self.v)
def __str__(self):
if self == inf:
# This only happens for an unlimited upper bound
return ""
return str(self.v)
def __eq__(self, other):
try:
return self.v == other.v
except AttributeError:
return False
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return hash(self.v)
def __lt__(self, other):
if self == inf:
return False
if other == inf:
return True
return self.v < other.v
def __ge__(self, other):
return not self < other
def __gt__(self, other):
if other == inf:
return False
if self == inf:
return True
return self.v > other.v
def __mul__(self, other):
'''Multiply this bound by another'''
if self == inf or other == inf:
return inf
return bound(self.v * other.v)
def __add__(self, other):
'''Add this bound to another'''
if self == inf or other == inf:
return inf
return bound(self.v + other.v)
def __sub__(self, other):
'''
Subtract another bound from this one.
Caution: this operation is not meaningful for all bounds.
'''
if other == inf:
assert self == inf
# Infinity minus infinity is zero. This has to be true so that
# we can for example subtract multiplier(bound(0), inf) from
# multiplier(bound(1), inf) to get multiplier(bound(1), bound(1))
return bound(0)
if self == inf:
return self
return bound(self.v - other.v)
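# Some illustrative bound arithmetic (not in the original source):
#     bound(2) + bound(3) == bound(5)
#     inf - bound(3) == inf
#     inf - inf == bound(0)   # as used when subtracting one unbounded multiplier from another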
class multiplier:
'''
A min and a max. The vast majority of characters in regular
expressions occur without a specific multiplier, which is implicitly
equivalent to a min of 1 and a max of 1, but many more have explicit
multipliers like "*" (min = 0, max = inf) and so on.
Although it seems odd and can lead to some confusing edge cases, we do
also permit a max of 0 (iff min is 0 too). This allows the multiplier
"zero" to exist, which actually are quite useful in their own special way.
'''
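	# For example (illustrative): multiplier(bound(0), bound(1)) is "?",
	# multiplier(bound(0), inf) is "*", multiplier(bound(1), inf) is "+", and
	# multiplier(bound(1), bound(1)) is the implicit singular multiplier "".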
def __init__(self, min, max):
assert min != inf
assert min <= max
# More useful than "min" and "max" in many situations
# are "mandatory" and "optional".
mandatory = min
optional = max - min
self.__dict__['min'] = min
self.__dict__['max'] = max
self.__dict__['mandatory'] = mandatory
self.__dict__['optional'] = optional
def __eq__(self, other):
try:
return self.min == other.min and self.max == other.max
except AttributeError:
return False
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return hash((self.min, self.max))
def __repr__(self):
return "multiplier(" + repr(self.min) + ", " + repr(self.max) + ")"
def __str__(self):
assert self.max != bound(0)
if self in symbolic.keys():
return symbolic[self]
if self.max == inf:
return "{" + str(self.min) + ",}"
if self.min == self.max:
return "{" + str(self.min) + "}"
return "{" + str(self.min) + "," + str(self.max) + "}"
@classmethod
def match(cls, string, i):
def matchAnyOf(string, i, collection):
for char in collection:
try:
return char, static(string, i, char)
except nomatch:
pass
raise nomatch
def matchInteger(string, i):
try:
return 0, static(string, i, "0")
except nomatch:
pass
digit, i = matchAnyOf(string, i, "123456789")
integer = int(digit)
try:
while True:
digit, i = matchAnyOf(string, i, "0123456789")
integer *= 10
integer += int(digit)
except nomatch:
return integer, i
# {2,3}
try:
j = static(string, i, "{")
min, j = matchInteger(string, j)
j = static(string, j, ",")
max, j = matchInteger(string, j)
j = static(string, j, "}")
return multiplier(bound(min), bound(max)), j
except nomatch:
pass
# {2,}
try:
j = static(string, i, "{")
min, j = matchInteger(string, j)
j = static(string, j, ",}")
return multiplier(bound(min), inf), j
except nomatch:
pass
# {2}
try:
j = static(string, i, "{")
min, j = matchInteger(string, j)
j = static(string, j, "}")
return multiplier(bound(min), bound(min)), j
except nomatch:
pass
# "?"/"*"/"+"/""
# we do these in reverse order of symbol length, because
# that forces "" to be done last
for key in sorted(symbolic, key=lambda key: -len(symbolic[key])):
try:
return key, static(string, i, symbolic[key])
except nomatch:
pass
raise nomatch
def canmultiplyby(self, other):
'''
Multiplication is not well-defined for all pairs of multipliers because
the resulting possibilities do not necessarily form a continuous range.
For example:
{0,x} * {0,y} = {0,x*y}
{2} * {3} = {6}
{2} * {1,2} = ERROR
The proof isn't simple but suffice it to say that {p,p+q} * {r,r+s} is
equal to {pr, (p+q)(r+s)} only if s=0 or qr+1 >= p. If not, then at least
one gap appears in the range. The first inaccessible number is (p+q)r + 1.
'''
return self.mandatory == zero or \
self.optional * other.mandatory + bound(1) >= self.mandatory
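	# Illustrative check of the rule above (not in the original source):
	# {2} * {1,2} is NOT allowed, since the reachable totals are only 2 and 4
	# (three copies is impossible); {0,2} * {1,2} = {0,4} is fine because every
	# count from 0 to 4 is reachable.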
def __mul__(self, other):
'''Multiply this multiplier by another'''
assert self.canmultiplyby(other)
return multiplier(self.min * other.min, self.max * other.max)
def __add__(self, other):
'''Add two multipliers together'''
return multiplier(self.min + other.min, self.max + other.max)
def __sub__(self, other):
'''
Subtract another multiplier from this one.
Caution: multipliers are not totally ordered.
This operation is not meaningful for all pairs of multipliers.
'''
mandatory = self.mandatory - other.mandatory
optional = self.optional - other.optional
return multiplier(mandatory, mandatory + optional)
def canintersect(self, other):
'''
Intersection is not well-defined for all pairs of multipliers.
For example:
{2,3} & {3,4} = {3}
{2,} & {1,7} = {2,7}
{2} & {5} = ERROR
'''
return not (self.max < other.min or other.max < self.min)
def __and__(self, other):
'''
Find the intersection of two multipliers: that is, a third multiplier
expressing the range covered by both of the originals. This is not
defined for all multipliers.
'''
assert self.canintersect(other)
a = max(self.min, other.min)
b = min(self.max, other.max)
return multiplier(a, b)
def common(self, other):
'''
Find the shared part of two multipliers. This is the largest multiplier
which can be safely subtracted from both the originals. This may
return the "zero" multiplier.
'''
mandatory = min(self.mandatory, other.mandatory)
optional = min(self.optional, other.optional)
return multiplier(mandatory, mandatory + optional)
class mult(lego):
'''
A mult is a combination of a multiplicand with
a multiplier (a min and a max). The vast majority of characters in regular
expressions occur without a specific multiplier, which is implicitly
equivalent to a min of 1 and a max of 1, but many more have explicit
multipliers like "*" (min = 0, max = inf) and so on.
e.g. a, b{2}, c?, d*, [efg]{2,5}, f{2,}, (anysubpattern)+, .*, and so on
'''
def __init__(self, cand, ier):
self.__dict__["multiplicand"] = cand
self.__dict__["multiplier"] = ier
def __eq__(self, other):
try:
return self.multiplicand == other.multiplicand \
and self.multiplier == other.multiplier
except AttributeError:
return False
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return hash((self.multiplicand, self.multiplier))
def __repr__(self):
string = "mult("
string += repr(self.multiplicand)
string += ", " + repr(self.multiplier)
string += ")"
return string
@reduce_after
def __mul__(self, multiplier):
if multiplier == one:
return self
if self.multiplier.canmultiplyby(multiplier):
return mult(self.multiplicand, self.multiplier * multiplier)
return mult(pattern(conc(self)), multiplier)
@reduce_after
def __add__(self, other):
return conc(self) + other
@reduce_after
def __or__(self, other):
return conc(self) | other
def __sub__(self, other):
'''
Subtract another mult from this one and return the result.
The reverse of concatenation. This is a lot trickier.
e.g. a{4,5} - a{3} = a{1,2}
'''
assert other.multiplicand == self.multiplicand
return mult(self.multiplicand, self.multiplier - other.multiplier)
def common(self, other):
'''
Return the common part of these two mults. This is the largest mult
which can be safely subtracted from both the originals. The multiplier
on this mult could be zero: this is the case if, for example, the
multiplicands disagree.
'''
if self.multiplicand == other.multiplicand:
return mult(self.multiplicand, self.multiplier.common(other.multiplier))
# Multiplicands disagree, no common part at all.
return mult(nothing, zero)
@reduce_after
def __and__(self, other):
if hasattr(other, "chars"):
other = mult(other, one)
# If two mults are given which have a common multiplicand, the shortcut
# is just to take the intersection of the two multiplicands.
try:
if self.multiplicand == other.multiplicand \
and self.canintersect(other):
return mult(self.multiplicand, self.multiplier & other.multiplier)
except AttributeError:
# "other" isn't a mult; lacks either a multiplicand or a multiplier.
# Never mind!
pass
# This situation is substantially more complicated if the multiplicand is,
# for example, a pattern. It's difficult to reason sensibly about this
# kind of thing.
return conc(self) & other
def alphabet(self):
return set([otherchars]) | self.multiplicand.alphabet()
def empty(self):
return self.multiplicand.empty() and self.multiplier.min > bound(0)
@reduce_after
def reduce(self):
# Can't match anything: reduce to nothing
if self.empty():
return nothing
# If our multiplicand is a pattern containing an empty conc()
# we can pull that "optional" bit out into our own multiplier
# instead.
# e.g. (A|B|C|)D -> (A|B|C)?D
# e.g. (A|B|C|){2} -> (A|B|C){0,2}
try:
if emptystring in self.multiplicand.concs \
and self.multiplier.canmultiplyby(qm):
return mult(
pattern(
*self.multiplicand.concs.difference(set([emptystring]))
),
self.multiplier * qm,
)
except AttributeError:
# self.multiplicand has no attribute "concs"; isn't a pattern; never mind
pass
# If we have an empty multiplicand, we can only match it
# zero times
if self.multiplicand.empty() \
and self.multiplier.min == bound(0):
return emptystring
# Failing that, we have a positive multiplicand which we
# intend to match zero times. In this case the only possible
# match is the empty string.
if self.multiplier == zero:
return emptystring
# no point multiplying in the singular
if self.multiplier == one:
return self.multiplicand
# Try recursively reducing our internal.
reduced = self.multiplicand.reduce()
# "bulk up" smaller lego pieces to pattern if need be
if hasattr(reduced, "multiplicand"):
reduced = conc(reduced)
if hasattr(reduced, "mults"):
reduced = pattern(reduced)
if reduced != self.multiplicand:
return mult(reduced, self.multiplier)
# If our multiplicand is a pattern containing a single conc
# containing a single mult, we can separate that out a lot
# e.g. ([ab])* -> [ab]*
try:
if len(self.multiplicand.concs) == 1:
singleton = [c for c in self.multiplicand.concs][0]
if len(singleton.mults) == 1:
singlemult = singleton.mults[0]
if singlemult.multiplier.canmultiplyby(self.multiplier):
return mult(
singlemult.multiplicand,
singlemult.multiplier * self.multiplier
)
except AttributeError:
# self.multiplicand has no attribute "concs"; isn't a pattern; never mind
pass
return self
def __str__(self):
# recurse into subpattern
if hasattr(self.multiplicand, "concs"):
output = "(" + str(self.multiplicand) + ")"
else:
output = str(self.multiplicand)
suffix = str(self.multiplier)
# Pick whatever is shorter/more comprehensible.
# e.g. "aa" beats "a{2}", "ababab" beats "(ab){3}"
if self.multiplier.min == self.multiplier.max \
and len(output) * self.multiplier.min.v <= len(output) + len(suffix):
return output * self.multiplier.min.v
return output + suffix
def fsm(self, alphabet=None):
from greenery.fsm import epsilon
if alphabet is None:
alphabet = self.alphabet()
# worked example: (min, max) = (5, 7) or (5, inf)
# (mandatory, optional) = (5, 2) or (5, inf)
unit = self.multiplicand.fsm(alphabet)
# accepts e.g. "ab"
# accepts "ababababab"
mandatory = unit * self.multiplier.mandatory.v
# unlimited additional copies
if self.multiplier.optional == inf:
optional = unit.star()
# accepts "(ab)*"
else:
optional = epsilon(alphabet) | unit
# accepts "(ab)?"
optional *= self.multiplier.optional.v
# accepts "(ab)?(ab)?"
return mandatory + optional
@classmethod
def match(cls, string, i):
def matchMultiplicand(string, i):
# explicitly non-capturing "(?:...)" syntax. No special significance
try:
j = static(string, i, "(?:")
multiplicand, j = pattern.match(string, j)
j = static(string, j, ")")
return multiplicand, j
except nomatch:
pass
# normal "(...)" syntax
try:
j = static(string, i, "(")
multiplicand, j = pattern.match(string, j)
j = static(string, j, ")")
return multiplicand, j
except nomatch:
pass
# Just a charclass on its own
return charclass.match(string, i)
multiplicand, j = matchMultiplicand(string, i)
multiplier_, j = multiplier.match(string, j)
return mult(multiplicand, multiplier_), j
def __reversed__(self):
return mult(reversed(self.multiplicand), self.multiplier)
class conc(lego):
'''
A conc (short for "concatenation") is a tuple of mults i.e. an unbroken
string of mults occurring one after the other.
e.g. abcde[^fg]*h{4}[a-z]+(subpattern)(subpattern2)
To express the empty string, use an empty conc, conc().
'''
def __init__(self, *mults):
self.__dict__["mults"] = tuple(mults)
def __eq__(self, other):
try:
return self.mults == other.mults
except AttributeError:
return False
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return hash(self.mults)
def __repr__(self):
string = "conc("
string += ", ".join(repr(m) for m in self.mults)
string += ")"
return string
@reduce_after
def __mul__(self, multiplier):
if multiplier == one:
return self
# Have to replace self with a pattern unfortunately
return pattern(self) * multiplier
@reduce_after
def __add__(self, other):
# other must be a conc too
if hasattr(other, "chars") or hasattr(other, "concs"):
other = mult(other, one)
if hasattr(other, "multiplicand"):
other = conc(other)
return conc(*(self.mults + other.mults))
@reduce_after
def __or__(self, other):
return pattern(self) | other
@reduce_after
def __and__(self, other):
return pattern(self) & other
@reduce_after
def reduce(self):
# Can't match anything
if self.empty():
return nothing
# no point concatenating one thing (note: concatenating 0 things is
# entirely valid)
if len(self.mults) == 1:
return self.mults[0]
# Try recursively reducing our internals
reduced = [m.reduce() for m in self.mults]
# "bulk up" smaller lego pieces to concs if need be
reduced = [
pattern(x) if hasattr(x, "mults") else x
for x in reduced
]
reduced = [
mult(x, one) if hasattr(x, "chars") or hasattr(x, "concs") else x
for x in reduced
]
reduced = tuple(reduced)
if reduced != self.mults:
return conc(*reduced)
# multiple mults with identical multiplicands in a row?
# squish those together
# e.g. ab?b?c -> ab{0,2}c
if len(self.mults) > 1:
for i in range(len(self.mults)-1):
if self.mults[i].multiplicand == self.mults[i+1].multiplicand:
squished = mult(
self.mults[i].multiplicand,
self.mults[i].multiplier + self.mults[i+1].multiplier
)
new = self.mults[:i] + (squished,) + self.mults[i+2:]
return conc(*new)
# Conc contains (among other things) a *singleton* mult containing a pattern
# with only one internal conc? Flatten out.
# e.g. "a(d(ab|a*c))" -> "ad(ab|a*c)"
# BUT NOT "a(d(ab|a*c)){2,}"
# AND NOT "a(d(ab|a*c)|y)"
for i in range(len(self.mults)):
m = self.mults[i]
try:
if m.multiplier == one and len(m.multiplicand.concs) == 1:
single = [c for c in m.multiplicand.concs][0]
new = self.mults[:i] + single.mults + self.mults[i+1:]
return conc(*new)
except AttributeError:
# m.multiplicand has no attribute "concs"; isn't a pattern; never mind
pass
return self
def fsm(self, alphabet=None):
from greenery.fsm import epsilon
if alphabet is None:
alphabet = self.alphabet()
# start with a component accepting only the empty string
fsm1 = epsilon(alphabet)
for m in self.mults:
fsm1 += m.fsm(alphabet)
return fsm1
def alphabet(self):
return set([otherchars]).union(*[m.alphabet() for m in self.mults])
def empty(self):
for m in self.mults:
if m.empty():
return True
return False
def __str__(self):
return "".join(str(m) for m in self.mults)
@classmethod
def match(cls, string, i):
mults = list()
try:
while True:
m, i = mult.match(string, i)
mults.append(m)
except nomatch:
pass
return conc(*mults), i
def common(self, other, suffix=False):
'''
Return the common prefix of these two concs; that is, the largest conc
which can be safely beheaded() from the front of both.
The result could be emptystring.
"ZYAA, ZYBB" -> "ZY"
"CZ, CZ" -> "CZ"
"YC, ZC" -> ""
With the "suffix" flag set, works from the end. E.g.:
"AAZY, BBZY" -> "ZY"
"CZ, CZ" -> "CZ"
"CY, CZ" -> ""
'''
mults = []
indices = range(min(len(self.mults), len(other.mults))) # e.g. [0, 1, 2, 3]
# Work backwards from the end of both concs instead.
if suffix:
indices = [-i - 1 for i in indices] # e.g. [-1, -2, -3, -4]
for i in indices:
common = self.mults[i].common(other.mults[i])
# Happens when multiplicands disagree (e.g. "A.common(B)") or if
# the multiplicand is shared but the common multiplier is zero
# (e.g. "ABZ*.common(CZ)".)
if common.multiplier == zero:
break
mults.append(common)
# If we did not remove the entirety of both mults, we cannot continue.
if common != self.mults[i] or common != other.mults[i]:
break
if suffix:
mults = reversed(mults)
return conc(*mults)
def __sub__(self, other):
'''
Subtract another conc from this one.
This is the opposite of concatenation. For example, if ABC + DEF = ABCDEF,
then logically ABCDEF - DEF = ABC.
'''
# e.g. self has mults at indices [0, 1, 2, 3, 4, 5, 6] len=7
# e.g. other has mults at indices [0, 1, 2] len=3
new = list(self.mults)
for i in reversed(range(len(other.mults))): # [2, 1, 0]
# e.g. i = 1, j = 7 - 3 + 1 = 5
j = len(self.mults) - len(other.mults) + i
new[j] -= other.mults[i]
if new[j].multiplier == zero:
# omit that mult entirely since it has been factored out
del new[j]
# If the subtraction is incomplete but there is more to
# other.mults, then we have a problem. For example, "ABC{2} - BC"
# subtracts the C successfully but leaves something behind,
# then tries to subtract the B too, which isn't possible
else:
assert i == 0
return conc(*new)
def behead(self, other):
'''
As with __sub__ but the other way around. For example, if
ABC + DEF = ABCDEF, then ABCDEF.behead(AB) = CDEF.
'''
# Observe that FEDCBA - BA = FEDC.
return reversed(reversed(self) - reversed(other))
def __reversed__(self):
return conc(*reversed([reversed(m) for m in self.mults]))
class pattern(lego):
'''
A pattern (also known as an "alt", short for "alternation") is a
set of concs. A pattern expresses multiple alternate possibilities.
When written out as a regex, these would separated by pipes. A pattern
containing no possibilities is possible and represents a regular expression
matching no strings whatsoever (there is no conventional string form for
this).
e.g. "abc|def(ghi|jkl)" is an alt containing two concs: "abc" and
"def(ghi|jkl)". The latter is a conc containing four mults: "d", "e", "f"
and "(ghi|jkl)". The latter in turn is a mult consisting of an upper bound
1, a lower bound 1, and a multiplicand which is a new subpattern, "ghi|jkl".
This new subpattern again consists of two concs: "ghi" and "jkl".
'''
def __init__(self, *concs):
self.__dict__["concs"] = frozenset(concs)
def __eq__(self, other):
try:
return self.concs == other.concs
except AttributeError:
return False
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return hash(self.concs)
def __repr__(self):
string = "pattern("
string += ", ".join(repr(c) for c in self.concs)
string += ")"
return string
@reduce_after
def __mul__(self, multiplier):
if multiplier == one:
return self
return mult(self, multiplier)
@reduce_after
def __add__(self, other):
return mult(self, one) + other
def alphabet(self):
return set([otherchars]).union(*[c.alphabet() for c in self.concs])
def empty(self):
for c in self.concs:
if not c.empty():
return False
return True
@reduce_after
def __and__(self, other):
# A deceptively simple method for an astoundingly difficult operation
alphabet = self.alphabet() | other.alphabet()
# Which means that we can build finite state machines sharing that alphabet
combined = self.fsm(alphabet) & other.fsm(alphabet)
return combined.lego()
@reduce_after
def __or__(self, other):
# other must be a pattern too
if hasattr(other, "chars"):
other = mult(other, one)
if hasattr(other, "multiplicand"):
other = conc(other)
if hasattr(other, "mults"):
other = pattern(other)
return pattern(*(self.concs | other.concs))
def __str__(self):
assert len(self.concs) >= 1
# take the alternation of the input collection of regular expressions.
# i.e. jam "|" between each element
# 1+ elements.
return "|".join(sorted(str(c) for c in self.concs))
@reduce_after
def reduce(self):
# emptiness
if self.empty():
return nothing
# If one of our internal concs is empty, remove it
for c in self.concs:
if c.empty():
new = self.concs - set([c])
return pattern(*new)
# no point alternating among one possibility
if len(self.concs) == 1:
return [e for e in self.concs][0]
# Try recursively reducing our internals first.
reduced = [c.reduce() for c in self.concs]
# "bulk up" smaller lego pieces to concs if need be
reduced = [
mult(x, one) if hasattr(x, "chars") or hasattr(x, "concs") else x
for x in reduced
]
reduced = [
conc(x) if hasattr(x, "multiplicand") else x
for x in reduced
]
reduced = frozenset(reduced)
if reduced != self.concs:
return pattern(*reduced)
# If this pattern contains several concs each containing just 1 mult
# each containing just a charclass, with a multiplier of 1,
# then we can merge those branches together.
# e.g. "0|[1-9]|ab" -> "[0-9]|ab"
changed = False
merger = None
rest = []
for c in self.concs:
if len(c.mults) == 1 \
and c.mults[0].multiplier == one \
and hasattr(c.mults[0].multiplicand, "chars"):
if merger is None:
merger = c.mults[0].multiplicand
else:
merger |= c.mults[0].multiplicand
changed = True
else:
rest.append(c)
if changed:
rest.append(conc(mult(merger, one)))
return pattern(*rest)
# If one of the present pattern's concs is the empty string, and
# there is another conc with a single mult whose lower bound is 0, we
# can omit the empty string.
# E.g. "|(ab)*|def" => "(ab)*|def".
# If there is another conc with a single mult whose lower bound is 1,
# we can merge the empty string into that.
# E.g. "|(ab)+|def" => "(ab)*|def".
if conc() in self.concs:
for c in self.concs:
if len(c.mults) != 1:
continue
m = c.mults[0]
if m.multiplier.min == bound(0):
rest = self.concs - set([conc()])
return pattern(*rest)
if m.multiplier.min == bound(1):
rest = self.concs - set([conc(), c]) | set([m * qm])
return pattern(*rest)
# If the present pattern's concs all have a common prefix, split
# that out. This increases the depth of the object
# but it is still arguably simpler/ripe for further reduction
# e.g. "abc|ade" -> a(bc|de)"
prefix = self._commonconc()
if prefix != emptystring:
leftovers = self.behead(prefix)
mults = prefix.mults + (mult(leftovers, one),)
return conc(*mults)
# Same but for suffixes.
# e.g. "xyz|stz -> (xy|st)z"
suffix = self._commonconc(suffix=True)
if suffix != emptystring:
leftovers = self - suffix
mults = (mult(leftovers, one),) + suffix.mults
return conc(*mults)
return self
@classmethod
def match(cls, string, i):
concs = list()
# first one
c, i = conc.match(string, i)
concs.append(c)
# the rest
while True:
try:
i = static(string, i, "|")
c, i = conc.match(string, i)
concs.append(c)
except nomatch:
return pattern(*concs), i
def __sub__(self, other):
'''
The opposite of concatenation. Remove a common suffix from the present
pattern; that is, from each of its constituent concs.
AYZ|BYZ|CYZ - YZ = A|B|C.
'''
return pattern(*[c - other for c in self.concs])
def behead(self, other):
'''
Like __sub__ but the other way around. Remove a common prefix from the
present pattern; that is, from each of its constituent concs.
ZA|ZB|ZC.behead(Z) = A|B|C
'''
return pattern(*[c.behead(other) for c in self.concs])
def _commonconc(self, suffix=False):
'''
Find the longest conc which acts as prefix to every conc in this pattern.
This could be the empty string. Return the common prefix along with all
the leftovers after truncating that common prefix from each conc.
"ZA|ZB|ZC" -> "Z", "(A|B|C)"
"ZA|ZB|ZC|Z" -> "Z", "(A|B|C|)"
"CZ|CZ" -> "CZ", "()"
If "suffix" is True, the same result but for suffixes.
'''
assert len(self.concs) >= 1
from functools import reduce
return reduce(
lambda x, y: x.common(y, suffix=suffix),
self.concs
)
def fsm(self, alphabet=None):
from greenery.fsm import null
if alphabet is None:
alphabet = self.alphabet()
fsm1 = null(alphabet)
for c in self.concs:
fsm1 |= c.fsm(alphabet)
return fsm1
def __reversed__(self):
return pattern(*(reversed(c) for c in self.concs))
# Special and useful values go here.
# We need to add an extra character in the alphabet which can stand for
# "everything else". For example, if the regex is "abc.", then at the moment
# our alphabet is {"a", "b", "c"}. But "." could match anything else not yet
# specified. This extra letter stands for that ("[^abc]" in this case).
otherchars = None
# Standard character classes
w = charclass("0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ_abcdefghijklmnopqrstuvwxyz")
d = charclass("0123456789")
s = charclass("\t\n\v\f\r ")
W = ~w
D = ~d
S = ~s
dot = ~charclass()
# This charclass expresses "no possibilities at all"
# and can never match anything.
nothing = charclass()
# Textual representations of standard character classes
shorthand = {
w : "\\w", d : "\\d", s : "\\s",
W : "\\W", D : "\\D", S : "\\S",
dot : ".",
}
# Characters which users may escape in a regex instead of inserting them
# literally. In ASCII order:
escapes = {
"\t" : "\\t", # tab
"\n" : "\\n", # line feed
"\v" : "\\v", # vertical tab
"\f" : "\\f", # form feed
"\r" : "\\r", # carriage return
}
# Use this for cases where no upper bound is needed
inf = bound(None)
# Preset multipliers. These get used ALL THE TIME in unit tests
zero = multiplier(bound(0), bound(0)) # has some occasional uses
qm = multiplier(bound(0), bound(1))
one = multiplier(bound(1), bound(1))
star = multiplier(bound(0), inf)
plus = multiplier(bound(1), inf)
# Symbol lookup table for preset multipliers.
symbolic = {
qm : "?",
one : "" ,
star : "*",
plus : "+",
}
# A very special conc expressing the empty string, ""
emptystring = conc()
| {
"content_hash": "8f3d8c337c871508520a258bf4fc3064",
"timestamp": "",
"source": "github",
"line_count": 1675,
"max_line_length": 81,
"avg_line_length": 28.19223880597015,
"alnum_prop": 0.6588666299606116,
"repo_name": "AdeebNqo/sublimegen",
"id": "33d1d0820cfd8c4619c20782514f880c63c67839",
"size": "47259",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/greenery/greenery/lego.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Go",
"bytes": "38648"
},
{
"name": "Makefile",
"bytes": "1710"
},
{
"name": "Python",
"bytes": "137567"
},
{
"name": "Shell",
"bytes": "661"
}
],
"symlink_target": ""
} |
from collections import namedtuple, OrderedDict
from itertools import chain
import json
from urllib import urlencode
import uuid
from django import forms
from django.core.urlresolvers import reverse
from django.forms import Widget
from django.forms.utils import flatatt
from django.template.loader import render_to_string
from django.utils.html import format_html
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _, ugettext_noop, ugettext_lazy
from corehq.apps.userreports.reports.builder.columns import \
QuestionColumnOption, ColumnOption, CountColumn, MultiselectQuestionColumnOption
from crispy_forms import layout as crispy
from crispy_forms.bootstrap import StrictButton
from crispy_forms.helper import FormHelper
from corehq.apps.style import crispy as hqcrispy
from corehq.apps.app_manager.fields import ApplicationDataSourceUIHelper
from corehq.apps.app_manager.models import (
Application,
Form,
)
from corehq.apps.app_manager.util import get_case_properties
from corehq.apps.app_manager.xform import XForm
from corehq.apps.style.crispy import FieldWithHelpBubble
from corehq.apps.userreports import tasks
from corehq.apps.userreports.app_manager import _clean_table_name
from corehq.apps.userreports.models import (
DataSourceBuildInformation,
DataSourceConfiguration,
DataSourceMeta,
ReportConfiguration,
ReportMeta,
)
from corehq.apps.userreports.reports.builder import (
DEFAULT_CASE_PROPERTY_DATATYPES,
FORM_METADATA_PROPERTIES,
make_case_data_source_filter,
make_case_property_indicator,
make_form_data_source_filter,
make_form_meta_block_indicator,
make_form_question_indicator,
make_owner_name_indicator,
get_filter_format_from_question_type,
make_user_name_indicator, make_multiselect_question_indicator)
from corehq.apps.userreports.exceptions import BadBuilderConfigError
from corehq.apps.userreports.sql import get_column_name
from corehq.apps.userreports.ui.fields import JsonField
from dimagi.utils.decorators.memoized import memoized
class FilterField(JsonField):
"""
A form field with a little bit of validation for report builder report
filter configuration.
"""
def validate(self, value):
super(FilterField, self).validate(value)
for filter_conf in value:
if filter_conf.get('format', None) not in ['', 'Choice', 'Date', 'Numeric']:
raise forms.ValidationError("Invalid filter format!")
class Select2(Widget):
"""
A widget for rendering an input with our knockout "select2" binding.
Requires knockout to be included on the page.
"""
def __init__(self, attrs=None, choices=()):
super(Select2, self).__init__(attrs)
self.choices = list(choices)
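# render() below produces markup along these lines (attribute values are
# illustrative, not taken from the codebase):
#   <input name="owner" type="text" data-bind="select2: [...], value: ...">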
def render(self, name, value, attrs=None, choices=()):
value = '' if value is None else value
final_attrs = self.build_attrs(attrs, name=name)
return format_html(
'<input{0} type="text" data-bind="select2: {1}, {2}">',
flatatt(final_attrs),
json.dumps(self._choices_for_binding(choices)),
'value: {}'.format(json.dumps(value)) if value else ""
)
def _choices_for_binding(self, choices):
return [{'id': id, 'text': text} for id, text in chain(self.choices, choices)]
class QuestionSelect(Widget):
"""
A widget for rendering an input with our knockout "questionsSelect" binding.
Requires knockout to be included on the page.
"""
def __init__(self, attrs=None, choices=()):
super(QuestionSelect, self).__init__(attrs)
self.choices = list(choices)
def render(self, name, value, attrs=None, choices=()):
value = '' if value is None else value
final_attrs = self.build_attrs(attrs, name=name)
return format_html(
"""
<input{0} data-bind='
questionsSelect: {1},
value: "{2}",
optionsCaption: " "
'/>
""",
flatatt(final_attrs),
mark_safe(self.render_options(choices)),
value
)
def render_options(self, choices):
def escape(literal):
return literal.replace('&', '&amp;').replace("'", "&#39;")
return json.dumps(
[{'value': escape(v), 'label': escape(l)} for v, l in chain(self.choices, choices)]
)
class DataSourceProperty(namedtuple(
"DataSourceProperty", ["type", "id", "text", "column_id", "source", "is_non_numeric"]
)):
"""
A container class for information about data source properties
Class attributes:
type -- either "case_property", "question", or "meta"
id -- A string that uniquely identifies this property. For question based
properties this is the question id, for case based properties this is
the case property name.
text -- A human readable representation of the property source. For
questions this is the question label.
source -- For questions, this is a dict representing the question as returned
by Xform.get_questions(), for case properties and form metadata it is just
the name of the property.
column_id -- A string to be used as the column_id for data source indicators
based on this property.
is_non_numeric -- True if we know that the property associated with this property
is never numeric. This would be True for form meta properties, static case
properties like closed and owner, and non-numeric form questions.
Note that a value of False does not imply that the property contains
numeric data, just that we don't know for sure that it doesn't (e.g.
case properties).
"""
class DataSourceBuilder(object):
"""
When configuring a report, one can use DataSourceBuilder to determine some
of the properties of the required report data source, such as:
- referenced doc type
- filter
- indicators
"""
def __init__(self, domain, app, source_type, source_id):
assert (source_type in ['case', 'form'])
self.domain = domain
self.app = app
self.source_type = source_type
# source_id is a case type of form id
self.source_id = source_id
if self.source_type == 'form':
self.source_form = Form.get_form(self.source_id)
self.source_xform = XForm(self.source_form.source)
if self.source_type == 'case':
prop_map = get_case_properties(
self.app, [self.source_id], defaults=DEFAULT_CASE_PROPERTY_DATATYPES.keys()
)
self.case_properties = sorted(set(prop_map[self.source_id]) | {'closed'})
@property
@memoized
def source_doc_type(self):
if self.source_type == "case":
return "CommCareCase"
if self.source_type == "form":
return "XFormInstance"
@property
@memoized
def filter(self):
"""
Return the filter configuration for the DataSourceConfiguration.
"""
if self.source_type == "case":
return make_case_data_source_filter(self.source_id)
if self.source_type == "form":
return make_form_data_source_filter(self.source_xform.data_node.tag_xmlns)
def base_item_expression(self, is_multiselect_chart_report, multiselect_field=None):
"""
Return the base_item_expression for the DataSourceConfiguration.
Normally this is {}, but if this is a data source for a chart report that is aggregated by a multiselect
question, then we want one row per multiselect answer.
:param is_multiselect_chart_report: True if the data source will be used for a chart report aggregated by
a multiselect question.
:param multiselect_field: The field that the multiselect aggregated report is aggregated by.
:return: A base item expression.
"""
if not is_multiselect_chart_report:
return {}
else:
assert multiselect_field, "multiselect_field is required if is_multiselect_chart_report is True"
property = self.data_source_properties[multiselect_field]
path = ['form'] + property.source['value'].split('/')[2:]
choices = [c['value'] for c in property.source['options']]
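# Illustrative: for a hypothetical multiselect question at /data/colors,
# path is ['form', 'colors'] and sub_doc(path) builds a nested "dict"
# expression whose value at form -> colors is the selected choice, so the
# map_items expression below emits one row per selected choice.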
def sub_doc(path):
if not path:
return {"type": "property_name", "property_name": "choice"}
else:
return {
"type": "dict",
"properties": {
path[0]: sub_doc(path[1:])
}
}
return {
"type": "map_items",
"items_expression": {
"type": "iterator",
"expressions": [
{
"type": "dict",
"properties": {
"choice": c,
"doc": {"type": "identity"}
}
}
for c in choices
],
"test": {
"type": "boolean_expression",
"expression": {
"type": "property_path",
"property_path": ["doc"] + path
},
"operator": "in_multi",
"property_value": {"type": "property_name", "property_name": "choice"}
}
},
"map_expression": sub_doc(path)
}
def indicators(self, number_columns=None, is_multiselect_chart_report=False):
"""
Return all the dict data source indicator configurations that could be
used by a report that uses the same case type/form as this DataSourceConfiguration.
"""
ret = []
for prop in self.data_source_properties.values():
if prop.type == 'meta':
ret.append(make_form_meta_block_indicator(
prop.source, prop.column_id, root_doc=is_multiselect_chart_report
))
elif prop.type == "question":
if prop.source['type'] == "MSelect":
# For filters and aggregation:
ret.append(make_form_question_indicator(prop.source, prop.column_id))
# For column display:
if prop.source['options']:
# A choice list indicator with no choices will throw a BadSpecError
ret.append(make_multiselect_question_indicator(prop.source, prop.column_id))
else:
indicator = make_form_question_indicator(
prop.source, prop.column_id, root_doc=is_multiselect_chart_report
)
if prop.source['type'] == "DataBindOnly" and number_columns:
if indicator['column_id'] in number_columns:
indicator['datatype'] = 'decimal'
ret.append(indicator)
elif prop.type == 'case_property' and prop.source == 'computed/owner_name':
ret.append(make_owner_name_indicator(prop.column_id))
elif prop.type == 'case_property' and prop.source == 'computed/user_name':
ret.append(make_user_name_indicator(prop.column_id))
elif prop.type == 'case_property':
indicator = make_case_property_indicator(
prop.source, prop.column_id
)
if number_columns:
if indicator['column_id'] in number_columns:
indicator['datatype'] = 'decimal'
ret.append(indicator)
ret.append({
"display_name": "Count",
"type": "count",
"column_id": "count"
})
return ret
@property
@memoized
def data_source_properties(self):
"""
A dictionary containing the various properties that may be used as indicators
or columns in the data source or report.
Keys are strings that uniquely identify properties.
Values are DataSourceProperty instances.
>> self.data_source_properties
{
"/data/question1": DataSourceProperty(
type="question",
id="/data/question1",
text="Enter the child's name",
column_id="data--question1",
source={
'repeat': None,
'group': None,
'value': '/data/question1',
'label': 'question1',
'tag': 'input',
'type': 'Text'
},
is_non_numeric=True
),
"meta/deviceID": DataSourceProperty(
type="meta",
id="meta/deviceID",
text="deviceID",
column_id="meta--deviceID",
source=("deviceID", "string"),
is_non_numeric=True
)
}
"""
if self.source_type == 'case':
return self._get_data_source_properties_from_case(self.case_properties)
if self.source_type == 'form':
return self._get_data_source_properties_from_form(self.source_form, self.source_xform)
@classmethod
def _get_data_source_properties_from_case(cls, case_properties):
property_map = {
'closed': _('Case Closed'),
'user_id': _('User ID Last Updating Case'),
'owner_name': _('Case Owner'),
'mobile worker': _('Mobile Worker Last Updating Case'),
}
static_case_props = [
"closed",
"modified_on",
"name",
"opened_on",
"owner_id",
"user_id",
]
properties = OrderedDict()
for property in case_properties:
properties[property] = DataSourceProperty(
type='case_property',
id=property,
column_id=get_column_name(property),
text=property_map.get(property, property.replace('_', ' ')),
source=property,
is_non_numeric=property in static_case_props,
)
properties['computed/owner_name'] = cls._get_owner_name_pseudo_property()
properties['computed/user_name'] = cls._get_user_name_pseudo_property()
return properties
@staticmethod
def _get_owner_name_pseudo_property():
# owner_name is a special pseudo-case property for which
# the report builder will create a related_doc indicator based
# on the owner_id of the case.
return DataSourceProperty(
type='case_property',
id='computed/owner_name',
column_id=get_column_name('computed/owner_name'),
text=_('Case Owner'),
source='computed/owner_name',
is_non_numeric=True,
)
@staticmethod
def _get_user_name_pseudo_property():
# user_name is a special pseudo case property for which
# the report builder will create a related_doc indicator based on the
# user_id of the case
return DataSourceProperty(
type='case_property',
id='computed/user_name',
column_id=get_column_name('computed/user_name'),
text=_('Mobile Worker Last Updating Case'),
source='computed/user_name',
is_non_numeric=True,
)
@staticmethod
def _get_data_source_properties_from_form(form, form_xml):
property_map = {
'username': _('User Name'),
'userID': _('User ID'),
'timeStart': _('Date Form Started'),
'timeEnd': _('Date Form Completed'),
}
properties = OrderedDict()
questions = form_xml.get_questions([])
for prop in FORM_METADATA_PROPERTIES:
properties[prop[0]] = DataSourceProperty(
type="meta",
id=prop[0],
column_id=get_column_name(prop[0].strip("/")),
text=property_map.get(prop[0], prop[0]),
source=prop,
is_non_numeric=True,
)
for question in questions:
properties[question['value']] = DataSourceProperty(
type="question",
id=question['value'],
column_id=get_column_name(question['value'].strip("/")),
text=question['label'],
source=question,
is_non_numeric=question['type'] not in ("DataBindOnly", "Int", "Double", "Long"),
)
if form.get_app().auto_gps_capture:
properties['location'] = DataSourceProperty(
type="meta",
id='location',
column_id=get_column_name('location'),
text='location',
source=(['location', '#text'], 'Text'),
is_non_numeric=True,
)
return properties
@property
@memoized
def data_source_name(self):
if self.source_type == 'form':
return u"{} (v{})".format(self.source_form.default_name(), self.app.version)
if self.source_type == 'case':
return u"{} (v{})".format(self.source_id, self.app.version)
def _legend(title, subtext):
"""
Return a string to be used in a crispy form Fieldset legend.
This function is just a light wrapped around some simple templating.
"""
return '{title}</br><div class="subtext"><small>{subtext}</small></div>'.format(
title=title, subtext=subtext
)
class DataSourceForm(forms.Form):
report_name = forms.CharField()
chart_type = forms.ChoiceField(
choices=[
('bar', _('Bar')),
('pie', _("Pie")),
],
)
def __init__(self, domain, report_type, max_allowed_reports, *args, **kwargs):
super(DataSourceForm, self).__init__(*args, **kwargs)
self.domain = domain
self.report_type = report_type
self.max_allowed_reports = max_allowed_reports
self.app_source_helper = ApplicationDataSourceUIHelper()
self.app_source_helper.source_type_field.label = _('Forms or Cases')
self.app_source_helper.source_type_field.choices = [("case", _("Cases")), ("form", _("Forms"))]
self.app_source_helper.source_field.label = '<span data-bind="text: labelMap[sourceType()]"></span>'
self.app_source_helper.bootstrap(self.domain)
report_source_fields = self.app_source_helper.get_fields()
report_source_help_texts = {
"source_type": _("<strong>Form</strong>: display data from form submissions.<br/><strong>Case</strong>: display data from your cases. You must be using case management for this option."),
"application": _("Which application should the data come from?"),
"source": _("Choose the case type or form from which to retrieve data for this report."),
}
self.fields.update(report_source_fields)
self.fields['chart_type'].required = self.report_type == "chart"
self.helper = FormHelper()
self.helper.form_class = "form form-horizontal"
self.helper.form_id = "report-builder-form"
self.helper.label_class = 'col-sm-3 col-md-2 col-lg-2'
self.helper.field_class = 'col-sm-9 col-md-8 col-lg-6'
chart_type_crispy_field = None
if self.report_type == 'chart':
chart_type_crispy_field = FieldWithHelpBubble('chart_type', help_bubble_text=_("<strong>Bar</strong> shows one vertical bar for each value in your case or form. <strong>Pie</strong> shows what percentage of the total each value is."))
report_source_crispy_fields = []
for k in report_source_fields.keys():
if k in report_source_help_texts:
report_source_crispy_fields.append(FieldWithHelpBubble(
k, help_bubble_text=report_source_help_texts[k]
))
else:
report_source_crispy_fields.append(k)
top_fields = [
FieldWithHelpBubble(
'report_name',
help_bubble_text=_('Web users will see this name in the "Reports" section of CommCareHQ and can click to view the report'))
]
if chart_type_crispy_field:
top_fields.append(chart_type_crispy_field)
self.helper.layout = crispy.Layout(
crispy.Fieldset(
_('{} Report'.format(self.report_type.capitalize())),
*top_fields
),
crispy.Fieldset(
_('Data'), *report_source_crispy_fields
),
hqcrispy.FormActions(
StrictButton(
_('Next'),
type="submit",
css_class="btn-primary",
)
),
)
@property
def sources_map(self):
return self.app_source_helper.all_sources
def get_selected_source(self):
return self.app_source_helper.get_app_source(self.cleaned_data)
def clean(self):
"""
Raise a validation error if there are already 5 data sources and this
report won't be able to use one of the existing ones.
"""
cleaned_data = super(DataSourceForm, self).clean()
existing_reports = ReportConfiguration.by_domain(self.domain)
builder_reports = filter(lambda report: report.report_meta.created_by_builder, existing_reports)
if len(builder_reports) >= self.max_allowed_reports:
raise forms.ValidationError(_(
"Too many reports!\n"
"Creating this report would cause you to go over the maximum "
"number of report builder reports allowed in this domain. Your"
"limit is {number}. "
"To continue, delete another report and try again. "
).format(number=self.max_allowed_reports))
return cleaned_data
_shared_properties = ['exists_in_current_version', 'display_text', 'property', 'data_source_field']
UserFilterViewModel = namedtuple("UserFilterViewModel", _shared_properties + ['format'])
DefaultFilterViewModel = namedtuple("DefaultFilterViewModel",
_shared_properties + ['format', 'pre_value', 'pre_operator'])
ColumnViewModel = namedtuple("ColumnViewModel", _shared_properties + ['calculation'])
class ConfigureNewReportBase(forms.Form):
user_filters = FilterField(required=False)
default_filters = FilterField(required=False)
button_text = ugettext_noop('Done')
def __init__(self, report_name, app_id, source_type, report_source_id, existing_report=None, *args, **kwargs):
"""
This form can be used to create a new ReportConfiguration, or to modify
an existing one if existing_report is set.
"""
super(ConfigureNewReportBase, self).__init__(*args, **kwargs)
self.existing_report = existing_report
if self.existing_report:
self._bootstrap(self.existing_report)
self.button_text = _('Update Report')
else:
self.report_name = report_name
assert source_type in ['case', 'form']
self.source_type = source_type
self.report_source_id = report_source_id
self.app = Application.get(app_id)
self.domain = self.app.domain
self.ds_builder = DataSourceBuilder(
self.domain, self.app, self.source_type, self.report_source_id
)
self.data_source_properties = self.ds_builder.data_source_properties
self._properties_by_column = {
p.column_id: p for p in self.data_source_properties.values()
}
# NOTE: The corresponding knockout view model is defined in:
# templates/userreports/reportbuilder/configure_report.html
self.helper = FormHelper()
self.helper.form_class = "form form-horizontal"
self.helper.label_class = 'col-sm-3 col-md-2 col-lg-2'
self.helper.field_class = 'col-sm-9 col-md-8 col-lg-6'
self.helper.attrs['data_bind'] = "submit: submitHandler"
self.helper.form_id = "report-config-form"
buttons = [
StrictButton(
_(self.button_text),
css_class="btn btn-primary disable-on-submit",
type="submit",
)
]
# Add a back button if we aren't editing an existing report
if not self.existing_report:
buttons.insert(
0,
crispy.HTML(
'<a class="btn btn-default" href="{}" style="margin-right: 4px">{}</a>'.format(
reverse(
'report_builder_select_source',
args=(self.domain, self.report_type),
),
_('Back')
)
),
)
# Add a "delete report" button if we are editing an existing report
else:
buttons.insert(
0,
crispy.HTML(
'<a id="delete-report-button" class="btn btn-danger pull-right" href="{}">{}</a>'.format(
reverse(
'delete_configurable_report',
args=(self.domain, self.existing_report._id),
) + "?{}".format(urlencode(
{'redirect': reverse('reports_home', args=[self.domain])}
)),
_('Delete Report')
)
)
)
self.helper.layout = crispy.Layout(
self.container_fieldset,
hqcrispy.FormActions(crispy.ButtonHolder(*buttons)),
)
def _bootstrap(self, existing_report):
"""
Use an existing report to initialize some of the instance variables of this
form. This method is used when editing an existing report.
"""
self.report_name = existing_report.title
self.source_type = {
"CommCareCase": "case",
"XFormInstance": "form"
}[existing_report.config.referenced_doc_type]
self.report_source_id = existing_report.config.meta.build.source_id
app_id = existing_report.config.meta.build.app_id
if app_id:
self.app = Application.get(app_id)
else:
raise BadBuilderConfigError(_(
"Report builder data source doesn't reference an application. "
"It is likely this report has been customized and it is no longer editable. "
))
@property
@memoized
def report_column_options(self):
options = OrderedDict()
for id_, prop in self.data_source_properties.iteritems():
if prop.type == "question":
if prop.source['type'] == "MSelect":
option = MultiselectQuestionColumnOption(id_, prop.text, prop.column_id, prop.source)
else:
option = QuestionColumnOption(id_, prop.text, prop.column_id, prop.is_non_numeric, prop.source)
else:
# meta properties
option = ColumnOption(id_, prop.text, prop.column_id, prop.is_non_numeric)
options[id_] = option
return options
@property
def column_config_template(self):
return render_to_string('userreports/partials/property_list_configuration.html')
@property
def container_fieldset(self):
"""
Return the first fieldset in the form.
"""
return crispy.Fieldset(
"",
self.user_filter_fieldset
)
@property
def user_filter_fieldset(self):
"""
Return a fieldset representing the markup used for configuring the
user filters.
"""
return crispy.Fieldset(
_legend(
_("User Filters"),
_("Add filters to your report to allow viewers to select which data the report will display. "
"These filters will be displayed at the top of your report.")
),
crispy.Div(
crispy.HTML(self.column_config_template),
id="user-filters-table",
data_bind='with: userFiltersList'
),
crispy.Hidden('user_filters', None, data_bind="value: userFiltersList.serializedProperties")
)
@property
def default_filter_fieldset(self):
"""
Return a fieldset representing the markup used for configuring the
default filters.
"""
return crispy.Fieldset(
_legend(
_("Default Filters"),
_("These filters are not displayed to report viewers and are always applied to the data.")
),
crispy.Div(
crispy.HTML(self.column_config_template),
id="default-filters-table",
data_bind='with: defaultFiltersList'
),
crispy.Hidden('default_filters', None, data_bind="value: defaultFiltersList.serializedProperties")
)
def _get_data_source_configuration_kwargs(self):
if self._is_multiselect_chart_report:
base_item_expression = self.ds_builder.base_item_expression(True, self.aggregation_field)
else:
base_item_expression = self.ds_builder.base_item_expression(False)
return dict(
display_name=self.ds_builder.data_source_name,
referenced_doc_type=self.ds_builder.source_doc_type,
configured_filter=self.ds_builder.filter,
configured_indicators=self.ds_builder.indicators(
self._number_columns, self._is_multiselect_chart_report
),
base_item_expression=base_item_expression,
meta=DataSourceMeta(build=DataSourceBuildInformation(
source_id=self.report_source_id,
app_id=self.app._id,
app_version=self.app.version,
))
)
def _build_data_source(self):
data_source_config = DataSourceConfiguration(
domain=self.domain,
# The uuid gets truncated, so it's not really universally unique.
table_id=_clean_table_name(self.domain, str(uuid.uuid4().hex)),
**self._get_data_source_configuration_kwargs()
)
data_source_config.validate()
data_source_config.save()
tasks.rebuild_indicators.delay(data_source_config._id)
return data_source_config._id
def update_report(self):
data_source = DataSourceConfiguration.get(self.existing_report.config_id)
if data_source.get_report_count() > 1:
# If another report is pointing at this data source, create a new
# data source for this report so that we can change the indicators
# without worrying about breaking another report.
data_source_config_id = self._build_data_source()
self.existing_report.config_id = data_source_config_id
else:
indicators = self.ds_builder.indicators(self._number_columns)
if data_source.configured_indicators != indicators:
for property_name, value in self._get_data_source_configuration_kwargs().iteritems():
setattr(data_source, property_name, value)
data_source.save()
tasks.rebuild_indicators.delay(data_source._id)
self.existing_report.aggregation_columns = self._report_aggregation_cols
self.existing_report.columns = self._report_columns
self.existing_report.filters = self._report_filters
self.existing_report.configured_charts = self._report_charts
self.existing_report.validate()
self.existing_report.save()
return self.existing_report
def create_report(self):
"""
Creates data source and report config.
:raises BadSpecError if validation fails when building data source, or report is invalid
"""
data_source_config_id = self._build_data_source()
report = ReportConfiguration(
domain=self.domain,
config_id=data_source_config_id,
title=self.report_name,
aggregation_columns=self._report_aggregation_cols,
columns=self._report_columns,
filters=self._report_filters,
configured_charts=self._report_charts,
report_meta=ReportMeta(
created_by_builder=True,
builder_report_type=self.report_type
)
)
report.validate()
report.save()
return report
@property
@memoized
def initial_default_filters(self):
return [self._get_view_model(f) for f in self.existing_report.prefilters] if self.existing_report else []
@property
@memoized
def initial_user_filters(self):
if self.existing_report:
return [self._get_view_model(f) for f in self.existing_report.filters_without_prefilters]
if self.source_type == 'case':
return self._default_case_report_filters
else:
# self.source_type == 'form'
return self._default_form_report_filters
@property
@memoized
def _default_case_report_filters(self):
return [
UserFilterViewModel(
exists_in_current_version=True,
property='closed',
data_source_field=None,
display_text=_('Closed'),
format='Choice',
),
UserFilterViewModel(
exists_in_current_version=True,
property='computed/owner_name',
data_source_field=None,
display_text=_('Case Owner'),
format='Choice',
),
]
@property
@memoized
def _default_form_report_filters(self):
return [
UserFilterViewModel(
exists_in_current_version=True,
property='timeEnd',
data_source_field=None,
display_text='Form completion time',
format='Date',
),
]
def _get_view_model(self, filter):
"""
Given a ReportFilter, return a FilterViewModel representing
the knockout view model representing this filter in the report builder.
"""
exists = self._data_source_prop_exists(filter['field'])
if filter['type'] == 'pre':
return DefaultFilterViewModel(
exists_in_current_version=exists,
display_text='',
format='Value' if filter['pre_value'] else 'Date',
property=self._get_property_id_by_indicator_id(filter['field']) if exists else None,
data_source_field=filter['field'] if not exists else None,
pre_value=filter['pre_value'],
pre_operator=filter['pre_operator'],
)
else:
filter_type_map = {
'dynamic_choice_list': 'Choice',
# This exists to handle the `closed` filter that might exist
'choice_list': 'Choice',
'date': 'Date',
'numeric': 'Numeric'
}
return UserFilterViewModel(
exists_in_current_version=exists,
display_text=filter['display'],
format=filter_type_map[filter['type']],
property=self._get_property_id_by_indicator_id(filter['field']) if exists else None,
data_source_field=filter['field'] if not exists else None
)
def _get_column_option_by_indicator_id(self, indicator_column_id):
"""
Return the ColumnOption corresponding to the given indicator id.
NOTE: This currently assumes that there is a one-to-one mapping between
ColumnOptions and data source indicators, but we may want to remove
this assumption as we add functionality to the report builder.
:param indicator_column_id: The column_id field of a data source
indicator configuration.
:return: The corresponding ColumnOption
"""
for column_option in self.report_column_options.values():
if column_option.indicator_id == indicator_column_id:
return column_option
def _get_property_id_by_indicator_id(self, indicator_column_id):
"""
Return the data source property id corresponding to the given data
source indicator column id.
:param indicator_column_id: The column_id field of a data source indicator
configuration dictionary
:return: A DataSourceProperty property id, e.g. "/data/question1"
"""
data_source_property = self._properties_by_column.get(indicator_column_id)
if data_source_property:
return data_source_property.id
def _column_exists(self, column_id):
"""
Return True if this column corresponds to a question/case property in
the current version of this form/case configuration.
This could be true if a user makes a report, modifies the app, then
edits the report.
column_id is a string like "data_date_q_d1b3693e"
"""
return column_id in [c.indicator_id for c in self.report_column_options.values()]
def _data_source_prop_exists(self, indicator_id):
"""
Return True if there exists a DataSourceProperty corresponding to the
given data source indicator id.
:param indicator_id:
:return:
"""
return indicator_id in self._properties_by_column
def _get_multiselect_indicator_id(self, column_field, indicators):
"""
If this column_field corresponds to a multiselect data source indicator, then return the id of the
indicator. Otherwise return None.
:param column_field: The "field" property of a report column
:return: a data source indicator id
"""
indicator_id = "_".join(column_field.split("_")[:-1])
for indicator in indicators:
if indicator['column_id'] == indicator_id and indicator['type'] == 'choice_list':
return indicator_id
return None
@property
def _report_aggregation_cols(self):
return ['doc_id']
@property
def _report_columns(self):
return []
@property
@memoized
def _number_columns(self):
return [col["field"] for col in self._report_columns if col.get("aggregation", None) in ["avg", "sum"]]
@property
def _is_multiselect_chart_report(self):
return False
@property
def _report_filters(self):
"""
Return the dict filter configurations to be used by the
ReportConfiguration that this form produces.
"""
filter_type_map = {
'Choice': 'dynamic_choice_list',
'Date': 'date',
'Numeric': 'numeric',
'Value': 'pre',
}
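# Illustrative output of _make_report_filter for a "Date" user filter at
# index 0 on a case property whose column_id is "closed_date" (hypothetical id):
# {"field": "closed_date", "slug": "closed_date_0", "display": "<display text>",
#  "type": "date", "compare_as_string": True}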
def _make_report_filter(conf, index):
property = self.data_source_properties[conf["property"]]
col_id = property.column_id
selected_filter_type = conf['format']
if not selected_filter_type or self.source_type == 'form':
if property.type == 'question':
filter_format = get_filter_format_from_question_type(
property.source['type']
)
else:
assert property.type == 'meta'
filter_format = get_filter_format_from_question_type(
property.source[1]
)
else:
filter_format = filter_type_map[selected_filter_type]
ret = {
"field": col_id,
"slug": "{}_{}".format(col_id, index),
"display": conf["display_text"],
"type": filter_format
}
if conf['format'] == 'Date':
ret.update({'compare_as_string': True})
if conf.get('pre_value') or conf.get('pre_operator'):
ret.update({
'type': 'pre', # type could have been "date"
'pre_operator': conf.get('pre_operator', None),
'pre_value': conf.get('pre_value', []),
})
return ret
user_filter_configs = self.cleaned_data['user_filters']
default_filter_configs = self.cleaned_data['default_filters']
filters = [_make_report_filter(f, i) for i, f in enumerate(user_filter_configs + default_filter_configs)]
if self.source_type == 'case':
# The UI doesn't support specifying "choice_list" filters, only "dynamic_choice_list" filters.
# But, we want to make the open/closed filter a cleaner "choice_list" filter, so we do that here.
self._convert_closed_filter_to_choice_list(filters)
return filters
@classmethod
def _convert_closed_filter_to_choice_list(cls, filters):
for f in filters:
if f['field'] == get_column_name('closed') and f['type'] == 'dynamic_choice_list':
f['type'] = 'choice_list'
f['choices'] = [
{'value': 'True'},
{'value': 'False'}
]
@property
def _report_charts(self):
return []
class ConfigureBarChartReportForm(ConfigureNewReportBase):
group_by = forms.ChoiceField(label=_("Bar Chart Categories"))
report_type = 'chart'
def __init__(self, report_name, app_id, source_type, report_source_id, existing_report=None, *args, **kwargs):
super(ConfigureBarChartReportForm, self).__init__(
report_name, app_id, source_type, report_source_id, existing_report, *args, **kwargs
)
if self.source_type == "form":
self.fields['group_by'].widget = QuestionSelect(attrs={'class': 'input-large'})
else:
self.fields['group_by'].widget = Select2(attrs={'class': 'input-large'})
self.fields['group_by'].choices = self._group_by_choices
# Set initial value of group_by
if self.existing_report:
existing_agg_cols = existing_report.aggregation_columns
assert len(existing_agg_cols) < 2
if existing_agg_cols:
self.fields['group_by'].initial = self._get_property_id_by_indicator_id(existing_agg_cols[0])
@property
def container_fieldset(self):
return crispy.Fieldset(
_('Chart'),
FieldWithHelpBubble(
'group_by',
help_bubble_text=_(
"The values of the selected property will be aggregated "
"and shown as bars in the chart."
),
placeholder=_("Select Property..."),
),
self.user_filter_fieldset,
self.default_filter_fieldset
)
@property
def aggregation_field(self):
return self.cleaned_data["group_by"]
@property
def _report_aggregation_cols(self):
return [
self.data_source_properties[self.aggregation_field].column_id
]
@property
def _report_charts(self):
agg_col = self.data_source_properties[self.aggregation_field].column_id
return [{
"type": "multibar",
"x_axis_column": agg_col,
"y_axis_columns": ["count"],
}]
@property
def _report_columns(self):
agg_col_id = self.data_source_properties[self.aggregation_field].column_id
agg_disp = self.data_source_properties[self.aggregation_field].text
return [
{
"format": "default",
"aggregation": "simple",
"field": agg_col_id,
"type": "field",
"display": agg_disp
},
{
"format": "default",
"aggregation": "sum",
"field": "count",
"type": "field",
"display": "Count"
}
]
@property
def _group_by_choices(self):
return [(p.id, p.text) for p in self.data_source_properties.values()]
@property
@memoized
def _is_multiselect_chart_report(self):
"""
Return True if this is a chart report aggregated by a multiselect question.
The data sources for these sorts of reports are handled differently than other reports.
"""
agg_property = self.data_source_properties[self.aggregation_field]
return agg_property.type == "question" and agg_property.source['type'] == "MSelect"
class ConfigurePieChartReportForm(ConfigureBarChartReportForm):
group_by = forms.ChoiceField(label=_("Pie Chart Segments"))
@property
def container_fieldset(self):
return crispy.Fieldset(
_('Chart Properties'),
FieldWithHelpBubble(
'group_by',
help_bubble_text=_(
"The values of the selected property will be aggregated "
"and shows as the sections of the pie chart."
),
placeholder=_(
"Select Property..."
),
),
self.user_filter_fieldset,
self.default_filter_fieldset
)
@property
def _report_charts(self):
agg = self.data_source_properties[self.aggregation_field].column_id
return [{
"type": "pie",
"aggregation_column": agg,
"value_column": "count",
}]
class ConfigureListReportForm(ConfigureNewReportBase):
report_type = 'list'
columns = JsonField(
expected_type=list,
null_values=([],),
required=True,
widget=forms.HiddenInput,
error_messages={"required": ugettext_lazy("At least one column is required")},
)
column_legend_fine_print = ugettext_noop("Add columns to your report to display information from cases or form submissions. You may rearrange the order of the columns by dragging the arrows next to the column.")
@property
def container_fieldset(self):
source_name = ''
if self.source_type == 'case':
source_name = self.report_source_id
if self.source_type == 'form':
source_name = Form.get_form(self.report_source_id).default_name()
return crispy.Fieldset(
'',
crispy.Fieldset(
_legend(
_("Rows"),
_('This report will show one row for each {name} {source}').format(
name=source_name, source=self.source_type
)
)
),
self.column_fieldset,
self.user_filter_fieldset,
self.default_filter_fieldset
)
@property
def column_fieldset(self):
return crispy.Fieldset(
_legend(_("Columns"), _(self.column_legend_fine_print)),
crispy.Div(
crispy.HTML(self.column_config_template), id="columns-table", data_bind='with: columnsList'
),
hqcrispy.HiddenFieldWithErrors('columns', data_bind="value: columnsList.serializedProperties"),
)
@property
@memoized
def initial_columns(self):
if self.existing_report:
reverse_agg_map = {
'avg': 'Average',
'sum': 'Sum',
'expand': 'Count per Choice'
}
added_multiselect_columns = set()
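# A multiselect question is stored as several report columns (one per choice);
# collapse them back into a single column view model here so the builder
# shows it as one entry.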
cols = []
for c in self.existing_report.columns:
mselect_indicator_id = self._get_multiselect_indicator_id(
c['field'], self.existing_report.config.configured_indicators
)
indicator_id = mselect_indicator_id or c['field']
display = c['display']
exists = self._column_exists(indicator_id)
if mselect_indicator_id:
if mselect_indicator_id not in added_multiselect_columns:
added_multiselect_columns.add(mselect_indicator_id)
display = MultiselectQuestionColumnOption.LABEL_DIVIDER.join(
display.split(MultiselectQuestionColumnOption.LABEL_DIVIDER)[:-1]
)
else:
continue
cols.append(
ColumnViewModel(
display_text=display,
exists_in_current_version=exists,
property=self._get_column_option_by_indicator_id(indicator_id).id if exists else None,
data_source_field=indicator_id if not exists else None,
calculation=reverse_agg_map.get(c.get('aggregation'), 'Count per Choice')
)
)
return cols
return [ColumnViewModel(
display_text='',
exists_in_current_version=True,
property=None,
data_source_field=None,
calculation=_('Count per Choice')
)]
@property
def _report_columns(self):
columns = []
for i, conf in enumerate(self.cleaned_data['columns']):
columns.extend(
self.report_column_options[conf['property']].to_column_dicts(i, conf['display_text'], "simple")
)
return columns
@property
def _report_aggregation_cols(self):
return ['doc_id']
class ConfigureTableReportForm(ConfigureListReportForm, ConfigureBarChartReportForm):
report_type = 'table'
column_legend_fine_print = ugettext_noop('Add columns for this report to aggregate. Each property you add will create a column for every value of that property. For example, if you add a column for a yes or no question, the report will show a column for "yes" and a column for "no."')
group_by = forms.ChoiceField(label=_("Show one row for each"))
@property
def container_fieldset(self):
return crispy.Fieldset(
"",
self.column_fieldset,
crispy.Fieldset(
_legend(
_("Rows"),
_('Choose which property this report will group its results by. Each value of this property will be a row in the table. For example, if you choose a yes or no question, the report will show a row for "yes" and a row for "no."'),
),
'group_by',
),
self.user_filter_fieldset,
self.default_filter_fieldset
)
@property
def _report_charts(self):
# Override the behavior inherited from ConfigureBarChartReportForm
return []
@property
def _is_multiselect_chart_report(self):
return False
@property
@memoized
def report_column_options(self):
options = super(ConfigureTableReportForm, self).report_column_options
count_col = CountColumn("Number of Cases" if self.source_type == "case" else "Number of Forms")
options[count_col.id] = count_col
return options
@property
def _report_columns(self):
agg_field_id = self.data_source_properties[self.aggregation_field].column_id
agg_field_text = self.data_source_properties[self.aggregation_field].text
columns = []
for i, conf in enumerate(self.cleaned_data['columns']):
columns.extend(
self.report_column_options[conf['property']].to_column_dicts(
i,
conf['display_text'],
conf['calculation'],
is_aggregated_on=conf["property"] == self.aggregation_field
)
)
# Add the aggregation indicator to the columns if it's not already present.
displaying_agg_column = any(
c for c in self.cleaned_data['columns'] if c['property'] == self.aggregation_field
)
if not displaying_agg_column:
columns = self._get_column_option_by_indicator_id(agg_field_id).to_column_dicts(
"agg", agg_field_text, 'simple', is_aggregated_on=True
) + columns
else:
# Don't expand the aggregation column
for c in columns:
if c['field'] == agg_field_id:
c['aggregation'] = "simple"
return columns
@property
@memoized
def initial_columns(self):
# columns are ColumnViewModels (not ColumnOptions)
columns = super(ConfigureTableReportForm, self).initial_columns
# Remove the aggregation indicator from the columns.
# It gets removed because we want it to be a column in the report,
# but we don't want it to appear in the builder.
if self.existing_report:
agg_properties = [
self._get_property_id_by_indicator_id(c)
for c in self.existing_report.aggregation_columns
]
return [c for c in columns if c.property not in agg_properties]
return columns
@property
@memoized
def _report_aggregation_cols(self):
# we want the bar chart behavior, which is reproduced here:
return [
self.data_source_properties[self.aggregation_field].column_id
]
class ConfigureWorkerReportForm(ConfigureTableReportForm):
# This is a ConfigureTableReportForm, but with a predetermined aggregation
report_type = 'worker'
column_legend_fine_print = ugettext_noop('Add columns for this report to aggregate. Each property you add will create a column for every value of that property. For example, if you add a column for a yes or no question, the report will show a column for "yes" and a column for "no".')
def __init__(self, *args, **kwargs):
super(ConfigureWorkerReportForm, self).__init__(*args, **kwargs)
self.fields.pop('group_by')
@property
def aggregation_field(self):
if self.source_type == "form":
return "username"
if self.source_type == "case":
return "computed/user_name"
@property
@memoized
def _default_case_report_filters(self):
return [
UserFilterViewModel(
exists_in_current_version=True,
property='closed',
data_source_field=None,
display_text='closed',
format='Choice',
),
UserFilterViewModel(
exists_in_current_version=True,
property='computed/user_name',
data_source_field=None,
display_text='user name',
format='Choice',
),
]
@property
def container_fieldset(self):
return crispy.Fieldset(
'',
crispy.Fieldset(
_legend(
_("Rows"),
_('This report will show one row for each mobile worker'),
)
),
self.column_fieldset,
self.user_filter_fieldset,
self.default_filter_fieldset
)
class ConfigureMapReportForm(ConfigureListReportForm):
report_type = 'map'
location = forms.ChoiceField(label="Location field")
def __init__(self, report_name, app_id, source_type, report_source_id, existing_report=None, *args, **kwargs):
super(ConfigureMapReportForm, self).__init__(
report_name, app_id, source_type, report_source_id, existing_report, *args, **kwargs
)
if self.source_type == "form":
self.fields['location'].widget = QuestionSelect(attrs={'class': 'input-large'})
else:
self.fields['location'].widget = Select2(attrs={'class': 'input-large'})
self.fields['location'].choices = self._location_choices
# Set initial value of location
if self.existing_report and existing_report.location_column_id:
existing_loc_col = existing_report.location_column_id
self.fields['location'].initial = self._get_property_id_by_indicator_id(existing_loc_col)
@property
def _location_choices(self):
return [(p.id, p.text) for p in self.data_source_properties.values()]
@property
def container_fieldset(self):
return crispy.Fieldset(
"",
self.column_fieldset,
crispy.Fieldset(
_legend(
_("Location"),
_('Choose which property represents the location.'),
),
'location',
),
self.user_filter_fieldset,
self.default_filter_fieldset
)
@property
@memoized
def initial_columns(self):
columns = super(ConfigureMapReportForm, self).initial_columns
# Remove the location indicator from the columns.
# It gets removed because we want it to be a column in the report,
# but we don't want it to appear in the builder.
if self.existing_report and self.existing_report.location_column_id:
col_id = self.existing_report.location_column_id
location_property = self._get_property_id_by_indicator_id(col_id)
return [c for c in columns if c.property != location_property]
return columns
@property
def location_field(self):
return self.cleaned_data["location"]
@property
def _report_columns(self):
loc_field_id = self.data_source_properties[self.location_field].column_id
loc_field_text = self.data_source_properties[self.location_field].text
columns = super(ConfigureMapReportForm, self)._report_columns
# Add the location indicator to the columns if it's not already present.
displaying_loc_column = bool([c for c in columns if c['field'] == loc_field_id])
if not displaying_loc_column:
columns = columns + [{
"column_id": loc_field_id,
"type": "location",
'field': loc_field_id,
'display': loc_field_text
}]
return columns
| {
"content_hash": "534785b34738cae8c4bce500f4037702",
"timestamp": "",
"source": "github",
"line_count": 1505,
"max_line_length": 289,
"avg_line_length": 39.245182724252494,
"alnum_prop": 0.5703474197480699,
"repo_name": "qedsoftware/commcare-hq",
"id": "ee3b75a8748963260e232d02163f19852112dd93",
"size": "59064",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "corehq/apps/userreports/reports/builder/forms.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ActionScript",
"bytes": "15950"
},
{
"name": "CSS",
"bytes": "508392"
},
{
"name": "HTML",
"bytes": "2869325"
},
{
"name": "JavaScript",
"bytes": "2395360"
},
{
"name": "PHP",
"bytes": "2232"
},
{
"name": "PLpgSQL",
"bytes": "125298"
},
{
"name": "Python",
"bytes": "14670713"
},
{
"name": "Shell",
"bytes": "37514"
}
],
"symlink_target": ""
} |
import subprocess,sys
from glint_arg_parser import GlintArgumentParser
import glint_platform as plat
pkg_dir = "glint-service"
def execute_command(cmd_args,input):
if input is None:
process = subprocess.Popen(cmd_args,stdout=subprocess.PIPE,stderr=subprocess.PIPE)
out,err = process.communicate()
else:
#print "Need to use use input"
process = subprocess.Popen(cmd_args,stdout=subprocess.PIPE,stdin=subprocess.PIPE,stderr=subprocess.PIPE)
out,err = process.communicate(input=input)
if err:
print "warning: %s"%err
sys.stdout.flush()
return out,err
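# Example usage (mirrors the calls below): execute_command(['yum','install','stunnel'],'y')
# runs the command and feeds "y" to any confirmation prompt via stdin.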
gap = GlintArgumentParser()
gap.init_stunnel_arg_parser()
args = gap.parser.parse_args()
if args.install:
print "Install stunnel"
if plat.isRedhat():
[out,err] = execute_command(['yum','install','stunnel'],'y')
else:
[out,err] = execute_command(['apt-get','install','stunnel'],'y')
[out,err] = execute_command(['mkdir','/etc/stunnel'],None)
[out,err] = execute_command(['openssl','req','-new','-x509','-days','365','-nodes','-out','/etc/stunnel/stunnel.pem','-keyout','/etc/stunnel/stunnel.pem'],'CA\nBC\nVIC\nUVIC\nHEPGC\nopenstack\[email protected]\n')
[out,err] = execute_command(['/usr/bin/openssl','gendh','2048','>>','/etc/stunnel/stunnel.pem'],None)
[out,err] = execute_command(['chmod','600','/etc/stunnel/stunnel.pem'],None)
[out,err] = execute_command(['mkdir','/var/run/stunnel'],None)
[out,err] = execute_command(['cp','%s/openstack-glint-stunnel'%pkg_dir,'/etc/init.d/.'],None)
[out,err] = execute_command(['chmod','755','/etc/init.d/openstack-glint-stunnel'],None)
[out,err] = execute_command(['cp','%s/dev_https'%pkg_dir,'/etc/stunnel/.'],None)
[out,err] = execute_command(['service','openstack-glint-stunnel','start'],None)
#[out,err] = execute_command(['stunnel','dev_https','&'],None)
print "started stunnel "
elif args.uninstall:
print "Uninstall stunnel"
[out,err] = execute_command(['service','openstack-glint-stunnel','stop'],None)
[out,err] = execute_command(['rm','-f','/etc/init.d/openstack-glint-stunnel'],None)
[out,err] = execute_command(['rm','-rf','/var/run/stunnel'],None)
[out,err] = execute_command(['yum','remove','stunnel'],'y')
[out,err] = execute_command(['rm','-rf','/etc/stunnel'],None)
| {
"content_hash": "4dec495914fac592b7355679e5d5abfb",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 215,
"avg_line_length": 47.87755102040816,
"alnum_prop": 0.6508951406649617,
"repo_name": "hep-gc/glint-service",
"id": "daf5abfadbbb92218f283440963f5bf9a638d0df",
"size": "2364",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "glint-service/glint_stunnel_setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "34062"
},
{
"name": "Shell",
"bytes": "7526"
}
],
"symlink_target": ""
} |
from test_framework.mininode import *
from test_framework.test_framework import StardustTestFramework
from test_framework.util import *
from test_framework.blocktools import create_block, create_coinbase
from test_framework.siphash import siphash256
from test_framework.script import CScript, OP_TRUE
'''
CompactBlocksTest -- test compact blocks (BIP 152)
'''
# TestNode: A peer we use to send messages to stardustd, and store responses.
class TestNode(SingleNodeConnCB):
def __init__(self):
SingleNodeConnCB.__init__(self)
self.last_sendcmpct = None
self.last_headers = None
self.last_inv = None
self.last_cmpctblock = None
self.block_announced = False
self.last_getdata = None
self.last_getblocktxn = None
self.last_block = None
self.last_blocktxn = None
def on_sendcmpct(self, conn, message):
self.last_sendcmpct = message
def on_block(self, conn, message):
self.last_block = message
def on_cmpctblock(self, conn, message):
self.last_cmpctblock = message
self.block_announced = True
def on_headers(self, conn, message):
self.last_headers = message
self.block_announced = True
def on_inv(self, conn, message):
self.last_inv = message
self.block_announced = True
def on_getdata(self, conn, message):
self.last_getdata = message
def on_getblocktxn(self, conn, message):
self.last_getblocktxn = message
def on_blocktxn(self, conn, message):
self.last_blocktxn = message
# Requires caller to hold mininode_lock
def received_block_announcement(self):
return self.block_announced
def clear_block_announcement(self):
with mininode_lock:
self.block_announced = False
self.last_inv = None
self.last_headers = None
self.last_cmpctblock = None
def get_headers(self, locator, hashstop):
msg = msg_getheaders()
msg.locator.vHave = locator
msg.hashstop = hashstop
self.connection.send_message(msg)
def send_header_for_blocks(self, new_blocks):
headers_message = msg_headers()
headers_message.headers = [CBlockHeader(b) for b in new_blocks]
self.send_message(headers_message)
class CompactBlocksTest(StardustTestFramework):
def __init__(self):
super().__init__()
self.setup_clean_chain = True
self.num_nodes = 1
self.utxos = []
def setup_network(self):
self.nodes = []
# Turn off segwit in this test, as compact blocks don't currently work
# with segwit. (After BIP 152 is updated to support segwit, we can
# test behavior with and without segwit enabled by adding a second node
# to the test.)
self.nodes = start_nodes(self.num_nodes, self.options.tmpdir, [["-debug", "-logtimemicros=1", "-bip9params=segwit:0:0"]])
def build_block_on_tip(self):
height = self.nodes[0].getblockcount()
tip = self.nodes[0].getbestblockhash()
mtp = self.nodes[0].getblockheader(tip)['mediantime']
block = create_block(int(tip, 16), create_coinbase(height + 1), mtp + 1)
block.solve()
return block
# Create 10 more anyone-can-spend utxo's for testing.
def make_utxos(self):
block = self.build_block_on_tip()
self.test_node.send_and_ping(msg_block(block))
assert(int(self.nodes[0].getbestblockhash(), 16) == block.sha256)
self.nodes[0].generate(100)
total_value = block.vtx[0].vout[0].nValue
out_value = total_value // 10
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(block.vtx[0].sha256, 0), b''))
for i in range(10):
tx.vout.append(CTxOut(out_value, CScript([OP_TRUE])))
tx.rehash()
block2 = self.build_block_on_tip()
block2.vtx.append(tx)
block2.hashMerkleRoot = block2.calc_merkle_root()
block2.solve()
self.test_node.send_and_ping(msg_block(block2))
assert_equal(int(self.nodes[0].getbestblockhash(), 16), block2.sha256)
self.utxos.extend([[tx.sha256, i, out_value] for i in range(10)])
return
# Test "sendcmpct":
# - No compact block announcements or getdata(MSG_CMPCT_BLOCK) unless
# sendcmpct is sent.
# - If sendcmpct is sent with version > 0, the message is ignored.
# - If sendcmpct is sent with boolean 0, then block announcements are not
# made with compact blocks.
# - If sendcmpct is then sent with boolean 1, then new block announcements
# are made with compact blocks.
def test_sendcmpct(self):
print("Testing SENDCMPCT p2p message... ")
# Make sure we get a version 0 SENDCMPCT message from our peer
def received_sendcmpct():
return (self.test_node.last_sendcmpct is not None)
got_message = wait_until(received_sendcmpct, timeout=30)
assert(got_message)
assert_equal(self.test_node.last_sendcmpct.version, 1)
tip = int(self.nodes[0].getbestblockhash(), 16)
def check_announcement_of_new_block(node, peer, predicate):
self.test_node.clear_block_announcement()
node.generate(1)
got_message = wait_until(peer.received_block_announcement, timeout=30)
assert(got_message)
with mininode_lock:
assert(predicate())
# We shouldn't get any block announcements via cmpctblock yet.
check_announcement_of_new_block(self.nodes[0], self.test_node, lambda: self.test_node.last_cmpctblock is None)
# Try one more time, this time after requesting headers.
self.test_node.clear_block_announcement()
self.test_node.get_headers(locator=[tip], hashstop=0)
wait_until(self.test_node.received_block_announcement, timeout=30)
self.test_node.clear_block_announcement()
check_announcement_of_new_block(self.nodes[0], self.test_node, lambda: self.test_node.last_cmpctblock is None and self.test_node.last_inv is not None)
# Now try a SENDCMPCT message with too-high version
sendcmpct = msg_sendcmpct()
sendcmpct.version = 2
self.test_node.send_message(sendcmpct)
check_announcement_of_new_block(self.nodes[0], self.test_node, lambda: self.test_node.last_cmpctblock is None)
# Now try a SENDCMPCT message with valid version, but announce=False
self.test_node.send_message(msg_sendcmpct())
check_announcement_of_new_block(self.nodes[0], self.test_node, lambda: self.test_node.last_cmpctblock is None)
# Finally, try a SENDCMPCT message with announce=True
sendcmpct.version = 1
sendcmpct.announce = True
self.test_node.send_message(sendcmpct)
check_announcement_of_new_block(self.nodes[0], self.test_node, lambda: self.test_node.last_cmpctblock is not None)
# Try one more time
check_announcement_of_new_block(self.nodes[0], self.test_node, lambda: self.test_node.last_cmpctblock is not None)
# Try one more time, after turning on sendheaders
self.test_node.send_message(msg_sendheaders())
check_announcement_of_new_block(self.nodes[0], self.test_node, lambda: self.test_node.last_cmpctblock is not None)
# Now turn off announcements
        sendcmpct.announce = False
        self.test_node.send_message(sendcmpct)
check_announcement_of_new_block(self.nodes[0], self.test_node, lambda: self.test_node.last_cmpctblock is None and self.test_node.last_headers is not None)
# This test actually causes stardustd to (reasonably!) disconnect us, so do this last.
def test_invalid_cmpctblock_message(self):
print("Testing invalid index in cmpctblock message...")
self.nodes[0].generate(101)
block = self.build_block_on_tip()
cmpct_block = P2PHeaderAndShortIDs()
cmpct_block.header = CBlockHeader(block)
cmpct_block.prefilled_txn_length = 1
# This index will be too high
prefilled_txn = PrefilledTransaction(1, block.vtx[0])
cmpct_block.prefilled_txn = [prefilled_txn]
self.test_node.send_and_ping(msg_cmpctblock(cmpct_block))
assert(int(self.nodes[0].getbestblockhash(), 16) == block.hashPrevBlock)
# Compare the generated shortids to what we expect based on BIP 152, given
# stardustd's choice of nonce.
def test_compactblock_construction(self):
print("Testing compactblock headers and shortIDs are correct...")
# Generate a bunch of transactions.
self.nodes[0].generate(101)
num_transactions = 25
address = self.nodes[0].getnewaddress()
for i in range(num_transactions):
self.nodes[0].sendtoaddress(address, 0.1)
# Now mine a block, and look at the resulting compact block.
self.test_node.clear_block_announcement()
block_hash = int(self.nodes[0].generate(1)[0], 16)
# Store the raw block in our internal format.
        # Zero-pad to 64 hex characters so hashes with leading zeroes round-trip.
        block = FromHex(CBlock(), self.nodes[0].getblock("%064x" % block_hash, False))
[tx.calc_sha256() for tx in block.vtx]
block.rehash()
# Don't care which type of announcement came back for this test; just
# request the compact block if we didn't get one yet.
wait_until(self.test_node.received_block_announcement, timeout=30)
with mininode_lock:
if self.test_node.last_cmpctblock is None:
self.test_node.clear_block_announcement()
inv = CInv(4, block_hash) # 4 == "CompactBlock"
self.test_node.send_message(msg_getdata([inv]))
wait_until(self.test_node.received_block_announcement, timeout=30)
# Now we should have the compactblock
header_and_shortids = None
with mininode_lock:
assert(self.test_node.last_cmpctblock is not None)
# Convert the on-the-wire representation to absolute indexes
header_and_shortids = HeaderAndShortIDs(self.test_node.last_cmpctblock.header_and_shortids)
# Check that we got the right block!
header_and_shortids.header.calc_sha256()
assert_equal(header_and_shortids.header.sha256, block_hash)
# Make sure the prefilled_txn appears to have included the coinbase
assert(len(header_and_shortids.prefilled_txn) >= 1)
assert_equal(header_and_shortids.prefilled_txn[0].index, 0)
# Check that all prefilled_txn entries match what's in the block.
for entry in header_and_shortids.prefilled_txn:
entry.tx.calc_sha256()
assert_equal(entry.tx.sha256, block.vtx[entry.index].sha256)
# Check that the cmpctblock message announced all the transactions.
assert_equal(len(header_and_shortids.prefilled_txn) + len(header_and_shortids.shortids), len(block.vtx))
# And now check that all the shortids are as expected as well.
# Determine the siphash keys to use.
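        # (Background, per BIP 152: the two siphash keys are the first two
        # little-endian 64-bit words of SHA256(block header || nonce), and each
        # shortid is the low 6 bytes of SipHash-2-4 of the transaction id under
        # those keys -- calculate_shortid() below is expected to reproduce that.)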
[k0, k1] = header_and_shortids.get_siphash_keys()
index = 0
while index < len(block.vtx):
if (len(header_and_shortids.prefilled_txn) > 0 and
header_and_shortids.prefilled_txn[0].index == index):
# Already checked prefilled transactions above
header_and_shortids.prefilled_txn.pop(0)
else:
shortid = calculate_shortid(k0, k1, block.vtx[index].sha256)
assert_equal(shortid, header_and_shortids.shortids[0])
header_and_shortids.shortids.pop(0)
index += 1
# Test that stardustd requests compact blocks when we announce new blocks
# via header or inv, and that responding to getblocktxn causes the block
# to be successfully reconstructed.
def test_compactblock_requests(self):
print("Testing compactblock requests... ")
# Try announcing a block with an inv or header, expect a compactblock
# request
for announce in ["inv", "header"]:
block = self.build_block_on_tip()
with mininode_lock:
self.test_node.last_getdata = None
if announce == "inv":
self.test_node.send_message(msg_inv([CInv(2, block.sha256)]))
else:
self.test_node.send_header_for_blocks([block])
success = wait_until(lambda: self.test_node.last_getdata is not None, timeout=30)
assert(success)
assert_equal(len(self.test_node.last_getdata.inv), 1)
assert_equal(self.test_node.last_getdata.inv[0].type, 4)
assert_equal(self.test_node.last_getdata.inv[0].hash, block.sha256)
# Send back a compactblock message that omits the coinbase
comp_block = HeaderAndShortIDs()
comp_block.header = CBlockHeader(block)
comp_block.nonce = 0
comp_block.shortids = [1] # this is useless, and wrong
self.test_node.send_and_ping(msg_cmpctblock(comp_block.to_p2p()))
assert_equal(int(self.nodes[0].getbestblockhash(), 16), block.hashPrevBlock)
# Expect a getblocktxn message.
with mininode_lock:
assert(self.test_node.last_getblocktxn is not None)
absolute_indexes = self.test_node.last_getblocktxn.block_txn_request.to_absolute()
assert_equal(absolute_indexes, [0]) # should be a coinbase request
# Send the coinbase, and verify that the tip advances.
msg = msg_blocktxn()
msg.block_transactions.blockhash = block.sha256
msg.block_transactions.transactions = [block.vtx[0]]
self.test_node.send_and_ping(msg)
assert_equal(int(self.nodes[0].getbestblockhash(), 16), block.sha256)
# Create a chain of transactions from given utxo, and add to a new block.
def build_block_with_transactions(self, utxo, num_transactions):
block = self.build_block_on_tip()
for i in range(num_transactions):
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(utxo[0], utxo[1]), b''))
tx.vout.append(CTxOut(utxo[2] - 1000, CScript([OP_TRUE])))
tx.rehash()
utxo = [tx.sha256, 0, tx.vout[0].nValue]
block.vtx.append(tx)
block.hashMerkleRoot = block.calc_merkle_root()
block.solve()
return block
# Test that we only receive getblocktxn requests for transactions that the
# node needs, and that responding to them causes the block to be
# reconstructed.
def test_getblocktxn_requests(self):
print("Testing getblocktxn requests...")
# First try announcing compactblocks that won't reconstruct, and verify
# that we receive getblocktxn messages back.
utxo = self.utxos.pop(0)
block = self.build_block_with_transactions(utxo, 5)
self.utxos.append([block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue])
comp_block = HeaderAndShortIDs()
comp_block.initialize_from_block(block)
self.test_node.send_and_ping(msg_cmpctblock(comp_block.to_p2p()))
with mininode_lock:
assert(self.test_node.last_getblocktxn is not None)
absolute_indexes = self.test_node.last_getblocktxn.block_txn_request.to_absolute()
assert_equal(absolute_indexes, [1, 2, 3, 4, 5])
msg = msg_blocktxn()
msg.block_transactions = BlockTransactions(block.sha256, block.vtx[1:])
self.test_node.send_and_ping(msg)
assert_equal(int(self.nodes[0].getbestblockhash(), 16), block.sha256)
utxo = self.utxos.pop(0)
block = self.build_block_with_transactions(utxo, 5)
self.utxos.append([block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue])
# Now try interspersing the prefilled transactions
comp_block.initialize_from_block(block, prefill_list=[0, 1, 5])
self.test_node.send_and_ping(msg_cmpctblock(comp_block.to_p2p()))
with mininode_lock:
assert(self.test_node.last_getblocktxn is not None)
absolute_indexes = self.test_node.last_getblocktxn.block_txn_request.to_absolute()
assert_equal(absolute_indexes, [2, 3, 4])
msg.block_transactions = BlockTransactions(block.sha256, block.vtx[2:5])
self.test_node.send_and_ping(msg)
assert_equal(int(self.nodes[0].getbestblockhash(), 16), block.sha256)
# Now try giving one transaction ahead of time.
utxo = self.utxos.pop(0)
block = self.build_block_with_transactions(utxo, 5)
self.utxos.append([block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue])
self.test_node.send_and_ping(msg_tx(block.vtx[1]))
assert(block.vtx[1].hash in self.nodes[0].getrawmempool())
# Prefill 4 out of the 6 transactions, and verify that only the one
# that was not in the mempool is requested.
comp_block.initialize_from_block(block, prefill_list=[0, 2, 3, 4])
self.test_node.send_and_ping(msg_cmpctblock(comp_block.to_p2p()))
with mininode_lock:
assert(self.test_node.last_getblocktxn is not None)
absolute_indexes = self.test_node.last_getblocktxn.block_txn_request.to_absolute()
assert_equal(absolute_indexes, [5])
msg.block_transactions = BlockTransactions(block.sha256, [block.vtx[5]])
self.test_node.send_and_ping(msg)
assert_equal(int(self.nodes[0].getbestblockhash(), 16), block.sha256)
# Now provide all transactions to the node before the block is
# announced and verify reconstruction happens immediately.
utxo = self.utxos.pop(0)
block = self.build_block_with_transactions(utxo, 10)
self.utxos.append([block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue])
for tx in block.vtx[1:]:
self.test_node.send_message(msg_tx(tx))
self.test_node.sync_with_ping()
# Make sure all transactions were accepted.
mempool = self.nodes[0].getrawmempool()
for tx in block.vtx[1:]:
assert(tx.hash in mempool)
# Clear out last request.
with mininode_lock:
self.test_node.last_getblocktxn = None
# Send compact block
comp_block.initialize_from_block(block, prefill_list=[0])
self.test_node.send_and_ping(msg_cmpctblock(comp_block.to_p2p()))
with mininode_lock:
# Shouldn't have gotten a request for any transaction
assert(self.test_node.last_getblocktxn is None)
# Tip should have updated
assert_equal(int(self.nodes[0].getbestblockhash(), 16), block.sha256)
# Incorrectly responding to a getblocktxn shouldn't cause the block to be
# permanently failed.
def test_incorrect_blocktxn_response(self):
print("Testing handling of incorrect blocktxn responses...")
if (len(self.utxos) == 0):
self.make_utxos()
utxo = self.utxos.pop(0)
block = self.build_block_with_transactions(utxo, 10)
self.utxos.append([block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue])
# Relay the first 5 transactions from the block in advance
for tx in block.vtx[1:6]:
self.test_node.send_message(msg_tx(tx))
self.test_node.sync_with_ping()
# Make sure all transactions were accepted.
mempool = self.nodes[0].getrawmempool()
for tx in block.vtx[1:6]:
assert(tx.hash in mempool)
# Send compact block
comp_block = HeaderAndShortIDs()
comp_block.initialize_from_block(block, prefill_list=[0])
self.test_node.send_and_ping(msg_cmpctblock(comp_block.to_p2p()))
absolute_indexes = []
with mininode_lock:
assert(self.test_node.last_getblocktxn is not None)
absolute_indexes = self.test_node.last_getblocktxn.block_txn_request.to_absolute()
assert_equal(absolute_indexes, [6, 7, 8, 9, 10])
# Now give an incorrect response.
# Note that it's possible for stardustd to be smart enough to know we're
# lying, since it could check to see if the shortid matches what we're
# sending, and eg disconnect us for misbehavior. If that behavior
# change were made, we could just modify this test by having a
# different peer provide the block further down, so that we're still
# verifying that the block isn't marked bad permanently. This is good
# enough for now.
msg = msg_blocktxn()
msg.block_transactions = BlockTransactions(block.sha256, [block.vtx[5]] + block.vtx[7:])
self.test_node.send_and_ping(msg)
# Tip should not have updated
assert_equal(int(self.nodes[0].getbestblockhash(), 16), block.hashPrevBlock)
# We should receive a getdata request
success = wait_until(lambda: self.test_node.last_getdata is not None, timeout=10)
assert(success)
assert_equal(len(self.test_node.last_getdata.inv), 1)
assert_equal(self.test_node.last_getdata.inv[0].type, 2)
assert_equal(self.test_node.last_getdata.inv[0].hash, block.sha256)
# Deliver the block
self.test_node.send_and_ping(msg_block(block))
assert_equal(int(self.nodes[0].getbestblockhash(), 16), block.sha256)
def test_getblocktxn_handler(self):
print("Testing getblocktxn handler...")
        # stardustd won't respond to getblocktxn requests for blocks more than
        # 15 blocks deep in the chain.
MAX_GETBLOCKTXN_DEPTH = 15
chain_height = self.nodes[0].getblockcount()
current_height = chain_height
while (current_height >= chain_height - MAX_GETBLOCKTXN_DEPTH):
block_hash = self.nodes[0].getblockhash(current_height)
block = FromHex(CBlock(), self.nodes[0].getblock(block_hash, False))
msg = msg_getblocktxn()
msg.block_txn_request = BlockTransactionsRequest(int(block_hash, 16), [])
num_to_request = random.randint(1, len(block.vtx))
msg.block_txn_request.from_absolute(sorted(random.sample(range(len(block.vtx)), num_to_request)))
self.test_node.send_message(msg)
success = wait_until(lambda: self.test_node.last_blocktxn is not None, timeout=10)
assert(success)
[tx.calc_sha256() for tx in block.vtx]
with mininode_lock:
assert_equal(self.test_node.last_blocktxn.block_transactions.blockhash, int(block_hash, 16))
all_indices = msg.block_txn_request.to_absolute()
for index in all_indices:
tx = self.test_node.last_blocktxn.block_transactions.transactions.pop(0)
tx.calc_sha256()
assert_equal(tx.sha256, block.vtx[index].sha256)
self.test_node.last_blocktxn = None
current_height -= 1
# Next request should be ignored, as we're past the allowed depth.
block_hash = self.nodes[0].getblockhash(current_height)
msg.block_txn_request = BlockTransactionsRequest(int(block_hash, 16), [0])
self.test_node.send_and_ping(msg)
with mininode_lock:
assert_equal(self.test_node.last_blocktxn, None)
def test_compactblocks_not_at_tip(self):
print("Testing compactblock requests/announcements not at chain tip...")
# Test that requesting old compactblocks doesn't work.
MAX_CMPCTBLOCK_DEPTH = 11
new_blocks = []
for i in range(MAX_CMPCTBLOCK_DEPTH):
self.test_node.clear_block_announcement()
new_blocks.append(self.nodes[0].generate(1)[0])
wait_until(self.test_node.received_block_announcement, timeout=30)
self.test_node.clear_block_announcement()
self.test_node.send_message(msg_getdata([CInv(4, int(new_blocks[0], 16))]))
success = wait_until(lambda: self.test_node.last_cmpctblock is not None, timeout=30)
assert(success)
self.test_node.clear_block_announcement()
self.nodes[0].generate(1)
wait_until(self.test_node.received_block_announcement, timeout=30)
self.test_node.clear_block_announcement()
self.test_node.send_message(msg_getdata([CInv(4, int(new_blocks[0], 16))]))
success = wait_until(lambda: self.test_node.last_block is not None, timeout=30)
assert(success)
with mininode_lock:
self.test_node.last_block.block.calc_sha256()
assert_equal(self.test_node.last_block.block.sha256, int(new_blocks[0], 16))
# Generate an old compactblock, and verify that it's not accepted.
cur_height = self.nodes[0].getblockcount()
hashPrevBlock = int(self.nodes[0].getblockhash(cur_height-5), 16)
block = self.build_block_on_tip()
block.hashPrevBlock = hashPrevBlock
block.solve()
comp_block = HeaderAndShortIDs()
comp_block.initialize_from_block(block)
self.test_node.send_and_ping(msg_cmpctblock(comp_block.to_p2p()))
tips = self.nodes[0].getchaintips()
found = False
for x in tips:
if x["hash"] == block.hash:
assert_equal(x["status"], "headers-only")
found = True
break
assert(found)
# Requesting this block via getblocktxn should silently fail
# (to avoid fingerprinting attacks).
msg = msg_getblocktxn()
msg.block_txn_request = BlockTransactionsRequest(block.sha256, [0])
with mininode_lock:
self.test_node.last_blocktxn = None
self.test_node.send_and_ping(msg)
with mininode_lock:
assert(self.test_node.last_blocktxn is None)
def run_test(self):
# Setup the p2p connections and start up the network thread.
self.test_node = TestNode()
connections = []
connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], self.test_node))
self.test_node.add_connection(connections[0])
NetworkThread().start() # Start up network handling in another thread
# Test logic begins here
self.test_node.wait_for_verack()
# We will need UTXOs to construct transactions in later tests.
self.make_utxos()
self.test_sendcmpct()
self.test_compactblock_construction()
self.test_compactblock_requests()
self.test_getblocktxn_requests()
self.test_getblocktxn_handler()
self.test_compactblocks_not_at_tip()
self.test_incorrect_blocktxn_response()
self.test_invalid_cmpctblock_message()
if __name__ == '__main__':
CompactBlocksTest().main()
| {
"content_hash": "df754b45d1f53602ee52700978a1b61e",
"timestamp": "",
"source": "github",
"line_count": 603,
"max_line_length": 162,
"avg_line_length": 44.346600331674956,
"alnum_prop": 0.640327586851651,
"repo_name": "ctwiz/stardust",
"id": "7a6065e3923ed2d7372b4f681c5586b1ca630610",
"size": "26952",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "qa/rpc-tests/p2p-compactblocks.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "679824"
},
{
"name": "C++",
"bytes": "4516675"
},
{
"name": "CSS",
"bytes": "1127"
},
{
"name": "Groff",
"bytes": "3831"
},
{
"name": "HTML",
"bytes": "50621"
},
{
"name": "Java",
"bytes": "2101"
},
{
"name": "M4",
"bytes": "169659"
},
{
"name": "Makefile",
"bytes": "96188"
},
{
"name": "Objective-C",
"bytes": "3778"
},
{
"name": "Objective-C++",
"bytes": "7244"
},
{
"name": "Protocol Buffer",
"bytes": "2312"
},
{
"name": "Python",
"bytes": "920958"
},
{
"name": "QMake",
"bytes": "2021"
},
{
"name": "Shell",
"bytes": "26160"
}
],
"symlink_target": ""
} |
"""SavedModel builder implementation."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from google.protobuf.any_pb2 import Any
from tensorflow.core.framework import types_pb2
from tensorflow.core.protobuf import meta_graph_pb2
from tensorflow.core.protobuf import saved_model_pb2
from tensorflow.core.protobuf import saver_pb2
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.lib.io import file_io
from tensorflow.python.ops import variables
from tensorflow.python.platform import tf_logging
from tensorflow.python.saved_model import constants
from tensorflow.python.training import saver as tf_saver
from tensorflow.python.util import compat
from tensorflow.python.util.deprecation import deprecated_args
from tensorflow.python.util.tf_export import tf_export
@tf_export("saved_model.builder.SavedModelBuilder")
class SavedModelBuilder(object):
"""Builds the `SavedModel` protocol buffer and saves variables and assets.
The `SavedModelBuilder` class provides functionality to build a `SavedModel`
protocol buffer. Specifically, this allows multiple meta graphs to be saved as
part of a single language-neutral `SavedModel`, while sharing variables and
assets.
To build a SavedModel, the first meta graph must be saved with variables.
Subsequent meta graphs will simply be saved with their graph definitions. If
assets need to be saved and written or copied to disk, they can be provided
  when the meta graph def is added. If multiple meta graph defs are associated
  with an asset of the same name, only the first version is retained.
Each meta graph added to the SavedModel must be annotated with tags. The tags
provide a means to identify the specific meta graph to load and restore, along
with the shared set of variables and assets.
Typical usage for the `SavedModelBuilder`:
```python
...
builder = tf.saved_model.builder.SavedModelBuilder(export_dir)
with tf.Session(graph=tf.Graph()) as sess:
...
builder.add_meta_graph_and_variables(sess,
["foo-tag"],
signature_def_map=foo_signatures,
assets_collection=foo_assets)
...
with tf.Session(graph=tf.Graph()) as sess:
...
builder.add_meta_graph(["bar-tag", "baz-tag"])
...
builder.save()
```
"""
def __init__(self, export_dir):
self._saved_model = saved_model_pb2.SavedModel()
self._saved_model.saved_model_schema_version = (
constants.SAVED_MODEL_SCHEMA_VERSION)
self._export_dir = export_dir
if file_io.file_exists(export_dir):
raise AssertionError(
"Export directory already exists. Please specify a different export "
"directory: %s" % export_dir)
file_io.recursive_create_dir(self._export_dir)
# Boolean to track whether variables and assets corresponding to the
# SavedModel have been saved. Specifically, the first meta graph to be added
# MUST use the add_meta_graph_and_variables() API. Subsequent add operations
# on the SavedModel MUST use the add_meta_graph() API which does not save
# weights.
self._has_saved_variables = False
def _save_and_write_assets(self, assets_collection_to_add=None):
"""Saves asset to the meta graph and writes asset files to disk.
Args:
assets_collection_to_add: The collection where the asset paths are setup.
"""
asset_filename_map = _maybe_save_assets(assets_collection_to_add)
# Return if there are no assets to write.
if not asset_filename_map:
tf_logging.info("No assets to write.")
return
assets_destination_dir = os.path.join(
compat.as_bytes(self._export_dir),
compat.as_bytes(constants.ASSETS_DIRECTORY))
if not file_io.file_exists(assets_destination_dir):
file_io.recursive_create_dir(assets_destination_dir)
# Copy each asset from source path to destination path.
for asset_basename, asset_source_filepath in asset_filename_map.items():
asset_destination_filepath = os.path.join(
compat.as_bytes(assets_destination_dir),
compat.as_bytes(asset_basename))
# Only copy the asset file to the destination if it does not already
# exist. This is to ensure that an asset with the same name defined as
# part of multiple graphs is only copied the first time.
if not file_io.file_exists(asset_destination_filepath):
file_io.copy(asset_source_filepath, asset_destination_filepath)
tf_logging.info("Assets written to: %s",
compat.as_text(assets_destination_dir))
def _maybe_add_main_op(self, main_op):
"""Adds main op to the SavedModel.
Args:
main_op: Main op to run as part of graph initialization. If None, no
main op will be added to the graph.
Raises:
TypeError: if main op is provided but is not of type `Operation`.
ValueError: if the Graph already contains an init op.
"""
if main_op is None:
return
if not isinstance(main_op, ops.Operation):
raise TypeError("main_op needs to be an Operation: %r" % main_op)
# Validate that no other init ops have been added to this graph already.
# We check main_op and legacy_init_op for thoroughness and explicitness.
for init_op_key in (constants.MAIN_OP_KEY, constants.LEGACY_INIT_OP_KEY):
if ops.get_collection(init_op_key):
raise ValueError(
"Graph already contains one or more main ops under the "
"collection {}.".format(init_op_key))
ops.add_to_collection(constants.MAIN_OP_KEY, main_op)
def _add_train_op(self, train_op):
"""Add train op to the SavedModel.
Note that this functionality is in development, and liable to be
moved elsewhere.
Args:
train_op: Op or group of ops that are used for training. These are
stored as a collection with key TRAIN_OP_KEY, but not executed.
Raises:
      TypeError: if train_op is not of type `Tensor` or `Operation`.
"""
if train_op is not None:
if (not isinstance(train_op, ops.Tensor) and
not isinstance(train_op, ops.Operation)):
raise TypeError("train_op needs to be a Tensor or Op: %r" % train_op)
ops.add_to_collection(constants.TRAIN_OP_KEY, train_op)
def _tag_and_add_meta_graph(self, meta_graph_def, tags, signature_def_map):
"""Tags the meta graph def and adds it to the SavedModel.
Tags the meta graph def with the supplied tags, adds signature defs to it if
provided and appends the meta graph def to the SavedModel proto.
Args:
meta_graph_def: The meta graph def to add to the SavedModel.
tags: The set of tags to annotate the meta graph def with.
signature_def_map: The map of signature defs to be added to the meta graph
def.
"""
for tag in tags:
meta_graph_def.meta_info_def.tags.append(tag)
if signature_def_map is not None:
for key in signature_def_map:
meta_graph_def.signature_def[key].CopyFrom(signature_def_map[key])
proto_meta_graph_def = self._saved_model.meta_graphs.add()
proto_meta_graph_def.CopyFrom(meta_graph_def)
def _validate_tensor_info(self, tensor_info):
"""Validates the `TensorInfo` proto.
Checks if the `encoding` (`name` or `coo_sparse`) and `dtype` fields exist
and are non-empty.
Args:
tensor_info: `TensorInfo` protocol buffer to validate.
Raises:
AssertionError: If the `name` or `dtype` fields of the supplied
`TensorInfo` proto are not populated.
"""
if tensor_info is None:
raise AssertionError(
"All TensorInfo protos used in the SignatureDefs must have the name "
"and dtype fields set.")
if tensor_info.WhichOneof("encoding") is None:
# TODO(soergel) validate each of the fields of coo_sparse
raise AssertionError(
"All TensorInfo protos used in the SignatureDefs must have one of "
"the 'encoding' fields (e.g., name or coo_sparse) set: %s"
% tensor_info)
if tensor_info.dtype is types_pb2.DT_INVALID:
raise AssertionError(
"All TensorInfo protos used in the SignatureDefs must have the dtype "
"field set: %s" % tensor_info)
def _validate_signature_def_map(self, signature_def_map):
"""Validates the `SignatureDef` entries in the signature def map.
Validation of entries in the signature def map includes ensuring that the
`name` and `dtype` fields of the TensorInfo protos of the `inputs` and
`outputs` of each `SignatureDef` are populated.
Args:
signature_def_map: The map of signature defs to be validated.
"""
if signature_def_map is not None:
for signature_def_key in signature_def_map:
signature_def = signature_def_map[signature_def_key]
inputs = signature_def.inputs
outputs = signature_def.outputs
for inputs_key in inputs:
self._validate_tensor_info(inputs[inputs_key])
for outputs_key in outputs:
self._validate_tensor_info(outputs[outputs_key])
def _add_collections(
self, assets_collection, main_op, train_op):
"""Add asset and op collections to be saved."""
# Save asset files and write them to disk, if any.
self._save_and_write_assets(assets_collection)
self._maybe_add_main_op(main_op)
self._add_train_op(train_op)
def _maybe_create_saver(self, saver=None):
"""Creates a sharded saver if one does not already exist."""
if not saver:
# Initialize a saver to generate a sharded output for all saveables in the
# current scope.
saver = tf_saver.Saver(
variables._all_saveable_objects(), # pylint: disable=protected-access
sharded=True,
write_version=saver_pb2.SaverDef.V2,
allow_empty=True)
return saver
@deprecated_args(None,
"Pass your op to the equivalent parameter main_op instead.",
"legacy_init_op")
def add_meta_graph(self,
tags,
signature_def_map=None,
assets_collection=None,
legacy_init_op=None,
clear_devices=False,
main_op=None,
strip_default_attrs=False,
saver=None):
# pylint: disable=line-too-long
"""Adds the current meta graph to the SavedModel.
Creates a Saver in the current scope and uses the Saver to export the meta
graph def. Invoking this API requires the `add_meta_graph_and_variables()`
API to have been invoked before.
Args:
tags: The set of tags to annotate the meta graph def with.
signature_def_map: The map of signature defs to be added to the meta graph
def.
assets_collection: Assets collection to be saved with SavedModel. Note
that this collection should be a subset of the assets saved as part of
the first meta graph in the SavedModel.
legacy_init_op: Legacy support for op or group of ops to execute after the
restore op upon a load. Deprecated; please use main_op instead.
clear_devices: Set to true if the device info on the default graph should
be cleared.
main_op: Op or group of ops to execute when the graph is loaded. Note
that when the main_op is specified it is run after the restore op at
load-time.
strip_default_attrs: Boolean. If `True`, default-valued attributes will be
removed from the NodeDefs. For a detailed guide, see
[Stripping Default-Valued Attributes](https://github.com/tensorflow/tensorflow/blob/master/tensorflow/python/saved_model/README.md#stripping-default-valued-attributes).
saver: An instance of tf.train.Saver that will be used to export the
metagraph. If None, a sharded Saver that restores all variables will
be used.
Raises:
AssertionError: If the variables for the SavedModel have not been saved
yet, or if the graph already contains one or more legacy init ops.
"""
# pylint: enable=line-too-long
if not self._has_saved_variables:
raise AssertionError(
"Graph state including variables and assets has not been saved yet. "
"Please invoke `add_meta_graph_and_variables()` first.")
# Validate the signature def map to ensure all included TensorInfos are
# properly populated.
self._validate_signature_def_map(signature_def_map)
# legacy_init_op is deprecated, and going away in TF 2.0.
# Re-mapping to main_op, as treatment is identical regardless.
main_op = main_op or legacy_init_op
# Add assets and ops
self._add_collections(assets_collection, main_op, None)
saver = self._maybe_create_saver(saver)
# The graph almost certainly previously contained at least one Saver, and
# possibly several (e.g. one for loading a pretrained embedding, and another
# for the model weights). Removing the preexisting ones was the
# motivation for the clear_extraneous_savers option, but it turns out that
# there are edge cases where that option breaks the graph. Until that is
# resolved, we just leave the option set to False for now.
# TODO(soergel): Reinstate clear_extraneous_savers=True when possible.
meta_graph_def = saver.export_meta_graph(
clear_devices=clear_devices, strip_default_attrs=strip_default_attrs)
# Tag the meta graph def and add it to the SavedModel.
self._tag_and_add_meta_graph(meta_graph_def, tags, signature_def_map)
@deprecated_args(None,
"Pass your op to the equivalent parameter main_op instead.",
"legacy_init_op")
def add_meta_graph_and_variables(self,
sess,
tags,
signature_def_map=None,
assets_collection=None,
legacy_init_op=None,
clear_devices=False,
main_op=None,
strip_default_attrs=False,
saver=None):
# pylint: disable=line-too-long
"""Adds the current meta graph to the SavedModel and saves variables.
Creates a Saver to save the variables from the provided session. Exports the
corresponding meta graph def. This function assumes that the variables to be
saved have been initialized. For a given `SavedModelBuilder`, this API must
be called exactly once and for the first meta graph to save. For subsequent
meta graph defs to be added, the `add_meta_graph()` API must be used.
Args:
sess: The TensorFlow session from which to save the meta graph and
variables.
tags: The set of tags with which to save the meta graph.
signature_def_map: The map of signature def map to add to the meta graph
def.
assets_collection: Assets collection to be saved with SavedModel.
legacy_init_op: Legacy support for op or group of ops to execute after the
restore op upon a load. Deprecated; please use main_op instead.
clear_devices: Set to true if the device info on the default graph should
be cleared.
main_op: Op or group of ops to execute when the graph is loaded. Note
that when the main_op is specified it is run after the restore op at
load-time.
strip_default_attrs: Boolean. If `True`, default-valued attributes will be
removed from the NodeDefs. For a detailed guide, see
[Stripping Default-Valued Attributes](https://github.com/tensorflow/tensorflow/blob/master/tensorflow/python/saved_model/README.md#stripping-default-valued-attributes).
saver: An instance of tf.train.Saver that will be used to export the
metagraph and save variables. If None, a sharded Saver that restores
all variables will be used.
"""
# pylint: enable=line-too-long
if self._has_saved_variables:
raise AssertionError("Graph state including variables and assets has "
"already been saved. Please invoke "
"`add_meta_graph()` instead.")
# Validate the signature def map to ensure all included TensorInfos are
# properly populated.
self._validate_signature_def_map(signature_def_map)
# legacy_init_op is deprecated, and going away in TF 2.0.
# Re-mapping to main_op, as treatment is identical regardless.
main_op = main_op or legacy_init_op
# Add assets and ops
self._add_collections(assets_collection, main_op, None)
# Create the variables sub-directory, if it does not exist.
variables_dir = os.path.join(
compat.as_text(self._export_dir),
compat.as_text(constants.VARIABLES_DIRECTORY))
if not file_io.file_exists(variables_dir):
file_io.recursive_create_dir(variables_dir)
variables_path = os.path.join(
compat.as_text(variables_dir),
compat.as_text(constants.VARIABLES_FILENAME))
saver = self._maybe_create_saver(saver)
# Save the variables. Also, disable writing the checkpoint state proto. The
# file is not used during SavedModel loading. In addition, since a
# SavedModel can be copied or moved, this avoids the checkpoint state to
# become outdated.
saver.save(sess, variables_path, write_meta_graph=False, write_state=False)
# Export the meta graph def.
# The graph almost certainly previously contained at least one Saver, and
# possibly several (e.g. one for loading a pretrained embedding, and another
# for the model weights). Removing the preexisting ones was the
# motivation for the clear_extraneous_savers option, but it turns out that
# there are edge cases where that option breaks the graph. Until that is
# resolved, we just leave the option set to False for now.
# TODO(soergel): Reinstate clear_extraneous_savers=True when possible.
meta_graph_def = saver.export_meta_graph(
clear_devices=clear_devices, strip_default_attrs=strip_default_attrs)
# Tag the meta graph def and add it to the SavedModel.
self._tag_and_add_meta_graph(meta_graph_def, tags, signature_def_map)
# Mark this instance of SavedModel as having saved variables, such that
# subsequent attempts to save variables will fail.
self._has_saved_variables = True
def save(self, as_text=False):
"""Writes a `SavedModel` protocol buffer to disk.
The function writes the SavedModel protocol buffer to the export directory
in serialized format.
Args:
as_text: Writes the SavedModel protocol buffer in text format to disk.
Returns:
The path to which the SavedModel protocol buffer was written.
"""
if not file_io.file_exists(self._export_dir):
file_io.recursive_create_dir(self._export_dir)
if as_text:
path = os.path.join(
compat.as_bytes(self._export_dir),
compat.as_bytes(constants.SAVED_MODEL_FILENAME_PBTXT))
file_io.write_string_to_file(path, str(self._saved_model))
else:
path = os.path.join(
compat.as_bytes(self._export_dir),
compat.as_bytes(constants.SAVED_MODEL_FILENAME_PB))
file_io.write_string_to_file(path, self._saved_model.SerializeToString())
tf_logging.info("SavedModel written to: %s", compat.as_text(path))
return path
def _maybe_save_assets(assets_collection_to_add=None):
"""Saves assets to the meta graph.
Args:
assets_collection_to_add: The collection where the asset paths are setup.
Returns:
    A dict mapping the asset basenames used when saving to the original full
    paths of the corresponding asset files.
Raises:
ValueError: Indicating an invalid filepath tensor.
"""
# Map of target file names to original filenames
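  # (Illustrative: with a single asset the final map might look like
  #  {"vocab.txt": "/path/to/vocab.txt"}, using hypothetical names.)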
asset_filename_map = {}
if assets_collection_to_add is None:
tf_logging.info("No assets to save.")
return asset_filename_map
# Iterate over the supplied asset collection, build the `AssetFile` proto
# and add them to the collection with key `constants.ASSETS_KEY`, in the
# graph.
for asset_tensor in assets_collection_to_add:
asset_source_filepath = _asset_path_from_tensor(asset_tensor)
if not asset_source_filepath:
raise ValueError("Invalid asset filepath tensor %s" % asset_tensor)
asset_filename = _get_asset_filename_to_add(
asset_source_filepath, asset_filename_map)
# Build `AssetFile` proto and add it to the asset collection in the graph.
# Note that this should be done even when the file is a duplicate of an
# already-added file, as the tensor reference should still exist.
_add_asset_to_collection(asset_filename, asset_tensor)
# In the cases where we are adding a duplicate, this will result in the
# last of the filepaths being the one used for copying the file to the
# SavedModel. Since the files in question are the same, it doesn't matter
# either way.
asset_filename_map[asset_filename] = asset_source_filepath
tf_logging.info("Assets added to graph.")
return asset_filename_map
def _get_asset_filename_to_add(asset_filepath, asset_filename_map):
"""Get a unique basename to add to the SavedModel if this file is unseen.
Assets come from users as full paths, and we save them out to the
SavedModel as basenames. In some cases, the basenames collide. Here,
we dedupe asset basenames by first checking if the file is the same,
and, if different, generate and return an index-suffixed basename
that can be used to add the asset to the SavedModel.
Args:
asset_filepath: the full path to the asset that is being saved
asset_filename_map: a dict of filenames used for saving the asset in
the SavedModel to full paths from which the filenames were derived.
Returns:
Uniquified filename string if the file is not a duplicate, or the original
filename if the file has already been seen and saved.
"""
asset_filename = os.path.basename(asset_filepath)
if asset_filename not in asset_filename_map:
# This is an unseen asset. Safe to add.
return asset_filename
other_asset_filepath = asset_filename_map[asset_filename]
if other_asset_filepath == asset_filepath:
# This is the same file, stored twice in the collection list. No need
# to make unique.
return asset_filename
# Else, asset_filename is in the map, and the filepath is different. Dedupe.
if not file_io.filecmp(asset_filepath, other_asset_filepath):
# Files are different; dedupe filenames.
return _get_unique_asset_filename(asset_filename, asset_filename_map)
# Files are the same; don't make unique.
return asset_filename
def _get_unique_asset_filename(asset_filename, asset_filename_map):
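  # Illustrative behavior: a second, different file also named "vocab.txt"
  # (hypothetical name) would be returned as "vocab.txt_1", a third as
  # "vocab.txt_2", and so on.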
i = 1
unique_filename = asset_filename
while unique_filename in asset_filename_map:
unique_filename = compat.as_bytes("_").join(
[compat.as_bytes(asset_filename), compat.as_bytes(str(i))])
i += 1
return unique_filename
def _asset_path_from_tensor(path_tensor):
"""Returns the filepath value stored in constant `path_tensor`.
Args:
path_tensor: Tensor of a file-path.
Returns:
The string value i.e. path of the tensor, if valid.
Raises:
TypeError if tensor does not match expected op type, dtype or value.
"""
if not isinstance(path_tensor, ops.Tensor):
raise TypeError("Asset path tensor must be a Tensor.")
if path_tensor.op.type != "Const":
raise TypeError("Asset path tensor must be of type constant.")
if path_tensor.dtype != dtypes.string:
raise TypeError("Asset path tensor must be of dtype string.")
str_values = path_tensor.op.get_attr("value").string_val
if len(str_values) != 1:
raise TypeError("Asset path tensor must be a scalar.")
return str_values[0]
def _add_asset_to_collection(asset_filename, asset_tensor):
"""Builds an asset proto and adds it to the asset collection of the graph.
Args:
asset_filename: The filename of the asset to be added.
asset_tensor: The asset tensor used to populate the tensor info of the
asset proto.
"""
asset_proto = meta_graph_pb2.AssetFileDef()
asset_proto.filename = asset_filename
asset_proto.tensor_info.name = asset_tensor.name
asset_any_proto = Any()
asset_any_proto.Pack(asset_proto)
ops.add_to_collection(constants.ASSETS_KEY, asset_any_proto)
| {
"content_hash": "b0d171df32658ba1ecf0630684e0158a",
"timestamp": "",
"source": "github",
"line_count": 598,
"max_line_length": 176,
"avg_line_length": 41.118729096989966,
"alnum_prop": 0.6848997519215909,
"repo_name": "aselle/tensorflow",
"id": "8c985a7c2fa2b515c2daed1349996dd30f6d7ce1",
"size": "25278",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tensorflow/python/saved_model/builder_impl.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "9258"
},
{
"name": "C",
"bytes": "321697"
},
{
"name": "C#",
"bytes": "7259"
},
{
"name": "C++",
"bytes": "46003590"
},
{
"name": "CMake",
"bytes": "207738"
},
{
"name": "Dockerfile",
"bytes": "6905"
},
{
"name": "Go",
"bytes": "1210133"
},
{
"name": "HTML",
"bytes": "4680032"
},
{
"name": "Java",
"bytes": "829230"
},
{
"name": "Jupyter Notebook",
"bytes": "2578736"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "52243"
},
{
"name": "Objective-C",
"bytes": "15650"
},
{
"name": "Objective-C++",
"bytes": "99265"
},
{
"name": "PHP",
"bytes": "2140"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "PureBasic",
"bytes": "25356"
},
{
"name": "Python",
"bytes": "39898642"
},
{
"name": "Ruby",
"bytes": "533"
},
{
"name": "Shell",
"bytes": "447009"
},
{
"name": "Smarty",
"bytes": "6870"
}
],
"symlink_target": ""
} |
"""List cluster command."""
from apitools.base.py import list_pager
from googlecloudsdk.api_lib.dataproc import constants
from googlecloudsdk.calliope import base
from googlecloudsdk.core import properties
@base.ReleaseTracks(base.ReleaseTrack.GA)
class List(base.ListCommand):
"""View a list of clusters in a project.
View a list of clusters in a project.
## EXAMPLES
To see the list of all clusters, run:
$ {command}
"""
@staticmethod
def Args(parser):
base.URI_FLAG.RemoveFromParser(parser)
base.PAGE_SIZE_FLAG.SetDefault(parser, constants.DEFAULT_PAGE_SIZE)
def Collection(self):
return 'dataproc.clusters'
def Run(self, args):
client = self.context['dataproc_client']
messages = self.context['dataproc_messages']
project = properties.VALUES.core.project.Get(required=True)
region = self.context['dataproc_region']
request = self.GetRequest(messages, project, region, args)
return list_pager.YieldFromList(
client.projects_regions_clusters,
request,
limit=args.limit, field='clusters',
batch_size=args.page_size,
batch_size_attribute='pageSize')
@staticmethod
def GetRequest(messages, project, region, args):
return messages.DataprocProjectsRegionsClustersListRequest(
projectId=project, region=region)
@base.ReleaseTracks(base.ReleaseTrack.ALPHA, base.ReleaseTrack.BETA)
class ListBeta(List):
"""View a list of clusters in a project.
View a list of clusters in a project. An optional filter can be used to
constrain the clusters returned. Filters are case-sensitive and have the
following syntax:
field = value [AND [field = value]] ...
where `field` is one of `status.state`, `clusterName`, or `labels.[KEY]`,
and `[KEY]` is a label key. `value` can be ```*``` to match all values.
`status.state` can be one of the following: `ACTIVE`, `INACTIVE`,
`CREATING`, `RUNNING`, `ERROR`, `DELETING`, or `UPDATING`. `ACTIVE`
contains the `CREATING`, `UPDATING`, and `RUNNING` states. `INACTIVE`
contains the `DELETING` and `ERROR` states. `clusterName` is the name of the
cluster provided at creation time. Only the logical `AND` operator is
supported; space-separated items are treated as having an implicit `AND`
operator.
## EXAMPLES
To see the list of all clusters, run:
$ {command}
To show a cluster whose name is `mycluster`, run:
$ {command} --filter='clusterName = mycluster'
To see the list of all clusters with particular labels, run:
$ {command} --filter='labels.env = staging AND labels.starred = *'
To see a list of all active clusters with particular labels, run:
$ {command} --filter='status.state = ACTIVE labels.env = staging AND labels.starred = *'
"""
@staticmethod
def GetRequest(messages, project, region, args):
# Explicitly null out args.filter if present because by default args.filter
# also acts as a postfilter to the things coming back from the backend
backend_filter = None
if args.filter:
backend_filter = args.filter
args.filter = None
return messages.DataprocProjectsRegionsClustersListRequest(
projectId=project, region=region, filter=backend_filter)
| {
"content_hash": "126fe77de58164e801f1cce06e0f418a",
"timestamp": "",
"source": "github",
"line_count": 101,
"max_line_length": 92,
"avg_line_length": 31.95049504950495,
"alnum_prop": 0.7111868608614813,
"repo_name": "KaranToor/MA450",
"id": "59815caa539554db03796f424b05b5c6ae143ab3",
"size": "3823",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "google-cloud-sdk/lib/surface/dataproc/clusters/list.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "3162"
},
{
"name": "CSS",
"bytes": "1930"
},
{
"name": "HTML",
"bytes": "13381"
},
{
"name": "Java",
"bytes": "151442"
},
{
"name": "JavaScript",
"bytes": "4906"
},
{
"name": "Makefile",
"bytes": "1636"
},
{
"name": "Objective-C",
"bytes": "13335"
},
{
"name": "PHP",
"bytes": "9086"
},
{
"name": "Pascal",
"bytes": "62"
},
{
"name": "Python",
"bytes": "19710731"
},
{
"name": "Roff",
"bytes": "2069494"
},
{
"name": "Ruby",
"bytes": "690"
},
{
"name": "Shell",
"bytes": "32272"
},
{
"name": "Smarty",
"bytes": "4968"
},
{
"name": "SourcePawn",
"bytes": "616"
},
{
"name": "Swift",
"bytes": "14225"
}
],
"symlink_target": ""
} |
"""
MINDBODY Public API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: v6
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from swagger_client.models.get_contact_logs_request import GetContactLogsRequest # noqa: E501
from swagger_client.rest import ApiException
class TestGetContactLogsRequest(unittest.TestCase):
"""GetContactLogsRequest unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testGetContactLogsRequest(self):
"""Test GetContactLogsRequest"""
# FIXME: construct object with mandatory attributes with example values
# model = swagger_client.models.get_contact_logs_request.GetContactLogsRequest() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| {
"content_hash": "09e76a0dc8a164ed99e386a09753e8ba",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 119,
"avg_line_length": 25.81578947368421,
"alnum_prop": 0.7074413863404689,
"repo_name": "mindbody/API-Examples",
"id": "4bffc02832941b88f8c0b86a31c232c399c20e57",
"size": "998",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "SDKs/Python/test/test_get_contact_logs_request.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "PHP",
"bytes": "3610259"
},
{
"name": "Python",
"bytes": "2338642"
},
{
"name": "Ruby",
"bytes": "2284441"
},
{
"name": "Shell",
"bytes": "5058"
}
],
"symlink_target": ""
} |
from tw.api import CSSLink, JSLink, JSSource
from tw.forms.fields import InputField,SingleSelectField,TextArea,RadioButtonList,Form, ListFieldSet, HiddenField,MultipleSelectField
from tg import config
FIXED_WIDTH_CLASS = "width-250"
OPTION_WIDTH_CLASS = "width-220"
FIXED_HEIGHT_CLASS = "height-70"
class RPACBasicWidget(InputField):
css_class = FIXED_WIDTH_CLASS
#*********************************************
# fields class
#*********************************************
class RPACHidden(HiddenField):
displays_on = config.default_renderer
class RPACText(RPACBasicWidget):
displays_on = config.default_renderer
type = "text"
class RPACSelect(SingleSelectField):
displays_on = config.default_renderer
css_class = FIXED_WIDTH_CLASS
class RPACMultipleSelect(MultipleSelectField):
displays_on = config.default_renderer
css_class = " ".join([OPTION_WIDTH_CLASS,"jqery_multiSelect"])
class RPACTextarea(TextArea):
displays_on = config.default_renderer
css_class = " ".join([FIXED_WIDTH_CLASS,FIXED_HEIGHT_CLASS])
class RPACRadio(RadioButtonList):
displays_on = config.default_renderer
template = "gapproject.templates.widgets.selection_list"
class RPACJQueryFields(RPACBasicWidget):
params = ["jquery_class"]
jquery_class = ""
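    # update_params runs at render time; appending jquery_class to the CSS
    # classes is presumably how the page's jQuery plugins (datePicker, numeric,
    # ajaxSearchField, ...) locate and enhance the rendered input.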
def update_params(self, d):
super(RPACJQueryFields, self).update_params(d)
if self.jquery_class not in d.css_classes :
d.css_classes.append(self.jquery_class)
class RPACSearchText(RPACJQueryFields):
type = "text"
jquery_class = "ajaxSearchField"
class RPACCalendarPicker(RPACJQueryFields):
type = "text"
jquery_class = "datePicker"
class RPACNumeric(RPACJQueryFields):
type = "text"
jquery_class = "numeric"
class RPACAjaxText(RPACJQueryFields):
type = "text"
jquery_class = "ajaxSearchField"
class RPACRequiredMixin():
params = ["attrs","isRequired"]
class RPACRequiredTextField(RPACRequiredMixin,RPACSearchText):
pass
class RPACRequiredSingleSelectField(RPACRequiredMixin,RPACSelect):
pass
class RPACNumberText(RPACJQueryFields):
type = "text"
jquery_class = "v_is_number"
class RPACList(ListFieldSet):
def __init__(self, id=None, parent=None, children=[], **kw):
        # Call the parent initializer (the original super() call was a no-op).
        super(RPACList, self).__init__(id, parent, children, **kw)
        self.children = [RPACText()]
#*********************************************
# form class
#*********************************************
class RPACForm(Form):
template = "gapproject.templates.widgets.form1"
def __init__(self, id=None, parent=None, children=[], **kw):
super(Form, self).__init__(id, parent, children, **kw)
class RPACNoForm(Form):
template = "gapproject.templates.widgets.form2"
def __init__(self, id=None, parent=None, children=[], ** kw):
super(Form, self).__init__(id, parent, children, ** kw)
class RPACSubmitForm(Form):
template = "gapproject.templates.widgets.form3"
def __init__(self, id=None, parent=None, children=[], **kw):
super(Form, self).__init__(id, parent, children, **kw)
class RPACDesplay(Form):
template = "gapproject.templates.widgets.column1"
def __init__(self, id=None, parent=None, children=[], **kw):
super(Form, self).__init__(id, parent, children, **kw)
class RPACHiddenForm(Form):
template = "gapproject.templates.widgets.column2"
def __init__(self, id=None, parent=None, children=[], ** kw):
super(Form, self).__init__(id, parent, children, ** kw) | {
"content_hash": "f9cbc1452f9275b2e5f6a9837a708913",
"timestamp": "",
"source": "github",
"line_count": 121,
"max_line_length": 133,
"avg_line_length": 29.520661157024794,
"alnum_prop": 0.6413773796192609,
"repo_name": "LamCiuLoeng/gap",
"id": "991b8221f86b2390c4a8213311dde38e08a15560",
"size": "3597",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gapproject/widgets/components.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "56292"
},
{
"name": "CoffeeScript",
"bytes": "906"
},
{
"name": "JavaScript",
"bytes": "220573"
},
{
"name": "Python",
"bytes": "503067"
},
{
"name": "SQL",
"bytes": "2365"
}
],
"symlink_target": ""
} |
from google.cloud import aiplatform_v1
def sample_add_execution_events():
# Create a client
client = aiplatform_v1.MetadataServiceClient()
# Initialize request argument(s)
request = aiplatform_v1.AddExecutionEventsRequest(
execution="execution_value",
)
# Make the request
response = client.add_execution_events(request=request)
# Handle the response
print(response)
# [END aiplatform_generated_aiplatform_v1_MetadataService_AddExecutionEvents_sync]
| {
"content_hash": "d5c7f83c00efd5bccf0a6d2b86ae0ac1",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 82,
"avg_line_length": 26.31578947368421,
"alnum_prop": 0.732,
"repo_name": "googleapis/python-aiplatform",
"id": "b6b14e7038325ae0d26b2c82ffa5fea23531f8bb",
"size": "1528",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "samples/generated_samples/aiplatform_generated_aiplatform_v1_metadata_service_add_execution_events_sync.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2050"
},
{
"name": "Python",
"bytes": "23977004"
},
{
"name": "Shell",
"bytes": "30668"
}
],
"symlink_target": ""
} |
import os
import sys
from keras.applications.vgg16 import VGG16
from keras.models import Sequential, Model
from keras.layers import Input, Activation, Dropout, Flatten, Dense
from keras.preprocessing import image
import numpy as np
if len(sys.argv) != 2:
print("usage: python predict.py [filename]")
sys.exit(1)
filename = sys.argv[1]
print('input:', filename)
result_dir = 'results'
img_height, img_width = 150, 150
channels = 3
# VGG16
input_tensor = Input(shape=(img_height, img_width, channels))
vgg16_model = VGG16(include_top=False, weights='imagenet', input_tensor=input_tensor)
# FC
top_model = Sequential()
top_model.add(Flatten(input_shape=vgg16_model.output_shape[1:]))
top_model.add(Dense(256, activation='relu'))
top_model.add(Dropout(0.5))
top_model.add(Dense(1, activation='sigmoid'))
# Connect the VGG16 base and the FC top model
model = Model(input=vgg16_model.input, output=top_model(vgg16_model.output))
# Load the fine-tuned weights
model.load_weights(os.path.join(result_dir, 'finetuning.h5'))
model.compile(loss='binary_crossentropy',
optimizer='adam',
metrics=['accuracy'])
# model.summary()
# Load the image and convert it to a 4D tensor
img = image.load_img(filename, target_size=(img_height, img_width))
x = image.img_to_array(img)
x = np.expand_dims(x, axis=0)
# The training data was normalized with ImageDataGenerator's rescale, so the
# same preprocessing is required here!
# Note: forgetting this step makes the results come out wrong.
x = x / 255.0
# print(x)
# print(x.shape)
# Predict the class
# The input is a single image, so take only element [0]
pred = model.predict(x)[0]
print(pred)
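# A minimal sketch of interpreting the single sigmoid output. The class mapping
# is an assumption: with flow_from_directory, Keras indexes classes
# alphabetically, so for a cats/dogs layout class 1 is typically "dogs".
if pred[0] > 0.5:
    print('predicted class 1 (dog, assuming alphabetical class indexing)')
else:
    print('predicted class 0 (cat, assuming alphabetical class indexing)')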
| {
"content_hash": "5760412d84942003a60d68006e5ff3b3",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 85,
"avg_line_length": 24.896551724137932,
"alnum_prop": 0.7257617728531855,
"repo_name": "aidiary/keras_examples",
"id": "6e13feda3079e6d2ab963defd997df29125376be",
"size": "1626",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vgg16/dogs_vs_cats/predict.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "135491"
},
{
"name": "Shell",
"bytes": "1066"
}
],
"symlink_target": ""
} |
import os
import six
# NOTE(jokke): simplified transition to py3, behaves like py2 xrange
from six.moves import range
from glance.common import crypt
from glance.common import utils
from glance.tests import utils as test_utils
class UtilsTestCase(test_utils.BaseTestCase):
def test_encryption(self):
        # Check that the original plaintext and the decrypted ciphertext match
# Check keys of the three allowed lengths
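        # (16, 24 and 32 characters -- presumably matching the AES-128/192/256
        # key sizes accepted by crypt.urlsafe_encrypt.)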
key_list = ["1234567890abcdef",
"12345678901234567890abcd",
"1234567890abcdef1234567890ABCDEF"]
plaintext_list = ['']
blocksize = 64
for i in range(3 * blocksize):
text = os.urandom(i)
if six.PY3:
text = text.decode('latin1')
plaintext_list.append(text)
for key in key_list:
for plaintext in plaintext_list:
ciphertext = crypt.urlsafe_encrypt(key, plaintext, blocksize)
self.assertIsInstance(ciphertext, bytes)
if six.PY3:
self.assertNotEqual(ciphertext, plaintext.encode('utf-8'))
else:
self.assertNotEqual(ciphertext, plaintext)
text = crypt.urlsafe_decrypt(key, ciphertext)
self.assertIsInstance(text, str)
self.assertEqual(plaintext, text)
def test_empty_metadata_headers(self):
"""Ensure unset metadata is not encoded in HTTP headers"""
metadata = {
'foo': 'bar',
'snafu': None,
'bells': 'whistles',
'unset': None,
'empty': '',
'properties': {
'distro': '',
'arch': None,
'user': 'nobody',
},
}
headers = utils.image_meta_to_http_headers(metadata)
self.assertNotIn('x-image-meta-snafu', headers)
        self.assertNotIn('x-image-meta-unset', headers)
self.assertNotIn('x-image-meta-snafu', headers)
self.assertNotIn('x-image-meta-property-arch', headers)
self.assertEqual('bar', headers.get('x-image-meta-foo'))
self.assertEqual('whistles', headers.get('x-image-meta-bells'))
self.assertEqual('', headers.get('x-image-meta-empty'))
self.assertEqual('', headers.get('x-image-meta-property-distro'))
self.assertEqual('nobody', headers.get('x-image-meta-property-user'))
| {
"content_hash": "1ac945d265438863a83d76131b931de9",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 78,
"avg_line_length": 36.40298507462686,
"alnum_prop": 0.5830258302583026,
"repo_name": "vuntz/glance",
"id": "b04e00c61e18301c15b047e5d35d0a047054e234",
"size": "3080",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "glance/tests/unit/test_misc.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "3964511"
},
{
"name": "Shell",
"bytes": "7860"
}
],
"symlink_target": ""
} |
"""
This example demonstrates one way to access the weights of a custom skflow
model. It is otherwise identical to the standard MNIST convolutional code.
"""
from sklearn import metrics
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
import skflow
### Download and load MNIST data.
mnist = input_data.read_data_sets('MNIST_data')
### Linear classifier.
classifier = skflow.TensorFlowLinearClassifier(
n_classes=10, batch_size=100, steps=1000, learning_rate=0.01)
classifier.fit(mnist.train.images, mnist.train.labels)
score = metrics.accuracy_score(mnist.test.labels, classifier.predict(mnist.test.images))
print('Accuracy: {0:f}'.format(score))
### Convolutional network
def max_pool_2x2(tensor_in):
return tf.nn.max_pool(tensor_in, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1],
padding='SAME')
def conv_model(X, y):
# reshape X to 4d tensor with 2nd and 3rd dimensions being image width and height
# final dimension being the number of color channels
X = tf.reshape(X, [-1, 28, 28, 1])
# first conv layer will compute 32 features for each 5x5 patch
with tf.variable_scope('conv_layer1'):
h_conv1 = skflow.ops.conv2d(X, n_filters=32, filter_shape=[5, 5],
bias=True, activation=tf.nn.relu)
h_pool1 = max_pool_2x2(h_conv1)
# second conv layer will compute 64 features for each 5x5 patch
with tf.variable_scope('conv_layer2'):
h_conv2 = skflow.ops.conv2d(h_pool1, n_filters=64, filter_shape=[5, 5],
bias=True, activation=tf.nn.relu)
h_pool2 = max_pool_2x2(h_conv2)
# reshape tensor into a batch of vectors
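    # after two rounds of 2x2 max pooling a 28x28 image is 7x7, with 64 feature maps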
h_pool2_flat = tf.reshape(h_pool2, [-1, 7 * 7 * 64])
# densely connected layer with 1024 neurons
h_fc1 = skflow.ops.dnn(h_pool2_flat, [1024], activation=tf.nn.relu, keep_prob=0.5)
return skflow.models.logistic_regression(h_fc1, y)
# Training and predicting
classifier = skflow.TensorFlowEstimator(
model_fn=conv_model, n_classes=10, batch_size=100, steps=20000,
learning_rate=0.001)
classifier.fit(mnist.train.images, mnist.train.labels)
score = metrics.accuracy_score(mnist.test.labels, classifier.predict(mnist.test.images))
print('Accuracy: {0:f}'.format(score))
# Examining fitted weights
## General usage is classifier.get_tensor_value('foo')
## 'foo' must be the variable scope of the desired tensor followed by the
## graph path.
## To figure out the right scope and path, enable logging and then inspect
## the available variable names with TensorBoard or a text editor on the log file.
## First Convolutional Layer
print('1st Convolutional Layer weights and Bias')
print(classifier.get_tensor_value('conv_layer1/convolution/filters:0'))
print(classifier.get_tensor_value('conv_layer1/convolution/bias:0'))
## Second Convolutional Layer
print('2nd Convolutional Layer weights and Bias')
print(classifier.get_tensor_value('conv_layer2/convolution/filters:0'))
print(classifier.get_tensor_value('conv_layer2/convolution/bias:0'))
## Densely Connected Layer
print('Densely Connected Layer weights')
print(classifier.get_tensor_value('dnn/layer0/Linear/Matrix:0'))
## Logistic Regression weights
print('Logistic Regression weights')
print(classifier.get_tensor_value('logistic_regression/weights:0'))
| {
"content_hash": "19ae715847161a0d71be77291fd59a35",
"timestamp": "",
"source": "github",
"line_count": 84,
"max_line_length": 91,
"avg_line_length": 40.07142857142857,
"alnum_prop": 0.7180629827688652,
"repo_name": "Resly/pipeline",
"id": "9dab12866c02a912807fef3123f2b553f0134624",
"size": "3987",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "jupyterhub.ml/notebooks/zz_old/TensorFlow/SkFlow_DEPRECATED/mnist_weights.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "36325"
},
{
"name": "Batchfile",
"bytes": "21218"
},
{
"name": "C",
"bytes": "1759"
},
{
"name": "C++",
"bytes": "50538"
},
{
"name": "CSS",
"bytes": "441446"
},
{
"name": "Cuda",
"bytes": "3113"
},
{
"name": "Go",
"bytes": "9555"
},
{
"name": "HTML",
"bytes": "48376774"
},
{
"name": "Java",
"bytes": "108962"
},
{
"name": "JavaScript",
"bytes": "539670"
},
{
"name": "Jupyter Notebook",
"bytes": "18176491"
},
{
"name": "Makefile",
"bytes": "357"
},
{
"name": "Protocol Buffer",
"bytes": "137774"
},
{
"name": "Python",
"bytes": "667334"
},
{
"name": "Scala",
"bytes": "366964"
},
{
"name": "Shell",
"bytes": "110692"
},
{
"name": "XSLT",
"bytes": "26188"
}
],
"symlink_target": ""
} |
from classy import Class
import matplotlib.pyplot as plt
import numpy as np
params = {
'output': 'tCl lCl',
'l_max_scalars': 2508,
'lensing': 'yes',
'P_k_ini type': 'external_Pk',
'command': 'python /home/andrew/Research/tools/class_public-2.4.3/external_Pk/generate_Pk_cosines.py',
'custom1': 0,
'custom2': 0,
'custom3': 0,
'custom4': 0,
'custom5': 0}
# Get the unperturbed Cls for comparison
cosmo = Class()
cosmo.set(params)
cosmo.compute()
clso=cosmo.lensed_cl(2508)['tt'][30:]
ell = cosmo.lensed_cl(2508)['ell'][30:]
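# Scale to l(l+1)/(4*pi) * Cl in muK^2 (T_CMB = 2.726 K)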
for i in range(len(clso)):
clso[i]=ell[i]*(ell[i]+1)/(4*np.pi)*((2.726e6)**2)*clso[i]
a=np.zeros(5)
cosmo.struct_cleanup()
cosmo.empty()
dcls=np.zeros([clso.shape[0],5])
h=1e-6
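# For each of the five custom parameters, perturb its amplitude by h and
# record the one-sided finite-difference response of the Cls as a column of dcls.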
for m in range(5):
a[m]=h
# Define your cosmology (what is not specified will be set to CLASS default parameters)
params = {
'output': 'tCl lCl',
'l_max_scalars': 2508,
'lensing': 'yes',
'P_k_ini type': 'external_Pk',
'command': 'python /home/andrew/Research/tools/class_public-2.4.3/external_Pk/generate_Pk_cosines.py',
'custom1': a[0],
'custom2': a[1],
'custom3': a[2],
'custom4': a[3],
'custom5': a[4]}
# Create an instance of the CLASS wrapper
cosmo = Class()
# Set the parameters to the cosmological code
cosmo.set(params)
    # Run the whole code. Depending on the requested output, more or fewer
    # CLASS modules are called. For instance, with no output requested,
    # CLASS only computes background quantities and thus runs almost
    # instantaneously.
# This is equivalent to the beginning of the `main` routine of CLASS,
# with all the struct_init() methods called.
cosmo.compute()
# Access the lensed cl until l=2000
cls = cosmo.lensed_cl(2508)['tt'][30:]
ell = cosmo.lensed_cl(2508)['ell'][30:]
for i in range(len(cls)):
cls[i]=ell[i]*(ell[i]+1)/(4*np.pi)*((2.726e6)**2)*cls[i]
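    # Column m is the response of the scaled Cls to the m-th custom parameter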
dcls[:,m]=(cls-clso)/h
    # Clean CLASS (the equivalent of struct_free() in the `main` of CLASS).
    # This step is essential when running in a loop over different
    # cosmologies, as you will saturate your memory very quickly if you
    # omit it.
cosmo.struct_cleanup()
a[m]=0
    # If you want to change the cosmology completely, you should also clean
    # the arguments; if you are simply looping over different values of the
    # same parameters, this step is not needed.
cosmo.empty()
#dcls=responses[amps[max_index-1]][1:,0:]
np.savetxt('xmat_cosines.txt',dcls, fmt='%1.4e')
#np.savetxt('pert_size.txt',[amps[max_index-1]*2.41e-9,amps[max_index]*2.41e-9], fmt='%1.4e')
| {
"content_hash": "fd04f35ace102d513d925854192b4264",
"timestamp": "",
"source": "github",
"line_count": 80,
"max_line_length": 107,
"avg_line_length": 32.4375,
"alnum_prop": 0.6666666666666666,
"repo_name": "aarrasmi/Dimension-Reduction-Preliminary",
"id": "b9077880ac9dc1d8a47bddb33e4cb646dc5baff7",
"size": "2595",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "clresponse_cosines.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "4384"
}
],
"symlink_target": ""
} |