repo_name (string, 5-100 chars) | path (string, 4-299 chars) | copies (class, 990 values) | size (string, 4-7 chars) | content (string, 666-1.03M chars) | license (class, 15 values) | hash (int64, -9,223,351,895,964,839,000 to 9,223,297,778B) | line_mean (float64, 3.17-100) | line_max (int64, 7-1k) | alpha_frac (float64, 0.25-0.98) | autogenerated (bool, 1 class) |
---|---|---|---|---|---|---|---|---|---|---|
pkdevbox/trac | trac/versioncontrol/web_ui/browser.py | 2 | 44862 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2003-2014 Edgewall Software
# Copyright (C) 2003-2005 Jonas Borgström <[email protected]>
# Copyright (C) 2005-2007 Christian Boos <[email protected]>
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
#
# Author: Jonas Borgström <[email protected]>
import re
from datetime import datetime, timedelta
from fnmatch import fnmatchcase
from genshi.builder import tag
from trac.config import BoolOption, ListOption, Option
from trac.core import *
from trac.mimeview.api import IHTMLPreviewAnnotator, Mimeview, is_binary
from trac.perm import IPermissionRequestor, PermissionError
from trac.resource import Resource, ResourceNotFound
from trac.util import as_bool, embedded_numbers
from trac.util.datefmt import http_date, to_datetime, utc
from trac.util.html import Markup, escape
from trac.util.text import exception_to_unicode, shorten_line
from trac.util.translation import _, cleandoc_
from trac.versioncontrol.api import NoSuchChangeset, RepositoryManager
from trac.versioncontrol.web_ui.util import *
from trac.web.api import IRequestHandler, RequestDone
from trac.web.chrome import (INavigationContributor, add_ctxtnav, add_link,
add_script, add_stylesheet, prevnext_nav,
web_context)
from trac.wiki.api import IWikiMacroProvider, IWikiSyntaxProvider, parse_args
from trac.wiki.formatter import format_to_html, format_to_oneliner
CHUNK_SIZE = 4096
class IPropertyRenderer(Interface):
"""Render node properties in TracBrowser and TracChangeset views."""
def match_property(name, mode):
"""Indicate whether this renderer can treat the given property
`mode` is the current rendering context, which can be:
- 'browser' rendered in the browser view
- 'changeset' rendered in the changeset view as a node property
- 'revprop' rendered in the changeset view as a revision property
Other identifiers might be used by plugins, so it's advised to simply
ignore unknown modes.
Returns a quality number, ranging from 0 (unsupported) to 9
(''perfect'' match).
"""
def render_property(name, mode, context, props):
"""Render the given property.
`name` is the property name as given to `match()`,
`mode` is the same as for `match_property`,
`context` is the context for the node being rendered
(useful when the rendering depends on the node kind) and
`props` is the collection of the corresponding properties
(i.e. the `node.get_properties()`).
The rendered result can be one of the following:
- `None`: the property will be skipped
- an `unicode` value: the property will be displayed as text
- a `RenderedProperty` instance: the property will only be displayed
using the instance's `content` attribute, and the other attributes
will also be used in some display contexts (like `revprop`)
- `Markup` or other Genshi content: the property will be displayed
normally, using that content as a block-level markup
"""
class RenderedProperty(object):
def __init__(self, name=None, name_attributes=None,
content=None, content_attributes=None):
self.name = name
self.name_attributes = name_attributes
self.content = content
self.content_attributes = content_attributes
class DefaultPropertyRenderer(Component):
"""Default version control property renderer."""
implements(IPropertyRenderer)
def match_property(self, name, mode):
return 1
def render_property(self, name, mode, context, props):
# No special treatment besides respecting newlines in values.
value = props[name]
if value and '\n' in value:
value = Markup(''.join(['<br />%s' % escape(v)
for v in value.split('\n')]))
return value
class WikiPropertyRenderer(Component):
"""Wiki text property renderer."""
implements(IPropertyRenderer)
wiki_properties = ListOption('browser', 'wiki_properties',
'trac:description',
doc="""Comma-separated list of version control properties to render
as wiki content in the repository browser.
""")
oneliner_properties = ListOption('browser', 'oneliner_properties',
'trac:summary',
doc="""Comma-separated list of version control properties to render
as oneliner wiki content in the repository browser.
""")
def match_property(self, name, mode):
return 4 if name in self.wiki_properties \
or name in self.oneliner_properties else 0
def render_property(self, name, mode, context, props):
if name in self.wiki_properties:
return format_to_html(self.env, context, props[name])
else:
return format_to_oneliner(self.env, context, props[name])
class TimeRange(object):
min = datetime(1, 1, 1, 0, 0, 0, 0, utc) # tz aware version of datetime.min
def __init__(self, base):
self.oldest = self.newest = base
self._total = None
def seconds_between(self, dt1, dt2):
delta = dt1 - dt2
return delta.days * 24 * 3600 + delta.seconds
def to_seconds(self, dt):
return self.seconds_between(dt, TimeRange.min)
def from_seconds(self, secs):
return TimeRange.min + timedelta(*divmod(secs, 24 * 3600))
def relative(self, datetime):
if self._total is None:
self._total = float(self.seconds_between(self.newest, self.oldest))
age = 1.0
if self._total:
age = self.seconds_between(datetime, self.oldest) / self._total
return age
def insert(self, datetime):
self._total = None
self.oldest = min(self.oldest, datetime)
self.newest = max(self.newest, datetime)
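# --- Editor's note: illustrative sketch (not part of the original file). ---
# TimeRange normalizes commit dates onto [0.0, 1.0]: 0.0 for the oldest date
# inserted so far, 1.0 for the newest. The browser feeds the result to the
# colorizer returned by BrowserModule.get_custom_colorizer() below.
#
#   tr = TimeRange(datetime(2014, 1, 1, tzinfo=utc))
#   tr.insert(datetime(2014, 12, 31, tzinfo=utc))
#   tr.relative(datetime(2014, 7, 1, tzinfo=utc))   # roughly 0.5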
class BrowserModule(Component):
implements(INavigationContributor, IPermissionRequestor, IRequestHandler,
IWikiSyntaxProvider, IHTMLPreviewAnnotator,
IWikiMacroProvider)
property_renderers = ExtensionPoint(IPropertyRenderer)
realm = RepositoryManager.source_realm
downloadable_paths = ListOption('browser', 'downloadable_paths',
'/trunk, /branches/*, /tags/*',
doc="""List of repository paths that can be downloaded.
Leave this option empty if you want to disable all downloads, otherwise
set it to a comma-separated list of authorized paths (those paths are
glob patterns, i.e. "*" can be used as a wild card). In a
multi-repository environment, the path must be qualified with the
repository name if the path does not point to the default repository
(e.g. /reponame/trunk). Note that a simple prefix matching is
performed on the paths, so aliases won't get automatically resolved.
""")
color_scale = BoolOption('browser', 'color_scale', True,
doc="""Enable colorization of the ''age'' column.
This uses the same color scale as the source code annotation:
blue is older, red is newer.
""")
NEWEST_COLOR = (255, 136, 136)
newest_color = Option('browser', 'newest_color', repr(NEWEST_COLOR),
doc="""(r,g,b) color triple to use for the color corresponding
to the newest color, for the color scale used in ''blame'' or
the browser ''age'' column if `color_scale` is enabled.
""")
OLDEST_COLOR = (136, 136, 255)
oldest_color = Option('browser', 'oldest_color', repr(OLDEST_COLOR),
doc="""(r,g,b) color triple to use for the color corresponding
to the oldest color, for the color scale used in ''blame'' or
the browser ''age'' column if `color_scale` is enabled.
""")
intermediate_point = Option('browser', 'intermediate_point', '',
doc="""If set to a value between 0 and 1 (exclusive), this will be the
point chosen to set the `intermediate_color` for interpolating
the color value.
""")
intermediate_color = Option('browser', 'intermediate_color', '',
doc="""(r,g,b) color triple to use for the color corresponding
to the intermediate color, if two linear interpolations are used
for the color scale (see `intermediate_point`).
If not set, the intermediate color between `oldest_color` and
`newest_color` will be used.
""")
render_unsafe_content = BoolOption('browser', 'render_unsafe_content',
'false',
"""Whether raw files should be rendered in the browser, or only made
downloadable.
Pretty much any file may be interpreted as HTML by the browser,
which allows a malicious user to create a file containing cross-site
scripting attacks.
For open repositories where anyone can check-in a file, it is
recommended to leave this option disabled.""")
hidden_properties = ListOption('browser', 'hide_properties', 'svk:merge',
doc="""Comma-separated list of version control properties to hide from
the repository browser.
""")
# public methods
def get_custom_colorizer(self):
"""Returns a converter for values from [0.0, 1.0] to a RGB triple."""
def interpolate(old, new, value):
# Provides a linearly interpolated color triple for `value`
# which must be a floating point value between 0.0 and 1.0
return tuple([int(b + (a - b) * value) for a, b in zip(new, old)])
def parse_color(rgb, default):
# Get three ints out of a `rgb` string or return `default`
try:
t = tuple([int(v) for v in re.split(r'(\d+)', rgb)[1::2]])
return t if len(t) == 3 else default
except ValueError:
return default
newest_color = parse_color(self.newest_color, self.NEWEST_COLOR)
oldest_color = parse_color(self.oldest_color, self.OLDEST_COLOR)
try:
intermediate = float(self.intermediate_point)
except ValueError:
intermediate = None
if intermediate:
intermediate_color = parse_color(self.intermediate_color, None)
if not intermediate_color:
intermediate_color = tuple([(a + b) / 2 for a, b in
zip(newest_color, oldest_color)])
def colorizer(value):
if value <= intermediate:
value = value / intermediate
return interpolate(oldest_color, intermediate_color, value)
else:
value = (value - intermediate) / (1.0 - intermediate)
return interpolate(intermediate_color, newest_color, value)
else:
def colorizer(value):
return interpolate(oldest_color, newest_color, value)
return colorizer
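# --- Editor's note: illustrative sketch (not part of the original file);
# `browser_module` stands for a BrowserModule(env) instance. The returned
# colorizer maps an age ratio to an (r, g, b) triple, interpolating between
# oldest_color and newest_color (optionally via intermediate_color):
#
#   colorize = browser_module.get_custom_colorizer()
#   colorize(0.0)   # oldest_color, (136, 136, 255) by default
#   colorize(1.0)   # newest_color, (255, 136, 136) by default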
# INavigationContributor methods
def get_active_navigation_item(self, req):
return 'browser'
def get_navigation_items(self, req):
rm = RepositoryManager(self.env)
if any(repos.is_viewable(req.perm) for repos
in rm.get_real_repositories()):
yield ('mainnav', 'browser',
tag.a(_('Browse Source'), href=req.href.browser()))
# IPermissionRequestor methods
def get_permission_actions(self):
return ['BROWSER_VIEW', 'FILE_VIEW']
# IRequestHandler methods
def match_request(self, req):
match = re.match(r'/(export|browser|file)(/.*)?$', req.path_info)
if match:
mode, path = match.groups()
if mode == 'export':
if path and '/' in path:
path_elts = path.split('/', 2)
if len(path_elts) != 3:
return False
path = path_elts[2]
req.args['rev'] = path_elts[1]
req.args['format'] = 'raw'
elif mode == 'file':
req.redirect(req.href.browser(path, rev=req.args.get('rev'),
format=req.args.get('format')),
permanent=True)
req.args['path'] = path or '/'
return True
def process_request(self, req):
presel = req.args.get('preselected')
if presel and (presel + '/').startswith(req.href.browser() + '/'):
req.redirect(presel)
path = req.args.get('path', '/')
rev = req.args.get('rev', '')
if rev.lower() in ('', 'head'):
rev = None
format = req.args.get('format')
order = req.args.get('order', 'name').lower()
desc = 'desc' in req.args
rm = RepositoryManager(self.env)
all_repositories = rm.get_all_repositories()
reponame, repos, path = rm.get_repository_by_path(path)
# Repository index
show_index = not reponame and path == '/'
if show_index:
if repos and (as_bool(all_repositories[''].get('hidden'))
or not repos.is_viewable(req.perm)):
repos = None
if not repos and reponame:
raise ResourceNotFound(_("Repository '%(repo)s' not found",
repo=reponame))
if reponame and reponame != repos.reponame: # Redirect alias
qs = req.query_string
req.redirect(req.href.browser(repos.reponame or None, path)
+ ('?' + qs if qs else ''))
reponame = repos.reponame if repos else None
# Find node for the requested path/rev
context = web_context(req)
node = None
changeset = None
display_rev = lambda rev: rev
if repos:
try:
if rev:
rev = repos.normalize_rev(rev)
# If `rev` is `None`, we'll try to reuse `None` consistently,
# as a special shortcut to the latest revision.
rev_or_latest = rev or repos.youngest_rev
node = get_existing_node(req, repos, path, rev_or_latest)
except NoSuchChangeset as e:
raise ResourceNotFound(e, _('Invalid changeset number'))
if node:
try:
# use changeset instance to retrieve branches and tags
changeset = repos.get_changeset(node.rev)
except NoSuchChangeset:
pass
context = context.child(repos.resource.child(self.realm, path,
version=rev_or_latest))
display_rev = repos.display_rev
# Prepare template data
path_links = get_path_links(req.href, reponame, path, rev,
order, desc)
repo_data = dir_data = file_data = None
if show_index:
repo_data = self._render_repository_index(
context, all_repositories, order, desc)
if node:
if not node.is_viewable(req.perm):
raise PermissionError('BROWSER_VIEW' if node.isdir else
'FILE_VIEW', node.resource, self.env)
if node.isdir:
if format in ('zip',): # extension point here...
self._render_zip(req, context, repos, node, rev)
# not reached
dir_data = self._render_dir(req, repos, node, rev, order, desc)
elif node.isfile:
file_data = self._render_file(req, context, repos, node, rev)
if not repos and not (repo_data and repo_data['repositories']):
# If no viewable repositories, check permission instead of
# repos.is_viewable()
req.perm.require('BROWSER_VIEW')
if show_index:
raise ResourceNotFound(_("No viewable repositories"))
else:
raise ResourceNotFound(_("No node %(path)s", path=path))
quickjump_data = properties_data = None
if node and not req.is_xhr:
properties_data = self.render_properties(
'browser', context, node.get_properties())
quickjump_data = list(repos.get_quickjump_entries(rev))
data = {
'context': context, 'reponame': reponame, 'repos': repos,
'repoinfo': all_repositories.get(reponame or ''),
'path': path, 'rev': node and node.rev, 'stickyrev': rev,
'display_rev': display_rev, 'changeset': changeset,
'created_path': node and node.created_path,
'created_rev': node and node.created_rev,
'properties': properties_data,
'path_links': path_links,
'order': order, 'desc': 1 if desc else None,
'repo': repo_data, 'dir': dir_data, 'file': file_data,
'quickjump_entries': quickjump_data,
'wiki_format_messages': \
self.config['changeset'].getbool('wiki_format_messages'),
'xhr': req.is_xhr, # Remove in 1.3.1
}
if req.is_xhr: # render and return the content only
return 'dir_entries.html', data, None
if dir_data or repo_data:
add_script(req, 'common/js/expand_dir.js')
add_script(req, 'common/js/keyboard_nav.js')
# Links for contextual navigation
if node:
if node.isfile:
prev_rev = repos.previous_rev(rev=node.created_rev,
path=node.created_path)
if prev_rev:
href = req.href.browser(reponame,
node.created_path, rev=prev_rev)
add_link(req, 'prev', href,
_('Revision %(num)s', num=display_rev(prev_rev)))
if rev is not None:
add_link(req, 'up', req.href.browser(reponame,
node.created_path))
next_rev = repos.next_rev(rev=node.created_rev,
path=node.created_path)
if next_rev:
href = req.href.browser(reponame, node.created_path,
rev=next_rev)
add_link(req, 'next', href,
_('Revision %(num)s', num=display_rev(next_rev)))
prevnext_nav(req, _('Previous Revision'), _('Next Revision'),
_('Latest Revision'))
else:
if path != '/':
add_link(req, 'up', path_links[-2]['href'],
_('Parent directory'))
add_ctxtnav(req, tag.a(_('Last Change'),
href=req.href.changeset(node.created_rev, reponame,
node.created_path)))
if node.isfile:
annotate = data['file']['annotate']
if annotate:
add_ctxtnav(req, _('Normal'),
title=_('View file without annotations'),
href=req.href.browser(reponame,
node.created_path,
rev=rev))
if annotate != 'blame':
add_ctxtnav(req, _('Blame'),
title=_('Annotate each line with the last '
'changed revision '
'(this can be time consuming...)'),
href=req.href.browser(reponame,
node.created_path,
rev=rev,
annotate='blame'))
add_ctxtnav(req, _('Revision Log'),
href=req.href.log(reponame, path, rev=rev))
path_url = repos.get_path_url(path, rev)
if path_url:
if path_url.startswith('//'):
path_url = req.scheme + ':' + path_url
add_ctxtnav(req, _('Repository URL'), href=path_url)
add_stylesheet(req, 'common/css/browser.css')
return 'browser.html', data, None
# Internal methods
def _render_repository_index(self, context, all_repositories, order, desc):
# Color scale for the age column
timerange = custom_colorizer = None
if self.color_scale:
custom_colorizer = self.get_custom_colorizer()
rm = RepositoryManager(self.env)
repositories = []
for reponame, repoinfo in all_repositories.iteritems():
if not reponame or as_bool(repoinfo.get('hidden')):
continue
try:
repos = rm.get_repository(reponame)
except TracError as err:
entry = (reponame, repoinfo, None, None,
exception_to_unicode(err), None)
else:
if repos:
if not repos.is_viewable(context.perm):
continue
try:
youngest = repos.get_changeset(repos.youngest_rev)
except NoSuchChangeset:
youngest = None
if self.color_scale and youngest:
if not timerange:
timerange = TimeRange(youngest.date)
else:
timerange.insert(youngest.date)
raw_href = self._get_download_href(context.href, repos,
None, None)
entry = (reponame, repoinfo, repos, youngest, None,
raw_href)
else:
entry = (reponame, repoinfo, None, None, u"\u2013", None)
if entry[4] is not None: # Check permission in case of error
root = Resource('repository', reponame).child(self.realm, '/')
if 'BROWSER_VIEW' not in context.perm(root):
continue
repositories.append(entry)
# Ordering of repositories
if order == 'date':
def repo_order((reponame, repoinfo, repos, youngest, err, href)):
return (youngest.date if youngest else to_datetime(0),
embedded_numbers(reponame.lower()))
elif order == 'author':
def repo_order((reponame, repoinfo, repos, youngest, err, href)):
return (youngest.author.lower() if youngest else '',
embedded_numbers(reponame.lower()))
else:
def repo_order((reponame, repoinfo, repos, youngest, err, href)):
return embedded_numbers(reponame.lower())
repositories = sorted(repositories, key=repo_order, reverse=desc)
return {'repositories' : repositories,
'timerange': timerange, 'colorize_age': custom_colorizer}
def _render_dir(self, req, repos, node, rev, order, desc):
req.perm(node.resource).require('BROWSER_VIEW')
download_href = self._get_download_href
# Entries metadata
class entry(object):
_copy = 'name rev created_rev kind isdir path content_length' \
.split()
__slots__ = _copy + ['raw_href']
def __init__(self, node):
for f in entry._copy:
setattr(self, f, getattr(node, f))
self.raw_href = download_href(req.href, repos, node, rev)
entries = [entry(n) for n in node.get_entries()
if n.is_viewable(req.perm)]
changes = get_changes(repos, [i.created_rev for i in entries],
self.log)
if rev:
newest = repos.get_changeset(rev).date
else:
newest = datetime.now(req.tz)
# Color scale for the age column
timerange = custom_colorizer = None
if self.color_scale:
timerange = TimeRange(newest)
max_s = req.args.get('range_max_secs')
min_s = req.args.get('range_min_secs')
parent_range = [timerange.from_seconds(long(s))
for s in [max_s, min_s] if s]
this_range = [c.date for c in changes.values() if c]
for dt in this_range + parent_range:
timerange.insert(dt)
custom_colorizer = self.get_custom_colorizer()
# Ordering of entries
if order == 'date':
def file_order(a):
return (changes[a.created_rev].date,
embedded_numbers(a.name.lower()))
elif order == 'size':
def file_order(a):
return (a.content_length,
embedded_numbers(a.name.lower()))
elif order == 'author':
def file_order(a):
return (changes[a.created_rev].author.lower(),
embedded_numbers(a.name.lower()))
else:
def file_order(a):
return embedded_numbers(a.name.lower())
dir_order = 1 if desc else -1
def browse_order(a):
return dir_order if a.isdir else 0, file_order(a)
entries = sorted(entries, key=browse_order, reverse=desc)
# ''Zip Archive'' alternate link
zip_href = self._get_download_href(req.href, repos, node, rev)
if zip_href:
add_link(req, 'alternate', zip_href, _('Zip Archive'),
'application/zip', 'zip')
return {'entries': entries, 'changes': changes,
'timerange': timerange, 'colorize_age': custom_colorizer,
'range_max_secs': (timerange and
timerange.to_seconds(timerange.newest)),
'range_min_secs': (timerange and
timerange.to_seconds(timerange.oldest)),
}
def _iter_nodes(self, node):
stack = [node]
while stack:
node = stack.pop()
yield node
if node.isdir:
stack.extend(sorted(node.get_entries(),
key=lambda x: x.name,
reverse=True))
def _render_zip(self, req, context, repos, root_node, rev=None):
if not self.is_path_downloadable(repos, root_node.path):
raise TracError(_("Path not available for download"))
req.perm(context.resource).require('FILE_VIEW')
root_path = root_node.path.rstrip('/')
if root_path:
archive_name = root_node.name
else:
archive_name = repos.reponame or 'repository'
filename = '%s-%s.zip' % (archive_name, root_node.rev)
render_zip(req, filename, repos, root_node, self._iter_nodes)
def _render_file(self, req, context, repos, node, rev=None):
req.perm(node.resource).require('FILE_VIEW')
mimeview = Mimeview(self.env)
# MIME type detection
content = node.get_processed_content()
chunk = content.read(CHUNK_SIZE)
mime_type = node.content_type
if not mime_type or mime_type == 'application/octet-stream':
mime_type = mimeview.get_mimetype(node.name, chunk) or \
mime_type or 'text/plain'
# Eventually send the file directly
format = req.args.get('format')
if format in ('raw', 'txt'):
req.send_response(200)
req.send_header('Content-Type',
'text/plain' if format == 'txt' else mime_type)
req.send_header('Last-Modified', http_date(node.last_modified))
if rev is None:
req.send_header('Pragma', 'no-cache')
req.send_header('Cache-Control', 'no-cache')
req.send_header('Expires', 'Fri, 01 Jan 1999 00:00:00 GMT')
if not self.render_unsafe_content:
# Force browser to download files instead of rendering
# them, since they might contain malicious code enabling
# XSS attacks
req.send_header('Content-Disposition', 'attachment')
req.end_headers()
# Note: don't pass an iterable instance to RequestDone, instead
# call req.write() with each chunk here to avoid SEGVs (#11805)
while chunk:
req.write(chunk)
chunk = content.read(CHUNK_SIZE)
raise RequestDone
else:
# The changeset corresponding to the last change on `node`
# is more interesting than the `rev` changeset.
changeset = repos.get_changeset(node.created_rev)
# add ''Plain Text'' alternate link if needed
if not is_binary(chunk) and mime_type != 'text/plain':
plain_href = req.href.browser(repos.reponame or None,
node.path, rev=rev, format='txt')
add_link(req, 'alternate', plain_href, _('Plain Text'),
'text/plain')
# add ''Original Format'' alternate link (always)
raw_href = req.href.export(rev or repos.youngest_rev,
repos.reponame or None, node.path)
add_link(req, 'alternate', raw_href, _('Original Format'),
mime_type)
self.log.debug("Rendering preview of node %s@%s with mime-type %s",
node.name, rev, mime_type)
content = None # the remainder of that content is not needed
add_stylesheet(req, 'common/css/code.css')
annotations = ['lineno']
annotate = req.args.get('annotate')
if annotate:
annotations.insert(0, annotate)
preview_data = mimeview.preview_data(context,
node.get_processed_content(),
node.get_content_length(),
mime_type, node.created_path,
raw_href,
annotations=annotations,
force_source=bool(annotate))
return {
'changeset': changeset,
'size': node.content_length,
'preview': preview_data,
'annotate': annotate,
}
def _get_download_href(self, href, repos, node, rev):
"""Return the URL for downloading a file, or a directory as a ZIP."""
if node is not None and node.isfile:
return href.export(rev or 'HEAD', repos.reponame or None,
node.path)
path = '' if node is None else node.path.strip('/')
if self.is_path_downloadable(repos, path):
return href.browser(repos.reponame or None, path,
rev=rev or repos.youngest_rev, format='zip')
# public methods
def is_path_downloadable(self, repos, path):
if repos.reponame:
path = repos.reponame + '/' + path
return any(fnmatchcase(path, dp.strip('/'))
for dp in self.downloadable_paths)
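# --- Editor's note: illustrative sketch (not part of the original file);
# `default_repos` and `named_repos` are placeholders. The configured patterns
# are glob-matched against the path, which is first qualified with the
# repository name for non-default repositories (cf. `downloadable_paths` above).
# With the default '/trunk, /branches/*, /tags/*':
#
#   is_path_downloadable(default_repos, 'branches/1.0')  # True: matches 'branches/*'
#   is_path_downloadable(named_repos, 'branches/1.0')    # False unless a qualified
#                                                        # pattern like '/myrepo/branches/*'
#                                                        # is listed in the option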
def render_properties(self, mode, context, props):
"""Prepare rendering of a collection of properties."""
return filter(None, [self.render_property(name, mode, context, props)
for name in sorted(props)])
def render_property(self, name, mode, context, props):
"""Renders a node property to HTML."""
if name in self.hidden_properties:
return
candidates = []
for renderer in self.property_renderers:
quality = renderer.match_property(name, mode)
if quality > 0:
candidates.append((quality, renderer))
candidates.sort(reverse=True)
for (quality, renderer) in candidates:
try:
rendered = renderer.render_property(name, mode, context, props)
if not rendered:
return rendered
if isinstance(rendered, RenderedProperty):
value = rendered.content
else:
value = rendered
rendered = None
prop = {'name': name, 'value': value, 'rendered': rendered}
return prop
except Exception as e:
self.log.warning('Rendering failed for property %s with '
'renderer %s: %s', name,
renderer.__class__.__name__,
exception_to_unicode(e, traceback=True))
# IWikiSyntaxProvider methods
def get_wiki_syntax(self):
return []
def get_link_resolvers(self):
"""TracBrowser link resolvers.
- `source:` and `browser:`
* simple paths (/dir/file)
* paths at a given revision (/dir/file@234)
* paths with line number marks (/dir/file@234:10,20-30)
* paths with line number anchor (/dir/file@234#L100)
Marks and anchor can be combined.
The revision must be present when specifying line numbers.
In the few cases where it would be redundant (e.g. for tags), the
revision number itself can be omitted: /tags/v10/file@100-110#L99
"""
return [('repos', self._format_browser_link),
('export', self._format_export_link),
('source', self._format_browser_link),
('browser', self._format_browser_link)]
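# --- Editor's note: illustrative examples (not part of the original file). ---
# The TracLinks forms resolved above, as they would appear in wiki text:
#
#   source:/trunk/setup.py                  -> latest revision of the file
#   source:/trunk/setup.py@1200             -> the file at revision 1200
#   source:/trunk/setup.py@1200:10,20-30    -> with lines 10 and 20-30 marked
#   source:/trunk/setup.py@1200#L25         -> jump to line 25
#   export:/trunk/setup.py@1200             -> raw download at revision 1200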
def _format_export_link(self, formatter, ns, export, label):
export, query, fragment = formatter.split_link(export)
if ':' in export:
rev, path = export.split(':', 1)
elif '@' in export:
path, rev = export.split('@', 1)
else:
rev, path = None, export
node, raw_href, title = self._get_link_info(path, rev, formatter.href,
formatter.perm)
if raw_href:
return tag.a(label, class_='export', href=raw_href + fragment,
title=title)
return tag.a(label, class_='missing export')
def _format_browser_link(self, formatter, ns, path, label):
path, query, fragment = formatter.split_link(path)
rev = marks = None
match = self.PATH_LINK_RE.match(path)
if match:
path, rev, marks = match.groups()
href = formatter.href
src_href = href.browser(path, rev=rev, marks=marks) + query + fragment
node, raw_href, title = self._get_link_info(path, rev, formatter.href,
formatter.perm)
if not node:
return tag.a(label, class_='missing source')
link = tag.a(label, class_='source', href=src_href)
if raw_href:
link = tag(link, tag.a(u'\u200b', href=raw_href + fragment,
title=title,
class_='trac-rawlink' if node.isfile
else 'trac-ziplink'))
return link
PATH_LINK_RE = re.compile(r"([^@#:]*)" # path
r"[@:]([^#:]+)?" # rev
r"(?::(\d+(?:-\d+)?(?:,\d+(?:-\d+)?)*))?" # marks
)
def _get_link_info(self, path, rev, href, perm):
rm = RepositoryManager(self.env)
node = raw_href = title = None
try:
reponame, repos, npath = rm.get_repository_by_path(path)
node = get_allowed_node(repos, npath, rev, perm)
if node is not None:
raw_href = self._get_download_href(href, repos, node, rev)
title = _("Download") if node.isfile \
else _("Download as Zip archive")
except TracError:
pass
return (node, raw_href, title)
# IHTMLPreviewAnnotator methods
def get_annotation_type(self):
return 'blame', _('Rev'), _('Revision in which the line changed')
def get_annotation_data(self, context):
"""Cache the annotation data corresponding to each revision."""
return BlameAnnotator(self.env, context)
def annotate_row(self, context, row, lineno, line, blame_annotator):
blame_annotator.annotate(row, lineno)
# IWikiMacroProvider methods
def get_macros(self):
yield "RepositoryIndex"
def get_macro_description(self, name):
description = cleandoc_("""
Display the list of available repositories.
Can be given the following named arguments:
''format''::
Select the rendering format:
- ''compact'' produces a comma-separated list of repository prefix
names (default)
- ''list'' produces a description list of repository prefix names
- ''table'' produces a table view, similar to the one visible in
the ''Browse View'' page
''glob''::
Do a glob-style filtering on the repository names (defaults to '*')
''order''::
Order repositories by the given column (one of "name", "date" or
"author")
''desc''::
When set to 1, order by descending order
(''since 0.12'')
""")
return 'messages', description
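# --- Editor's note: illustrative examples (not part of the original file). ---
# Typical wiki usage of the macro documented above:
#
#   [[RepositoryIndex]]                           -> comma-separated list of repositories
#   [[RepositoryIndex(format=list)]]              -> description list
#   [[RepositoryIndex(format=table, glob=dev*)]]  -> table view, names starting with 'dev'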
def expand_macro(self, formatter, name, content):
args, kwargs = parse_args(content)
format = kwargs.get('format', 'compact')
glob = kwargs.get('glob', '*')
order = kwargs.get('order')
desc = as_bool(kwargs.get('desc', 0))
rm = RepositoryManager(self.env)
all_repos = dict(rdata for rdata in rm.get_all_repositories().items()
if fnmatchcase(rdata[0], glob))
if format == 'table':
repo = self._render_repository_index(formatter.context, all_repos,
order, desc)
add_stylesheet(formatter.req, 'common/css/browser.css')
wiki_format_messages = self.config['changeset'] \
.getbool('wiki_format_messages')
data = {'repo': repo, 'order': order, 'desc': 1 if desc else None,
'reponame': None, 'path': '/', 'stickyrev': None,
'wiki_format_messages': wiki_format_messages}
from trac.web.chrome import Chrome
return Chrome(self.env).render_template(
formatter.req, 'repository_index.html', data, None,
fragment=True)
def get_repository(reponame):
try:
return rm.get_repository(reponame)
except TracError:
return
all_repos = [(reponame, get_repository(reponame))
for reponame in all_repos]
all_repos = sorted(((reponame, repos) for reponame, repos in all_repos
if repos
and not as_bool(repos.params.get('hidden'))
and repos.is_viewable(formatter.perm)),
reverse=desc)
def repolink(reponame, repos):
label = reponame or _('(default)')
return Markup(tag.a(label,
title=_('View repository %(repo)s', repo=label),
href=formatter.href.browser(repos.reponame or None)))
if format == 'list':
return tag.dl([
tag(tag.dt(repolink(reponame, repos)),
tag.dd(repos.params.get('description')))
for reponame, repos in all_repos])
else: # compact
return Markup(', ').join([repolink(reponame, repos)
for reponame, repos in all_repos])
class BlameAnnotator(object):
def __init__(self, env, context):
self.env = env
self.context = context
rm = RepositoryManager(self.env)
self.repos = rm.get_repository(context.resource.parent.id)
self.path = context.resource.id
self.rev = context.resource.version
# maintain state
self.prev_chgset = None
self.chgset_data = {}
add_script(context.req, 'common/js/blame.js')
add_stylesheet(context.req, 'common/css/changeset.css')
add_stylesheet(context.req, 'common/css/diff.css')
self.reset()
def reset(self):
rev = self.rev
node = self.repos.get_node(self.path, rev)
# FIXME: get_annotations() should be in the Resource API
# -- get revision numbers for each line
self.annotations = node.get_annotations()
# -- from the annotations, retrieve changesets and
# determine the span of dates covered, for the color code.
# Note: changesets[i].rev can differ from annotations[i]
# (long form vs. compact, short rev form for the latter).
self.changesets = []
chgset = self.repos.get_changeset(rev)
chgsets = {rev: chgset}
self.timerange = TimeRange(chgset.date)
for idx in range(len(self.annotations)):
rev = self.annotations[idx]
chgset = chgsets.get(rev)
if not chgset:
chgset = self.repos.get_changeset(rev)
chgsets[rev] = chgset
self.timerange.insert(chgset.date)
# get list of changeset parallel to annotations
self.changesets.append(chgset)
# -- retrieve the original path of the source, for each rev
# (support for copy/renames)
self.paths = {}
for path, rev, chg in node.get_history():
self.paths[rev] = path
# -- get custom colorize function
browser = BrowserModule(self.env)
self.colorize_age = browser.get_custom_colorizer()
def annotate(self, row, lineno):
if lineno > len(self.annotations):
row.append(tag.th())
return
rev = self.annotations[lineno-1]
chgset = self.changesets[lineno-1]
path = self.paths.get(rev, None)
# Note: path will be None if copy/rename is not supported
# by get_history
# -- compute anchor and style once per revision
if rev not in self.chgset_data:
chgset_href = \
self.context.href.changeset(rev, self.repos.reponame or None,
path)
short_author = chgset.author.split(' ', 1)[0]
title = shorten_line('%s: %s' % (short_author, chgset.message))
anchor = tag.a('[%s]' % self.repos.short_rev(rev), # shortname
title=title, href=chgset_href)
color = self.colorize_age(self.timerange.relative(chgset.date))
style = 'background-color: rgb(%d, %d, %d);' % color
self.chgset_data[rev] = (anchor, style)
else:
anchor, style = self.chgset_data[rev]
if self.prev_chgset != chgset:
self.prev_style = style
# optimize away the path if there's no copy/rename info
if not path or path == self.path:
path = ''
# -- produce blame column, eventually with an anchor
style = self.prev_style
if lineno < len(self.changesets) and self.changesets[lineno] == chgset:
style += ' border-bottom: none;'
blame_col = tag.th(style=style, class_='blame r%s' % rev)
if self.prev_chgset != chgset:
blame_col.append(anchor)
self.prev_chgset = chgset
row.append(blame_col)
| bsd-3-clause | -679,467,882,957,554,700 | 41.602089 | 79 | 0.544561 | false |
cogeorg/BlackRhino | examples/firesales_simple/networkx/algorithms/isomorphism/matchhelpers.py | 35 | 12220 | """Functions which help end users define customize node_match and
edge_match functions to use during isomorphism checks.
"""
from itertools import permutations
import types
import networkx as nx
__all__ = ['categorical_node_match',
'categorical_edge_match',
'categorical_multiedge_match',
'numerical_node_match',
'numerical_edge_match',
'numerical_multiedge_match',
'generic_node_match',
'generic_edge_match',
'generic_multiedge_match',
]
def copyfunc(f, name=None):
"""Returns a deepcopy of a function."""
try:
return types.FunctionType(f.func_code, f.func_globals, name or f.func_name,
f.func_defaults, f.func_closure)
except AttributeError:
return types.FunctionType(f.__code__, f.__globals__, name or f.__name__,
f.__defaults__, f.__closure__)
def allclose(x, y, rtol=1.0000000000000001e-05, atol=1e-08):
"""Returns True if x and y are sufficiently close, elementwise.
Parameters
----------
rtol : float
The relative error tolerance.
atol : float
The absolute error tolerance.
"""
# assume finite weights, see numpy.allclose() for reference
for xi, yi in zip(x,y):
if not ( abs(xi-yi) <= atol + rtol * abs(yi) ):
return False
return True
def close(x, y, rtol=1.0000000000000001e-05, atol=1e-08):
"""Returns True if x and y are sufficiently close.
Parameters
----------
rtol : float
The relative error tolerance.
atol : float
The absolute error tolerance.
"""
# assume finite weights, see numpy.allclose() for reference
return abs(x-y) <= atol + rtol * abs(y)
categorical_doc = """
Returns a comparison function for a categorical node attribute.
The value(s) of the attr(s) must be hashable and comparable via the ==
operator since they are placed into a set([]) object. If the sets from
G1 and G2 are the same, then the constructed function returns True.
Parameters
----------
attr : string | list
The categorical node attribute to compare, or a list of categorical
node attributes to compare.
default : value | list
The default value for the categorical node attribute, or a list of
default values for the categorical node attributes.
Returns
-------
match : function
The customized, categorical `node_match` function.
Examples
--------
>>> import networkx.algorithms.isomorphism as iso
>>> nm = iso.categorical_node_match('size', 1)
>>> nm = iso.categorical_node_match(['color', 'size'], ['red', 2])
"""
def categorical_node_match(attr, default):
if nx.utils.is_string_like(attr):
def match(data1, data2):
return data1.get(attr, default) == data2.get(attr, default)
else:
attrs = list(zip(attr, default)) # Python 3
def match(data1, data2):
values1 = set([data1.get(attr, d) for attr, d in attrs])
values2 = set([data2.get(attr, d) for attr, d in attrs])
return values1 == values2
return match
categorical_edge_match = copyfunc(categorical_node_match, 'categorical_edge_match')
def categorical_multiedge_match(attr, default):
if nx.utils.is_string_like(attr):
def match(datasets1, datasets2):
values1 = set([data.get(attr, default) for data in datasets1.values()])
values2 = set([data.get(attr, default) for data in datasets2.values()])
return values1 == values2
else:
attrs = list(zip(attr, default)) # Python 3
def match(datasets1, datasets2):
values1 = set([])
for data1 in datasets1.values():
x = tuple( data1.get(attr, d) for attr, d in attrs )
values1.add(x)
values2 = set([])
for data2 in datasets2.values():
x = tuple( data2.get(attr, d) for attr, d in attrs )
values2.add(x)
return values1 == values2
return match
# Docstrings for categorical functions.
categorical_node_match.__doc__ = categorical_doc
categorical_edge_match.__doc__ = categorical_doc.replace('node', 'edge')
tmpdoc = categorical_doc.replace('node', 'edge')
tmpdoc = tmpdoc.replace('categorical_edge_match', 'categorical_multiedge_match')
categorical_multiedge_match.__doc__ = tmpdoc
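# --- Editor's note: illustrative sketch (not part of the original file). ---
# The constructed matchers are meant to be passed to the isomorphism checkers,
# e.g. networkx.is_isomorphic():
#
#   G1 = nx.Graph(); G1.add_node(1, color='red')
#   G2 = nx.Graph(); G2.add_node('a', color='red')
#   nm = categorical_node_match('color', 'black')
#   nx.is_isomorphic(G1, G2, node_match=nm)   # True: both nodes are 'red'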
numerical_doc = """
Returns a comparison function for a numerical node attribute.
The value(s) of the attr(s) must be numerical and sortable. If the
sorted list of values from G1 and G2 are the same within some
tolerance, then the constructed function returns True.
Parameters
----------
attr : string | list
The numerical node attribute to compare, or a list of numerical
node attributes to compare.
default : value | list
The default value for the numerical node attribute, or a list of
default values for the numerical node attributes.
rtol : float
The relative error tolerance.
atol : float
The absolute error tolerance.
Returns
-------
match : function
The customized, numerical `node_match` function.
Examples
--------
>>> import networkx.algorithms.isomorphism as iso
>>> nm = iso.numerical_node_match('weight', 1.0)
>>> nm = iso.numerical_node_match(['weight', 'linewidth'], [.25, .5])
"""
def numerical_node_match(attr, default, rtol=1.0000000000000001e-05, atol=1e-08):
if nx.utils.is_string_like(attr):
def match(data1, data2):
return close(data1.get(attr, default),
data2.get(attr, default),
rtol=rtol, atol=atol)
else:
attrs = list(zip(attr, default)) # Python 3
def match(data1, data2):
values1 = [data1.get(attr, d) for attr, d in attrs]
values2 = [data2.get(attr, d) for attr, d in attrs]
return allclose(values1, values2, rtol=rtol, atol=atol)
return match
numerical_edge_match = copyfunc(numerical_node_match, 'numerical_edge_match')
def numerical_multiedge_match(attr, default, rtol=1.0000000000000001e-05, atol=1e-08):
if nx.utils.is_string_like(attr):
def match(datasets1, datasets2):
values1 = sorted([data.get(attr, default) for data in datasets1.values()])
values2 = sorted([data.get(attr, default) for data in datasets2.values()])
return allclose(values1, values2, rtol=rtol, atol=atol)
else:
attrs = list(zip(attr, default)) # Python 3
def match(datasets1, datasets2):
values1 = []
for data1 in datasets1.values():
x = tuple( data1.get(attr, d) for attr, d in attrs )
values1.append(x)
values2 = []
for data2 in datasets2.values():
x = tuple( data2.get(attr, d) for attr, d in attrs )
values2.append(x)
values1.sort()
values2.sort()
for xi, yi in zip(values1, values2):
if not allclose(xi, yi, rtol=rtol, atol=atol):
return False
else:
return True
return match
# Docstrings for numerical functions.
numerical_node_match.__doc__ = numerical_doc
numerical_edge_match.__doc__ = numerical_doc.replace('node', 'edge')
tmpdoc = numerical_doc.replace('node', 'edge')
tmpdoc = tmpdoc.replace('numerical_edge_match', 'numerical_multiedge_match')
numerical_multiedge_match.__doc__ = tmpdoc
generic_doc = """
Returns a comparison function for a generic attribute.
The value(s) of the attr(s) are compared using the specified
operators. If all the attributes are equal, then the constructed
function returns True.
Parameters
----------
attr : string | list
The node attribute to compare, or a list of node attributes
to compare.
default : value | list
The default value for the node attribute, or a list of
default values for the node attributes.
op : callable | list
The operator to use when comparing attribute values, or a list
of operators to use when comparing values for each attribute.
Returns
-------
match : function
The customized, generic `node_match` function.
Examples
--------
>>> from operator import eq
>>> from networkx.algorithms.isomorphism.matchhelpers import close
>>> from networkx.algorithms.isomorphism import generic_node_match
>>> nm = generic_node_match('weight', 1.0, close)
>>> nm = generic_node_match('color', 'red', eq)
>>> nm = generic_node_match(['weight', 'color'], [1.0, 'red'], [close, eq])
"""
def generic_node_match(attr, default, op):
if nx.utils.is_string_like(attr):
def match(data1, data2):
return op(data1.get(attr, default), data2.get(attr, default))
else:
attrs = list(zip(attr, default, op)) # Python 3
def match(data1, data2):
for attr, d, operator in attrs:
if not operator(data1.get(attr, d), data2.get(attr, d)):
return False
else:
return True
return match
generic_edge_match = copyfunc(generic_node_match, 'generic_edge_match')
def generic_multiedge_match(attr, default, op):
"""Returns a comparison function for a generic attribute.
The value(s) of the attr(s) are compared using the specified
operators. If all the attributes are equal, then the constructed
function returns True. Potentially, the constructed edge_match
function can be slow since it must verify that no isomorphism
exists between the multiedges before it returns False.
Parameters
----------
attr : string | list
The edge attribute to compare, or a list of node attributes
to compare.
default : value | list
The default value for the edge attribute, or a list of
default values for the edge attributes.
op : callable | list
The operator to use when comparing attribute values, or a list
of operators to use when comparing values for each attribute.
Returns
-------
match : function
The customized, generic `edge_match` function.
Examples
--------
>>> from operator import eq
>>> from networkx.algorithms.isomorphism.matchhelpers import close
>>> from networkx.algorithms.isomorphism import generic_node_match
>>> nm = generic_node_match('weight', 1.0, close)
>>> nm = generic_node_match('color', 'red', eq)
>>> nm = generic_node_match(['weight', 'color'],
... [1.0, 'red'],
... [close, eq])
...
"""
# This is slow, but generic.
# We must test every possible isomorphism between the edges.
if nx.utils.is_string_like(attr):
def match(datasets1, datasets2):
values1 = [data.get(attr, default) for data in datasets1.values()]
values2 = [data.get(attr, default) for data in datasets2.values()]
for vals2 in permutations(values2):
for xi, yi in zip(values1, vals2):
if not op(xi, yi):
# This is not an isomorphism, go to next permutation.
break
else:
# Then we found an isomorphism.
return True
else:
# Then there are no isomorphisms between the multiedges.
return False
else:
attrs = list(zip(attr, default)) # Python 3
def match(datasets1, datasets2):
values1 = []
for data1 in datasets1.values():
x = tuple( data1.get(attr, d) for attr, d in attrs )
values1.append(x)
values2 = []
for data2 in datasets2.values():
x = tuple( data2.get(attr, d) for attr, d in attrs )
values2.append(x)
for vals2 in permutations(values2):
for xi, yi, operator in zip(values1, vals2, op):
if not operator(xi, yi):
return False
else:
return True
return match
# Docstrings for numerical functions.
generic_node_match.__doc__ = generic_doc
generic_edge_match.__doc__ = generic_doc.replace('node', 'edge')
| gpl-3.0 | 7,606,248,243,959,091,000 | 34.317919 | 86 | 0.617512 | false |
AMObox/teammaniac | plugin.video.specto/resources/lib/resolvers/realdebrid.py | 9 | 10928 | # -*- coding: utf-8 -*-
'''
Exodus Add-on
Copyright (C) 2016 lambda
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import urllib,json,time
import urlparse
from resources.lib.libraries import cache
from resources.lib.libraries import control
from resources.lib.libraries import client
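# --- Editor's note: descriptive summary (comments added; no code changes). ---
# rdAuthorize() below implements Real-Debrid's OAuth2 "device code" flow:
#   1. request a device_code / user_code pair from /oauth/v2/device/code,
#   2. show the verification URL and user code in a progress dialog and poll
#      /oauth/v2/device/credentials until the user has approved the device,
#   3. exchange the returned client_id/client_secret and the device_code for an
#      access token and refresh token at /oauth/v2/token,
#   4. persist id, secret, token and refresh in the add-on settings.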
def rdAuthorize():
try:
CLIENT_ID = 'TC3DG7YFNBKQK'
USER_AGENT = 'SPECTO for Kodi/1.0'
if not '' in credentials()['realdebrid'].values():
if control.yesnoDialog(control.lang(32411).encode('utf-8'), control.lang(32413).encode('utf-8'), '', 'RealDebrid', control.lang(32415).encode('utf-8'), control.lang(32414).encode('utf-8')):
control.set_setting('realdebrid_client_id','')
control.set_setting('realdebrid_client_secret', '')
control.set_setting('realdebrid_token', '')
control.set_setting('realdebrid_refresh', '')
control.set_setting('realdebrid_auth', '')
raise Exception()
headers = {'User-Agent': USER_AGENT}
url = 'https://api.real-debrid.com/oauth/v2/device/code?client_id=%s&new_credentials=yes' % (CLIENT_ID)
result = client.request(url, headers=headers)
result = json.loads(result)
verification_url = control.lang(30416).encode('utf-8') + '[COLOR skyblue]%s[/COLOR]' % (result['verification_url'])
user_code = control.lang(30417).encode('utf-8') + '[COLOR skyblue]%s[/COLOR]' % (result['user_code'])
device_code = result['device_code']
interval = result['interval']
progressDialog = control.progressDialog
progressDialog.create('RealDebrid', verification_url, user_code)
for i in range(0, 3600):
try:
if progressDialog.iscanceled(): break
time.sleep(1)
if not float(i) % interval == 0: raise Exception()
url = 'https://api.real-debrid.com/oauth/v2/device/credentials?client_id=%s&code=%s' % (CLIENT_ID, device_code)
result = client.request(url, headers=headers, error=True)
result = json.loads(result)
if 'client_secret' in result: break
except:
pass
try: progressDialog.close()
except: pass
id, secret = result['client_id'], result['client_secret']
url = 'https://api.real-debrid.com/oauth/v2/token'
post = {'client_id': id, 'client_secret': secret, 'code': device_code, 'grant_type': 'http://oauth.net/grant_type/device/1.0'}
result = client.request(url, post=post, headers=headers)
result = json.loads(result)
token, refresh = result['access_token'], result['refresh_token']
control.set_setting('realdebrid_client_id', id)
control.set_setting('realdebrid_client_secret', secret)
control.set_setting('realdebrid_token', token)
control.set_setting('realdebrid_refresh', refresh)
control.set_setting('realdebrid_auth', '*************')
raise Exception()
except:
control.openSettings('3.13')
def rdDict():
try:
if '' in credentials()['realdebrid'].values(): raise Exception()
url = 'https://api.real-debrid.com/rest/1.0/hosts/domains'
result = cache.get(client.request, 24, url)
hosts = json.loads(result)
hosts = [i.lower() for i in hosts]
return hosts
except:
return []
def pzDict():
try:
if '' in credentials()['premiumize'].values(): raise Exception()
user, password = credentials()['premiumize']['user'], credentials()['premiumize']['pass']
url = 'http://api.premiumize.me/pm-api/v1.php?method=hosterlist&params[login]=%s&params[pass]=%s' % (user, password)
result = cache.get(client.request, 24, url)
hosts = json.loads(result)['result']['hosterlist']
hosts = [i.lower() for i in hosts]
return hosts
except:
return []
def adDict():
try:
if '' in credentials()['alldebrid'].values(): raise Exception()
url = 'http://alldebrid.com/api.php?action=get_host'
result = cache.get(client.request, 24, url)
hosts = json.loads('[%s]' % result)
hosts = [i.lower() for i in hosts]
return hosts
except:
return []
def rpDict():
try:
if '' in credentials()['rpnet'].values(): raise Exception()
url = 'http://premium.rpnet.biz/hoster2.json'
result = cache.get(client.request, 24, url)
result = json.loads(result)
hosts = result['supported']
hosts = [i.lower() for i in hosts]
return hosts
except:
return []
def debridDict():
return {
'realdebrid': rdDict(),
'premiumize': pzDict(),
'alldebrid': adDict(),
'rpnet': rpDict()
}
def credentials():
return {
'realdebrid': {
'id': control.setting('realdebrid_client_id'),
'secret': control.setting('realdebrid_client_secret'),
'token': control.setting('realdebrid_token'),
'refresh': control.setting('realdebrid_refresh')
},
'premiumize': {
'user': control.setting('premiumize.user'),
'pass': control.setting('premiumize.pin')
},
'alldebrid': {
'user': control.setting('alldebrid.user'),
'pass': control.setting('alldebrid.pass')
},
'rpnet': {
'user': control.setting('rpnet.user'),
'pass': control.setting('rpnet.api')
}}
def status():
try:
c = [i for i in credentials().values() if not '' in i.values()]
if len(c) == 0: return False
else: return True
except:
return False
def getHosts():
myhosts = rdDict()
for i in range(len(myhosts)):
myhosts[i] = myhosts[i].split('.')[-2].encode('utf-8')
#control.log("@@@@ REALDEBRID HOSTS %s ### " % (myhosts))
return myhosts
def resolve(url, debrid='realdebrid'):
u = url
u = u.replace('filefactory.com/stream/', 'filefactory.com/file/')
#control.log("@@@@ REALDEBRID INIT %s ### %s" % (url,debrid))
try:
u1 = urlparse.urlparse(url)[1].split('.')
u1 = u1[-2] + '.' + u1[-1]
if status() is False:raise Exception()
if not debrid == 'realdebrid' and not debrid == True: raise Exception()
#raise Exception()
if '' in credentials()['realdebrid'].values(): raise Exception()
id, secret, token, refresh = credentials()['realdebrid']['id'], credentials()['realdebrid']['secret'], credentials()['realdebrid']['token'], credentials()['realdebrid']['refresh']
USER_AGENT = 'Kodi Exodus/3.0'
post = {'link': u}
headers = {'Authorization': 'Bearer %s' % token, 'User-Agent': USER_AGENT}
url = 'http://api.real-debrid.com/rest/1.0/unrestrict/link'
result = client.request(url, post=post, headers=headers, error=True)
control.log('@@ DEBRID RESULTS@@ %s' % result)
result = json.loads(result)
if 'error' in result and result['error'] == 'bad_token':
result = client.request('https://api.real-debrid.com/oauth/v2/token', post={'client_id': id, 'client_secret': secret, 'code': refresh, 'grant_type': 'http://oauth.net/grant_type/device/1.0'}, headers={'User-Agent': USER_AGENT}, error=True)
result = json.loads(result)
control.log('Refreshing Expired Real Debrid Token: |%s|%s|' % (id, refresh))
control.log('Refreshing Expired : |%s|' % (result))
if 'error' in result: return
token, refresh = result['access_token'], result['refresh_token']
control.set_setting('realdebrid_token', token)
control.set_setting('realdebrid_refresh', refresh)
headers['Authorization'] = 'Bearer %s' % result['access_token']
result = client.request(url, post=post, headers=headers)
result = json.loads(result)
if 'error' in result and result['error'] == 'file_unavailable':
control.log("@@@@ REALDEBRID FILE UNAVAIL %s ### %s" % (url))
return
url = result['download']
control.log('@@ DEBRID URl@@ %s' % url)
return url
except:
pass
try:
if not debrid == 'premiumize' and not debrid == True: raise Exception()
if '' in credentials()['premiumize'].values(): raise Exception()
user, password = credentials()['premiumize']['user'], credentials()['premiumize']['pass']
url = 'http://api.premiumize.me/pm-api/v1.php?method=directdownloadlink&params[login]=%s&params[pass]=%s&params[link]=%s' % (user, password, urllib.quote_plus(u))
result = client.request(url, close=False)
url = json.loads(result)['result']['location']
return url
except:
pass
try:
if not debrid == 'alldebrid' and not debrid == True: raise Exception()
if '' in credentials()['alldebrid'].values(): raise Exception()
user, password = credentials()['alldebrid']['user'], credentials()['alldebrid']['pass']
login_data = {'action': 'login', 'login_login': user, 'login_password': password}
login_link = 'http://alldebrid.com/register/?%s' % login_data
cookie = client.request(login_link, output='cookie', close=False)
url = 'http://www.alldebrid.com/service.php?link=%s' % urllib.quote_plus(u)
result = client.request(url, cookie=cookie, close=False)
url = client.parseDOM(result, 'a', ret='href', attrs = {'class': 'link_dl'})[0]
url = client.replaceHTMLCodes(url)
url = '%s|Cookie=%s' % (url, urllib.quote_plus(cookie))
return url
except:
pass
try:
if not debrid == 'rpnet' and not debrid == True: raise Exception()
if '' in credentials()['rpnet'].values(): raise Exception()
user, password = credentials()['rpnet']['user'], credentials()['rpnet']['pass']
login_data = {'username': user, 'password': password, 'action': 'generate', 'links': u}
login_link = 'http://premium.rpnet.biz/client_api.php?%s' % login_data
result = client.request(login_link, close=False)
result = json.loads(result)
url = result['links'][0]['generated']
return url
except:
return
| gpl-2.0 | -8,558,387,706,598,783,000 | 36.682759 | 251 | 0.59709 | false |
yast/yast-python-bindings | examples/FakeUserInput1.py | 1 | 1318 | # encoding: utf-8
from yast import import_module
import_module('UI')
from yast import *
class FakeUserInput1Client:
def main(self):
# Build dialog with one input field field, 4 Beatles buttons and an OK button.
UI.OpenDialog(
VBox(
InputField(Id("name"), "Name:"),
HBox(
PushButton(Id("john"), "&John"),
PushButton(Id("paul"), "&Paul"),
PushButton(Id("george"), "&George"),
PushButton(Id("ringo"), "&Ringo")
),
PushButton(Id("ok"), "&OK")
)
)
# Wait for user input.
button = None
UI.FakeUserInput("john")
UI.FakeUserInput("paul")
UI.FakeUserInput("george")
UI.FakeUserInput("ringo")
while True:
button = UI.UserInput()
if button == "john":
UI.ChangeWidget(Id("name"), "Value", "John Lennon")
elif button == "paul":
UI.ChangeWidget(Id("name"), "Value", "Paul McCartney")
elif button == "george":
UI.ChangeWidget(Id("name"), "Value", "George Harrison")
elif button == "ringo":
UI.ChangeWidget(Id("name"), "Value", "Ringo Starr")
ycpbuiltins.sleep(3 * 1000)
if button == "ok":
break
UI.CloseDialog()
FakeUserInput1Client().main()
| gpl-2.0 | -1,673,797,786,104,484,000 | 25.897959 | 84 | 0.540971 | false |
darp/plot-tools | lib/plots/LinePlot.py | 1 | 1284 | from lib.plots.AbstractPlot import AbstractPlot
import numpy as np
class LinePlot(AbstractPlot):
def __init__( self ):
params = dict()
params['xlim'] = [0,5]
params['ylim'] = [0,20]
params['xlabel'] = 'xlabel'
params['ylabel'] = 'ylabel'
AbstractPlot.__init__( self, params )
def registerPlottingFunctions( self ):
functions = list()
functions.append( self.__plot_scatter1 )
functions.append( self.__plot_line1 )
functions.append( self.__plot_line2 )
functions.append( self.__plot_text1 )
return functions
def __plot_text1( self ):
x,y = 2.2,10
text = 'text'
self._plot_text(self, x, y, text )
def __plot_scatter1( self ):
x = np.linspace(0,10,100)
y = x**2 + 3
m = (y + np.random.randn(1,100)*3)[0]
params = { 'color' : 'gray' }
label = 'label'
self._plot_scatter(self, x, m, label, params )
def __plot_line1( self ):
x = np.linspace(0,10,100)
y = x**2 + 3
self._plot_line(x, y, 'line 1' )
def __plot_line2( self ):
x = np.linspace(0,10,100)
y = x**2 + 10
params = { 'linestyle' : '--' }
self._plot_line(x, y, 'line 2', params ) | gpl-3.0 | -5,991,547,424,446,628,000 | 28.204545 | 54 | 0.521028 | false |
venkateshdaram434/augmented-traffic-control | atc/django-atc-profile-storage/setup.py | 16 | 2610 | #!/usr/bin/env python
#
# Copyright (c) 2014, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
#
#
import os
import re
import sys
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme:
README = readme.read()
def get_version(package):
"""
Return package version as listed in `__version__` in `init.py`.
"""
init_py = open(os.path.join(package, '__init__.py')).read()
return re.search("__version__ = ['\"]([^'\"]+)['\"]", init_py).group(1)
def get_packages(package):
"""
Return root package and all sub-packages.
"""
return [dirpath
for dirpath, dirnames, filenames in os.walk(package)
if os.path.exists(os.path.join(dirpath, '__init__.py'))]
def get_package_data(package):
"""
Return all files under the root package, that are not in a
package themselves.
"""
walk = [(dirpath.replace(package + os.sep, '', 1), filenames)
for dirpath, dirnames, filenames in os.walk(package)
if not os.path.exists(os.path.join(dirpath, '__init__.py'))]
filepaths = []
for base, filenames in walk:
filepaths.extend([os.path.join(base, filename)
for filename in filenames])
return {package: filepaths}
version = get_version('atc_profile_storage')
if sys.argv[-1] == 'publish':
if os.system("pip freeze | grep wheel"):
print("wheel not installed.\nUse `pip install wheel`.\nExiting.")
sys.exit()
if os.system("pip freeze | grep twine"):
print("twine not installed.\nUse `pip install twine`.\nExiting.")
sys.exit()
os.system("python setup.py sdist bdist_wheel")
os.system("twine upload dist/*")
print("You probably want to also tag the version now:")
print(" git tag -a %s -m 'version %s'" % (version, version))
print(" git push --tags")
sys.exit()
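# The block above is a convenience "publish" shortcut: build sdist and wheel,
# upload them with twine, then remind the maintainer to tag and push the release.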
setup(
name='django-atc-profile-storage',
version=version,
description='ATC Profile storage app',
author='Emmanuel Bretelle',
author_email='[email protected]',
url='https://github.com/facebook/augmented-traffic-control',
packages=get_packages('atc_profile_storage'),
package_data=get_package_data('atc_profile_storage'),
classifiers=['Programming Language :: Python', ],
long_description=README,
install_requires=['djangorestframework']
)
| bsd-3-clause | 3,907,256,152,483,308,000 | 30.829268 | 78 | 0.640613 | false |
mailmanindinc/qualitybots | src/appengine/common/gql_util.py | 26 | 1494 | #!/usr/bin/python2.4
#
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""AppEngine Datastore/GQL related Utilities module.
This common util provides helper functionality to extend/support
various GQL related queries.
"""
def FetchEntities(query_obj, limit):
"""Fetches number of Entities up to limit using query object.
Args:
query_obj: AppEngine Datastore Query Object.
limit: Fetch limit on number of records you want to fetch.
Returns:
Fetched Entities.
"""
entities = []
  # If limit is more than 1000, then fetch the remaining records using a cursor.
if limit > 1000:
results = query_obj.fetch(1000)
entities.extend(results)
cursor = query_obj.cursor()
while results and limit > len(entities):
query_obj.with_cursor(cursor)
results = query_obj.fetch(1000)
entities.extend(results)
cursor = query_obj.cursor()
else:
entities = query_obj.fetch(limit)
return entities
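# Example usage (illustrative only; BrowserScore is a hypothetical model name):
#   query = BrowserScore.all().filter('test_id =', test_id)
#   scores = FetchEntities(query, 2500)  # transparently pages past the 1000-row fetch cap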
| apache-2.0 | 252,499,380,492,406,270 | 28.88 | 74 | 0.72423 | false |
teto/mptcpnetlink | daemon/daemon.py | 1 | 8902 | #!/usr/bin/python3
import sys
import netlink.capi as nl
import netlink.genl.capi as genl
import traceback
import logging
import argparse
import struct
import socket
import subprocess
import signal
import binascii
#
logger = logging.getLogger( __name__ )
logger.setLevel( logging.DEBUG )
# logger= logging
# print ("handlers", logger.handlers )
handler = logging.StreamHandler()
#logging.FileHandler('hello.log')
handler.setLevel(logging.DEBUG)
logger.addHandler( handler )
# PATH_TOWARDS_PROGRAM = "/home/teto/lig/process_lig_output.sh"
ELC_REQUEST_RLOCS_FOR_EID= 0
ELC_RESULTS =1
ELC_SET_MAP_RESOLVER =2
ELC_MAX=3
LIG_GENL_VERSION=1
LIG_GROUP_NAME ="lig_daemons"
LIG_FAMILY_NAME ="LIG_FAMILY"
# typedef enum {
ELA_RLOCS_NUMBER = 1 # /* number of rlocs u8 */
ELA_MPTCP_TOKEN = 2 # to be able to retrieve correct socket */
ELA_EID=3 #= an IP. Only v4 supported as an u32 */
ELA_MAX=4 #
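# These numbers must stay in sync with the companion kernel module's
# generic-netlink interface: ELC_* are command ids, ELA_* are attribute types.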
logger.debug("Starting LIG DAEMON\n");
# print("level" , logging.getLevelName( logger.getEffectiveLevel() ) )
def sigint_handler(signum, frame):
print( 'Stop pressing the CTRL+C!' )
class LigDaemon:
def add_membership(self):
# nl.nl_socket_drop_membership
pass
def __init__(self,lig_program, mapresolver=None,simulate=None):
self.done = 1;
# by default, should be possible to override
self.mapresolver = mapresolver or "153.16.49.112";
self.simulate = simulate
self.lig_program = lig_program
# TODO replace by Socket
# allocates callback
tx_cb = nl.nl_cb_alloc(nl.NL_CB_DEFAULT)
#Clone an existing callback handle
self.rx_cb = nl.nl_cb_clone(tx_cb)
# allocates sockets
self.sk = nl.nl_socket_alloc_cb(tx_cb)
        # set callback handlers
# last parameter represents arguments to pass
logger.info("Setting callback functions")
# nl.py_nl_cb_err(self.rx_cb, nl.NL_CB_CUSTOM, error_handler, self);
# nl_cb_set( callback_set, type, kind, function,args )
nl.py_nl_cb_set(self.rx_cb, nl.NL_CB_FINISH, nl.NL_CB_VERBOSE, finish_handler, self);
nl.py_nl_cb_set(self.rx_cb, nl.NL_CB_ACK, nl.NL_CB_VERBOSE, ack_handler, self);
nl.py_nl_cb_set(self.rx_cb, nl.NL_CB_VALID, nl.NL_CB_CUSTOM, msg_handler, self);
# nl.py_nl_cb_set(self.rx_cb, nl.NL_CB_VALID, nl.NL_CB_CUSTOM, self.handle, None);
# Notifications do not use sequence numbers, disable sequence number checking.
#nl.nl_socket_disable_seq_check(self.sk);
#nl.nl_socket_disable_auto_ack(self.sk);
# establish connection
genl.genl_connect(self.sk)
self.family_id = genl.genl_ctrl_resolve(self.sk, LIG_FAMILY_NAME)
# register to the multicast group
# print( dir( sys.modules["netlink.genl.capi"]) )
# print( dir( sys.modules["netlink.capi"]) )
logger.info("family %s registered with number %d"%(LIG_FAMILY_NAME, self.family_id));
self.group_id = genl.genl_ctrl_resolve_grp (self.sk, LIG_FAMILY_NAME, LIG_GROUP_NAME);
if self.group_id < 0 :
# should log it
logger.error("Could not find group group %s. Is the adequate module loaded ?"%LIG_FAMILY_NAME)
exit(1)
logger.info("Group id found: %d" % self.group_id);
logger.info("Using mapresolver %s"%self.mapresolver)
if self.simulate:
logger.info("Simulation mode enabled %d"%self.simulate)
else:
logger.info("Real mode enabled")
ret = nl.nl_socket_add_membership(self.sk, self.group_id);
if ret == 0:
logger.info("Registration successful")
else:
logger.error("Could not register to group")
exit(1)
# send answer via netlink
    # send it into another thread ?
def send_rlocs_list_for_eid(self, seq_nb, token, nb_of_rlocs):
logger.info("Sending rlocs nb of '%d' for token %d with seq nb %d"%(nb_of_rlocs,token,seq_nb))
msg = nl.nlmsg_alloc()
# returns void*
genl.genlmsg_put(msg,
0, # port
0, # seq nb
self.family_id, # family_id
0, # length of user header
0, # optional flags
ELC_RESULTS, # cmd
LIG_GENL_VERSION # version
)
nl.nla_put_u32(msg, ELA_RLOCS_NUMBER, nb_of_rlocs );
nl.nla_put_u32(msg, ELA_MPTCP_TOKEN , token );
err = nl.nl_send_auto_complete(self.sk, msg);
if err < 0:
logger.error("Error while sending answer")
nl.nlmsg_free(msg)
return False
nl.nlmsg_free(msg)
return True
def run(self):
err = 0
# cbd.done > 0 and not err < 0
while True:
# expects handle / cb configuration
# see nl.c:965
err = nl.nl_recvmsgs(self.sk, self.rx_cb)
# err = nl.nl_recvmsgs_default(self.sk)
if err < 0:
logger.error( "Error for nl_recvmsgs: %d: %s"% (err, nl.nl_geterror(err)) )
break;
def retrieve_number_of_rlocs(self,eid):
print("retrieve_number_of_rlocs")
# if in simulation mode, always return the same answer
if self.simulate:
logger.info("Simulation mode returning %d for eid %s"%(self.simulate, eid) )
return self.simulate
try:
#number_of_rlocs=$(lig -m $mapresolver $eid 2>&1 | grep -c up)
#PATH_TOWARDS_PROGRAM
            cmd = self.lig_program + " -m " + self.mapresolver + " " + eid + " 2>&1 | grep -c up"
# args = [ self.lig_program,"-m", self.mapresolver, eid , "2>&1" ]
output = subprocess.check_output( cmd , shell=True);
print( "Result: ", output.decode() )
return int( output.decode() );
except subprocess.CalledProcessError as e:
logger.error("Could not retrieve the correct number of rlocs. Return code: %d"%e.returncode)
return -1
def handle(self, m):
print("Hello world from ember function");
logger.debug("Handle Msg from class")
try:
nlmsghdr = nl.nlmsg_hdr(m)
print("nlmsghdr: flags:", nlmsghdr.nlmsg_flags , "seq:", nlmsghdr.nlmsg_seq )
genlhdr = genl.genlmsg_hdr( nlmsghdr )
if not genlhdr:
logger.error("Could not get generic header")
return nl.NL_STOP
if genlhdr.cmd == ELC_REQUEST_RLOCS_FOR_EID:
logger.info("Request RLOC for an EID")
# attrs = None
print("Message handler got called");
err, attrs = genl.py_genlmsg_parse(
nlmsghdr,
0, # will be returned as an attribute
ELA_MAX,
None
)
if err < 0:
logger.error("An error happened while parsing attributes")
return nl.NL_STOP;
logger.info("Looking for ELA")
if ELA_EID in attrs:
print ("hello", attrs[ELA_EID])
eid = nl.nla_get_u32(attrs[ELA_EID]);
print ("eid", eid)
print ("token", attrs[ELA_MPTCP_TOKEN])
token = nl.nla_get_u32(attrs[ELA_MPTCP_TOKEN]);
print("token", token)
# print("Requested EID ",eid, " for token ",binascii.hexlify( token ))
# I => unsigned int
packed_value = struct.pack('I', eid)
addr = socket.inet_ntoa(packed_value)
nb = self.retrieve_number_of_rlocs( addr )
if nb < 0:
                        logger.warning("An error happened while retrieving nb of rlocs")
return nl.NL_STOP
else:
#nlmsghdr.nlmsg_seq + 1
self.send_rlocs_list_for_eid( 0, token, nb )
return nl.NL_SKIP
else:
logger.error("Missing critical attribute in packet")
else:
logger.warning("Unhandled command %d"% genlhdr.cmd)
# nlmsg_data returns void* so not usable straightaway
# TODO need to retrieve command
# print("e", err)
return nl.NL_SKIP
except Exception as e:
(t,v,tb) = sys.exc_info()
            print("test", v, e)
traceback.print_tb(tb)
return nl.NL_SKIP
def error_handler(err, a):
print("error handler")
logger.error("Error handler called")
# a.done = err.error
return nl.NL_STOP
def finish_handler(m, arg):
print("finish handler")
logger.info("finish_handler called")
return nl.NL_SKIP
def ack_handler(m, arg):
print("ack handler")
logger.info("ack handler called")
# arg.done = 0
return nl.NL_STOP
def msg_handler(m, arg):
print("msg handler called")
# print ( dir (arg) )
arg.handle(m)
# return nl.NL_OK
return nl.NL_SKIP
###############################
###############################
## TO TEST LIBNL (remove later)
###############################
# msg_handler = "hello world"
# ack_handler = None
if __name__ == '__main__':
signal.signal(signal.SIGINT, sigint_handler)
# run tests
parser = argparse.ArgumentParser(
        description='Daemon listening for mptcp netlink requests'
)
#there must be at most one ?
parser.add_argument('mapresolver', nargs="?", default="153.16.49.112", #DEFAULT_MAPRESOLVER,
help="Choose")
parser.add_argument('--simulate', dest="number_of_subflows", type=int)
# subparsers = parser.add_subparsers(dest="mode", help='sub-command help')
# parser = subparsers.add_parser('daemon',help='daemon help')
args = parser.parse_args( sys.argv[1:] )
try:
# could pass mr, or simulate mode
#
daemon = LigDaemon(
lig_program="/home/teto/lig/lig" ,
mapresolver=args.mapresolver,
simulate=(args.number_of_subflows or None)
)
daemon.run();
except Exception as e:
# (type, value, traceback)
(t, v, tb) = sys.exc_info()
print("hello world",v )
traceback.print_tb(tb)
| gpl-3.0 | -4,051,952,761,526,717,000 | 23.865922 | 97 | 0.657043 | false |
isaachenrion/jets | src/proteins/test/test_argument_converter.py | 1 | 1879 | import os
from .test_monitors import test_monitor_collection
def test_argument_converter(args):
'''
Takes an argparse namespace, and converts it into argument dictionaries.
Each argument dictionary is fed into a specific function or class in the
training script. e.g. admin_kwargs is the set of arguments to be fed
to the experiment Administrator.
'''
if args.debug:
args.email = None
args.verbose = True
args.batch_size = 3
args.epochs = 15
args.lr = 0.1
args.period = 2
args.seed = 1
args.hidden = 1
args.iters = 2
args.lf = 1
return dict(
admin_kwargs=get_admin_kwargs(args),
data_loader_kwargs=get_data_loader_kwargs(args),
model_loading_kwargs=get_model_loading_kwargs(args),
)
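# Illustrative only (Administrator, load_data and load_model are stand-in names
# for whatever the training script actually calls with these dictionaries):
#   kwargs = test_argument_converter(args)
#   admin = Administrator(**kwargs['admin_kwargs'])
#   data = load_data(**kwargs['data_loader_kwargs'])
#   model = load_model(**kwargs['model_loading_kwargs'])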
def get_admin_kwargs(args):
return dict(
dataset=args.dataset,
debug=args.debug,
slurm_array_task_id=args.slurm_array_task_id,
slurm_array_job_id=args.slurm_array_job_id,
gpu=args.gpu,
seed=args.seed,
email_filename=args.email_filename,
silent=args.silent,
verbose=args.verbose,
cmd_line_args=args.cmd_line_args,
monitor_collection=test_monitor_collection(),
arg_string=args.arg_string,
root_dir=args.root_dir,
)
def get_data_loader_kwargs(args):
data_dir = os.path.join(args.data_dir, 'proteins', 'pdb25')
if args.debug:
data_dir = os.path.join(data_dir, 'small')
return dict(
debug=args.debug,
data_dir=data_dir,
n_test=args.n_test,
batch_size=args.batch_size
)
def get_model_loading_kwargs(args):
arg_list = [
'models_dir',
'model',
'single_model',
#'inventory'
]
return {k: v for k, v in vars(args).items() if k in arg_list}
| bsd-3-clause | -5,814,255,701,661,723,000 | 25.842857 | 76 | 0.604577 | false |
tp9/misc_python_projects | tictactoe.py | 1 | 3517 | computer = "X"; human = "O"
scan = [[0,1,2],[3,4,5],[6,7,8], \
[0,3,6],[1,4,7],[2,5,8],[0,4,8],[2,4,6]]
# 0 1 2
# 3 4 5
# 6 7 8
priority = [4, 0,2,8,6, 1,5,7,3]
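# Move preference when several squares score equally: centre (4) first,
# then the corners (0, 2, 8, 6), then the edges (1, 5, 7, 3).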
def clrArray():
a = []
for i in range(9):
a.append(0)
return a
def print_board(board):
def mark(i):
if board[i] == 0:
return str(i + 1)
return board[i]
for i in range(0,len(board),3):
print(" " + mark(i)+mark(i+1)+mark(i+2))
# possible values of best
best_draw = -3; best_done = -2; best_common = -1
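# scan_board leaves either one of the sentinels above in best, or a value >= 0,
# which is the board index the computer should play next (a win or a block).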
def scan_board(board):
global hcnt, ccnt, best, common1, common0
common1 = clrArray(); common0 = clrArray()
best = best_draw
for i in range(len(scan)):
empty = []; hcnt = 0; ccnt = 0
for j in scan[i]:
if board[j] == human:
hcnt += 1
elif board [j] == computer:
ccnt += 1
else:
empty.append(j)
if hcnt == 3 or ccnt == 3: best = best_done; return
if len(empty) == 1 and (hcnt == 2 or ccnt == 2):
best = empty[0]; return
if len(empty) == 2 and ccnt == 1:
best = best_common
common1 [empty[0]] += 1; common1 [empty[1]] += 1
elif len(empty)==3 or len(empty) == 2 and ccnt == 1:
best = best_common;
common0 [empty[0]] += 1; common0 [empty[1]] += 1
if len(empty) == 3: common0 [empty[2]] += 1
def find_max(a):
m = -1; j = -1
for i in priority:
if a [i] > m: m = a[i]; j = i
return j
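# Ties are broken in favour of the square that appears earlier in priority,
# since only a strictly greater count replaces the current best.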
def choose_move(human_turn, board):
global best,common1,common0
scan_board(board)
if hcnt == 3:
print("You win!")
elif ccnt == 3:
print("I win!")
elif best == best_draw:
print("It's a draw...")
elif human_turn:
return True
elif best == best_common:
best = find_max(common1)
if common1[best] == 0:
best = find_max(common0)
if common0[best] == 0:
print("Error: No common0 found")
return False
return best >= 0
else:
return best >= 0
return False
def play_game(board):
global msg, first_move
mv = input(msg + " ")
if mv == "q" or mv == "Q":
print("Game Aborted")
return False
if mv == "" and first_move:
if choose_move(False, board):
board[best] = computer
print("------------")
print_board(board)
return True
try:
mi = int(mv) - 1
except:
msg = "Input not a number. Try again:"
return True
if 0 > mi or mi > 8:
msg = "Input out of bounds. Try again:"
return True
if board[mi] != 0:
msg = "Requested square occupied. Try again:"
return True
board[mi] = human
msg = "Your square?"
print_board(board)
if choose_move(False, board):
board[best] = computer
print("------------")
print_board(board)
return choose_move(True, board)
return False
def main():
    global msg, first_move
board = clrArray()
msg = "Your square?"
first_move = True
again = True
print("Hit enter to let computer move first")
print("Enter Q to quit game")
print("Enter 1 through 9 to play a square")
print("Computer is " + computer + " you are " + human)
print_board(board)
while again:
again = play_game(board)
first_move = False
if __name__ == '__main__': main() | mit | -4,169,633,967,128,925,700 | 26.271318 | 60 | 0.497299 | false |
ramusus/django-vkontakte-wall | vkontakte_wall/migrations/0013_change_m2m_to_m2m_history.py | 1 | 14633 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Post.like_users.through.time_from'
db.add_column('vkontakte_wall_post_like_users', 'time_from',
self.gf('django.db.models.fields.DateTimeField')(null=True, db_index=True),
keep_default=False)
# Adding field 'Post.like_users.through.time_to'
db.add_column('vkontakte_wall_post_like_users', 'time_to',
self.gf('django.db.models.fields.DateTimeField')(null=True, db_index=True),
keep_default=False)
# Adding field 'Comment.like_users.through.time_from'
db.add_column('vkontakte_wall_comment_like_users', 'time_from',
self.gf('django.db.models.fields.DateTimeField')(null=True, db_index=True),
keep_default=False)
# Adding field 'Comment.like_users.through.time_to'
db.add_column('vkontakte_wall_comment_like_users', 'time_to',
self.gf('django.db.models.fields.DateTimeField')(null=True, db_index=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Post.like_users.through.time_from'
db.delete_column('vkontakte_wall_post_like_users', 'time_from')
# Deleting field 'Post.like_users.through.time_to'
db.delete_column('vkontakte_wall_post_like_users', 'time_to')
# Deleting field 'Comment.like_users.through.time_from'
db.delete_column('vkontakte_wall_comment_like_users', 'time_from')
# Deleting field 'Comment.like_users.through.time_to'
db.delete_column('vkontakte_wall_comment_like_users', 'time_to')
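    # South's frozen ORM snapshot: used only to build the fake ORM handed to
    # forwards()/backwards(), never executed against the database itself.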
models = {
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'vkontakte_places.city': {
'Meta': {'ordering': "['name']", 'object_name': 'City'},
'area': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'country': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'cities'", 'null': 'True', 'to': u"orm['vkontakte_places.Country']"}),
'fetched': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'region': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'remote_id': ('django.db.models.fields.BigIntegerField', [], {'unique': 'True'})
},
u'vkontakte_places.country': {
'Meta': {'ordering': "['name']", 'object_name': 'Country'},
'fetched': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'remote_id': ('django.db.models.fields.BigIntegerField', [], {'unique': 'True'})
},
u'vkontakte_users.user': {
'Meta': {'object_name': 'User'},
'about': ('django.db.models.fields.TextField', [], {}),
'activity': ('django.db.models.fields.TextField', [], {}),
'albums': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'audios': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'bdate': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'books': ('django.db.models.fields.TextField', [], {}),
'city': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['vkontakte_places.City']", 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'counters_updated': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['vkontakte_places.Country']", 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'facebook': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'facebook_name': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'faculty': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}),
'faculty_name': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'fetched': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'followers': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'friends': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'friends_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'friends_users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'followers_users'", 'symmetrical': 'False', 'to': u"orm['vkontakte_users.User']"}),
'games': ('django.db.models.fields.TextField', [], {}),
'graduation': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}),
'has_avatar': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'has_mobile': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'home_phone': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'interests': ('django.db.models.fields.TextField', [], {}),
'is_deactivated': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'livejournal': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'mobile_phone': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'movies': ('django.db.models.fields.TextField', [], {}),
'mutual_friends': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'notes': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'photo': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'photo_big': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'photo_medium': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'photo_medium_rec': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'photo_rec': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'rate': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}),
'relation': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True'}),
'remote_id': ('django.db.models.fields.BigIntegerField', [], {'primary_key': 'True'}),
'screen_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'db_index': 'True'}),
'sex': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'skype': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'subscriptions': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'sum_counters': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'timezone': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'tv': ('django.db.models.fields.TextField', [], {}),
'twitter': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'university': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}),
'university_name': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'user_photos': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'user_videos': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'videos': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'wall_comments': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'})
},
u'vkontakte_wall.comment': {
'Meta': {'object_name': 'Comment'},
'archived': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'author_content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'comments'", 'to': u"orm['contenttypes.ContentType']"}),
'author_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'fetched': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'from_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'like_users': ('m2m_history.fields.ManyToManyHistoryField', [], {'related_name': "'like_comments'", 'symmetrical': 'False', 'to': u"orm['vkontakte_users.User']"}),
'likes': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'post': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'wall_comments'", 'to': u"orm['vkontakte_wall.Post']"}),
'raw_html': ('django.db.models.fields.TextField', [], {}),
'raw_json': ('annoying.fields.JSONField', [], {'default': '{}', 'null': 'True'}),
'remote_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': "'20'"}),
'reply_for_content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'replies'", 'null': 'True', 'to': u"orm['contenttypes.ContentType']"}),
'reply_for_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'db_index': 'True'}),
'reply_to': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['vkontakte_wall.Comment']", 'null': 'True'}),
'text': ('django.db.models.fields.TextField', [], {}),
'wall_owner_content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'vkontakte_wall_comments'", 'to': u"orm['contenttypes.ContentType']"}),
'wall_owner_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
u'vkontakte_wall.post': {
'Meta': {'object_name': 'Post'},
'archived': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'attachments': ('django.db.models.fields.TextField', [], {}),
'author_content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'vkontakte_posts'", 'to': u"orm['contenttypes.ContentType']"}),
'author_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'comments': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'copy_owner_content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'vkontakte_wall_copy_posts'", 'null': 'True', 'to': u"orm['contenttypes.ContentType']"}),
'copy_owner_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'db_index': 'True'}),
'copy_post': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['vkontakte_wall.Post']", 'null': 'True'}),
'copy_text': ('django.db.models.fields.TextField', [], {}),
'date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'fetched': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'geo': ('django.db.models.fields.TextField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'like_users': ('m2m_history.fields.ManyToManyHistoryField', [], {'related_name': "'like_posts'", 'symmetrical': 'False', 'to': u"orm['vkontakte_users.User']"}),
'likes': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'media': ('django.db.models.fields.TextField', [], {}),
'online': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
'post_source': ('django.db.models.fields.TextField', [], {}),
'raw_html': ('django.db.models.fields.TextField', [], {}),
'raw_json': ('annoying.fields.JSONField', [], {'default': '{}', 'null': 'True'}),
'remote_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': "'20'"}),
'reply_count': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}),
'repost_users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'repost_posts'", 'symmetrical': 'False', 'to': u"orm['vkontakte_users.User']"}),
'reposts': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'signer_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}),
'text': ('django.db.models.fields.TextField', [], {}),
'wall_owner_content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'vkontakte_wall_posts'", 'to': u"orm['contenttypes.ContentType']"}),
'wall_owner_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
}
}
complete_apps = ['vkontakte_wall'] | bsd-3-clause | -1,909,253,257,071,442,400 | 78.967213 | 197 | 0.572405 | false |
MediaKraken/MediaKraken_Deployment | source/subprogram_metadata_games.py | 1 | 21188 | """
Copyright (C) 2015 Quinn D Granfor <[email protected]>
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
version 2, as published by the Free Software Foundation.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License version 2 for more details.
You should have received a copy of the GNU General Public License
version 2 along with this program; if not, write to the Free
Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
MA 02110-1301, USA.
"""
import json
import os
import sys
import zipfile
import xmltodict
from common import common_config_ini
from common import common_internationalization
from common import common_logging_elasticsearch_httpx
from common import common_network
from common import common_signal
from common import common_system
# verify this program isn't already running!
if common_system.com_process_list(
process_name='/usr/bin/python3 /mediakraken/subprogram_metadata_games.py'):
sys.exit(0)
# start logging
common_logging_elasticsearch_httpx.com_es_httpx_post(message_type='info',
message_text='START',
index_name='subprogram_metadata_games')
# set signal exit breaks
common_signal.com_signal_set_break()
# open the database
option_config_json, db_connection = common_config_ini.com_config_read(force_local=False)
# technically arcade games are "systems"....
# they just don't have @isdevice = 'yes' like mess hardware does
# However, mame games are still being put as "games" and not systems
# to ease search and other filters by game/system
update_game = 0
insert_game = 0
def process_mame_record(game_xml):
global update_game
global insert_game
# TODO change this to upsert
json_data = xmltodict.parse(game_xml)
# see if exists then need to update
if db_connection.db_meta_game_list_count(json_data['machine']['@name']) > 0:
# TODO handle shortname properly
db_connection.db_meta_game_update(None, json_data['machine']['@name'],
json_data['machine']['description'],
json_data)
update_game += 1
else:
# TODO handle shortname properly
db_connection.db_meta_game_insert(None, json_data['machine']['@name'],
json_data['machine']['description'],
json_data)
insert_game += 1
# create mame game list
file_name = ('/mediakraken/emulation/mame0%slx.zip' %
option_config_json['MAME']['Version'])
# only do the parse/import if not processed before
if not os.path.exists(file_name):
common_network.mk_network_fetch_from_url(
('https://github.com/mamedev/mame/releases/download/mame0%s/mame0%slx.zip'
% (option_config_json['MAME']['Version'], option_config_json['MAME']['Version'])),
file_name)
# unzip to file to try to save memory
zip_handle = zipfile.ZipFile(file_name, 'r') # issues if u do RB
zip_handle.extractall('/mediakraken/emulation/')
zip_handle.close()
# this is done to keep xmltodict from running out of memory while processing
game_xml = ''
first_record = True
old_line = None
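# MAME's -listxml output is far too large to parse in one go, so the file is
# streamed line by line and each <machine>...</machine> block is handed to
# xmltodict individually via process_mame_record().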
with open('/mediakraken/emulation/mame0%s.xml'
% option_config_json['MAME']['Version']) as infile:
for line in infile:
if line.find('</mame>') == 0: # skip the last line
pass
elif line.find(' <machine') == 0: # first position of line
old_line = line
if first_record is False:
process_mame_record(line + game_xml)
game_xml = ''
first_record = False
else:
if first_record is False:
game_xml += line
# game_xml += line # get last value - do NOT do this as it'll attach </mame>
# do last machine
process_mame_record(old_line + game_xml)
# write totals
if update_game > 0:
db_connection.db_notification_insert(
common_internationalization.com_inter_number_format(update_game)
+ " games(s) metadata updated from MAME %s XML" % option_config_json['MAME']['Version'],
True)
if insert_game > 0:
db_connection.db_notification_insert(
common_internationalization.com_inter_number_format(insert_game)
+ " games(s) metadata added from MAME %s XML" % option_config_json['MAME']['Version'],
True)
# commit all changes to db
db_connection.db_commit()
# load games from hash files
file_name = ('/mediakraken/emulation/mame0%s.zip' %
option_config_json['MAME']['Version'])
# only do the parse/import if not processed before
if not os.path.exists(file_name):
common_network.mk_network_fetch_from_url(
('https://github.com/mamedev/mame/archive/mame0%s.zip'
% option_config_json['MAME']['Version']),
file_name)
total_software = 0
total_software_update = 0
zip_handle = zipfile.ZipFile(file_name, 'r') # issues if u do RB
zip_handle.extractall('/mediakraken/emulation/')
zip_handle.close()
for zippedfile in os.listdir('/mediakraken/emulation/mame-mame0%s/hash'
% option_config_json['MAME']['Version']):
# find system id from mess
file_name, ext = os.path.splitext(zippedfile)
print('fil,etx %s %s' % (file_name, ext), flush=True)
if ext == ".xml" or ext == ".hsi":
file_handle = open(os.path.join('/mediakraken/emulation/mame-mame0%s/hash'
% option_config_json['MAME']['Version'], zippedfile),
"r",
encoding='utf-8')
json_data = xmltodict.parse(file_handle.read())
file_handle.close()
game_short_name_guid \
= db_connection.db_meta_games_system_guid_by_short_name(file_name)
print('wh %s' % game_short_name_guid, flush=True)
if game_short_name_guid is None:
game_short_name_guid = db_connection.db_meta_games_system_insert(
None, file_name, None)
print('json: %s' % json_data, flush=True)
if ext == ".xml":
# could be no games in list
if 'software' in json_data['softwarelist']:
print(json_data['softwarelist']['software'], flush=True)
# TODO this fails if only one game
print(len(json_data['softwarelist']['software']), flush=True)
if '@name' in json_data['softwarelist']['software']:
# TODO check to see if exists....upsert instead
db_connection.db_meta_game_insert(game_short_name_guid,
json_data['softwarelist']['software'][
'@name'],
json_data['softwarelist']['software'][
'@name'],
json_data['softwarelist']['software'])
else:
for json_game in json_data['softwarelist']['software']:
print(('xml: %s', json_game), flush=True)
# json_game = json.loads(json_game)
# TODO check to see if exists....upsert instead
# build args and insert the record
db_connection.db_meta_game_insert(game_short_name_guid,
json_game['@name'],
json_game['@name'], json_game)
total_software += 1
elif ext == ".hsi":
# could be no games in list
if 'hash' in json_data['hashfile']:
if '@name' in json_data['hashfile']['hash']:
# TODO check to see if exists....upsert instead
db_connection.db_meta_game_insert(game_short_name_guid,
json_data['hashfile']['hash'][
'@name'],
json_data['hashfile']['hash'][
'@name'],
json_data['hashfile']['hash'])
else:
for json_game in json_data['hashfile']['hash']:
print('hsi: %s' % json_game, flush=True)
# TODO check to see if exists....upsert instead
# build args and insert the record
db_connection.db_meta_game_insert(game_short_name_guid,
json_game['@name'],
json_game['@name'], json_game)
total_software += 1
if total_software > 0:
db_connection.db_notification_insert(
common_internationalization.com_inter_number_format(total_software)
+ " games(s) metadata added from MAME %s hash" % option_config_json['MAME']['Version'],
True)
if total_software_update > 0:
db_connection.db_notification_insert(
common_internationalization.com_inter_number_format(
total_software_update)
+ " games(s) metadata updated from MAME %s hash" % option_config_json['MAME'][
'Version'], True)
# commit all changes to db
db_connection.db_commit()
# update mame game descriptions from history dat
file_name = ('/mediakraken/emulation/history%s.zip' %
option_config_json['MAME']['Version'])
# only do the parse/import if not processed before
if not os.path.exists(file_name):
common_network.mk_network_fetch_from_url(
('https://www.arcade-history.com/dats/historydat%s.zip' %
option_config_json['MAME']['Version']),
file_name)
game_titles = []
game_desc = ""
add_to_desc = False
new_title = None
total_software = 0
total_software_update = 0
system_name = None
# do this all the time, since could be a new one
with zipfile.ZipFile(file_name, 'r') as zf:
zf.extract('history.dat', '/mediakraken/emulation/')
history_file = open("/mediakraken/emulation/history.dat", "r",
encoding='utf-8')
while 1:
line = history_file.readline()
# print('line: %s' % line, flush=True)
if not line:
break
if line[0] == '$' and line[-1:] == ',': # this could be a new system/game item
# MAME "system"....generally a PCB game
if line.find("$info=") == 0: # goes by position if found
system_name = None
game_titles = line.split("=", 1)[1].split(",")
# end of info block for game
elif line.find("$end") == 0: # goes by position if found
add_to_desc = False
for game in game_titles:
print('game: %s' % game, flush=True)
game_data = db_connection.db_meta_game_by_name_and_system(game, system_name)[0]
print('data: %s' % game_data, flush=True)
if game_data is None:
db_connection.db_meta_game_insert(
db_connection.db_meta_games_system_guid_by_short_name(
system_name),
new_title, game, json.dumps({'overview': game_desc}))
total_software += 1
else:
game_data['gi_game_info_json']['overview'] = game_desc
print(game_data['gi_id'], flush=True)
db_connection.db_meta_game_update_by_guid(game_data['gi_id'],
json.dumps(game_data[
'gi_game_info_json']))
total_software_update += 1
game_desc = ''
# this line can be skipped and is basically the "start" of game info
elif line.find("$bio") == 0: # goes by position if found
line = history_file.readline() # skip blank line
new_title = history_file.readline().strip() # grab the "real" game name
add_to_desc = True
else:
# should be a system/game
system_name = line[1:].split('=', 1)[0]
game_titles = line.split("=", 1)[1].split(",")
else:
if add_to_desc:
game_desc += line
history_file.close()
if total_software > 0:
db_connection.db_notification_insert(
common_internationalization.com_inter_number_format(total_software)
+ " games(s) metadata added from MAME %s hash" % option_config_json['MAME']['Version'],
True)
if total_software_update > 0:
db_connection.db_notification_insert(
common_internationalization.com_inter_number_format(
total_software_update)
+ " games(s) metadata updated from MAME %s hash" % option_config_json['MAME'][
'Version'], True)
# commit all changes to db
db_connection.db_commit()
# read the category file and create dict/list for it
file_name = ('/mediakraken/emulation/category%s.zip' %
option_config_json['MAME']['Version'])
# only do the parse/import if not processed before
if not os.path.exists(file_name):
common_network.mk_network_fetch_from_url(
(
'https://www.progettosnaps.net/download?tipo=category&file=/renameset/packs/pS_category_%s.zip' %
option_config_json['MAME']['Version']),
file_name)
with zipfile.ZipFile(file_name, 'r') as zf:
zf.extract('folders/category.ini', '/mediakraken/emulation/')
history_file = open("/mediakraken/emulation/category.ini", "r",
encoding='utf-8')
cat_file = open("category.ini", "r", encoding='utf-8')
cat_dictionary = {}
category = ""
while 1:
line = cat_file.readline()
if not line:
break
if line.find("[") == 0:
category = line.replace("[", "").replace("]", "").replace(" ", "").rstrip('\n').rstrip(
'\r') # wipe out space to make the category table
elif len(line) > 1:
result_value = db_connection.db_meta_game_category_by_name(category)
if result_value is None:
result_value = db_connection.db_meta_game_category_add(category)
cat_dictionary[line.strip()] = result_value
# grab all system null in db as those are mame
for sql_row in db_connection.db_media_mame_game_list():
db_connection.db_media_game_category_update(cat_dictionary[sql_row['gi_short_name']],
sql_row['gi_id'])
# grab all the non parent roms that aren't set
for sql_row in db_connection.db_media_game_clone_list():
for sql_cat_row in db_connection.db_media_game_category_by_name(sql_row['gi_cloneof']):
db_connection.db_media_game_category_update(sql_cat_row['gi_gc_category'],
sql_row['gi_id'])
# update mess system description
file_name = ('/mediakraken/emulation/messinfo%s.zip' %
option_config_json['MAME']['Version'])
# only do the parse/import if not processed before
if not os.path.exists(file_name):
common_network.mk_network_fetch_from_url(
(
'https://www.progettosnaps.net/download?tipo=messinfo&file=pS_messinfo_%s.zip' %
option_config_json['MAME']['Version']),
file_name)
with zipfile.ZipFile(file_name, 'r') as zf:
zf.extract('messinfo.dat', '/mediakraken/emulation/')
infile = open("/mediakraken/emulation/messinfo.dat", "r",
encoding='utf-8')
start_system_read = False
skip_next_line = False
long_name_next = False
desc_next = False
wip_in_progress = False
romset_in_progress = False
# store args to sql
sys_short_name = ""
sys_longname = None
sys_manufacturer = None
sys_year = None
sys_desc = None
sys_emulation = None
sys_color = None
sys_sound = None
sys_graphics = None
sys_save_state = None
sys_wip = ""
sys_romset = None
sql_string = ""
while 1:
line = infile.readline()
if not line:
break
if skip_next_line:
skip_next_line = False
else:
if line.find("DRIVERS INFO") != -1: # stop at drivers
break
line = line.replace(" ", "")
if line[0] == "#" or len(line) < 4 \
or line.find("$mame") == 0: # skip comments and blank lines
if line.find("$mame") == 0:
skip_next_line = True
long_name_next = True
elif line.find("$info") == 0: # found so begin start system read
start_system_read = True
# load the short name
sys_short_name = line.split('=')[1]
elif line.find("Emulation:") == 0: # found so begin start system read
sys_emulation = line.split(' ')[1]
elif line.find("Color:") == 0: # found so begin start system read
sys_color = line.split(' ')[1]
elif line.find("Sound:") == 0: # found so begin start system read
sys_sound = line.split(' ')[1]
elif line.find("Graphics:") == 0: # found so begin start system read
sys_graphics = line.split(' ')[1]
elif line.find("Save State:") == 0: # found so begin start system read
if line.rsplit(' ', 1)[1][:-1] == "Supported":
sys_save_state = True
else:
sys_save_state = False
elif line.find("WIP:") == 0: # found so begin start system read
wip_in_progress = True
elif line.find("Romset:") == 0: # found so begin start system read
wip_in_progress = False
romset_in_progress = True
else:
if wip_in_progress and line.find("Romset:") != 0:
# sys_wip += line[:-1] + "<BR>"
pass
if romset_in_progress and line.find("$end") != 0:
# sys_romset += line[:-1] + "<BR>"
pass
if desc_next:
sys_desc = line
desc_next = False
if long_name_next:
try:
sys_longname, sys_manufacturer, sys_year = line.split(',')
except:
                    sys_longname, sys_manufacturer, sys_year = line.rsplit(',', 2)
long_name_next = False
desc_next = True
if line.find("$end") == 0: # end of system info so store system into db
romset_in_progress = False
if sys_desc[:-1] == "...":
sys_desc = None
else:
sys_desc = sys_desc[:-1]
sys_emulation = sys_emulation[:-1]
sys_color = sys_color[:-1]
sys_sound = sys_sound[:-1]
sys_graphics = sys_graphics[:-1]
# upsert the system
db_connection.db_meta_game_system_upsert(sys_short_name[:-1],
sys_longname,
sys_desc, sys_year[:-1],
sys_manufacturer,
sys_emulation,
sys_color, sys_sound,
sys_graphics, sys_save_state)
sys_wip = None
sys_romset = None
# commit all changes to db
db_connection.db_commit()
# close the database
db_connection.db_close()
| gpl-3.0 | 8,125,929,116,995,987,000 | 45.772627 | 113 | 0.517604 | false |
SnabbCo/neutron | neutron/extensions/vpnaas.py | 9 | 18214 | # (c) Copyright 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Swaminathan Vasudevan, Hewlett-Packard.
import abc
import six
from neutron.api import extensions
from neutron.api.v2 import attributes as attr
from neutron.api.v2 import resource_helper
from neutron.common import exceptions as qexception
from neutron.plugins.common import constants
from neutron.services import service_base
class VPNServiceNotFound(qexception.NotFound):
message = _("VPNService %(vpnservice_id)s could not be found")
class IPsecSiteConnectionNotFound(qexception.NotFound):
message = _("ipsec_site_connection %(ipsecsite_conn_id)s not found")
class IPsecSiteConnectionDpdIntervalValueError(qexception.InvalidInput):
message = _("ipsec_site_connection %(attr)s is "
"equal to or less than dpd_interval")
class IPsecSiteConnectionMtuError(qexception.InvalidInput):
message = _("ipsec_site_connection MTU %(mtu)d is too small "
"for ipv%(version)s")
class IKEPolicyNotFound(qexception.NotFound):
message = _("IKEPolicy %(ikepolicy_id)s could not be found")
class IPsecPolicyNotFound(qexception.NotFound):
message = _("IPsecPolicy %(ipsecpolicy_id)s could not be found")
class IKEPolicyInUse(qexception.InUse):
message = _("IKEPolicy %(ikepolicy_id)s is in use by existing "
"IPsecSiteConnection and can't be updated or deleted")
class VPNServiceInUse(qexception.InUse):
message = _("VPNService %(vpnservice_id)s is still in use")
class RouterInUseByVPNService(qexception.InUse):
message = _("Router %(router_id)s is used by VPNService %(vpnservice_id)s")
class VPNStateInvalidToUpdate(qexception.BadRequest):
message = _("Invalid state %(state)s of vpnaas resource %(id)s"
" for updating")
class IPsecPolicyInUse(qexception.InUse):
message = _("IPsecPolicy %(ipsecpolicy_id)s is in use by existing "
"IPsecSiteConnection and can't be updated or deleted")
class DeviceDriverImportError(qexception.NeutronException):
message = _("Can not load driver :%(device_driver)s")
class SubnetIsNotConnectedToRouter(qexception.BadRequest):
message = _("Subnet %(subnet_id)s is not "
"connected to Router %(router_id)s")
class RouterIsNotExternal(qexception.BadRequest):
message = _("Router %(router_id)s has no external network gateway set")
vpn_supported_initiators = ['bi-directional', 'response-only']
vpn_supported_encryption_algorithms = ['3des', 'aes-128',
'aes-192', 'aes-256']
vpn_dpd_supported_actions = [
'hold', 'clear', 'restart', 'restart-by-peer', 'disabled'
]
vpn_supported_transform_protocols = ['esp', 'ah', 'ah-esp']
vpn_supported_encapsulation_mode = ['tunnel', 'transport']
#TODO(nati) add kilobytes when we support it
vpn_supported_lifetime_units = ['seconds']
vpn_supported_pfs = ['group2', 'group5', 'group14']
vpn_supported_ike_versions = ['v1', 'v2']
vpn_supported_auth_mode = ['psk']
vpn_supported_auth_algorithms = ['sha1']
vpn_supported_phase1_negotiation_mode = ['main']
vpn_lifetime_limits = (60, attr.UNLIMITED)
positive_int = (0, attr.UNLIMITED)
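# Each top-level key below becomes a REST collection; every attribute entry
# declares whether it can be set on create (allow_post) or update (allow_put),
# its validator, its default and whether it is returned to API clients (is_visible).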
RESOURCE_ATTRIBUTE_MAP = {
'vpnservices': {
'id': {'allow_post': False, 'allow_put': False,
'validate': {'type:uuid': None},
'is_visible': True,
'primary_key': True},
'tenant_id': {'allow_post': True, 'allow_put': False,
'validate': {'type:string': None},
'required_by_policy': True,
'is_visible': True},
'name': {'allow_post': True, 'allow_put': True,
'validate': {'type:string': None},
'is_visible': True, 'default': ''},
'description': {'allow_post': True, 'allow_put': True,
'validate': {'type:string': None},
'is_visible': True, 'default': ''},
'subnet_id': {'allow_post': True, 'allow_put': False,
'validate': {'type:uuid': None},
'is_visible': True},
'router_id': {'allow_post': True, 'allow_put': False,
'validate': {'type:uuid': None},
'is_visible': True},
'admin_state_up': {'allow_post': True, 'allow_put': True,
'default': True,
'convert_to': attr.convert_to_boolean,
'is_visible': True},
'status': {'allow_post': False, 'allow_put': False,
'is_visible': True}
},
'ipsec_site_connections': {
'id': {'allow_post': False, 'allow_put': False,
'validate': {'type:uuid': None},
'is_visible': True,
'primary_key': True},
'tenant_id': {'allow_post': True, 'allow_put': False,
'validate': {'type:string': None},
'required_by_policy': True,
'is_visible': True},
'name': {'allow_post': True, 'allow_put': True,
'validate': {'type:string': None},
'is_visible': True, 'default': ''},
'description': {'allow_post': True, 'allow_put': True,
'validate': {'type:string': None},
'is_visible': True, 'default': ''},
'peer_address': {'allow_post': True, 'allow_put': True,
'validate': {'type:string': None},
'is_visible': True},
'peer_id': {'allow_post': True, 'allow_put': True,
'validate': {'type:string': None},
'is_visible': True},
'peer_cidrs': {'allow_post': True, 'allow_put': True,
'convert_to': attr.convert_to_list,
'validate': {'type:subnet_list': None},
'is_visible': True},
'route_mode': {'allow_post': False, 'allow_put': False,
'default': 'static',
'is_visible': True},
'mtu': {'allow_post': True, 'allow_put': True,
'default': '1500',
'validate': {'type:range': positive_int},
'convert_to': attr.convert_to_int,
'is_visible': True},
'initiator': {'allow_post': True, 'allow_put': True,
'default': 'bi-directional',
'validate': {'type:values': vpn_supported_initiators},
'is_visible': True},
'auth_mode': {'allow_post': False, 'allow_put': False,
'default': 'psk',
'validate': {'type:values': vpn_supported_auth_mode},
'is_visible': True},
'psk': {'allow_post': True, 'allow_put': True,
'validate': {'type:string': None},
'is_visible': True},
'dpd': {'allow_post': True, 'allow_put': True,
'convert_to': attr.convert_none_to_empty_dict,
'is_visible': True,
'default': {},
'validate': {
'type:dict_or_empty': {
'actions': {
'type:values': vpn_dpd_supported_actions,
},
'interval': {
'type:range': positive_int
},
'timeout': {
'type:range': positive_int
}}}},
'admin_state_up': {'allow_post': True, 'allow_put': True,
'default': True,
'convert_to': attr.convert_to_boolean,
'is_visible': True},
'status': {'allow_post': False, 'allow_put': False,
'is_visible': True},
'vpnservice_id': {'allow_post': True, 'allow_put': False,
'validate': {'type:uuid': None},
'is_visible': True},
'ikepolicy_id': {'allow_post': True, 'allow_put': False,
'validate': {'type:uuid': None},
'is_visible': True},
'ipsecpolicy_id': {'allow_post': True, 'allow_put': False,
'validate': {'type:uuid': None},
'is_visible': True}
},
'ipsecpolicies': {
'id': {'allow_post': False, 'allow_put': False,
'validate': {'type:uuid': None},
'is_visible': True,
'primary_key': True},
'tenant_id': {'allow_post': True, 'allow_put': False,
'validate': {'type:string': None},
'required_by_policy': True,
'is_visible': True},
'name': {'allow_post': True, 'allow_put': True,
'validate': {'type:string': None},
'is_visible': True, 'default': ''},
'description': {'allow_post': True, 'allow_put': True,
'validate': {'type:string': None},
'is_visible': True, 'default': ''},
'transform_protocol': {
'allow_post': True,
'allow_put': True,
'default': 'esp',
'validate': {
'type:values': vpn_supported_transform_protocols},
'is_visible': True},
'auth_algorithm': {
'allow_post': True,
'allow_put': True,
'default': 'sha1',
'validate': {
'type:values': vpn_supported_auth_algorithms
},
'is_visible': True},
'encryption_algorithm': {
'allow_post': True,
'allow_put': True,
'default': 'aes-128',
'validate': {
'type:values': vpn_supported_encryption_algorithms
},
'is_visible': True},
'encapsulation_mode': {
'allow_post': True,
'allow_put': True,
'default': 'tunnel',
'validate': {
'type:values': vpn_supported_encapsulation_mode
},
'is_visible': True},
'lifetime': {'allow_post': True, 'allow_put': True,
'convert_to': attr.convert_none_to_empty_dict,
'default': {},
'validate': {
'type:dict_or_empty': {
'units': {
'type:values': vpn_supported_lifetime_units,
},
'value': {
'type:range': vpn_lifetime_limits
}}},
'is_visible': True},
'pfs': {'allow_post': True, 'allow_put': True,
'default': 'group5',
'validate': {'type:values': vpn_supported_pfs},
'is_visible': True}
},
'ikepolicies': {
'id': {'allow_post': False, 'allow_put': False,
'validate': {'type:uuid': None},
'is_visible': True,
'primary_key': True},
'tenant_id': {'allow_post': True, 'allow_put': False,
'validate': {'type:string': None},
'required_by_policy': True,
'is_visible': True},
'name': {'allow_post': True, 'allow_put': True,
'validate': {'type:string': None},
'is_visible': True, 'default': ''},
'description': {'allow_post': True, 'allow_put': True,
'validate': {'type:string': None},
'is_visible': True, 'default': ''},
'auth_algorithm': {'allow_post': True, 'allow_put': True,
'default': 'sha1',
'validate': {
'type:values': vpn_supported_auth_algorithms},
'is_visible': True},
'encryption_algorithm': {
'allow_post': True, 'allow_put': True,
'default': 'aes-128',
'validate': {'type:values': vpn_supported_encryption_algorithms},
'is_visible': True},
'phase1_negotiation_mode': {
'allow_post': True, 'allow_put': True,
'default': 'main',
'validate': {
'type:values': vpn_supported_phase1_negotiation_mode
},
'is_visible': True},
'lifetime': {'allow_post': True, 'allow_put': True,
'convert_to': attr.convert_none_to_empty_dict,
'default': {},
'validate': {
'type:dict_or_empty': {
'units': {
'type:values': vpn_supported_lifetime_units,
},
'value': {
'type:range': vpn_lifetime_limits,
}}},
'is_visible': True},
'ike_version': {'allow_post': True, 'allow_put': True,
'default': 'v1',
'validate': {
'type:values': vpn_supported_ike_versions},
'is_visible': True},
'pfs': {'allow_post': True, 'allow_put': True,
'default': 'group5',
'validate': {'type:values': vpn_supported_pfs},
'is_visible': True}
}
}
class Vpnaas(extensions.ExtensionDescriptor):
@classmethod
def get_name(cls):
return "VPN service"
@classmethod
def get_alias(cls):
return "vpnaas"
@classmethod
def get_description(cls):
return "Extension for VPN service"
@classmethod
def get_namespace(cls):
return "https://wiki.openstack.org/Neutron/VPNaaS"
@classmethod
def get_updated(cls):
return "2013-05-29T10:00:00-00:00"
@classmethod
def get_resources(cls):
special_mappings = {'ikepolicies': 'ikepolicy',
'ipsecpolicies': 'ipsecpolicy'}
plural_mappings = resource_helper.build_plural_mappings(
special_mappings, RESOURCE_ATTRIBUTE_MAP)
plural_mappings['peer_cidrs'] = 'peer_cidr'
attr.PLURALS.update(plural_mappings)
return resource_helper.build_resource_info(plural_mappings,
RESOURCE_ATTRIBUTE_MAP,
constants.VPN,
register_quota=True,
translate_name=True)
@classmethod
def get_plugin_interface(cls):
return VPNPluginBase
def update_attributes_map(self, attributes):
super(Vpnaas, self).update_attributes_map(
attributes, extension_attrs_map=RESOURCE_ATTRIBUTE_MAP)
def get_extended_resources(self, version):
if version == "2.0":
return RESOURCE_ATTRIBUTE_MAP
else:
return {}
@six.add_metaclass(abc.ABCMeta)
class VPNPluginBase(service_base.ServicePluginBase):
def get_plugin_name(self):
return constants.VPN
def get_plugin_type(self):
return constants.VPN
def get_plugin_description(self):
return 'VPN service plugin'
@abc.abstractmethod
def get_vpnservices(self, context, filters=None, fields=None):
pass
@abc.abstractmethod
def get_vpnservice(self, context, vpnservice_id, fields=None):
pass
@abc.abstractmethod
def create_vpnservice(self, context, vpnservice):
pass
@abc.abstractmethod
def update_vpnservice(self, context, vpnservice_id, vpnservice):
pass
@abc.abstractmethod
def delete_vpnservice(self, context, vpnservice_id):
pass
@abc.abstractmethod
def get_ipsec_site_connections(self, context, filters=None, fields=None):
pass
@abc.abstractmethod
def get_ipsec_site_connection(self, context,
ipsecsite_conn_id, fields=None):
pass
@abc.abstractmethod
def create_ipsec_site_connection(self, context, ipsec_site_connection):
pass
@abc.abstractmethod
def update_ipsec_site_connection(self, context,
ipsecsite_conn_id, ipsec_site_connection):
pass
@abc.abstractmethod
def delete_ipsec_site_connection(self, context, ipsecsite_conn_id):
pass
@abc.abstractmethod
def get_ikepolicy(self, context, ikepolicy_id, fields=None):
pass
@abc.abstractmethod
def get_ikepolicies(self, context, filters=None, fields=None):
pass
@abc.abstractmethod
def create_ikepolicy(self, context, ikepolicy):
pass
@abc.abstractmethod
def update_ikepolicy(self, context, ikepolicy_id, ikepolicy):
pass
@abc.abstractmethod
def delete_ikepolicy(self, context, ikepolicy_id):
pass
@abc.abstractmethod
def get_ipsecpolicies(self, context, filters=None, fields=None):
pass
@abc.abstractmethod
def get_ipsecpolicy(self, context, ipsecpolicy_id, fields=None):
pass
@abc.abstractmethod
def create_ipsecpolicy(self, context, ipsecpolicy):
pass
@abc.abstractmethod
def update_ipsecpolicy(self, context, ipsecpolicy_id, ipsecpolicy):
pass
@abc.abstractmethod
def delete_ipsecpolicy(self, context, ipsecpolicy_id):
pass
| apache-2.0 | -8,009,310,376,805,833,000 | 36.788382 | 79 | 0.519985 | false |
leandrotoledo/python-telegram-bot | telegram/files/animation.py | 2 | 5198 | #!/usr/bin/env python
#
# A library that provides a Python interface to the Telegram Bot API
# Copyright (C) 2015-2021
# Leandro Toledo de Souza <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser Public License for more details.
#
# You should have received a copy of the GNU Lesser Public License
# along with this program. If not, see [http://www.gnu.org/licenses/].
"""This module contains an object that represents a Telegram Animation."""
from typing import TYPE_CHECKING, Any, Optional
from telegram import PhotoSize, TelegramObject
from telegram.utils.helpers import DEFAULT_NONE
from telegram.utils.types import JSONDict, ODVInput
if TYPE_CHECKING:
from telegram import Bot, File
class Animation(TelegramObject):
"""This object represents an animation file (GIF or H.264/MPEG-4 AVC video without sound).
Objects of this class are comparable in terms of equality. Two objects of this class are
considered equal, if their :attr:`file_unique_id` is equal.
Args:
file_id (:obj:`str`): Identifier for this file, which can be used to download
or reuse the file.
file_unique_id (:obj:`str`): Unique identifier for this file, which
is supposed to be the same over time and for different bots.
Can't be used to download or reuse the file.
width (:obj:`int`): Video width as defined by sender.
height (:obj:`int`): Video height as defined by sender.
duration (:obj:`int`): Duration of the video in seconds as defined by sender.
thumb (:class:`telegram.PhotoSize`, optional): Animation thumbnail as defined by sender.
file_name (:obj:`str`, optional): Original animation filename as defined by sender.
mime_type (:obj:`str`, optional): MIME type of the file as defined by sender.
file_size (:obj:`int`, optional): File size.
bot (:class:`telegram.Bot`, optional): The Bot to use for instance methods.
**kwargs (:obj:`dict`): Arbitrary keyword arguments.
Attributes:
file_id (:obj:`str`): File identifier.
file_unique_id (:obj:`str`): Unique identifier for this file, which
is supposed to be the same over time and for different bots.
Can't be used to download or reuse the file.
width (:obj:`int`): Video width as defined by sender.
height (:obj:`int`): Video height as defined by sender.
duration (:obj:`int`): Duration of the video in seconds as defined by sender.
thumb (:class:`telegram.PhotoSize`): Optional. Animation thumbnail as defined by sender.
file_name (:obj:`str`): Optional. Original animation filename as defined by sender.
mime_type (:obj:`str`): Optional. MIME type of the file as defined by sender.
file_size (:obj:`int`): Optional. File size.
bot (:class:`telegram.Bot`): Optional. The Bot to use for instance methods.
"""
__slots__ = (
'bot',
'width',
'file_id',
'file_size',
'file_name',
'thumb',
'duration',
'mime_type',
'height',
'file_unique_id',
'_id_attrs',
)
def __init__(
self,
file_id: str,
file_unique_id: str,
width: int,
height: int,
duration: int,
thumb: PhotoSize = None,
file_name: str = None,
mime_type: str = None,
file_size: int = None,
bot: 'Bot' = None,
**_kwargs: Any,
):
# Required
self.file_id = str(file_id)
self.file_unique_id = str(file_unique_id)
self.width = int(width)
self.height = int(height)
self.duration = duration
# Optionals
self.thumb = thumb
self.file_name = file_name
self.mime_type = mime_type
self.file_size = file_size
self.bot = bot
self._id_attrs = (self.file_unique_id,)
@classmethod
def de_json(cls, data: Optional[JSONDict], bot: 'Bot') -> Optional['Animation']:
"""See :meth:`telegram.TelegramObject.de_json`."""
data = cls._parse_data(data)
if not data:
return None
data['thumb'] = PhotoSize.de_json(data.get('thumb'), bot)
return cls(bot=bot, **data)
def get_file(
self, timeout: ODVInput[float] = DEFAULT_NONE, api_kwargs: JSONDict = None
) -> 'File':
"""Convenience wrapper over :attr:`telegram.Bot.get_file`
For the documentation of the arguments, please see :meth:`telegram.Bot.get_file`.
Returns:
:class:`telegram.File`
Raises:
:class:`telegram.error.TelegramError`
"""
return self.bot.get_file(file_id=self.file_id, timeout=timeout, api_kwargs=api_kwargs)
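# Hedged usage sketch (added for illustration; not part of the original module).
# It assumes the usual python-telegram-bot dispatcher flow, where a handler is
# called with an Update whose message may carry an Animation. The handler name
# and download path are arbitrary.
def _example_animation_handler(update, context):
    animation = update.message.animation
    if animation is None:
        return
    # Metadata parsed in Animation.__init__ above
    print(animation.file_id, animation.width, animation.height, animation.duration)
    # get_file() (defined above) wraps Bot.get_file; File.download saves it locally
    animation.get_file().download('animation.mp4')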
| lgpl-3.0 | 2,308,843,740,912,829,000 | 36.941606 | 96 | 0.633513 | false |
Ledoux/ShareYourSystem | Pythonlogy/build/lib/ShareYourSystem/Standards/Itemizers/Parenter/__init__.py | 2 | 21776 | # -*- coding: utf-8 -*-
"""
<DefineSource>
@Date : Fri Nov 14 13:20:38 2014 \n
@Author : Erwan Ledoux \n\n
</DefineSource>
A Parenter completes the list of grand-parent nodes that
a child node could have. It acts only at one level.
"""
#<DefineAugmentation>
import ShareYourSystem as SYS
BaseModuleStr="ShareYourSystem.Standards.Itemizers.Manager"
DecorationModuleStr="ShareYourSystem.Standards.Classors.Classer"
SYS.setSubModule(globals())
SYS.addDo('Parenter','Parent','Parenting','Parented')
#</DefineAugmentation>
#<ImportSpecificModules>
import copy
from ShareYourSystem.Standards.Itemizers import Setter,Pather,Teamer
from ShareYourSystem.Standards.Interfacers import Printer
Manager=BaseModule
#</ImportSpecificModules>
#<DefineLocals>
ParentPreviousStr="^"
ParentGrandPreviousStr="^^"
ParentTopStr="Top"
ParentUpStr="?^"
ParentDownStr="?v"
ParentMutePrefixStr='!'
SYS.addSingPlural('Child','Children')
SYS.addSingPlural('GrandChild','GrandChildren')
ListDict=SYS.ListDict
#</DefineLocals>
#<DefineClass>
@DecorationClass(
**{
'ClassingSwitchMethodStrsList':[
'parent'
]
}
)
class ParenterClass(BaseClass):
def default_init(self,
_ParentKeyStr={
'DefaultValueType':property,
'PropertyInitVariable':"Top",
'PropertyDocStr':'I am reactive when I know my parent !'
},
_ParentTagStr="",
_ParentTeamTagStr="",
_ParentManagementTagStr="",
_ParentGrandTeamTagStr="",
_ParentGrandManagementTagStr="",
_ParentDeriveTeamerVariable=None,
_ParentGrandDeriveTeamerVariable=None,
_ParentTopDeriveTeamerVariable=None,
_ParentingTriggerVariable=None,
_ParentedTotalDeriveParentersList=None,
_ParentedTotalDeriveTeamersList=None,
_ParentedTotalDeriveManagersList=None,
_ParentedTotalListDict=None,
_ParentedTotalSingularListDict=None,
_ParentedTotalPathStr="",
_ParentedTeamPathStr="",
_ParentedManagementPathStr="",
_ParentedTriggerVariablesList=None,
**_KwargVariablesDict
):
#Call the parent init method
BaseClass.__init__(self,**_KwargVariablesDict)
#init
self.ParentedTotalDeriveParentersList=[]
self.ParentedTotalDeriveTeamersList=[]
self.ParentedTotalDeriveManagersList=[]
#set top
self.ParentTopDeriveTeamerVariable=self
#update
#self.TeamingBeforeSetVariable=SYS.update(self.TeamingBeforeSetVariable,[('ParentDeriveTeamerVariable',self)])
#self.ManagingBeforeSetVariable=SYS.update(self.ManagingBeforeSetVariable,[('ParentDeriveTeamerVariable',self)])
def do_parent(self):
#get
ParentedDeriveTeamerVariable=self.ParentDeriveTeamerVariable
#debug
'''
self.debug(
[
'We parent here',
('self.',self,[
#'ManagementPointDeriveTeamer',
'NameStr'
]),
'ParentedDeriveTeamerVariable!=None is',
str(ParentedDeriveTeamerVariable!=None)
]
)
'''
#Check
if ParentedDeriveTeamerVariable!=None:
#/####################/#
# Set shortly the grand parent
#
if hasattr(self.ParentDeriveTeamerVariable,'ParentGrandDeriveTeamerVariable'):
#get
self.ParentGrandDeriveTeamerVariable=self.ParentDeriveTeamerVariable.ParentDeriveTeamerVariable
#Check
if self.ParentGrandDeriveTeamerVariable!=None:
#Check
if self.ParentGrandDeriveTeamerVariable.TeamTagStr!="":
#set
self.ParentGrandTeamTagStr=self.ParentGrandDeriveTeamerVariable.TeamTagStr
#Check
if self.ParentGrandDeriveTeamerVariable.ManagementTagStr!="":
#set
self.ParentGrandManagementTagStr=self.ParentGrandDeriveTeamerVariable.ManagementTagStr
#/####################/#
# Now build the list chain of Teamers and Managers
#
#add
self.ParentedTotalDeriveParentersList=[
ParentedDeriveTeamerVariable
]+ParentedDeriveTeamerVariable.ParentedTotalDeriveParentersList
#add
if self.TeamTagStr!="":
#add
self.ParentedTotalDeriveTeamersList=[
ParentedDeriveTeamerVariable
]+ParentedDeriveTeamerVariable.ParentedTotalDeriveTeamersList
#set
self.ParentedTotalDeriveManagersList=ParentedDeriveTeamerVariable.ParentedTotalDeriveManagersList
else:
#add
self.ParentedTotalDeriveManagersList=[
ParentedDeriveTeamerVariable
]+ParentedDeriveTeamerVariable.ParentedTotalDeriveManagersList
#set
self.ParentedTotalDeriveTeamersList=ParentedDeriveTeamerVariable.ParentedTotalDeriveTeamersList
#/####################/#
# Now build the ordered dict chain of Teamers and Managers
#
#dict
self.ParentedTotalListDict=ListDict(
map(
lambda __ParentedTotalDeriveParenter:
(
__ParentedTotalDeriveParenter.TeamTagStr
if __ParentedTotalDeriveParenter.TeamTagStr!=""
else __ParentedTotalDeriveParenter.ManagementTagStr,
__ParentedTotalDeriveParenter
),
self.ParentedTotalDeriveParentersList
)
)
#debug
'''
self.debug(
[
'The ParentedTotalListDict is setted',
'self.ParentedTotalListDict.keys() is ',
str(self.ParentedTotalListDict.keys())
]
)
'''
#Check
if self.ParentDeriveTeamerVariable.TeamTagStr!='':
#init with
self.ParentedTotalSingularListDict=ListDict(
[(
SYS.getSingularStrWithPluralStr(
self.ParentDeriveTeamerVariable.TeamTagStr
),
self.ParentDeriveTeamerVariable.ParentDeriveTeamerVariable
)]
)
else:
#init
self.ParentedTotalSingularListDict=ListDict()
#dict
self.ParentedTotalSingularListDict.update(
SYS.filterNone(
map(
lambda __ParentedTotalDeriveTeamer:
(
SYS.getSingularStrWithPluralStr(
__ParentedTotalDeriveTeamer.ParentDeriveTeamerVariable.TeamTagStr
),
__ParentedTotalDeriveTeamer
)
if __ParentedTotalDeriveTeamer.ParentDeriveTeamerVariable!=None
else None,
self.ParentedTotalDeriveTeamersList
)
)
)
#/####################/#
# Now build the paths
#
#map
[
self.ParentedTotalPathStr,
self.ParentedTeamPathStr,
self.ParentedManagementPathStr,
]=map(
lambda __ParentedList:
Pather.PathPrefixStr+Pather.PathPrefixStr.join(
SYS.reverse(
map(
lambda __ParentedDeriveTeamer:
__ParentedDeriveTeamer.ParentKeyStr,
__ParentedList
)
)
),
map(
lambda __KeyStr:
getattr(self,__KeyStr),
[
'ParentedTotalDeriveParentersList',
'ParentedTotalDeriveTeamersList',
'ParentedTotalDeriveManagersList',
]
)
)
#debug
'''
self.debug(
("self.",self,[
'ParentedTotalPathStr',
'ParentedTeamPathStr',
'ParentedManagementPathStr'
]
)
)
'''
#/####################/#
# Set the top teamer variable
#
#Check
if len(self.ParentedTotalDeriveParentersList)>0:
#last one
self.ParentTopDeriveTeamerVariable=self.ParentedTotalDeriveParentersList[-1]
#debug
'''
self.debug(
('self.',self,['ParentTopDeriveTeamerVariable'])
)
'''
else:
#set
self.ParentTopDeriveTeamerVariable=self
#debug
'''
self.debug(
[
'Finally',
('self.',self,['ParentTopDeriveTeamerVariable'])
]
)
'''
#/####################/#
# link to the ParentTagStr
#
#set
self.ParentTagStr=self.ParentedTotalPathStr.replace('/','_')+'_'+self.ParentKeyStr
#/####################/#
# Adapt the shape of the ParentedTriggerVariablesList
# for the trigger
#init
self.ParentedTriggerVariablesList=SYS.SetList(
self.ParentingTriggerVariable
)
def mimic_team(self):
#/#################/#
# Call the base method
#
#debug
'''
self.debug(
[
'We team here',
'call the parent method firsts',
('self.',self,[
'TeamingKeyStr',
'ParentChildSetVariable'
])
]
)
'''
#call the base method
BaseClass.team(self)
#/#################/#
# Set the parent in the child
#
#debug
'''
self.debug(
('self.',self,['TeamingKeyStr'])
)
'''
#set
if hasattr(self.TeamedValueVariable,'ParentDeriveTeamerVariable'):
#Check
if self.TeamedValueVariable.ParentDeriveTeamerVariable!=self:
self.TeamedValueVariable.ParentDeriveTeamerVariable=self
self.TeamedValueVariable.ParentKeyStr=self.TeamingKeyStr
self.TeamedValueVariable.ParentManagementTagStr=self.TeamedValueVariable.ParentDeriveTeamerVariable.ManagementTagStr
#/###################/#
# Look maybe for a grandparent
#
#debug
'''
self.debug(
[
'We look for a grand parent here',
('self.',self,['ParentDeriveTeamerVariable'])
]
)
'''
#set
self.TeamedValueVariable.ParentGrandDeriveTeamerVariable=self.ParentDeriveTeamerVariable
#Check
if self.TeamedValueVariable.ParentGrandDeriveTeamerVariable!=None:
#set
self.TeamedValueVariable.ParentGrandTeamTagStr=self.TeamedValueVariable.ParentGrandDeriveTeamerVariable.TeamTagStr
def mimic_manage(self):
#/#################/#
# Call the base method
#
#debug
'''
self.debug(
[
'We manage here',
'call the base method first'
]
)
'''
#call the base method
BaseClass.manage(self)
#/#################/#
# Set the parent in the child
#
#debug
'''
self.debug(
('self.',self,['ManagingKeyStr'])
)
'''
#Check
if hasattr(self.ManagedValueVariable,'ParentDeriveTeamerVariable'):
#Check
if self.ManagedValueVariable.ParentDeriveTeamerVariable!=self:
#set
self.ManagedValueVariable.ParentDeriveTeamerVariable=self
self.ManagedValueVariable.ParentKeyStr=self.ManagingKeyStr
self.ManagedValueVariable.ParentTeamTagStr=self.ManagedValueVariable.ParentDeriveTeamerVariable.TeamTagStr
#/###################/#
# Look maybe for a grandparent
#
#set
self.ManagedValueVariable.ParentGrandDeriveTeamerVariable=self.ParentDeriveTeamerVariable
#Check
if self.ManagedValueVariable.ParentGrandDeriveTeamerVariable!=None:
#set
self.ManagedValueVariable.ParentGrandManagementTagStr=self.ManagedValueVariable.ParentGrandDeriveTeamerVariable.ManagementTagStr
def mimic_get(self):
#debug
'''
self.debug(
[
('self.',self,[
'GettingKeyVariable',
])
]
)
'''
#Check
if self.GettingKeyVariable==ParentPreviousStr:
#debug
'''
self.debug('We get the previous parent')
'''
#alias
self.GettedValueVariable=self.ParentDeriveTeamerVariable
#Stop the setting
return {"HookingIsBool":False}
elif self.GettingKeyVariable==ParentGrandPreviousStr:
#debug
'''
self.debug('We get the previous grand parent')
'''
#alias
self.GettedValueVariable=self.ParentGrandDeriveTeamerVariable
#Stop the setting
return {"HookingIsBool":False}
#Check
elif self.GettingKeyVariable==ParentTopStr:
#debug
'''
self.debug(
[
'We get the top parent',
('self.',self,['ParentTopDeriveTeamerVariable'])
]
)
'''
#alias
self.GettedValueVariable=self.ParentTopDeriveTeamerVariable
#Stop the setting
return {"HookingIsBool":False}
elif self.GettingKeyVariable==ParentUpStr:
#debug
self.debug(
[
'We command a up parent'
]
)
#command
self.command(
'^',
'#call:parent',
_BeforeWalkRigidBool=True,
_AfterSelfRigidBool=True
)
#return
return
elif self.GettingKeyVariable==ParentDownStr:
#debug
'''
self.debug(
'We command a down parent'
)
'''
#command
self.command(
'+'+Teamer.TeamChildPrefixStr+'.values+'+Manager.ManagementChildPrefixStr+'.values',
'#call:parent',
_AfterWalkRigidBool=True,
_BeforeSelfRigidBool=True
)
#return
return
#debug
'''
self.debug(
[
'get with the base method',
'BaseClass is '+str(BaseClass)
]
)
'''
#Call the base method
return BaseClass.get(self)
def mimic_set(self):
#Check
if type(self.SettingKeyVariable)==str:
#/##################/#
# Special DeriveParentTeamerVariable case
# just setattr to make the set shorter
#Check
if self.SettingKeyVariable=='ParentDeriveTeamerVariable':
#set
self.ParentDeriveTeamerVariable=self.SettingValueVariable
#return
return {'HookingIsBool':False}
#/##################/#
# Special Mute case
#
#Check
elif self.SettingKeyVariable.startswith(
ParentMutePrefixStr
):
#deprefix
MuteGetKeyStr=SYS.deprefix(
self.SettingKeyVariable,
ParentMutePrefixStr
)
#get
MuteGetValueVariable=self[MuteGetKeyStr]
#init
#MuteSetValueVariable=self.SettingValueVariable()['#map@set'](
# MuteGetValueVariable.__dict__
#)
MuteSetValueVariable=self.SettingValueVariable()
MuteSetValueVariable.__dict__=MuteGetValueVariable.__dict__
#debug
'''
self.debug(
[
'We are going to mute...',
'MuteGetKeyStr is '+str(MuteGetKeyStr),
'MuteGetValueVariable.TeamTagStr is '+str(MuteGetValueVariable.TeamTagStr),
'MuteGetValueVariable.ManagementTagStr is '+str(MuteGetValueVariable.ManagementTagStr),
('self.',self,['SettingValueVariable']),
'MuteSetValueVariable is ',
SYS._str(MuteSetValueVariable)
]
)
'''
#Check
if MuteGetValueVariable.ParentDeriveTeamerVariable.TeamedOnceBool:
#debug
'''
self.debug(
'We team again'
)
'''
#del
del MuteGetValueVariable.ParentDeriveTeamerVariable.TeamDict[
MuteGetValueVariable.TeamTagStr
]
#team again
MuteGetValueVariable.ParentDeriveTeamerVariable.team(
MuteGetValueVariable.TeamTagStr,
MuteSetValueVariable
)
#return
return {'HookingIsBool':False}
else:
#debug
'''
self.debug(
'We manage again'
)
'''
#del
del MuteGetValueVariable.ParentDeriveTeamerVariable.ManagementDict[
MuteGetValueVariable.ManagementTagStr
]
#manage again
MuteGetValueVariable.ParentDeriveTeamerVariable.manage(
MuteGetValueVariable.ManagementTagStr,
MuteSetValueVariable
)
#return
return {'HookingIsBool':False}
#Call the base method
BaseClass.set(self)
"""
def mimic_array(self):
#call the parent method
BaseClass.array(self)
#parent down
if self.ArrayingTopBool:
self.get('?v')
"""
def propertize_setWatchAfterParentWithParenterBool(self,_SettingValueVariable):
#set the value of the "hidden" property variable
self._WatchAfterParentWithParenterBool=_SettingValueVariable
#debug
'''
self.debug(
[
'We have parented here',
'_SettingValueVariable is '+str(_SettingValueVariable)
]
)
'''
#Check
if _SettingValueVariable:
#debug
'''
self.debug(
[
'We have parented here !',
('self.',self,[
'ParentedTotalPathStr',
'ParentedTriggerVariablesList'
]),
'we launch the trigger'
]
)
'''
#trigger map@set
self[Setter.SetMapStr](self.ParentedTriggerVariablesList)
#debug
'''
self.debug(
[
'We have trigerred',
'self is '+SYS._str(self)
]
)
'''
else:
#debug
'''
self.debug(
'We have switched the parent here !'
)
'''
def mimic__print(self,**_KwargVariablesDict):
#debug
'''
print('Parenter 525')
print('self.PrintingCopyVariable.PrintingInstanceSkipKeyStrsList is ')
print(self.PrintingCopyVariable.PrintingInstanceSkipKeyStrsList)
print('')
'''
#/##################/#
# Modify the printing Variable
#
#Check
if self.PrintingSelfBool:
#map
map(
lambda __ItemTuple:
self.PrintingCopyVariable.TeamDict.__setitem__(
__ItemTuple[0],
Printer.getPointerStr(__ItemTuple[1])+" (Empty)"
)
if hasattr(
__ItemTuple[1],'ManagementDict'
) and len(__ItemTuple[1].ManagementDict)==0
else None,
self.PrintingCopyVariable.TeamDict.items()
)
#Check
if self.ParentedTotalPathStr!="":
#Check
if hasattr(self.PrintingCopyVariable,'PrintingInfoStr'):
#add
self.PrintingCopyVariable.PrintingInfoStr+=' ^'
#/##################/#
# Call the base method
#
#call
BaseClass._print(self,**_KwargVariablesDict)
def parentUp(self,**_KwargVariablesDict):
#debug
'''
self.debug(
[
'we parent up here',
'self.ParentDeriveTeamerVariable!=None is ',
str(self.ParentDeriveTeamerVariable!=None)
]
)
'''
#/###############/#
# First make parent the parent
#
#Check
if self.ParentDeriveTeamerVariable!=None:
#make the parent first
self.ParentDeriveTeamerVariable.parentUp(**_KwargVariablesDict)
#/###############/#
# Set here the kwargs
#
#Check
if len(_KwargVariablesDict)>0:
#map
map(
lambda __ItemTuple:
setattr(
self,
*__ItemTuple
),
_KwargVariablesDict.items()
)
#/###############/#
# parent here
#
#parent
self.parent()
#return self
return self
def parentDown(self,
_TeamVariable=None,
_ManagementVariable=None,
_HookMethodStr="",
**_KwargVariablesDict
):
#debug
'''
self.debug(
[
'We parent down here',
('self.',self,[
'TeamedOnceBool'
]),
'_TeamVariable is ',
str(_TeamVariable),
'_ManagementVariable is ',
str(_ManagementVariable)
]
)
'''
#/###############/#
# Set here the kwargs
#
#Check
if len(_KwargVariablesDict)>0:
#map
map(
lambda __ItemTuple:
setattr(
self,
*__ItemTuple
),
_KwargVariablesDict.items()
)
#/###############/#
# First parent here
#
#debug
'''
self.debug(
[
'Check if we have to parent',
('self.',self,['WatchAfterParentWithParenterBool'])
]
)
'''
#parent
self.parent()
#Check
if _HookMethodStr!="" and hasattr(self,_HookMethodStr):
#call
getattr(self,_HookMethodStr)()
#/###############/#
# Command the children to parent down also
#
#debug
self.debug(
[
'Look how to parentDown the children',
('self.',self,['TeamedOnceBool'])
]
)
#Check
if self.TeamedOnceBool:
#/###############/#
# Determine the TeamStrsList
#
if _TeamVariable==None:
TeamStrsList=self.TeamDict.keys()
elif len(_TeamVariable)>0 and type(_TeamVariable[0])==str:
TeamStrsList=self.TeamDict
else:
TeamStrsList=self.mapCondition(
_TeamVariable,
self.TeamDict.keys(),
_DirectBool=True
)
#debug
'''
self.debug(
[
'_TeamVariable is ',
str(_TeamVariable),
'TeamStrsList is ',
str(TeamStrsList)
]
)
'''
#/#################/#
# Get the teams and parent switching the key strs
#
#map
map(
lambda __ElementVariable:
__ElementVariable.parentDown(
_TeamVariable,
_ManagementVariable,
_HookMethodStr,
**_KwargVariablesDict
)
if hasattr(__ElementVariable,'parentDown')
else None,
map(
lambda __TeamStr:
self.TeamDict[
__TeamStr
],
TeamStrsList
)
)
else:
#/###############/#
# Determine the ManagementStrsList
#
if _ManagementVariable==None:
ManagementStrsList=self.ManagementDict.keys()
			elif len(_ManagementVariable)>0 and type(_ManagementVariable[0])==str:
ManagementStrsList=self.ManagementDict
else:
ManagementStrsList=self.mapCondition(
_ManagementVariable,
self.ManagementDict.keys(),
_DirectBool=True
)
#debug
'''
self.debug(
[
'_ManagementVariable is ',
str(_ManagementVariable),
'ManagementStrsList is ',
str(ManagementStrsList)
]
)
'''
#/#################/#
# Get the managements and parent switching the key strs
#
#map
map(
lambda __ElementVariable:
__ElementVariable.parentDown(
_TeamVariable,
_ManagementVariable,
_HookMethodStr,
**_KwargVariablesDict
)
if hasattr(__ElementVariable,'parentDown')
else None,
map(
lambda __ManagementStr:
self.ManagementDict[
__ManagementStr
],
ManagementStrsList
)
)
#return self
return self
def getParenterStr(self):
#get
ParenterStr=Printer.getPointerStr(
self
)
#Check
if self.TeamedOnceBool:
ParenterStr+=' with : '+str(self.TeamDict.keys())
elif self.ManagedOnceBool:
ParenterStr+=' with : '+str(self.ManagementDict.keys())
#return
return ParenterStr
def __contains__(self,_KeyStr):
#Check
if self.TeamedOnceBool:
return _KeyStr in self.TeamDict
		elif self.ManagedOnceBool:
return _KeyStr in self.ManagementDict
else:
return (_KeyStr in self.TeamDict) or (_KeyStr in self.ManagementDict)
def __len__(self):
#Check
if self.TeamedOnceBool:
return len(self.TeamDict)
		elif self.ManagedOnceBool:
return len(self.ManagementDict)
else:
return 0
#</DefineClass>
#Set
SYS.ManagerClass.ManagingValueClass=ParenterClass
SYS.TeamerClass.TeamingValueClass=ParenterClass
#</DefinePrint>
ParenterClass.PrintingClassSkipKeyStrsList.extend(
[
'ParentKeyStr',
'ParentTagStr',
'ParentTeamTagStr',
'ParentManagementTagStr',
'ParentGrandTeamTagStr',
'ParentGrandManagementTagStr',
'ParentDeriveTeamerVariable',
'ParentGrandDeriveTeamerVariable',
'ParentTopDeriveTeamerVariable',
'ParentingTriggerVariable',
'ParentedTotalDeriveParentersList',
'ParentedTotalDeriveTeamersList',
'ParentedTotalDeriveManagersList',
'ParentedTotalSingularListDict',
'ParentedTotalListDict',
'ParentedTotalPathStr',
'ParentedTeamPathStr',
'ParentedManagementPathStr',
'ParentedTriggerVariablesList'
]
)
#<DefinePrint>
| mit | -5,161,058,367,534,580,000 | 19.051565 | 132 | 0.654252 | false |
djeo94/CouchPotatoServer | couchpotato/core/media/_base/providers/torrent/scenetime.py | 23 | 5678 | import traceback
from bs4 import BeautifulSoup
from couchpotato.core.helpers.encoding import tryUrlencode, toUnicode
from couchpotato.core.helpers.variable import tryInt
from couchpotato.core.logger import CPLog
from couchpotato.core.media._base.providers.torrent.base import TorrentProvider
log = CPLog(__name__)
class Base(TorrentProvider):
urls = {
'test': 'https://www.scenetime.com/',
'login': 'https://www.scenetime.com/takelogin.php',
'login_check': 'https://www.scenetime.com/inbox.php',
'detail': 'https://www.scenetime.com/details.php?id=%s',
'search': 'https://www.scenetime.com/browse.php?search=%s&cat=%d',
'download': 'https://www.scenetime.com/download.php/%s/%s',
}
cat_ids = [
([59], ['720p', '1080p']),
([81], ['brrip']),
([102], ['bd50']),
([3], ['dvdrip']),
]
http_time_between_calls = 1 # Seconds
cat_backup_id = None
def _searchOnTitle(self, title, movie, quality, results):
url = self.urls['search'] % (tryUrlencode('%s %s' % (title.replace(':', ''), movie['info']['year'])), self.getCatId(quality)[0])
data = self.getHTMLData(url)
if data:
html = BeautifulSoup(data)
try:
result_table = html.find(attrs = {'id': 'torrenttable'})
if not result_table:
log.error('failed to generate result_table')
return
entries = result_table.find_all('tr')
for result in entries[1:]:
cells = result.find_all('td')
link = result.find('a', attrs = {'class': 'index'})
torrent_id = link['href'].replace('download.php/','').split('/')[0]
torrent_file = link['href'].replace('download.php/','').split('/')[1]
size = self.parseSize(cells[5].contents[0] + cells[5].contents[2])
name_row = cells[1].contents[0]
name = name_row.getText()
seeders_row = cells[6].contents[0]
seeders = seeders_row.getText()
results.append({
'id': torrent_id,
'name': name,
'url': self.urls['download'] % (torrent_id,torrent_file),
'detail_url': self.urls['detail'] % torrent_id,
'size': size,
'seeders': seeders,
})
except:
log.error('Failed to parsing %s: %s', (self.getName(), traceback.format_exc()))
def getLoginParams(self):
return {
'login': 'submit',
'username': self.conf('username'),
'password': self.conf('password'),
}
def loginSuccess(self, output):
return 'logout.php' in output.lower() or 'Welcome' in output.lower()
loginCheckSuccess = loginSuccess
config = [{
'name': 'scenetime',
'groups': [
{
'tab': 'searcher',
'list': 'torrent_providers',
'name': 'SceneTime',
'description': '<a href="https://www.scenetime.com">SceneTime</a>',
'wizard': True,
'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAAAAYdEVYdFNvZnR3YXJlAHBhaW50Lm5ldCA0LjAuNWWFMmUAAAIwSURBVDhPZZFbSBRRGMePs7Mzjma7+9AWWxpeYrXLkrcIfUwIpIeK3tO1hWhfltKwhyJMFIqgCz2EpdHWRun2oGG02O2hlYyypY21CygrlbhRIYHizO6/mdk5szPtB785hzm//zeXj7Q89q4I4QaQBx6ZHQY84Efq4Rrbg4rxVmx61AJ2pFY/twzvhP1hU4ZwIQ8K7mw1wdzdhrrxQ7g8E0Q09R6flubw+mcM7tHWPJcwt91ghuTQUDWYW8rejbrRA3i1OA0xLYGWJO8bxw6q50YIc70CRoQbNbj2MQgpkwsrpTYI7ze5CoS5UgYjpTd3YWphWg1l1CuwLC4jufQNtaG9JleBWM67YKR6oBlzf+bVoPIOUiaNwVgIzcF9sF3aknMvZFfCnnNCp9eJqqsNSKQ+qw2USssNzrzoh9Dnynmaq6yEPe2AkfX9lXjy5akWz9ZkcgqVFz0mj0KsJ0tgROh2oCfSJ3/3ihaHPA0Rh+/7UNhtN7kKhAsI+J+a3u2If49r8WxFZiawtsuR5xLumBUU3s/B2bkOm0+V4V3yrTwFOgcg8SMBe8CmuxTC+SygFB3l8TzxDLOpWYiSqEWzFf0ahc2/RncphPcSUIqPWPFhPqZFcrUqraLzXkA+Z3WXQvh2eaNR3MHmNVB+YPjNMMqPb9Q9I6YGRR0WTMQj6hOV+f/++wuDLwfg7iqH4GVMQQrh28w3Nvgd2H22Hk09jag6UYoSH4/C9gKTo9NG8A8MPUM4DJp74gAAAABJRU5ErkJggg==',
'options': [
{
'name': 'enabled',
'type': 'enabler',
'default': False,
},
{
'name': 'username',
'default': '',
},
{
'name': 'password',
'default': '',
'type': 'password',
},
{
'name': 'seed_ratio',
'label': 'Seed ratio',
'type': 'float',
'default': 1,
'description': 'Will not be (re)moved until this seed ratio is met.',
},
{
'name': 'seed_time',
'label': 'Seed time',
'type': 'int',
'default': 40,
'description': 'Will not be (re)moved until this seed time (in hours) is met.',
},
{
'name': 'extra_score',
'advanced': True,
'label': 'Extra Score',
'type': 'int',
'default': 20,
'description': 'Starting score for each release found via this provider.',
}
],
},
],
}]
| gpl-3.0 | -3,218,943,192,358,938,000 | 40.144928 | 963 | 0.527651 | false |
Architektor/PySnip | venv/lib/python2.7/site-packages/twisted/conch/tap.py | 7 | 3144 | # -*- test-case-name: twisted.conch.test.test_tap -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Support module for making SSH servers with twistd.
"""
from twisted.conch import unix
from twisted.conch import checkers as conch_checkers
from twisted.conch.openssh_compat import factory
from twisted.cred import portal, strcred
from twisted.python import usage
from twisted.application import strports
class Options(usage.Options, strcred.AuthOptionMixin):
synopsis = "[-i <interface>] [-p <port>] [-d <dir>] "
longdesc = ("Makes a Conch SSH server. If no authentication methods are "
"specified, the default authentication methods are UNIX passwords "
"and SSH public keys. If --auth options are "
"passed, only the measures specified will be used.")
optParameters = [
["interface", "i", "", "local interface to which we listen"],
["port", "p", "tcp:22", "Port on which to listen"],
["data", "d", "/etc", "directory to look for host keys in"],
["moduli", "", None, "directory to look for moduli in "
"(if different from --data)"]
]
compData = usage.Completions(
optActions={"data": usage.CompleteDirs(descr="data directory"),
"moduli": usage.CompleteDirs(descr="moduli directory"),
"interface": usage.CompleteNetInterfaces()}
)
def __init__(self, *a, **kw):
usage.Options.__init__(self, *a, **kw)
# call the default addCheckers (for backwards compatibility) that will
# be used if no --auth option is provided - note that conch's
# UNIXPasswordDatabase is used, instead of twisted.plugins.cred_unix's
# checker
super(Options, self).addChecker(conch_checkers.UNIXPasswordDatabase())
super(Options, self).addChecker(conch_checkers.SSHPublicKeyChecker(
conch_checkers.UNIXAuthorizedKeysFiles()))
self._usingDefaultAuth = True
def addChecker(self, checker):
"""
Add the checker specified. If any checkers are added, the default
checkers are automatically cleared and the only checkers will be the
specified one(s).
"""
if self._usingDefaultAuth:
self['credCheckers'] = []
self['credInterfaces'] = {}
self._usingDefaultAuth = False
super(Options, self).addChecker(checker)
def makeService(config):
"""
Construct a service for operating a SSH server.
@param config: An L{Options} instance specifying server options, including
where server keys are stored and what authentication methods to use.
@return: An L{IService} provider which contains the requested SSH server.
"""
t = factory.OpenSSHFactory()
r = unix.UnixSSHRealm()
t.portal = portal.Portal(r, config.get('credCheckers', []))
t.dataRoot = config['data']
t.moduliRoot = config['moduli'] or config['data']
port = config['port']
if config['interface']:
# Add warning here
port += ':interface=' + config['interface']
return strports.service(port, t)
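# Illustrative invocation (not part of the original module), assuming a normal
# Twisted install where this tap is exposed as the "conch" twistd plugin. The
# port, data directory and auth backend below are examples only:
#
#   twistd -n conch --port=tcp:2222 --data=/etc/ssh --auth=unix
#
# Without an --auth flag, the defaults wired up in Options.__init__ above apply
# (UNIX password database plus per-user SSH authorized_keys files).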
| gpl-3.0 | 1,169,508,590,292,866,600 | 35.988235 | 78 | 0.647265 | false |
monkeysecurity/bozor | bozor/aws/iam/role.py | 1 | 2398 | from botor import Botor
from botor.aws.iam import get_role_managed_policies, get_role_inline_policies, get_role_instance_profiles
from bozor.aws.iam import _get_name_from_structure, modify, _conn_from_args
def _get_base(role, **conn):
"""
Determine whether the boto get_role call needs to be made or if we already have all that data
in the role object.
:param role: dict containing (at the very least) role_name and/or arn.
:param conn: dict containing enough information to make a connection to the desired account.
    :return: Camelized dict describing role containing all base_fields.
"""
base_fields = frozenset(['Arn', 'AssumeRolePolicyDocument', 'Path', 'RoleId', 'RoleName', 'CreateDate'])
needs_base = False
for field in base_fields:
if field not in role:
needs_base = True
break
if needs_base:
role_name = _get_name_from_structure(role, 'RoleName')
role = Botor.go('iam.client.get_role', RoleName=role_name, **conn)
role = role['Role']
# cast CreateDate from a datetime to something JSON serializable.
role.update(dict(CreateDate=str(role['CreateDate'])))
return role
def get_role(role, output='camelized', **conn):
"""
Orchestrates all the calls required to fully build out an IAM Role in the following format:
{
"Arn": ...,
"AssumeRolePolicyDocument": ...,
"CreateDate": ..., # str
"InlinePolicies": ...,
"InstanceProfiles": ...,
"ManagedPolicies": ...,
"Path": ...,
"RoleId": ...,
"RoleName": ...,
}
:param role: dict containing (at the very least) role_name and/or arn.
:param output: Determines whether keys should be returned camelized or underscored.
:param conn: dict containing enough information to make a connection to the desired account.
Must at least have 'assume_role' key.
:return: dict containing a fully built out role.
"""
role = modify(role, 'camelized')
_conn_from_args(role, conn)
role = _get_base(role, **conn)
role.update(
{
'managed_policies': get_role_managed_policies(role, **conn),
'inline_policies': get_role_inline_policies(role, **conn),
'instance_profiles': get_role_instance_profiles(role, **conn)
}
)
return modify(role, format=output) | apache-2.0 | 4,586,827,074,024,530,400 | 35.907692 | 108 | 0.638032 | false |
Spoffy/DreamDaemon-Control-Scripts | config.py | 2 | 1034 | import os.path
import SS13.hooks
class Config:
def __init__(self, **kwargs):
if kwargs:
self.__dict__ = kwargs
def items(self):
return self.__dict__.items()
def get_dmb_path(self):
return os.path.join(self.path, self.dmb)
def run_hook(self, hook_name):
if self.hooks and hook_name in self.hooks:
return self.hooks[hook_name](self)
def sample_hook(config):
print("Hello from the Sample Hook!")
DD_INSTALL_PATH = "/usr/local/byond/bin/DreamDaemon"
DM_INSTALL_PATH = "/usr/local/byond/bin/DreamMaker"
PATH_TO_CRONTAB = "/root/SS13/scripts/ss13.cron"
CONFIGS = [
Config(
name="Test",
path="../tgstation",
dmb="tgstation.dmb",
dme="tgstation.dme",
port="52601",
args=["-logself", "-trusted", "-unsafe_diag"],
config_file="config/config.txt",
backup_files=["config/admins.txt", "config/names"],
backup_dest="",
hooks={
"pre_update": SS13.hooks.pre_update,
"post_update": SS13.hooks.post_update
}
)
]
#Index of the default configuration to use.
DEFAULT_CONFIG = 0
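# Illustrative sketch (not part of the original file): how the deployment
# scripts are assumed to consume this module. Everything referenced here is
# defined above; the hook name mirrors a key of the Config's hooks dict.
#
#   active = CONFIGS[DEFAULT_CONFIG]
#   dmb_path = active.get_dmb_path()   # e.g. ../tgstation/tgstation.dmb
#   active.run_hook("pre_update")      # calls SS13.hooks.pre_update(active)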
| gpl-2.0 | -5,926,989,017,683,442,000 | 21.478261 | 53 | 0.659574 | false |
hesseltuinhof/mxnet | example/dsd/sparse_sgd.py | 1 | 7254 | from mxnet.ndarray import NDArray, topk, abs as NDabs
from mxnet.optimizer import SGD, register
import logging
log = 'Sparsity Update:\t'
@register
class SparseSGD(SGD):
"""The SGD optimizer with weight pruning.
This class implements the optimizer described in the paper *DSD: Dense-Sparse-Dense Training for
Deep Neural Networks*, available at https://arxiv.org/pdf/1607.04381.pdf
The optimizer updates the weights the same way as done in SGD, but does the following
preprocessing::
if threshold given, all weights below the threshold in absolute value are pruned,
mask = abs(weight) >= threshold
if sparsity level given, the smallest (sparsity)% weights in absolute value are pruned
(or the largest (100-sparsity)% weights in absolute value are used)
mask = topk(abs(weight), ret_typ='mask', k=weight.size*(100-sparsity)/100)
=> mask[i,j] = {0 if weight[i,j] is pruned, 1 otherwise} (for a matrix representation)
weight = weight * mask
grad = grad * mask
state = state * mask
This optimizer accepts the following parameters in addition to those accepted
by :class:`.SGD`.
Parameters
----------
pruning_switch_epoch : list of ints, optional
The epochs at which there is a change in sparsity level (should be in ascending order).
weight_sparsity : list of floats, optional
The sparsity on the weights required on each iteration of sparse training.
bias_sparsity : list of floats, optional
The sparsity on the biases required on each iteration of sparse training.
weight_threshold : list of floats, optional
The absolute value threshold on the weights required on each iteration of sparse training.
bias_threshold : list of floats, optional
The absolute value threshold on the biases required on each iteration of sparse training.
batches_per_epoch : int, optional
The number of batches in each epoch.
(The ceiling integer value of number_of_examples / batch_size)
"""
def __init__(self, pruning_switch_epoch, batches_per_epoch,
weight_sparsity=None, bias_sparsity=None,
weight_threshold=None, bias_threshold=None, **kwargs):
super(SparseSGD, self).__init__(**kwargs)
self.masks = []
self.masks_updated = False
self.epoch = 0
self.pruning_switch_epoch = pruning_switch_epoch
self.batches_per_epoch = batches_per_epoch
# get weight and bias sparsity percentages
self.weight_sparsity = weight_sparsity
self.bias_sparsity = bias_sparsity
if weight_sparsity is not None:
assert len(weight_sparsity) == len(bias_sparsity), \
'weight_sparsity and bias_sparsity should have same length'
assert len(weight_sparsity) == len(pruning_switch_epoch), \
'pruning_switch_epoch and weight_sparsity should have same length'
# get weight and bias sparsity thresholds
self.weight_threshold = weight_threshold
self.bias_threshold = bias_threshold
if weight_threshold is not None:
assert len(weight_threshold) == len(bias_threshold), \
'weight_threshold and bias_threshold should have same length'
assert len(weight_threshold) == len(pruning_switch_epoch), \
'pruning_switch_epoch and weight_sparsity_threshold should have same length'
# either percentages or thresholds must be given
assert weight_sparsity is not None or weight_threshold is not None,\
'weight_sparsity or weight_sparsity_threshold should be given'
def update_masks(self, index, weight):
"""Updates the masks for sparse training.
Parameters
----------
index : int
The index for weight.
weight : NDArray
The weight matrix.
Returns
-------
boolean
If the masks were changed
"""
# determine number of updates without actually updating the count
if index not in self._index_update_count:
num_update = self.begin_num_update
else:
num_update = self._index_update_count[index]
num_update += 1
num_update = max(num_update, self.num_update)
# calculate epoch
epoch = int((num_update - 1) / self.batches_per_epoch) + 1
# determine if masks need to be updated, and get corresponding parameters
if index == 0:
self.masks_updated = True
if self.epoch != epoch:
self.epoch = epoch
if epoch == 1:
self.masks_updated = False
if self.weight_sparsity is not None:
logging.info(log + 'bias-sparsity={}, weight-sparsity={}'.format(self.bias_sparsity[0], self.weight_sparsity[0]))
else:
logging.info(log + 'bias-threshold={}, weight-threshold={}'.format(self.bias_threshold[0], self.weight_threshold[0]))
if self.pruning_switch_epoch[0] + 1 == epoch:
self.masks_updated = False
self.pruning_switch_epoch.pop(0)
if self.weight_sparsity is not None:
self.weight_sparsity.pop(0)
self.bias_sparsity.pop(0)
logging.info(log + 'bias-sparsity={}, weight-sparsity={}'.format(self.bias_sparsity[0], self.weight_sparsity[0]))
else:
self.weight_threshold.pop(0)
self.bias_threshold.pop(0)
logging.info(log + 'bias-threshold={}, weight-threshold={}'.format(self.bias_threshold[0], self.weight_threshold[0]))
# update masks if needed
if not self.masks_updated:
# initialize masks
if epoch == 1:
self.masks.append(None)
# if percentages are given
if self.weight_sparsity is not None:
if len(weight.shape) == 1:
sparsity = self.bias_sparsity[0]
else:
sparsity = self.weight_sparsity[0]
number_unpruned = int((100.0 - sparsity) * weight.size / 100.0)
self.masks[index] = topk(NDabs(weight), axis=None, ret_typ='mask',
k=number_unpruned)
# if thresholds are given
else:
if len(weight.shape) == 1:
threshold = self.bias_threshold[0]
else:
threshold = self.weight_threshold[0]
self.masks[index] = NDabs(weight) >= threshold
return not self.masks_updated
def update(self, index, weight, grad, state):
assert(isinstance(weight, NDArray))
assert(isinstance(grad, NDArray))
# preprocessing for pruning
if self.update_masks(index, weight):
weight[:] = weight * self.masks[index]
grad[:] = grad * self.masks[index]
if state is not None:
state[:] = state * self.masks[index]
super(SparseSGD, self).update(index, weight, grad, state)
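# Hedged usage sketch (not part of the original example): constructing the
# optimizer for a module-based training loop. The sparsity schedule, learning
# rate and batch count are made-up values; extra kwargs such as momentum are
# forwarded to the underlying SGD.
if __name__ == '__main__':
    demo_optimizer = SparseSGD(
        pruning_switch_epoch=[3, 6],   # sparsity level changes after epochs 3 and 6
        batches_per_epoch=500,         # ceil(number_of_examples / batch_size)
        weight_sparsity=[30.0, 50.0],  # prune smallest 30%, then 50%, of each weight
        bias_sparsity=[0.0, 0.0],      # keep biases dense in this sketch
        learning_rate=0.1,
        momentum=0.9)
    # A Module would then be fitted with this instance, e.g.
    # mod.fit(train_iter, optimizer=demo_optimizer, num_epoch=9)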
| apache-2.0 | -7,886,738,699,163,366,000 | 41.670588 | 137 | 0.600221 | false |
joeythesaint/yocto-autobuilder | lib/python2.7/site-packages/sqlalchemy_migrate-0.6-py2.6.egg/migrate/versioning/util/__init__.py | 5 | 5337 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
""".. currentmodule:: migrate.versioning.util"""
import warnings
import logging
from decorator import decorator
from pkg_resources import EntryPoint
from sqlalchemy import create_engine
from sqlalchemy.engine import Engine
from sqlalchemy.pool import StaticPool
from migrate.versioning import exceptions
from migrate.versioning.util.keyedinstance import KeyedInstance
from migrate.versioning.util.importpath import import_path
from migrate.changeset.exceptions import *
log = logging.getLogger(__name__)
def load_model(dotted_name):
"""Import module and use module-level variable".
:param dotted_name: path to model in form of string: ``some.python.module:Class``
.. versionchanged:: 0.5.4
"""
if isinstance(dotted_name, basestring):
if ':' not in dotted_name:
# backwards compatibility
warnings.warn('model should be in form of module.model:User '
'and not module.model.User', MigrateDeprecationWarning)
dotted_name = ':'.join(dotted_name.rsplit('.', 1))
return EntryPoint.parse('x=%s' % dotted_name).load(False)
else:
# Assume it's already loaded.
return dotted_name
def asbool(obj):
"""Do everything to use object as bool"""
if isinstance(obj, basestring):
obj = obj.strip().lower()
if obj in ['true', 'yes', 'on', 'y', 't', '1']:
return True
elif obj in ['false', 'no', 'off', 'n', 'f', '0']:
return False
else:
raise ValueError("String is not true/false: %r" % obj)
if obj in (True, False):
return bool(obj)
else:
raise ValueError("String is not true/false: %r" % obj)
def guess_obj_type(obj):
"""Do everything to guess object type from string
    Tries to convert to `int`, `bool` and finally returns if not succeeded.
.. versionadded: 0.5.4
"""
result = None
try:
result = int(obj)
except:
pass
if result is None:
try:
result = asbool(obj)
except:
pass
if result is not None:
return result
else:
return obj
@decorator
def catch_known_errors(f, *a, **kw):
"""Decorator that catches known api errors
.. versionadded: 0.5.4
"""
try:
return f(*a, **kw)
except exceptions.PathFoundError, e:
raise exceptions.KnownError("The path %s already exists" % e.args[0])
def construct_engine(engine, **opts):
""".. versionadded:: 0.5.4
Constructs and returns SQLAlchemy engine.
Currently, there are 2 ways to pass create_engine options to :mod:`migrate.versioning.api` functions:
:param engine: connection string or a existing engine
:param engine_dict: python dictionary of options to pass to `create_engine`
:param engine_arg_*: keyword parameters to pass to `create_engine` (evaluated with :func:`migrate.versioning.util.guess_obj_type`)
:type engine_dict: dict
:type engine: string or Engine instance
:type engine_arg_*: string
:returns: SQLAlchemy Engine
.. note::
keyword parameters override ``engine_dict`` values.
"""
if isinstance(engine, Engine):
return engine
elif not isinstance(engine, basestring):
raise ValueError("you need to pass either an existing engine or a database uri")
# get options for create_engine
if opts.get('engine_dict') and isinstance(opts['engine_dict'], dict):
kwargs = opts['engine_dict']
else:
kwargs = dict()
# DEPRECATED: handle echo the old way
echo = asbool(opts.get('echo', False))
if echo:
warnings.warn('echo=True parameter is deprecated, pass '
'engine_arg_echo=True or engine_dict={"echo": True}',
MigrateDeprecationWarning)
kwargs['echo'] = echo
# parse keyword arguments
for key, value in opts.iteritems():
if key.startswith('engine_arg_'):
kwargs[key[11:]] = guess_obj_type(value)
log.debug('Constructing engine')
# TODO: return create_engine(engine, poolclass=StaticPool, **kwargs)
# seems like 0.5.x branch does not work with engine.dispose and staticpool
return create_engine(engine, **kwargs)
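# Hedged example (not part of the original module): the two equivalent ways the
# docstring above describes for passing create_engine options. The SQLite URL
# and echo flag are arbitrary.
#
#   engine = construct_engine('sqlite:///migrate.db', engine_arg_echo='True')
#   engine = construct_engine('sqlite:///migrate.db', engine_dict={'echo': True})
#
# String values arriving via engine_arg_* are coerced with guess_obj_type above,
# so 'True' becomes a real boolean before reaching create_engine.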
@decorator
def with_engine(f, *a, **kw):
"""Decorator for :mod:`migrate.versioning.api` functions
to safely close resources after function usage.
Passes engine parameters to :func:`construct_engine` and
resulting parameter is available as kw['engine'].
Engine is disposed after wrapped function is executed.
.. versionadded: 0.6.0
"""
url = a[0]
engine = construct_engine(url, **kw)
try:
kw['engine'] = engine
return f(*a, **kw)
finally:
if isinstance(engine, Engine):
log.debug('Disposing SQLAlchemy engine %s', engine)
engine.dispose()
class Memoize:
"""Memoize(fn) - an instance which acts like fn but memoizes its arguments
Will only work on functions with non-mutable arguments
ActiveState Code 52201
"""
def __init__(self, fn):
self.fn = fn
self.memo = {}
def __call__(self, *args):
if not self.memo.has_key(args):
self.memo[args] = self.fn(*args)
return self.memo[args]
| gpl-2.0 | 1,236,873,553,558,883,600 | 28.65 | 134 | 0.63294 | false |
clemkoa/scikit-learn | examples/cluster/plot_kmeans_digits.py | 46 | 4491 | """
===========================================================
A demo of K-Means clustering on the handwritten digits data
===========================================================
In this example we compare the various initialization strategies for
K-means in terms of runtime and quality of the results.
As the ground truth is known here, we also apply different cluster
quality metrics to judge the goodness of fit of the cluster labels to the
ground truth.
Cluster quality metrics evaluated (see :ref:`clustering_evaluation` for
definitions and discussions of the metrics):
=========== ========================================================
Shorthand full name
=========== ========================================================
homo homogeneity score
compl completeness score
v-meas V measure
ARI adjusted Rand index
AMI adjusted mutual information
silhouette silhouette coefficient
=========== ========================================================
"""
print(__doc__)
from time import time
import numpy as np
import matplotlib.pyplot as plt
from sklearn import metrics
from sklearn.cluster import KMeans
from sklearn.datasets import load_digits
from sklearn.decomposition import PCA
from sklearn.preprocessing import scale
np.random.seed(42)
digits = load_digits()
data = scale(digits.data)
n_samples, n_features = data.shape
n_digits = len(np.unique(digits.target))
labels = digits.target
sample_size = 300
print("n_digits: %d, \t n_samples %d, \t n_features %d"
% (n_digits, n_samples, n_features))
print(82 * '_')
print('init\t\ttime\tinertia\thomo\tcompl\tv-meas\tARI\tAMI\tsilhouette')
def bench_k_means(estimator, name, data):
t0 = time()
estimator.fit(data)
print('%-9s\t%.2fs\t%i\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f'
% (name, (time() - t0), estimator.inertia_,
metrics.homogeneity_score(labels, estimator.labels_),
metrics.completeness_score(labels, estimator.labels_),
metrics.v_measure_score(labels, estimator.labels_),
metrics.adjusted_rand_score(labels, estimator.labels_),
metrics.adjusted_mutual_info_score(labels, estimator.labels_),
metrics.silhouette_score(data, estimator.labels_,
metric='euclidean',
sample_size=sample_size)))
bench_k_means(KMeans(init='k-means++', n_clusters=n_digits, n_init=10),
name="k-means++", data=data)
bench_k_means(KMeans(init='random', n_clusters=n_digits, n_init=10),
name="random", data=data)
# in this case the seeding of the centers is deterministic, hence we run the
# kmeans algorithm only once with n_init=1
pca = PCA(n_components=n_digits).fit(data)
bench_k_means(KMeans(init=pca.components_, n_clusters=n_digits, n_init=1),
name="PCA-based",
data=data)
print(82 * '_')
# #############################################################################
# Visualize the results on PCA-reduced data
reduced_data = PCA(n_components=2).fit_transform(data)
kmeans = KMeans(init='k-means++', n_clusters=n_digits, n_init=10)
kmeans.fit(reduced_data)
# Step size of the mesh. Decrease to increase the quality of the VQ.
h = .02 # point in the mesh [x_min, x_max]x[y_min, y_max].
# Plot the decision boundary. For that, we will assign a color to each point in the mesh.
x_min, x_max = reduced_data[:, 0].min() - 1, reduced_data[:, 0].max() + 1
y_min, y_max = reduced_data[:, 1].min() - 1, reduced_data[:, 1].max() + 1
xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))
# Obtain labels for each point in mesh. Use last trained model.
Z = kmeans.predict(np.c_[xx.ravel(), yy.ravel()])
# Put the result into a color plot
Z = Z.reshape(xx.shape)
plt.figure(1)
plt.clf()
plt.imshow(Z, interpolation='nearest',
extent=(xx.min(), xx.max(), yy.min(), yy.max()),
cmap=plt.cm.Paired,
aspect='auto', origin='lower')
plt.plot(reduced_data[:, 0], reduced_data[:, 1], 'k.', markersize=2)
# Plot the centroids as a white X
centroids = kmeans.cluster_centers_
plt.scatter(centroids[:, 0], centroids[:, 1],
marker='x', s=169, linewidths=3,
color='w', zorder=10)
plt.title('K-means clustering on the digits dataset (PCA-reduced data)\n'
'Centroids are marked with white cross')
plt.xlim(x_min, x_max)
plt.ylim(y_min, y_max)
plt.xticks(())
plt.yticks(())
plt.show()
| bsd-3-clause | 3,329,043,012,926,230,500 | 34.642857 | 79 | 0.603652 | false |
themurph/openshift-tools | openshift/installer/vendored/openshift-ansible-3.5.28/roles/openshift_certificate_expiry/library/openshift_cert_expiry.py | 5 | 33144 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# pylint: disable=line-too-long,invalid-name
"""For details on this module see DOCUMENTATION (below)"""
import datetime
import io
import os
import subprocess
import yaml
from six.moves import configparser
from ansible.module_utils.basic import AnsibleModule
try:
# You can comment this import out and include a 'pass' in this
# block if you're manually testing this module on a NON-ATOMIC
# HOST (or any host that just doesn't have PyOpenSSL
# available). That will force the `load_and_handle_cert` function
# to use the Fake OpenSSL classes.
import OpenSSL.crypto
HAS_OPENSSL = True
except ImportError:
# Some platforms (such as RHEL Atomic) may not have the Python
# OpenSSL library installed. In this case we will use a manual
# work-around to parse each certificate.
#
# Check for 'OpenSSL.crypto' in `sys.modules` later.
HAS_OPENSSL = False
DOCUMENTATION = '''
---
module: openshift_cert_expiry
short_description: Check OpenShift Container Platform (OCP) and Kube certificate expirations on a cluster
description:
- The M(openshift_cert_expiry) module has two basic functions: to flag certificates which will expire in a set window of time from now, and to notify you about certificates which have already expired.
- When the module finishes, a summary of the examination is returned. Each certificate in the summary has a C(health) key with a value of one of the following:
- C(ok) - not expired, and outside of the expiration C(warning_days) window.
- C(warning) - not expired, but will expire between now and the C(warning_days) window.
- C(expired) - an expired certificate.
  - Certificate flagging follows this logic:
- If the expiration date is before now then the certificate is classified as C(expired).
  - The certificate's time to live (expiration date - now) is calculated; if that time window is less than C(warning_days) the certificate is classified as C(warning).
- All other conditions are classified as C(ok).
- The following keys are ALSO present in the certificate summary:
- C(cert_cn) - The common name of the certificate (additional CNs present in SAN extensions are omitted)
- C(days_remaining) - The number of days until the certificate expires.
- C(expiry) - The date the certificate expires on.
- C(path) - The full path to the certificate on the examined host.
version_added: "1.0"
options:
config_base:
description:
- Base path to OCP system settings.
required: false
default: /etc/origin
warning_days:
description:
- Flag certificates which will expire in C(warning_days) days from now.
required: false
default: 30
show_all:
description:
- Enable this option to show analysis of ALL certificates examined by this module.
- By default only certificates which have expired, or will expire within the C(warning_days) window will be reported.
required: false
default: false
author: "Tim Bielawa (@tbielawa) <[email protected]>"
'''
EXAMPLES = '''
# Default invocation, only notify about expired certificates or certificates which will expire within 30 days from now
- openshift_cert_expiry:
# Expand the warning window to show certificates expiring within a year from now
- openshift_cert_expiry: warning_days=365
# Show expired, soon to expire (now + 30 days), and all other certificates examined
- openshift_cert_expiry: show_all=true
'''
class FakeOpenSSLCertificate(object):
"""This provides a rough mock of what you get from
`OpenSSL.crypto.load_certificate()`. This is a work-around for
platforms missing the Python OpenSSL library.
"""
def __init__(self, cert_string):
"""`cert_string` is a certificate in the form you get from running a
.crt through 'openssl x509 -in CERT.cert -text'"""
self.cert_string = cert_string
self.serial = None
self.subject = None
self.extensions = []
self.not_after = None
self._parse_cert()
def _parse_cert(self):
"""Manually parse the certificate line by line"""
self.extensions = []
PARSING_ALT_NAMES = False
for line in self.cert_string.split('\n'):
l = line.strip()
if PARSING_ALT_NAMES:
# We're parsing a 'Subject Alternative Name' line
self.extensions.append(
FakeOpenSSLCertificateSANExtension(l))
PARSING_ALT_NAMES = False
continue
# parse out the bits that we can
if l.startswith('Serial Number:'):
# Serial Number: 11 (0xb)
# => 11
self.serial = int(l.split()[-2])
elif l.startswith('Not After :'):
# Not After : Feb 7 18:19:35 2019 GMT
# => strptime(str, '%b %d %H:%M:%S %Y %Z')
# => strftime('%Y%m%d%H%M%SZ')
# => 20190207181935Z
not_after_raw = l.partition(' : ')[-1]
# Last item: ('Not After', ' : ', 'Feb 7 18:19:35 2019 GMT')
not_after_parsed = datetime.datetime.strptime(not_after_raw, '%b %d %H:%M:%S %Y %Z')
self.not_after = not_after_parsed.strftime('%Y%m%d%H%M%SZ')
elif l.startswith('X509v3 Subject Alternative Name:'):
PARSING_ALT_NAMES = True
continue
elif l.startswith('Subject:'):
# O=system:nodes, CN=system:node:m01.example.com
self.subject = FakeOpenSSLCertificateSubjects(l.partition(': ')[-1])
def get_serial_number(self):
"""Return the serial number of the cert"""
return self.serial
def get_subject(self):
"""Subjects must implement get_components() and return dicts or
tuples. An 'openssl x509 -in CERT.cert -text' with 'Subject':
        Subject: O=system:nodes, CN=system:node:m01.example.com
        might return: [('O', 'system:nodes'), ('CN', 'system:node:m01.example.com')]
"""
return self.subject
def get_extension(self, i):
"""Extensions must implement get_short_name() and return the string
'subjectAltName'"""
return self.extensions[i]
def get_extension_count(self):
""" get_extension_count """
return len(self.extensions)
def get_notAfter(self):
"""Returns a date stamp as a string in the form
'20180922170439Z'. strptime the result with format param:
'%Y%m%d%H%M%SZ'."""
return self.not_after
class FakeOpenSSLCertificateSANExtension(object): # pylint: disable=too-few-public-methods
"""Mocks what happens when `get_extension` is called on a certificate
object"""
def __init__(self, san_string):
"""With `san_string` as you get from:
$ openssl x509 -in certificate.crt -text
"""
self.san_string = san_string
self.short_name = 'subjectAltName'
def get_short_name(self):
"""Return the 'type' of this extension. It's always the same though
because we only care about subjectAltName's"""
return self.short_name
def __str__(self):
"""Return this extension and the value as a simple string"""
return self.san_string
# pylint: disable=too-few-public-methods
class FakeOpenSSLCertificateSubjects(object):
"""Mocks what happens when `get_subject` is called on a certificate
object"""
def __init__(self, subject_string):
"""With `subject_string` as you get from:
$ openssl x509 -in certificate.crt -text
"""
self.subjects = []
for s in subject_string.split(', '):
name, _, value = s.partition('=')
self.subjects.append((name, value))
def get_components(self):
"""Returns a list of tuples"""
return self.subjects
# We only need this for one thing, we don't care if it doesn't have
# that many public methods
#
# pylint: disable=too-few-public-methods
class FakeSecHead(object):
"""etcd does not begin their config file with an opening [section] as
required by the Python ConfigParser module. We hack around it by
slipping one in ourselves prior to parsing.
Source: Alex Martelli - http://stackoverflow.com/a/2819788/6490583
"""
def __init__(self, fp):
self.fp = fp
self.sechead = '[ETCD]\n'
def readline(self):
"""Make this look like a file-type object"""
if self.sechead:
try:
return self.sechead
finally:
self.sechead = None
else:
return self.fp.readline()
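# Illustrative usage (not executed); mirrors how etcd.conf is parsed later in
# main(), with a hypothetical path and option name:
#
#   with io.open('/etc/etcd/etcd.conf', 'r', encoding='utf-8') as fp:
#       cp = configparser.ConfigParser()
#       cp.readfp(FakeSecHead(fp))
#       ca_file = cp.get('ETCD', 'ETCD_CA_FILE')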
######################################################################
def filter_paths(path_list):
"""`path_list` - A list of file paths to check. Only files which exist
will be returned
"""
return [p for p in path_list if os.path.exists(os.path.realpath(p))]
# pylint: disable=too-many-locals,too-many-branches
#
# TODO: Break this function down into smaller chunks
def load_and_handle_cert(cert_string, now, base64decode=False, ans_module=None):
"""Load a certificate, split off the good parts, and return some
useful data
Params:
- `cert_string` (string) - a certificate loaded into a string object
- `now` (datetime) - a datetime object of the time to calculate the certificate 'time_remaining' against
    - `base64decode` (bool) - whether to base64-decode `cert_string` before parsing it
- `ans_module` (AnsibleModule) - The AnsibleModule object for this module (so we can raise errors)
Returns:
A tuple of the form:
(cert_subject, cert_expiry_date, time_remaining, cert_serial_number)
"""
if base64decode:
_cert_string = cert_string.decode('base-64')
else:
_cert_string = cert_string
# Disable this. We 'redefine' the type because we are working
# around a missing library on the target host.
#
# pylint: disable=redefined-variable-type
if HAS_OPENSSL:
# No work-around required
cert_loaded = OpenSSL.crypto.load_certificate(
OpenSSL.crypto.FILETYPE_PEM, _cert_string)
else:
# Missing library, work-around required. Run the 'openssl'
# command on it to decode it
cmd = 'openssl x509 -text'
try:
openssl_proc = subprocess.Popen(cmd.split(),
stdout=subprocess.PIPE,
stdin=subprocess.PIPE)
except OSError:
ans_module.fail_json(msg="Error: The 'OpenSSL' python library and CLI command were not found on the target host. Unable to parse any certificates. This host will not be included in generated reports.")
else:
openssl_decoded = openssl_proc.communicate(_cert_string.encode('utf-8'))[0].decode('utf-8')
cert_loaded = FakeOpenSSLCertificate(openssl_decoded)
######################################################################
# Read all possible names from the cert
cert_subjects = []
for name, value in cert_loaded.get_subject().get_components():
cert_subjects.append('{}:{}'.format(name, value))
# To read SANs from a cert we must read the subjectAltName
# extension from the X509 Object. What makes this more difficult
# is that pyOpenSSL does not give extensions as an iterable
san = None
for i in range(cert_loaded.get_extension_count()):
ext = cert_loaded.get_extension(i)
if ext.get_short_name() == 'subjectAltName':
san = ext
if san is not None:
# The X509Extension object for subjectAltName prints as a
# string with the alt names separated by a comma and a
# space. Split the string by ', ' and then add our new names
# to the list of existing names
cert_subjects.extend(str(san).split(', '))
cert_subject = ', '.join(cert_subjects)
######################################################################
# Grab the expiration date
not_after = cert_loaded.get_notAfter()
# example get_notAfter() => 20180922170439Z
if isinstance(not_after, bytes):
not_after = not_after.decode('utf-8')
cert_expiry_date = datetime.datetime.strptime(
not_after,
'%Y%m%d%H%M%SZ')
time_remaining = cert_expiry_date - now
return (cert_subject, cert_expiry_date, time_remaining, cert_loaded.get_serial_number())
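# Illustrative example of the return value (not executed); every value shown
# below is hypothetical:
#
#   subject, expiry, remaining, serial = load_and_handle_cert(pem_text, now, ans_module=module)
#   # subject   -> 'O:system:nodes, CN:system:node:m01.example.com, DNS:m01.example.com'
#   # expiry    -> datetime.datetime(2019, 2, 7, 18, 19, 35)
#   # remaining -> expiry - now (a datetime.timedelta)
#   # serial    -> 11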
def classify_cert(cert_meta, now, time_remaining, expire_window, cert_list):
"""Given metadata about a certificate under examination, classify it
into one of three categories, 'ok', 'warning', and 'expired'.
Params:
- `cert_meta` dict - A dict with certificate metadata. Required fields
include: 'cert_cn', 'path', 'expiry', 'days_remaining', 'health'.
- `now` (datetime) - a datetime object of the time to calculate the certificate 'time_remaining' against
- `time_remaining` (datetime.timedelta) - a timedelta for how long until the cert expires
- `expire_window` (datetime.timedelta) - a timedelta for how long the warning window is
- `cert_list` list - A list to shove the classified cert into
Return:
- `cert_list` - The updated list of classified certificates
"""
expiry_str = str(cert_meta['expiry'])
# Categorization
if cert_meta['expiry'] < now:
# This already expired, must NOTIFY
cert_meta['health'] = 'expired'
elif time_remaining < expire_window:
        # WARN about this upcoming expiration
cert_meta['health'] = 'warning'
else:
# Not expired or about to expire
cert_meta['health'] = 'ok'
cert_meta['expiry'] = expiry_str
cert_meta['serial_hex'] = hex(int(cert_meta['serial']))
cert_list.append(cert_meta)
return cert_list
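# Illustrative example (not executed): with now = 2018-01-01 and a 30 day
# expire_window, a cert expiring 2018-01-20 is classified 'warning' (19 days
# left, inside the window), one that expired 2017-12-31 is 'expired', and one
# expiring 2019-01-01 is 'ok'.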
def tabulate_summary(certificates, kubeconfigs, etcd_certs, router_certs, registry_certs):
"""Calculate the summary text for when the module finishes
running. This includes counts of each classification and what have
you.
Params:
- `certificates` (list of dicts) - Processed `expire_check_result`
dicts with filled in `health` keys for system certificates.
- `kubeconfigs` - as above for kubeconfigs
- `etcd_certs` - as above for etcd certs
Return:
- `summary_results` (dict) - Counts of each cert type classification
and total items examined.
"""
items = certificates + kubeconfigs + etcd_certs + router_certs + registry_certs
summary_results = {
'system_certificates': len(certificates),
'kubeconfig_certificates': len(kubeconfigs),
'etcd_certificates': len(etcd_certs),
'router_certs': len(router_certs),
'registry_certs': len(registry_certs),
'total': len(items),
'ok': 0,
'warning': 0,
'expired': 0
}
summary_results['expired'] = len([c for c in items if c['health'] == 'expired'])
summary_results['warning'] = len([c for c in items if c['health'] == 'warning'])
summary_results['ok'] = len([c for c in items if c['health'] == 'ok'])
return summary_results
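# Illustrative example (not executed) of the summary dict this returns; the
# counts are hypothetical:
#
#   {'system_certificates': 3, 'kubeconfig_certificates': 2,
#    'etcd_certificates': 2, 'router_certs': 1, 'registry_certs': 1,
#    'total': 9, 'ok': 7, 'warning': 2, 'expired': 0}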
######################################################################
# This is our module MAIN function after all, so there's bound to be a
# lot of code bundled up into one block
#
# Reason: These checks are disabled because the issue was introduced
# during a period where the pylint checks weren't enabled for this file
# Status: temporarily disabled pending future refactoring
# pylint: disable=too-many-locals,too-many-statements,too-many-branches
def main():
"""This module examines certificates (in various forms) which compose
an OpenShift Container Platform cluster
"""
module = AnsibleModule(
argument_spec=dict(
config_base=dict(
required=False,
default="/etc/origin",
type='str'),
warning_days=dict(
required=False,
default=30,
type='int'),
show_all=dict(
required=False,
default=False,
type='bool')
),
supports_check_mode=True,
)
# Basic scaffolding for OpenShift specific certs
openshift_base_config_path = os.path.realpath(module.params['config_base'])
openshift_master_config_path = os.path.join(openshift_base_config_path,
"master", "master-config.yaml")
openshift_node_config_path = os.path.join(openshift_base_config_path,
"node", "node-config.yaml")
openshift_cert_check_paths = [
openshift_master_config_path,
openshift_node_config_path,
]
# Paths for Kubeconfigs. Additional kubeconfigs are conditionally
# checked later in the code
master_kube_configs = ['admin', 'openshift-master',
'openshift-node', 'openshift-router',
'openshift-registry']
kubeconfig_paths = []
for m_kube_config in master_kube_configs:
kubeconfig_paths.append(
os.path.join(openshift_base_config_path, "master", m_kube_config + ".kubeconfig")
)
# Validate some paths we have the ability to do ahead of time
openshift_cert_check_paths = filter_paths(openshift_cert_check_paths)
kubeconfig_paths = filter_paths(kubeconfig_paths)
# etcd, where do you hide your certs? Used when parsing etcd.conf
etcd_cert_params = [
"ETCD_CA_FILE",
"ETCD_CERT_FILE",
"ETCD_PEER_CA_FILE",
"ETCD_PEER_CERT_FILE",
]
# Expiry checking stuff
now = datetime.datetime.now()
    # TODO: catch exceptions for invalid input and return a fail_json
warning_days = int(module.params['warning_days'])
expire_window = datetime.timedelta(days=warning_days)
# Module stuff
#
# The results of our cert checking to return from the task call
check_results = {}
check_results['meta'] = {}
check_results['meta']['warning_days'] = warning_days
check_results['meta']['checked_at_time'] = str(now)
check_results['meta']['warn_before_date'] = str(now + expire_window)
check_results['meta']['show_all'] = str(module.params['show_all'])
# All the analyzed certs accumulate here
ocp_certs = []
######################################################################
# Sure, why not? Let's enable check mode.
if module.check_mode:
check_results['ocp_certs'] = []
module.exit_json(
check_results=check_results,
msg="Checked 0 total certificates. Expired/Warning/OK: 0/0/0. Warning window: %s days" % module.params['warning_days'],
rc=0,
changed=False
)
######################################################################
# Check for OpenShift Container Platform specific certs
######################################################################
for os_cert in filter_paths(openshift_cert_check_paths):
# Open up that config file and locate the cert and CA
with io.open(os_cert, 'r', encoding='utf-8') as fp:
cert_meta = {}
cfg = yaml.load(fp)
# cert files are specified in parsed `fp` as relative to the path
# of the original config file. 'master-config.yaml' with certFile
# = 'foo.crt' implies that 'foo.crt' is in the same
# directory. certFile = '../foo.crt' is in the parent directory.
cfg_path = os.path.dirname(fp.name)
cert_meta['certFile'] = os.path.join(cfg_path, cfg['servingInfo']['certFile'])
cert_meta['clientCA'] = os.path.join(cfg_path, cfg['servingInfo']['clientCA'])
######################################################################
# Load the certificate and the CA, parse their expiration dates into
# datetime objects so we can manipulate them later
for _, v in cert_meta.items():
with io.open(v, 'r', encoding='utf-8') as fp:
cert = fp.read()
(cert_subject,
cert_expiry_date,
time_remaining,
cert_serial) = load_and_handle_cert(cert, now, ans_module=module)
expire_check_result = {
'cert_cn': cert_subject,
'path': fp.name,
'expiry': cert_expiry_date,
'days_remaining': time_remaining.days,
'health': None,
'serial': cert_serial
}
classify_cert(expire_check_result, now, time_remaining, expire_window, ocp_certs)
######################################################################
# /Check for OpenShift Container Platform specific certs
######################################################################
######################################################################
# Check service Kubeconfigs
######################################################################
kubeconfigs = []
    # There may be additional kubeconfigs to check, but their names
    # are less predictable than those we've already assembled.
try:
# Try to read the standard 'node-config.yaml' file to check if
# this host is a node.
with io.open(openshift_node_config_path, 'r', encoding='utf-8') as fp:
cfg = yaml.load(fp)
# OK, the config file exists, therefore this is a
# node. Nodes have their own kubeconfig files to
# communicate with the master API. Let's read the relative
# path to that file from the node config.
node_masterKubeConfig = cfg['masterKubeConfig']
# As before, the path to the 'masterKubeConfig' file is
# relative to `fp`
cfg_path = os.path.dirname(fp.name)
node_kubeconfig = os.path.join(cfg_path, node_masterKubeConfig)
with io.open(node_kubeconfig, 'r', encoding='utf8') as fp:
# Read in the nodes kubeconfig file and grab the good stuff
cfg = yaml.load(fp)
c = cfg['users'][0]['user']['client-certificate-data']
(cert_subject,
cert_expiry_date,
time_remaining,
cert_serial) = load_and_handle_cert(c, now, base64decode=True, ans_module=module)
expire_check_result = {
'cert_cn': cert_subject,
'path': fp.name,
'expiry': cert_expiry_date,
'days_remaining': time_remaining.days,
'health': None,
'serial': cert_serial
}
classify_cert(expire_check_result, now, time_remaining, expire_window, kubeconfigs)
except IOError:
# This is not a node
pass
for kube in filter_paths(kubeconfig_paths):
with io.open(kube, 'r', encoding='utf-8') as fp:
# TODO: Maybe consider catching exceptions here?
cfg = yaml.load(fp)
# Per conversation, "the kubeconfigs you care about:
# admin, router, registry should all be single
# value". Following that advice we only grab the data for
# the user at index 0 in the 'users' list. There should
# not be more than one user.
c = cfg['users'][0]['user']['client-certificate-data']
(cert_subject,
cert_expiry_date,
time_remaining,
cert_serial) = load_and_handle_cert(c, now, base64decode=True, ans_module=module)
expire_check_result = {
'cert_cn': cert_subject,
'path': fp.name,
'expiry': cert_expiry_date,
'days_remaining': time_remaining.days,
'health': None,
'serial': cert_serial
}
classify_cert(expire_check_result, now, time_remaining, expire_window, kubeconfigs)
######################################################################
# /Check service Kubeconfigs
######################################################################
######################################################################
# Check etcd certs
#
# Two things to check: 'external' etcd, and embedded etcd.
######################################################################
# FIRST: The 'external' etcd
#
    # Some values may be duplicated, so collect them in a set for now
    # to de-duplicate them
etcd_certs_to_check = set([])
etcd_certs = []
etcd_cert_params.append('dne')
try:
with io.open('/etc/etcd/etcd.conf', 'r', encoding='utf-8') as fp:
etcd_config = configparser.ConfigParser()
# Reason: This check is disabled because the issue was introduced
# during a period where the pylint checks weren't enabled for this file
# Status: temporarily disabled pending future refactoring
# pylint: disable=deprecated-method
etcd_config.readfp(FakeSecHead(fp))
for param in etcd_cert_params:
try:
etcd_certs_to_check.add(etcd_config.get('ETCD', param))
except configparser.NoOptionError:
# That parameter does not exist, oh well...
pass
except IOError:
# No etcd to see here, move along
pass
for etcd_cert in filter_paths(etcd_certs_to_check):
with io.open(etcd_cert, 'r', encoding='utf-8') as fp:
c = fp.read()
(cert_subject,
cert_expiry_date,
time_remaining,
cert_serial) = load_and_handle_cert(c, now, ans_module=module)
expire_check_result = {
'cert_cn': cert_subject,
'path': fp.name,
'expiry': cert_expiry_date,
'days_remaining': time_remaining.days,
'health': None,
'serial': cert_serial
}
classify_cert(expire_check_result, now, time_remaining, expire_window, etcd_certs)
######################################################################
# Now the embedded etcd
######################################################################
try:
with io.open('/etc/origin/master/master-config.yaml', 'r', encoding='utf-8') as fp:
cfg = yaml.load(fp)
except IOError:
# Not present
pass
else:
if cfg.get('etcdConfig', {}).get('servingInfo', {}).get('certFile', None) is not None:
# This is embedded
etcd_crt_name = cfg['etcdConfig']['servingInfo']['certFile']
else:
# Not embedded
etcd_crt_name = None
if etcd_crt_name is not None:
# etcd_crt_name is relative to the location of the
# master-config.yaml file
cfg_path = os.path.dirname(fp.name)
etcd_cert = os.path.join(cfg_path, etcd_crt_name)
with open(etcd_cert, 'r') as etcd_fp:
(cert_subject,
cert_expiry_date,
time_remaining,
cert_serial) = load_and_handle_cert(etcd_fp.read(), now, ans_module=module)
expire_check_result = {
'cert_cn': cert_subject,
'path': etcd_fp.name,
'expiry': cert_expiry_date,
'days_remaining': time_remaining.days,
'health': None,
'serial': cert_serial
}
classify_cert(expire_check_result, now, time_remaining, expire_window, etcd_certs)
######################################################################
# /Check etcd certs
######################################################################
######################################################################
# Check router/registry certs
#
# These are saved as secrets in etcd. That means that we can not
# simply read a file to grab the data. Instead we're going to
# subprocess out to the 'oc get' command. On non-masters this
# command will fail, that is expected so we catch that exception.
######################################################################
router_certs = []
registry_certs = []
######################################################################
# First the router certs
try:
router_secrets_raw = subprocess.Popen('oc get -n default secret router-certs -o yaml'.split(),
stdout=subprocess.PIPE)
router_ds = yaml.load(router_secrets_raw.communicate()[0])
router_c = router_ds['data']['tls.crt']
router_path = router_ds['metadata']['selfLink']
except TypeError:
# YAML couldn't load the result, this is not a master
pass
except OSError:
# The OC command doesn't exist here. Move along.
pass
else:
(cert_subject,
cert_expiry_date,
time_remaining,
cert_serial) = load_and_handle_cert(router_c, now, base64decode=True, ans_module=module)
expire_check_result = {
'cert_cn': cert_subject,
'path': router_path,
'expiry': cert_expiry_date,
'days_remaining': time_remaining.days,
'health': None,
'serial': cert_serial
}
classify_cert(expire_check_result, now, time_remaining, expire_window, router_certs)
######################################################################
# Now for registry
try:
registry_secrets_raw = subprocess.Popen('oc get -n default secret registry-certificates -o yaml'.split(),
stdout=subprocess.PIPE)
registry_ds = yaml.load(registry_secrets_raw.communicate()[0])
registry_c = registry_ds['data']['registry.crt']
registry_path = registry_ds['metadata']['selfLink']
except TypeError:
# YAML couldn't load the result, this is not a master
pass
except OSError:
# The OC command doesn't exist here. Move along.
pass
else:
(cert_subject,
cert_expiry_date,
time_remaining,
cert_serial) = load_and_handle_cert(registry_c, now, base64decode=True, ans_module=module)
expire_check_result = {
'cert_cn': cert_subject,
'path': registry_path,
'expiry': cert_expiry_date,
'days_remaining': time_remaining.days,
'health': None,
'serial': cert_serial
}
classify_cert(expire_check_result, now, time_remaining, expire_window, registry_certs)
######################################################################
# /Check router/registry certs
######################################################################
res = tabulate_summary(ocp_certs, kubeconfigs, etcd_certs, router_certs, registry_certs)
msg = "Checked {count} total certificates. Expired/Warning/OK: {exp}/{warn}/{ok}. Warning window: {window} days".format(
count=res['total'],
exp=res['expired'],
warn=res['warning'],
ok=res['ok'],
window=int(module.params['warning_days']),
)
# By default we only return detailed information about expired or
# warning certificates. If show_all is true then we will print all
# the certificates examined.
if not module.params['show_all']:
check_results['ocp_certs'] = [crt for crt in ocp_certs if crt['health'] in ['expired', 'warning']]
check_results['kubeconfigs'] = [crt for crt in kubeconfigs if crt['health'] in ['expired', 'warning']]
check_results['etcd'] = [crt for crt in etcd_certs if crt['health'] in ['expired', 'warning']]
check_results['registry'] = [crt for crt in registry_certs if crt['health'] in ['expired', 'warning']]
check_results['router'] = [crt for crt in router_certs if crt['health'] in ['expired', 'warning']]
else:
check_results['ocp_certs'] = ocp_certs
check_results['kubeconfigs'] = kubeconfigs
check_results['etcd'] = etcd_certs
check_results['registry'] = registry_certs
check_results['router'] = router_certs
# Sort the final results to report in order of ascending safety
# time. That is to say, the certificates which will expire sooner
# will be at the front of the list and certificates which will
# expire later are at the end. Router and registry certs should be
# limited to just 1 result, so don't bother sorting those.
def cert_key(item):
''' return the days_remaining key '''
return item['days_remaining']
check_results['ocp_certs'] = sorted(check_results['ocp_certs'], key=cert_key)
check_results['kubeconfigs'] = sorted(check_results['kubeconfigs'], key=cert_key)
check_results['etcd'] = sorted(check_results['etcd'], key=cert_key)
# This module will never change anything, but we might want to
# change the return code parameter if there is some catastrophic
# error we noticed earlier
module.exit_json(
check_results=check_results,
summary=res,
msg=msg,
rc=0,
changed=False
)
if __name__ == '__main__':
main()
| apache-2.0 | -3,453,586,666,350,126,000 | 38.410226 | 213 | 0.582338 | false |
rmfitzpatrick/ansible | lib/ansible/modules/storage/netapp/sf_account_manager.py | 39 | 8755 | #!/usr/bin/python
# (c) 2017, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
module: sf_account_manager
short_description: Manage SolidFire accounts
extends_documentation_fragment:
- netapp.solidfire
version_added: '2.3'
author: Sumit Kumar ([email protected])
description:
- Create, destroy, or update accounts on SolidFire
options:
state:
description:
- Whether the specified account should exist or not.
required: true
choices: ['present', 'absent']
name:
description:
- Unique username for this account. (May be 1 to 64 characters in length).
required: true
new_name:
description:
- New name for the user account.
required: false
default: None
initiator_secret:
description:
- CHAP secret to use for the initiator. Should be 12-16 characters long and impenetrable.
- The CHAP initiator secrets must be unique and cannot be the same as the target CHAP secret.
- If not specified, a random secret is created.
required: false
target_secret:
description:
- CHAP secret to use for the target (mutual CHAP authentication).
- Should be 12-16 characters long and impenetrable.
- The CHAP target secrets must be unique and cannot be the same as the initiator CHAP secret.
- If not specified, a random secret is created.
required: false
attributes:
description: List of Name/Value pairs in JSON object format.
required: false
account_id:
description:
- The ID of the account to manage or update.
required: false
default: None
status:
description:
- Status of the account.
required: false
'''
EXAMPLES = """
- name: Create Account
sf_account_manager:
hostname: "{{ solidfire_hostname }}"
username: "{{ solidfire_username }}"
password: "{{ solidfire_password }}"
state: present
name: TenantA
- name: Modify Account
sf_account_manager:
hostname: "{{ solidfire_hostname }}"
username: "{{ solidfire_username }}"
password: "{{ solidfire_password }}"
state: present
name: TenantA
new_name: TenantA-Renamed
- name: Delete Account
sf_account_manager:
hostname: "{{ solidfire_hostname }}"
username: "{{ solidfire_username }}"
password: "{{ solidfire_password }}"
state: absent
name: TenantA-Renamed
"""
RETURN = """
"""
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
import ansible.module_utils.netapp as netapp_utils
HAS_SF_SDK = netapp_utils.has_sf_sdk()
class SolidFireAccount(object):
def __init__(self):
self.argument_spec = netapp_utils.ontap_sf_host_argument_spec()
self.argument_spec.update(dict(
state=dict(required=True, choices=['present', 'absent']),
name=dict(required=True, type='str'),
account_id=dict(required=False, type='int', default=None),
new_name=dict(required=False, type='str', default=None),
initiator_secret=dict(required=False, type='str'),
target_secret=dict(required=False, type='str'),
attributes=dict(required=False, type='dict'),
status=dict(required=False, type='str'),
))
self.module = AnsibleModule(
argument_spec=self.argument_spec,
supports_check_mode=True
)
p = self.module.params
# set up state variables
self.state = p['state']
self.name = p['name']
self.account_id = p['account_id']
self.new_name = p['new_name']
self.initiator_secret = p['initiator_secret']
self.target_secret = p['target_secret']
self.attributes = p['attributes']
self.status = p['status']
if HAS_SF_SDK is False:
self.module.fail_json(msg="Unable to import the SolidFire Python SDK")
else:
self.sfe = netapp_utils.create_sf_connection(module=self.module)
def get_account(self):
"""
Return account object if found
:return: Details about the account. None if not found.
:rtype: dict
"""
account_list = self.sfe.list_accounts()
for account in account_list.accounts:
if account.username == self.name:
# Update self.account_id:
if self.account_id is not None:
if account.account_id == self.account_id:
return account
else:
self.account_id = account.account_id
return account
return None
def create_account(self):
try:
self.sfe.add_account(username=self.name,
initiator_secret=self.initiator_secret,
target_secret=self.target_secret,
attributes=self.attributes)
except Exception as e:
            self.module.fail_json(msg='Error creating account %s: %s' % (self.name, to_native(e)),
exception=traceback.format_exc())
def delete_account(self):
try:
self.sfe.remove_account(account_id=self.account_id)
except Exception as e:
self.module.fail_json(msg='Error deleting account %s: %s' % (self.account_id, to_native(e)),
exception=traceback.format_exc())
def update_account(self):
try:
self.sfe.modify_account(account_id=self.account_id,
username=self.new_name,
status=self.status,
initiator_secret=self.initiator_secret,
target_secret=self.target_secret,
attributes=self.attributes)
except Exception as e:
self.module.fail_json(msg='Error updating account %s: %s' % (self.account_id, to_native(e)),
exception=traceback.format_exc())
def apply(self):
changed = False
account_exists = False
update_account = False
account_detail = self.get_account()
if account_detail:
account_exists = True
if self.state == 'absent':
changed = True
elif self.state == 'present':
# Check if we need to update the account
if account_detail.username is not None and self.new_name is not None and \
account_detail.username != self.new_name:
update_account = True
changed = True
elif account_detail.status is not None and self.status is not None \
and account_detail.status != self.status:
update_account = True
changed = True
elif account_detail.initiator_secret is not None and self.initiator_secret is not None \
and account_detail.initiator_secret != self.initiator_secret:
update_account = True
changed = True
elif account_detail.target_secret is not None and self.target_secret is not None \
and account_detail.target_secret != self.target_secret:
update_account = True
changed = True
elif account_detail.attributes is not None and self.attributes is not None \
and account_detail.attributes != self.attributes:
update_account = True
changed = True
else:
if self.state == 'present':
changed = True
if changed:
if self.module.check_mode:
pass
else:
if self.state == 'present':
if not account_exists:
self.create_account()
elif update_account:
self.update_account()
elif self.state == 'absent':
self.delete_account()
self.module.exit_json(changed=changed)
def main():
v = SolidFireAccount()
v.apply()
if __name__ == '__main__':
main()
| gpl-3.0 | -9,155,482,523,556,984,000 | 31.1875 | 104 | 0.562193 | false |
susilehtola/psi4 | tests/pytests/test_erisieve.py | 7 | 4496 | import pytest
import psi4
import itertools
from .utils import compare_integers, compare_values
pytestmark = pytest.mark.quick
def test_no_screening_schwarz():
"""Checks the number of shell quartets screened with Schwarz screening.
No shell quartets should be screened with a threshold of 0.0"""
mol = psi4.geometry("""
Ne 0.0 0.0 0.0
Ne 4.0 0.0 0.0
Ne 8.0 0.0 0.0
""")
psi4.set_options({ "ints_tolerance" : 0.0 ,
"screening" : "schwarz" })
basis = psi4.core.BasisSet.build(mol, target='cc-pVDZ')
factory = psi4.core.IntegralFactory(basis)
eri = factory.eri(0)
shell_inds = range(basis.nshell())
quartets = itertools.product(shell_inds, shell_inds, shell_inds, shell_inds)
screen_count = 0
for m, n, r, s in quartets:
if not eri.shell_significant(m, n, r, s):
screen_count += 1
assert compare_integers(0, screen_count, 'Quartets Schwarz Screened, Cutoff 0')
def test_no_screening_csam():
"""Checks the number of shell quartets screened with CSAM screening.
No shell quartets should be screened with a threshold of 0.0"""
mol = psi4.geometry("""
Ne 0.0 0.0 0.0
Ne 4.0 0.0 0.0
Ne 8.0 0.0 0.0
""")
psi4.set_options({ "ints_tolerance" : 0.0,
"screening" : "csam" })
basis = psi4.core.BasisSet.build(mol, target='cc-pVDZ')
factory = psi4.core.IntegralFactory(basis)
eri = factory.eri(0)
shell_inds = range(basis.nshell())
quartets = itertools.product(shell_inds, shell_inds, shell_inds, shell_inds)
screen_count = 0
for m, n, r, s in quartets:
if not eri.shell_significant(m, n, r, s):
screen_count += 1
assert compare_integers(0, screen_count, 'Quartets CSAM Screened, Cutoff 0')
def test_schwarz_vs_csam_quartets():
"""Checks difference between the number of shell quartets screened with Schwarz and CSAM screening.
    CSAM is strictly tighter than Schwarz and should screen at least all of the same shell quartets.
    Default threshold of 1.0E-12 is used"""
mol = psi4.geometry("""
Ne 0.0 0.0 0.0
Ne 4.0 0.0 0.0
Ne 8.0 0.0 0.0
""")
psi4.set_options({ "ints_tolerance" : 1e-12})
basis = psi4.core.BasisSet.build(mol, target='DZ')
factory = psi4.core.IntegralFactory(basis)
psi4.set_options({ "ints_tolerance" : 1e-12,
"screening" : 'csam' })
eriCSAM = factory.eri(0)
psi4.set_options({ "screening" : 'schwarz', 'integral_package': 'libint2' })
eriSchwarz = factory.eri(0)
shell_inds = range(basis.nshell())
quartets = itertools.product(shell_inds, shell_inds, shell_inds, shell_inds)
screen_count_both = 0
screen_count_csam = 0
screen_count_schwarz = 0
screen_count_none = 0
for m, n, r, s in quartets:
screen_schwarz = not eriSchwarz.shell_significant(m, n, r, s)
screen_csam = not eriCSAM.shell_significant(m, n, r, s)
if screen_schwarz and screen_csam:
screen_count_both += 1
elif screen_csam:
screen_count_csam += 1
elif screen_schwarz:
screen_count_schwarz += 1
else:
screen_count_none += 1
assert compare_integers(75072, screen_count_both, 'Schwarz vs CSAM Screening, Cutoff 1.0e-12')
assert compare_integers(2336, screen_count_csam, 'Schwarz vs CSAM Screening, Cutoff 1.0e-12')
assert compare_integers(0, screen_count_schwarz, 'Schwarz vs CSAM Screening, Cutoff 1.0e-12')
assert compare_integers(27568, screen_count_none, 'Schwarz vs CSAM Screening, Cutoff 1.0e-12')
def test_schwarz_vs_csam_energy():
"""Checks difference in Hartree-Fock energy between no screening and CSAM screening, which should be
insignificant. """
psi4.geometry("""
Ne 0.0 0.0 0.0
Ne 4.0 0.0 0.0
Ne 8.0 0.0 0.0
""")
psi4.set_options({'scf_type' : 'direct',
'd_convergence' : 1e-12,
'screening' : 'schwarz',
'ints_tolerance' : 1.0e-12 })
e_schwarz = psi4.energy('hf/DZ')
psi4.core.clean()
psi4.set_options({'scf_type' : 'direct',
'd_convergence' : 1e-12,
'screening' : 'csam',
'ints_tolerance' : 1.0e-12 })
e_csam = psi4.energy('hf/DZ')
assert compare_values(e_schwarz, e_csam, 11, 'Schwarz vs CSAM Screening, Cutoff 1.0e-12')
| lgpl-3.0 | -5,319,509,434,290,676,000 | 32.552239 | 104 | 0.609431 | false |
heenbo/mosquitto-heenbo | test/lib/03-publish-qos0-no-payload.py | 7 | 2173 | #!/usr/bin/env python
# Test whether a client sends a correct PUBLISH to a topic with QoS 0 and no payload.
# The client should connect to port 1888 with keepalive=60, clean session set,
# and client id publish-qos0-test-np
# The test will send a CONNACK message to the client with rc=0. Upon receiving
# the CONNACK and verifying that rc=0, the client should send a PUBLISH message
# to topic "pub/qos0/no-payload/test" with zero length payload and QoS=0. If
# rc!=0, the client should exit with an error.
# After sending the PUBLISH message, the client should send a DISCONNECT message.
import inspect
import os
import subprocess
import socket
import sys
import time
# From http://stackoverflow.com/questions/279237/python-import-a-module-from-a-folder
cmd_subfolder = os.path.realpath(os.path.abspath(os.path.join(os.path.split(inspect.getfile( inspect.currentframe() ))[0],"..")))
if cmd_subfolder not in sys.path:
sys.path.insert(0, cmd_subfolder)
import mosq_test
rc = 1
keepalive = 60
connect_packet = mosq_test.gen_connect("publish-qos0-test-np", keepalive=keepalive)
connack_packet = mosq_test.gen_connack(rc=0)
publish_packet = mosq_test.gen_publish("pub/qos0/no-payload/test", qos=0)
disconnect_packet = mosq_test.gen_disconnect()
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.settimeout(10)
sock.bind(('', 1888))
sock.listen(5)
client_args = sys.argv[1:]
env = dict(os.environ)
env['LD_LIBRARY_PATH'] = '../../lib:../../lib/cpp'
try:
pp = env['PYTHONPATH']
except KeyError:
pp = ''
env['PYTHONPATH'] = '../../lib/python:'+pp
client = mosq_test.start_client(filename=sys.argv[1].replace('/', '-'), cmd=client_args, env=env)
try:
(conn, address) = sock.accept()
conn.settimeout(10)
if mosq_test.expect_packet(conn, "connect", connect_packet):
conn.send(connack_packet)
if mosq_test.expect_packet(conn, "publish", publish_packet):
if mosq_test.expect_packet(conn, "disconnect", disconnect_packet):
rc = 0
conn.close()
finally:
client.terminate()
client.wait()
sock.close()
exit(rc)
| gpl-3.0 | -4,717,451,930,396,633,000 | 30.492754 | 129 | 0.705016 | false |
alanconway/dispatch | console/test/mock/nexthop.py | 4 | 4808 | #!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import collections
class TreeNode(object):
def __init__(self, f, parent):
self.name = f
self.parent = parent
self.children = []
self.visited = False
def procreate(self, links):
if self.visited:
return
self.visited = True
for link in links:
if link['source']['nodeType'] == 'inter-router' and link['target']['nodeType'] == 'inter-router':
if (link['source']['name'] == self.name or link['target']['name'] == self.name):
name = link['source']['name'] if link['target']['name'] == self.name else link['target']['name']
if not name in self.ancestors():
self.children.append(TreeNode(name, self))
def ancestors(self):
a = self.geneology(self.parent)
a.reverse()
return a
def geneology(self, parent):
if parent is None:
return []
ret = [parent.name]
ret.extend(self.geneology(parent.parent))
return ret
class Hopper(object):
def __init__(self, verbose):
self.tree = {}
self.table = {}
self.verbose = verbose
def get(self, f, t, links):
if self.verbose:
print ("------- asked to get " + f + " to " + t)
if f in self.table and t in self.table[f]:
if self.verbose:
print " ------- returning existing " + str(self.table[f][t])
return self.table[f][t]
self.tree = {}
#treef = self.highest(f)
#if treef is None:
treef = self.root(f)
q = collections.deque([treef])
while len(q):
node = q.popleft()
self.process(f, node, treef.name, links)
if f in self.table and t in self.table[f]:
if self.verbose:
print " ------- returning " + str(self.table[f][t])
ret = self.table[f][t]
#self.table = {}
return ret
for n in node.children:
q.append(n)
if self.verbose:
print (" ------- returning unfound nextHop of None")
def process(self, f, node, r, links):
node.procreate(links)
for n in node.children:
self.populateTable(f, n, r)
def populateTable(self, f, node, r):
n = node.name
if not f in self.table:
self.table[f] = {}
self.table[f][f] = None
if not n in self.table:
self.table[n] = {}
self.table[n][n] = None
if not node.parent:
return
if node.parent.name == f:
self.table[f][n] = None
self.table[n][f] = None
else:
def setHop(n, a, p):
if not a in self.table[n]:
self.table[n][a] = p
def loop(ancestors):
for i in range(len(ancestors)):
start = ancestors[i]
for j in range(i+1, len(ancestors)):
stop = ancestors[j]
if j-i == 1:
setHop(start, stop, None)
else:
setHop(start, stop, ancestors[i+1])
ancestors = node.ancestors()
while len(ancestors) > 0 and ancestors[0] != r:
ancestors.pop(0)
ancestors.append(n)
loop(ancestors)
ancestors.reverse()
loop(ancestors)
def root(self, f):
if not self.tree:
self.tree[f] = TreeNode(f, None)
return self.tree[list(self.tree.keys())[0]]
def highest(self, f):
r = self.root(f)
if r.name == f:
return r
q = collections.deque([r])
while len(q):
node = q.popleft()
for n in node.children:
if n.name == f:
return n
q.append(n)
return None
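# Illustrative usage (not executed); the link dicts below only sketch the
# shape this mock expects, with hypothetical router names:
#
#   links = [{'source': {'name': 'A', 'nodeType': 'inter-router'},
#             'target': {'name': 'B', 'nodeType': 'inter-router'}},
#            {'source': {'name': 'B', 'nodeType': 'inter-router'},
#             'target': {'name': 'C', 'nodeType': 'inter-router'}}]
#   hopper = Hopper(verbose=False)
#   hopper.get('A', 'C', links)   # -> 'B', the next hop from A toward C
#   hopper.get('A', 'B', links)   # -> None, A and B are directly connected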
| apache-2.0 | 8,123,882,340,375,102,000 | 32.158621 | 116 | 0.517887 | false |
spring-week-topos/cinder-week | cinder/tests/test_vmware_volumeops.py | 3 | 39031 | # Copyright (c) 2014 VMware, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Test suite for VMware VMDK driver volumeops module.
"""
import mock
from cinder import test
from cinder import units
from cinder.volume.drivers.vmware import error_util
from cinder.volume.drivers.vmware import vim_util
from cinder.volume.drivers.vmware import volumeops
class VolumeOpsTestCase(test.TestCase):
"""Unit tests for volumeops module."""
MAX_OBJECTS = 100
def setUp(self):
super(VolumeOpsTestCase, self).setUp()
self.session = mock.MagicMock()
self.vops = volumeops.VMwareVolumeOps(self.session, self.MAX_OBJECTS)
def test_split_datastore_path(self):
test1 = '[datastore1] myfolder/mysubfolder/myvm.vmx'
(datastore, folder, file_name) = volumeops.split_datastore_path(test1)
self.assertEqual(datastore, 'datastore1')
self.assertEqual(folder, 'myfolder/mysubfolder/')
self.assertEqual(file_name, 'myvm.vmx')
test2 = '[datastore2 ] myfolder/myvm.vmdk'
(datastore, folder, file_name) = volumeops.split_datastore_path(test2)
self.assertEqual(datastore, 'datastore2')
self.assertEqual(folder, 'myfolder/')
self.assertEqual(file_name, 'myvm.vmdk')
test3 = 'myfolder/myvm.vmdk'
self.assertRaises(IndexError, volumeops.split_datastore_path, test3)
def vm(self, val):
"""Create a mock vm in retrieve result format."""
vm = mock.MagicMock()
prop = mock.Mock(spec=object)
prop.val = val
vm.propSet = [prop]
return vm
def test_get_backing(self):
name = 'mock-backing'
# Test no result
self.session.invoke_api.return_value = None
result = self.vops.get_backing(name)
self.assertIsNone(result)
self.session.invoke_api.assert_called_once_with(vim_util,
'get_objects',
self.session.vim,
'VirtualMachine',
self.MAX_OBJECTS)
# Test single result
vm = self.vm(name)
vm.obj = mock.sentinel.vm_obj
retrieve_result = mock.Mock(spec=object)
retrieve_result.objects = [vm]
self.session.invoke_api.return_value = retrieve_result
self.vops.cancel_retrieval = mock.Mock(spec=object)
result = self.vops.get_backing(name)
self.assertEqual(mock.sentinel.vm_obj, result)
self.session.invoke_api.assert_called_with(vim_util, 'get_objects',
self.session.vim,
'VirtualMachine',
self.MAX_OBJECTS)
self.vops.cancel_retrieval.assert_called_once_with(retrieve_result)
# Test multiple results
retrieve_result2 = mock.Mock(spec=object)
retrieve_result2.objects = [vm('1'), vm('2'), vm('3')]
self.session.invoke_api.return_value = retrieve_result2
self.vops.continue_retrieval = mock.Mock(spec=object)
self.vops.continue_retrieval.return_value = retrieve_result
result = self.vops.get_backing(name)
self.assertEqual(mock.sentinel.vm_obj, result)
self.session.invoke_api.assert_called_with(vim_util, 'get_objects',
self.session.vim,
'VirtualMachine',
self.MAX_OBJECTS)
self.vops.continue_retrieval.assert_called_once_with(retrieve_result2)
self.vops.cancel_retrieval.assert_called_with(retrieve_result)
def test_delete_backing(self):
backing = mock.sentinel.backing
task = mock.sentinel.task
self.session.invoke_api.return_value = task
self.vops.delete_backing(backing)
self.session.invoke_api.assert_called_once_with(self.session.vim,
"Destroy_Task",
backing)
self.session.wait_for_task(task)
def test_get_host(self):
instance = mock.sentinel.instance
host = mock.sentinel.host
self.session.invoke_api.return_value = host
result = self.vops.get_host(instance)
self.assertEqual(host, result)
self.session.invoke_api.assert_called_once_with(vim_util,
'get_object_property',
self.session.vim,
instance,
'runtime.host')
def test_get_hosts(self):
hosts = mock.sentinel.hosts
self.session.invoke_api.return_value = hosts
result = self.vops.get_hosts()
self.assertEqual(hosts, result)
self.session.invoke_api.assert_called_once_with(vim_util,
'get_objects',
self.session.vim,
'HostSystem',
self.MAX_OBJECTS)
def test_continue_retrieval(self):
retrieve_result = mock.sentinel.retrieve_result
self.session.invoke_api.return_value = retrieve_result
result = self.vops.continue_retrieval(retrieve_result)
self.assertEqual(retrieve_result, result)
self.session.invoke_api.assert_called_once_with(vim_util,
'continue_retrieval',
self.session.vim,
retrieve_result)
def test_cancel_retrieval(self):
retrieve_result = mock.sentinel.retrieve_result
self.session.invoke_api.return_value = retrieve_result
result = self.vops.cancel_retrieval(retrieve_result)
self.assertIsNone(result)
self.session.invoke_api.assert_called_once_with(vim_util,
'cancel_retrieval',
self.session.vim,
retrieve_result)
def test_is_usable(self):
mount_info = mock.Mock(spec=object)
mount_info.accessMode = "readWrite"
mount_info.mounted = True
mount_info.accessible = True
datastore = mock.sentinel.datastore
self.assertTrue(self.vops._is_usable(datastore, mount_info))
del mount_info.mounted
self.assertTrue(self.vops._is_usable(datastore, mount_info))
mount_info.accessMode = "readonly"
self.assertFalse(self.vops._is_usable(datastore, mount_info))
mount_info.accessMode = "readWrite"
mount_info.mounted = False
self.assertFalse(self.vops._is_usable(datastore, mount_info))
mount_info.mounted = True
mount_info.accessible = False
self.assertFalse(self.vops._is_usable(datastore, mount_info))
with mock.patch.object(self.vops, 'get_summary') as get_summary:
del mount_info.accessible
summary = mock.Mock(spec=object)
summary.accessible = True
get_summary.return_value = summary
self.assertTrue(self.vops._is_usable(datastore, mount_info))
summary.accessible = False
self.assertFalse(self.vops._is_usable(datastore, mount_info))
def _create_host_mounts(self, access_mode, host, set_accessible=True,
is_accessible=True, mounted=True):
"""Create host mount value of datastore with single mount info.
:param access_mode: string specifying the read/write permission
:param set_accessible: specify whether accessible property
should be set
:param is_accessible: boolean specifying whether the datastore
is accessible to host
        :param host: managed object reference of the connected
                     host
        :param mounted: boolean specifying whether the datastore is mounted
        :return: list of host mount info
"""
mntInfo = mock.Mock(spec=object)
mntInfo.accessMode = access_mode
if set_accessible:
mntInfo.accessible = is_accessible
else:
del mntInfo.accessible
mntInfo.mounted = mounted
host_mount = mock.Mock(spec=object)
host_mount.key = host
host_mount.mountInfo = mntInfo
host_mounts = mock.Mock(spec=object)
host_mounts.DatastoreHostMount = [host_mount]
return host_mounts
def test_get_connected_hosts(self):
datastore = mock.sentinel.datastore
host = mock.Mock(spec=object)
host.value = mock.sentinel.host
host_mounts = self._create_host_mounts("readWrite", host)
self.session.invoke_api.return_value = host_mounts
hosts = self.vops.get_connected_hosts(datastore)
self.assertEqual([mock.sentinel.host], hosts)
self.session.invoke_api.assert_called_once_with(vim_util,
'get_object_property',
self.session.vim,
datastore,
'host')
def test_is_valid(self):
datastore = mock.sentinel.datastore
host = mock.Mock(spec=object)
host.value = mock.sentinel.host
def _is_valid(host_mounts, is_valid):
self.session.invoke_api.return_value = host_mounts
result = self.vops._is_valid(datastore, host)
self.assertEqual(is_valid, result)
self.session.invoke_api.assert_called_with(vim_util,
'get_object_property',
self.session.vim,
datastore,
'host')
# Test with accessible attr
_is_valid(self._create_host_mounts("readWrite", host), True)
# Test without accessible attr, and use summary instead
with mock.patch.object(self.vops, 'get_summary') as get_summary:
summary = mock.Mock(spec=object)
summary.accessible = True
get_summary.return_value = summary
_is_valid(self._create_host_mounts("readWrite", host, False),
True)
# Test negative cases for is_valid
_is_valid(self._create_host_mounts("Inaccessible", host), False)
_is_valid(self._create_host_mounts("readWrite", host, True, False),
False)
_is_valid(self._create_host_mounts("readWrite", host, True, True,
False), False)
with mock.patch.object(self.vops, 'get_summary') as get_summary:
summary = mock.Mock(spec=object)
summary.accessible = False
get_summary.return_value = summary
_is_valid(self._create_host_mounts("readWrite", host, False),
False)
def test_get_dss_rp(self):
# build out props to be returned by 1st invoke_api call
datastore_prop = mock.Mock(spec=object)
datastore_prop.name = 'datastore'
datastore_prop.val = mock.Mock(spec=object)
datastore_prop.val.ManagedObjectReference = [mock.sentinel.ds1,
mock.sentinel.ds2]
compute_resource_prop = mock.Mock(spec=object)
compute_resource_prop.name = 'parent'
compute_resource_prop.val = mock.sentinel.compute_resource
elem = mock.Mock(spec=object)
elem.propSet = [datastore_prop, compute_resource_prop]
props = [elem]
# build out host_mounts to be returned by 2nd invoke_api call
host = mock.Mock(spec=object)
host.value = mock.sentinel.host
host_mounts = self._create_host_mounts("readWrite", host)
# build out resource_pool to be returned by 3rd invoke_api call
resource_pool = mock.sentinel.resource_pool
# set return values for each call of invoke_api
self.session.invoke_api.side_effect = [props,
host_mounts,
host_mounts,
resource_pool]
# invoke function and verify results
(dss_actual, rp_actual) = self.vops.get_dss_rp(host)
self.assertEqual([mock.sentinel.ds1, mock.sentinel.ds2], dss_actual)
self.assertEqual(resource_pool, rp_actual)
# invoke function with no valid datastore and verify exception raised
host_mounts = self._create_host_mounts("inaccessible", host)
self.session.invoke_api.side_effect = [props,
host_mounts,
host_mounts,
resource_pool]
self.assertRaises(error_util.VimException, self.vops.get_dss_rp, host)
def test_get_parent(self):
# Not recursive
child = mock.Mock(spec=object)
child._type = 'Parent'
ret = self.vops._get_parent(child, 'Parent')
self.assertEqual(ret, child)
# Recursive
parent = mock.Mock(spec=object)
parent._type = 'Parent'
child = mock.Mock(spec=object)
child._type = 'Child'
self.session.invoke_api.return_value = parent
ret = self.vops._get_parent(child, 'Parent')
self.assertEqual(ret, parent)
self.session.invoke_api.assert_called_with(vim_util,
'get_object_property',
self.session.vim, child,
'parent')
def test_get_dc(self):
# set up hierarchy of objects
dc = mock.Mock(spec=object)
dc._type = 'Datacenter'
o1 = mock.Mock(spec=object)
o1._type = 'mockType1'
o1.parent = dc
o2 = mock.Mock(spec=object)
o2._type = 'mockType2'
o2.parent = o1
# mock out invoke_api behaviour to fetch parent
def mock_invoke_api(vim_util, method, vim, the_object, arg):
return the_object.parent
self.session.invoke_api.side_effect = mock_invoke_api
ret = self.vops.get_dc(o2)
self.assertEqual(dc, ret)
def test_get_vmfolder(self):
self.session.invoke_api.return_value = mock.sentinel.ret
ret = self.vops.get_vmfolder(mock.sentinel.dc)
self.assertEqual(mock.sentinel.ret, ret)
self.session.invoke_api.assert_called_once_with(vim_util,
'get_object_property',
self.session.vim,
mock.sentinel.dc,
'vmFolder')
def test_create_folder_not_present(self):
"""Test create_folder when child not present."""
parent_folder = mock.sentinel.parent_folder
child_name = 'child_folder'
prop_val = mock.Mock(spec=object)
prop_val.ManagedObjectReference = []
child_folder = mock.sentinel.child_folder
self.session.invoke_api.side_effect = [prop_val, child_folder]
ret = self.vops.create_folder(parent_folder, child_name)
self.assertEqual(child_folder, ret)
expected_invoke_api = [mock.call(vim_util, 'get_object_property',
self.session.vim, parent_folder,
'childEntity'),
mock.call(self.session.vim, 'CreateFolder',
parent_folder, name=child_name)]
self.assertEqual(expected_invoke_api,
self.session.invoke_api.mock_calls)
def test_create_folder_already_present(self):
"""Test create_folder when child already present."""
parent_folder = mock.sentinel.parent_folder
child_name = 'child_folder'
prop_val = mock.Mock(spec=object)
child_entity_1 = mock.Mock(spec=object)
child_entity_1._type = 'Folder'
child_entity_1_name = 'SomeOtherName'
child_entity_2 = mock.Mock(spec=object)
child_entity_2._type = 'Folder'
child_entity_2_name = child_name
prop_val.ManagedObjectReference = [child_entity_1, child_entity_2]
self.session.invoke_api.side_effect = [prop_val, child_entity_1_name,
child_entity_2_name]
ret = self.vops.create_folder(parent_folder, child_name)
self.assertEqual(child_entity_2, ret)
expected_invoke_api = [mock.call(vim_util, 'get_object_property',
self.session.vim, parent_folder,
'childEntity'),
mock.call(vim_util, 'get_object_property',
self.session.vim, child_entity_1,
'name'),
mock.call(vim_util, 'get_object_property',
self.session.vim, child_entity_2,
'name')]
self.assertEqual(expected_invoke_api,
self.session.invoke_api.mock_calls)
def test_get_create_spec(self):
factory = self.session.vim.client.factory
factory.create.return_value = mock.Mock(spec=object)
name = mock.sentinel.name
size_kb = 0.5
disk_type = 'thin'
ds_name = mock.sentinel.ds_name
ret = self.vops._get_create_spec(name, size_kb, disk_type, ds_name)
self.assertEqual(name, ret.name)
self.assertEqual('[%s]' % ds_name, ret.files.vmPathName)
self.assertEqual(1, ret.deviceChange[1].device.capacityInKB)
expected = [mock.call.create('ns0:VirtualLsiLogicController'),
mock.call.create('ns0:VirtualDeviceConfigSpec'),
mock.call.create('ns0:VirtualDisk'),
mock.call.create('ns0:VirtualDiskFlatVer2BackingInfo'),
mock.call.create('ns0:VirtualDeviceConfigSpec'),
mock.call.create('ns0:VirtualMachineFileInfo'),
mock.call.create('ns0:VirtualMachineConfigSpec')]
factory.create.assert_has_calls(expected, any_order=True)
@mock.patch('cinder.volume.drivers.vmware.volumeops.VMwareVolumeOps.'
'_get_create_spec')
def test_create_backing(self, get_create_spec):
create_spec = mock.sentinel.create_spec
get_create_spec.return_value = create_spec
task = mock.sentinel.task
self.session.invoke_api.return_value = task
task_info = mock.Mock(spec=object)
task_info.result = mock.sentinel.result
self.session.wait_for_task.return_value = task_info
name = 'backing_name'
size_kb = mock.sentinel.size_kb
disk_type = mock.sentinel.disk_type
folder = mock.sentinel.folder
resource_pool = mock.sentinel.resource_pool
host = mock.sentinel.host
ds_name = mock.sentinel.ds_name
ret = self.vops.create_backing(name, size_kb, disk_type, folder,
resource_pool, host, ds_name)
self.assertEqual(mock.sentinel.result, ret)
get_create_spec.assert_called_once_with(name, size_kb, disk_type,
ds_name, None)
self.session.invoke_api.assert_called_once_with(self.session.vim,
'CreateVM_Task',
folder,
config=create_spec,
pool=resource_pool,
host=host)
self.session.wait_for_task.assert_called_once_with(task)
def test_get_datastore(self):
backing = mock.sentinel.backing
datastore = mock.Mock(spec=object)
datastore.ManagedObjectReference = [mock.sentinel.ds]
self.session.invoke_api.return_value = datastore
ret = self.vops.get_datastore(backing)
self.assertEqual(mock.sentinel.ds, ret)
self.session.invoke_api.assert_called_once_with(vim_util,
'get_object_property',
self.session.vim,
backing, 'datastore')
def test_get_summary(self):
datastore = mock.sentinel.datastore
summary = mock.sentinel.summary
self.session.invoke_api.return_value = summary
ret = self.vops.get_summary(datastore)
self.assertEqual(summary, ret)
self.session.invoke_api.assert_called_once_with(vim_util,
'get_object_property',
self.session.vim,
datastore,
'summary')
def test_get_relocate_spec(self):
factory = self.session.vim.client.factory
spec = mock.Mock(spec=object)
factory.create.return_value = spec
datastore = mock.sentinel.datastore
resource_pool = mock.sentinel.resource_pool
host = mock.sentinel.host
disk_move_type = mock.sentinel.disk_move_type
ret = self.vops._get_relocate_spec(datastore, resource_pool, host,
disk_move_type)
self.assertEqual(spec, ret)
self.assertEqual(datastore, ret.datastore)
self.assertEqual(resource_pool, ret.pool)
self.assertEqual(host, ret.host)
self.assertEqual(disk_move_type, ret.diskMoveType)
@mock.patch('cinder.volume.drivers.vmware.volumeops.VMwareVolumeOps.'
'_get_relocate_spec')
def test_relocate_backing(self, get_relocate_spec):
spec = mock.sentinel.relocate_spec
get_relocate_spec.return_value = spec
task = mock.sentinel.task
self.session.invoke_api.return_value = task
backing = mock.sentinel.backing
datastore = mock.sentinel.datastore
resource_pool = mock.sentinel.resource_pool
host = mock.sentinel.host
self.vops.relocate_backing(backing, datastore, resource_pool, host)
# Verify calls
disk_move_type = 'moveAllDiskBackingsAndAllowSharing'
get_relocate_spec.assert_called_once_with(datastore, resource_pool,
host, disk_move_type)
self.session.invoke_api.assert_called_once_with(self.session.vim,
'RelocateVM_Task',
backing,
spec=spec)
self.session.wait_for_task.assert_called_once_with(task)
def test_move_backing_to_folder(self):
task = mock.sentinel.task
self.session.invoke_api.return_value = task
backing = mock.sentinel.backing
folder = mock.sentinel.folder
self.vops.move_backing_to_folder(backing, folder)
# Verify calls
self.session.invoke_api.assert_called_once_with(self.session.vim,
'MoveIntoFolder_Task',
folder,
list=[backing])
self.session.wait_for_task.assert_called_once_with(task)
def test_create_snapshot_operation(self):
task = mock.sentinel.task
self.session.invoke_api.return_value = task
task_info = mock.Mock(spec=object)
task_info.result = mock.sentinel.result
self.session.wait_for_task.return_value = task_info
backing = mock.sentinel.backing
name = mock.sentinel.name
desc = mock.sentinel.description
quiesce = True
ret = self.vops.create_snapshot(backing, name, desc, quiesce)
self.assertEqual(mock.sentinel.result, ret)
self.session.invoke_api.assert_called_once_with(self.session.vim,
'CreateSnapshot_Task',
backing, name=name,
description=desc,
memory=False,
quiesce=quiesce)
self.session.wait_for_task.assert_called_once_with(task)
def test_get_snapshot_from_tree(self):
volops = volumeops.VMwareVolumeOps
name = mock.sentinel.name
# Test snapshot == 'None'
ret = volops._get_snapshot_from_tree(name, None)
self.assertIsNone(ret)
# Test root == snapshot
snapshot = mock.sentinel.snapshot
node = mock.Mock(spec=object)
node.name = name
node.snapshot = snapshot
ret = volops._get_snapshot_from_tree(name, node)
self.assertEqual(ret, snapshot)
# Test root.childSnapshotList == None
root = mock.Mock(spec=object)
root.name = 'root'
del root.childSnapshotList
ret = volops._get_snapshot_from_tree(name, root)
self.assertIsNone(ret)
# Test root.child == snapshot
root.childSnapshotList = [node]
ret = volops._get_snapshot_from_tree(name, root)
self.assertEqual(ret, snapshot)
def test_get_snapshot(self):
# build out the root snapshot tree
snapshot_name = mock.sentinel.snapshot_name
snapshot = mock.sentinel.snapshot
root = mock.Mock(spec=object)
root.name = 'root'
node = mock.Mock(spec=object)
node.name = snapshot_name
node.snapshot = snapshot
root.childSnapshotList = [node]
# Test rootSnapshotList is not None
snapshot_tree = mock.Mock(spec=object)
snapshot_tree.rootSnapshotList = [root]
self.session.invoke_api.return_value = snapshot_tree
backing = mock.sentinel.backing
ret = self.vops.get_snapshot(backing, snapshot_name)
self.assertEqual(snapshot, ret)
self.session.invoke_api.assert_called_with(vim_util,
'get_object_property',
self.session.vim,
backing,
'snapshot')
# Test rootSnapshotList == None
snapshot_tree.rootSnapshotList = None
ret = self.vops.get_snapshot(backing, snapshot_name)
self.assertIsNone(ret)
self.session.invoke_api.assert_called_with(vim_util,
'get_object_property',
self.session.vim,
backing,
'snapshot')
def test_delete_snapshot(self):
backing = mock.sentinel.backing
snapshot_name = mock.sentinel.snapshot_name
# Test snapshot is None
with mock.patch.object(self.vops, 'get_snapshot') as get_snapshot:
get_snapshot.return_value = None
self.vops.delete_snapshot(backing, snapshot_name)
get_snapshot.assert_called_once_with(backing, snapshot_name)
# Test snapshot is not None
snapshot = mock.sentinel.snapshot
task = mock.sentinel.task
invoke_api = self.session.invoke_api
invoke_api.return_value = task
with mock.patch.object(self.vops, 'get_snapshot') as get_snapshot:
get_snapshot.return_value = snapshot
self.vops.delete_snapshot(backing, snapshot_name)
get_snapshot.assert_called_with(backing, snapshot_name)
invoke_api.assert_called_once_with(self.session.vim,
'RemoveSnapshot_Task',
snapshot, removeChildren=False)
self.session.wait_for_task.assert_called_once_with(task)
def test_get_folder(self):
folder = mock.sentinel.folder
backing = mock.sentinel.backing
with mock.patch.object(self.vops, '_get_parent') as get_parent:
get_parent.return_value = folder
ret = self.vops._get_folder(backing)
self.assertEqual(folder, ret)
get_parent.assert_called_once_with(backing, 'Folder')
def test_get_clone_spec(self):
factory = self.session.vim.client.factory
spec = mock.Mock(spec=object)
factory.create.return_value = spec
datastore = mock.sentinel.datastore
disk_move_type = mock.sentinel.disk_move_type
snapshot = mock.sentinel.snapshot
ret = self.vops._get_clone_spec(datastore, disk_move_type, snapshot)
self.assertEqual(spec, ret)
self.assertEqual(snapshot, ret.snapshot)
self.assertEqual(spec, ret.location)
self.assertEqual(datastore, ret.location.datastore)
self.assertEqual(disk_move_type, ret.location.diskMoveType)
expected_calls = [mock.call('ns0:VirtualMachineRelocateSpec'),
mock.call('ns0:VirtualMachineCloneSpec')]
factory.create.assert_has_calls(expected_calls, any_order=True)
@mock.patch('cinder.volume.drivers.vmware.volumeops.VMwareVolumeOps.'
'_get_clone_spec')
def test_clone_backing(self, get_clone_spec):
folder = mock.Mock(name='folder', spec=object)
folder._type = 'Folder'
task = mock.sentinel.task
self.session.invoke_api.side_effect = [folder, task, folder, task]
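        # side_effect feeds the two clone_backing() calls exercised below: each
        # call first looks up the backing's parent folder, then runs CloneVM_Task.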
task_info = mock.Mock(spec=object)
task_info.result = mock.sentinel.new_backing
self.session.wait_for_task.return_value = task_info
clone_spec = mock.sentinel.clone_spec
get_clone_spec.return_value = clone_spec
# Test non-linked clone_backing
name = mock.sentinel.name
backing = mock.Mock(spec=object)
backing._type = 'VirtualMachine'
snapshot = mock.sentinel.snapshot
clone_type = "anything-other-than-linked"
        datastore = mock.sentinel.datastore
ret = self.vops.clone_backing(name, backing, snapshot, clone_type,
datastore)
# verify calls
self.assertEqual(mock.sentinel.new_backing, ret)
disk_move_type = 'moveAllDiskBackingsAndDisallowSharing'
get_clone_spec.assert_called_with(datastore, disk_move_type, snapshot)
expected = [mock.call(vim_util, 'get_object_property',
self.session.vim, backing, 'parent'),
mock.call(self.session.vim, 'CloneVM_Task', backing,
folder=folder, name=name, spec=clone_spec)]
self.assertEqual(expected, self.session.invoke_api.mock_calls)
# Test linked clone_backing
clone_type = volumeops.LINKED_CLONE_TYPE
ret = self.vops.clone_backing(name, backing, snapshot, clone_type,
datastore)
# verify calls
self.assertEqual(mock.sentinel.new_backing, ret)
disk_move_type = 'createNewChildDiskBacking'
get_clone_spec.assert_called_with(datastore, disk_move_type, snapshot)
expected = [mock.call(vim_util, 'get_object_property',
self.session.vim, backing, 'parent'),
mock.call(self.session.vim, 'CloneVM_Task', backing,
folder=folder, name=name, spec=clone_spec),
mock.call(vim_util, 'get_object_property',
self.session.vim, backing, 'parent'),
mock.call(self.session.vim, 'CloneVM_Task', backing,
folder=folder, name=name, spec=clone_spec)]
self.assertEqual(expected, self.session.invoke_api.mock_calls)
def test_delete_file(self):
file_mgr = mock.sentinel.file_manager
self.session.vim.service_content.fileManager = file_mgr
task = mock.sentinel.task
invoke_api = self.session.invoke_api
invoke_api.return_value = task
# Test delete file
file_path = mock.sentinel.file_path
datacenter = mock.sentinel.datacenter
self.vops.delete_file(file_path, datacenter)
# verify calls
invoke_api.assert_called_once_with(self.session.vim,
'DeleteDatastoreFile_Task',
file_mgr,
name=file_path,
datacenter=datacenter)
self.session.wait_for_task.assert_called_once_with(task)
def test_get_path_name(self):
path = mock.Mock(spec=object)
path_name = mock.sentinel.vm_path_name
path.vmPathName = path_name
invoke_api = self.session.invoke_api
invoke_api.return_value = path
backing = mock.sentinel.backing
ret = self.vops.get_path_name(backing)
self.assertEqual(path_name, ret)
invoke_api.assert_called_once_with(vim_util, 'get_object_property',
self.session.vim, backing,
'config.files')
def test_get_entity_name(self):
entity_name = mock.sentinel.entity_name
invoke_api = self.session.invoke_api
invoke_api.return_value = entity_name
entity = mock.sentinel.entity
ret = self.vops.get_entity_name(entity)
self.assertEqual(entity_name, ret)
invoke_api.assert_called_once_with(vim_util, 'get_object_property',
self.session.vim, entity, 'name')
def test_get_vmdk_path(self):
# Setup hardware_devices for test
device = mock.Mock()
device.__class__.__name__ = 'VirtualDisk'
backing = mock.Mock()
backing.__class__.__name__ = 'VirtualDiskFlatVer2BackingInfo'
backing.fileName = mock.sentinel.vmdk_path
device.backing = backing
invoke_api = self.session.invoke_api
invoke_api.return_value = [device]
# Test get_vmdk_path
ret = self.vops.get_vmdk_path(backing)
self.assertEqual(mock.sentinel.vmdk_path, ret)
invoke_api.assert_called_once_with(vim_util, 'get_object_property',
self.session.vim, backing,
'config.hardware.device')
def test_copy_vmdk_file(self):
task = mock.sentinel.task
invoke_api = self.session.invoke_api
invoke_api.return_value = task
disk_mgr = self.session.vim.service_content.virtualDiskManager
dc_ref = self.session.dc_ref
src_vmdk_file_path = self.session.src
dest_vmdk_file_path = self.session.dest
self.vops.copy_vmdk_file(dc_ref, src_vmdk_file_path,
dest_vmdk_file_path)
invoke_api.assert_called_once_with(self.session.vim,
'CopyVirtualDisk_Task',
disk_mgr,
sourceName=src_vmdk_file_path,
sourceDatacenter=dc_ref,
destName=dest_vmdk_file_path,
destDatacenter=dc_ref,
force=True)
self.session.wait_for_task.assert_called_once_with(task)
def test_delete_vmdk_file(self):
task = mock.sentinel.task
invoke_api = self.session.invoke_api
invoke_api.return_value = task
disk_mgr = self.session.vim.service_content.virtualDiskManager
dc_ref = self.session.dc_ref
vmdk_file_path = self.session.vmdk_file
self.vops.delete_vmdk_file(vmdk_file_path, dc_ref)
invoke_api.assert_called_once_with(self.session.vim,
'DeleteVirtualDisk_Task',
disk_mgr,
name=vmdk_file_path,
datacenter=dc_ref)
self.session.wait_for_task.assert_called_once_with(task)
def test_extend_virtual_disk(self):
"""Test volumeops.extend_virtual_disk."""
task = mock.sentinel.task
invoke_api = self.session.invoke_api
invoke_api.return_value = task
disk_mgr = self.session.vim.service_content.virtualDiskManager
fake_size = 5
fake_size_in_kb = fake_size * units.MiB
fake_name = 'fake_volume_0000000001'
fake_dc = mock.sentinel.datacenter
self.vops.extend_virtual_disk(fake_size,
fake_name, fake_dc)
invoke_api.assert_called_once_with(self.session.vim,
"ExtendVirtualDisk_Task",
disk_mgr,
name=fake_name,
datacenter=fake_dc,
newCapacityKb=fake_size_in_kb,
eagerZero=False)
self.session.wait_for_task.assert_called_once_with(task)
| apache-2.0 | -2,167,145,331,251,935,200 | 46.890798 | 78 | 0.546796 | false |
nhuntwalker/astroML | astroML/tests/test_filters.py | 3 | 1036 | import numpy as np
from numpy.testing import assert_allclose
from astroML.filters import savitzky_golay, wiener_filter
def test_savitzky_golay():
y = np.zeros(100)
y[::2] = 1
f = savitzky_golay(y, window_size=3, order=1)
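    # An order-1 fit over a 3-point window acts as a moving average: windows centred
    # on a 1 look like [0, 1, 0] (mean 1/3) and on a 0 like [1, 0, 1] (mean 2/3),
    # which is exactly (2 - y) / 3.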
assert_allclose(f, (2 - y) / 3.)
def test_savitzky_golay_fft():
y = np.random.normal(size=100)
for width in [3, 5]:
for order in range(width - 1):
f1 = savitzky_golay(y, width, order, use_fft=False)
f2 = savitzky_golay(y, width, order, use_fft=True)
assert_allclose(f1, f2)
def test_wiener_filter_simple():
t = np.linspace(0, 1, 256)
h = np.zeros_like(t)
h[::2] = 1000
s = wiener_filter(t, h)
assert_allclose(s, np.mean(h))
def test_wiener_filter_spike():
np.random.seed(0)
N = 2048
dt = 0.05
t = dt * np.arange(N)
h = np.exp(-0.5 * ((t - 20.) / 1.0) ** 2) + 10
hN = h + np.random.normal(0, 0.05, size=h.shape)
h_smooth = wiener_filter(t, hN)
assert_allclose(h, h_smooth, atol=0.03)
| bsd-2-clause | -3,242,995,103,012,016,000 | 24.268293 | 63 | 0.586873 | false |
mqingyn/torngas | torngas/mixins/miiddleware.py | 5 | 1855 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
class MiddlewareHandlerMixin(object):
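    # Mixin that routes a handler's request/render/response/exception/finish
    # lifecycle through the application's middleware factory (middleware_fac).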
_url_kwargs = {}
def __init__(self, application, request, **kwargs):
if kwargs:
self._url_kwargs.update(kwargs)
kwargs.clear()
super(MiddlewareHandlerMixin, self).__init__(application, request, **kwargs)
def prepare(self):
res = self.application.middleware_fac.run_request(self)
self.on_prepare()
return res
def on_prepare(self):
pass
def render_string(self, template_name, **kwargs):
self.application.middleware_fac.run_render(self, template_name, **kwargs)
return super(MiddlewareHandlerMixin, self).render_string(template_name, **kwargs)
def finish(self, chunk=None):
        # write() may have been called several times before finish(), so chunk here may be None.
        # The real body lives in self._write_buffer, accumulated by those earlier write() calls.
        # Write any remaining chunk into _write_buffer as well, then clear it.
if chunk:
self.write(chunk)
chunk = None
self.application.middleware_fac.run_response(self, self._write_buffer)
super(MiddlewareHandlerMixin, self).finish(chunk)
def write(self, chunk, status=None):
if status:
self.set_status(status)
super(MiddlewareHandlerMixin, self).write(chunk)
def log_exception(self, typ, value, tb):
"""重写404请求的异常处理
"""
if not self.application.middleware_fac.run_exception(self, typ, value, tb):
super(MiddlewareHandlerMixin, self).log_exception(typ, value, tb)
def on_finish(self):
super(MiddlewareHandlerMixin, self).on_finish()
self.application.middleware_fac.run_endcall(self)
self.complete_finish()
def complete_finish(self):
pass
| bsd-3-clause | 6,188,107,260,915,439,000 | 32.037736 | 89 | 0.639063 | false |
Multimac/ansible-modules-extras | cloud/cloudstack/cs_vmsnapshot.py | 24 | 9084 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# (c) 2015, René Moser <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: cs_vmsnapshot
short_description: Manages VM snapshots on Apache CloudStack based clouds.
description:
- Create, remove and revert VM from snapshots.
version_added: '2.0'
author: "René Moser (@resmo)"
options:
name:
description:
- Unique Name of the snapshot. In CloudStack terms display name.
required: true
aliases: ['display_name']
vm:
description:
- Name of the virtual machine.
required: true
description:
description:
- Description of the snapshot.
required: false
default: null
snapshot_memory:
description:
- Snapshot memory if set to true.
required: false
default: false
zone:
description:
        - Name of the zone the VM is in. If not set, default zone is used.
required: false
default: null
project:
description:
- Name of the project the VM is assigned to.
required: false
default: null
state:
description:
- State of the snapshot.
required: false
default: 'present'
choices: [ 'present', 'absent', 'revert' ]
domain:
description:
- Domain the VM snapshot is related to.
required: false
default: null
account:
description:
- Account the VM snapshot is related to.
required: false
default: null
poll_async:
description:
- Poll async jobs until job has finished.
required: false
default: true
extends_documentation_fragment: cloudstack
'''
EXAMPLES = '''
# Create a VM snapshot of disk and memory before an upgrade
- local_action:
module: cs_vmsnapshot
name: Snapshot before upgrade
vm: web-01
snapshot_memory: yes
# Revert a VM to a snapshot after a failed upgrade
- local_action:
module: cs_vmsnapshot
name: Snapshot before upgrade
vm: web-01
state: revert
# Remove a VM snapshot after successful upgrade
- local_action:
module: cs_vmsnapshot
name: Snapshot before upgrade
vm: web-01
state: absent
'''
RETURN = '''
---
id:
description: UUID of the snapshot.
returned: success
type: string
sample: a6f7a5fc-43f8-11e5-a151-feff819cdc9f
name:
description: Name of the snapshot.
returned: success
type: string
sample: snapshot before update
display_name:
description: Display name of the snapshot.
returned: success
type: string
sample: snapshot before update
created:
description: date of the snapshot.
returned: success
type: string
sample: 2015-03-29T14:57:06+0200
current:
description: true if snapshot is current
returned: success
type: boolean
sample: True
state:
description: state of the vm snapshot
returned: success
type: string
sample: Allocated
type:
description: type of vm snapshot
returned: success
type: string
sample: DiskAndMemory
description:
description: description of vm snapshot
returned: success
type: string
sample: snapshot brought to you by Ansible
domain:
  description: Domain the vm snapshot is related to.
returned: success
type: string
sample: example domain
account:
description: Account the vm snapshot is related to.
returned: success
type: string
sample: example account
project:
description: Name of project the vm snapshot is related to.
returned: success
type: string
sample: Production
'''
try:
from cs import CloudStack, CloudStackException, read_config
has_lib_cs = True
except ImportError:
has_lib_cs = False
# import cloudstack common
from ansible.module_utils.cloudstack import *
class AnsibleCloudStackVmSnapshot(AnsibleCloudStack):
def __init__(self, module):
super(AnsibleCloudStackVmSnapshot, self).__init__(module)
self.returns = {
'type': 'type',
'current': 'current',
}
def get_snapshot(self):
args = {}
args['virtualmachineid'] = self.get_vm('id')
args['account'] = self.get_account('name')
args['domainid'] = self.get_domain('id')
args['projectid'] = self.get_project('id')
args['name'] = self.module.params.get('name')
snapshots = self.cs.listVMSnapshot(**args)
if snapshots:
return snapshots['vmSnapshot'][0]
return None
def create_snapshot(self):
snapshot = self.get_snapshot()
if not snapshot:
self.result['changed'] = True
args = {}
args['virtualmachineid'] = self.get_vm('id')
args['name'] = self.module.params.get('name')
args['description'] = self.module.params.get('description')
args['snapshotmemory'] = self.module.params.get('snapshot_memory')
if not self.module.check_mode:
res = self.cs.createVMSnapshot(**args)
if 'errortext' in res:
self.module.fail_json(msg="Failed: '%s'" % res['errortext'])
poll_async = self.module.params.get('poll_async')
if res and poll_async:
snapshot = self._poll_job(res, 'vmsnapshot')
return snapshot
def remove_snapshot(self):
snapshot = self.get_snapshot()
if snapshot:
self.result['changed'] = True
if not self.module.check_mode:
res = self.cs.deleteVMSnapshot(vmsnapshotid=snapshot['id'])
if 'errortext' in res:
self.module.fail_json(msg="Failed: '%s'" % res['errortext'])
poll_async = self.module.params.get('poll_async')
if res and poll_async:
res = self._poll_job(res, 'vmsnapshot')
return snapshot
def revert_vm_to_snapshot(self):
snapshot = self.get_snapshot()
if snapshot:
self.result['changed'] = True
if snapshot['state'] != "Ready":
self.module.fail_json(msg="snapshot state is '%s', not ready, could not revert VM" % snapshot['state'])
if not self.module.check_mode:
res = self.cs.revertToVMSnapshot(vmsnapshotid=snapshot['id'])
poll_async = self.module.params.get('poll_async')
if res and poll_async:
res = self._poll_job(res, 'vmsnapshot')
return snapshot
self.module.fail_json(msg="snapshot not found, could not revert VM")
def main():
module = AnsibleModule(
argument_spec = dict(
name = dict(required=True, aliases=['display_name']),
vm = dict(required=True),
description = dict(default=None),
zone = dict(default=None),
snapshot_memory = dict(choices=BOOLEANS, default=False),
state = dict(choices=['present', 'absent', 'revert'], default='present'),
domain = dict(default=None),
account = dict(default=None),
project = dict(default=None),
poll_async = dict(choices=BOOLEANS, default=True),
api_key = dict(default=None),
api_secret = dict(default=None, no_log=True),
api_url = dict(default=None),
api_http_method = dict(choices=['get', 'post'], default='get'),
api_timeout = dict(type='int', default=10),
api_region = dict(default='cloudstack'),
),
required_together = (
['api_key', 'api_secret', 'api_url'],
),
supports_check_mode=True
)
if not has_lib_cs:
module.fail_json(msg="python library cs required: pip install cs")
try:
acs_vmsnapshot = AnsibleCloudStackVmSnapshot(module)
state = module.params.get('state')
if state in ['revert']:
snapshot = acs_vmsnapshot.revert_vm_to_snapshot()
elif state in ['absent']:
snapshot = acs_vmsnapshot.remove_snapshot()
else:
snapshot = acs_vmsnapshot.create_snapshot()
result = acs_vmsnapshot.get_result(snapshot)
except CloudStackException, e:
module.fail_json(msg='CloudStackException: %s' % str(e))
module.exit_json(**result)
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
| gpl-3.0 | -2,134,727,779,371,979,800 | 28.296774 | 119 | 0.619247 | false |
daicang/Leetcode-solutions | 1004-max-consecutive-ones-iii.py | 1 | 1970 | class Solution(object):
def longestOnes(self, A, K):
"""
:type A: List[int]
:type K: int
:rtype: int
"""
# DP, TLE
#
# length = len(A)
# dp = []
# for _ in range(K+1):
# dp.append([0]*length)
# for step in range(K+1):
# for last in range(length):
# if last == 0:
# if step == 0:
# dp[0][0] = A[0]
# else:
# dp[step][0] = 1
# elif step == 0:
# if A[last] == 1:
# dp[0][last] = dp[0][last-1]+1
# else:
# dp[0][last] = 0
# elif step > last+1:
# dp[step][last] = dp[last+1][last]
# else:
# if A[last] == 1:
# dp[step][last] = dp[step][last-1] + 1
# else:
# dp[step][last] = dp[step-1][last-1]+1
# print(dp)
# return max(dp[K])
# Sliding window
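        # Keep a window [lidx, ridx] containing at most K zeros (each treated as
        # flipped to 1). While budget (k_left) remains, a new zero just spends it.
        # Once the budget is exhausted, a new zero pushes lidx just past the first
        # zero inside the window, freeing one flip for the element at ridx.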
lidx = 0
maxl = 0
k_left = K
length = 0
for ridx, rval in enumerate(A):
if rval == 1:
length += 1
maxl = max(maxl, length)
continue
# rval == 0
if k_left > 0:
k_left -= 1
length += 1
maxl = max(maxl, length)
else:
if rval == 1:
length += 1
maxl = max(maxl, length)
else:
while A[lidx] == 1:
lidx += 1
assert A[lidx] == 0
lidx += 1
length = ridx - lidx+1
maxl = max(maxl, length)
return maxl
s = Solution()
print(s.longestOnes([0,0,0,1], 4))
| mit | -4,802,826,370,369,503,000 | 24.921053 | 63 | 0.316751 | false |
csdms/dakota | dakotathon/utils.py | 1 | 6305 | #! /usr/bin/env python
"""Helper functions for processing Dakota parameter and results files."""
import os
import subprocess
import re
import yaml
import numpy as np
import collections
def is_dakota_installed():
"""Check whether Dakota is installed and in the execution path.
Returns
-------
bool
True if Dakota is callable.
"""
try:
subprocess.check_call(["dakota", "--version"])
except (subprocess.CalledProcessError, OSError):
return False
else:
return True
def which(prog, env=None):
"""Call the OS `which` function.
Parameters
----------
prog : str
The command name.
env : str, optional
An environment variable.
Returns
-------
The path to the command, or None if the command is not found.
"""
prog = os.environ.get(env or prog.upper(), prog)
try:
prog = subprocess.check_output(
["/usr/bin/which", prog], stderr=open("/dev/null", "w")
).strip()
except subprocess.CalledProcessError:
return None
else:
return prog
def which_dakota():
"""Locate the Dakota executable.
Returns
-------
The path to the Dakota executable, or None if Dakota is not found.
"""
return which("dakota")
def add_dyld_library_path():
"""Add the `DYLD_LIBRARY_PATH` environment variable for Dakota."""
try:
dakota_exe = which_dakota()
dakota_dir = os.path.dirname(os.path.dirname(dakota_exe))
os.environ["DYLD_LIBRARY_PATH"] = (
os.path.join(dakota_dir, "bin")
+ os.path.pathsep
+ os.path.join(dakota_dir, "lib")
)
except (AttributeError, TypeError):
return None
def get_response_descriptors(params_file):
"""Extract response descriptors from a Dakota parameters file.
Parameters
----------
params_file : str
The path to a Dakota parameters file.
Returns
-------
list
A list of response descriptors for the Dakota experiment.
"""
labels = []
try:
with open(params_file, "r") as fp:
for line in fp:
if re.search("ASV_", line):
labels.append("".join(re.findall(":(\S+)", line)))
except IOError:
return None
else:
return labels
def get_attributes(obj):
"""Get and format the attributes of an object.
Parameters
----------
section
An object that has attributes.
Returns
-------
dict
The object's attributes.
"""
attrs = obj.__dict__.copy()
attrs_fmtd = {}
for key in attrs:
key_fmtd = key.lstrip("_")
attrs_fmtd[key_fmtd] = attrs[key]
return attrs_fmtd
def get_configuration_file(params_file):
"""Extract the configuration filepath from a Dakota parameters file.
Parameters
----------
params_file : str
The path to a Dakota parameters file.
Returns
-------
str
The path to the configuration file for the Dakota experiment.
"""
with open(params_file, "r") as fp:
for line in fp:
if re.search("AC_1", line):
return line.split("AC_1")[0].strip()
def deserialize(config_file):
"""Load settings from a YAML configuration file.
Returns
-------
dict
Configuration settings in a dict.
"""
with open(config_file, "r") as fp:
return yaml.safe_load(fp)
def compute_statistic(statistic, array):
"""Compute the statistic used in a Dakota response function.
Parameters
----------
statistic : str
A string with the name of the statistic to compute ('mean',
'median', etc.).
array : array_like
An array data structure, such as a numpy array.
Returns
-------
float
The value of the computed statistic.
"""
return np.__getattribute__(statistic)(array)
def write_results(results_file, values, labels):
"""Write a Dakota results file from a set of input values.
Parameters
----------
results_file : str
The path to a Dakota results file.
values : array_like
A list or array of numeric values.
labels : str
A list of labels to attach to the values.
"""
arr_values = np.asarray(values)
arr_labels = np.asarray(labels)
results = np.column_stack((arr_values, arr_labels))
np.savetxt(results_file, results, delimiter="\t", fmt="%s")
def to_iterable(x):
"""Get an iterable version of an input.
Parameters
----------
x
Anything.
Returns
-------
If the input isn't iterable, or is a string, then a tuple; else,
the input.
Notes
-----
Courtesy http://stackoverflow.com/a/6711233/1563298
"""
if isinstance(x, collections.Iterable) and not isinstance(x, str):
return x
else:
return (x,)
def configure_parameters(params):
"""Preprocess Dakota parameters prior to committing to a config file.
Parameters
----------
params : dict
Configuration parameters for a Dakota experiment that map to the
items in the Dakota configuration file, **dakota.yaml**.
Returns
-------
(dict, dict)
An updated dict of Dakota configuration parameters, and a dict
of substitutions used to create the Dakota template ("dtmpl")
file.
"""
try:
params["component"]
except KeyError:
try:
params["plugin"]
except KeyError:
params["component"] = params["plugin"] = ""
else:
params["analysis_driver"] = "dakota_run_plugin"
params["component"] = ""
else:
params["analysis_driver"] = "dakota_run_component"
params["plugin"] = ""
to_check = [
"descriptors",
"response_descriptors",
"response_statistics",
"auxiliary_files",
]
for item in to_check:
try:
if isinstance(params[item], str):
params[item] = [params[item]]
except KeyError:
pass
subs = {}
for item in params["descriptors"]:
subs[item] = "{" + item + "}"
try:
subs["run_duration"] = params["run_duration"]
except KeyError:
pass
return params, subs
| mit | -1,859,121,270,276,616,400 | 21.598566 | 73 | 0.580492 | false |
Keisuke69/libcloud | libcloud/httplib_ssl.py | 1 | 5693 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Subclass for httplib.HTTPSConnection with optional certificate name
verification, depending on libcloud.security settings.
"""
import httplib
import os
import re
import socket
import ssl
import warnings
import libcloud.security
class LibcloudHTTPSConnection(httplib.HTTPSConnection):
"""LibcloudHTTPSConnection
Subclass of HTTPSConnection which verifies certificate names
if and only if CA certificates are available.
"""
verify = False # does not verify
ca_cert = None # no default CA Certificate
def __init__(self, *args, **kwargs):
"""Constructor
"""
self._setup_verify()
httplib.HTTPSConnection.__init__(self, *args, **kwargs)
def _setup_verify(self):
"""Setup Verify SSL or not
Reads security module's VERIFY_SSL_CERT and toggles whether
the class overrides the connect() class method or runs the
inherited httplib.HTTPSConnection connect()
"""
self.verify = libcloud.security.VERIFY_SSL_CERT
self.strict = libcloud.security.VERIFY_SSL_CERT_STRICT
if self.verify:
self._setup_ca_cert()
else:
warnings.warn(libcloud.security.VERIFY_SSL_DISABLED_MSG)
def _setup_ca_cert(self):
"""Setup CA Certs
Search in CA_CERTS_PATH for valid candidates and
return first match. Otherwise, complain about certs
not being available.
"""
if not self.verify:
return
ca_certs_available = [cert
for cert in libcloud.security.CA_CERTS_PATH
if os.path.exists(cert)]
if ca_certs_available:
# use first available certificate
self.ca_cert = ca_certs_available[0]
else:
if self.strict:
raise RuntimeError(libcloud.security.CA_CERTS_UNAVAILABLE_ERROR_MSG)
else:
# no certificates found; toggle verify to False
warnings.warn(libcloud.security.CA_CERTS_UNAVAILABLE_WARNING_MSG)
self.ca_cert = None
self.verify = False
def connect(self):
"""Connect
Checks if verification is toggled; if not, just call
httplib.HTTPSConnection's connect
"""
if not self.verify:
return httplib.HTTPSConnection.connect(self)
# otherwise, create a connection and verify the hostname
# use socket.create_connection (in 2.6+) if possible
if getattr(socket, 'create_connection', None):
sock = socket.create_connection((self.host, self.port),
self.timeout)
else:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((self.host, self.port))
self.sock = ssl.wrap_socket(sock,
self.key_file,
self.cert_file,
cert_reqs=ssl.CERT_REQUIRED,
ca_certs=self.ca_cert,
ssl_version=ssl.PROTOCOL_TLSv1)
cert = self.sock.getpeercert()
if not self._verify_hostname(self.host, cert):
raise ssl.SSLError('Failed to verify hostname')
def _verify_hostname(self, hostname, cert):
"""Verify hostname against peer cert
Check both commonName and entries in subjectAltName, using a
rudimentary glob to dns regex check to find matches
"""
common_name = self._get_common_name(cert)
alt_names = self._get_subject_alt_names(cert)
        # replace * with one or more alphanumeric characters
# replace . with literal .
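        # e.g. the certificate name "*.example.com" becomes the regex
        # "[0-9A-Za-z]+\.example\.com"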
valid_patterns = [
re.compile(
pattern.replace(
r".", r"\."
).replace(
r"*", r"[0-9A-Za-z]+"
)
)
for pattern
in (set(common_name) | set(alt_names))
]
return any(
pattern.search(hostname)
for pattern in valid_patterns
)
def _get_subject_alt_names(self, cert):
"""Get SubjectAltNames
Retrieve 'subjectAltName' attributes from cert data structure
"""
if 'subjectAltName' not in cert:
values = []
else:
values = [value
for field, value in cert['subjectAltName']
if field == 'DNS']
return values
def _get_common_name(self, cert):
"""Get Common Name
Retrieve 'commonName' attribute from cert data structure
"""
if 'subject' not in cert:
return None
values = [value[0][1]
for value in cert['subject']
if value[0][0] == 'commonName']
return values
| apache-2.0 | 7,246,060,071,857,471,000 | 34.360248 | 84 | 0.58651 | false |
q14035/pimouse_ros | scripts/motors1.py | 1 | 1702 | #!/usr/bin/env python
#encoding: utf8
import sys, rospy, math
from pimouse_ros.msg import MotorFreqs
from geometry_msgs.msg import Twist
class Motor():
def __init__(self):
if not self.set_power(True): sys.exit(1)
rospy.on_shutdown(self.set_power)
self.sub_raw = rospy.Subscriber('motor_raw', MotorFreqs, self.callback_raw_freq)
self.sub_cmd_vel = rospy.Subscriber('cmd_vel', Twist, self.callback_cmd_vel)
self.last_time = rospy.Time.now()
self.using_cmd_vel = False
def set_power(self, onoff = False):
en = "/dev/rtmotoren0"
try:
with open(en, 'w') as f:
f.write("1\n" if onoff else "0\n")
self.is_on = onoff
return True
except:
rospy.logerr("cannot write to " + en)
return False
def set_raw_freq(self, left_hz, right_hz):
if not self.is_on:
rospy.logerr("not enpowered")
return
try:
with open("/dev/rtmotor_raw_l0", 'w') as lf, open("/dev/rtmotor_raw_r0", 'w') as rf:
lf.write(str(int(round(left_hz))) + "\n")
rf.write(str(int(round(right_hz))) + "\n")
except:
rospy.logerr("cannot write to rtmotor_raw_*")
def callback_raw_freq(self, message):
self.set_raw_freq(message.left_hz, message.right_hz)
def callback_cmd_vel(self, message):
forward_hz = 80000.0*message.linear.x/(9*math.pi)
rot_hz = 400.0*message.angular.z/math.pi
self.set_raw_freq(forward_hz-rot_hz, forward_hz+rot_hz)
self.using_cmd_vel = True
self.last_time = rospy.Time.now()
if __name__ == '__main__':
rospy.init_node('motors')
m = Motor()
rate = rospy.Rate(10)
while not rospy.is_shutdown():
if m.using_cmd_vel and rospy.Time.now().to_sec() - m.last_time.to_sec() >= 1.0:
m.set_raw_freq(0, 0)
m.using_cmd_vel = False
rate.sleep()
| gpl-3.0 | 7,478,278,535,235,567,000 | 27.366667 | 87 | 0.658637 | false |
Stanford-Online/edx-platform | openedx/core/djangoapps/credentials/management/commands/tests/test_notify_credentials.py | 9 | 5400 | """
Tests the ``notify_credentials`` management command.
"""
from __future__ import absolute_import, unicode_literals
from datetime import datetime
import mock
from django.core.management import call_command
from django.core.management.base import CommandError
from django.test import TestCase
from freezegun import freeze_time
from lms.djangoapps.certificates.tests.factories import GeneratedCertificateFactory
from lms.djangoapps.grades.models import PersistentCourseGrade
from openedx.core.djangolib.testing.utils import skip_unless_lms
from student.tests.factories import UserFactory
from ..notify_credentials import Command
COMMAND_MODULE = 'openedx.core.djangoapps.credentials.management.commands.notify_credentials'
@skip_unless_lms
class TestNotifyCredentials(TestCase):
"""
Tests the ``notify_credentials`` management command.
"""
def setUp(self):
super(TestNotifyCredentials, self).setUp()
self.user = UserFactory.create()
with freeze_time(datetime(2017, 1, 1)):
self.cert1 = GeneratedCertificateFactory(user=self.user, course_id='course-v1:edX+Test+1')
with freeze_time(datetime(2017, 2, 1)):
self.cert2 = GeneratedCertificateFactory(user=self.user, course_id='course-v1:edX+Test+2')
with freeze_time(datetime(2017, 3, 1)):
self.cert3 = GeneratedCertificateFactory(user=self.user, course_id='course-v1:edX+Test+3')
print('self.cert1.modified_date', self.cert1.modified_date)
# No factory for these
with freeze_time(datetime(2017, 1, 1)):
self.grade1 = PersistentCourseGrade.objects.create(user_id=self.user.id, course_id='course-v1:edX+Test+1',
percent_grade=1)
with freeze_time(datetime(2017, 2, 1)):
self.grade2 = PersistentCourseGrade.objects.create(user_id=self.user.id, course_id='course-v1:edX+Test+2',
percent_grade=1)
with freeze_time(datetime(2017, 3, 1)):
self.grade3 = PersistentCourseGrade.objects.create(user_id=self.user.id, course_id='course-v1:edX+Test+3',
percent_grade=1)
print('self.grade1.modified', self.grade1.modified)
@mock.patch(COMMAND_MODULE + '.Command.send_notifications')
def test_course_args(self, mock_send):
call_command(Command(), '--course', 'course-v1:edX+Test+1', 'course-v1:edX+Test+2')
self.assertTrue(mock_send.called)
self.assertEqual(list(mock_send.call_args[0][0]), [self.cert1, self.cert2])
self.assertEqual(list(mock_send.call_args[0][1]), [self.grade1, self.grade2])
@mock.patch(COMMAND_MODULE + '.Command.send_notifications')
def test_date_args(self, mock_send):
call_command(Command(), '--start-date', '2017-01-31')
self.assertTrue(mock_send.called)
self.assertListEqual(list(mock_send.call_args[0][0]), [self.cert2, self.cert3])
self.assertListEqual(list(mock_send.call_args[0][1]), [self.grade2, self.grade3])
mock_send.reset_mock()
call_command(Command(), '--start-date', '2017-02-01', '--end-date', '2017-02-02')
self.assertTrue(mock_send.called)
self.assertListEqual(list(mock_send.call_args[0][0]), [self.cert2])
self.assertListEqual(list(mock_send.call_args[0][1]), [self.grade2])
mock_send.reset_mock()
call_command(Command(), '--end-date', '2017-02-02')
self.assertTrue(mock_send.called)
self.assertListEqual(list(mock_send.call_args[0][0]), [self.cert1, self.cert2])
self.assertListEqual(list(mock_send.call_args[0][1]), [self.grade1, self.grade2])
@mock.patch(COMMAND_MODULE + '.Command.send_notifications')
def test_no_args(self, mock_send):
with self.assertRaisesRegex(CommandError, 'You must specify a filter.*'):
call_command(Command())
self.assertFalse(mock_send.called)
@mock.patch(COMMAND_MODULE + '.Command.send_notifications')
def test_dry_run(self, mock_send):
call_command(Command(), '--dry-run', '--start-date', '2017-02-01')
self.assertFalse(mock_send.called)
@mock.patch(COMMAND_MODULE + '.handle_cert_change')
@mock.patch(COMMAND_MODULE + '.send_grade_if_interesting')
@mock.patch(COMMAND_MODULE + '.handle_course_cert_awarded')
@mock.patch(COMMAND_MODULE + '.handle_course_cert_changed')
def test_hand_off(self, mock_grade_cert_change, mock_grade_interesting, mock_program_awarded, mock_program_changed):
call_command(Command(), '--start-date', '2017-02-01')
self.assertEqual(mock_grade_cert_change.call_count, 2)
self.assertEqual(mock_grade_interesting.call_count, 2)
self.assertEqual(mock_program_awarded.call_count, 2)
self.assertEqual(mock_program_changed.call_count, 2)
@mock.patch(COMMAND_MODULE + '.time')
def test_delay(self, mock_time):
call_command(Command(), '--start-date', '2017-02-01')
self.assertEqual(mock_time.sleep.call_count, 0)
mock_time.sleep.reset_mock()
call_command(Command(), '--start-date', '2017-02-01', '--delay', '0.2')
self.assertEqual(mock_time.sleep.call_count, 4) # After each cert and each grade (2 each)
self.assertEqual(mock_time.sleep.call_args[0][0], 0.2)
| agpl-3.0 | 7,781,274,588,323,939,000 | 48.541284 | 120 | 0.659074 | false |
sydneyweidman/djangotutorial | polls/migrations/0001_initial.py | 1 | 1075 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Choice',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('choice_text', models.CharField(max_length=200)),
('votes', models.IntegerField()),
],
),
migrations.CreateModel(
name='Question',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('question_text', models.CharField(max_length=200)),
('pub_date', models.DateTimeField(verbose_name=b'date published')),
],
),
migrations.AddField(
model_name='choice',
name='question',
field=models.ForeignKey(to='polls.Question'),
),
]
| gpl-2.0 | -405,429,514,385,193,000 | 30.617647 | 114 | 0.542326 | false |
gplib/gplib | gplib/apps/externaldbs/db_router.py | 1 | 3789 | # -*- coding: utf-8 -*-
# Este archivo es parte de GPLib - http://gplib.org/
#
# GPlib es software libre desarrollado en la Facultad de Filosofía y Letras de
# la Universidad de Buenos Aires y liberado bajo los términos de la licencia
# GPLIB FILO www.gplib.org/licencia bajo los términos de GPL de GNU. Usted
# puede redistribuirlo y/o modificarlo bajo los términos de la licencia GPLIB
# FILO de GNU General Public License como esta publicado en la Free Software
# Foundation, tanto en la versión 3 de la licencia, o cualquiera de las
# versiones futuras Gplib es distribuido con el objetivo de que sea útil, pero
# SIN NINGUNA GARANTÍA DE FUNCIONAMIENTO; ni siquiera la garantía implícita de
# que sirva para un propósito particular. Cuando implemente este sistema
# sugerimos el registro en www.gplib.org/registro, con el fin de fomentar una
# comunidad de usuarios de GPLib. Ver la GNU General Public License para más
# detalles.http://www.gnu.org/licenses/>
#
#
# Este arquivo é parte do GPLib http://gplib.org/
#
# GPLib é sofware livre desenviolvido na Faculdade de Filosofia e Letras da
# Universidade de Buenos Aires e liberado sob os termos da licença GPLib FILO
# www.gplib.org/licencia/ sob os termos de GPL de GNU. Você pode redistribuí-lo
# e/ou modificá-lo sob os termos da licença pública geral GNU como publicado na
# Free Software Foundation , tanto na versão 3 da licença ou quaisquer
# versões futuras. GPLib é distribuído com o objetivo de que seja útil, mas SEM
# QUALQUER GARANTIA DE PERFORMANCE; nem a garantia implícita de que servem a uma
# finalidade específica. Quando você implementar este sistema sugerimos o
# registro em www.gplib.org/registro/, a fim de promover uma comunidade de
# usuarios do GPLib. Veja a GNU General Public License para mais detalles.
# http://www.gnu.org/licenses/
#
#
# This file is part of GPLib - http://gplib.org/
#
# GPLib is free software developed by Facultad de Filosofia y Letras Universidad
# de Buenos Aires and distributed under the scope of GPLIB FILO
# www.gplib.org/license and the GPL Public License GNU. You can redistribute it
# and/or modify it under the terms of the GPLIB FILO GNU General Public License
# as published by the Free Software Foundation, either version 3 of the License,
# or (at your option) any later version.
#
# GPLib is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. After roll your own version of GPLIB you may register
# at www.gplib.org/register to buld a comunity of users and developers. See the
# GNU General Public License for more details.
class ExternalDbsRouter(object):
"""A router to control all database operations on models in
the externaldbs application"""
def db_for_read(self, model, **hints):
"Point all operations on test_extra models to 'test'"
if model._meta.app_label == 'test_extra':
return 'test'
return None
def db_for_write(self, model, **hints):
"Point all operations on test_extra models to 'test'"
if model._meta.app_label == 'test_extra':
return 'test'
return None
def allow_relation(self, obj1, obj2, **hints):
"Allow any relation if a model in test_extra is involved"
if obj1._meta.app_label == 'test_extra' or obj2._meta.app_label == 'test_extra':
return True
return None
def allow_syncdb(self, db, model):
"Make sure the test_extra app only appears on the 'test' db"
if db == 'test':
return model._meta.app_label == 'test_extra'
elif model._meta.app_label == 'test_extra':
return False
return None
| gpl-3.0 | -5,710,859,616,506,360,000 | 48.486842 | 88 | 0.72268 | false |
Beauhurst/django | tests/template_tests/utils.py | 107 | 4023 | import functools
import os
from django.template.engine import Engine
from django.test.utils import override_settings
from django.utils.safestring import mark_safe
ROOT = os.path.dirname(os.path.abspath(__file__))
TEMPLATE_DIR = os.path.join(ROOT, 'templates')
def setup(templates, *args, **kwargs):
"""
Runs test method multiple times in the following order:
debug cached string_if_invalid
----- ------ -----------------
False False
False True
False False INVALID
False True INVALID
True False
True True
"""
# when testing deprecation warnings, it's useful to run just one test since
# the message won't be displayed multiple times
test_once = kwargs.get('test_once', False)
for arg in args:
templates.update(arg)
# numerous tests make use of an inclusion tag
# add this in here for simplicity
templates["inclusion.html"] = "{{ result }}"
loaders = [
('django.template.loaders.cached.Loader', [
('django.template.loaders.locmem.Loader', templates),
]),
]
def decorator(func):
# Make Engine.get_default() raise an exception to ensure that tests
# are properly isolated from Django's global settings.
@override_settings(TEMPLATES=None)
@functools.wraps(func)
def inner(self):
# Set up custom template tag libraries if specified
libraries = getattr(self, 'libraries', {})
self.engine = Engine(
libraries=libraries,
loaders=loaders,
)
func(self)
if test_once:
return
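            # Each engine configuration runs the test twice: the first pass warms
            # the cached loader, the second exercises it (the 'cached' column above).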
func(self)
self.engine = Engine(
libraries=libraries,
loaders=loaders,
string_if_invalid='INVALID',
)
func(self)
func(self)
self.engine = Engine(
debug=True,
libraries=libraries,
loaders=loaders,
)
func(self)
func(self)
return inner
return decorator
# Helper objects
class SomeException(Exception):
silent_variable_failure = True
class SomeOtherException(Exception):
pass
class ShouldNotExecuteException(Exception):
pass
class SomeClass:
def __init__(self):
self.otherclass = OtherClass()
def method(self):
return 'SomeClass.method'
def method2(self, o):
return o
def method3(self):
raise SomeException
def method4(self):
raise SomeOtherException
def method5(self):
raise TypeError
def __getitem__(self, key):
if key == 'silent_fail_key':
raise SomeException
elif key == 'noisy_fail_key':
raise SomeOtherException
raise KeyError
@property
def silent_fail_attribute(self):
raise SomeException
@property
def noisy_fail_attribute(self):
raise SomeOtherException
@property
def attribute_error_attribute(self):
raise AttributeError
@property
def type_error_attribute(self):
raise TypeError
class OtherClass:
def method(self):
return 'OtherClass.method'
class TestObj:
def is_true(self):
return True
def is_false(self):
return False
def is_bad(self):
raise ShouldNotExecuteException()
class SilentGetItemClass:
def __getitem__(self, key):
raise SomeException
class SilentAttrClass:
def b(self):
raise SomeException
b = property(b)
class UTF8Class:
"Class whose __str__ returns non-ASCII data"
def __str__(self):
return 'ŠĐĆŽćžšđ'
# These two classes are used to test auto-escaping of string output.
class UnsafeClass:
def __str__(self):
return 'you & me'
class SafeClass:
def __str__(self):
return mark_safe('you > me')
| bsd-3-clause | -3,874,766,148,632,897,000 | 21.305556 | 79 | 0.585554 | false |
jim-cooley/abletonremotescripts | remote-scripts/branches/VCM600_2/VCM600_2.py | 1 | 8375 | # Embedded file name: /Users/versonator/Jenkins/live/Binary/Core_Release_32_static/midi-remote-scripts/VCM600/VCM600.py
from __future__ import with_statement
import Live
from _Framework.ControlSurface import ControlSurface
from _Framework.InputControlElement import *
from _Framework.SliderElement import SliderElement
from _Framework.ButtonElement import ButtonElement
from _Framework.EncoderElement import EncoderElement
from _Framework.ChannelStripComponent import ChannelStripComponent
from _Framework.DeviceComponent import DeviceComponent
from _Framework.TransportComponent import TransportComponent
from _Framework.ClipSlotComponent import ClipSlotComponent
from _Framework.SceneComponent import SceneComponent
from _Framework.SessionComponent import SessionComponent
from _Framework.ChannelTranslationSelector import ChannelTranslationSelector
from consts import *
from ViewTogglerComponent import ViewTogglerComponent
from SpecialMixerComponent import SpecialMixerComponent
from logly import *
class VCM600_2(ControlSurface):
""" Script for Vestax's VCM600 Controller """
def __init__(self, c_instance):
ControlSurface.__init__(self, c_instance)
self._set_suppress_rebuild_requests(True)
with self.component_guard():
self._setup_session_control()
self._setup_mixer_control()
self._setup_device_control()
self._setup_transport_control()
self._setup_view_control()
self._set_suppress_rebuild_requests(False)
logly_set_logger(self)
logly_message("VCM600.2 loaded.")
# General Operation:
def _on_selected_track_changed(self):
ControlSurface._on_selected_track_changed(self)
track = self.song().view.selected_track
device_to_select = track.view.selected_device
if device_to_select == None and len(track.devices) > 0:
device_to_select = track.devices[0]
if device_to_select != None:
self.song().view.select_device(device_to_select)
self._device_component.set_device(device_to_select)
return None
# this method is called by Live when it needs to disconnect. It's very important that any observers that were set up in the script are removed here
def disconnect(self):
# if self.song().view.selected_track_has_listener(self._update_selected_device):
# self.song().view.remove_selected_track_listener(self._update_selected_device)
logly_message("VCM: disconnected.")
ControlSurface.disconnect(self)
return None
# Component setup:
def _setup_session_control(self):
is_momentary = True
down_button = ButtonElement(is_momentary, MIDI_NOTE_TYPE, VCM_CHANNEL, SCENE_WHEEL_DOWN)
up_button = ButtonElement(is_momentary, MIDI_NOTE_TYPE, VCM_CHANNEL, SCENE_WHEEL_UP)
session = SessionComponent(NUM_TRACKS, NUM_SCENES)
session.set_select_buttons(down_button, up_button)
session.selected_scene().set_launch_button(ButtonElement(is_momentary, MIDI_NOTE_TYPE, VCM_CHANNEL, SCENE_WHEEL_CLICK))
track_stop_buttons = [ ButtonElement(is_momentary, MIDI_NOTE_TYPE, track, TRACK_STOP) for track in range(NUM_TRACKS) ]
session.set_stop_track_clip_buttons(tuple(track_stop_buttons))
for track in range(NUM_TRACKS):
session.selected_scene().clip_slot(track).set_launch_button(ButtonElement(is_momentary, MIDI_NOTE_TYPE, track, TRACK_PLAY))
def _setup_mixer_control(self):
is_momentary = True
mixer = SpecialMixerComponent(NUM_TRACKS, NUM_RETURNS)
for track in range(NUM_TRACKS):
strip = mixer.channel_strip(track)
strip.set_volume_control(SliderElement(MIDI_CC_TYPE, track, TRACK_VOLUME))
strip.set_pan_control(EncoderElement(MIDI_CC_TYPE, track, TRACK_PAN, Live.MidiMap.MapMode.absolute))
strip.set_send_controls((EncoderElement(MIDI_CC_TYPE, track, TRACK_SEND_A, Live.MidiMap.MapMode.absolute), EncoderElement(MIDI_CC_TYPE, track, TRACK_SEND_B, Live.MidiMap.MapMode.absolute)))
strip.set_solo_button(ButtonElement(is_momentary, MIDI_NOTE_TYPE, track, TRACK_SOLO))
strip.set_mute_button(ButtonElement(is_momentary, MIDI_NOTE_TYPE, track, TRACK_MUTE))
strip.set_crossfade_toggle(ButtonElement(is_momentary, MIDI_NOTE_TYPE, track, TRACK_CF_ASSIGN))
eq = mixer.track_eq(track)
eq.set_gain_controls(tuple([ EncoderElement(MIDI_CC_TYPE, track, TRACK_GAIN_LOW - index, Live.MidiMap.MapMode.absolute) for index in range(NUM_TRACK_GAINS) ]))
eq.set_cut_buttons(tuple([ ButtonElement(is_momentary, MIDI_NOTE_TYPE, track, TRACK_LOW_CUT - index) for index in range(NUM_TRACK_GAINS) ]))
filter = mixer.track_filter(track)
filter.set_filter_controls(EncoderElement(MIDI_CC_TYPE, track, TRACK_FREQUENCY, Live.MidiMap.MapMode.absolute), EncoderElement(MIDI_CC_TYPE, track, TRACK_RESONANCE, Live.MidiMap.MapMode.absolute))
for ret_track in range(NUM_RETURNS):
strip = mixer.return_strip(ret_track)
strip.set_volume_control(SliderElement(MIDI_CC_TYPE, VCM_CHANNEL, RETURN_VOLUME + ret_track))
strip.set_pan_control(EncoderElement(MIDI_CC_TYPE, VCM_CHANNEL, RETURN_PAN + ret_track, Live.MidiMap.MapMode.absolute))
strip.set_mute_button(ButtonElement(is_momentary, MIDI_NOTE_TYPE, VCM_CHANNEL, RETURN_MUTE + ret_track))
mixer.set_crossfader_control(SliderElement(MIDI_CC_TYPE, VCM_CHANNEL, CROSS_FADER))
mixer.set_prehear_volume_control(EncoderElement(MIDI_CC_TYPE, VCM_CHANNEL, CUE_VOLUME, Live.MidiMap.MapMode.absolute))
mixer.master_strip().set_volume_control(SliderElement(MIDI_CC_TYPE, VCM_CHANNEL, MASTER_VOLUME))
mixer.master_strip().set_pan_control(EncoderElement(MIDI_CC_TYPE, VCM_CHANNEL, MASTER_PAN, Live.MidiMap.MapMode.absolute))
return mixer
def _setup_device_control(self):
is_momentary = True
device_bank_buttons = []
device_param_controls = []
for index in range(NUM_DEVICE_BUTTONS):
device_bank_buttons.append(ButtonElement(is_momentary, MIDI_NOTE_TYPE, VCM_CHANNEL, DEVICE_BUTTON_ROW_1 + index))
device_param_controls.append(EncoderElement(MIDI_CC_TYPE, VCM_CHANNEL, DEVICE_PARAM_ROW_1 + index, Live.MidiMap.MapMode.absolute))
device = DeviceComponent()
device.set_bank_buttons(tuple(device_bank_buttons))
device.set_parameter_controls(tuple(device_param_controls))
device_translation_selector = ChannelTranslationSelector()
device_translation_selector.set_controls_to_translate(tuple(device_param_controls))
device_translation_selector.set_mode_buttons(tuple(device_bank_buttons))
self.set_device_component(device)
def _setup_transport_control(self):
is_momentary = True
transport = TransportComponent()
transport.set_play_button(ButtonElement(is_momentary, MIDI_NOTE_TYPE, VCM_CHANNEL, TRANSPORT_PLAY))
transport.set_record_button(ButtonElement(is_momentary, MIDI_NOTE_TYPE, VCM_CHANNEL, TRANSPORT_RECORD))
transport.set_nudge_buttons(ButtonElement(is_momentary, MIDI_NOTE_TYPE, VCM_CHANNEL, TEMPO_NUDGE_RIGHT), ButtonElement(is_momentary, MIDI_NOTE_TYPE, VCM_CHANNEL, TEMPO_NUDGE_LEFT))
transport.set_loop_button(ButtonElement(is_momentary, MIDI_NOTE_TYPE, VCM_CHANNEL, TRANSPORT_LOOP))
transport.set_punch_buttons(ButtonElement(is_momentary, MIDI_NOTE_TYPE, VCM_CHANNEL, LOOP_IN), ButtonElement(is_momentary, MIDI_NOTE_TYPE, VCM_CHANNEL, LOOP_OUT))
transport.set_tempo_control(SliderElement(MIDI_CC_TYPE, VCM_CHANNEL, TEMPO_COURSE), SliderElement(MIDI_CC_TYPE, VCM_CHANNEL, TEMPO_FINE))
def _setup_view_control(self):
is_momentary = True
view = ViewTogglerComponent(NUM_TRACKS)
view.set_buttons(tuple([ ButtonElement(is_momentary, MIDI_NOTE_TYPE, track, TRACK_VIEW_DEVICE) for track in range(NUM_TRACKS) ]), tuple([ ButtonElement(is_momentary, MIDI_NOTE_TYPE, track, TRACK_VIEW_CLIP) for track in range(NUM_TRACKS) ]))
# Misc Methods:
    # this method needs to be here so that Live knows what to do (nothing, in this case) when it receives sysex from the controller
def handle_sysex(self, midi_bytes):
pass
| apache-2.0 | -7,399,438,344,921,186,000 | 59.688406 | 248 | 0.712358 | false |
pixelrebel/st2 | st2common/st2common/util/sandboxing.py | 10 | 4401 | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Utility functions for our sandboxing model which is implemented on top of
separate processes and virtualenv.
"""
import os
import sys
from distutils.sysconfig import get_python_lib
from oslo_config import cfg
from st2common.constants.pack import SYSTEM_PACK_NAMES
__all__ = [
'get_sandbox_python_binary_path',
'get_sandbox_python_path',
'get_sandbox_path',
'get_sandbox_virtualenv_path'
]
def get_sandbox_python_binary_path(pack=None):
"""
Return path to the Python binary for the provided pack.
:param pack: Pack name.
:type pack: ``str``
"""
system_base_path = cfg.CONF.system.base_path
virtualenv_path = os.path.join(system_base_path, 'virtualenvs', pack)
if pack in SYSTEM_PACK_NAMES:
# Use system python for "packs" and "core" actions
python_path = sys.executable
else:
python_path = os.path.join(virtualenv_path, 'bin/python')
return python_path
def get_sandbox_path(virtualenv_path):
"""
Return PATH environment variable value for the sandboxed environment.
This function makes sure that virtualenv/bin directory is in the path and has precedence over
the global PATH values.
Note: This function needs to be called from the parent process (one which is spawning a
sandboxed process).
"""
sandbox_path = []
parent_path = os.environ.get('PATH', '')
if not virtualenv_path:
return parent_path
parent_path = parent_path.split(':')
parent_path = [path for path in parent_path if path]
# Add virtualenv bin directory
virtualenv_bin_path = os.path.join(virtualenv_path, 'bin/')
sandbox_path.append(virtualenv_bin_path)
sandbox_path.extend(parent_path)
sandbox_path = ':'.join(sandbox_path)
return sandbox_path
def get_sandbox_python_path(inherit_from_parent=True, inherit_parent_virtualenv=True):
"""
Return PYTHONPATH environment variable value for the new sandboxed environment.
This function takes into account if the current (parent) process is running under virtualenv
and other things like that.
Note: This function needs to be called from the parent process (one which is spawning a
sandboxed process).
    :param inherit_from_parent: True to inherit PYTHONPATH from the current process.
:type inherit_from_parent: ``str``
:param inherit_parent_virtualenv: True to inherit virtualenv path if the current process is
running inside virtual environment.
:type inherit_parent_virtualenv: ``str``
"""
sandbox_python_path = []
parent_python_path = os.environ.get('PYTHONPATH', '')
parent_python_path = parent_python_path.split(':')
parent_python_path = [path for path in parent_python_path if path]
if inherit_from_parent:
sandbox_python_path.extend(parent_python_path)
if inherit_parent_virtualenv and hasattr(sys, 'real_prefix'):
# We are running inside virtualenv
site_packages_dir = get_python_lib()
assert sys.prefix in site_packages_dir
sandbox_python_path.append(site_packages_dir)
sandbox_python_path = ':'.join(sandbox_python_path)
sandbox_python_path = ':' + sandbox_python_path
return sandbox_python_path
def get_sandbox_virtualenv_path(pack):
"""
Return a path to the virtual environment for the provided pack.
"""
if pack in SYSTEM_PACK_NAMES:
virtualenv_path = None
else:
system_base_path = cfg.CONF.system.base_path
virtualenv_path = os.path.join(system_base_path, 'virtualenvs', pack)
return virtualenv_path
| apache-2.0 | -5,991,135,997,040,287,000 | 32.340909 | 97 | 0.702568 | false |
MPC-Berkeley/barc | workspace/src/labs/src/lab4/MovementTest.py | 2 | 2606 | #!/usr/bin/env python
# ---------------------------------------------------------------------------
# Licensing Information: You are free to use or extend these projects for
# education or reserach purposes provided that (1) you retain this notice
# and (2) you provide clear attribution to UC Berkeley, including a link
# to http://barc-project.com
#
# Attibution Information: The barc project ROS code-base was developed at UC
# Berkeley in the Model Predictive Control (MPC) lab by Jon Gonzales
# ([email protected]) and Greg Marcil ([email protected]). The cloud
# services integation with ROS was developed by Kiet Lam
# ([email protected]). The web-server app Dator was based on an open source
# project by Bruce Wootton
# ---------------------------------------------------------------------------
# README: This node serves as an outgoing messaging bus from odroid to arduino
# Subscribes: steering and motor commands on 'ecu'
# Publishes: combined ecu commands as 'ecu_pwm'
from rospy import init_node, Subscriber, Publisher, get_param
from rospy import Rate, is_shutdown, ROSInterruptException, spin, on_shutdown
from barc.msg import ECU
from numpy import pi
import rospy
import time
motor_pwm = 1500
servo_pwm = 1580
def arduino_interface():
global ecu_pub, motor_pwm, servo_pwm
init_node('arduino_interface')
# set node rate
loop_rate = 50
dt = 1.0 / loop_rate
rate = rospy.Rate(loop_rate)
time_prev = time.time()
ecu_pub = Publisher('ecu_pwm', ECU, queue_size = 10)
while not rospy.is_shutdown():
if time.time() >= time_prev and time.time() < time_prev + 2:
motor_pwm = 1580.0
elif time.time() < time_prev + 4:
motor_pwm = 1620.0
elif time.time() < time_prev + 6:
motor_pwm = 1500.0
elif time.time() < time_prev + 9:
motor_pwm = 1580.0
servo_pwm = 1200.0
elif time.time() < time_prev + 11:
motor_pwm = 1500.0
servo_pwm = 1500
elif time.time() < time_prev + 14:
motor_pwm = 1580.0
servo_pwm = 1800.0
elif time.time() < time_prev + 17:
motor_pwm = 1500.0
servo_pwm = 1500
elif time.time() >= time_prev + 18:
break
ecu_cmd = ECU(motor_pwm, servo_pwm)
ecu_pub.publish(ecu_cmd)
# wait
rate.sleep()
#############################################################
if __name__ == '__main__':
try:
arduino_interface()
except ROSInterruptException:
pass
| mit | -4,820,288,112,991,433,000 | 33.289474 | 80 | 0.575211 | false |
indeedops/dd-agent | util.py | 1 | 20612 | # (C) Datadog, Inc. 2010-2016
# All rights reserved
# Licensed under Simplified BSD License (see LICENSE)
# stdlib
from collections import deque
import logging
import os
import platform
import re
import signal
import socket
import sys
import time
import types
import urllib2
import uuid
# 3p
import simplejson as json
import yaml # noqa, let's guess, probably imported somewhere
from tornado import ioloop
try:
from yaml import CLoader as yLoader
from yaml import CDumper as yDumper
except ImportError:
# On source install C Extensions might have not been built
from yaml import Loader as yLoader # noqa, imported from here elsewhere
from yaml import Dumper as yDumper # noqa, imported from here elsewhere
# These classes are now in utils/, they are just here for compatibility reasons,
# if a user actually uses them in a custom check
# If you're this user, please use utils.pidfile or utils.platform instead
# FIXME: remove them at a point (6.x)
from utils.dockerutil import DockerUtil
from utils.pidfile import PidFile # noqa, see ^^^
from utils.platform import Platform
from utils.proxy import get_proxy
from utils.subprocess_output import get_subprocess_output
VALID_HOSTNAME_RFC_1123_PATTERN = re.compile(r"^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]*[A-Za-z0-9])$")
MAX_HOSTNAME_LEN = 255
COLON_NON_WIN_PATH = re.compile(':(?!\\\\)')
log = logging.getLogger(__name__)
NumericTypes = (float, int, long)
def plural(count):
if count == 1:
return ""
return "s"
def get_tornado_ioloop():
return ioloop.IOLoop.current()
def get_uuid():
# Generate a unique name that will stay constant between
# invocations, such as platform.node() + uuid.getnode()
# Use uuid5, which does not depend on the clock and is
# recommended over uuid3.
# This is important to be able to identify a server even if
# its drives have been wiped clean.
# Note that this is not foolproof but we can reconcile servers
# on the back-end if need be, based on mac addresses.
return uuid.uuid5(uuid.NAMESPACE_DNS, platform.node() + str(uuid.getnode())).hex
def get_os():
"Human-friendly OS name"
if sys.platform == 'darwin':
return 'mac'
elif sys.platform.find('freebsd') != -1:
return 'freebsd'
elif sys.platform.find('linux') != -1:
return 'linux'
elif sys.platform.find('win32') != -1:
return 'windows'
elif sys.platform.find('sunos') != -1:
return 'solaris'
else:
return sys.platform
def headers(agentConfig):
# Build the request headers
return {
'User-Agent': 'Datadog Agent/%s' % agentConfig['version'],
'Content-Type': 'application/x-www-form-urlencoded',
'Accept': 'text/html, */*',
}
def windows_friendly_colon_split(config_string):
'''
Perform a split by ':' on the config_string
without splitting on the start of windows path
'''
if Platform.is_win32():
# will split on path/to/module.py:blabla but not on C:\\path
return COLON_NON_WIN_PATH.split(config_string)
else:
return config_string.split(':')
def cast_metric_val(val):
# ensure that the metric value is a numeric type
if not isinstance(val, NumericTypes):
# Try the int conversion first because want to preserve
# whether the value is an int or a float. If neither work,
# raise a ValueError to be handled elsewhere
for cast in [int, float]:
try:
val = cast(val)
return val
except ValueError:
continue
raise ValueError
return val
_IDS = {}
def get_next_id(name):
global _IDS
current_id = _IDS.get(name, 0)
current_id += 1
_IDS[name] = current_id
return current_id
def is_valid_hostname(hostname):
if hostname.lower() in set([
'localhost',
'localhost.localdomain',
'localhost6.localdomain6',
'ip6-localhost',
]):
log.warning("Hostname: %s is local" % hostname)
return False
if len(hostname) > MAX_HOSTNAME_LEN:
log.warning("Hostname: %s is too long (max length is %s characters)" % (hostname, MAX_HOSTNAME_LEN))
return False
if VALID_HOSTNAME_RFC_1123_PATTERN.match(hostname) is None:
log.warning("Hostname: %s is not complying with RFC 1123" % hostname)
return False
return True
def check_yaml(conf_path):
with open(conf_path) as f:
check_config = yaml.load(f.read(), Loader=yLoader)
assert 'init_config' in check_config, "No 'init_config' section found"
assert 'instances' in check_config, "No 'instances' section found"
valid_instances = True
if check_config['instances'] is None or not isinstance(check_config['instances'], list):
valid_instances = False
else:
for i in check_config['instances']:
if not isinstance(i, dict):
valid_instances = False
break
if not valid_instances:
raise Exception('You need to have at least one instance defined in the YAML file for this check')
else:
return check_config
def get_hostname(config=None):
"""
Get the canonical host name this agent should identify as. This is
the authoritative source of the host name for the agent.
Tries, in order:
* agent config (datadog.conf, "hostname:")
* 'hostname -f' (on unix)
* socket.gethostname()
"""
hostname = None
# first, try the config
if config is None:
from config import get_config
config = get_config(parse_args=True)
config_hostname = config.get('hostname')
if config_hostname and is_valid_hostname(config_hostname):
return config_hostname
# Try to get GCE instance name
if hostname is None:
gce_hostname = GCE.get_hostname(config)
if gce_hostname is not None:
if is_valid_hostname(gce_hostname):
return gce_hostname
# Try to get the docker hostname
docker_util = DockerUtil()
if hostname is None and docker_util.is_dockerized():
docker_hostname = docker_util.get_hostname()
if docker_hostname is not None and is_valid_hostname(docker_hostname):
hostname = docker_hostname
# then move on to os-specific detection
if hostname is None:
def _get_hostname_unix():
try:
# try fqdn
out, _, rtcode = get_subprocess_output(['/bin/hostname', '-f'], log)
if rtcode == 0:
return out.strip()
except Exception:
return None
os_name = get_os()
if os_name in ['mac', 'freebsd', 'linux', 'solaris']:
unix_hostname = _get_hostname_unix()
if unix_hostname and is_valid_hostname(unix_hostname):
hostname = unix_hostname
# if we have an ec2 default hostname, see if there's an instance-id available
if (Platform.is_ecs_instance()) or (hostname is not None and EC2.is_default(hostname)):
instanceid = EC2.get_instance_id(config)
if instanceid:
hostname = instanceid
# fall back on socket.gethostname(), socket.getfqdn() is too unreliable
if hostname is None:
try:
socket_hostname = socket.gethostname()
except socket.error:
socket_hostname = None
if socket_hostname and is_valid_hostname(socket_hostname):
hostname = socket_hostname
if hostname is None:
log.critical('Unable to reliably determine host name. You can define one in datadog.conf or in your hosts file')
raise Exception('Unable to reliably determine host name. You can define one in datadog.conf or in your hosts file')
else:
return hostname
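# Illustrative usage sketch (not from the upstream module): resolving the agent
# hostname from an already-parsed configuration dict; the value shown is hypothetical.
def _example_resolve_hostname():
    return get_hostname({'hostname': 'my-host.example.com'})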
class GCE(object):
URL = "http://169.254.169.254/computeMetadata/v1/?recursive=true"
TIMEOUT = 0.1 # second
SOURCE_TYPE_NAME = 'google cloud platform'
metadata = None
EXCLUDED_ATTRIBUTES = ["kube-env", "startup-script", "sshKeys", "user-data",
"cli-cert", "ipsec-cert", "ssl-cert"]
@staticmethod
def _get_metadata(agentConfig):
if GCE.metadata is not None:
return GCE.metadata
if not agentConfig['collect_instance_metadata']:
log.info("Instance metadata collection is disabled. Not collecting it.")
GCE.metadata = {}
return GCE.metadata
socket_to = None
try:
socket_to = socket.getdefaulttimeout()
socket.setdefaulttimeout(GCE.TIMEOUT)
except Exception:
pass
try:
opener = urllib2.build_opener()
opener.addheaders = [('X-Google-Metadata-Request','True')]
GCE.metadata = json.loads(opener.open(GCE.URL).read().strip())
except Exception:
GCE.metadata = {}
try:
if socket_to is None:
socket_to = 3
socket.setdefaulttimeout(socket_to)
except Exception:
pass
return GCE.metadata
@staticmethod
def get_tags(agentConfig):
if not agentConfig['collect_instance_metadata']:
return None
try:
host_metadata = GCE._get_metadata(agentConfig)
tags = []
for key, value in host_metadata['instance'].get('attributes', {}).iteritems():
if key in GCE.EXCLUDED_ATTRIBUTES:
continue
tags.append("%s:%s" % (key, value))
tags.extend(host_metadata['instance'].get('tags', []))
tags.append('zone:%s' % host_metadata['instance']['zone'].split('/')[-1])
tags.append('instance-type:%s' % host_metadata['instance']['machineType'].split('/')[-1])
tags.append('internal-hostname:%s' % host_metadata['instance']['hostname'])
tags.append('instance-id:%s' % host_metadata['instance']['id'])
tags.append('project:%s' % host_metadata['project']['projectId'])
tags.append('numeric_project_id:%s' % host_metadata['project']['numericProjectId'])
GCE.metadata['hostname'] = host_metadata['instance']['hostname'].split('.')[0]
return tags
except Exception:
return None
@staticmethod
def get_hostname(agentConfig):
try:
host_metadata = GCE._get_metadata(agentConfig)
hostname = host_metadata['instance']['hostname']
if agentConfig.get('gce_updated_hostname'):
return hostname
else:
return hostname.split('.')[0]
except Exception:
return None
@staticmethod
def get_host_aliases(agentConfig):
try:
host_metadata = GCE._get_metadata(agentConfig)
project_id = host_metadata['project']['projectId']
instance_name = host_metadata['instance']['hostname'].split('.')[0]
return ['%s.%s' % (instance_name, project_id)]
except Exception:
return None
class EC2(object):
"""Retrieve EC2 metadata
"""
EC2_METADATA_HOST = "http://169.254.169.254"
METADATA_URL_BASE = EC2_METADATA_HOST + "/latest/meta-data"
INSTANCE_IDENTITY_URL = EC2_METADATA_HOST + "/latest/dynamic/instance-identity/document"
TIMEOUT = 0.1 # second
DEFAULT_PREFIXES = [u'ip-', u'domu']
metadata = {}
class NoIAMRole(Exception):
"""
Instance has no associated IAM role.
"""
pass
@staticmethod
def is_default(hostname):
hostname = hostname.lower()
for prefix in EC2.DEFAULT_PREFIXES:
if hostname.startswith(prefix):
return True
return False
@staticmethod
def get_iam_role():
"""
Retrieve instance's IAM role.
Raise `NoIAMRole` when unavailable.
"""
try:
return urllib2.urlopen(EC2.METADATA_URL_BASE + "/iam/security-credentials/").read().strip()
except urllib2.HTTPError as err:
if err.code == 404:
raise EC2.NoIAMRole()
raise
@staticmethod
def get_tags(agentConfig):
"""
Retrieve AWS EC2 tags.
"""
if not agentConfig['collect_instance_metadata']:
log.info("Instance metadata collection is disabled. Not collecting it.")
return []
EC2_tags = []
socket_to = None
try:
socket_to = socket.getdefaulttimeout()
socket.setdefaulttimeout(EC2.TIMEOUT)
except Exception:
pass
try:
iam_role = EC2.get_iam_role()
iam_params = json.loads(urllib2.urlopen(EC2.METADATA_URL_BASE + "/iam/security-credentials/" + unicode(iam_role)).read().strip())
instance_identity = json.loads(urllib2.urlopen(EC2.INSTANCE_IDENTITY_URL).read().strip())
region = instance_identity['region']
import boto.ec2
proxy_settings = get_proxy(agentConfig) or {}
connection = boto.ec2.connect_to_region(
region,
aws_access_key_id=iam_params['AccessKeyId'],
aws_secret_access_key=iam_params['SecretAccessKey'],
security_token=iam_params['Token'],
proxy=proxy_settings.get('host'), proxy_port=proxy_settings.get('port'),
proxy_user=proxy_settings.get('user'), proxy_pass=proxy_settings.get('password')
)
tag_object = connection.get_all_tags({'resource-id': EC2.metadata['instance-id']})
EC2_tags = [u"%s:%s" % (tag.name, tag.value) for tag in tag_object]
if agentConfig.get('collect_security_groups') and EC2.metadata.get('security-groups'):
EC2_tags.append(u"security-group-name:{0}".format(EC2.metadata.get('security-groups')))
except EC2.NoIAMRole:
log.warning(
u"Unable to retrieve AWS EC2 custom tags: "
u"an IAM role associated with the instance is required"
)
except Exception:
log.exception("Problem retrieving custom EC2 tags")
try:
if socket_to is None:
socket_to = 3
socket.setdefaulttimeout(socket_to)
except Exception:
pass
return EC2_tags
@staticmethod
def get_metadata(agentConfig):
"""Use the ec2 http service to introspect the instance. This adds latency if not running on EC2
"""
# >>> import urllib2
# >>> urllib2.urlopen('http://169.254.169.254/latest/', timeout=1).read()
# 'meta-data\nuser-data'
# >>> urllib2.urlopen('http://169.254.169.254/latest/meta-data', timeout=1).read()
# 'ami-id\nami-launch-index\nami-manifest-path\nhostname\ninstance-id\nlocal-ipv4\npublic-keys/\nreservation-id\nsecurity-groups'
# >>> urllib2.urlopen('http://169.254.169.254/latest/meta-data/instance-id', timeout=1).read()
# 'i-deadbeef'
# Every call may add TIMEOUT seconds in latency so don't abuse this call
# python 2.4 does not support an explicit timeout argument so force it here
# Rather than monkey-patching urllib2, just lower the timeout globally for these calls
if not agentConfig['collect_instance_metadata']:
log.info("Instance metadata collection is disabled. Not collecting it.")
return {}
socket_to = None
try:
socket_to = socket.getdefaulttimeout()
socket.setdefaulttimeout(EC2.TIMEOUT)
except Exception:
pass
for k in ('instance-id', 'hostname', 'local-hostname', 'public-hostname', 'ami-id', 'local-ipv4', 'public-keys/', 'public-ipv4', 'reservation-id', 'security-groups'):
try:
v = urllib2.urlopen(EC2.METADATA_URL_BASE + "/" + unicode(k)).read().strip()
assert type(v) in (types.StringType, types.UnicodeType) and len(v) > 0, "%s is not a string" % v
EC2.metadata[k.rstrip('/')] = v
except Exception:
pass
try:
if socket_to is None:
socket_to = 3
socket.setdefaulttimeout(socket_to)
except Exception:
pass
return EC2.metadata
@staticmethod
def get_instance_id(agentConfig):
try:
return EC2.get_metadata(agentConfig).get("instance-id", None)
except Exception:
return None
class Watchdog(object):
"""
Simple signal-based watchdog. Restarts the process when:
* no reset was made for more than a specified duration
* (optional) a specified memory threshold is exceeded
* (optional) suspiciously high activity is detected, i.e. too many resets for a given timeframe.
**Warning**: Not thread-safe.
Can only be invoked once per process, so don't use with multiple threads.
If you instantiate more than one, you're also asking for trouble.
"""
# Activity history timeframe
_RESTART_TIMEFRAME = 60
def __init__(self, duration, max_mem_mb=None, max_resets=None):
import resource
# Set the duration
self._duration = int(duration)
signal.signal(signal.SIGALRM, Watchdog.self_destruct)
# Set memory usage threshold
if max_mem_mb is not None:
self._max_mem_kb = 1024 * max_mem_mb
max_mem_bytes = 1024 * self._max_mem_kb
resource.setrlimit(resource.RLIMIT_AS, (max_mem_bytes, max_mem_bytes))
self.memory_limit_enabled = True
else:
self.memory_limit_enabled = False
# Set high activity monitoring
self._restarts = deque([])
self._max_resets = max_resets
@staticmethod
def self_destruct(signum, frame):
"""
Kill the process. It will be eventually restarted.
"""
try:
import traceback
log.error("Self-destructing...")
log.error(traceback.format_exc())
finally:
os.kill(os.getpid(), signal.SIGKILL)
def _is_frenetic(self):
"""
Detect suspicious high activity, i.e. the number of resets exceeds the maximum limit set
on the watchdog timeframe.
Flush old activity history
"""
now = time.time()
while(self._restarts and self._restarts[0] < now - self._RESTART_TIMEFRAME):
self._restarts.popleft()
return len(self._restarts) > self._max_resets
def reset(self):
"""
Reset the watchdog state, i.e.
* re-arm alarm signal
* (optional) check memory consumption
* (optional) save reset history, flush old entries and check frequency
"""
# Check memory consumption: restart if too high as tornado will swallow MemoryErrors
if self.memory_limit_enabled:
mem_usage_kb = int(os.popen('ps -p %d -o %s | tail -1' % (os.getpid(), 'rss')).read())
if mem_usage_kb > (0.95 * self._max_mem_kb):
Watchdog.self_destruct(signal.SIGKILL, sys._getframe(0))
# Check activity
if self._max_resets:
self._restarts.append(time.time())
if self._is_frenetic():
Watchdog.self_destruct(signal.SIGKILL, sys._getframe(0))
# Re arm alarm signal
log.debug("Resetting watchdog for %d" % self._duration)
signal.alarm(self._duration)
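# Illustrative usage sketch (not from the upstream module): a Watchdog is armed with a
# timeout (and optional memory/reset limits) and must be reset from the main loop to
# signal liveness. All values below are hypothetical.
def _example_watchdog_loop():
    watchdog = Watchdog(duration=30, max_mem_mb=512, max_resets=10)
    for _ in range(3):
        watchdog.reset()  # re-arms the alarm; the process self-destructs if the loop stalls
        time.sleep(1)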
class Timer(object):
""" Helper class """
def __init__(self):
self.start()
def _now(self):
return time.time()
def start(self):
self.started = self._now()
self.last = self.started
return self
def step(self):
now = self._now()
step = now - self.last
self.last = now
return step
def total(self, as_sec=True):
return self._now() - self.started
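# Illustrative usage sketch (not from the upstream module): step() returns the time since
# the previous step()/start(), total() the time since start().
def _example_timer():
    timer = Timer()
    time.sleep(0.1)
    first = timer.step()
    time.sleep(0.2)
    return first, timer.step(), timer.total()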
"""
Iterable Recipes
"""
def chunks(iterable, chunk_size):
"""Generate sequences of `chunk_size` elements from `iterable`."""
iterable = iter(iterable)
while True:
chunk = [None] * chunk_size
count = 0
try:
for _ in range(chunk_size):
chunk[count] = iterable.next()
count += 1
yield chunk[:count]
except StopIteration:
if count:
yield chunk[:count]
break
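# Illustrative usage sketch (not from the upstream module): chunks() batches any iterable
# into lists of at most chunk_size items.
def _example_chunks():
    return [batch for batch in chunks(range(7), 3)]  # [[0, 1, 2], [3, 4, 5], [6]]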
| bsd-3-clause | 281,054,137,435,490,940 | 32.679739 | 174 | 0.599117 | false |
sharkykh/SickRage | lib/pgi/clib/_utils.py | 19 | 6169 | # Copyright 2012,2013 Christoph Reiter
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
import os
import sys
from weakref import proxy
from ctypes import cdll, c_void_p, c_size_t, c_char_p
from ._compat import PY3
class _BaseFinalizer(object):
# Set used to keep the proxy around.
# Deletion automatically removes proxies from the set.
_objects = set()
@classmethod
def track(cls, obj, ptr):
"""
Track an object which needs destruction when it is garbage collected.
"""
cls._objects.add(cls(obj, ptr))
def __init__(self, obj, ptr):
self.obj = proxy(obj, self.delete)
self.ptr = ptr
def delete(self, deadweakproxy):
type(self)._objects.remove(self)
self.destructor(deadweakproxy, self.ptr)
# decode a path from glib
if os.name == "nt":
def fsdecode(path):
return path.decode("utf-8")
elif PY3:
_FSENC = sys.getfilesystemencoding()
def fsdecode(path):
return path.decode(_FSENC, "surrogateescape")
else:
def fsdecode(path):
return path
if os.name == "nt":
_so_mapping = {
"glib-2.0": "libglib-2.0-0.dll",
"gobject-2.0": "libgobject-2.0-0.dll",
"girepository-1.0": "libgirepository-1.0-1.dll",
}
elif os.uname()[0] == "Darwin":
_so_mapping = {
"glib-2.0": "libglib-2.0.0.dylib",
"gobject-2.0": "libgobject-2.0.0.dylib",
"girepository-1.0": "libgirepository-1.0.1.dylib",
}
else:
_so_mapping = {
"glib-2.0": "libglib-2.0.so.0",
"gobject-2.0": "libgobject-2.0.so.0",
"girepository-1.0": "libgirepository-1.0.so.1",
}
if os.name == "nt":
stdlib = memcpy = cdll.msvcrt
elif os.uname()[0] == "Darwin":
stdlib = getattr(cdll, "libc.dylib")
else:
stdlib = getattr(cdll, "libc.so.6")
memcpy = stdlib.memcpy
memcpy.argtypes = [c_void_p, c_void_p, c_size_t]
memcpy.restype = c_void_p
_internal = {}
def find_library(name, cached=True, internal=True):
"""
cached: Return a new instance
internal: return a shared instance that's not the ctypes cached one
"""
# a new one
if not cached:
return cdll.LoadLibrary(_so_mapping[name])
# from the shared internal set or a new one
if internal:
if name not in _internal:
_internal[name] = cdll.LoadLibrary(_so_mapping[name])
return _internal[name]
# a shared one
return getattr(cdll, _so_mapping[name])
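# Illustrative usage sketch (not from the upstream module): find_library() maps a short
# name from _so_mapping to a loaded ctypes library; the default is a shared, cached handle.
def _example_load_glib():
    return find_library("glib-2.0")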
class _CProperty(object):
_cache = {}
def __init__(self, *args):
self.args = args
def __get__(self, instance, owner):
if instance is None:
return self
lib, name, symbol, ret, args = self.args
assert len(args) == 1
func = self._cache.get((lib, symbol), None)
if func is None:
self._cache[(lib, symbol)] = func = getattr(lib, symbol)
func.argtypes = args
func.restype = ret
value = func(instance)
if PY3 and issubclass(ret, c_char_p) and value is not None:
value = value.decode("utf-8")
setattr(instance, name, value)
return value
class _CMethod(object):
def __init__(self, *args):
self.args = args
def __get__(self, instance, *args):
owner, name, lib, symbol, ret, args, wrap, unref = self.args
func = getattr(lib, symbol)
func.argtypes = args
func.restype = ret
def unref_func(*x):
instance = func(*x)
instance._take_ownership()
return instance
if instance is None:
instance = owner
if wrap:
if unref:
setattr(owner, name, unref_func)
else:
setattr(owner, name, lambda *x: func(*x))
return getattr(instance, name)
else:
# FIXME: handle unref
assert not unref
setattr(owner, name, staticmethod(func))
return getattr(owner, name)
def wrap_class(lib, base, ptr, prefix, methods):
for method in methods:
unref = False
if len(method) == 3:
name, ret, args = method
else:
name, ret, args, unref = method
# _get_name -> _name
# _get_version(*args) -> _get_version(*args)
attr_name = name
if name[:1] == "_":
name = name[1:]
symbol = prefix + name
is_method = args and args[0] == ptr
if is_method:
# Methods that have no arguments and return no pointer type
# can be getters and the values can be cached. hurray!
try:
is_pointer = hasattr(ret, "contents") or ret is c_void_p or \
issubclass(ret, c_void_p)
except TypeError:
is_pointer = False
is_void = ret is None
if len(args) == 1 and not is_void and not is_pointer:
is_override = attr_name.startswith("_")
if name.startswith("get_"):
attr_name = name.split("_", 1)[-1]
elif name.startswith("to_"):
attr_name = name.split("_", 1)[-1]
if is_override:
attr_name = "_" + attr_name
# e.g. conflict with ctypes "contents", "value" attribute
while hasattr(ptr, attr_name):
attr_name += "_"
prop = _CProperty(lib, attr_name, symbol, ret, args)
setattr(ptr, attr_name, prop)
else:
method = _CMethod(
ptr, attr_name, lib, symbol, ret, args, True, unref)
setattr(ptr, attr_name, method)
else:
if base is None:
base = ptr
static_method = _CMethod(
base, attr_name, lib, symbol, ret, args, False, unref)
setattr(base, attr_name, static_method)
| gpl-3.0 | 2,926,396,634,060,663,300 | 27.962441 | 77 | 0.549522 | false |
jleclanche/django-push-notifications | push_notifications/conf/legacy.py | 2 | 5539 | from django.core.exceptions import ImproperlyConfigured
from ..settings import PUSH_NOTIFICATIONS_SETTINGS as SETTINGS
from .base import BaseConfig
__all__ = [
"LegacyConfig"
]
class empty(object):
pass
class LegacyConfig(BaseConfig):
msg = "Setup PUSH_NOTIFICATIONS_SETTINGS properly to send messages"
def _get_application_settings(self, application_id, settings_key, error_message):
"""Legacy behaviour"""
if not application_id:
value = SETTINGS.get(settings_key, empty)
if value is empty:
raise ImproperlyConfigured(error_message)
return value
else:
msg = (
"LegacySettings does not support application_id. To enable "
"multiple application support, use push_notifications.conf.AppSettings."
)
raise ImproperlyConfigured(msg)
def get_gcm_api_key(self, application_id=None):
msg = (
'Set PUSH_NOTIFICATIONS_SETTINGS["GCM_API_KEY"] to send messages through GCM.'
)
return self._get_application_settings(application_id, "GCM_API_KEY", msg)
def get_fcm_api_key(self, application_id=None):
msg = (
'Set PUSH_NOTIFICATIONS_SETTINGS["FCM_API_KEY"] to send messages through FCM.'
)
return self._get_application_settings(application_id, "FCM_API_KEY", msg)
def get_post_url(self, cloud_type, application_id=None):
key = "{}_POST_URL".format(cloud_type)
msg = (
'Set PUSH_NOTIFICATIONS_SETTINGS["{}"] to send messages through {}.'.format(
key, cloud_type
)
)
return self._get_application_settings(application_id, key, msg)
def get_error_timeout(self, cloud_type, application_id=None):
key = "{}_ERROR_TIMEOUT".format(cloud_type)
msg = (
'Set PUSH_NOTIFICATIONS_SETTINGS["{}"] to send messages through {}.'.format(
key, cloud_type
)
)
return self._get_application_settings(application_id, key, msg)
def get_max_recipients(self, cloud_type, application_id=None):
key = "{}_MAX_RECIPIENTS".format(cloud_type)
msg = (
'Set PUSH_NOTIFICATIONS_SETTINGS["{}"] to send messages through {}.'.format(
key, cloud_type
)
)
return self._get_application_settings(application_id, key, msg)
def has_auth_token_creds(self, application_id=None):
try:
self._get_apns_auth_key(application_id)
self._get_apns_auth_key_id(application_id)
self._get_apns_team_id(application_id)
except ImproperlyConfigured:
return False
return True
def get_apns_certificate(self, application_id=None):
r = self._get_application_settings(
application_id, "APNS_CERTIFICATE",
"You need to setup PUSH_NOTIFICATIONS_SETTINGS properly to send messages"
)
if not isinstance(r, str):
# probably the (Django) file, and file path should be got
if hasattr(r, "path"):
return r.path
elif (hasattr(r, "has_key") or hasattr(r, "__contains__")) and "path" in r:
return r["path"]
else:
msg = (
"The APNS certificate settings value should be a string, or "
"should have a 'path' attribute or key"
)
raise ImproperlyConfigured(msg)
return r
def get_apns_auth_creds(self, application_id=None):
return (
self._get_apns_auth_key(application_id),
self._get_apns_auth_key_id(application_id),
self._get_apns_team_id(application_id))
def _get_apns_auth_key(self, application_id=None):
return self._get_application_settings(application_id, "APNS_AUTH_KEY_PATH", self.msg)
def _get_apns_team_id(self, application_id=None):
return self._get_application_settings(application_id, "APNS_TEAM_ID", self.msg)
def _get_apns_auth_key_id(self, application_id=None):
return self._get_application_settings(application_id, "APNS_AUTH_KEY_ID", self.msg)
def get_apns_use_sandbox(self, application_id=None):
return self._get_application_settings(application_id, "APNS_USE_SANDBOX", self.msg)
def get_apns_use_alternative_port(self, application_id=None):
return
self._get_application_settings(application_id, "APNS_USE_ALTERNATIVE_PORT", self.msg)
def get_apns_topic(self, application_id=None):
return self._get_application_settings(application_id, "APNS_TOPIC", self.msg)
def get_apns_host(self, application_id=None):
return self._get_application_settings(application_id, "APNS_HOST", self.msg)
def get_apns_port(self, application_id=None):
return self._get_application_settings(application_id, "APNS_PORT", self.msg)
def get_apns_feedback_host(self, application_id=None):
return self._get_application_settings(application_id, "APNS_FEEDBACK_HOST", self.msg)
def get_apns_feedback_port(self, application_id=None):
return self._get_application_settings(application_id, "APNS_FEEDBACK_PORT", self.msg)
def get_wns_package_security_id(self, application_id=None):
return self._get_application_settings(application_id, "WNS_PACKAGE_SECURITY_ID", self.msg)
def get_wns_secret_key(self, application_id=None):
msg = "Setup PUSH_NOTIFICATIONS_SETTINGS properly to send messages"
return self._get_application_settings(application_id, "WNS_SECRET_KEY", msg)
def get_wp_post_url(self, application_id, browser):
msg = "Setup PUSH_NOTIFICATIONS_SETTINGS properly to send messages"
return self._get_application_settings(application_id, "WP_POST_URL", msg)[browser]
def get_wp_private_key(self, application_id=None):
msg = "Setup PUSH_NOTIFICATIONS_SETTINGS properly to send messages"
return self._get_application_settings(application_id, "WP_PRIVATE_KEY", msg)
def get_wp_claims(self, application_id=None):
msg = "Setup PUSH_NOTIFICATIONS_SETTINGS properly to send messages"
return self._get_application_settings(application_id, "WP_CLAIMS", msg)
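# Illustrative usage sketch (not from the upstream module): LegacyConfig reads every value
# from the single PUSH_NOTIFICATIONS_SETTINGS dict, so lookups are done without an
# application_id.
def _example_read_fcm_key():
    config = LegacyConfig()
    return config.get_fcm_api_key()  # raises ImproperlyConfigured if FCM_API_KEY is unset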
| mit | 1,503,820,679,603,699,000 | 34.280255 | 92 | 0.723777 | false |
jjlee3/openthread | tests/scripts/thread-cert/Cert_5_6_05_NetworkDataRegisterAfterAttachRouter.py | 5 | 5400 | #!/usr/bin/python
#
# Copyright (c) 2016, The OpenThread Authors.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
import time
import unittest
import node
LEADER = 1
ROUTER = 2
ED1 = 3
SED1 = 4
class Cert_5_6_5_NetworkDataRegisterAfterAttachRouter(unittest.TestCase):
def setUp(self):
self.nodes = {}
for i in range(1,5):
self.nodes[i] = node.Node(i)
self.nodes[LEADER].set_panid(0xface)
self.nodes[LEADER].set_mode('rsdn')
self.nodes[LEADER].add_whitelist(self.nodes[ROUTER].get_addr64())
self.nodes[LEADER].enable_whitelist()
self.nodes[ROUTER].set_panid(0xface)
self.nodes[ROUTER].set_mode('rsdn')
self.nodes[ROUTER].add_whitelist(self.nodes[LEADER].get_addr64())
self.nodes[ROUTER].add_whitelist(self.nodes[ED1].get_addr64())
self.nodes[ROUTER].add_whitelist(self.nodes[SED1].get_addr64())
self.nodes[ROUTER].enable_whitelist()
self.nodes[ROUTER].set_router_selection_jitter(1)
self.nodes[ED1].set_panid(0xface)
self.nodes[ED1].set_mode('rsn')
self.nodes[ED1].add_whitelist(self.nodes[ROUTER].get_addr64())
self.nodes[ED1].enable_whitelist()
self.nodes[SED1].set_panid(0xface)
self.nodes[SED1].set_mode('s')
self.nodes[SED1].add_whitelist(self.nodes[ROUTER].get_addr64())
self.nodes[SED1].enable_whitelist()
self.nodes[SED1].set_timeout(3)
def tearDown(self):
for node in list(self.nodes.values()):
node.stop()
del self.nodes
def test(self):
self.nodes[LEADER].start()
self.nodes[LEADER].set_state('leader')
self.assertEqual(self.nodes[LEADER].get_state(), 'leader')
self.nodes[ROUTER].start()
time.sleep(5)
self.assertEqual(self.nodes[ROUTER].get_state(), 'router')
self.nodes[ED1].start()
time.sleep(5)
self.assertEqual(self.nodes[ED1].get_state(), 'child')
self.nodes[SED1].start()
time.sleep(5)
self.assertEqual(self.nodes[SED1].get_state(), 'child')
self.nodes[ROUTER].add_prefix('2001:2:0:1::/64', 'paros')
self.nodes[ROUTER].add_prefix('2001:2:0:2::/64', 'paro')
self.nodes[ROUTER].register_netdata()
time.sleep(10)
addrs = self.nodes[ED1].get_addrs()
self.assertTrue(any('2001:2:0:1' in addr[0:10] for addr in addrs))
self.assertTrue(any('2001:2:0:2' in addr[0:10] for addr in addrs))
for addr in addrs:
if addr[0:3] == '200':
self.assertTrue(self.nodes[LEADER].ping(addr))
addrs = self.nodes[SED1].get_addrs()
self.assertTrue(any('2001:2:0:1' in addr[0:10] for addr in addrs))
self.assertFalse(any('2001:2:0:2' in addr[0:10] for addr in addrs))
for addr in addrs:
if addr[0:3] == '200':
self.assertTrue(self.nodes[LEADER].ping(addr))
self.nodes[ROUTER].add_prefix('2001:2:0:3::/64', 'pacs')
self.nodes[ROUTER].register_netdata()
time.sleep(10)
addrs = self.nodes[ED1].get_addrs()
self.assertTrue(any('2001:2:0:1' in addr[0:10] for addr in addrs))
self.assertTrue(any('2001:2:0:2' in addr[0:10] for addr in addrs))
self.assertTrue(any('2001:2:0:3' in addr[0:10] for addr in addrs))
for addr in addrs:
if addr[0:3] == '200':
self.assertTrue(self.nodes[LEADER].ping(addr))
addrs = self.nodes[SED1].get_addrs()
self.assertTrue(any('2001:2:0:1' in addr[0:10] for addr in addrs))
self.assertFalse(any('2001:2:0:2' in addr[0:10] for addr in addrs))
self.assertTrue(any('2001:2:0:3' in addr[0:10] for addr in addrs))
for addr in addrs:
if addr[0:3] == '200':
self.assertTrue(self.nodes[LEADER].ping(addr))
if __name__ == '__main__':
unittest.main()
| bsd-3-clause | -196,297,783,018,666,000 | 39.298507 | 78 | 0.648704 | false |
shumik/skencil-c | Sketch/UI/reloaddlg.py | 1 | 2631 | # Sketch - A Python-based interactive drawing program
# Copyright (C) 1997, 1998 by Bernhard Herzog
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import operator, string
from Sketch.warn import pdebug, warn_tb, INTERNAL
from Tkinter import Frame, Scrollbar
from Tkinter import RIGHT, BOTTOM, X, Y, BOTH, TOP
from tkext import UpdatedButton, UpdatedListbox, COMMAND
from sketchdlg import SketchPanel
import prompt
class ReloadPanel(SketchPanel):
title = 'Reload Modules'
receivers = []
def __init__(self, master, main_window, doc):
SketchPanel.__init__(self, master, main_window, doc,
name = 'reloaddlg')
def build_dlg(self):
top = self.top
list_frame = Frame(top)
list_frame.pack(side = TOP, expand = 1, fill = BOTH)
sb_vert = Scrollbar(list_frame, takefocus = 0)
sb_vert.pack(side = RIGHT, fill = Y)
module_list = UpdatedListbox(list_frame, name = 'list')
module_list.pack(expand = 1, fill = BOTH)
module_list.Subscribe(COMMAND, self.do_reload)
sb_vert['command'] = (module_list, 'yview')
module_list['yscrollcommand'] = (sb_vert, 'set')
self.module_list = module_list
frame = Frame(top)
frame.pack(side = BOTTOM, fill = X)
for text, cmd in [('Reload Module', self.do_reload),
('Update List', self.update_list),
('Close', self.close_dlg)]:
button = UpdatedButton(frame, text = text, command = cmd)
button.pack(side = TOP, fill = X, expand = 1)
self.update_list()
def init_from_doc(self):
pass
def update_list(self):
modules = prompt.get_sketch_modules()
modules = map(lambda mod: (mod.__name__, mod), modules)
modules.sort()
names = map(operator.getitem, modules, [0] * len(modules))
self.module_list.SetList(names)
self.modules = modules
def do_reload(self):
index = self.module_list.curselection()
index = string.atoi(index[0])
pdebug(None, 'reloading', self.modules[index])
try:
reload(self.modules[index][1])
except:
warn_tb(INTERNAL)
| gpl-2.0 | 637,809,239,642,575,100 | 29.952941 | 74 | 0.708856 | false |
greyhwndz/rethinkdb | drivers/python/rethinkdb/_backup.py | 19 | 3968 | from __future__ import print_function
from copy import deepcopy
import socket, sys, string, re
try:
import rethinkdb as r
except ImportError:
print("The RethinkDB python driver is required to use this command.")
print("Please install the driver via `pip install rethinkdb`.")
exit(1)
# This file contains common functions used by the import/export/dump/restore scripts
def os_call_wrapper(fn, filename, error_str):
try:
fn(filename)
except OSError as ex:
raise RuntimeError(error_str % (filename, ex.strerror))
def parse_connect_option(connect):
host_port = connect.split(":")
if len(host_port) == 1:
host_port = (host_port[0], "28015") # If just a host, use the default port
if len(host_port) != 2:
raise RuntimeError("Error: Invalid 'host:port' format: %s" % connect)
return host_port
def parse_db_table(item):
if not all(c in string.ascii_letters + string.digits + "._" for c in item):
raise RuntimeError("Error: Invalid 'db' or 'db.table' name: %s" % item)
db_table = item.split(".")
if len(db_table) == 1:
return (db_table[0], None)
elif len(db_table) == 2:
return tuple(db_table)
else:
raise RuntimeError("Error: Invalid 'db' or 'db.table' format: %s" % item)
def parse_db_table_options(db_table_options):
res = []
for item in db_table_options:
res.append(parse_db_table(item))
return res
# This function is used to wrap rethinkdb calls to recover from connection errors
# The first argument to the function is an output parameter indicating if progress
# has been made since the last call. This is passed as an array so it works as an
# output parameter. The first item in the array is compared each attempt to check
# if progress has been made.
# Using this wrapper, the given function will be called until 5 connection errors
# occur in a row with no progress being made. Care should be taken that the given
# function will terminate as long as the progress parameter is changed.
def rdb_call_wrapper(conn_fn, context, fn, *args, **kwargs):
i = 0
max_attempts = 5
progress = [None]
while True:
last_progress = deepcopy(progress[0])
try:
conn = conn_fn()
return fn(progress, conn, *args, **kwargs)
except socket.error as ex:
i = i + 1 if progress[0] == last_progress else 0
if i == max_attempts:
raise RuntimeError("Connection error during '%s': %s" % (context, ex.message))
except (r.ReqlError, r.ReqlDriverError) as ex:
raise RuntimeError("ReQL error during '%s': %s" % (context, ex.message))
def print_progress(ratio):
total_width = 40
done_width = int(ratio * total_width)
undone_width = total_width - done_width
print("\r[%s%s] %3d%%" % ("=" * done_width, " " * undone_width, int(100 * ratio)), end=' ')
sys.stdout.flush()
def check_minimum_version(progress, conn, minimum_version):
stringify_version = lambda v: '.'.join(map(str, v))
parsed_version = None
try:
version = r.db('rethinkdb').table('server_status')[0]['process']['version'].run(conn)
matches = re.match('rethinkdb (\d+)\.(\d+)\.(\d+)', version)
if matches == None:
raise RuntimeError("invalid version string format")
parsed_version = tuple(int(num) for num in matches.groups())
if parsed_version < minimum_version:
raise RuntimeError("incompatible version")
except (RuntimeError, TypeError, r.ReqlRuntimeError):
if parsed_version is None:
message = "Error: Incompatible server version found, expected >= %s" % \
stringify_version(minimum_version)
else:
message = "Error: Incompatible server version found (%s), expected >= %s" % \
(stringify_version(parsed_version), stringify_version(minimum_version))
raise RuntimeError(message)
| agpl-3.0 | 8,806,051,871,625,875,000 | 40.768421 | 95 | 0.644405 | false |
netscaler/horizon | openstack_dashboard/api/cinder.py | 9 | 5532 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 OpenStack Foundation
# Copyright 2012 Nebula, Inc.
# Copyright (c) 2012 X.commerce, a business unit of eBay Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import
import logging
from django.conf import settings # noqa
from django.utils.translation import ugettext_lazy as _ # noqa
from cinderclient.v1 import client as cinder_client
from horizon import exceptions
from openstack_dashboard.api import base
from openstack_dashboard.api import nova
LOG = logging.getLogger(__name__)
# API static values
VOLUME_STATE_AVAILABLE = "available"
DEFAULT_QUOTA_NAME = 'default'
def cinderclient(request):
insecure = getattr(settings, 'OPENSTACK_SSL_NO_VERIFY', False)
cacert = getattr(settings, 'OPENSTACK_SSL_CACERT', None)
cinder_url = ""
try:
cinder_url = base.url_for(request, 'volume')
except exceptions.ServiceCatalogException:
LOG.debug('no volume service configured.')
return None
LOG.debug('cinderclient connection created using token "%s" and url "%s"' %
(request.user.token.id, cinder_url))
c = cinder_client.Client(request.user.username,
request.user.token.id,
project_id=request.user.tenant_id,
auth_url=cinder_url,
insecure=insecure,
cacert=cacert,
http_log_debug=settings.DEBUG)
c.client.auth_token = request.user.token.id
c.client.management_url = cinder_url
return c
def volume_list(request, search_opts=None):
"""
To see all volumes in the cloud as an admin you can pass in a special
search option: {'all_tenants': 1}
"""
c_client = cinderclient(request)
if c_client is None:
return []
return c_client.volumes.list(search_opts=search_opts)
def volume_get(request, volume_id):
volume_data = cinderclient(request).volumes.get(volume_id)
for attachment in volume_data.attachments:
if "server_id" in attachment:
instance = nova.server_get(request, attachment['server_id'])
attachment['instance_name'] = instance.name
else:
# Nova volume can occasionally send back error'd attachments
# the lack a server_id property; to work around that we'll
# give the attached instance a generic name.
attachment['instance_name'] = _("Unknown instance")
return volume_data
def volume_create(request, size, name, description, volume_type,
snapshot_id=None, metadata=None, image_id=None):
return cinderclient(request).volumes.create(size, display_name=name,
display_description=description, volume_type=volume_type,
snapshot_id=snapshot_id, metadata=metadata, imageRef=image_id)
def volume_delete(request, volume_id):
return cinderclient(request).volumes.delete(volume_id)
def volume_snapshot_get(request, snapshot_id):
return cinderclient(request).volume_snapshots.get(snapshot_id)
def volume_snapshot_list(request):
c_client = cinderclient(request)
if c_client is None:
return []
return c_client.volume_snapshots.list()
def volume_snapshot_create(request, volume_id, name, description):
return cinderclient(request).volume_snapshots.create(
volume_id, display_name=name, display_description=description)
def volume_snapshot_delete(request, snapshot_id):
return cinderclient(request).volume_snapshots.delete(snapshot_id)
def tenant_quota_get(request, tenant_id):
c_client = cinderclient(request)
if c_client is None:
return base.QuotaSet()
return base.QuotaSet(c_client.quotas.get(tenant_id))
def tenant_quota_update(request, tenant_id, **kwargs):
return cinderclient(request).quotas.update(tenant_id, **kwargs)
def default_quota_get(request, tenant_id):
return base.QuotaSet(cinderclient(request).quotas.defaults(tenant_id))
def default_quota_update(request, **kwargs):
cinderclient(request).quota_classes.update(DEFAULT_QUOTA_NAME, **kwargs)
def volume_type_list(request):
return cinderclient(request).volume_types.list()
def volume_type_create(request, name):
return cinderclient(request).volume_types.create(name)
def volume_type_delete(request, volume_type_id):
return cinderclient(request).volume_types.delete(volume_type_id)
def tenant_absolute_limits(request):
limits = cinderclient(request).limits.get().absolute
limits_dict = {}
for limit in limits:
# -1 is used to represent unlimited quotas
if limit.value == -1:
limits_dict[limit.name] = float("inf")
else:
limits_dict[limit.name] = limit.value
return limits_dict
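# Illustrative usage sketch (not from the upstream module): every helper takes the Django
# request so the client can be built from the user's token; the view function is hypothetical.
def _example_volume_overview(request):
    volumes = volume_list(request, search_opts={'all_tenants': 1})
    limits = tenant_absolute_limits(request)
    return volumes, limits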
| apache-2.0 | -6,215,433,780,044,840,000 | 32.527273 | 79 | 0.687816 | false |
stanta/darfchain | darfchain_docker_vagrant/tests/common/test_schema.py | 3 | 6302 | """
This module contains tests related to schema checking, but _not_ tests of granular
schematic properties related to validation.
"""
from unittest.mock import patch
from hypothesis import given
from hypothesis_regex import regex
from pytest import raises
from bigchaindb.common.exceptions import SchemaValidationError
from bigchaindb.common.schema import (
TX_SCHEMA_COMMON, VOTE_SCHEMA, drop_schema_descriptions,
validate_transaction_schema, validate_vote_schema)
SUPPORTED_CRYPTOCONDITION_TYPES = ('threshold-sha-256', 'ed25519-sha-256')
UNSUPPORTED_CRYPTOCONDITION_TYPES = (
'preimage-sha-256', 'prefix-sha-256', 'rsa-sha-256')
################################################################################
# Test of schema utils
def _test_additionalproperties(node, path=''):
"""
Validate that each object node has additionalProperties set, so that
objects with junk keys do not pass as valid.
"""
if isinstance(node, list):
for i, nnode in enumerate(node):
_test_additionalproperties(nnode, path + str(i) + '.')
if isinstance(node, dict):
if node.get('type') == 'object':
assert 'additionalProperties' in node, \
('additionalProperties not set at path:' + path)
for name, val in node.items():
_test_additionalproperties(val, path + name + '.')
def test_transaction_schema_additionalproperties():
_test_additionalproperties(TX_SCHEMA_COMMON)
def test_vote_schema_additionalproperties():
_test_additionalproperties(VOTE_SCHEMA)
def test_drop_descriptions():
node = {
'description': 'abc',
'properties': {
'description': {
'description': ('The property named "description" should stay'
'but description meta field goes'),
},
'properties': {
'description': 'this must go'
},
'any': {
'anyOf': [
{
'description': 'must go'
}
]
}
},
'definitions': {
'wat': {
'description': 'go'
}
}
}
expected = {
'properties': {
'description': {},
'properties': {},
'any': {
'anyOf': [
{}
]
}
},
'definitions': {
'wat': {},
}
}
drop_schema_descriptions(node)
assert node == expected
################################################################################
# Test call transaction schema
def test_validate_transaction_create(create_tx):
validate_transaction_schema(create_tx.to_dict())
def test_validate_transaction_signed_create(signed_create_tx):
validate_transaction_schema(signed_create_tx.to_dict())
def test_validate_transaction_signed_transfer(signed_transfer_tx):
validate_transaction_schema(signed_transfer_tx.to_dict())
def test_validate_transaction_fails():
with raises(SchemaValidationError):
validate_transaction_schema({})
def test_validate_failure_inconsistent():
with patch('jsonschema.validate'):
with raises(SchemaValidationError):
validate_transaction_schema({})
@given(condition_uri=regex(
r'^ni:\/\/\/sha-256;([a-zA-Z0-9_-]{{0,86}})\?fpt=({})'
r'&cost=[0-9]+(?![\n])$'.format('|'.join(
t for t in SUPPORTED_CRYPTOCONDITION_TYPES))))
def test_condition_uri_with_supported_fpt(dummy_transaction, condition_uri):
dummy_transaction['outputs'][0]['condition']['uri'] = condition_uri
validate_transaction_schema(dummy_transaction)
@given(condition_uri=regex(r'^ni:\/\/\/sha-256;([a-zA-Z0-9_-]{{0,86}})\?fpt='
r'({})&cost=[0-9]+(?![\n])$'.format(
'|'.join(UNSUPPORTED_CRYPTOCONDITION_TYPES))))
def test_condition_uri_with_unsupported_fpt(dummy_transaction, condition_uri):
dummy_transaction['outputs'][0]['condition']['uri'] = condition_uri
with raises(SchemaValidationError):
validate_transaction_schema(dummy_transaction)
@given(condition_uri=regex(
r'^ni:\/\/\/sha-256;([a-zA-Z0-9_-]{{0,86}})\?fpt=(?!{})'
r'&cost=[0-9]+(?![\n])$'.format('$|'.join(
t for t in SUPPORTED_CRYPTOCONDITION_TYPES))))
def test_condition_uri_with_unknown_fpt(dummy_transaction, condition_uri):
dummy_transaction['outputs'][0]['condition']['uri'] = condition_uri
with raises(SchemaValidationError):
validate_transaction_schema(dummy_transaction)
@given(condition_uri=regex(
r'^ni:\/\/\/sha-256;([a-zA-Z0-9_-]{0,86})\?fpt=threshold-sha-256'
r'&cost=[0-9]+&subtypes=ed25519-sha-256(?![\n])$'))
def test_condition_uri_with_supported_subtype(dummy_transaction,
condition_uri):
dummy_transaction['outputs'][0]['condition']['uri'] = condition_uri
validate_transaction_schema(dummy_transaction)
@given(condition_uri=regex(
r'^ni:\/\/\/sha-256;([a-zA-Z0-9_-]{0,86})\?fpt=threshold-sha-256&cost='
r'[0-9]+&subtypes=(preimage-sha-256|prefix-sha-256|rsa-sha-256)(?![\n])$'))
def test_condition_uri_with_unsupported_subtype(dummy_transaction,
condition_uri):
dummy_transaction['outputs'][0]['condition']['uri'] = condition_uri
with raises(SchemaValidationError):
validate_transaction_schema(dummy_transaction)
@given(condition_uri=regex(
r'^ni:\/\/\/sha-256;([a-zA-Z0-9_-]{{0,86}})\?fpt=threshold-sha-256'
r'&cost=[0-9]+&subtypes=(?!{})(?![\n])$'.format('$|'.join(
t for t in SUPPORTED_CRYPTOCONDITION_TYPES))))
def test_condition_uri_with_unknown_subtype(dummy_transaction, condition_uri):
dummy_transaction['outputs'][0]['condition']['uri'] = condition_uri
with raises(SchemaValidationError):
validate_transaction_schema(dummy_transaction)
################################################################################
# Test call vote schema
def test_validate_vote(structurally_valid_vote):
validate_vote_schema(structurally_valid_vote)
def test_validate_vote_fails():
with raises(SchemaValidationError):
validate_vote_schema({})
| gpl-3.0 | 3,962,140,046,457,637,000 | 32.88172 | 80 | 0.586163 | false |
GoogleCloudPlatformTraining/cp100-bookshelf | app-engine/bookshelf/model_datastore.py | 1 | 2619 | # Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from flask import current_app
from google.appengine.datastore.datastore_query import Cursor
from google.appengine.ext import ndb
builtin_list = list
def init_app(app):
pass
# [START model]
class Book(ndb.Model):
author = ndb.StringProperty()
description = ndb.StringProperty(indexed=False)
publishedDate = ndb.StringProperty()
title = ndb.StringProperty()
# [END model]
# [START from_datastore]
def from_datastore(entity):
"""Translates Datastore results into the format expected by the
application.
Datastore typically returns:
[Entity{key: (kind, id), prop: val, ...}]
This returns:
{id: id, prop: val, ...}
"""
if not entity:
return None
if isinstance(entity, builtin_list):
entity = entity.pop()
book = {}
book['id'] = entity.key.id()
book['author'] = entity.author
book['description'] = entity.description
book['publishedDate'] = entity.publishedDate
book['title'] = entity.title
return book
# [END from_datastore]
# [START list]
def list(limit=10, cursor=None):
if cursor:
cursor = Cursor(urlsafe=cursor)
query = Book.query().order(Book.title)
entities, cursor, more = query.fetch_page(limit, start_cursor=cursor)
entities = builtin_list(map(from_datastore, entities))
return entities, cursor.urlsafe() if len(entities) == limit else None
# [END list]
# [START read]
def read(id):
book_key = ndb.Key('Book', int(id))
results = book_key.get()
return from_datastore(results)
# [END read]
# [START update]
def update(data, id=None):
if id:
key = ndb.Key('Book', int(id))
book = key.get()
else:
book = Book()
book.author = data['author']
book.description = data['description']
book.publishedDate = data['publishedDate']
book.title = data['title']
book.put()
return from_datastore(book)
create = update
# [END update]
# [START delete]
def delete(id):
key = ndb.Key('Book', int(id))
key.delete()
# [END delete]
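# Illustrative usage sketch (not from the upstream module): a basic CRUD round trip with
# the helpers above; the book data is hypothetical.
def _example_crud():
    book = create({'author': 'A. Author', 'description': 'demo',
                   'publishedDate': '2016', 'title': 'Example Book'})
    fetched = read(book['id'])
    update(dict(fetched, title='Example Book, 2nd ed.'), id=book['id'])
    delete(book['id'])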
| apache-2.0 | -7,738,208,283,967,408,000 | 24.427184 | 74 | 0.668194 | false |
jrg365/gpytorch | test/variational/test_batch_decoupled_variational_strategy.py | 1 | 7445 | #!/usr/bin/env python3
import unittest
import torch
import gpytorch
from gpytorch.test.variational_test_case import VariationalTestCase
def likelihood_cls():
return gpytorch.likelihoods.GaussianLikelihood()
def strategy_cls(model, inducing_points, variational_distribution, learn_inducing_locations):
return gpytorch.variational.BatchDecoupledVariationalStrategy(
model, inducing_points, variational_distribution, learn_inducing_locations
)
def batch_dim_strategy_cls(model, inducing_points, variational_distribution, learn_inducing_locations):
return gpytorch.variational.BatchDecoupledVariationalStrategy(
model, inducing_points, variational_distribution, learn_inducing_locations, mean_var_batch_dim=-1
)
class TestBatchDecoupledVariationalGP(VariationalTestCase, unittest.TestCase):
@property
def batch_shape(self):
return torch.Size([])
@property
def distribution_cls(self):
return gpytorch.variational.CholeskyVariationalDistribution
@property
def likelihood_cls(self):
return likelihood_cls
@property
def mll_cls(self):
return gpytorch.mlls.VariationalELBO
@property
def strategy_cls(self):
return strategy_cls
def test_training_iteration(self, *args, **kwargs):
cg_mock, cholesky_mock = super().test_training_iteration(*args, **kwargs)
self.assertFalse(cg_mock.called)
self.assertEqual(cholesky_mock.call_count, 2) # One for each forward pass, and for computing prior dist
def test_eval_iteration(self, *args, **kwargs):
cg_mock, cholesky_mock = super().test_eval_iteration(*args, **kwargs)
self.assertFalse(cg_mock.called)
self.assertEqual(cholesky_mock.call_count, 1) # One to compute cache, that's it!
class TestBatchDecoupledPredictiveGP(TestBatchDecoupledVariationalGP):
@property
def mll_cls(self):
return gpytorch.mlls.PredictiveLogLikelihood
class TestBatchDecoupledRobustVGP(TestBatchDecoupledVariationalGP):
@property
def mll_cls(self):
return gpytorch.mlls.GammaRobustVariationalELBO
class TestMeanFieldBatchDecoupledVariationalGP(TestBatchDecoupledVariationalGP):
@property
def distribution_cls(self):
return gpytorch.variational.MeanFieldVariationalDistribution
class TestMeanFieldBatchDecoupledPredictiveGP(TestBatchDecoupledPredictiveGP):
@property
def distribution_cls(self):
return gpytorch.variational.MeanFieldVariationalDistribution
class TestMeanFieldBatchDecoupledRobustVGP(TestBatchDecoupledRobustVGP):
@property
def distribution_cls(self):
return gpytorch.variational.MeanFieldVariationalDistribution
class TestBatchDecoupledVariationalGPBatchDim(TestBatchDecoupledVariationalGP, unittest.TestCase):
def _make_model_and_likelihood(
self,
num_inducing=16,
batch_shape=torch.Size([]),
inducing_batch_shape=torch.Size([]),
strategy_cls=gpytorch.variational.VariationalStrategy,
distribution_cls=gpytorch.variational.CholeskyVariationalDistribution,
constant_mean=True,
):
class _SVGPRegressionModel(gpytorch.models.ApproximateGP):
def __init__(self, inducing_points):
variational_distribution = distribution_cls(num_inducing, batch_shape=batch_shape)
variational_strategy = strategy_cls(
self, inducing_points, variational_distribution, learn_inducing_locations=True
)
super().__init__(variational_strategy)
if constant_mean:
self.mean_module = gpytorch.means.ConstantMean(batch_shape=batch_shape + torch.Size([2]))
self.mean_module.initialize(constant=1.0)
else:
self.mean_module = gpytorch.means.ZeroMean()
self.covar_module = gpytorch.kernels.ScaleKernel(
gpytorch.kernels.RBFKernel(batch_shape=batch_shape + torch.Size([2])),
batch_shape=batch_shape + torch.Size([2]),
)
def forward(self, x):
mean_x = self.mean_module(x)
covar_x = self.covar_module(x)
latent_pred = gpytorch.distributions.MultivariateNormal(mean_x, covar_x)
return latent_pred
inducing_points = torch.randn(num_inducing, 2).repeat(*inducing_batch_shape, 1, 1)
return _SVGPRegressionModel(inducing_points), self.likelihood_cls()
@property
def distribution_cls(self):
return gpytorch.variational.CholeskyVariationalDistribution
@property
def mll_cls(self):
return gpytorch.mlls.PredictiveLogLikelihood
class TestMeanFieldBatchDecoupledVariationalGPBatchDim(TestBatchDecoupledVariationalGPBatchDim, unittest.TestCase):
@property
def distribution_cls(self):
return gpytorch.variational.MeanFieldVariationalDistribution
class TestBatchDecoupledVariationalGPOtherBatchDim(TestBatchDecoupledVariationalGP, unittest.TestCase):
def _make_model_and_likelihood(
self,
num_inducing=16,
batch_shape=torch.Size([]),
inducing_batch_shape=torch.Size([]),
strategy_cls=gpytorch.variational.VariationalStrategy,
distribution_cls=gpytorch.variational.CholeskyVariationalDistribution,
constant_mean=True,
):
class _SVGPRegressionModel(gpytorch.models.ApproximateGP):
def __init__(self, inducing_points):
variational_distribution = distribution_cls(num_inducing, batch_shape=batch_shape)
variational_strategy = strategy_cls(
self, inducing_points, variational_distribution, learn_inducing_locations=True
)
super().__init__(variational_strategy)
if constant_mean:
self.mean_module = gpytorch.means.ConstantMean(
batch_shape=batch_shape[:-1] + torch.Size([2]) + batch_shape[-1:]
)
self.mean_module.initialize(constant=1.0)
else:
self.mean_module = gpytorch.means.ZeroMean()
self.covar_module = gpytorch.kernels.ScaleKernel(
gpytorch.kernels.RBFKernel(batch_shape=batch_shape[:-1] + torch.Size([2]) + batch_shape[-1:]),
batch_shape=batch_shape[:-1] + torch.Size([2]) + batch_shape[-1:],
)
def forward(self, x):
mean_x = self.mean_module(x)
covar_x = self.covar_module(x)
latent_pred = gpytorch.distributions.MultivariateNormal(mean_x, covar_x)
return latent_pred
inducing_points = torch.randn(num_inducing, 2).repeat(*inducing_batch_shape, 1, 1)
return _SVGPRegressionModel(inducing_points), self.likelihood_cls()
@property
def strategy_cls(self):
def _batch_dim_strategy_cls(model, inducing_points, variational_distribution, learn_inducing_locations):
return gpytorch.variational.BatchDecoupledVariationalStrategy(
model, inducing_points, variational_distribution, learn_inducing_locations, mean_var_batch_dim=-2
)
return _batch_dim_strategy_cls
@property
def batch_shape(self):
return torch.Size([3])
if __name__ == "__main__":
unittest.main()
| mit | 97,202,635,179,650,930 | 37.57513 | 115 | 0.670383 | false |
I-sektionen/i-portalen | wsgi/iportalen_django/iportalen/settings.py | 1 | 10793 | """
Django settings for iportalen project.
Generated by 'django-admin startproject' using Django 1.8.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
DJ_PROJECT_DIR = os.path.dirname(__file__)
BASE_DIR = os.path.dirname(DJ_PROJECT_DIR)
WSGI_DIR = os.path.dirname(BASE_DIR)
REPO_DIR = os.path.dirname(WSGI_DIR)
#BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Used to determine if being run on Openshift, Jenkins or local. Determines DB-connection settings.
ON_PASS = 'OPENSHIFT_REPO_DIR' in os.environ
ON_CIRCLE = 'ON_CIRCLE' in os.environ
ON_JENKINS = 'JENKINS_SERVER_IPORTALEN' in os.environ
ON_AWS = 'ON_AWS' in os.environ
ON_LOCAL_DOCKER = 'ON_LOCAL_DOCKER' in os.environ
if ON_AWS or ON_PASS or ON_JENKINS or ON_LOCAL_DOCKER:
ALLOWED_HOSTS = ['*']
DEBUG = False
else:
ALLOWED_HOSTS = ['*']
DEBUG = True
ADMINS = [('Webmaster', '[email protected]')]
MANAGERS = ADMINS
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
if not (ON_PASS or ON_AWS):
SECRET_KEY = '^+^i^1i94%j-hi+107xw(vf^mz4hg--#w0mw93+kc#&4vc=#=@'
else:
SECRET_KEY = os.environ.get('DJANGO_SECRET_KEY')
if ON_PASS:
ssl = False
try:
s = str(os.environ.get('SSL_ENABLED'))
if s == str("TRUE"):
ssl = True
except:
pass
if ssl:
SECURE_SSL_REDIRECT = True
SESSION_COOKIE_SECURE = True
CSRF_COOKIE_SECURE = True
# Application definition
INSTALLED_APPS = (
'dal',
'dal_select2',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.sites',
'alumni_portal',
'speaker_list',
'iportalen',
'votings',
'hero',
'storages',
'tags',
'user_managements',
'articles',
'events',
'organisations',
'bookings',
'course_evaluations',
'faq',
'django.contrib.sitemaps',
'rest_framework',
'django_nose',
'corsheaders',
'letsencrypt',
'fika_penalty',
'liu_crawler',
'webgroup',
'exchange_portal',
'thesis_portal'
)
if not ON_PASS or ON_AWS:
INSTALLED_APPS = INSTALLED_APPS + ('debug_toolbar',)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'corsheaders.middleware.CorsMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
'csp.middleware.CSPMiddleware',
)
AUTH_USER_MODEL = 'user_managements.IUser'
ROOT_URLCONF = 'iportalen.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [(os.path.join(os.path.dirname(BASE_DIR), 'templates'))],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'django.template.context_processors.media',
],
},
},
]
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
NOSE_ARGS = [
'--with-coverage'
]
WSGI_APPLICATION = 'iportalen.wsgi.application'
if ON_AWS or ON_LOCAL_DOCKER:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': os.environ['AWS_DB_NAME'],
'USER': os.environ['AWS_DB_USERNAME'],
'PASSWORD': os.environ['AWS_DB_PASSWORD'],
'HOST': os.environ['AWS_DB_HOSTNAME'],
'PORT': os.environ['AWS_DB_PORT']
}
}
elif ON_PASS:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': os.environ['OPENSHIFT_APP_NAME'],
'USER': os.environ['OPENSHIFT_MYSQL_DB_USERNAME'],
'PASSWORD': os.environ['OPENSHIFT_MYSQL_DB_PASSWORD'],
'HOST': os.environ['OPENSHIFT_MYSQL_DB_HOST'],
'PORT': os.environ['OPENSHIFT_MYSQL_DB_PORT']
}
}
elif ON_JENKINS:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': os.environ['JENKINS_DB_NAME'],
'USER': 'mysql_jenkins',
'PASSWORD': '123123123HEJJE', # Securely generated password.
'HOST': 'localhost',
'PORT': '3306'
}
}
elif ON_CIRCLE:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'circle_test',
'USER': 'ubuntu'
}
}
else:
from .helpers import get_mysql_credentials
mysql = get_mysql_credentials() # Local db credentials.
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'django_iportalen',
'USER': mysql["user"],
'PASSWORD': mysql["password"],
'HOST': mysql["host"],
'PORT': mysql["port"],
}
}
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'sv-se' # en-us
TIME_ZONE = 'Europe/Stockholm'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Target folder of collectstatic.
# Staticfiles settings for local dev environment:
if not (ON_PASS or ON_AWS):
STATIC_ROOT = os.path.join(BASE_DIR, "../static/")
STATIC_URL = "/static/"
STATICFILES_DIRS = (
os.path.join(BASE_DIR, "local_static"),
)
MEDIA_URL = "/media/"
MEDIA_ROOT = os.path.join(BASE_DIR, "../media/")
# This is the s3 settings for Openshift.
if ON_PASS or ON_AWS:
STATIC_ROOT = os.path.normpath(os.path.join(BASE_DIR, "../static/"))
MEDIA_ROOT = os.path.join(os.path.dirname(BASE_DIR), "media")
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
AWS_STORAGE_BUCKET_NAME = os.environ.get('AWS_STORAGE_BUCKET_NAME')
S3_URL = 'https://{0}.s3.amazonaws.com/'.format(AWS_STORAGE_BUCKET_NAME)
STATIC_URL = os.environ.get('STATIC_URL', S3_URL + 'static/')
DEFAULT_FILE_STORAGE = 'iportalen.storage.MediaRootS3BotoStorage'
STATICFILES_STORAGE = 'iportalen.storage.StaticRootS3BotoStorage'
MEDIA_URL = os.environ.get('MEDIA_URL', S3_URL + 'client/')
AWS_HEADERS = { # see http://developer.yahoo.com/performance/rules.html#expires
'Expires': 'Thu, 31 Dec 2099 20:00:00 GMT',
'Cache-Control': 'max-age=94608000',
}
LOGIN_URL = 'login_view'
REST_FRAMEWORK = {
# Use Django's standard `django.contrib.auth` permissions,
# or allow read-only access for unauthenticated users.
'DEFAULT_PERMISSION_CLASSES': [
'rest_framework.permissions.DjangoModelPermissionsOrAnonReadOnly'
],
'DEFAULT_FILTER_BACKENDS': ('rest_framework.filters.DjangoFilterBackend',),
'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.LimitOffsetPagination'
}
# Email settings:
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend' # This is a dummy backend which prints emails as a
# normal print() statement (i.e. to stdout)
EMAIL_HOST_USER = '[email protected]'
if ON_PASS or ON_AWS:
send_email = False
try:
s = str(os.environ.get('SEND_EMAIL'))
if s == str('TRUE'):
send_email = True
except:
pass
if send_email:
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_USE_TLS = True
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_PORT = 587
EMAIL_HOST_PASSWORD = os.environ.get('EMAIL_PASSWORD')
SITE_ID = 2
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse',
},
'require_debug_true': {
'()': 'django.utils.log.RequireDebugTrue',
},
},
'handlers': {
'console': {
'level': 'ERROR',
'filters': ['require_debug_true'],
'class': 'logging.StreamHandler',
},
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django': {
'level': 'ERROR',
'handlers': ['console']
},
'django.request': {
'level': 'ERROR',
'handlers': ['console']
},
'django.template': {
'level': 'ERROR',
'handlers': ['console']
},
'django.db.backends': {
'level': 'ERROR',
'handlers': ['console']
},
'root': {
'level': 'ERROR',
'handlers': ['console']
},
'core.handlers': {
'level': 'ERROR',
'handlers': ['console']
},
},
}
CORS_ORIGIN_ALLOW_ALL = False
if ON_PASS or ON_AWS:
CORS_ORIGIN_WHITELIST = (
'utlandsportalen-ember.herokuapp.com',
)
if not (ON_PASS or ON_AWS):
CORS_ORIGIN_WHITELIST = (
'127.0.0.1:4200',
'127.0.0.1:1337',
)
CSP_DEFAULT_SRC = (
"'self'", "'unsafe-eval'", "'unsafe-inline'", '*.s3.amazonaws.com/', 's3.amazonaws.com/',
'https://maxcdn.bootstrapcdn.com/', 'google-analytics.com/',
'*.googleapis.com/', 'https://cdnjs.cloudflare.com/', '*.gstatic.com/',
'*.github.com/repos/I-sektionen/', 'data:', "s3.eu-central-1.amazonaws.com/", 'https://calendar.google.com/'
)
CSP_SCRIPT_SRC = (
"'self'", "'unsafe-eval'", "'unsafe-inline'",
'*.googleapis.com/','*.s3.amazonaws.com/', 's3.amazonaws.com/',
'iportalen/js/jquery.datetimepicker.full.min.js', '*.googletagmanager.com/',
'google-analytics.com/', 'https://www.gstatic.com/', 'https://www.google-analytics.com/analytics.js',
'https://cdnjs.cloudflare.com/ajax/libs/fancybox/2.1.5/jquery.fancybox.min.js', 'https://calendar.google.com/'
)
| mit | 8,324,262,706,845,594,000 | 28.651099 | 116 | 0.603632 | false |
mgrygoriev/CloudFerry | evacuation/actions/evacuate_vms.py | 1 | 1782 | # Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the License);
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an AS IS BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from cloudferrylib.base.action import action
from cloudferrylib.utils import log
from condensation import process
from condensation import action as c_action
import data_storage
LOG = log.getLogger(__name__)
class Evacuate(action.Action):
def __init__(self, iteration, **kwargs):
self.iteration = iteration
super(Evacuate, self).__init__(**kwargs)
def run(self, **kwargs):
compute_resource = self.cloud.resources['compute']
cloud = process.SOURCE
LOG.debug("getting info on cloud {cloud} "
"iteration {iteration} from db".format(
cloud=cloud,
iteration=self.iteration))
info = data_storage.get(
c_action.get_key(self.iteration, cloud))
if not info:
LOG.info("cannot find info in db on {cloud}-{iteration}".format(
cloud=cloud,
iteration=self.iteration))
return {}
actions = json.loads(info).get(c_action.CONDENSE)
LOG.debug("live-migrating vm one by one")
for vm_id, dest_host in actions:
compute_resource.live_migrate_vm(vm_id, dest_host)
return {}
| apache-2.0 | 1,393,073,314,021,618,000 | 33.941176 | 76 | 0.657688 | false |
alonisser/Open-Knesset | mks/migrations/0018_recalc_avg_weekly_presence_hours.py | 15 | 10799 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
"Write your forwards methods here."
for mk in orm.Member.objects.all():
hours = orm.WeeklyPresence.objects.filter(member=mk).values_list('hours',flat=True)
if len(hours):
mk.average_weekly_presence_hours = round(sum(hours)/len(hours),1)
mk.save()
def backwards(self, orm):
"Write your backwards methods here."
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'mks.correlation': {
'Meta': {'object_name': 'Correlation'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'm1': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'m1'", 'to': "orm['mks.Member']"}),
'm2': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'m2'", 'to': "orm['mks.Member']"}),
'normalized_score': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'not_same_party': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'mks.member': {
'Meta': {'object_name': 'Member'},
'area_of_residence': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'average_weekly_presence_hours': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'bills_stats_approved': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'bills_stats_first': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'bills_stats_pre': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'bills_stats_proposed': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'blog': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['planet.Blog']", 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'current_party': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'members'", 'null': 'True', 'to': "orm['mks.Party']"}),
'current_role_descriptions': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'date_of_death': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'family_status': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'fax': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'gender': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'img_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'is_current': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'number_of_children': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'parties': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'all_members'", 'symmetrical': 'False', 'through': "orm['mks.Membership']", 'to': "orm['mks.Party']"}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'place_of_birth': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'place_of_residence': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'place_of_residence_lat': ('django.db.models.fields.CharField', [], {'max_length': '16', 'null': 'True', 'blank': 'True'}),
'place_of_residence_lon': ('django.db.models.fields.CharField', [], {'max_length': '16', 'null': 'True', 'blank': 'True'}),
'residence_centrality': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'residence_economy': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'start_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'year_of_aliyah': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
'mks.membership': {
'Meta': {'object_name': 'Membership'},
'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'member': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mks.Member']"}),
'party': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mks.Party']"}),
'start_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'})
},
'mks.party': {
'Meta': {'object_name': 'Party'},
'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_coalition': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'number_of_members': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'number_of_seats': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'start_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'})
},
'mks.weeklypresence': {
'Meta': {'object_name': 'WeeklyPresence'},
'date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'hours': ('django.db.models.fields.FloatField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'member': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mks.Member']"})
},
'planet.blog': {
'Meta': {'object_name': 'Blog'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'unique': 'True', 'max_length': '1024', 'db_index': 'True'})
}
}
complete_apps = ['mks']
| bsd-3-clause | -4,717,993,240,307,620,000 | 77.824818 | 200 | 0.543198 | false |
cisco-sas/kitty | tests/test_model_low_level_calculated.py | 1 | 26550 | # -*- coding: utf-8 -*-
# Copyright (C) 2016 Cisco Systems, Inc. and/or its affiliates. All rights reserved.
#
# This file is part of Kitty.
#
# Kitty is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# Kitty is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Kitty. If not, see <http://www.gnu.org/licenses/>.
'''
Tests calculated fields
'''
from common import metaTest, BaseTestCase
from bitstring import Bits
import hashlib
from struct import unpack
from kitty.model import String, Static
from kitty.model import BitField, UInt32
from kitty.model import Clone, Size, SizeInBytes, Md5, Sha1, Sha224, Sha256, Sha384, Sha512, Hash
from kitty.model import ElementCount, IndexOf, Offset, AbsoluteOffset, CalculatedBits, CalculatedStr
from kitty.model import Container
from kitty.model import ENC_INT_BE
from kitty.core import KittyException
class CalculatedTestCase(BaseTestCase):
__meta__ = True
def setUp(self, cls=None):
super(CalculatedTestCase, self).setUp(cls)
self.depends_on_name = 'depends_on'
self.depends_on_value = 'the_value'
self.uut_name = 'uut'
def calculate(self, field):
'''
:param field: field to base calculation on
:return: calculated value
'''
raise NotImplementedError
def get_default_field(self, fuzzable=False):
return self.cls(self.depends_on_name, fuzzable=fuzzable, name=self.uut_name)
def get_original_field(self):
return String(self.depends_on_value, name=self.depends_on_name)
@metaTest
def testCalculatedAfterField(self):
original_field = self.get_original_field()
calculated_field = self.get_default_field()
container = Container([original_field, calculated_field])
expected = self.calculate(original_field)
actual = calculated_field.render()
self.assertEqual(expected, actual)
while container.mutate():
expected = self.calculate(original_field)
actual = calculated_field.render()
self.assertEqual(expected, actual)
@metaTest
def testCalculatedBeforeField(self):
original_field = self.get_original_field()
calculated_field = self.get_default_field()
container = Container([calculated_field, original_field])
expected = self.calculate(original_field)
actual = calculated_field.render()
self.assertEqual(expected, actual)
while container.mutate():
expected = self.calculate(original_field)
actual = calculated_field.render()
self.assertEqual(expected, actual)
@metaTest
def testAbsoluteNameExists(self):
original_field = self.get_original_field()
absolute_name = '/B/C/' + original_field.get_name()
self.depends_on_name = absolute_name
calculated_field = self.get_default_field()
container = Container(name='A', fields=[
Container(name='B', fields=[
Container(name='C', fields=[
original_field
]),
calculated_field
])
])
expected = self.calculate(original_field)
actual = calculated_field.render()
self.assertEqual(expected, actual)
while container.mutate():
expected = self.calculate(original_field)
actual = calculated_field.render()
self.assertEqual(expected, actual)
@metaTest
def testAbsoluteNameDoesNotExist(self):
original_field = self.get_original_field()
absolute_name = '/B/' + original_field.get_name()
self.depends_on_name = absolute_name
calculated_field = self.get_default_field()
container = Container(name='A', fields=[
Container(name='B', fields=[
Container(name='C', fields=[
original_field
]),
calculated_field
])
])
with self.assertRaises(KittyException):
container.render()
@metaTest
def testNameDoesNotExist(self):
original_field = self.get_original_field()
self.depends_on_name = 'not really'
calculated_field = self.get_default_field()
container = Container([original_field, calculated_field])
with self.assertRaises(KittyException):
container.render()
@metaTest
def testInvalidFieldNameRaisesException(self):
with self.assertRaises(KittyException):
self.uut_name = 'invalid/name'
self.get_default_field()
@metaTest
def testNameIsNotAString(self):
self.depends_on_name = 1
with self.assertRaises(KittyException):
self.get_default_field()
class CalculatedBitsTests(CalculatedTestCase):
__meta__ = False
def calc_func(self, bits):
return bits
def setUp(self, cls=CalculatedBits):
super(CalculatedBitsTests, self).setUp(cls)
def get_default_field(self, fuzzable=False, func=None):
if func is None:
func = self.calc_func
return self.cls(self.depends_on_name, func=func, fuzzable=fuzzable, name=self.uut_name)
def get_original_field(self):
return String(self.depends_on_value, name=self.depends_on_name)
def calculate(self, field):
return field.render()
def testExceptionWhenFuncNotCallable(self):
with self.assertRaises(KittyException):
self.get_default_field(func=123)
def testExceptionWhenFuncRaisesException(self):
def func(bits):
raise Exception('boom')
with self.assertRaises(KittyException):
self.get_default_field(func=func)
def testExceptionWhenFuncReturnsNone(self):
def func(bits):
return None
with self.assertRaises(KittyException):
self.get_default_field(func=func)
def testExceptionWhenFuncReturnsString(self):
def func(bits):
return 'boom'
with self.assertRaises(KittyException):
self.get_default_field(func=func)
def testExceptionWhenFuncReturnsInt(self):
def func(bits):
return 1
with self.assertRaises(KittyException):
self.get_default_field(func=func)
class CalculatedStrTests(CalculatedTestCase):
__meta__ = False
def calc_func(self, s):
return s
def setUp(self, cls=CalculatedStr):
super(CalculatedStrTests, self).setUp(cls)
def get_default_field(self, fuzzable=False, func=None):
if func is None:
func = self.calc_func
return self.cls(self.depends_on_name, func=func, fuzzable=fuzzable, name=self.uut_name)
def get_original_field(self):
return String(self.depends_on_value, name=self.depends_on_name)
def calculate(self, field):
return field.render()
def testExceptionWhenFuncNotCallable(self):
with self.assertRaises(KittyException):
self.get_default_field(func=123)
def testExceptionWhenFuncRaisesException(self):
def func(s):
raise Exception('boom')
with self.assertRaises(KittyException):
self.get_default_field(func=func)
def testExceptionWhenFuncReturnsNone(self):
def func(s):
return None
with self.assertRaises(KittyException):
self.get_default_field(func=func)
def testExceptionWhenFuncReturnsBits(self):
def func(s):
return Bits('')
with self.assertRaises(KittyException):
self.get_default_field(func=func)
def testExceptionWhenFuncReturnsInt(self):
def func(s):
return 1
with self.assertRaises(KittyException):
self.get_default_field(func=func)
class CloneTests(CalculatedTestCase):
__meta__ = False
def setUp(self, cls=Clone):
super(CloneTests, self).setUp(cls)
def calculate(self, field):
return field.render()
class ElementCountTests(CalculatedTestCase):
__meta__ = False
def setUp(self, cls=ElementCount):
super(ElementCountTests, self).setUp(cls)
self.length = 32
self.bit_field = BitField(value=0, length=self.length)
def get_default_field(self, fuzzable=False, correction=None):
return self.cls(self.depends_on_name, correction=correction, length=self.length, fuzzable=fuzzable, name=self.uut_name)
def calculate(self, field):
self.bit_field.set_current_value(len(field.get_rendered_fields()))
return self.bit_field.render()
def testContainerWithInternalContainer(self):
container = Container(
name=self.depends_on_name,
fields=[
String('abc'),
String('def'),
Container(
name='counts_as_one',
fields=[
String('ghi'),
String('jkl'),
])
])
uut = self.get_default_field()
full = Container([container, uut])
full.render()
self.assertEqual(uut.render(), self.calculate(container))
del full
def testInternalContainer(self):
internal_container = Container(
name=self.depends_on_name,
fields=[
String('ghi', name='field3'),
String('jkl', name='field4'),
])
container = Container(
name='this_doesnt_count',
fields=[
String('abc', name='field1'),
String('def', name='field2'),
internal_container
])
uut = self.get_default_field()
full = Container([container, uut])
full.render()
self.assertEqual(uut.render(), self.calculate(internal_container))
del full
def testInvalidCorrectionStr(self):
with self.assertRaises(KittyException):
self.get_default_field(correction='boom')
class IndexOfTestCase(CalculatedTestCase):
__meta__ = False
def setUp(self, cls=IndexOf):
super(IndexOfTestCase, self).setUp(cls)
self.length = 32
self.bit_field = BitField(value=0, length=self.length)
def get_default_field(self, fuzzable=False):
return self.cls(self.depends_on_name, length=self.length, fuzzable=fuzzable, name=self.uut_name)
def calculate(self, field):
rendered = field.enclosing.get_rendered_fields()
if field in rendered:
value = rendered.index(field)
else:
value = len(rendered)
self.bit_field.set_current_value(value)
return self.bit_field.render()
def _testCorrectIndex(self, expected_index):
field_list = [String('%d' % i) for i in range(20)]
field_list[expected_index] = self.get_original_field()
uut = self.get_default_field()
t = Container(name='level1', fields=[uut, Container(name='level2', fields=field_list)])
rendered = uut.render().tobytes()
result = unpack('>I', rendered)[0]
self.assertEqual(result, expected_index)
del t
def testCorrectIndexFirst(self):
self._testCorrectIndex(0)
def testCorrectIndexMiddle(self):
self._testCorrectIndex(10)
def testCorrectIndexLast(self):
self._testCorrectIndex(19)
def testFieldNotRenderedAlone(self):
expected_index = 0
uut = self.get_default_field()
the_field = Static(name=self.depends_on_name, value='')
t = Container(name='level1', fields=[uut, Container(name='level2', fields=the_field)])
rendered = uut.render().tobytes()
result = unpack('>I', rendered)[0]
self.assertEqual(result, expected_index)
del t
def testFieldNotRenderedWithOtherFields(self):
expected_index = 3
uut = self.get_default_field()
fields = [
Static(name=self.depends_on_name, value=''),
Static('field1'),
Static('field2'),
Static('field3'),
]
t = Container(name='level1', fields=[uut, Container(name='level2', fields=fields)])
rendered = uut.render().tobytes()
result = unpack('>I', rendered)[0]
self.assertEqual(result, expected_index)
del t
class SizeTests(CalculatedTestCase):
__meta__ = False
def setUp(self, cls=Size, length=32):
super(SizeTests, self).setUp(cls)
self.bit_field = BitField(value=0, length=length)
self.length = length
def get_default_field(self, length=None, calc_func=None, fuzzable=False):
if length is None:
length = self.length
if calc_func is None:
return self.cls(self.depends_on_name, length=length, fuzzable=fuzzable, name=self.uut_name)
else:
return self.cls(self.depends_on_name, length=length, calc_func=calc_func, fuzzable=fuzzable, name=self.uut_name)
def calculate(self, field, calc_func=None):
value = field.render()
if calc_func:
val = calc_func(value)
else:
val = len(value.bytes)
self.bit_field.set_current_value(val)
return self.bit_field.render()
def testCustomFuncValid(self):
def func(x):
return len(x)
original_field = self.get_original_field()
calculated_field = self.get_default_field(calc_func=func)
container = Container([original_field, calculated_field])
expected = self.calculate(original_field, calc_func=func)
actual = calculated_field.render()
self.assertEqual(expected, actual)
while container.mutate():
expected = self.calculate(original_field, calc_func=func)
actual = calculated_field.render()
self.assertEqual(expected, actual)
def testInvalidLength0(self):
with self.assertRaises(KittyException):
self.cls(self.depends_on_name, length=0)
def testInvalidLengthNegative(self):
with self.assertRaises(KittyException):
self.cls(self.depends_on_name, length=-3)
def testSizeInclusiveAlone(self):
self.length = 32
container = Container(
name=self.depends_on_name,
fields=[
self.get_default_field()
])
rendered = container.render()
self.assertEqual(len(rendered), self.length)
self.assertEqual(unpack('>I', rendered.tobytes())[0], self.length // 8)
class SizeInBytesTest(CalculatedTestCase):
__meta__ = False
def setUp(self, cls=SizeInBytes, length=32):
super(SizeInBytesTest, self).setUp(cls)
self.bit_field = BitField(value=0, length=length)
self.length = length
def get_default_field(self, fuzzable=False):
return self.cls(self.depends_on_name, length=self.length, fuzzable=fuzzable, name=self.uut_name)
def calculate(self, field):
value = field.render()
self.bit_field.set_current_value(len(value.bytes))
return self.bit_field.render()
class OffsetTests(BaseTestCase):
__meta__ = False
def setUp(self):
super(OffsetTests, self).setUp(Offset)
self.frm = None
self.to = UInt32(name='to', value=1)
self.target_field = self.to
self.uut_len = 32
self.correction = 0
self.encoder = ENC_INT_BE
self.fuzzable = False
self.uut_name = 'uut'
def get_default_field(self, fuzzable=False):
if fuzzable is None:
fuzzable = self.fuzzable
return self.cls(
self.frm,
self.target_field,
self.uut_len,
correction=self.correction,
encoder=self.encoder,
fuzzable=fuzzable,
name=self.uut_name
)
def testAbsoluteOffsetOfPostField(self):
uut = self.get_default_field()
container = Container(name='container', fields=[uut, self.to])
container.render()
uut_rendered = uut.render()
uut_val = unpack('>I', uut_rendered.tobytes())[0]
self.assertEqual(len(uut_rendered), uut_val)
self.assertEqual(32, uut_val)
def testAbsoluteOffsetOfPostFieldFixed(self):
uut = self.get_default_field()
container = Container(name='container', fields=[uut, self.to])
container.render()
uut_rendered = uut.render()
uut_val = unpack('>I', uut_rendered.tobytes())[0]
self.assertEqual(32, uut_val)
def testAbsoluteOffsetOfPreFieldAtTheBeginning(self):
uut = self.get_default_field()
container = Container(name='container', fields=[self.to, uut])
container.render()
uut_rendered = uut.render()
uut_val = unpack('>I', uut_rendered.tobytes())[0]
self.assertEqual(0, uut_val)
def testAbsoluteOffsetOfPreFieldNotAtTheBeginning(self):
uut = self.get_default_field()
pre_field = String(name='first', value='first')
container = Container(name='container', fields=[pre_field, self.to, uut])
while container.mutate():
container.render()
uut_rendered = uut.render()
uut_val = unpack('>I', uut_rendered.tobytes())[0]
self.assertEqual(len(pre_field.render()), uut_val)
def testDefaultCorrectionFunctionIsBytes(self):
self.correction = None
uut = self.get_default_field()
pre_field = String(name='first', value='first')
container = Container(name='container', fields=[pre_field, self.to, uut])
while container.mutate():
container.render()
uut_rendered = uut.render()
uut_val = unpack('>I', uut_rendered.tobytes())[0]
self.assertEqual(len(pre_field.render().tobytes()), uut_val)
def testCorrectionInt(self):
self.correction = 5
uut = self.get_default_field()
pre_field = String(name='first', value='first')
container = Container(name='container', fields=[pre_field, self.to, uut])
while container.mutate():
container.render()
uut_rendered = uut.render()
uut_val = unpack('>I', uut_rendered.tobytes())[0]
self.assertEqual(len(pre_field.render()) + 5, uut_val)
def testResolveTargetFieldByName(self):
self.target_field = 'to'
uut = self.get_default_field()
container = Container(name='container', fields=[uut, self.to])
container.render()
uut_rendered = uut.render()
uut_val = unpack('>I', uut_rendered.tobytes())[0]
self.assertEqual(len(uut_rendered), uut_val)
self.assertEqual(32, uut_val)
def testResolveFieldByAbsoluteName(self):
self.target_field = '/B/C/to'
uut = self.get_default_field()
container = Container(name='A', fields=[
uut,
Container(name='B', fields=[
Container(name='C', fields=[
self.to,
Container(name='D', fields=[
UInt32(name='E', value=1)
]),
]),
]),
])
container.render()
uut_rendered = uut.render()
uut_val = unpack('>I', uut_rendered.tobytes())[0]
self.assertEqual(len(uut_rendered), uut_val)
self.assertEqual(32, uut_val)
@metaTest
def testInvalidFieldNameRaisesException(self):
with self.assertRaises(KittyException):
self.uut_name = 'invalid/name'
self.get_default_field()
class AbsoluteOffsetTests(BaseTestCase):
__meta__ = False
def setUp(self):
super(AbsoluteOffsetTests, self).setUp(AbsoluteOffset)
self.to = UInt32(name='to', value=1)
self.target_field = self.to
self.uut_len = 32
self.correction = 0
self.encoder = ENC_INT_BE
self.fuzzable = False
self.uut_name = 'uut'
def get_default_field(self):
return self.cls(
self.target_field,
self.uut_len,
correction=self.correction,
encoder=self.encoder,
fuzzable=self.fuzzable,
name=self.uut_name
)
def testAbsoluteOffsetOfPostField(self):
uut = self.get_default_field()
container = Container(name='container', fields=[uut, self.to])
container.render()
uut_rendered = uut.render()
uut_val = unpack('>I', uut_rendered.tobytes())[0]
self.assertEqual(len(uut_rendered), uut_val)
self.assertEqual(32, uut_val)
def testAbsoluteOffsetOfPostFieldFixed(self):
uut = self.get_default_field()
container = Container(name='container', fields=[uut, self.to])
container.render()
uut_rendered = uut.render()
uut_val = unpack('>I', uut_rendered.tobytes())[0]
self.assertEqual(32, uut_val)
def testAbsoluteOffsetOfPreFieldAtTheBeginning(self):
uut = self.get_default_field()
container = Container(name='container', fields=[self.to, uut])
container.render()
uut_rendered = uut.render()
uut_val = unpack('>I', uut_rendered.tobytes())[0]
self.assertEqual(0, uut_val)
def testAbsoluteOffsetOfPreFieldNotAtTheBeginning(self):
uut = self.get_default_field()
pre_field = String(name='first', value='first')
container = Container(name='container', fields=[pre_field, self.to, uut])
while container.mutate():
container.render()
uut_rendered = uut.render()
uut_val = unpack('>I', uut_rendered.tobytes())[0]
self.assertEqual(len(pre_field.render()), uut_val)
def testDefaultCorrectionFunctionIsBytes(self):
self.correction = None
uut = self.get_default_field()
pre_field = String(name='first', value='first')
container = Container(name='container', fields=[pre_field, self.to, uut])
while container.mutate():
container.render()
uut_rendered = uut.render()
uut_val = unpack('>I', uut_rendered.tobytes())[0]
self.assertEqual(len(pre_field.render().tobytes()), uut_val)
def testCorrectionInt(self):
self.correction = 5
uut = self.get_default_field()
pre_field = String(name='first', value='first')
container = Container(name='container', fields=[pre_field, self.to, uut])
while container.mutate():
container.render()
uut_rendered = uut.render()
uut_val = unpack('>I', uut_rendered.tobytes())[0]
self.assertEqual(len(pre_field.render()) + 5, uut_val)
def testResolveFieldByAbsoluteName(self):
self.target_field = '/B/C/to'
uut = self.get_default_field()
container = Container(name='A', fields=[
uut,
Container(name='B', fields=[
Container(name='C', fields=[
self.to,
Container(name='D', fields=[
UInt32(name='E', value=1)
]),
]),
]),
])
container.render()
uut_rendered = uut.render()
uut_val = unpack('>I', uut_rendered.tobytes())[0]
self.assertEqual(len(uut_rendered), uut_val)
self.assertEqual(32, uut_val)
@metaTest
def testInvalidFieldNameRaisesException(self):
with self.assertRaises(KittyException):
self.uut_name = 'invalid/name'
self.get_default_field()
class HashTests(CalculatedTestCase):
__meta__ = True
def setUp(self, cls=None, hasher=None):
super(HashTests, self).setUp(cls)
self.hasher = hasher
def calculate(self, field):
value = field.render()
digest = self.hasher(value.bytes).digest()
return Bits(bytes=digest)
class GenericHashTests(CalculatedTestCase):
__meta__ = False
def setUp(self):
super(GenericHashTests, self).setUp(Hash)
self.hasher = hashlib.md5
def get_default_field(self, fuzzable=False):
return self.cls(self.depends_on_name, algorithm='md5', fuzzable=fuzzable, name=self.uut_name)
def calculate(self, field):
value = field.render()
digest = self.hasher(value.bytes).digest()
return Bits(bytes=digest)
def testInvalidAlgorithmName(self):
with self.assertRaises(KittyException):
Hash(self.depends_on_name, algorithm='boom')
def testInvalidHashFunctionRaisesException(self):
def func(data):
raise Exception('boom')
with self.assertRaises(KittyException):
Hash(self.depends_on_name, algorithm='boom')
def testInvalidHashFunctionReturnsInt(self):
def func(data):
return 1
with self.assertRaises(KittyException):
Hash(self.depends_on_name, algorithm='boom')
def testInvalidHashFunctionReturnsNone(self):
def func(data):
return None
with self.assertRaises(KittyException):
Hash(self.depends_on_name, algorithm='boom')
def testInvalidHashFunctionReturnsBits(self):
def func(data):
return Bits()
with self.assertRaises(KittyException):
Hash(self.depends_on_name, algorithm='boom')
class Md5Tests(HashTests):
__meta__ = False
def setUp(self):
super(Md5Tests, self).setUp(Md5, hashlib.md5)
class Sha1Tests(HashTests):
__meta__ = False
def setUp(self):
super(Sha1Tests, self).setUp(Sha1, hashlib.sha1)
class Sha224Tests(HashTests):
__meta__ = False
def setUp(self):
super(Sha224Tests, self).setUp(Sha224, hashlib.sha224)
class Sha256Tests(HashTests):
__meta__ = False
def setUp(self):
super(Sha256Tests, self).setUp(Sha256, hashlib.sha256)
class Sha384Tests(HashTests):
__meta__ = False
def setUp(self):
super(Sha384Tests, self).setUp(Sha384, hashlib.sha384)
class Sha512Tests(HashTests):
__meta__ = False
def setUp(self):
super(Sha512Tests, self).setUp(Sha512, hashlib.sha512)
| gpl-2.0 | 3,889,909,666,758,704,000 | 32.565107 | 127 | 0.612467 | false |
Psycojoker/baron | tests/test_inner_formatting_grouper.py | 2 | 5487 | #!/usr/bin/python
# -*- coding:utf-8 -*-
from baron.inner_formatting_grouper import group
def test_empty():
assert group([]) == []
def test_some_stuff():
assert group([
('INT', '1'),
('PLUS', '+', [('SPACE', ' ')], [('SPACE', ' ')]),
('INT', '2')
]) == [
('INT', '1'),
('PLUS', '+', [('SPACE', ' ')], [('SPACE', ' ')]),
('INT', '2')
]
def test_parenthesis():
assert group([
('LEFT_PARENTHESIS', '('),
('ENDL', '\n'),
('RIGHT_PARENTHESIS', ')'),
]) == [
('LEFT_PARENTHESIS', '(', [], [('ENDL', '\n')]),
('RIGHT_PARENTHESIS', ')'),
]
def test_parenthesis_one_space():
assert group([
('LEFT_PARENTHESIS', '(', [('SPACE', ' ')]),
('ENDL', '\n'),
('RIGHT_PARENTHESIS', ')'),
]) == [
('LEFT_PARENTHESIS', '(', [('SPACE', ' ')], [('ENDL', '\n')]),
('RIGHT_PARENTHESIS', ')'),
]
def test_parenthesis_two_space():
assert group([
('LEFT_PARENTHESIS', '(', [('SPACE', ' ')], [('SPACE', ' ')]),
('ENDL', '\n'),
('RIGHT_PARENTHESIS', ')'),
]) == [
('LEFT_PARENTHESIS', '(', [('SPACE', ' ')], [('SPACE', ' '), ('ENDL', '\n')]),
('RIGHT_PARENTHESIS', ')'),
]
def test_two_parenthesis():
assert group([
('LEFT_PARENTHESIS', '('),
('ENDL', '\n'),
('ENDL', '\n'),
('RIGHT_PARENTHESIS', ')'),
]) == [
('LEFT_PARENTHESIS', '(', [], [('ENDL', '\n'), ('ENDL', '\n')]),
('RIGHT_PARENTHESIS', ')'),
]
def test_two_parenthesis_comma():
assert group([
('LEFT_PARENTHESIS', '('),
('ENDL', '\n'),
('COMMA', ','),
('ENDL', '\n'),
('RIGHT_PARENTHESIS', ')'),
]) == [
('LEFT_PARENTHESIS', '(', [], [('ENDL', '\n'), ]),
('COMMA', ',', [], [('ENDL', '\n')]),
('RIGHT_PARENTHESIS', ')'),
]
def test_tuple_one():
assert group([
('LEFT_PARENTHESIS', '('),
('ENDL', '\n'),
('NAME', 'a'),
('ENDL', '\n'),
('COMMA', ','),
('ENDL', '\n'),
('NAME', 'a'),
('ENDL', '\n'),
('RIGHT_PARENTHESIS', ')'),
]) == [
('LEFT_PARENTHESIS', '(', [], [('ENDL', '\n'), ]),
('NAME', 'a'),
('COMMA', ',', [('ENDL', '\n')], [('ENDL', '\n')]),
('NAME', 'a'),
('RIGHT_PARENTHESIS', ')', [('ENDL', '\n')], []),
]
def test_set_one():
assert group([
('LEFT_BRACKET', '{'),
('ENDL', '\n'),
('NAME', 'a'),
('ENDL', '\n'),
('COMMA', ','),
('ENDL', '\n'),
('NAME', 'a'),
('ENDL', '\n'),
('RIGHT_BRACKET', '}'),
]) == [
('LEFT_BRACKET', '{', [], [('ENDL', '\n'), ]),
('NAME', 'a'),
('COMMA', ',', [('ENDL', '\n')], [('ENDL', '\n')]),
('NAME', 'a'),
('RIGHT_BRACKET', '}', [('ENDL', '\n')], []),
]
def test_list_one():
assert group([
('LEFT_SQUARE_BRACKET', '['),
('ENDL', '\n'),
('NAME', 'a'),
('ENDL', '\n'),
('COMMA', ','),
('ENDL', '\n'),
('NAME', 'a'),
('ENDL', '\n'),
('RIGHT_SQUARE_BRACKET', ']'),
]) == [
('LEFT_SQUARE_BRACKET', '[', [], [('ENDL', '\n'), ]),
('NAME', 'a'),
('COMMA', ',', [('ENDL', '\n')], [('ENDL', '\n')]),
('NAME', 'a'),
('RIGHT_SQUARE_BRACKET', ']', [('ENDL', '\n')], []),
]
def test_dict_one():
assert group([
('LEFT_BRACKET', '{'),
('ENDL', '\n'),
('NAME', 'a'),
('ENDL', '\n'),
('COLON', ':'),
('ENDL', '\n'),
('NAME', 'a'),
('ENDL', '\n'),
('COMMA', ','),
('ENDL', '\n'),
('NAME', 'a'),
('ENDL', '\n'),
('RIGHT_BRACKET', '}'),
]) == [
('LEFT_BRACKET', '{', [], [('ENDL', '\n')]),
('NAME', 'a'),
('COLON', ':', [('ENDL', '\n')], [('ENDL', '\n')]),
('NAME', 'a'),
('COMMA', ',', [('ENDL', '\n')], [('ENDL', '\n')]),
('NAME', 'a'),
('RIGHT_BRACKET', '}', [('ENDL', '\n')], []),
]
def test_number_backslash():
assert group([
('INT', '3'),
('SPACE', '\\'),
]) == [
('INT', '3'),
('SPACE', '\\'),
]
def test_number_backslash_newline():
assert group([
('INT', '3'),
('SPACE', '\\\n'),
]) == [
('INT', '3'),
('SPACE', '\\\n'),
]
def test_nested_grouping_after_endl():
"""
(b
[0])
"""
assert group([
('LEFT_PARENTHESIS', '('),
('NAME', 'b'),
('ENDL', '\n'),
('SPACE', ' '),
('LEFT_SQUARE_BRACKET', '['),
('INT', '0'),
('RIGHT_SQUARE_BRACKET', ']'),
('RIGHT_PARENTHESIS', ')'),
]) == [
('LEFT_PARENTHESIS', '('),
('NAME', 'b'),
('LEFT_SQUARE_BRACKET', '[', [('ENDL', '\n'), ('SPACE', ' ')], []),
('INT', '0'),
('RIGHT_SQUARE_BRACKET', ']'),
('RIGHT_PARENTHESIS', ')'),
]
def test_equal():
"""
(a = b)
"""
assert group([
('LEFT_PARENTHESIS', '('),
('NAME', 'a'),
('SPACE', ' '),
('EQUAL', '='),
('SPACE', ' '),
('RIGHT_PARENTHESIS', ')'),
]) == [
('LEFT_PARENTHESIS', '('),
('NAME', 'a'),
('EQUAL', '=', [('SPACE', ' ')], [('SPACE', ' ')]),
('RIGHT_PARENTHESIS', ')'),
]
| lgpl-3.0 | -297,596,925,344,129,500 | 23.065789 | 86 | 0.336249 | false |
MSeifert04/astropy | astropy/io/ascii/cds.py | 4 | 13486 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""An extensible ASCII table reader and writer.
cds.py:
Classes to read CDS / Vizier table format
:Copyright: Smithsonian Astrophysical Observatory (2011)
:Author: Tom Aldcroft ([email protected])
"""
import fnmatch
import itertools
import re
import os
from contextlib import suppress
from . import core
from . import fixedwidth
from astropy.units import Unit
__doctest_skip__ = ['*']
class CdsHeader(core.BaseHeader):
col_type_map = {'e': core.FloatType,
'f': core.FloatType,
'i': core.IntType,
'a': core.StrType}
'The ReadMe file to construct header from.'
readme = None
def get_type_map_key(self, col):
match = re.match(r'\d*(\S)', col.raw_type.lower())
if not match:
raise ValueError('Unrecognized CDS format "{}" for column "{}"'.format(
col.raw_type, col.name))
return match.group(1)
def get_cols(self, lines):
"""
Initialize the header Column objects from the table ``lines`` for a CDS
header.
Parameters
----------
lines : list
List of table lines
"""
# Read header block for the table ``self.data.table_name`` from the read
# me file ``self.readme``.
if self.readme and self.data.table_name:
in_header = False
readme_inputter = core.BaseInputter()
f = readme_inputter.get_lines(self.readme)
# Header info is not in data lines but in a separate file.
lines = []
comment_lines = 0
for line in f:
line = line.strip()
if in_header:
lines.append(line)
if line.startswith(('------', '=======')):
comment_lines += 1
if comment_lines == 3:
break
else:
match = re.match(r'Byte-by-byte Description of file: (?P<name>.+)$',
line, re.IGNORECASE)
if match:
                        # Split 'name' in case it contains multiple files
names = [s for s in re.split('[, ]+', match.group('name'))
if s]
# Iterate on names to find if one matches the tablename
# including wildcards.
for pattern in names:
if fnmatch.fnmatch(self.data.table_name, pattern):
in_header = True
lines.append(line)
break
else:
raise core.InconsistentTableError("Can't find table {} in {}".format(
self.data.table_name, self.readme))
found_line = False
for i_col_def, line in enumerate(lines):
if re.match(r'Byte-by-byte Description', line, re.IGNORECASE):
found_line = True
elif found_line: # First line after list of file descriptions
i_col_def -= 1 # Set i_col_def to last description line
break
re_col_def = re.compile(r"""\s*
(?P<start> \d+ \s* -)? \s*
(?P<end> \d+) \s+
(?P<format> [\w.]+) \s+
(?P<units> \S+) \s+
(?P<name> \S+)
(\s+ (?P<descr> \S.*))?""",
re.VERBOSE)
cols = []
for line in itertools.islice(lines, i_col_def+4, None):
if line.startswith(('------', '=======')):
break
match = re_col_def.match(line)
if match:
col = core.Column(name=match.group('name'))
col.start = int(re.sub(r'[-\s]', '',
match.group('start') or match.group('end'))) - 1
col.end = int(match.group('end'))
unit = match.group('units')
if unit == '---':
col.unit = None # "---" is the marker for no unit in CDS table
else:
col.unit = Unit(unit, format='cds', parse_strict='warn')
col.description = (match.group('descr') or '').strip()
col.raw_type = match.group('format')
col.type = self.get_col_type(col)
match = re.match(
r'\? (?P<equal> =)? (?P<nullval> \S*) (\s+ (?P<descriptiontext> \S.*))?', col.description, re.VERBOSE)
if match:
col.description = (match.group('descriptiontext') or '').strip()
if issubclass(col.type, core.FloatType):
fillval = 'nan'
else:
fillval = '0'
if match.group('nullval') == '-':
col.null = '---'
# CDS tables can use -, --, ---, or ---- to mark missing values
# see https://github.com/astropy/astropy/issues/1335
for i in [1, 2, 3, 4]:
self.data.fill_values.append(('-'*i, fillval, col.name))
else:
col.null = match.group('nullval')
self.data.fill_values.append((col.null, fillval, col.name))
cols.append(col)
else: # could be a continuation of the previous col's description
if cols:
cols[-1].description += line.strip()
else:
raise ValueError(f'Line "{line}" not parsable as CDS header')
self.names = [x.name for x in cols]
self.cols = cols
class CdsData(core.BaseData):
"""CDS table data reader
"""
splitter_class = fixedwidth.FixedWidthSplitter
def process_lines(self, lines):
"""Skip over CDS header by finding the last section delimiter"""
# If the header has a ReadMe and data has a filename
# then no need to skip, as the data lines do not have header
# info. The ``read`` method adds the table_name to the ``data``
# attribute.
if self.header.readme and self.table_name:
return lines
i_sections = [i for i, x in enumerate(lines)
if x.startswith(('------', '======='))]
if not i_sections:
raise core.InconsistentTableError('No CDS section delimiter found')
return lines[i_sections[-1]+1:]
class Cds(core.BaseReader):
"""CDS format table.
See: http://vizier.u-strasbg.fr/doc/catstd.htx
Example::
Table: Table name here
= ==============================================================================
Catalog reference paper
Bibliography info here
================================================================================
ADC_Keywords: Keyword ; Another keyword ; etc
Description:
Catalog description here.
================================================================================
Byte-by-byte Description of file: datafile3.txt
--------------------------------------------------------------------------------
Bytes Format Units Label Explanations
--------------------------------------------------------------------------------
1- 3 I3 --- Index Running identification number
5- 6 I2 h RAh Hour of Right Ascension (J2000)
8- 9 I2 min RAm Minute of Right Ascension (J2000)
11- 15 F5.2 s RAs Second of Right Ascension (J2000)
--------------------------------------------------------------------------------
Note (1): A CDS file can contain sections with various metadata.
Notes can be multiple lines.
Note (2): Another note.
--------------------------------------------------------------------------------
1 03 28 39.09
2 04 18 24.11
**About parsing the CDS format**
The CDS format consists of a table description and the table data. These
can be in separate files as a ``ReadMe`` file plus data file(s), or
combined in a single file. Different subsections within the description
are separated by lines of dashes or equal signs ("------" or "======").
The table which specifies the column information must be preceded by a line
starting with "Byte-by-byte Description of file:".
In the case where the table description is combined with the data values,
the data must be in the last section and must be preceded by a section
delimiter line (dashes or equal signs only).
**Basic usage**
Use the ``ascii.read()`` function as normal, with an optional ``readme``
parameter indicating the CDS ReadMe file. If not supplied it is assumed that
the header information is at the top of the given table. Examples::
>>> from astropy.io import ascii
>>> table = ascii.read("data/cds.dat")
>>> table = ascii.read("data/vizier/table1.dat", readme="data/vizier/ReadMe")
>>> table = ascii.read("data/cds/multi/lhs2065.dat", readme="data/cds/multi/ReadMe")
>>> table = ascii.read("data/cds/glob/lmxbrefs.dat", readme="data/cds/glob/ReadMe")
The table name and the CDS ReadMe file can be entered as URLs. This can be used
to directly load tables from the Internet. For example, Vizier tables from the
CDS::
>>> table = ascii.read("ftp://cdsarc.u-strasbg.fr/pub/cats/VII/253/snrs.dat",
... readme="ftp://cdsarc.u-strasbg.fr/pub/cats/VII/253/ReadMe")
If the header (ReadMe) and data are stored in a single file and there
is content between the header and the data (for instance Notes), then the
parsing process may fail. In this case you can instruct the reader to
guess the actual start of the data by supplying ``data_start='guess'`` in the
call to the ``ascii.read()`` function. You should verify that the output
data table matches expectation based on the input CDS file.
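    For instance, a minimal sketch of such a call (the file name is illustrative)::
      >>> table = ascii.read("data/cds.dat", data_start='guess')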
**Using a reader object**
When ``Cds`` reader object is created with a ``readme`` parameter
passed to it at initialization, then when the ``read`` method is
executed with a table filename, the header information for the
specified table is taken from the ``readme`` file. An
``InconsistentTableError`` is raised if the ``readme`` file does not
have header information for the given table.
>>> readme = "data/vizier/ReadMe"
>>> r = ascii.get_reader(ascii.Cds, readme=readme)
>>> table = r.read("data/vizier/table1.dat")
>>> # table5.dat has the same ReadMe file
>>> table = r.read("data/vizier/table5.dat")
If no ``readme`` parameter is specified, then the header
information is assumed to be at the top of the given table.
>>> r = ascii.get_reader(ascii.Cds)
>>> table = r.read("data/cds.dat")
>>> #The following gives InconsistentTableError, since no
>>> #readme file was given and table1.dat does not have a header.
>>> table = r.read("data/vizier/table1.dat")
Traceback (most recent call last):
...
InconsistentTableError: No CDS section delimiter found
Caveats:
* The Units and Explanations are available in the column ``unit`` and
``description`` attributes, respectively.
* The other metadata defined by this format is not available in the output table.
"""
_format_name = 'cds'
_io_registry_format_aliases = ['cds']
_io_registry_can_write = False
_description = 'CDS format table'
data_class = CdsData
header_class = CdsHeader
def __init__(self, readme=None):
super().__init__()
self.header.readme = readme
def write(self, table=None):
"""Not available for the Cds class (raises NotImplementedError)"""
raise NotImplementedError
def read(self, table):
# If the read kwarg `data_start` is 'guess' then the table may have extraneous
# lines between the end of the header and the beginning of data.
if self.data.start_line == 'guess':
# Replicate the first part of BaseReader.read up to the point where
# the table lines are initially read in.
with suppress(TypeError):
# For strings only
if os.linesep not in table + '':
self.data.table_name = os.path.basename(table)
self.data.header = self.header
self.header.data = self.data
# Get a list of the lines (rows) in the table
lines = self.inputter.get_lines(table)
# Now try increasing data.start_line by one until the table reads successfully.
# For efficiency use the in-memory list of lines instead of `table`, which
# could be a file.
for data_start in range(len(lines)):
self.data.start_line = data_start
with suppress(Exception):
table = super().read(lines)
return table
else:
return super().read(table)
| bsd-3-clause | -2,524,261,184,576,346,600 | 40.368098 | 122 | 0.523135 | false |
tushar7795/MicroBlog | flask/lib/python2.7/site-packages/sqlalchemy/engine/__init__.py | 16 | 18803 | # engine/__init__.py
# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""SQL connections, SQL execution and high-level DB-API interface.
The engine package defines the basic components used to interface
DB-API modules with higher-level statement construction,
connection-management, execution and result contexts. The primary
"entry point" class into this package is the Engine and its public
constructor ``create_engine()``.
This package includes:
base.py
Defines interface classes and some implementation classes which
comprise the basic components used to interface between a DB-API,
constructed and plain-text statements, connections, transactions,
and results.
default.py
Contains default implementations of some of the components defined
in base.py. All current database dialects use the classes in
default.py as base classes for their own database-specific
implementations.
strategies.py
The mechanics of constructing ``Engine`` objects are represented
here. Defines the ``EngineStrategy`` class which represents how
to go from arguments specified to the ``create_engine()``
function, to a fully constructed ``Engine``, including
initialization of connection pooling, dialects, and specific
subclasses of ``Engine``.
threadlocal.py
The ``TLEngine`` class is defined here, which is a subclass of
the generic ``Engine`` and tracks ``Connection`` and
``Transaction`` objects against the identity of the current
thread. This allows certain programming patterns based around
the concept of a "thread-local connection" to be possible.
The ``TLEngine`` is created by using the "threadlocal" engine
strategy in conjunction with the ``create_engine()`` function.
url.py
Defines the ``URL`` class which represents the individual
components of a string URL passed to ``create_engine()``. Also
defines a basic module-loading strategy for the dialect specifier
within a URL.
"""
from .interfaces import (
Connectable,
Dialect,
ExecutionContext,
ExceptionContext,
# backwards compat
Compiled,
TypeCompiler
)
from .base import (
Connection,
Engine,
NestedTransaction,
RootTransaction,
Transaction,
TwoPhaseTransaction,
)
from .result import (
BaseRowProxy,
BufferedColumnResultProxy,
BufferedColumnRow,
BufferedRowResultProxy,
FullyBufferedResultProxy,
ResultProxy,
RowProxy,
)
from .util import (
connection_memoize
)
from . import util, strategies
# backwards compat
from ..sql import ddl
default_strategy = 'plain'
def create_engine(*args, **kwargs):
"""Create a new :class:`.Engine` instance.
The standard calling form is to send the URL as the
first positional argument, usually a string
that indicates database dialect and connection arguments::
engine = create_engine("postgresql://scott:tiger@localhost/test")
Additional keyword arguments may then follow it which
establish various options on the resulting :class:`.Engine`
and its underlying :class:`.Dialect` and :class:`.Pool`
constructs::
engine = create_engine("mysql://scott:tiger@hostname/dbname",
encoding='latin1', echo=True)
The string form of the URL is
``dialect[+driver]://user:password@host/dbname[?key=value..]``, where
``dialect`` is a database name such as ``mysql``, ``oracle``,
``postgresql``, etc., and ``driver`` the name of a DBAPI, such as
``psycopg2``, ``pyodbc``, ``cx_oracle``, etc. Alternatively,
the URL can be an instance of :class:`~sqlalchemy.engine.url.URL`.
``**kwargs`` takes a wide variety of options which are routed
towards their appropriate components. Arguments may be specific to
the :class:`.Engine`, the underlying :class:`.Dialect`, as well as the
:class:`.Pool`. Specific dialects also accept keyword arguments that
are unique to that dialect. Here, we describe the parameters
that are common to most :func:`.create_engine()` usage.
Once established, the newly resulting :class:`.Engine` will
request a connection from the underlying :class:`.Pool` once
:meth:`.Engine.connect` is called, or a method which depends on it
such as :meth:`.Engine.execute` is invoked. The :class:`.Pool` in turn
will establish the first actual DBAPI connection when this request
is received. The :func:`.create_engine` call itself does **not**
establish any actual DBAPI connections directly.
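For example, a minimal sketch of this lazy behaviour (the URL is illustrative)::
    engine = create_engine("postgresql://scott:tiger@localhost/test")  # no DBAPI connection yet
    conn = engine.connect()  # the first actual DBAPI connection is established here
    conn.close()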
.. seealso::
:doc:`/core/engines`
:doc:`/dialects/index`
:ref:`connections_toplevel`
:param case_sensitive=True: if False, result column names
will match in a case-insensitive fashion, that is,
``row['SomeColumn']``.
.. versionchanged:: 0.8
By default, result row names match case-sensitively.
In version 0.7 and prior, all matches were case-insensitive.
:param connect_args: a dictionary of options which will be
passed directly to the DBAPI's ``connect()`` method as
additional keyword arguments. See the example
at :ref:`custom_dbapi_args`.
:param convert_unicode=False: if set to True, sets
the default behavior of ``convert_unicode`` on the
:class:`.String` type to ``True``, regardless
of a setting of ``False`` on an individual
:class:`.String` type, thus causing all :class:`.String`
-based columns
to accommodate Python ``unicode`` objects. This flag
is useful as an engine-wide setting when using a
DBAPI that does not natively support Python
``unicode`` objects and raises an error when
one is received (such as pyodbc with FreeTDS).
See :class:`.String` for further details on
what this flag indicates.
:param creator: a callable which returns a DBAPI connection.
This creation function will be passed to the underlying
connection pool and will be used to create all new database
connections. Usage of this function causes connection
parameters specified in the URL argument to be bypassed.
:param echo=False: if True, the Engine will log all statements
as well as a repr() of their parameter lists to the engines
logger, which defaults to sys.stdout. The ``echo`` attribute of
``Engine`` can be modified at any time to turn logging on and
off. If set to the string ``"debug"``, result rows will be
printed to the standard output as well. This flag ultimately
controls a Python logger; see :ref:`dbengine_logging` for
information on how to configure logging directly.
:param echo_pool=False: if True, the connection pool will log
all checkouts/checkins to the logging stream, which defaults to
sys.stdout. This flag ultimately controls a Python logger; see
:ref:`dbengine_logging` for information on how to configure logging
directly.
:param encoding: Defaults to ``utf-8``. This is the string
encoding used by SQLAlchemy for string encode/decode
operations which occur within SQLAlchemy, **outside of
the DBAPI.** Most modern DBAPIs feature some degree of
direct support for Python ``unicode`` objects,
what you see in Python 2 as a string of the form
``u'some string'``. For those scenarios where the
DBAPI is detected as not supporting a Python ``unicode``
object, this encoding is used to determine the
source/destination encoding. It is **not used**
for those cases where the DBAPI handles unicode
directly.
To properly configure a system to accommodate Python
``unicode`` objects, the DBAPI should be
configured to handle unicode to the greatest
degree as is appropriate - see
the notes on unicode pertaining to the specific
target database in use at :ref:`dialect_toplevel`.
Areas where string encoding may need to be accommodated
outside of the DBAPI include zero or more of:
* the values passed to bound parameters, corresponding to
the :class:`.Unicode` type or the :class:`.String` type
when ``convert_unicode`` is ``True``;
* the values returned in result set columns corresponding
to the :class:`.Unicode` type or the :class:`.String`
type when ``convert_unicode`` is ``True``;
* the string SQL statement passed to the DBAPI's
``cursor.execute()`` method;
* the string names of the keys in the bound parameter
dictionary passed to the DBAPI's ``cursor.execute()``
as well as ``cursor.setinputsizes()`` methods;
* the string column names retrieved from the DBAPI's
``cursor.description`` attribute.
When using Python 3, the DBAPI is required to support
*all* of the above values as Python ``unicode`` objects,
which in Python 3 are just known as ``str``. In Python 2,
the DBAPI does not specify unicode behavior at all,
so SQLAlchemy must make decisions for each of the above
values on a per-DBAPI basis - implementations are
completely inconsistent in their behavior.
:param execution_options: Dictionary execution options which will
be applied to all connections. See
:meth:`~sqlalchemy.engine.Connection.execution_options`
:param implicit_returning=True: When ``True``, a RETURNING-
compatible construct, if available, will be used to
fetch newly generated primary key values when a single row
INSERT statement is emitted with no existing returning()
clause. This applies to those backends which support RETURNING
or a compatible construct, including Postgresql, Firebird, Oracle,
Microsoft SQL Server. Set this to ``False`` to disable
the automatic usage of RETURNING.
:param isolation_level: this string parameter is interpreted by various
dialects in order to affect the transaction isolation level of the
database connection. The parameter essentially accepts some subset of
these string arguments: ``"SERIALIZABLE"``, ``"REPEATABLE_READ"``,
``"READ_COMMITTED"``, ``"READ_UNCOMMITTED"`` and ``"AUTOCOMMIT"``.
Behavior here varies per backend, and
individual dialects should be consulted directly.
Note that the isolation level can also be set on a per-:class:`.Connection`
basis as well, using the
:paramref:`.Connection.execution_options.isolation_level`
feature.
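For example, a minimal sketch of setting the level engine-wide (the URL and
level shown are illustrative)::
    engine = create_engine("postgresql://scott:tiger@localhost/test",
                           isolation_level="READ_COMMITTED")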
.. seealso::
:attr:`.Connection.default_isolation_level` - view default level
:paramref:`.Connection.execution_options.isolation_level`
- set per :class:`.Connection` isolation level
:ref:`SQLite Transaction Isolation <sqlite_isolation_level>`
:ref:`Postgresql Transaction Isolation <postgresql_isolation_level>`
:ref:`MySQL Transaction Isolation <mysql_isolation_level>`
:ref:`session_transaction_isolation` - for the ORM
:param label_length=None: optional integer value which limits
the size of dynamically generated column labels to that many
characters. If less than 6, labels are generated as
"_(counter)". If ``None``, the value of
``dialect.max_identifier_length`` is used instead.
:param listeners: A list of one or more
:class:`~sqlalchemy.interfaces.PoolListener` objects which will
receive connection pool events.
:param logging_name: String identifier which will be used within
the "name" field of logging records generated within the
"sqlalchemy.engine" logger. Defaults to a hexstring of the
object's id.
:param max_overflow=10: the number of connections to allow in
connection pool "overflow", that is connections that can be
opened above and beyond the pool_size setting, which defaults
to five. This is only used with :class:`~sqlalchemy.pool.QueuePool`.
:param module=None: reference to a Python module object (the module
itself, not its string name). Specifies an alternate DBAPI module to
be used by the engine's dialect. Each sub-dialect references a
specific DBAPI which will be imported before first connect. This
parameter causes the import to be bypassed, and the given module to
be used instead. Can be used for testing of DBAPIs as well as to
inject "mock" DBAPI implementations into the :class:`.Engine`.
:param paramstyle=None: The `paramstyle <http://legacy.python.org/dev/peps/pep-0249/#paramstyle>`_
to use when rendering bound parameters. This style defaults to the
one recommended by the DBAPI itself, which is retrieved from the
``.paramstyle`` attribute of the DBAPI. However, most DBAPIs accept
more than one paramstyle, and in particular it may be desirable
to change a "named" paramstyle into a "positional" one, or vice versa.
When this attribute is passed, it should be one of the values
``"qmark"``, ``"numeric"``, ``"named"``, ``"format"`` or
``"pyformat"``, and should correspond to a parameter style known
to be supported by the DBAPI in use.
:param pool=None: an already-constructed instance of
:class:`~sqlalchemy.pool.Pool`, such as a
:class:`~sqlalchemy.pool.QueuePool` instance. If non-None, this
pool will be used directly as the underlying connection pool
for the engine, bypassing whatever connection parameters are
present in the URL argument. For information on constructing
connection pools manually, see :ref:`pooling_toplevel`.
:param poolclass=None: a :class:`~sqlalchemy.pool.Pool`
subclass, which will be used to create a connection pool
instance using the connection parameters given in the URL. Note
this differs from ``pool`` in that you don't actually
instantiate the pool in this case, you just indicate what type
of pool to be used.
:param pool_logging_name: String identifier which will be used within
the "name" field of logging records generated within the
"sqlalchemy.pool" logger. Defaults to a hexstring of the object's
id.
:param pool_size=5: the number of connections to keep open
inside the connection pool. This used with
:class:`~sqlalchemy.pool.QueuePool` as
well as :class:`~sqlalchemy.pool.SingletonThreadPool`. With
:class:`~sqlalchemy.pool.QueuePool`, a ``pool_size`` setting
of 0 indicates no limit; to disable pooling, set ``poolclass`` to
:class:`~sqlalchemy.pool.NullPool` instead.
:param pool_recycle=-1: this setting causes the pool to recycle
connections after the given number of seconds has passed. It
defaults to -1, or no timeout. For example, setting to 3600
means connections will be recycled after one hour. Note that
MySQL in particular will disconnect automatically if no
activity is detected on a connection for eight hours (although
this is configurable with the MySQLDB connection itself and the
server configuration as well).
:param pool_reset_on_return='rollback': set the "reset on return"
behavior of the pool, which is whether ``rollback()``,
``commit()``, or nothing is called upon connections
being returned to the pool. See the docstring for
``reset_on_return`` at :class:`.Pool`.
.. versionadded:: 0.7.6
:param pool_timeout=30: number of seconds to wait before giving
up on getting a connection from the pool. This is only used
with :class:`~sqlalchemy.pool.QueuePool`.
:param strategy='plain': selects alternate engine implementations.
Currently available are:
* the ``threadlocal`` strategy, which is described in
:ref:`threadlocal_strategy`;
* the ``mock`` strategy, which dispatches all statement
execution to a function passed as the argument ``executor``.
See `example in the FAQ
<http://www.sqlalchemy.org/trac/wiki/FAQ#HowcanIgettheCREATETABLEDROPTABLEoutputasastring>`_.
:param executor=None: a function taking arguments
``(sql, *multiparams, **params)``, to which the ``mock`` strategy will
dispatch all statement execution. Used only by ``strategy='mock'``.
"""
strategy = kwargs.pop('strategy', default_strategy)
strategy = strategies.strategies[strategy]
return strategy.create(*args, **kwargs)
def engine_from_config(configuration, prefix='sqlalchemy.', **kwargs):
"""Create a new Engine instance using a configuration dictionary.
The dictionary is typically produced from a config file.
The keys of interest to ``engine_from_config()`` should be prefixed, e.g.
``sqlalchemy.url``, ``sqlalchemy.echo``, etc. The 'prefix' argument
indicates the prefix to be searched for. Each matching key (after the
prefix is stripped) is treated as though it were the corresponding keyword
argument to a :func:`.create_engine` call.
The only required key is (assuming the default prefix) ``sqlalchemy.url``,
which provides the :ref:`database URL <database_urls>`.
A select set of keyword arguments will be "coerced" to their
expected type based on string values. The set of arguments
is extensible per-dialect using the ``engine_config_types`` accessor.
:param configuration: A dictionary (typically produced from a config file,
but this is not a requirement). Items whose keys start with the value
of 'prefix' will have that prefix stripped, and will then be passed to
:ref:`create_engine`.
:param prefix: Prefix to match and then strip from keys
in 'configuration'.
:param kwargs: Each keyword argument to ``engine_from_config()`` itself
overrides the corresponding item taken from the 'configuration'
dictionary. Keyword arguments should *not* be prefixed.
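A minimal usage sketch (the keys and URL are illustrative)::
    configuration = {
        'sqlalchemy.url': 'postgresql://scott:tiger@localhost/test',
        'sqlalchemy.echo': 'true',
    }
    engine = engine_from_config(configuration, prefix='sqlalchemy.')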
"""
options = dict((key[len(prefix):], configuration[key])
for key in configuration
if key.startswith(prefix))
options['_coerce_config'] = True
options.update(kwargs)
url = options.pop('url')
return create_engine(url, **options)
__all__ = (
'create_engine',
'engine_from_config',
)
| bsd-3-clause | -7,058,442,239,504,188,000 | 42.424942 | 103 | 0.686167 | false |
diegocepedaw/oncall | src/oncall/api/v0/ical_key_user.py | 1 | 2738 | # Copyright (c) LinkedIn Corporation. All rights reserved. Licensed under the BSD-2 Clause license.
# See LICENSE in the project root for license information.
from falcon import HTTPNotFound, HTTPForbidden, HTTP_201
from ...auth import login_required
from .ical_key import get_ical_key, update_ical_key, delete_ical_key, generate_ical_key
@login_required
def on_get(req, resp, user_name):
"""Get the secret key that grants public access to user_name's oncall
calendar for the logged-in user.
Current policy only allows the logged-in user to get its own key,
so user_name parameter must be the same as the logged-in user.
**Example request:**
.. sourcecode:: http
GET /api/v0/ical_key/user/jdoe HTTP/1.1
Content-Type: text/plain
ef895425-5f49-11ea-8eee-10e7c6352aff
"""
challenger = req.context['user']
if challenger != user_name:
raise HTTPForbidden(
'Unauthorized',
'Action not allowed: "%s" is not allowed to view ical_key of "%s"' % (challenger, user_name)
)
key = get_ical_key(challenger, user_name, 'user')
if key is None:
raise HTTPNotFound()
resp.body = key
resp.set_header('Content-Type', 'text/plain')
@login_required
def on_post(req, resp, user_name):
"""Update or create the secret key that grants public access to
user_name's oncall calendar for the logged-in user. Updating the
secret key will automatically invalidate existing secret keys. A
subsequent GET will get the secret key.
Current policy only allows the logged-in user to get its own key,
so user_name parameter must be the same as the logged-in user.
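**Example request** (the key value in the response is illustrative):
.. sourcecode:: http
POST /api/v0/ical_key/user/jdoe HTTP/1.1
On success the response has status 201 and carries the newly generated key
as ``text/plain``::
ef895425-5f49-11ea-8eee-10e7c6352aff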
"""
challenger = req.context['user']
if challenger != user_name:
raise HTTPForbidden(
'Unauthorized',
'Action not allowed: "%s" is not allowed to update ical_key of "%s"' % (challenger, user_name)
)
key = generate_ical_key()
update_ical_key(challenger, user_name, 'user', key)
resp.status = HTTP_201
resp.body = key
resp.set_header('Content-Type', 'text/plain')
@login_required
def on_delete(req, resp, user_name):
"""Delete the secret key that grants public access to user_name's
oncall calendar for the logged-in user.
Current policy only allows the logged-in user to get its own key,
so user_name parameter must be the same as the logged-in user.
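**Example request:**
.. sourcecode:: http
DELETE /api/v0/ical_key/user/jdoe HTTP/1.1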
"""
challenger = req.context['user']
if challenger != user_name:
raise HTTPForbidden(
'Unauthorized',
'Action not allowed: "%s" is not allowed to delete ical_key of "%s"' % (challenger, user_name)
)
delete_ical_key(challenger, user_name, 'user')
| bsd-2-clause | -4,688,494,480,453,262,000 | 31.211765 | 106 | 0.664354 | false |
svn2github/pyopt | pyOpt/pyALPSO/alpso_spm.py | 2 | 36595 | #!/usr/bin/env python
'''
alpso_spm - Parallel Python Version of the Augmented Lagrangian Particle Swarm Optimizer
This version uses mpi4py and a static allocation of objective function
evaluation to the available processes - Static Process Management (SPM).
alpso is a global optimizer which solves problems of the form:
min F(x)
subject to: Gi(x) = 0, i = 1(1)ME
Gj(x) <= 0, j = ME+1(1)M
xLB <= x <= xUB
usage:
mpirun manager: mpirun -n (#nproc) python alpso_spm.py
slurm manager: srun -p sca -K -n (#nproc) python alpso_spm.py
Copyright (c) 2006-2011 by Dr. Ruben E. Perez and Mr. Peter Jansen
All rights reserved. Not to be used for commercial purposes.
Revision: 1.0 $Date: 31/08/2009 21:00$
Developers:
-----------
- Dr. Ruben E. Perez (RP)
- Mr. Peter W. Jansen (PJ)
History
-------
v. 1.0 - Initial Class Creation (PJ, 2008)
- Added Print support (PJ, 2008)
- Scaling Option (PJ, 2008)
- Removed lambda convergence criteria (PJ, 2009)
- Added Multiple x0s Input Functionality (PJ, 2010)
Note: This implementation does not allow multiple successive calls to the optimizer.
'''
__version__ = '$Revision: $'
'''
To Do:
- Add Other Inertia and Velocity Updates to Inner Loop
'''
# =============================================================================
# Standard Python modules
# =============================================================================
import os, sys, random, time
import pdb
from math import floor
# =============================================================================
# External Python modules
# =============================================================================
import numpy
try:
import mpi4py
from mpi4py import MPI
except ImportError:
print 'alpso_spm: mpi4py library failed to import'
#end
# =============================================================================
# Extension modules
# =============================================================================
# =============================================================================
# Misc Definitions
# =============================================================================
inf = 10.E+20 # define a value for infinity
# =============================================================================
eps = 1.0 # define a value for machine precision
while ((eps/2.0 + 1.0) > 1.0):
eps = eps/2.0
#end
eps = 2.0*eps
#eps = math.ldexp(1,-52)
#==============================================================================
# alpso function
#==============================================================================
def alpso(dimensions,constraints,neqcons,xtype,x0,xmin,xmax,swarmsize,nhn,
nhm,maxOutIter,maxInnIter,minInnIter,stopCriteria,stopIters,etol,
itol,rtol,atol,dtol,prtOutIter,prtInnIter,r0,vinit,vmax,c1,c2,w1,w2,
ns,nf,vcrazy,fileout,filename,logfile,hstfile,rseed,scale,nhs,objfunc):
'''
Python Version of the Augmented Lagrangian Particle Swarm Optimizer
Documentation last updated: April. 29, 2008 - Ruben E. Perez
'''
# MPI Setup
comm = MPI.COMM_WORLD
nproc = comm.Get_size()
myrank = comm.Get_rank()
if (mpi4py.__version__[0] == '0'):
Barrier = comm.Barrier
Send = comm.Send
Recv = comm.Recv
Bcast = comm.Bcast
elif (mpi4py.__version__[0] == '1'):
Barrier = comm.barrier
Send = comm.send
Recv = comm.recv
Bcast = comm.bcast
#end
myswarm = xrange(myrank,swarmsize,nproc)
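# Static process management: each rank is statically assigned every nproc-th
# particle starting from its own rank index; rank 0 later gathers all results.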
if myrank != 0:
prtOutIter = 0
prtInnIter = 0
fileout = 0
#end
#
if (x0 != []):
if isinstance(x0,list):
x0 = numpy.array(x0)
elif not isinstance(x0,numpy.ndarray):
if myrank == 0:
print """Warning: Initial x must be either list or numpy.array,
all initial positions randomly generated"""
else:
pass
#end
#end
#end
#
if (hstfile != None):
h_start = True
else:
h_start = False
#end
if (logfile != None):
sto_hst = True
else:
sto_hst = False
#end
h_start = Bcast(h_start, root=0)
# Set random number seed
rand = random.Random()
if rseed == {}:
rseed = time.time()
#end
rseed = Bcast(rseed,root=0)
rand.seed(rseed)
if (filename == ''):
filename = 'ALPSO.out'
#end
ofname = ''
sfname = ''
fntmp = filename.split('.')
if (len(fntmp) == 1):
ofname += fntmp[0] + '_print.out'
sfname += fntmp[0] + '_summary.out'
else:
if '/' not in fntmp[-1] and '\\' not in fntmp[-1]:
ofname += filename[:filename.rfind('.')]+ '_print.' + fntmp[-1]
sfname += filename[:filename.rfind('.')]+'_summary.' + fntmp[-1]
else:
ofname += filename + '_print.out'
sfname += filename + '_summary.out'
#end
#end
header = ''
header += ' '*37 + '======================\n'
header += ' '*39 + ' ALPSO 1.1 (SPM)\n'
header += ' '*37 + '======================\n\n'
header += 'Parameters:\n'
header += '-'*97 + '\n'
if (maxInnIter != minInnIter):
diI = 1
else:
diI = 0
#end
if (x0 != []):
if len(x0.shape) == 1:
nxi = 1
else:
nxi = x0.shape[0]
#end
else:
nxi = 0
#end
header += 'Swarmsize :%9d'%(swarmsize) + ' MaxOuterIters :%9d'%(maxOutIter) + ' Seed:%26.8f\n'%(rseed)
header += 'Cognitive Parameter :%9.3f'%(c1) + ' MaxInnerIters :%9d'%(maxInnIter) + ' Scaling :%11d\n'%(scale)
header += 'Social Parameter :%9.3f' %(c2) + ' MinInnerIters :%9d'%(minInnIter) + ' Stopping Criteria :%11d\n'%(stopCriteria)
header += 'Initial Weight :%9.3f' %(w1) + ' DynInnerIters :%9d'%(diI) + ' Number of Failures :%11d\n' %(ns)
header += 'Final Weight :%9.3f' %(w2) + ' StoppingIters :%9d'%(stopIters) + ' Number of Successes:%11d\n\n' %(nf)
header += 'Absolute Tolerance : %1.2e' %(atol) + ' Number Initial Pos:%9d'%(nxi) + ' Neighbourhood Model:%11s\n' %(nhm)
header += 'Relative Tolerance : %1.2e' %(rtol) + ' Initial Velocity :%9d'%(vinit) + ' Neighbourhood Size :%11d\n' %(nhn)
header += 'Inequality Tolerance: %1.2e' %(itol) + ' Maximum Velocity :%9d'%(vmax) + ' Selfless :%11d\n' %(nhs)
header += 'Equality Tolerance : %1.2e' %(etol) + ' Craziness Velocity: %1.2e'%(vcrazy) + ' Fileout :%11d\n' %(fileout)
header += 'Global Distance : %1.2e' %(dtol) + ' Initial Penalty :%9.2f' %(r0) + ' File Name :%11s\n' %(filename)
header += '-'*97 + '\n\n'
if (fileout == 1) or (fileout == 3):
if os.path.isfile(ofname):
os.remove(ofname)
#end
ofile = open(ofname,'w')
ofile.write(header)
#end
if (fileout == 2) or (fileout == 3):
if os.path.isfile(sfname):
os.remove(sfname)
#end
sfile = open(sfname,'w')
sfile.write(header)
#end
dt = 1.0
vlimit = vmax
vmax = numpy.ones(dimensions,float)*vmax
if (scale == 1):
space_centre = numpy.zeros(dimensions,float)
space_halflen = numpy.zeros(dimensions,float)
for j in xrange(dimensions):
space_centre[j] = (xmin[j] + xmax[j])/2.0
space_halflen[j] = ((xmax[j]-xmin[j])/2.0)
#end
xmin = -numpy.ones(dimensions,float)
xmax = numpy.ones(dimensions,float)
else:
for j in xrange(dimensions):
vmax[j] = ((xmax[j]-xmin[j])/2.0)*vlimit
#end
#end
# Initialize the positions and velocities for entire population
x_k = numpy.zeros((swarmsize,dimensions), float)
v_k = numpy.zeros((swarmsize,dimensions), float)
discrete_i = []
for i in xrange(swarmsize):
for j in xrange(dimensions):
x_k[i,j] = xmin[j] + rand.random()*(xmax[j]-xmin[j])
if (xtype[j] == 1):
discrete_i.append(j)
#end
v_k[i,j] = (xmin[j] + rand.random()*(xmax[j]-xmin[j]))/dt
#end
#end
if (x0 != []):
if len(x0.shape) == 1:
if (scale == 1):
x_k[0,:] = (x0[:] - space_centre)/space_halflen
else:
x_k[0,:] = x0[:]
#end
else:
if (x0.shape[0] > swarmsize):
if (myrank == 0):
print 'Warning: %d initial positions specified for %d particles, last %d positions ignored' %(x0.shape[0],swarmsize,x0.shape[0]-swarmsize)
#end
x0 = x0[0:swarmsize,:]
#end
for i in xrange(x0.shape[0]):
if (scale == 1):
x_k[i,:] = (x0[i,:] - space_centre)/space_halflen
else:
x_k[i,:] = x0[i,:]
#end
#end
#end
#end
# Initialize Augmented Lagrange
f = numpy.zeros(swarmsize, float)
L = numpy.zeros(swarmsize, float)
g = numpy.zeros([swarmsize,constraints], float)
g_old = numpy.zeros([swarmsize,constraints], float)
rp = numpy.ones(constraints, float)*r0
lambda_val = numpy.zeros(constraints, float)
lambda_old = numpy.zeros(constraints, float)
tau = numpy.zeros([swarmsize,constraints], float)
tau_new = numpy.zeros(constraints,float)
tau_old = numpy.zeros(constraints,float)
nfevals = 0
if h_start:
if (myrank == 0):
[vals,hist_end] = hstfile.read([],ident=['obj','con'])
if not hist_end:
f = vals['obj'][0]
g = vals['con'][0].reshape(g.shape)
else:
h_start = False
hstfile.close()
#end
#end
#end
h_start = Bcast(h_start,root=0)
#end
if not h_start:
## MPI Objective Function Evaluation
x_k = Bcast(x_k,root=0)
send_buf = {}
for i in myswarm:
p_eval = numpy.zeros(constraints+1, float)
if (scale == 1):
xtmp = (x_k[i,:] * space_halflen) + space_centre
else:
xtmp = x_k[i,:]
#end
for m in discrete_i:
xtmp[m] = floor(xtmp[m] + 0.5)
#end
[f_tmp,g_tmp] = objfunc(xtmp)
p_eval[0] = f_tmp
p_eval[1:constraints+1] = g_tmp[:]
send_buf[i] = p_eval
#end
if myrank == 0:
for i in send_buf.keys():
f[i] = send_buf[i][0]
g[i,:] = send_buf[i][1:]
#end
#end
if myrank != 0:
Send(send_buf,dest=0)
else:
p_results = []
for proc in xrange(1,nproc):
p_results.append(Recv(source=proc))
#end
#end
if myrank == 0:
for proc in xrange(nproc-1):
for i in p_results[proc].keys():
f[i] = p_results[proc][i][0]
g[i,:] = p_results[proc][i][1:]
#end
#end
#end
nfevals += swarmsize
#end
for i in xrange(swarmsize):
# Augmented Lagrangian Value
L[i] = f[i]
if (constraints>0):
# Equality Constraints
for l in xrange(neqcons):
tau[i,l] = g[i,l]
#end
# Inequality Constraints
for l in xrange(neqcons,constraints):
if (rp[l] != 0):
if (g[i,l] > -lambda_val[l]/(2*rp[l])):
tau[i,l] = g[i,l]
else:
tau[i,l] = -lambda_val[l]/(2*rp[l])
#end
else:
tau[i,l] = g[i,l]
#end
#end
#
for l in xrange(constraints):
L[i] += lambda_val[l]*tau[i,l] + rp[l]*tau[i,l]**2
#end
#end
#end
# Initialize Particles Best
best_x = numpy.zeros((swarmsize,dimensions))
best_L = numpy.zeros(swarmsize, float)
best_f = numpy.zeros(swarmsize, float)
best_g = numpy.zeros([swarmsize,constraints], float)
for i in xrange(swarmsize):
for j in xrange(dimensions):
best_x[i,j] = x_k[i,j]
#end
best_L[i] = L[i]
best_f[i] = f[i]
for l in xrange(constraints):
best_g[i,l] = g[i,l]
#end
#end
# Initialize Swarm Best
swarm_i = L.argmin()
swarm_i_old = 0
swarm_x = numpy.zeros(dimensions, float)
for j in xrange(dimensions):
swarm_x[j] = x_k[swarm_i,j]
#end
swarm_L = L[swarm_i]
swarm_L_old = L[0]
swarm_f = f[swarm_i]
swarm_f_old = f[0]
swarm_g = numpy.zeros(constraints, float)
swarm_g_old = numpy.zeros(constraints, float)
for l in xrange(constraints):
swarm_g[l] = g[swarm_i,l]
swarm_g_old[l] = g[0,l]
#end
# Initialize Neighbourhood
if (nhm == 'dlring') or (nhm == 'slring') or (nhm == 'wheel') or (nhm == 'spatial') or (nhm == 'sfrac'):
nhps = []
nhbest_L = numpy.ones(swarmsize)*inf
nhbest_f = numpy.zeros(swarmsize)
nhbest_x = numpy.zeros((swarmsize,dimensions))
nhbest_i = numpy.zeros(swarmsize)
if (nhm == 'dlring'):
for i in xrange(swarmsize):
nhps.append([])
if (nhs == 0):
nhps[i].append(i)
#end
for nb in xrange(1,(nhn/2)+1):
if (i + nb >= swarmsize):
nhps[i].append(-1 + nb)
else:
nhps[i].append(i + nb)
#end
if (i - nb < 0):
nhps[i].append(swarmsize + i - nb)
else:
nhps[i].append(i - nb)
#end
#end
#end
elif (nhm == 'slring'):
for i in xrange(swarmsize):
nhps.append([])
if (nhs == 0):
nhps[i].append(i)
#end
for nb in xrange(1,(nhn/2)+1):
if (i + nb >= swarmsize):
nhps[i].append(-1 + nb)
else:
nhps[i].append(i + nb)
#end
if (i - (nb*2) < 0):
nhps[i].append(swarmsize + i - (nb*2))
else:
nhps[i].append(i - (nb*2))
#end
#end
#end
elif (nhm == 'wheel'):
nhps.append([])
nhps[0].append(0)
for i in xrange(1,swarmsize):
nhps.append([])
nhps[i].append(i)
nhps[i].append(0)
nhps[0].append(i)
#end
elif (nhm == 'spatial'):
pdist = numpy.ones((swarmsize,swarmsize))*inf
sbuf = {}
for i in myswarm:
sbuf[i] = {}
for i2 in xrange(i+1,swarmsize):
pdist_tmp = numpy.linalg.norm(x_k[i2,:] - x_k[i,:])
sbuf[i][i2] = pdist_tmp
#end
#end
if (myrank != 0):
Send(sbuf,dest=0)
else:
recb = []
for proc in xrange(1,nproc):
recb.append(Recv(source=proc))
#end
#end
if (myrank == 0):
for i in sbuf.keys():
for i2 in sbuf[i].keys():
pdist[i,i2] = sbuf[i][i2]
pdist[i2,i] = sbuf[i][i2]
#end
#end
for proc in xrange(nproc-1):
for i in recb[proc].keys():
for i2 in recb[proc][i].keys():
pdist[i,i2] = recb[proc][i][i2]
pdist[i2,i] = recb[proc][i][i2]
#end
#end
#end
#end
pdist = Bcast(pdist,root=0)
for i in xrange(swarmsize):
nhps.append([])
for nb in xrange(nhn):
nhps[i].append(pdist[i,:].argmin())
pdist[i,nhps[i][nb]] = inf
#end
if (nhs == 0):
nhps[i].append(i)
#end
#end
elif (nhm == 'sfrac'):
pdist = numpy.zeros((swarmsize,swarmsize))
d_max = numpy.zeros(swarmsize)
frac = 0.6
for i in xrange(swarmsize):
for i2 in xrange(i+1,swarmsize):
pdist[i,i2] = numpy.linalg.norm(x_k[i2,:] - x_k[i,:])
#end
for i2 in xrange(i):
pdist[i,i2] = pdist[i2,i]
#end
#end
for i in xrange(swarmsize):
nhps.append([])
d_max[i] = pdist[i,:].max()
for i2 in xrange(swarmsize):
if (i == i2):
if (nhs == 1):
pass
else:
nhps[i].append(i)
#end
else:
if (pdist[i,i2]/d_max[i] < frac):
nhps[i].append(i2)
#end
#end
#end
#end
#end
# Inizialize Neighbourhood Best
for i in xrange(swarmsize):
for nbp in nhps[i]:
if (L[nbp] < nhbest_L[i]):
nhbest_L[i] = L[nbp]
nhbest_f[i] = f[nbp]
nhbest_x[i,:] = x_k[nbp,:]
nhbest_i[i] = nbp
#end
#end
#end
#end
# Initialize stopping criteria distances
global_dist = 0
for i in xrange(swarmsize):
dist = 0
for j in xrange(dimensions):
dist += (x_k[i,j] - swarm_x[j])**2
#end
global_dist += (dist)**0.5
#end
global_distance_reference = global_dist/swarmsize # relative extent of the swarm
global_distance = numpy.zeros(stopIters, float)
global_L = numpy.zeros(stopIters, float)
for k in xrange(stopIters):
global_distance[k] = global_distance_reference
global_L[k] = swarm_L
#end
# Store History
if sto_hst:
logfile.write(rseed,'seed')
if (scale == 1):
x_uns = numpy.zeros(x_k.shape)
for i in xrange(swarmsize):
x_uns[i,:] = (x_k[i,:] * space_halflen) + space_centre
#end
else:
x_uns = x_k
#end
if discrete_i != []:
for i in xrange(swarmsize):
for m in discrete_i:
x_uns[i,m] = floor(x_uns[i,m] + 0.5)
#end
#end
#end
logfile.write(x_uns,'x')
logfile.write(f,'obj')
logfile.write(g,'con')
logfile.write(swarm_x,'gbest_x')
logfile.write(swarm_f,'gbest_f')
logfile.write(swarm_g,'gbest_g')
#end
# Output to Summary File
if (fileout == 2) or (fileout == 3):
stext = ''
stext += 'Global Best Particle:\n'
stext += '-'*97 + '\n'
stext += ' Major Minor nFCon Violation(L2) Objective Lagrangian Rel Lagrangian Global Dist\n'
stext += '-'*97 + '\n'
sfile.write(stext)
sfile.flush()
#end
# Outer optimization loop
k_out = 0
stop_main_flag = 0
no_successes = 0
no_failures = 0
rho = 1.0
vcr = 0.0
while ((k_out < maxOutIter) and (stop_main_flag == 0)):
k_out += 1
# Update g_old Major Iteration
for i in xrange(swarmsize):
g_old[i,:] = g[i,:]
#end
# Inner optimization loop - core ALPSO algorithm applied to the lagrangian function
k_inn = 0
stop_inner = 0
while ((k_inn < maxInnIter) and (stop_inner == 0)):
k_inn += 1
# calculating new search radius for the best particle ("Guaranteed Convergence" method)
if ((swarm_i == swarm_i_old) and (swarm_L >= swarm_L_old)):
no_failures += 1
no_successes = 0
elif ((swarm_i == swarm_i_old) and (swarm_L < swarm_L_old)):
no_successes += 1
no_failures = 0
else:
no_successes = 0
no_failures = 0
#end
if (no_successes > ns):
rho = 2.0*rho
no_successes = 0
elif (no_failures > nf):
rho = 0.5*rho
no_failures = 0
#end
if (rho < 10e-5):
rho = 10e-5
elif (rho > 1.0):
rho = 1.0
#end
# memorization for next outer iteration
if k_inn == 1:
swarm_i_old = swarm_i
swarm_L_old = swarm_L
swarm_f_old = swarm_f
swarm_g_old[:] = swarm_g[:]
#end
# stopping criteria distances
global_dist = 0
for i in xrange(swarmsize):
dist = 0
for j in xrange(dimensions):
dist += (x_k[i,j] - swarm_x[j])**2
#end
global_dist += (dist)**0.5
#end
global_distance[0] = global_dist/swarmsize # relative extent of the swarm
# Update inertia weight
w = w2 + ((w2 - w1)/global_distance_reference)*global_distance[1]
if (w > w1):
w = w1
elif (w < w2):
w = w2
#end
# Swarm Update
for i in xrange(swarmsize):
# Update velocity vector
if (nhm == 'dlring') or (nhm == 'slring') or (nhm == 'wheel') or (nhm == 'spatial') or (nhm == 'sfrac'):
lbest_x = nhbest_x[i,:]
else:
lbest_x = swarm_x[:]
#end
for j in xrange(dimensions):
if (i == swarm_i):
rr = rand.random()
v_k[i,j] = w*v_k[i,j] + -x_k[i,j] + swarm_x[j] + rho*(1.0 - 2.0*rr)
else:
r1 = rand.random()
r2 = rand.random()
rc = rand.random()
v_k[i,j] = w*v_k[i,j] + c1*r1*(best_x[i,j]-x_k[i,j])/dt + c2*r2*(lbest_x[j] - x_k[i,j])/dt + vcr*(1.0 - 2.0*rc)
#end
# Check for velocity vector out of range
if (v_k[i,j] > vmax[j]):
v_k[i,j] = vmax[j]
elif (v_k[i,j] < -vmax[j]):
v_k[i,j] = -vmax[j]
#end
# positions update
x_k[i,j] = x_k[i,j] + v_k[i,j]*dt
if (xtype[j] == 1):
x_k[i,j] = floor(x_k[i,j] + 0.5)
#end
# Check for positions out of range
if (x_k[i,j] > xmax[j]):
x_k[i,j] = xmax[j]
elif (x_k[i,j] < xmin[j]):
x_k[i,j] = xmin[j]
#end
#end
#end
if h_start:
if (myrank == 0):
[vals,hist_end] = hstfile.read([],ident=['obj','con'])
if not hist_end:
f = vals['obj'][0]
g = vals['con'][0].reshape(g.shape)
else:
h_start = False
hstfile.close()
#end
#end
h_start = Bcast(h_start,root=0)
#end
if not h_start:
## MPI Objective Function Evaluation
Barrier()
x_k = Bcast(x_k,root=0)
send_buf = {}
for i in myswarm:
p_eval = numpy.zeros(constraints+1, float)
if (scale == 1):
xtmp = (x_k[i,:] * space_halflen) + space_centre
else:
xtmp = x_k[i,:]
#end
for m in discrete_i:
xtmp[m] = floor(xtmp[m] + 0.5)
#end
[f_tmp,g_tmp] = objfunc(xtmp)
p_eval[0] = f_tmp
p_eval[1:constraints+1] = g_tmp[:]
send_buf[i] = p_eval
#end
if myrank == 0:
for i in send_buf.keys():
f[i] = send_buf[i][0]
g[i,:] = send_buf[i][1:]
#end
#end
if myrank != 0:
Send(send_buf,dest=0)
else:
p_results = []
for proc in xrange(1,nproc):
p_results.append(Recv(source=proc))
#end
#end
if myrank == 0:
for proc in xrange(nproc-1):
for i in p_results[proc].keys():
f[i] = p_results[proc][i][0]
g[i,:] = p_results[proc][i][1:]
#end
#end
#end
nfevals += swarmsize
#end
# Store History
if sto_hst:
if (scale == 1):
x_uns = numpy.zeros(x_k.shape)
for i in xrange(swarmsize):
x_uns[i,:] = (x_k[i,:] * space_halflen) + space_centre
#end
else:
x_uns = x_k
#end
if discrete_i != []:
for i in xrange(swarmsize):
for m in discrete_i:
x_uns[i,m] = floor(x_uns[i,m] + 0.5)
#end
#end
#end
logfile.write(x_uns,'x')
logfile.write(f,'obj')
logfile.write(g,'con')
#end
# Augmented Lagrange
for i in xrange(swarmsize):
# Lagrangian Value
L[i] = f[i]
if (constraints > 0):
# Equality Constraints
for l in xrange(neqcons):
tau[i,l] = g[i,l]
#end
# Inequality Constraints
for l in xrange(neqcons,constraints):
if (rp[l] != 0):
if (g[i,l] > -lambda_val[l]/(2*rp[l])):
tau[i,l] = g[i,l]
else:
tau[i,l] = -lambda_val[l]/(2*rp[l])
#end
else:
tau[i,l] = g[i,l]
#end
#end
#
for l in xrange(constraints):
L[i] += lambda_val[l]*tau[i,l] + rp[l]*tau[i,l]**2
#end
#end
#end
# Particle Best Update
for i in xrange(swarmsize):
if (L[i] < best_L[i]):
best_L[i] = L[i]
best_f[i] = f[i]
best_g[i,:] = g[i,:]
best_x[i,:] = x_k[i,:]
#end
#end
# Swarm Best Update
for i in xrange(swarmsize):
if (L[i] < swarm_L):
# update of the best particle and best position
swarm_i = i
swarm_x[:] = x_k[i,:]
# update of the best objective function value found
swarm_f = f[i]
# update of the best constraints values found
swarm_g[:] = g[i,:]
# update of the swarm best L
swarm_L = L[i]
#end
#end
# Spatial Neighbourhood Update
if (nhm == 'spatial') or (nhm == 'sfrac'):
sbuf = {}
for i in myswarm:
sbuf[i] = {}
for i2 in xrange(i+1,swarmsize):
pdist_tmp = numpy.linalg.norm(x_k[i2,:] - x_k[i,:])
sbuf[i][i2] = pdist_tmp
#end
#end
if (myrank != 0):
Send(sbuf,dest=0)
else:
recb = []
for proc in xrange(1,nproc):
recb.append(Recv(source=proc))
#end
#end
if (myrank == 0):
for i in sbuf.keys():
for i2 in sbuf[i].keys():
pdist[i,i2] = sbuf[i][i2]
pdist[i2,i] = sbuf[i][i2]
#end
#end
for proc in xrange(nproc-1):
for i in recb[proc].keys():
for i2 in recb[proc][i].keys():
pdist[i,i2] = recb[proc][i][i2]
pdist[i2,i] = recb[proc][i][i2]
#end
#end
#end
#end
pdist = Bcast(pdist,root=0)
if (nhm == 'spatial'):
for i in xrange(swarmsize):
nhps[i] = []
for nb in xrange(nhn):
nhps[i].append(pdist[i,:].argmin())
pdist[i,nhps[i][nb]] = inf
#end
if (nhs == 0):
nhps[i].append(i)
#end
#end
else:
frac = ((3*k_out)+0.6*maxOutIter)/maxOutIter
if frac >= 1.0:
nhm = 'gbest'
else:
for i in xrange(swarmsize):
nhps[i] = []
d_max[i] = pdist[i,:].max()
for i2 in xrange(swarmsize):
if (i == i2):
if (nhs == 1):
pass
else:
nhps[i].append(i)
#end
else:
if (pdist[i,i2]/d_max[i] < frac):
nhps[i].append(i2)
#end
#end
#end
#end
#end
#end
#end
# Neighbourhood Best Update
if (nhm == 'dlring') or (nhm == 'slring') or (nhm == 'wheel') or (nhm == 'spatial') or (nhm == 'sfrac'):
for i in xrange(swarmsize):
for nbp in nhps[i]:
if (L[nbp] < nhbest_L[i]):
nhbest_L[i] = L[nbp]
nhbest_f[i] = f[nbp]
nhbest_x[i,:] = x_k[nbp,:]
nhbest_i[i] = nbp
#end
#end
#end
#end
# Print Inner
if (prtInnIter != 0 and numpy.mod(k_inn,prtInnIter) == 0):
# output to screen
print '%d Inner Iteration of %d Outer Iteration' %(k_inn,k_out)
#end
if (fileout == 1) or (fileout == 3):
# output to filename
pass
#end
#Inner Loop Convergence
if (k_inn >= minInnIter):
if (myrank == 0):
if (swarm_L < swarm_L_old):
stop_inner = 1
#end
#end
stop_inner = Bcast(stop_inner,root=0)
#end
#Store History
if sto_hst:
logfile.write(swarm_x,'gbest_x')
logfile.write(swarm_f,'gbest_f')
logfile.write(swarm_g,'gbest_g')
#end
#end
# Print Outer
if myrank == 0:
if (prtOutIter != 0 and numpy.mod(k_out,prtOutIter) == 0):
# Output to screen
print("="*80 + "\n")
print("NUMBER OF ITERATIONS: %d\n" %(k_out))
print("NUMBER OF OBJECTIVE FUNCTION EVALUATIONS: %d\n" %(nfevals))
print("OBJECTIVE FUNCTION VALUE:")
print("\tF = %.16ge\n" %(float(swarm_f)))
if (constraints > 0):
# Equality Constraints
print("EQUALITY CONSTRAINTS VALUES:")
for l in xrange(neqcons):
print("\tH(%d) = %g" %(l,swarm_g[l]))
#end
# Inequality Constraints
print("\nINEQUALITY CONSTRAINTS VALUES:")
for l in xrange(neqcons,constraints):
print("\tG(%d) = %g" %(l,swarm_g[l]))
#end
#end
print("\nLAGRANGIAN MULTIPLIERS VALUES:")
for l in xrange(constraints):
print("\tL(%d) = %g" %(l,lambda_val[l]))
#end
print("\nBEST POSITION:")
if (scale == 1) or (fileout == 3):
xtmp = (swarm_x[:] * space_halflen) + space_centre
else:
xtmp = swarm_x[:]
#end
for m in discrete_i:
xtmp[m] = floor(xtmp[m] + 0.5)
#end
text = ''
for j in xrange(dimensions):
text += ("\tP(%d) = %.16g\t" %(j,xtmp[j]))
if (numpy.mod(j+1,3) == 0):
text +=("\n")
#end
#end
print text
print("="*80 + "\n")
#end
if (fileout == 1) or (fileout == 3):
# Output to filename
ofile.write("\n" + "="*80 + "\n")
ofile.write("\nNUMBER OF ITERATIONS: %d\n" %(k_out))
ofile.write("\nNUMBER OF OBJECTIVE FUNCTION EVALUATIONS: %d\n" %(nfevals))
ofile.write("\nOBJECTIVE FUNCTION VALUE:\n")
ofile.write("\tF = %.16g\n" %(float(swarm_f)))
if (constraints > 0):
# Equality Constraints
ofile.write("\nEQUALITY CONSTRAINTS VALUES:\n")
for l in xrange(neqcons):
ofile.write("\tH(%d) = %g\n" %(l,swarm_g[l]))
#end
# Inequality Constraints
ofile.write("\nINEQUALITY CONSTRAINTS VALUES:\n")
for l in xrange(neqcons,constraints):
ofile.write("\tG(%d) = %g\n" %(l,swarm_g[l]))
#end
#end
ofile.write("\nLAGRANGIAN MULTIPLIERS VALUES:\n")
for l in xrange(constraints):
ofile.write("\tL(%d) = %g\n" %(l,lambda_val[l]))
#end
ofile.write("\nBEST POSITION:\n")
if (scale == 1):
xtmp = (swarm_x[:] * space_halflen) + space_centre
else:
xtmp = swarm_x[:]
#end
for m in discrete_i:
xtmp[m] = floor(xtmp[m] + 0.5)
#end
text = ''
for j in xrange(dimensions):
text += ("\tP(%d) = %.16g\t" %(j,xtmp[j]))
if (numpy.mod(j+1,3) == 0):
text +=("\n")
#end
#end
ofile.write(text)
ofile.write("\n" + "="*80 + "\n")
ofile.flush()
#end
# Store History
if (sto_hst and (minInnIter != maxInnIter)):
logfile.write(k_inn,'ninner')
#end
#end
if myrank == 0:
# Test Constraint convergence
stop_con_num = 0
infeas_con = []
if (constraints == 0):
stop_constraints_flag = 1
else:
for l in xrange(neqcons):
if (abs(swarm_g[l]) <= etol):
stop_con_num += 1
else:
infeas_con.append(l)
#end
#end
for l in xrange(neqcons,constraints):
if (swarm_g[l] < itol):
stop_con_num += 1
else:
infeas_con.append(l)
#end
#end
if (stop_con_num == constraints):
stop_constraints_flag = 1
else:
stop_constraints_flag = 0
#end
#end
# Test Position and Function convergence
stop_criteria_flag = 0
if (stopCriteria == 1):
# setting up the stopping criteria based on distance and tolerance
for k in xrange(stopIters-1,0,-1):
global_distance[k] = global_distance[k-1]
global_L[k] = global_L[k-1]
#end
#
global_dist = 0
for i in xrange(swarmsize):
dist = 0
for j in xrange(dimensions):
dist += (x_k[i,j] - swarm_x[j])**2
#end
global_dist += (dist)**0.5
#end
global_distance[0] = global_dist/swarmsize # relative extent of the swarm
#
global_L[0] = swarm_L
#
if (abs(global_distance[0]-global_distance[stopIters-1]) <= \
dtol*abs(global_distance[stopIters-1]) and \
abs(global_L[0]-global_L[stopIters-1]) <= \
rtol*abs(global_L[stopIters-1]) or \
abs(global_L[0]-global_L[stopIters-1]) <= atol):
stop_criteria_flag = 1
else:
stop_criteria_flag = 0
#end
#end
# Test Convergence
if (stop_constraints_flag == 1 and stop_criteria_flag == 1):
stop_main_flag = 1
else:
stop_main_flag = 0
#end
# Output to Summary File
if (fileout == 2) or (fileout == 3):
cvss = 0.0
for l in infeas_con:
cvss += swarm_g[l]**2
#end
cvL2 = cvss**0.5
if (stopCriteria == 1):
relL = abs(global_L[0]-global_L[stopIters-1])/abs(global_L[stopIters-1])
stext = '%9d%8d%8d%15.4e%15f%13.4e%16.4e%14.4e\n' %(k_out,k_inn,stop_con_num,cvL2,swarm_f,swarm_L,relL,global_distance[0])
else:
stext = '%9d%8d%8d%15.4e%15f%13.4e%16s%14s\n' %(k_out,k_inn,stop_con_num,cvL2,swarm_f,swarm_L,'NA','NA')
#end
sfile.write(stext)
sfile.flush()
#end
# Update Augmented Lagrangian Terms
if (stop_main_flag == 0):
if (constraints > 0):
# Update new Tau
for l in xrange(neqcons):
tau_new[l] = swarm_g[l]
#end
for l in xrange(neqcons,constraints):
if (swarm_g[l] > -lambda_val[l]/(2*rp[l])):
tau_new[l] = swarm_g[l]
else:
tau_new[l] = -lambda_val[l]/(2*rp[l])
#end
#end
# Update Lagrange Multiplier
for l in xrange(constraints):
lambda_old[l] = lambda_val[l]
lambda_val[l] += 2*rp[l]*tau_new[l]
if (abs(lambda_val[l]) < eps):
lambda_val[l] = 0.0
#end
#end
# Update Penalty Factor
for l in xrange(neqcons):
if (abs(swarm_g[l]) > abs(swarm_g_old[l]) and abs(swarm_g[l]) > etol):
rp[l] = 2.0*rp[l]
elif (abs(swarm_g[l]) <= etol):
rp[l] = 0.5*rp[l]
#end
#end
for l in xrange(neqcons,constraints):
if (swarm_g[l] > swarm_g_old[l] and swarm_g[l] > itol):
rp[l] = 2.0*rp[l]
elif (swarm_g[l] <= itol):
rp[l] = 0.5*rp[l]
#end
#end
# Apply Lower Bounds on rp
for l in xrange(neqcons):
if (rp[l] < 0.5*(abs(lambda_val[l])/etol)**0.5):
rp[l] = 0.5*(abs(lambda_val[l])/etol)**0.5
#end
#end
for l in xrange(neqcons,constraints):
if (rp[l] < 0.5*(abs(lambda_val[l])/itol)**0.5):
rp[l] = 0.5*(abs(lambda_val[l])/itol)**0.5
#end
#end
for l in xrange(constraints):
if (rp[l] < 1):
rp[l] = 1
#end
#end
#end
for i in xrange(swarmsize):
if (constraints > 0):
# Update Tau
for l in xrange(neqcons):
tau[i,l] = g[i,l]
#end
for l in xrange(neqcons,constraints):
if (g[i,l] > -lambda_val[l]/(2*rp[l])):
tau[i,l] = g[i,l]
else:
tau[i,l] = -lambda_val[l]/(2*rp[l])
#end
#end
#end
#end
# set craziness velocity for next inner loop run
vcr = (1 - k_out/float(maxOutIter))*vcrazy  # float division so the craziness velocity decays over outer iterations
# update swarm with new Lagrangian function for next inner run
for i in xrange(swarmsize):
L[i] = f[i]
if (constraints > 0):
for l in xrange(constraints):
L[i] += lambda_val[l]*tau[i,l] + rp[l]*tau[i,l]**2
#end
#end
#end
swarm_L = L[swarm_i]
swarm_L_old = swarm_f_old
if (constraints > 0):
# Equality Constraints
for l in xrange(neqcons):
tau_old[l] = swarm_g_old[l]
#end
# Inequality Constraints
for l in xrange(neqcons,constraints):
if (rp[l] != 0):
if (swarm_g_old[l] > -lambda_val[l]/(2*rp[l])):
tau_old[l] = swarm_g_old[l]
else:
tau_old[l] = -lambda_val[l]/(2*rp[l])
#end
else:
tau_old[l] = swarm_g_old[l]
#end
#end
#
for l in xrange(constraints):
swarm_L_old += lambda_val[l]*tau_old[l] + rp[l]*tau_old[l]**2
#end
#end
# reset swarm memory for next inner run
for i in xrange(swarmsize):
best_L[i] = L[i]
best_f[i] = f[i]
best_g[i,:] = g[i,:]
best_x[i,:] = x_k[i,:]
#end
#end
#end
Barrier()
recv_buf = Bcast(stop_main_flag,root=0)
stop_main_flag = recv_buf
#end
# Print Results
if myrank == 0:
if (prtOutIter != 0):
# Output to screen
print("="*80 + "\n")
print("RANDOM SEED VALUE: %.8f\n" %(rseed))
print("NUMBER OF ITERATIONS: %d\n" %(k_out))
print("NUMBER OF OBJECTIVE FUNCTION EVALUATIONS: %d\n" %(nfevals))
print("OBJECTIVE FUNCTION VALUE:")
print("\tF = %.16g\n" %(float(swarm_f)))
if (constraints > 0):
# Equality Constraints
print("EQUALITY CONSTRAINTS VALUES:")
for l in xrange(neqcons):
print("\tH(%d) = %g" %(l,swarm_g[l]))
#end
# Inequality Constraints
print("\nINEQUALITY CONSTRAINTS VALUES:")
for l in xrange(neqcons,constraints):
print("\tG(%d) = %g" %(l,swarm_g[l]))
#end
#end
print("\nLAGRANGIAN MULTIPLIERS VALUES:")
for l in xrange(constraints):
print("\tL(%d) = %g" %(l,float(lambda_val[l])))
#end
print("\nBEST POSITION:")
if (scale == 1):
xtmp = (swarm_x[:] * space_halflen) + space_centre
else:
xtmp = swarm_x[:]
#end
for m in discrete_i:
xtmp[m] = floor(xtmp[m] + 0.5)
#end
text = ''
for j in xrange(dimensions):
text += ("\tP(%d) = %.16g\t" %(j,xtmp[j]))
if (numpy.mod(j+1,3) == 0):
text +=("\n")
#end
#end
print text
print("="*80 + "\n")
#end
if (fileout == 1) or (fileout == 3):
ofile.close()
#end
if (fileout == 2) or (fileout == 3):
# Output to Summary
sfile.write("\n\nSolution:")
sfile.write("\n" + "="*97 + "\n")
sfile.write("\nNUMBER OF ITERATIONS: %d\n" %(k_out))
sfile.write("\nNUMBER OF OBJECTIVE FUNCTION EVALUATIONS: %d\n" %(nfevals))
sfile.write("\nOBJECTIVE FUNCTION VALUE:\n")
sfile.write("\tF = %.16g\n" %(float(swarm_f)))
if (constraints > 0):
# Equality Constraints
sfile.write("\nEQUALITY CONSTRAINTS VALUES:\n")
for l in xrange(neqcons):
sfile.write("\tH(%d) = %g\n" %(l,swarm_g[l]))
#end
# Inequality Constraints
sfile.write("\nINEQUALITY CONSTRAINTS VALUES:\n")
for l in xrange(neqcons,constraints):
sfile.write("\tG(%d) = %g\n" %(l,swarm_g[l]))
#end
#end
sfile.write("\nLAGRANGIAN MULTIPLIERS VALUES:\n")
for l in xrange(constraints):
sfile.write("\tL(%d) = %g\n" %(l,float(lambda_val[l])))
#end
sfile.write("\nBEST POSITION:\n")
if (scale == 1):
xtmp = (swarm_x[:] * space_halflen) + space_centre
else:
xtmp = swarm_x[:]
#end
for m in discrete_i:
xtmp[m] = floor(xtmp[m] + 0.5)
#end
text = ''
for j in xrange(dimensions):
text += ("\tP(%d) = %.16g\t" %(j,xtmp[j]))
if (numpy.mod(j+1,3) == 0):
text +=("\n")
#end
#end
sfile.write(text)
sfile.write("\n" + "="*97 + "\n")
sfile.flush()
sfile.close()
#end
#end
# Results
if (scale == 1):
opt_x = (swarm_x * space_halflen) + space_centre
else:
opt_x = swarm_x
#end
for m in discrete_i:
opt_x[m] = floor(opt_x[m] + 0.5)
#end
opt_f = swarm_f
opt_g = swarm_g
opt_lambda = lambda_val[:]
opt_x = Bcast(opt_x,root=0)
opt_f = Bcast(opt_f,root=0)
opt_g = Bcast(opt_g,root=0)
opt_lambda = Bcast(opt_lambda,root=0)
return opt_x,opt_f,opt_g,opt_lambda,nfevals,'%.8f' %(rseed)
#==============================================================================
# Optimizers Test
#==============================================================================
if __name__ == '__main__':
print 'Testing ...'
# Test alpso
alpso = alpso()
print alpso
| gpl-3.0 | 6,654,524,556,026,336,000 | 24.099451 | 143 | 0.533925 | false |
mtbc/openmicroscopy | components/tools/OmeroPy/test/integration/clitest/test_script.py | 5 | 3089 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Test of the scripts plugin
Copyright 2010-2013 Glencoe Software, Inc. All rights reserved.
Use is subject to license terms supplied in LICENSE.txt
"""
from test.integration.clitest.cli import CLITest
from omero.plugins.script import ScriptControl
from omero.util.temp_files import create_path
import pytest
scriptText = """
import omero, omero.scripts as s
from omero.rtypes import *
client = s.client("testFullSession", "simple ping script", \
s.Long("a").inout(), s.String("b").inout())
client.setOutput("a", rlong(0))
client.setOutput("b", rstring("c"))
client.closeSession()
"""
subcommands = [
"demo", "list", "cat", "edit", "params", "launch", "disable", "enable",
"jobs", "serve", "upload", "replace", "delete", "run"]
class TestScript(CLITest):
def setup_method(self, method):
super(TestScript, self).setup_method(method)
self.cli.register("script", ScriptControl, "TEST")
self.args += ["script"]
# Help subcommands
# ========================================================================
def testHelp(self):
self.args += ["-h"]
self.cli.invoke(self.args, strict=True)
@pytest.mark.parametrize("subcommand", subcommands)
def testSubcommandHelp(self, subcommand):
self.args += [subcommand, "-h"]
self.cli.invoke(self.args, strict=True)
def testList(self):
self.args += ["list"]
self.cli.invoke(self.args, strict=True) # Throws NonZeroReturnCode
def testDemo(self):
self.args += ["demo"]
self.cli.invoke(self.args, strict=True)
def testFullSession(self):
p = create_path(suffix=".py")
p.write_text(scriptText)
# Sets current script
self.cli.invoke(self.args + ["upload", str(p)], strict=True)
self.cli.invoke(self.args + ["list", "user"], strict=True)
# Replace subcommand
# ========================================================================
def testReplace(self):
p = create_path(suffix=".py")
p.write_text(scriptText)
# test replace with user script (not official)
# Sets current script
self.cli.invoke(self.args + ["upload", str(p)], strict=True)
newId = self.cli.get("script.file.id")
self.cli.invoke(self.args + ["list", "user"], strict=True)
replaceArgs = self.args + ["replace", str(newId), str(p)]
print replaceArgs
self.cli.invoke(replaceArgs, strict=True)
def testReplaceOfficial(self):
p = create_path(suffix=".py")
p.write_text(scriptText)
# test replace with official script
self.args = self.root_login_args() + ["script"]
uploadArgs = self.args + ["upload", str(p), "--official"]
self.cli.invoke(uploadArgs, strict=True) # Sets current script
newId = self.cli.get("script.file.id")
self.cli.invoke(self.args + ["list"], strict=True)
replaceArgs = self.args + ["replace", str(newId), str(p)]
self.cli.invoke(replaceArgs, strict=True)
| gpl-2.0 | -6,947,533,266,396,586,000 | 32.576087 | 78 | 0.596957 | false |
MaisonLogicielLibre/CogSciProject-master | bcam_script/DigitSpan/digitspan.py | 1 | 6103 | #!/usr/bin/python
import pygame, time, random, math
from pygame.locals import *
#import heginput
pygame.init()
#pygame.mouse.set_visible(False)
pygame.event.set_blocked(pygame.MOUSEMOTION)
# screensize = (1280, 800)
screensize = pygame.display.list_modes()[0]
flags = pygame.constants.FULLSCREEN
screen = pygame.display.set_mode(screensize, flags)
secsperevent = 2.
secstodisplay = 1.
backwards = True
trialsperblock = 15
levelthreshold = 2
startinglevel = 6
black = (0,0,0)
white = (255, 255, 255)
bgColor = black
textColor = white
bigfont = pygame.font.SysFont('timesnewroman', 64)
font = pygame.font.SysFont('timesnewroman', 24)
screen.fill(black)
pygame.display.flip()
class Abort(Exception):
pass
mouseclicks = []
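# Collect a typed response, echoing it centered on screen: Return submits,
# Backspace deletes the last character and Escape raises Abort.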
def readkeyinput():
pygame.event.clear()
repeat = True
text = ''
while (repeat):
newevent = pygame.event.wait()
if (newevent.type == 2):
if (newevent.key == K_RETURN):
repeat = False
elif (newevent.key == K_BACKSPACE):
if len(text) > 0:
text = text[:-1]
elif (newevent.key == K_ESCAPE):
raise Abort
else:
text = text + newevent.unicode
centered_word(text, textColor)
return text
def mean_sd(vec):
meanval = sum(vec)/len(vec)
sd = 0.
for t in vec:
sd += (t - meanval)**2
sd = math.sqrt(sd/ (len(vec)) )
return meanval, sd
def display_number(n):
centered_word(n, textColor, font=bigfont)
sleep(secstodisplay)
screen.fill(bgColor)
pygame.display.flip()
sleep(secsperevent - secstodisplay)
def countdown(message):
screen.fill(bgColor)
centered_word(message, textColor)
sleep(3)
centered_word('Get ready.', textColor)
sleep(2)
screen.fill(bgColor)
pygame.display.flip()
sleep(1)
def centered_word(word, color = textColor, font=font):
screen.fill(bgColor)
renderedword = font.render(word, True, color)
topleft = (screensize[0]/2 - renderedword.get_width()/2, screensize[1]/2 - renderedword.get_height()/2)
screen.blit(renderedword, topleft)
pygame.display.flip()
def sleep(secs):
start = time.time()
while time.time() < start + secs:
time.sleep(0.0005)
newevent = pygame.event.poll()
if newevent.type == 2:
if newevent.key == K_ESCAPE:
raise Abort
elif newevent.type == pygame.MOUSEBUTTONDOWN:
mouseclicks.append(time.time())
def generateblock(ishigh):
blocklen = int(secsperblock / secsperevent)
block = [ishigh for i in range(int(blocklen/updownratio))]
block.extend([not ishigh for i in range( int( blocklen * (1 - 1/updownratio)))])
random.shuffle(block)
return block
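# generateblock() above appears to be leftover from a different (TOVA-style)
# script: secsperblock and updownratio are not defined in this file and the
# only call sites are in the commented-out block at the bottom.
# run_block() below implements a simple adaptive staircase: each correct
# recall earns a point, each error costs one; reaching `levelthreshold`
# points lengthens the digit sequence by one, while dropping below zero
# shortens it. The fractional return value level + points/levelthreshold
# is the span estimate that gets printed at the end of the session.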
def run_block():
trial = trialsperblock
level = startinglevel
points = levelthreshold / 2
while trial > 0:
numbers = [random.randint(0,9) for i in range(level)]
results = runTrial(numbers).strip()
try:
resultnumbers = map(int, results)
except:
centered_word("Sorry, invalid response.")
print "Invalid response: %s" % results
sleep(2)
continue  # invalid input: skip scoring, otherwise resultnumbers is stale or undefined
if backwards: resultnumbers.reverse()
if (numbers == resultnumbers):
points += 1
centered_word("Correct.")
else:
points -= 1
centered_word("Incorrect.")
if points >= levelthreshold:
level += 1
points = 0
elif points < 0:
level -= 1
points = levelthreshold - 1
trial -= 1
sleep(2)
screen.fill(bgColor)
pygame.display.flip()
return level + (points / float(levelthreshold))
def runTrial(numbers):
for number in numbers:
display_number(str(number))
centered_word("Type in your answer now.")
return readkeyinput()
centered_word("Please type the subject's name.")
subjname = readkeyinput()
if backwards: foo = "backwards"
else: foo = "in order"
centered_word("Remember the numbers presented, then type them back in %s when prompted." % foo)
sleep(5)
span = run_block()
print span
##blocks = [generateblock(blo) for blo in blockpattern]
##btimes = [run_block(block) for block in blocks]
###b1times = run_block(block1)
###b2times = run_block(block2)
##results = [calctimes(btime) for btime in btimes]
##f = file(subjname + ".tova.txt", 'w')
##for i in range(len(results)):
## f.write("Block %2i:\n" % (i+1))
## f.write(results[i][-2])
## f.write('\n')
##f.write(''.join( [''.rjust(16)]+[("Block %2i"%(i+1)).rjust(10) for i in range(len(results))]+['\n'] ))
##f.write(''.join( ['Response time:' .ljust(16)]+[`result[0]`.rjust(10) for result in results]+['\n'] ))
##f.write(''.join( ['Response stdev:'.ljust(16)]+[('%3.1f'%(result[1])).rjust(10) for result in results]+['\n'] ))
##f.write(''.join( ['False hits:' .ljust(16)]+[`result[2]`.rjust(10) for result in results]+['\n'] ))
##f.write(''.join( ['Misses:' .ljust(16)]+[`result[3]`.rjust(10) for result in results]+['\n'] ))
##f.write(''.join( ['Double hits:' .ljust(16)]+[`result[4]`.rjust(10) for result in results]+['\n'] ))
##f.close()
##
##foo = ''
##foo += '%10s%16s%16s%16s%16s%16s\n' %('', 'Response time', 'Response stdev', 'False hits', 'Misses', 'Double hits')
##for i in range(len(results)):
## foo += '%10s%16i%16.1f%16i%16i%16i\n' % tuple(['Block %2i: '%(i+1)] + list(results[i][:-2]))
##foo += '%10s%16i%16.1f%16i%16i%16i\n' % tuple(["Totals:"]+[mean_sd(vec)[0] for vec in [[results[x][y] for x in range(len(results))] for y in range(len(results[0])-2)]])
##foo += '%10s%16i%16.1f%16i%16i%16i\n' % tuple(["StDevs:"]+[mean_sd(vec)[1] for vec in [[results[x][y] for x in range(len(results))] for y in range(len(results[0])-2)]])
##print foo
##
##
##
##
#meantime = sum(reactiontimes)/len(reactiontimes)
#sd = 0.
#for t in reactiontimes:
# sd += (t - meantime)**2
#sd = math.sqrt(sd/ (len(reactiontimes)) )
pygame.display.quit() | mit | 8,513,167,265,140,920,000 | 29.068966 | 174 | 0.598886 | false |
horstjens/ThePythonGameBook | en/pygame/003_static_blit.py | 1 | 3173 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
003_static_blit.py
static blitting and drawing
url: http://thepythongamebook.com/en:part2:pygame:step003
author: [email protected]
licence: gpl, see http://www.gnu.org/licenses/gpl.html
works with python3.4 and python2.7
Blitting a surface on a static position
Drawing a filled circle into ballsurface.
Blitting this surface once.
introducing pygame draw methods
The ball's rectangular surface is black because the background
color of the ball's surface was never defined nor filled."""
#the next line is only needed for python2.x and not necessary for python3.x
from __future__ import print_function, division
import pygame
pygame.init()
screen=pygame.display.set_mode((640,480))
background = pygame.Surface(screen.get_size())
background.fill((255,255,255)) # fill the background white
background = background.convert() # prepare for faster blitting
ballsurface = pygame.Surface((50,50)) # create a rectangular surface for the ball
#pygame.draw.circle(Surface, color, pos, radius, width=0)
# draw blue filled circle on ball surface
pygame.draw.circle(ballsurface, (0,0,255), (25,25),25)
ballsurface = ballsurface.convert()
ballx = 320
bally = 240
#------- try out some pygame draw functions --------
# see the original documentation at http://www.pygame.org/docs/ref/draw.html
# pygame.draw.rect(Surface, color, Rect, width=0): return Rect
# rect: (x-position of topleft corner, y-position of topleft corner, width, height)
pygame.draw.rect(background, (0,255,0), (50,50,100,25))
# pygame.draw.circle(Surface, color, pos, radius, width=0): return Rect
pygame.draw.circle(background, (0,200,0), (200,50), 35)
# pygame.draw.polygon(Surface, color, pointlist, width=0): return Rect
pygame.draw.polygon(background, (0,180,0), ((250,100),(300,0),(350,50)))
# pygame.draw.arc(Surface, color, Rect, start_angle, stop_angle, width=1): return Rect
# radiant instead of grad
pygame.draw.arc(background, (0,150,0),(400,10,150,100), 0, 3.14)
#------- blit the surfaces on the screen to make them visible
screen.blit(background, (0,0)) # blit the background on the screen (overwriting all)
screen.blit(ballsurface, (ballx, bally)) # blit the topleft corner of ball surface at pos (ballx, bally)
clock = pygame.time.Clock()
mainloop = True
FPS = 30 # desired framerate in frames per second. try out other values !
playtime = 0.0
while mainloop:
milliseconds = clock.tick(FPS) # do not go faster than this frame rate
playtime += milliseconds / 1000.0
# ----- event handler -----
for event in pygame.event.get():
if event.type == pygame.QUIT:
mainloop = False # pygame window closed by user
elif event.type == pygame.KEYDOWN:
if event.key == pygame.K_ESCAPE:
mainloop = False # user pressed ESC
pygame.display.set_caption("Frame rate: {:0.2f} frames per second."
" Playtime: {:.2} seconds".format(
clock.get_fps(),playtime))
pygame.display.flip() # flip the screen like in a flipbook
print("this 'game' was played for %.2f seconds" % playtime)
| gpl-3.0 | -2,612,266,754,215,972,400 | 42.465753 | 105 | 0.699023 | false |
gtsapelas/TRANSrisk_fcm_project | fcm_app/migrations/0028_auto_20180925_1731.py | 1 | 1151 | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-09-25 14:31
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('fcm_app', '0027_auto_20180721_2115'),
]
operations = [
migrations.CreateModel(
name='Tags',
fields=[
('name', models.CharField(default='', max_length=100, primary_key=True, serialize=False)),
],
),
migrations.AlterField(
model_name='fcm',
name='title',
field=models.CharField(max_length=500),
),
migrations.AlterField(
model_name='fcm_concept',
name='title',
field=models.CharField(max_length=500),
),
migrations.AlterField(
model_name='fcm_edges',
name='title',
field=models.CharField(max_length=500),
),
migrations.AddField(
model_name='fcm',
name='tags',
field=models.ManyToManyField(related_name='fcm_set', to='fcm_app.Tags'),
),
]
| mit | 655,630,355,022,140,300 | 27.073171 | 106 | 0.534318 | false |
AlexaProjects/Alexa2 | ALEXA-IDE/core/user_files/alexa_ide/addins/plugins/alexatools/appobjectcapturescreen.py | 1 | 58851 | # -*- coding: UTF-8 -*-
#
# Copyright (C) 2013 Alan Pipitone
#
# This file is part of Al'EXA-IDE.
#
# Al'EXA-IDE is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Al'EXA-IDE is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Al'EXA-IDE. If not, see <http://www.gnu.org/licenses/>.
#PYTHON
import os
import sys
import copy
#NINJA
from ninja_ide.tools import json_manager
from appobjectdialog import AppObjDialog
from apptextdialog import AppTextInRegionDialog
from exportedmenu import ExportedMenu
try:
import json
except ImportError:
import simplejson as json
#QT Toolkit
from PyQt4.QtGui import *
from PyQt4.QtCore import *
import ImageQt
#OPENCV
import cv2
import cv2.cv as cv
#PIL
import Image
#NUMPY
import numpy
#ALEXA
from Alexa import *
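# win32gui / win32con are referenced below for Windows-specific window
# handling (BringWindowToFront, closeExtended) but are not imported
# explicitly in this module; they may already be re-exported by
# `from Alexa import *`. A guarded import such as the sketch below (which
# assumes pywin32 is available on win32) makes the dependency explicit.
if sys.platform == 'win32':
import win32gui
import win32con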
class AppObjectCaptureScreenshot(QWidget):
def __init__(self, plugin):
QWidget.__init__(self)
self.plugin = plugin
self.plug_path = plugin.path
#if self.plugin.undockWindowOpened is True:
#self.plugin.undockWindow.close()
SERVICE_NAME = "editor"
self.editor_service = self.plugin.locator.get_service(SERVICE_NAME)
fullFileName = self.editor_service.get_editor_path()
#from os.path import basename
filePath = os.path.split(fullFileName)[0]
fileName = os.path.split(fullFileName)[1]
#print os.path.splitext(fileName)[1]
self.jsonfile = json_manager.read_json(filePath + os.sep + fileName.replace(os.path.splitext(fileName)[1], ".nja"))
#filetoparse = filePath + os.sep + fileName.replace("py", "nja")
self.jsonfile["ocrdatafolder"]
self.isCanny = False
self.MovingAppObject = False
self.TollerancePreview = False
#self.binarizeImagePreviewFlag = False
#self.Brightness = 0.0
#self.Contrast = 0.0
#self.binarizeLabelPreviewFlag = False
#set pixmap for the background
self.pixmap = QPixmap()
self.pixmap.load(self.plug_path + os.sep + 'tmp' + os.sep + 'screenshot.png')
self.OriginalScreenshot = Image.open(self.plug_path + os.sep + 'tmp' + os.sep + 'screenshot.png')
#store if mouse is pressed
self.pressed = False
self.released = False
self.printLabelBorder = False
self.InsideRect = False
self.InsideRegion = False
self.AdjustOnlyHeightTollerance = False
self.AdjustOnlyWidthTollerance = False
#store mouse position
self.mouseOldX = 0
self.mouseOldY = 0
self.mouseNewX = 0
self.mouseNewY = 0
self.rectLabelCollection = []
self.rectLabelCollectionDeleted = []
#Al'exa AppObject
self.AlexaAppObjectsBackup = []
self.AlexaAppObjectsBackupDeleted = []
self.AlexaAppObjects = []
self.AlexaAppObjectsDeleted = []
self.LabelOfInterest = []
self.LastRectHover = 0
self.Dialog = None
self.DialogOpened = False
self.DialogHwnd = None
self.CropLabel = False
self.CropRegion = False
self.AppObjectFeedbackIndex = None
self.indexFound = None
self.indexFoundAppText = None
def UpdateLabelOfInterest(self):
self.LabelOfInterest = []
if self.AlexaAppObjects[self.AppObjectFeedbackIndex].Label.Width != 0 and self.AlexaAppObjects[self.AppObjectFeedbackIndex].Label.Height != 0:
left = self.AlexaAppObjects[self.AppObjectFeedbackIndex].RectX + self.AlexaAppObjects[self.AppObjectFeedbackIndex].Label.OffsetX
top = self.AlexaAppObjects[self.AppObjectFeedbackIndex].RectY + self.AlexaAppObjects[self.AppObjectFeedbackIndex].Label.OffsetY
width = self.AlexaAppObjects[self.AppObjectFeedbackIndex].Label.Width
height = self.AlexaAppObjects[self.AppObjectFeedbackIndex].Label.Height
box = (left, top, left + width, top + height)
self.LabelOfInterest.append(box)
elif self.AlexaAppObjects[self.AppObjectFeedbackIndex].Label.Position == "top":
self.AddLabelOfInterestTop()
elif self.AlexaAppObjects[self.AppObjectFeedbackIndex].Label.Position == "left":
self.AddLabelOfInterestLeft()
elif self.AlexaAppObjects[self.AppObjectFeedbackIndex].Label.Position == "inside":
self.AddLabelOfInterestInside()
elif self.AlexaAppObjects[self.AppObjectFeedbackIndex].Label.Position == "right":
self.AddLabelOfInterestRight()
elif self.AlexaAppObjects[self.AppObjectFeedbackIndex].Label.Position == "":
self.AddLabelOfInterestTop()
self.AddLabelOfInterestLeft()
self.AddLabelOfInterestInside()
self.AddLabelOfInterestRight()
def AddLabelOfInterestTop(self):
left = self.AlexaAppObjects[self.AppObjectFeedbackIndex].RectX - 10
top = self.AlexaAppObjects[self.AppObjectFeedbackIndex].RectY - (self.AlexaAppObjects[self.AppObjectFeedbackIndex].Height + (self.AlexaAppObjects[self.AppObjectFeedbackIndex].Height / 2))
width = self.AlexaAppObjects[self.AppObjectFeedbackIndex].Width + 20
height = (self.AlexaAppObjects[self.AppObjectFeedbackIndex].Height + (self.AlexaAppObjects[self.AppObjectFeedbackIndex].Height / 2))
box = (left, top, left + width, top + height)
self.LabelOfInterest.append(box)
def AddLabelOfInterestLeft(self):
left = self.AlexaAppObjects[self.AppObjectFeedbackIndex].RectX - (self.AlexaAppObjects[self.AppObjectFeedbackIndex].Width * 2)
top = self.AlexaAppObjects[self.AppObjectFeedbackIndex].RectY
width = self.AlexaAppObjects[self.AppObjectFeedbackIndex].Width * 2
height = self.AlexaAppObjects[self.AppObjectFeedbackIndex].Height
box = (left, top, left + width, top + height)
self.LabelOfInterest.append(box)
def AddLabelOfInterestInside(self):
left = self.AlexaAppObjects[self.AppObjectFeedbackIndex].RectX + 2
top = self.AlexaAppObjects[self.AppObjectFeedbackIndex].RectY + 2
width = self.AlexaAppObjects[self.AppObjectFeedbackIndex].Width - 4
height = self.AlexaAppObjects[self.AppObjectFeedbackIndex].Height - 4
box = (left, top, left + width, top + height)
self.LabelOfInterest.append(box)
def AddLabelOfInterestRight(self):
left = self.AlexaAppObjects[self.AppObjectFeedbackIndex].RectX + self.AlexaAppObjects[self.AppObjectFeedbackIndex].Width
top = self.AlexaAppObjects[self.AppObjectFeedbackIndex].RectY
width = self.AlexaAppObjects[self.AppObjectFeedbackIndex].Width * 2
height = self.AlexaAppObjects[self.AppObjectFeedbackIndex].Height
box = (left, top, left + width, top + height)
self.LabelOfInterest.append(box)
def BringWindowToFront(self):
if sys.platform == 'win32':
win32gui.SetForegroundWindow(self.DialogHwnd)
def PaintRectTollerance(self, paint, AlexaAppObject):
pen = QPen()
pen.setWidth(1)
pen.setBrush(QColor(255, 0, 0, 255))
paint.setPen(pen)
OuterPath = QPainterPath()
OuterPath.setFillRule(Qt.WindingFill)
OuterPath.addRect(AlexaAppObject.RectX - AlexaAppObject.WidthTollerance,
AlexaAppObject.RectY - AlexaAppObject.HeightTollerance,
AlexaAppObject.Width + (AlexaAppObject.WidthTollerance * 2),
AlexaAppObject.Height + (AlexaAppObject.HeightTollerance * 2))
if AlexaAppObject.Width >= (AlexaAppObject.WidthTollerance * 2) and AlexaAppObject.Height >= (AlexaAppObject.HeightTollerance * 2):
InnerPath = QPainterPath()
InnerPath.addRect(AlexaAppObject.RectX + AlexaAppObject.WidthTollerance,
AlexaAppObject.RectY + AlexaAppObject.HeightTollerance,
AlexaAppObject.Width - (AlexaAppObject.WidthTollerance * 2),
AlexaAppObject.Height - (AlexaAppObject.HeightTollerance * 2))
FillPath = OuterPath.subtracted(InnerPath)
paint.fillPath(FillPath, QColor(255, 0, 255, 130))
else:
paint.fillPath(OuterPath, QColor(255, 0, 255, 130))
pen.setWidth(1)
pen.setStyle(Qt.DashLine)
paint.setPen(pen)
paint.drawRect(AlexaAppObject.RectX - AlexaAppObject.WidthTollerance,
AlexaAppObject.RectY - AlexaAppObject.HeightTollerance,
AlexaAppObject.Width + (AlexaAppObject.WidthTollerance * 2),
AlexaAppObject.Height + (AlexaAppObject.HeightTollerance * 2))
pen.setStyle(Qt.SolidLine)
paint.setPen(pen)
#paint.drawRect(x, y, AlexaAppObject.Width, AlexaAppObject.Height)
pen.setStyle(Qt.DashLine)
paint.setPen(pen)
if AlexaAppObject.Width >= (AlexaAppObject.WidthTollerance * 2) and AlexaAppObject.Height >= (AlexaAppObject.HeightTollerance * 2):
paint.drawRect(AlexaAppObject.RectX + AlexaAppObject.WidthTollerance,
AlexaAppObject.RectY + AlexaAppObject.HeightTollerance,
AlexaAppObject.Width - (AlexaAppObject.WidthTollerance * 2),
AlexaAppObject.Height - (AlexaAppObject.HeightTollerance * 2))
elif AlexaAppObject.Width >= (AlexaAppObject.WidthTollerance * 2):
paint.drawLine(AlexaAppObject.RectX + AlexaAppObject.WidthTollerance,
AlexaAppObject.RectY + (AlexaAppObject.Height / 2),
AlexaAppObject.RectX + AlexaAppObject.Width - AlexaAppObject.WidthTollerance,
AlexaAppObject.RectY + (AlexaAppObject.Height / 2))
#paint.drawLine(AlexaAppObject.RectX + 8, AlexaAppObject.RectY + (h/2), AlexaAppObject.RectX + 8 + 1, AlexaAppObject.RectY + (h/2))
#paint.drawLine(AlexaAppObject.RectX + w - 8, AlexaAppObject.RectY + (h/2), AlexaAppObject.RectX + w - 8, AlexaAppObject.RectY + (h/2))
#pen.setStyle(Qt.SolidLine)
#paint.setPen(pen)
#paint.drawLine(AlexaAppObject.RectX + 8, y, AlexaAppObject.RectX + 8, AlexaAppObject.RectY + h)
elif AlexaAppObject.Height >= (AlexaAppObject.HeightTollerance * 2):
paint.drawLine(AlexaAppObject.RectX + (AlexaAppObject.Width / 2),
AlexaAppObject.RectY + AlexaAppObject.HeightTollerance,
AlexaAppObject.RectX + (AlexaAppObject.Width / 2),
AlexaAppObject.RectY + AlexaAppObject.Height - AlexaAppObject.HeightTollerance)
else:
paint.drawLine(AlexaAppObject.RectX + (AlexaAppObject.Width / 2),
AlexaAppObject.RectY + (AlexaAppObject.Height / 2),
AlexaAppObject.RectX + (AlexaAppObject.Width / 2),
AlexaAppObject.RectY + (AlexaAppObject.Height / 2))
pen.setWidth(1)
pen.setStyle(Qt.SolidLine)
pen.setBrush(QBrush(QColor(0, 255, 0, 255)))
paint.setPen(pen)
#paint.drawLine(x, AlexaAppObject.RectY - 5 - 10, AlexaAppObject.RectX + w, AlexaAppObject.RectY - 5 - 10)
#paint.drawLine(x, AlexaAppObject.RectY - 5 - 6, x, AlexaAppObject.RectY - 5 - 14)
#paint.drawLine(AlexaAppObject.RectX + w, AlexaAppObject.RectY - 5 - 6, AlexaAppObject.RectX + w, AlexaAppObject.RectY - 5 - 14)
def paintEvent(self, event):
paint = QPainter()
paint.begin(self)
paint.drawPixmap(0, 0, self.pixmap)
pen = QPen()
#needed to draw the rectangle with sharp, non-rounded corners
pen.setJoinStyle(Qt.MiterJoin)
center = QPoint(QCursor.pos())
cnt = 0
self.InsideRect = False
self.InsideRegion = False
#loop for drawing the rectangles and tooltips
for appObject in self.AlexaAppObjects:
pen.setBrush(QColor(255, 0, 0, 255))
pen.setWidth(1)
paint.setPen(pen)
if self.printLabelBorder is True:
for box in self.LabelOfInterest:
pen.setWidth(1)
pen.setStyle(Qt.DashLine)
if self.indexFound is not None and self.indexFound == self.AppObjectFeedbackIndex:
pen.setBrush(QColor(112, 81, 213, 255))
else:
pen.setBrush(QColor(255, 0, 0, 255))
paint.setPen(pen)
newRect = QRect(box[0], box[1], box[2] - box[0], box[3] - box[1])
paint.drawRect(newRect)
if self.TollerancePreview is True and self.CropLabel is False and self.CropRegion is False and cnt == self.AppObjectFeedbackIndex:
pen.setBrush(QColor(255, 0, 0, 255))
pen.setWidth(1)
paint.setPen(pen)
font = paint.font()
paint.setFont(font)
x = appObject.RectX
y = appObject.RectY
#w = appObject.Width
#h = appObject.Height
'''
paint.drawText(x, y - 30,
"Height: " + str(appObject.Height) +
", Width: " + str(appObject.Width))
'''
#appObject.Height = 200
#self.PaintRectHover(paint, x, y, w, h)
self.PaintRectTollerance(paint, appObject)
elif (center.x() > appObject.RectX and
center.x() < appObject.Width + appObject.RectX and
center.y() > appObject.RectY and
center.y() < appObject.Height + appObject.RectY and
self.CropLabel is False and self.CropRegion is False):
pen.setStyle(Qt.SolidLine)
if self.indexFound is not None and self.indexFound == self.AppObjectFeedbackIndex and self.indexFound == cnt:
pen.setBrush(QColor(112, 81, 213, 255))
else:
pen.setBrush(QColor(255, 0, 0, 255))
pen.setWidth(1)
paint.setPen(pen)
font = paint.font()
paint.setFont(font)
x = appObject.RectX
y = appObject.RectY
if self.indexFound is not None and self.indexFound == self.AppObjectFeedbackIndex and self.indexFound == cnt:
paint.fillRect(appObject.RectX,
appObject.RectY,
appObject.Width,
appObject.Height,
QBrush(QColor(0, 255, 0, 130)))
else:
paint.fillRect(appObject.RectX,
appObject.RectY,
appObject.Width,
appObject.Height,
QBrush(QColor(255, 0, 255, 130)))
newRect = QRect(appObject.RectX,
appObject.RectY,
appObject.Width,
appObject.Height)
paint.drawRect(newRect)
#self.PaintRectHover(paint, x, y, appObject)
self.InsideRect = True
self.LastRectHover = cnt
elif (center.x() > appObject.RectX + appObject.CropRegionX and
center.x() < appObject.RectX + appObject.CropRegionX + appObject.CropRegionWidth and
center.y() > appObject.RectY + appObject.CropRegionY and
center.y() < appObject.RectY + appObject.CropRegionY + appObject.CropRegionHeight and
self.CropLabel is False and self.CropRegion is False):
pen.setStyle(Qt.SolidLine)
if self.indexFound is not None and self.indexFound == self.AppObjectFeedbackIndex and self.indexFound == cnt:
pen.setBrush(QColor(112, 81, 213, 255))
else:
pen.setBrush(QColor(255, 0, 0, 255))
pen.setWidth(1)
paint.setPen(pen)
font = paint.font()
paint.setFont(font)
x = appObject.RectX
y = appObject.RectY
if self.indexFound is not None and self.indexFound == self.AppObjectFeedbackIndex and self.indexFound == cnt:
paint.fillRect(appObject.RectX,
appObject.RectY,
appObject.Width,
appObject.Height,
QBrush(QColor(0, 255, 0, 130)))
else:
paint.fillRect(appObject.RectX,
appObject.RectY,
appObject.Width,
appObject.Height,
QBrush(QColor(255, 0, 255, 130)))
newRect = QRect(appObject.RectX,
appObject.RectY,
appObject.Width,
appObject.Height)
paint.drawRect(newRect)
#self.PaintRectHover(paint, x, y, appObject)
self.InsideRegion = True
self.LastRectHover = cnt
else:
#paint.setRenderHint(QPainter.SmoothPixmapTransform)
pen.setStyle(Qt.SolidLine)
if self.indexFound is not None and self.indexFound == self.AppObjectFeedbackIndex and self.indexFound == cnt:
pen.setBrush(QColor(112, 81, 213, 255))
else:
pen.setBrush(QColor(255, 0, 0, 255))
pen.setWidth(1)
paint.setPen(pen)
if self.indexFound is not None and self.indexFound == self.AppObjectFeedbackIndex and self.indexFound == cnt:
paint.fillRect(appObject.RectX,
appObject.RectY,
appObject.Width,
appObject.Height,
QBrush(QColor(0, 255, 0, 130)))
else:
paint.fillRect(appObject.RectX,
appObject.RectY,
appObject.Width,
appObject.Height,
QBrush(QColor(255, 0, 255, 130)))
'''
paint.fillRect(appObject.RectX,
appObject.RectY,
appObject.Width,
appObject.Height,
QBrush(QColor(255, 0, 255, 100)))
'''
newRect = QRect(appObject.RectX,
appObject.RectY,
appObject.Width,
appObject.Height)
paint.drawRect(newRect)
if appObject.CropRegionX != 0 and appObject.CropRegionY != 0 and appObject.CropRegionWidth != 0 and appObject.CropRegionHeight != 0:
x = appObject.RectX
y = appObject.RectY
w = appObject.CropRegionWidth
h = appObject.CropRegionHeight
'''
pen.setStyle(Qt.DashLine)
pen.setBrush(QColor(0, 0, 255, 255))
pen.setWidth(1)
paint.setPen(pen)
newRect = QRect(x + appObject.CropRegionX,
y + appObject.CropRegionY, w, h)
paint.drawRect(newRect)
'''
pen.setStyle(Qt.SolidLine)
pen.setBrush(QColor(0, 78, 255, 255))
pen.setWidth(1)
paint.setPen(pen)
paint.fillRect(x + appObject.CropRegionX,
y + appObject.CropRegionY, w, h,
QBrush(QColor(100, 80, 155, 100)))
newRect = QRect(x + appObject.CropRegionX,
y + appObject.CropRegionY, w, h)
paint.drawRect(newRect)
if self.indexFoundAppText is not None and self.indexFoundAppText == self.AppObjectFeedbackIndex and self.indexFoundAppText == cnt:
pen.setWidth(1)
pen.setStyle(Qt.DashLine)
pen.setBrush(QColor(255, 0, 255, 255))
paint.setPen(pen)
newRect = QRect(x + appObject.CropRegionX + appObject.AppText.x, y + appObject.CropRegionY + appObject.AppText.y, appObject.AppText.Width, appObject.AppText.Height)
paint.drawRect(newRect)
if appObject.Label.OffsetX != 0 and appObject.Label.OffsetY != 0 and appObject.Label.Width != 0 and appObject.Label.Height != 0:
'''
x = appObject.RectX
y = appObject.RectY
w = appObject.Width
h = appObject.Height
#paint.setRenderHint(QPainter.SmoothPixmapTransform)
pen.setStyle(Qt.DashLine)
#pen.setStyle(Qt.DashDotLine)
#pen.setDashPattern([1, 1])
pen.setBrush(QColor(255, 0, 0, 255))
pen.setWidth(1)
paint.setPen(pen)
newRect = QRect(x + appObject.Label.OffsetX,
y + appObject.Label.OffsetY,
appObject.Label.Width,
appObject.Label.Height)
paint.drawRect(newRect)
'''
cnt = cnt + 1
#paint.restore()
self.mouseNewX = center.x()
self.mouseNewY = center.y()
if self.InsideRect is True or self.InsideRegion is True:
if self.DialogOpened is True or self.InsideRegion is True:
self.setCursor(QCursor(Qt.ArrowCursor))
else:
self.setCursor(QCursor(Qt.SizeAllCursor))
return
if self.pressed is False:
if self.DialogOpened is True:
self.setCursor(QCursor(Qt.ArrowCursor))
return
#pen.setStyle(Qt.DashDotLine)
pen.setDashPattern([1, 1])
pen.setWidth(1)
pen.setBrush(QColor(32, 178, 170, 255))
#pen.setBrush(QColor(225, 0, 0, 255))
paint.setPen(pen)
#from the centre upwards
paint.drawLine(center.x(), center.y(), center.x(), 0)
#from the centre downwards
paint.drawLine(center.x(), center.y(), center.x(), self.height())
paint.drawLine(center.x(), center.y(), 0, center.y())
paint.drawLine(center.x(), center.y(), self.width(), center.y())
pen.setStyle(Qt.SolidLine)
pen.setWidth(1)
pen.setBrush(Qt.red)
pen.setCapStyle(Qt.RoundCap)
pen.setJoinStyle(Qt.RoundJoin)
else:
pen.setWidth(1)
pen.setStyle(Qt.SolidLine)
#pen.setBrush(QColor(128, 128, 128, 255))
pen.setBrush(QBrush(QColor(0, 255, 0, 255)))
paint.setPen(pen)
paint.fillRect(self.mouseOldX + 1,
self.mouseOldY + 1,
center.x() - self.mouseOldX - 1,
center.y() - self.mouseOldY - 1,
QBrush(QColor(32, 178, 170, 100)))
rect = QRect(self.mouseOldX, self.mouseOldY,
center.x() - self.mouseOldX, center.y() - self.mouseOldY)
paint.drawRect(rect)
self.setCursor(QCursor(Qt.CrossCursor))
#self.setCursor(QCursor(Qt.BlankCursor))
paint.end()
def mouseMoveEvent(self, event):
pos = event.pos()
#print('mouse move: (%d, %d)' % (pos.x(), pos.y()))
if self.InsideRect is True and self.pressed is True:
self.MovingAppObject = True
#newRect = QRect(pos.x() - self.xOffset, pos.y() - self.yOffset,
#self.AlexaAppObjects[self.LastRectHover].width(),
#self.AlexaAppObjects[self.LastRectHover].height())
self.AlexaAppObjects[self.LastRectHover].RectX = pos.x() - self.xOffset
self.AlexaAppObjects[self.LastRectHover].RectY = pos.y() - self.yOffset
#self.rectObjCollection[self.LastRectHover].x = pos.x()
#self.rectObjCollection[self.LastRectHover].y = pos.y()
#trigger a paint event
#self.color = QColor(Qt.red)
self.update()
def wheelEvent(self, event):
if self.DialogOpened is True:
return
#print self.LastRectHover
#print self.InsideRect
if self.InsideRect is True:
if self.AdjustOnlyHeightTollerance is True:
self.AlexaAppObjects[self.LastRectHover].HeightTollerance = self.AlexaAppObjects[self.LastRectHover].HeightTollerance + event.delta() / 120
if self.AlexaAppObjects[self.LastRectHover].HeightTollerance < 0:
self.AlexaAppObjects[self.LastRectHover].HeightTollerance = 0
elif self.AdjustOnlyWidthTollerance is True:
self.AlexaAppObjects[self.LastRectHover].WidthTollerance = self.AlexaAppObjects[self.LastRectHover].WidthTollerance + event.delta() / 120
if self.AlexaAppObjects[self.LastRectHover].WidthTollerance < 0:
self.AlexaAppObjects[self.LastRectHover].WidthTollerance = 0
else:
self.AlexaAppObjects[self.LastRectHover].HeightTollerance = self.AlexaAppObjects[self.LastRectHover].HeightTollerance + event.delta() / 120
if self.AlexaAppObjects[self.LastRectHover].HeightTollerance < 0:
self.AlexaAppObjects[self.LastRectHover].HeightTollerance = 0
self.AlexaAppObjects[self.LastRectHover].WidthTollerance = self.AlexaAppObjects[self.LastRectHover].WidthTollerance + event.delta() / 120
if self.AlexaAppObjects[self.LastRectHover].WidthTollerance < 0:
self.AlexaAppObjects[self.LastRectHover].WidthTollerance = 0
self.update()
#mouse press event
def mousePressEvent(self, event):
if self.DialogOpened is True or self.InsideRegion is True:
#self.BringWindowToFront()
return
if event.buttons() == Qt.LeftButton:
if self.InsideRect is True:
pos = QPoint(QCursor.pos())
self.xOffset = pos.x() - self.AlexaAppObjects[self.LastRectHover].RectX
self.yOffset = pos.y() - self.AlexaAppObjects[self.LastRectHover].RectY
#self.setCursor(QCursor(Qt.BlankCursor))
self.pressed = True
origin = QPoint(QCursor.pos())
self.mouseOldX = origin.x()
self.mouseOldY = origin.y()
self.update()
def mouseDoubleClickEvent(self, event):
if self.DialogOpened is True:
#self.BringWindowToFront()
return
if self.InsideRect is True:
self.DialogOpened = True
self.Dialog = AppObjDialog(self, self.LastRectHover, 0)
self.Dialog.show()
self.AppObjectFeedbackIndex = self.Dialog.objectIndex
return
if self.InsideRegion is True:
self.DialogOpened = True
self.Dialog = AppTextInRegionDialog(self, self.LastRectHover, 0)
self.Dialog.show()
self.AppObjectFeedbackIndex = self.Dialog.objectIndex
return
#mouse release event
def mouseReleaseEvent(self, event):
self.MovingAppObject = False
if self.DialogOpened is True:
#self.BringWindowToFront()
return
if self.InsideRect is True or self.InsideRegion is True:
self.pressed = False
#self.AlexaAppObjectsBackup[self.LastRectHover] = copy.deepcopy(self.AlexaAppObjects[self.LastRectHover])
self.update()
return
#if(event.type() == QEvent.MouseButtonRelease):
if event.button() == Qt.LeftButton:
if self.DialogOpened is True:
#self.BringWindowToFront()
return
self.pressed = False
self.released = True
width = self.mouseNewX - self.mouseOldX
height = self.mouseNewY - self.mouseOldY
rect = QRect(self.mouseOldX, self.mouseOldY, width, height)
if (rect.width() >= 3 or rect.width() <= -3) and (rect.height() >= 3 or rect.height() <= -3):
AlexaObject = AlexaAppObjectPlus()
if (rect.width() < 0 and rect.height() < 0):
x = rect.x() + rect.width()
y = rect.y() + rect.height()
w = -rect.width()
h = -rect.height()
##rect = QRect(x, y, w, h)
elif (rect.width() < 0 and rect.height() > 0):
x = rect.x() + rect.width()
y = rect.y()
w = -rect.width()
h = rect.height()
##rect = QRect(x, y, w, h)
elif (rect.width() > 0 and rect.height() < 0):
x = rect.x()
y = rect.y() + rect.height()
w = rect.width()
h = -rect.height()
##rect = QRect(x, y, w, h)
else:
x = rect.x()
y = rect.y()
w = rect.width()
h = rect.height()
##rect = QRect(x, y, w, h)
if width < 0:
width = width * -1
if height < 0:
height = height * -1
if self.CropLabel is False and self.CropRegion is False:
#AlexaObject.Height = height
#AlexaObject.Width = width
AlexaObject.Height = h
AlexaObject.Width = w
AlexaObject.HeightTollerance = 10
AlexaObject.WidthTollerance = 10
AlexaObject.RectX = x
AlexaObject.RectY = y
self.AlexaAppObjects.append(AlexaObject)
#self.AlexaAppObjectsBackup.append(AlexaObject)
self.AlexaAppObjectsBackup.append(copy.deepcopy(AlexaObject))
self.AlexaAppObjectsDeleted = []
##self.rectObjCollection.append(rect)
##self.rectObjCollectionDeleted = []
self.DialogOpened = True
#self.Dialog = AppObjDialog(self, len(self.AlexaAppObjects)-1)
self.Dialog = AppObjDialog(self, len(self.AlexaAppObjects)-1, 0)
self.Dialog.show()
self.AppObjectFeedbackIndex = self.Dialog.objectIndex
elif self.CropLabel is True:
self.AlexaAppObjects[self.AppObjectFeedbackIndex].Label.OffsetX = x - self.AlexaAppObjects[self.AppObjectFeedbackIndex].RectX
self.AlexaAppObjects[self.AppObjectFeedbackIndex].Label.OffsetY = y - self.AlexaAppObjects[self.AppObjectFeedbackIndex].RectY
self.AlexaAppObjects[self.AppObjectFeedbackIndex].Label.Width = w
self.AlexaAppObjects[self.AppObjectFeedbackIndex].Label.Height = h
self.DialogOpened = True
#self.Dialog = AppObjDialog(self, len(self.AlexaAppObjects)-1)
self.Dialog = AppObjDialog(self, len(self.AlexaAppObjects)-1, 1)
self.Dialog.show()
self.AppObjectFeedbackIndex = self.Dialog.objectIndex
self.CropLabel = False
elif self.CropRegion is True:
self.AlexaAppObjects[self.AppObjectFeedbackIndex].CropRegionX = x - self.AlexaAppObjects[self.AppObjectFeedbackIndex].RectX
self.AlexaAppObjects[self.AppObjectFeedbackIndex].CropRegionY = y - self.AlexaAppObjects[self.AppObjectFeedbackIndex].RectY
self.AlexaAppObjects[self.AppObjectFeedbackIndex].CropRegionWidth = w
self.AlexaAppObjects[self.AppObjectFeedbackIndex].CropRegionHeight = h
self.DialogOpened = True
self.Dialog = AppTextInRegionDialog(self, len(self.AlexaAppObjects)-1, 0)
self.Dialog.show()
self.AppObjectFeedbackIndex = self.Dialog.objectIndex
self.CropRegion = False
#ui = Ui_Dialog()
#ui.setupUi(Dialog)
#self.Dialog.show()
#if sys.platform == 'win32':
#self.DialogHwnd = win32gui.GetForegroundWindow()
self.update()
#self.closeExtended()
def keyPressEvent(self, event):
if self.DialogOpened is True:
#self.BringWindowToFront()
return
if event.modifiers() == Qt.ControlModifier and self.InsideRect is True:
self.AdjustOnlyHeightTollerance = True
if event.modifiers() == Qt.AltModifier and self.InsideRect is True:
self.AdjustOnlyWidthTollerance = True
if event.modifiers() == Qt.ControlModifier and event.key() == Qt.Key_Z:
if len(self.AlexaAppObjects) > 0:
self.AlexaAppObjectsDeleted.append(self.AlexaAppObjects[-1])
self.AlexaAppObjectsBackupDeleted.append(self.AlexaAppObjectsBackup[-1])
del self.AlexaAppObjects[-1]
del self.AlexaAppObjectsBackup[-1]
self.update()
if event.modifiers() == Qt.ControlModifier and event.key() == Qt.Key_Y:
if len(self.AlexaAppObjectsDeleted) > 0:
self.AlexaAppObjects.append(self.AlexaAppObjectsDeleted[-1])
self.AlexaAppObjectsBackup.append(self.AlexaAppObjectsBackupDeleted[-1])
del self.AlexaAppObjectsDeleted[-1]
del self.AlexaAppObjectsBackupDeleted[-1]
#self.AlexaAppObjects = []
self.update()
if event.key() == Qt.Key_Escape:
self.closeExtended()
def keyReleaseEvent(self, event):
#print event.key()
if event.key() == Qt.Key_Control:
#if event.modifiers() == Qt.ControlModifier:
self.AdjustOnlyHeightTollerance = False
if event.key() == Qt.Key_Alt:
self.AdjustOnlyWidthTollerance = False
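# closeExtended() below is the "export" step: every captured AppObject (and
# the optional AppText region attached to it) is turned into Al'exa script
# lines and inserted into the active editor, honouring the editor's
# tab/space indentation settings, before this capture widget is closed.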
def closeExtended(self):
if self.plugin.undockWindowOpened is True:
self.plugin.undockWindow.setVisible(True)
elif sys.platform == 'win32':
toplist = []
winlist = []
def enum_callback(hwnd, results):
winlist.append((hwnd, win32gui.GetWindowText(hwnd)))
win32gui.EnumWindows(enum_callback, toplist)
firefox = [(hwnd, title) for hwnd, title in
winlist if 'exa-ide' in title.lower() and 'about' not in title.lower() and 'exa tool' not in title.lower()]
# just grab the first window that matches
#firefox = firefox[0]
for ninja in firefox:
win32gui.ShowWindow(ninja[0], win32con.SW_SHOW)
#print str(ninja[0]) + " " + ninja[1]
SERVICE_NAME = "editor"
editor_service = self.plugin.locator.get_service(SERVICE_NAME)
curLineText = editor_service.get_current_line_text()
if editor_service.use_tab() is False:
leadingSpaceNumber = len(curLineText) - len(curLineText.lstrip(' '))
tabChar = ' ' * editor_service.get_indentation()
else:
leadingSpaceNumber = len(curLineText) - len(curLineText.lstrip('\t'))
tabChar = '\t'
leadingChar = ""
if editor_service.use_tab() is False:
for x in range(leadingSpaceNumber):
leadingChar = leadingChar + " "
else:
for x in range(leadingSpaceNumber):
leadingChar = leadingChar + "\t"
for alexaAppObject in self.AlexaAppObjects:
#maxLineLen = 0
#commentLine = ""
description = ""
if alexaAppObject.Name is not None and alexaAppObject.Name != "":
#objName = alexaAppObject.Name.replace(" ", "")
objName = alexaAppObject.Name
else:
objName = "object" + str(self.plugin.AlexaAppObjCnt)
editor_service.insert_text(" #AppObject: " + objName)
editor_service.insert_text(os.linesep)
if alexaAppObject.Description != "":
description = alexaAppObject.Description.splitlines(True)
editor_service.insert_text(leadingChar + "'''")
editor_service.insert_text(os.linesep)
editor_service.insert_text(leadingChar + "Description:")
editor_service.insert_text(os.linesep)
for line in description:
editor_service.insert_text(leadingChar + line)
editor_service.insert_text(os.linesep)
editor_service.insert_text(leadingChar + "'''")
editor_service.insert_text(os.linesep)
editor_service.insert_text(leadingChar + objName + " = AppObject()")
editor_service.insert_text(os.linesep)
editor_service.insert_text(leadingChar + objName + ".Name = \"" + objName + "\"")
editor_service.insert_text(os.linesep)
editor_service.insert_text(leadingChar + objName + ".Height = " + str(alexaAppObject.Height))
editor_service.insert_text(os.linesep)
editor_service.insert_text(leadingChar + objName + ".Width = " + str(alexaAppObject.Width))
editor_service.insert_text(os.linesep)
editor_service.insert_text(leadingChar + objName + ".HeightTollerance = " + str(alexaAppObject.HeightTollerance))
editor_service.insert_text(os.linesep)
editor_service.insert_text(leadingChar + objName + ".WidthTollerance = " + str(alexaAppObject.WidthTollerance))
editor_service.insert_text(os.linesep)
if alexaAppObject.ImageBinarize is True:
editor_service.insert_text(leadingChar + objName + ".ImageBinarize = True")
editor_service.insert_text(os.linesep)
editor_service.insert_text(leadingChar + objName + ".ImageBrightness = " + str(alexaAppObject.ImageBrightness))
editor_service.insert_text(os.linesep)
editor_service.insert_text(leadingChar + objName + ".ImageContrast = " + str(alexaAppObject.ImageContrast))
editor_service.insert_text(os.linesep)
if alexaAppObject.Label.Text != "":
editor_service.insert_text(leadingChar + objName + ".Label.Text = \"" + alexaAppObject.Label.Text + "\"")
editor_service.insert_text(os.linesep)
editor_service.insert_text(leadingChar + objName + ".Label.Language = \"" + alexaAppObject.Label.Language + "\"")
editor_service.insert_text(os.linesep)
if alexaAppObject.Label.OffsetX != 0 and alexaAppObject.Label.OffsetY != 0 and alexaAppObject.Label.Width != 0 and alexaAppObject.Label.Height != 0:
editor_service.insert_text(leadingChar + objName + ".Label.OffsetX = " + str(alexaAppObject.Label.OffsetX))
editor_service.insert_text(os.linesep)
editor_service.insert_text(leadingChar + objName + ".Label.OffsetY = " + str(alexaAppObject.Label.OffsetY))
editor_service.insert_text(os.linesep)
editor_service.insert_text(leadingChar + objName + ".Label.Width = " + str(alexaAppObject.Label.Width))
editor_service.insert_text(os.linesep)
editor_service.insert_text(leadingChar + objName + ".Label.Height = " + str(alexaAppObject.Label.Height))
editor_service.insert_text(os.linesep)
elif alexaAppObject.Label.Position != "":
editor_service.insert_text(leadingChar + objName + ".Label.Position = \"" + alexaAppObject.Label.Position + "\"")
editor_service.insert_text(os.linesep)
if alexaAppObject.Label.Binarize is True:
editor_service.insert_text(leadingChar + objName + ".Label.Binarize = True")
editor_service.insert_text(os.linesep)
editor_service.insert_text(leadingChar + objName + ".Label.Brightness = " + str(alexaAppObject.Label.Brightness))
editor_service.insert_text(os.linesep)
editor_service.insert_text(leadingChar + objName + ".Label.Contrast = " + str(alexaAppObject.Label.Contrast))
editor_service.insert_text(os.linesep)
if alexaAppObject.OcrWhiteList != Ocr.WhiteList:
editor_service.insert_text(leadingChar + "Ocr.WhiteList = \"" + alexaAppObject.OcrWhiteList + "\"")
editor_service.insert_text(os.linesep)
timeOutTime = 15
if alexaAppObject.EnablePerfData is True:
timeOutTime = timeOutTime + alexaAppObject.PerfCriticalLevel
editor_service.insert_text(leadingChar + "performance = " + objName + ".Bind(" + str(timeOutTime) + ")")
editor_service.insert_text(os.linesep)
if alexaAppObject.EnablePerfData is True:
editor_service.insert_text(leadingChar + "NagiosUtils.AddPerformanceData(\"" + objName + "\", performance, " + str(alexaAppObject.PerfWarningLevel) + ", " + str(alexaAppObject.PerfCriticalLevel) + ")")
editor_service.insert_text(os.linesep)
if alexaAppObject.UseMouse is True or alexaAppObject.UseKeyboard is True or\
alexaAppObject.CropRegionX != 0 or alexaAppObject.CropRegionY != 0 or alexaAppObject.CropRegionHeight != 0 or alexaAppObject.CropRegionWidth != 0:
editor_service.insert_text(leadingChar + "if " + objName + ".TimeOut is False:")
editor_service.insert_text(os.linesep)
else:
editor_service.insert_text(leadingChar + "if " + objName + ".TimeOut is False:")
editor_service.insert_text(os.linesep)
editor_service.insert_text(leadingChar + tabChar + "pass")
editor_service.insert_text(os.linesep)
if alexaAppObject.Click is True and alexaAppObject.UseMouse is True:
editor_service.insert_text(leadingChar + tabChar + "Mouse.Click(" + objName + ".x + (" + objName + ".Width / 2), " + objName + ".y + (" + objName + ".Height / 2))")
editor_service.insert_text(os.linesep)
if alexaAppObject.DoubleClick is True and alexaAppObject.UseMouse is True:
editor_service.insert_text(leadingChar + tabChar + "Mouse.DoubleClick(" + objName + ".x + (" + objName + ".Width / 2), " + objName + ".y + (" + objName + ".Height / 2))")
editor_service.insert_text(os.linesep)
if alexaAppObject.UseKeyboard is True:
editor_service.insert_text(leadingChar + tabChar + "Keyboard.InsertText(\"" + alexaAppObject.InsertText + "\")")
editor_service.insert_text(os.linesep)
if alexaAppObject.CropRegionX != 0 or alexaAppObject.CropRegionY != 0 or alexaAppObject.CropRegionHeight != 0 or alexaAppObject.CropRegionWidth != 0:
if alexaAppObject.CropRegionX < 0:
cropRegionX = -alexaAppObject.CropRegionX
cropOperatorX = "-"
else:
cropRegionX = alexaAppObject.CropRegionX
cropOperatorX = "+"
if alexaAppObject.CropRegionY < 0:
cropRegionY = -alexaAppObject.CropRegionY
cropOperatorY = "-"
else:
cropRegionY = alexaAppObject.CropRegionY
cropOperatorY = "+"
editor_service.insert_text(leadingChar + tabChar + "SearchRegion.Bind(" + objName + ".x " + cropOperatorX + " " + str(cropRegionX) + ", " + objName + ".y " + cropOperatorY + " " + str(cropRegionY) + ", " +
str(alexaAppObject.CropRegionWidth) + ", " + str(alexaAppObject.CropRegionHeight) + ")")
editor_service.insert_text(os.linesep)
editor_service.insert_text(leadingChar + "elif " + objName + ".TimeOut is True and ExitOnError is True:")
editor_service.insert_text(os.linesep)
editor_service.insert_text(leadingChar + tabChar + "Finish()")
editor_service.insert_text(os.linesep)
if alexaAppObject.OcrWhiteList != Ocr.WhiteList:
editor_service.insert_text(leadingChar + "Ocr.WhiteList = \"" + Ocr.WhiteList + "\"")
editor_service.insert_text(os.linesep)
editor_service.insert_text(leadingChar + "#end...")
editor_service.insert_text(os.linesep)
'''
editor_service.insert_text(leadingChar + "mouseX = " + objName + ".x + (" + objName + ".Width / 2)")
editor_service.insert_text(os.linesep)
editor_service.insert_text(leadingChar + "mouseY = " + objName + ".y + (" + objName + ".Height / 2)")
editor_service.insert_text(os.linesep)
editor_service.insert_text(os.linesep)
#editor_service.insert_text(leadingChar + 'self, text')
#editor_service.insert_text(os.linesep)
'''
self.plugin.AlexaAppObjCnt = self.plugin.AlexaAppObjCnt + 1
if alexaAppObject.CropRegionX != 0 or alexaAppObject.CropRegionY != 0 or alexaAppObject.CropRegionHeight != 0 or alexaAppObject.CropRegionWidth != 0:
editor_service.insert_text(os.linesep)
description = ""
if alexaAppObject.AppText.Name is not None and alexaAppObject.AppText.Name != "":
#objName = alexaAppObject.AppText.Name.replace(" ", "")
objName = alexaAppObject.AppText.Name
else:
objName = "object" + str(self.plugin.AlexaAppObjCnt)
editor_service.insert_text(leadingChar + " #AppText: " + objName)
editor_service.insert_text(os.linesep)
if alexaAppObject.AppText.Description != "":
description = alexaAppObject.AppText.Description.splitlines(True)
editor_service.insert_text(leadingChar + "'''")
editor_service.insert_text(os.linesep)
editor_service.insert_text(leadingChar + "Description:")
editor_service.insert_text(os.linesep)
for line in description:
editor_service.insert_text(leadingChar + line)
editor_service.insert_text(os.linesep)
editor_service.insert_text(leadingChar + "'''")
editor_service.insert_text(os.linesep)
editor_service.insert_text(leadingChar + objName + " = AppText()")
editor_service.insert_text(os.linesep)
editor_service.insert_text(leadingChar + objName + ".Name = \"" + objName + "\"")
editor_service.insert_text(os.linesep)
if alexaAppObject.AppText.Text != "":
editor_service.insert_text(leadingChar + objName + ".Text = \"" + alexaAppObject.AppText.Text + "\"")
editor_service.insert_text(os.linesep)
editor_service.insert_text(leadingChar + objName + ".Language = \"" + alexaAppObject.AppText.Language + "\"")
editor_service.insert_text(os.linesep)
if alexaAppObject.AppText.Binarize is True:
editor_service.insert_text(leadingChar + objName + ".Binarize = True")
editor_service.insert_text(os.linesep)
editor_service.insert_text(leadingChar + objName + ".Brightness = " + str(alexaAppObject.AppText.Brightness))
editor_service.insert_text(os.linesep)
editor_service.insert_text(leadingChar + objName + ".Contrast = " + str(alexaAppObject.AppText.Contrast))
editor_service.insert_text(os.linesep)
if alexaAppObject.AppText.OcrWhiteList != Ocr.WhiteList:
editor_service.insert_text(leadingChar + "Ocr.WhiteList = \"" + alexaAppObject.AppText.OcrWhiteList + "\"")
editor_service.insert_text(os.linesep)
timeOutTime = 15
if alexaAppObject.AppText.EnablePerfData is True:
timeOutTime = timeOutTime + alexaAppObject.AppText.PerfCriticalLevel
editor_service.insert_text(leadingChar + "performance = " + objName + ".Bind(" + str(timeOutTime) + ")")
editor_service.insert_text(os.linesep)
if alexaAppObject.AppText.EnablePerfData is True:
editor_service.insert_text(leadingChar + "NagiosUtils.AddPerformanceData(\"" + objName + "\", performance, " + str(alexaAppObject.AppText.PerfWarningLevel) + ", " + str(alexaAppObject.AppText.PerfCriticalLevel) + ")")
editor_service.insert_text(os.linesep)
if alexaAppObject.AppText.UseMouse is True or alexaAppObject.AppText.UseKeyboard is True:
editor_service.insert_text(leadingChar + "if " + objName + ".TimeOut is False:")
editor_service.insert_text(os.linesep)
else:
editor_service.insert_text(leadingChar + "if " + objName + ".TimeOut is False:")
editor_service.insert_text(os.linesep)
editor_service.insert_text(leadingChar + tabChar + "pass")
editor_service.insert_text(os.linesep)
if alexaAppObject.AppText.Click is True and alexaAppObject.AppText.UseMouse is True:
editor_service.insert_text(leadingChar + tabChar + "Mouse.Click(" + objName + ".x + (" + objName + ".Width / 2), " + objName + ".y + (" + objName + ".Height / 2))")
editor_service.insert_text(os.linesep)
if alexaAppObject.AppText.DoubleClick is True and alexaAppObject.AppText.UseMouse is True:
editor_service.insert_text(leadingChar + tabChar + "Mouse.DoubleClick(" + objName + ".x + (" + objName + ".Width / 2), " + objName + ".y + (" + objName + ".Height / 2))")
editor_service.insert_text(os.linesep)
if alexaAppObject.AppText.UseKeyboard is True:
editor_service.insert_text(leadingChar + tabChar + "Keyboard.InsertText(\"" + alexaAppObject.AppText.InsertText + "\")")
editor_service.insert_text(os.linesep)
editor_service.insert_text(leadingChar + "elif " + objName + ".TimeOut is True and ExitOnError is True:")
editor_service.insert_text(os.linesep)
editor_service.insert_text(leadingChar + tabChar + "Finish()")
editor_service.insert_text(os.linesep)
if alexaAppObject.AppText.OcrWhiteList != Ocr.WhiteList:
editor_service.insert_text(leadingChar + "Ocr.WhiteList = \"" + Ocr.WhiteList + "\"")
editor_service.insert_text(os.linesep)
editor_service.insert_text(leadingChar + "SearchRegion.Unbind()")
editor_service.insert_text(os.linesep)
editor_service.insert_text(leadingChar + "#end...")
editor_service.insert_text(os.linesep)
self.plugin.AlexaAppObjCnt = self.plugin.AlexaAppObjCnt + 1
editor_service.insert_text(os.linesep)
editor_service.insert_text(leadingChar)
#print editor_service.get_text()
self.close()
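# The DoBinarize* helpers below only preview the brightness/contrast and
# threshold settings: the selected PIL region is enhanced, converted to an
# OpenCV image, thresholded to black and white (fixed threshold of 100-127),
# pasted back into a copy of the screenshot and shown as the widget's pixmap.
# The original screenshot on disk is left untouched.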
def DoBinarizeLabel(self, brightness, contrast):
#print brightness, contrast
self.UpdateLabelOfInterest()
PilImage2 = self.OriginalScreenshot.copy()
for box in self.LabelOfInterest:
region = PilImage2.crop(box)
#region.save("c:\\region.png")
enhancer = ImageEnhance.Brightness(region)
region = enhancer.enhance(brightness)
enhancer = ImageEnhance.Contrast(region)
region = enhancer.enhance(contrast)
#PilImage2.paste(region, box)
cv_im = cv.CreateImageHeader(region.size, cv.IPL_DEPTH_8U, 3)
cv.SetData(cv_im, region.tostring())
mat = cv.GetMat(cv_im)
img = numpy.asarray(mat)
img = cv2.cvtColor(img, cv2.COLOR_RGB2BGR)
gray_image = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
thresh = 100
im_bw = cv2.threshold(gray_image, thresh, 255, cv2.THRESH_BINARY)[1]
im = Image.fromarray(im_bw)
PilImage2.paste(im, box)
self.QtImage1 = ImageQt.ImageQt(PilImage2)
self.QtImage2 = QImage(self.QtImage1)
self.pixmap = QPixmap.fromImage(self.QtImage2)
#self.pixmap.load("im_bw.png")
self.update()
def DoBinarizeRegion(self, brightness, contrast):
#print brightness, contrast
self.UpdateLabelOfInterest()
PilImage2 = self.OriginalScreenshot.copy()
x = self.AlexaAppObjects[self.AppObjectFeedbackIndex].RectX + self.AlexaAppObjects[self.AppObjectFeedbackIndex].CropRegionX
y = self.AlexaAppObjects[self.AppObjectFeedbackIndex].RectY + self.AlexaAppObjects[self.AppObjectFeedbackIndex].CropRegionY
w = self.AlexaAppObjects[self.AppObjectFeedbackIndex].RectX + self.AlexaAppObjects[self.AppObjectFeedbackIndex].CropRegionX + self.AlexaAppObjects[self.AppObjectFeedbackIndex].CropRegionWidth
h = self.AlexaAppObjects[self.AppObjectFeedbackIndex].RectY + self.AlexaAppObjects[self.AppObjectFeedbackIndex].CropRegionY + self.AlexaAppObjects[self.AppObjectFeedbackIndex].CropRegionHeight
box = (x, y, w, h)
region = PilImage2.crop(box)
#region.save("c:\\region.png")
enhancer = ImageEnhance.Brightness(region)
region = enhancer.enhance(brightness)
enhancer = ImageEnhance.Contrast(region)
region = enhancer.enhance(contrast)
#PilImage2.paste(region, box)
cv_im = cv.CreateImageHeader(region.size, cv.IPL_DEPTH_8U, 3)
cv.SetData(cv_im, region.tostring())
mat = cv.GetMat(cv_im)
img = numpy.asarray(mat)
img = cv2.cvtColor(img, cv2.COLOR_RGB2BGR)
gray_image = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
thresh = 100
im_bw = cv2.threshold(gray_image, thresh, 255, cv2.THRESH_BINARY)[1]
im = Image.fromarray(im_bw)
PilImage2.paste(im, box)
self.QtImage1 = ImageQt.ImageQt(PilImage2)
self.QtImage2 = QImage(self.QtImage1)
self.pixmap = QPixmap.fromImage(self.QtImage2)
#self.pixmap.load("im_bw.png")
self.update()
def DoBinarizeImage(self, brightness, contrast):
PilImage2 = self.OriginalScreenshot.copy()
left = self.x()
top = self.y()
width = self.width()
height = self.height()
box = (left, top, left+width, top+height)
region = PilImage2.crop(box)
#region.save("c:\\region.png")
enhancer = ImageEnhance.Brightness(region)
region = enhancer.enhance(brightness)
enhancer = ImageEnhance.Contrast(region)
region = enhancer.enhance(contrast)
#PilImage2.paste(region, box)
cv_im = cv.CreateImageHeader(region.size, cv.IPL_DEPTH_8U, 3)
cv.SetData(cv_im, region.tostring())
mat = cv.GetMat(cv_im)
img = numpy.asarray(mat)
img = cv2.cvtColor(img, cv2.COLOR_RGB2BGR)
gray_image = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
thresh = 127
im_bw = cv2.threshold(gray_image, thresh, 255, cv2.THRESH_BINARY)[1]
im = Image.fromarray(im_bw)
PilImage2.paste(im, box)
#PilImage2.save("im_bw.png")
self.QtImage1 = ImageQt.ImageQt(PilImage2)
self.QtImage2 = QImage(self.QtImage1)
self.pixmap = QPixmap.fromImage(self.QtImage2)
#self.pixmap.load("im_bw.png")
self.update()
def DoCanny(self):
'''
PilImage = Image.open('screenshot.png')
cv_im = cv.CreateImageHeader(PilImage.size, cv.IPL_DEPTH_8U, 3)
cv.SetData(cv_im, PilImage.tostring())
mat = cv.GetMat(cv_im)
img = numpy.asarray(mat)
img = cv2.cvtColor(img, cv2.COLOR_RGB2BGR)
'''
img = cv2.imread(self.plug_path + os.sep + 'tmp' + os.sep + 'screenshot.png')
blue, green, red = cv2.split(img)
# Run canny edge detection on each channel
blue_edges = self.medianCanny(blue, 0.2, 0.3)
green_edges = self.medianCanny(green, 0.2, 0.3)
red_edges = self.medianCanny(red, 0.2, 0.3)
# Join edges back into image
edges = blue_edges | green_edges | red_edges
cv2.imwrite(self.plug_path + os.sep + 'tmp' + os.sep + 'canny.png', edges)
self.pixmap.load(self.plug_path + os.sep + 'tmp' + os.sep + 'canny.png')
def medianCanny(self, img, thresh1, thresh2):
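# Auto-Canny style heuristic: the two hysteresis thresholds are taken as
# fractions (thresh1, thresh2) of the channel's median intensity, so edge
# detection adapts to the overall brightness of each colour channel.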
median = numpy.median(img)
img = cv2.Canny(img, int(thresh1 * median), int(thresh2 * median))
return img
class AlexaAppObjectPlus(AppObject, object):
def __init__(self):
super(AlexaAppObjectPlus, self).__init__()
self.RectX = None
self.RectY = None
#self.OcrWhiteList = "'0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ&:/-_\,+()*.=[]<>@"
self.OcrWhiteList = Ocr.WhiteList
self.Click = False
self.DoubleClick = False
self.UseMouse = True
self.UseKeyboard = False
self.InsertText = ""
self.CropRegionX = 0
self.CropRegionY = 0
self.CropRegionHeight = 0
self.CropRegionWidth = 0
self.Description = ""
self.EnablePerfData = False
self.PerfWarningLevel = 0
self.PerfCriticalLevel = 0
self.AppText = AlexaAppTextPlus()
self.AppTextBackup = AlexaAppTextPlus()
class AlexaAppTextPlus(AppText, object):
def __init__(self):
super(AlexaAppTextPlus, self).__init__()
self.OcrWhiteList = Ocr.WhiteList
self.Click = False
self.DoubleClick = False
self.UseMouse = True
self.UseKeyboard = False
self.Description = ""
self.EnablePerfData = False
self.PerfWarningLevel = 0
self.PerfCriticalLevel = 0
self.InsertText = ""
| gpl-3.0 | 3,241,367,506,389,657,600 | 42.951456 | 237 | 0.591392 | false |
DiamondLightSource/auto_tomo_calibration-experimental | measure_resolution/find_resolution.py | 1 | 21663 | import numpy as np
import pylab as pl
from scipy.ndimage.filters import median_filter
from skimage.filter import threshold_otsu
from skimage import io, exposure
import os
import fit_data
from scipy import misc
from math import ceil
def save_data(filename, data):
import cPickle
print("Saving data")
f = open(filename, 'w')
cPickle.dump(data, f)
f.close()
def create_dir(directory):
if not os.path.exists(directory):
os.makedirs(directory)
def fit_and_visualize(image, folder_name, r1, r2, window_size):
"""
Takes in the region of interest, which is a 2D image.
Modulation is calculated for the lines further away
from the touch point. It is used for normalizing MTF.
Intensity_left/right store the intensities of the left and right
spheres, which are the mean pixel values over a narrow strip
along every sphere.
"""
# Denoise using a median filter
if window_size != 0:
denoised = median_filter(image, window_size)
else:
denoised = image
# Save the images containing the gaps
misc.imsave(folder_name + "touch_img.png", denoised)
misc.imsave(folder_name + "touch_img.tif", denoised)
# Calculate average sphere intensity. They are segmented using
# thresholding and the pixels are averaged
left = denoised[:, 0:image.shape[1] / 2. - 10]
right = denoised[:, image.shape[1] / 2. + 10:image.shape[1]]
thresh_l = threshold_otsu(left)
thresh_r = threshold_otsu(right)
sphere_pixels_l = []
for y in range(left.shape[1]):
for x in range(left.shape[0]):
pixel = left[x, y]
if pixel > thresh_l:
sphere_pixels_l.append(pixel)
sphere_pixels_r = []
for y in range(right.shape[1]):
for x in range(right.shape[0]):
pixel = right[x, y]
if pixel > thresh_r:
sphere_pixels_r.append(pixel)
intensity_left = np.mean(sphere_pixels_l)
intensity_right = np.mean(sphere_pixels_r)
# This is the modulation at a high separation, used
# to normalize the MTF values between 0% and 100%
low_freq_left = (intensity_left - np.min(denoised)) /\
(intensity_left + np.min(denoised))
low_freq_right = (intensity_right - np.min(denoised)) /\
(intensity_right + np.min(denoised))
gap = []
mtf_cleft = []
mtf_cright = []
# Take values only to the middle of the image
# since the problem is symmetric on the other side
for i in np.arange(0., image.shape[0] / 2.):
Xdata = []
Ydata = []
gapX = []
gapY = []
distance = dist_between_spheres(r1, r2, i, image.shape[0] / 2.)
signal = [pixel for pixel in denoised[i, :]]
gap_signal = []
for j in np.arange(0., image.shape[1]):
# Used to plot line on the image
Xdata.append(j)
Ydata.append(i)
# If we are in the region where the spheres are separated,
# store these values to plot the gap
if image.shape[1] / 2. + distance / 2. > j > image.shape[1] / 2. - distance / 2.:
gapX.append(j)
gapY.append(i)
# Take the region around the gap, which later on will be used
# to define the intensity at the gap between the spheres.
# The width of the gap is not exact
if image.shape[1] / 2. + distance + 10 > j > image.shape[1] / 2. - distance - 10:
gap_signal.append(denoised[i, j])
# Check if the gap still exists
if gap_signal:
# If the signal minima is higher than the minima in the gap
# it means that the contrast must be lost in the centre - stop
if distance < 10:
if np.min(signal) >= np.min(gap_signal):
mtf = 100 * modulation(np.min(gap_signal), intensity_left, distance) / low_freq_left
# PLOT THE REGION AROUND THE MIDDLE OF THE CURVE
# PLOT THE LINE PROFILE
# Do this only if mtf is at least 1; below that
# the gap is unresolved and the Gaussian width starts
# to spread out, which ruins the results
if mtf >= 1:
# FIT A GAUSSIAN
amp = -np.min(gap_signal)
centre = np.mean(np.argwhere(np.min(gap_signal) == gap_signal))
sigma = distance / 6.
offset = np.max(gap_signal)
guess_params = [amp, centre, sigma, offset]
Xfit, Yfit, fwhm, fit_centre = fit_data.GaussConst(gap_signal, guess_params)
ymax = np.max(denoised)
ymin = np.min(denoised)
data = np.array([range(len(gap_signal)), gap_signal]).T
pl.plot(data[:,0],
data[:,1], 'bo')
pl.plot(Xfit, Yfit)
pl.title("Analytical {0} / Fitted dist {1} / Contrast {2} ".format(round(distance, 2), round(fwhm, 2), round(mtf,2)))
pl.ylim(ymin, ymax)
# PLOT THE ANALYTICAL WIDTH
pl.plot(np.repeat(fit_centre - distance / 2., len(Yfit)),
np.arange(len(Yfit)), 'r-')
pl.plot(np.repeat(fit_centre + distance / 2., len(Yfit)),
np.arange(len(Yfit)), 'r-', label = "Analytical")
pl.legend()
pl.savefig(folder_name + 'results%i.png' % i)
pl.close('all')
# Store the values of the gap width for every value
# of contrast
gap.append(distance)
mtf = 100 * modulation(np.min(gap_signal), intensity_left, distance) / low_freq_left
mtf_cleft.append(mtf)
mtf = 100 * modulation(np.min(gap_signal), intensity_right, distance) / low_freq_right
mtf_cright.append(mtf)
############# LEFT SPHERE #########################
pl.gca().invert_xaxis()
pl.plot(gap, mtf_cleft, 'r', label="left sphere data")
pl.xlabel("Width in pixels")
pl.ylabel("MTF %")
pl.tight_layout()
# Save data points
save_data(folder_name + 'gap_width.npy', gap)
save_data(folder_name + 'mtf_cleft.npy', mtf_cleft)
f = open(folder_name + 'gap_width.txt', 'w')
for i in range(len(gap)):
f.write(repr(gap[i]) + '\n')
f.close()
f = open(folder_name + 'mtf_cleft.txt', 'w')
for i in range(len(mtf_cleft)):
f.write(repr(mtf_cleft[i]) + '\n')
f.close()
pl.savefig(folder_name + 'mtf_left.png')
pl.close('all')
############### RIGHT SPHERE #####################
pl.gca().invert_xaxis()
    pl.plot(gap, mtf_cright, 'r', label="right sphere data")
pl.xlabel("Width in pixels")
pl.ylabel("MTF %")
# Save data points
save_data(folder_name + 'mtf_cright.npy', mtf_cright)
f = open(folder_name + 'mtf_cright.txt', 'w')
for i in range(len(mtf_cright)):
f.write(repr(mtf_cright[i]) + '\n')
f.close()
pl.savefig(folder_name + 'mtf_right.png')
pl.close('all')
def dist_between_spheres(r1, r2, Y, C):
"""
Calculate distance between the spheres using
geometry. Read report to see how it is done.
"""
h = C - Y
d1 = np.sqrt(r1**2 - h**2)
d2 = np.sqrt(r2**2 - h**2)
dist = r1 - d1 + r2 - d2
return dist
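# A small worked example of the geometry above (illustrative numbers, not from
# any particular scan): with r1 = r2 = 100 px and the centre row C = 100, the
# row Y = 70 is h = 30 px away from the centre row, so
# d1 = d2 = sqrt(100**2 - 30**2) ~ 95.39 px and the gap is
# roughly 2 * (100 - 95.39) ~ 9.2 px.
# dist_between_spheres(100., 100., 70., 100.) # ~9.21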
def modulation(minima, contrast, distance):
"""
modulation(contrast) = (Imax - Imin) / (Imax + Imin)
"""
numerator = contrast - minima
denominator = contrast + minima
return numerator / denominator
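# Example with made-up values: if the mean sphere intensity is 200 and the
# darkest pixel in the gap is 50, modulation(50, 200, distance) gives
# (200 - 50) / (200 + 50) = 0.6. Note that the distance argument is accepted
# but not used in the calculation.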
def modulus(vect):
"""
Get the modulus of a vector
"""
return np.sqrt(vect[0]**2 + vect[1]**2 + vect[2]**2)
def distance_3D(c1, c2):
"""
Calculate the distance between two points
"""
return np.sqrt((c1[0] - c2[0]) ** 2 + (c1[1] - c2[1]) ** 2 + (c1[2] - c2[2]) ** 2)
def vector_3D(pt1, pt2, t):
"""
Compute the 3d line equation in a parametric form
(x,y,z) = (x1,y1,z1) - t * (x1-x2, y1-y2, z1-z2)
x = x1 - (x1 - x2)*t
y = y1 - (y1 - y2)*t
z = z1 - (z1 - z2)*t
"""
x1, y1, z1 = pt1
x2, y2, z2 = pt2
modulus = np.sqrt((x1 - x2) ** 2 + (y1 - y2) ** 2 + (z1 - z2) ** 2)
x = x1 + (x2 - x1) / modulus * t
y = y1 + (y2 - y1) / modulus * t
z = z1 + (z2 - z1) / modulus * t
return [x, y, z]
def vector_perpendicular_3D(pt1, pt2, which, Z, Sx):
"""
Returns a vector S perpendicular to a line
between pt1 and pt2 AND such that lies in x-y plane
at height Z
'which' describes through which point to draw it (pt1 or pt2)
Sx describes the position along the perpendicular vector.
"""
v = ((pt2[0] - pt1[0]), (pt2[1] - pt1[1]), (pt2[2] - pt1[2]))
if which == 1:
Sx, Sy = (pt1[0] - v[1] / np.sqrt(v[0]**2 + v[1]**2) * Sx,
pt1[1] + v[0] / np.sqrt(v[0]**2 + v[1]**2) * Sx)
Sz = pt1[2]
elif which == 2:
Sx, Sy = (pt2[0] - v[1] / np.sqrt(v[0]**2 + v[1]**2) * Sx,
pt2[1] + v[0] / np.sqrt(v[0]**2 + v[1]**2) * Sx)
Sz = pt2[2]
return [Sx, Sy, Sz + Z]
def vector_perpendicular_ct_pt(pt1, pt2, r1, Sx):
"""
Vector must be perpendicular to the one
connecting the centres of the spheres, v1, and
to the vector perpendicular to v1, that goes
    through the point of contact
"""
v = ((pt2[0] - pt1[0]), (pt2[1] - pt1[1]), (pt2[2] - pt1[2]))
ct_pt = vector_3D(pt1, pt2, r1)
perp_v_in_xy = np.array(vector_perpendicular_3D(ct_pt, pt2, 1, 0, -1)) -\
np.array(vector_perpendicular_3D(ct_pt, pt2, 1, 0, 1))
vect = np.cross(v, perp_v_in_xy)
mod_vect = modulus(vect)
x = ct_pt[0] + vect[0] / mod_vect * Sx
y = ct_pt[1] + vect[1] / mod_vect * Sx
z = ct_pt[2] + vect[2] / mod_vect * Sx
return [x, y, z]
# Find perpendicular vector components
# if np.isinf(1. / np.sqrt(v[0]**2 + v[2]**2)):
# v1 = np.array([ct_pt[0],
# ct_pt[1] - v[2] / np.sqrt(v[1]**2 + v[2]**2) * Sx,
# ct_pt[2] + v[1] / np.sqrt(v[1]**2 + v[2]**2) * Sx])
#
# elif np.isinf(1. / np.sqrt(v[1]**2 + v[2]**2)):
# v1 = np.array([ct_pt[0] - v[2] / np.sqrt(v[0]**2 + v[2]**2) * Sx,
# ct_pt[1],
# ct_pt[2] + v[0] / np.sqrt(v[0]**2 + v[2]**2) * Sx])
# else:
# v1 = np.array([ct_pt[0] - v[2] / np.sqrt(v[0]**2 + v[2]**2) * Sx,
# ct_pt[1] - v[2] / np.sqrt(v[1]**2 + v[2]**2) * Sx,
# ct_pt[2] + v[0] / np.sqrt(v[0]**2 + v[2]**2) * Sx])
#
# # Add them to get the final vector
# vector_sum = v1 + v2
#
# return v1
# v1 = (0, 0, 0)
# v2 = (5, 0, 5)
#
# vector1 = [vector_3D(v1, v2, i) for i in range(5)]
#
# vector2 = [vector_perpendicular_ct_pt(v1, v2, 1, i) for i in np.arange(5)]
#
# print vector1
# print vector2
def project_onto_plane(vect):
"""
Return vector projection onto the xy plane
"""
x, y, z = vect
return (x, y, 0.)
def find_contact_3D(centroids, radius, tol = 20.):
"""
Arrays of all the centroids and all radii
tol defines the error tolerance between radii distance
Check all centre pairs and determine,
based on their radii, if they are in contact or not
"""
touch_pts = []
centres = []
radii = []
N = len(centroids)
for i in range(N - 1):
for j in range(i + 1, N):
c1 = centroids[i]
c2 = centroids[j]
r1 = radius[i]
r2 = radius[j]
D = r1 + r2
L = distance_3D(c1, c2)
print ""
print "Difference between radii sum and centre distance is", abs(D - L)
print "Distance is ", L
print "Radii sum is ", D
print ""
if abs(D - L) <= tol:
touch_pt = vector_3D(c1, c2, r1)
touch_pts.append(touch_pt)
centres.append((c1, c2))
radii.append((r1, r2))
return centres, touch_pts, radii
def sample_rate(P2, P1):
"""
When we have to loop through pixels at
an angle, the angle needs to be taken
into account. This calculates the change in distance
depending on where the vector is pointing
"""
from numpy import (array, dot)
from numpy.linalg import norm
v = np.array([P1[0] - P2[0], P1[1] - P2[1], P1[2] - P2[2]])
normal = np.array([0,0,1])
projection = np.cross(normal, np.cross(v,normal))
c = np.dot(v, projection) / modulus(projection) / modulus(v)
return 1. / c
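# Worked example (hypothetical points): for P2 = (0, 0, 0) and P1 = (1, 0, 1)
# the vector makes a 45 degree angle with the x-y plane, so the step needed to
# advance one slice is 1 / cos(45) = sqrt(2) ~ 1.414.
# sample_rate((0., 0., 0.), (1., 0., 1.)) # ~1.414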
def get_slice(P1, P2, name):
"""
Get slice through centre for analysis
"""
centre_dist = distance_3D(P1, P2)
sampling = sample_rate(P1, P2) - 1
plot_img = np.zeros((centre_dist / 2. + 1, centre_dist + 1))
Xrange = np.linspace(-centre_dist / 4., centre_dist /4.,
centre_dist / 2. + 1)
Trange = np.linspace(0., centre_dist,
centre_dist * 2.)
for time in Trange:
# Go up along the line
pt = vector_3D(P1, P2, time + sampling)
interpolated = trilinear(name, pt)
for X in Xrange:
# Get along the X direction for every height
x, y, z = vector_perpendicular_3D(pt, P2, 1, 0, X)
# pixel_value = interpolation(x, y, img)
pixel_value = interpolated([x, y, z])
plot_img[X + centre_dist / 4., time] = pixel_value
return plot_img
def get_slice_perpendicular(P1, P2, r1, name):
"""
Finds a vector between the centres.
Takes the point on the vector that is on the contact point.
Finds a vector going through the contact point that is also
perpendicular to the line connecting the centres.
The slice is then reconstructed from several images
"""
# time goes along the vector between P1 and P2
# since it might be at an angle, I can't loop in 1
# pixel increments - this will miss certain slices. Therefore,
# I need to loop through by 1/cosA, where A is angle between
# the xy plane and vector P1->P2
centre_dist = distance_3D(P1, P2)
perp1 = vector_perpendicular_ct_pt(P1, P2, r1, centre_dist /4.)
perp2 = vector_perpendicular_ct_pt(P1, P2, r1, -centre_dist /4.)
sampling = sample_rate(perp1, perp2) - 1
plot_img = np.zeros((np.int(np.round(centre_dist / 2. + 1, 0)), np.int(np.round(centre_dist / 2. + 1, 0))))
Xrange = np.linspace(-centre_dist / 4., centre_dist /4.,
centre_dist / 2. + 1)
Trange = np.linspace(-centre_dist / 4., centre_dist /4.,
centre_dist / 2. + 1)
for time in Trange:
# Go up along the line
pt = vector_perpendicular_ct_pt(P1, P2, r1, time + sampling)
interpolated = trilinear(name, pt)
for X in Xrange:
# Get along the X direction for every height
x, y, z = vector_perpendicular_3D(pt, P2, 1, 0, X)
# pixel_value = interpolation(x, y, img)
pixel_value = interpolated([x, y, z])
plot_img[X + centre_dist / 4., time + centre_dist / 4.] = pixel_value
return plot_img
def check_alignment(image, r1, r2):
"""
Take a particular line though the image and check
if the spheres were properly aligned in the z direction.
It happens to be off by a pixel or two sometimes
"""
distance = dist_between_spheres(r1, r2, image.shape[0] / 2. + 10, image.shape[0] / 2.)
gap_signal = []
denoised = median_filter(image.copy(), 3)
for j in np.arange(0., image.shape[1]):
# Take the region around the gap, which later on will be used
# to define the intensity at the gap between the spheres.
# The width of the gap is not exact
if image.shape[1] / 2. + distance + 5 > j > image.shape[1] / 2. - distance - 5:
gap_signal.append(denoised[image.shape[0] / 2. + 10, j])
centre = np.mean(np.argwhere(np.min(gap_signal) == gap_signal))
print centre
print len(gap_signal) / 2.
print
if abs(centre - len(gap_signal) / 2.) <= 1.5:
return True
else:
return False
def interpolation(x, y, img):
"""
http://en.wikipedia.org/wiki/Bilinear_interpolation
"""
from math import floor, ceil
x1 = ceil(x)
x2 = floor(x)
y1 = ceil(y)
y2 = floor(y)
Q11 = (x1, y1)
Q12 = (x1, y2)
Q21 = (x2, y1)
Q22 = (x2, y2)
f11 = img[Q11[0], Q11[1]]
f12 = img[Q12[0], Q12[1]]
f21 = img[Q21[0], Q21[1]]
f22 = img[Q22[0], Q22[1]]
try:
pixel_value = 1 / ((x2 - x1) * (y2 - y1)) * (f11 * (x2 - x) * (y2 - y) +
f21 * (x - x1) * (y2 - y) +
f12 * (x2 - x) * (y - y1) +
f22 * (x - x1) * (y - y1))
except:
pixel_value = np.mean([f11, f12, f21, f22])
return pixel_value
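# Quick sanity check on a toy 2x2 image: at the centre of the cell the bilinear
# estimate is simply the mean of the four corners.
# img = np.array([[0., 10.], [20., 30.]])
# interpolation(0.5, 0.5, img) # -> 15.0
# At integer coordinates x1 == x2 (and y1 == y2), so the division above raises
# and the except branch falls back to the mean of the (identical) corners.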
def trilinear(name, pt):
"""
Trilinear interpolation
http://docs.scipy.org/doc/scipy-dev/reference/generated/
scipy.interpolate.RegularGridInterpolator.html
"""
from scipy.interpolate import RegularGridInterpolator
input_file = name % int(np.floor(pt[2]))
data0 = io.imread(input_file)
input_file = name % int(np.ceil(pt[2]))
data1 = io.imread(input_file)
xdim = data0.shape[0]
ydim = data0.shape[1]
zdim = 2
empty_arr = np.empty((xdim, ydim, zdim))
empty_arr[:, :, 0] = data0
empty_arr[:, :, 1] = data1
x = np.linspace(0, xdim - 1, xdim)
y = np.linspace(0, ydim - 1, ydim)
z = np.linspace(int(np.floor(pt[2])), int(np.ceil(pt[2])), zdim)
interp = RegularGridInterpolator((x, y, z), empty_arr)
return interp
# def load_up_images(pt1, pt2, name):
# """
# Stack up the region of interest from image slices
# """
# zstart = int(np.min(pt1[2], pt2[2]))
# zend = int(np.max(pt1[2], pt2[2]))
#
# xdim =
# ydim
#
# input_file = name % zstart
# data = io.imread(input_file)
#
# zrange = np.linspace(zstart, zend, zend-zstart)
# store_ROI = np.empty((data.shape[0]))
#
# for i in zrange:
# input_file = name % i
# data = io.imread(input_file)
#
# return
def touch_lines_3D(pt1, pt2, folder_name, name, r1, r2, window_size):
"""
Goes along lines in the region between
two points.
Used for obtaining the widths of the gaussian fitted
to the gap between spheres
"""
# Create an array to store the slanted image slices
# used for plotting
L = distance_3D(pt1, pt2)
D = r1 + r2
print ""
print "Difference between radii sum and centre distance is", abs(D - L)
print "Distance is ", L
print "Radii sum is ", D
print ""
create_dir(folder_name + "plots/")
perpendicular_slice = get_slice_perpendicular(pt1, pt2, r1, name)
misc.imsave(folder_name + "perp_slice.tif", perpendicular_slice)
print "saving the perpendicular slice"
ROI = get_slice(pt1, pt2, name)
fit_and_visualize(ROI, folder_name + "plots/", r1, r2, window_size)
print "saving the slice for MTF"
return
# import cPickle
# import pylab as pl
#
# f = open("/dls/tmp/jjl36382/50867/plots/(793.0, 1143.07, 801.86),(682.61, 1141.0, 1410.12)/plots/gap_width.npy", 'r')
# x1 = cPickle.load(f)
# f.close()
# f = open("/dls/tmp/jjl36382/50873/plots/(796.04, 1146.95, 806.3),(685.0, 1143.98, 1414.78)/plots/gap_width.npy", 'r')
# x2 = cPickle.load(f)
# f.close()
# f = open("/dls/tmp/jjl36382/50880/plots/(798.04, 1147.99, 811.83),(685.0, 1143.0, 1418.03)/plots/gap_width.npy", 'r')
# x3 = cPickle.load(f)
# f.close()
# f = open("/dls/tmp/jjl36382/50867/plots/(793.0, 1143.07, 801.86),(682.61, 1141.0, 1410.12)/plots/mtf_cleft.npy", 'r')
# y1 = cPickle.load(f)
# f.close()
# f = open("/dls/tmp/jjl36382/50873/plots/(796.04, 1146.95, 806.3),(685.0, 1143.98, 1414.78)/plots/mtf_cleft.npy", 'r')
# y2 = cPickle.load(f)
# f.close()
# f = open("/dls/tmp/jjl36382/50880/plots/(798.04, 1147.99, 811.83),(685.0, 1143.0, 1418.03)/plots/mtf_cleft.npy", 'r')
# y3 = cPickle.load(f)
# f.close()
#
# pl.plot(x1, y1, 'r', label = "53keV")
# pl.plot(x3, y3, 'g', label = "75keV")
# pl.plot(x2, y2, 'b', label = "130keV")
#
#
# pl.xlabel("Distance between spheres (pixels)")
# pl.ylabel("MTF %")
# pl.legend()
# pl.gca().invert_xaxis()
# pl.savefig("./median_0.tif")
# pl.show()
# #
| apache-2.0 | 355,403,463,064,665,700 | 30.717423 | 141 | 0.522735 | false |
chiamingyen/pygrouf | init.py | 1 | 8144 | import os
import sqlite3
import pymysql
# Determine the directory this script lives in; on Windows the result keeps a trailing backslash
_curdir = os.path.join(os.getcwd(), os.path.dirname(__file__))
# Set the data storage directories for cloud and local execution
if 'OPENSHIFT_REPO_DIR' in os.environ.keys():
    # The program is running in the cloud
data_dir = os.environ['OPENSHIFT_DATA_DIR']
static_dir = os.environ['OPENSHIFT_REPO_DIR']+"/static"
else:
    # The program is running locally
data_dir = _curdir + "/local_data/"
static_dir = _curdir + "/static"
class Init(object):
def __init__(self):
# hope to create downloads and images directories
if not os.path.isdir(data_dir+"downloads"):
try:
os.makedirs(data_dir+"downloads")
except:
print("mkdir error")
if not os.path.isdir(data_dir+"images"):
try:
os.makedirs(data_dir+"images")
except:
print("mkdir error")
if not os.path.isdir(data_dir+"tmp"):
try:
os.makedirs(data_dir+"tmp")
except:
print("mkdir error")
        # If adsense_content does not exist, create a blank file
if not os.path.isfile(data_dir+"adsense_content"):
try:
file = open(data_dir+"adsense_content", "w", encoding="utf-8")
                # Write the built-in adsense_content
adsense_content = '''
<script type="text/javascript"><!--
google_ad_client = "pub-2140091590744860";
google_ad_width = 300;
google_ad_height = 250;
google_ad_format = "300x250_as";
google_ad_type = "image";
google_ad_channel ="";
google_color_border = "000000";
google_color_link = "0000FF";
google_color_bg = "FFFFFF";
google_color_text = "000000";
google_color_url = "008000";
google_ui_features = "rc:0";
//--></script>
<script type="text/javascript"
src="http://pagead2.googlesyndication.com/pagead/show_ads.js">
</script>
<script type="text/javascript"><!--
google_ad_client = "pub-2140091590744860";
google_ad_width = 300;
google_ad_height = 250;
google_ad_format = "300x250_as";
google_ad_type = "image";
google_ad_channel ="";
google_color_border = "000000";
google_color_link = "0000FF";
google_color_bg = "FFFFFF";
google_color_text = "000000";
google_color_url = "008000";
google_ui_features = "rc:0";
//--></script>
<script type="text/javascript"
src="http://pagead2.googlesyndication.com/pagead/show_ads.js">
</script><br />
'''
file.write(adsense_content+"\n")
file.close()
except:
print("mkdir error")
        # Database backend selection
        # sqlite3 is used by default
ormdb = "sqlite"
#ormdb = "mysql"
#ormdb = "postgresql"
if ormdb == "sqlite":
            # Use SQLite as the database
            # peewee should really be used here to create the database and table
try:
conn = sqlite3.connect(data_dir+"task.db")
cur = conn.cursor()
                # Create the table
cur.execute("CREATE TABLE IF NOT EXISTS task( \
id INTEGER PRIMARY KEY AUTOINCREMENT, \
name TEXT, \
owner TEXT, \
type TEXT, \
time TEXT, \
content TEXT, \
ip TEXT, \
follow INTEGER);")
cur.close()
conn.close()
except:
print("can not create db and table")
elif ormdb == "mysql":
            # Try to create the database and table
            # peewee should really be used here to create the database and table
if 'OPENSHIFT_REPO_DIR' in os.environ.keys():
host=str(os.environ[str('OPENSHIFT_MYSQL_DB_HOST')])
port=int(os.environ[str('OPENSHIFT_MYSQL_DB_PORT')])
db='cadp'
user=str(os.environ[str('OPENSHIFT_MYSQL_DB_USERNAME')])
passwd=str(os.environ[str('OPENSHIFT_MYSQL_DB_PASSWORD')])
else:
host="yourhost"
port=3306
db='yourdb'
user='youruser'
passwd='yourpassword'
charset='utf8'
            # When the instance is created, try to create the database and table
try:
conn = pymysql.connect(host=host, port=port, user=user, passwd=passwd, charset=charset)
                # Create the database
cur = conn.cursor()
cur.execute("CREATE DATABASE IF NOT EXISTS "+db+" CHARACTER SET UTF8;")
                # Create the table
cur.execute("USE "+db+";")
cur.execute("CREATE TABLE IF NOT EXISTS `task` ( \
`id` BIGINT(20) UNSIGNED NOT NULL AUTO_INCREMENT, \
`name` VARCHAR(255) NOT NULL COLLATE 'utf8_unicode_ci', \
`owner` VARCHAR(255) NOT NULL COLLATE 'utf8_unicode_ci', \
`type` VARCHAR(255) NULL DEFAULT NULL COLLATE 'utf8_unicode_ci', \
`time` DATETIME NOT NULL COLLATE 'utf8_unicode_ci', \
`content` LONGTEXT COLLATE 'utf8_unicode_ci', \
`ip` VARCHAR(255) NULL DEFAULT NULL COLLATE 'utf8_unicode_ci', \
`follow` BIGINT(20) UNSIGNED NOT NULL DEFAULT '0', \
PRIMARY KEY (`id`)) \
COLLATE='utf8_general_ci' default charset=utf8 ENGINE=InnoDB;")
cur.close()
conn.close()
except:
print("can not create db and table")
else:
            # Use PostgreSQL (though this branch still goes through pymysql with MySQL syntax)
            # Try to create the database and table
            # peewee should really be used here to create the database and table
if 'OPENSHIFT_REPO_DIR' in os.environ.keys():
host=str(os.environ[str('OPENSHIFT_POSTGRESQL_DB_HOST')])
port=int(os.environ[str('OPENSHIFT_POSTGRESQL_DB_PORT')])
db='cadp'
user=str(os.environ[str('OPENSHIFT_POSTGRESQL_DB_USERNAME')])
passwd=str(os.environ[str('OPENSHIFT_POSTGRESQL_DB_PASSWORD')])
else:
host="localhost"
port=3306
db='cadp'
user='root'
passwd='root'
charset='utf8'
            # When the instance is created, try to create the database and table
try:
conn = pymysql.connect(host=host, port=port, user=user, passwd=passwd, charset=charset)
                # Create the database
cur = conn.cursor()
cur.execute("CREATE DATABASE IF NOT EXISTS "+db+";")
                # Create the table
cur.execute("USE "+db+";")
cur.execute("CREATE TABLE IF NOT EXISTS `task` ( \
`id` BIGINT(20) UNSIGNED NOT NULL AUTO_INCREMENT, \
`name` VARCHAR(255) NOT NULL COLLATE 'utf8_unicode_ci', \
`owner` VARCHAR(255) NOT NULL COLLATE 'utf8_unicode_ci', \
`type` VARCHAR(255) NULL DEFAULT NULL COLLATE 'utf8_unicode_ci', \
`time` DATETIME NOT NULL COLLATE 'utf8_unicode_ci', \
`content` LONGTEXT COLLATE 'utf8_unicode_ci', \
`ip` VARCHAR(255) NULL DEFAULT NULL COLLATE 'utf8_unicode_ci', \
`follow` BIGINT(20) UNSIGNED NOT NULL DEFAULT '0', \
PRIMARY KEY (`id`)) \
COLLATE='utf8_general_ci' default charset=utf8 ENGINE=InnoDB;")
cur.close()
conn.close()
except:
print("can not create db and table")
| agpl-3.0 | 834,210,522,911,319,900 | 39.825397 | 103 | 0.488336 | false |
danialbehzadi/Nokia-RM-1013-2.0.0.11 | webkit/Tools/Scripts/webkitpy/python24/versioning_unittest.py | 15 | 5449 | # Copyright (C) 2010 Chris Jerdonek ([email protected])
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Contains unit tests for versioning.py."""
import logging
import unittest
from webkitpy.common.system.logtesting import LogTesting
from webkitpy.python24.versioning import check_version
from webkitpy.python24.versioning import compare_version
class MockSys(object):
"""A mock sys module for passing to version-checking methods."""
def __init__(self, current_version):
"""Create an instance.
current_version: A version string with major, minor, and micro
version parts.
"""
version_info = current_version.split(".")
version_info = map(int, version_info)
self.version = current_version + " Version details."
self.version_info = version_info
class CompareVersionTest(unittest.TestCase):
"""Tests compare_version()."""
def _mock_sys(self, current_version):
return MockSys(current_version)
def test_default_minimum_version(self):
"""Test the configured minimum version that webkitpy supports."""
(comparison, current_version, min_version) = compare_version()
self.assertEquals(min_version, "2.5")
def compare_version(self, target_version, current_version=None):
"""Call compare_version()."""
if current_version is None:
current_version = "2.5.3"
mock_sys = self._mock_sys(current_version)
return compare_version(mock_sys, target_version)
def compare(self, target_version, current_version=None):
"""Call compare_version(), and return the comparison."""
return self.compare_version(target_version, current_version)[0]
def test_returned_current_version(self):
"""Test the current_version return value."""
current_version = self.compare_version("2.5")[1]
self.assertEquals(current_version, "2.5.3")
def test_returned_target_version(self):
"""Test the current_version return value."""
target_version = self.compare_version("2.5")[2]
self.assertEquals(target_version, "2.5")
def test_target_version_major(self):
"""Test major version for target."""
self.assertEquals(-1, self.compare("3"))
self.assertEquals(0, self.compare("2"))
self.assertEquals(1, self.compare("2", "3.0.0"))
def test_target_version_minor(self):
"""Test minor version for target."""
self.assertEquals(-1, self.compare("2.6"))
self.assertEquals(0, self.compare("2.5"))
self.assertEquals(1, self.compare("2.4"))
def test_target_version_micro(self):
"""Test minor version for target."""
self.assertEquals(-1, self.compare("2.5.4"))
self.assertEquals(0, self.compare("2.5.3"))
self.assertEquals(1, self.compare("2.5.2"))
class CheckVersionTest(unittest.TestCase):
"""Tests check_version()."""
def setUp(self):
self._log = LogTesting.setUp(self)
def tearDown(self):
self._log.tearDown()
def _check_version(self, minimum_version):
"""Call check_version()."""
mock_sys = MockSys("2.5.3")
return check_version(sysmodule=mock_sys, target_version=minimum_version)
def test_true_return_value(self):
"""Test the configured minimum version that webkitpy supports."""
is_current = self._check_version("2.4")
self.assertEquals(True, is_current)
self._log.assertMessages([]) # No warning was logged.
def test_false_return_value(self):
"""Test the configured minimum version that webkitpy supports."""
is_current = self._check_version("2.6")
self.assertEquals(False, is_current)
expected_message = ('WARNING: WebKit Python scripts do not support '
'your current Python version (2.5.3). '
'The minimum supported version is 2.6.\n '
'See the following page to upgrade your Python '
'version:\n\n '
'http://trac.webkit.org/wiki/PythonGuidelines\n\n')
self._log.assertMessages([expected_message])
| gpl-3.0 | 1,371,380,215,722,194,000 | 39.664179 | 80 | 0.663792 | false |
LumPenPacK/NetworkExtractionFromImages | osx_build/nefi2_osx_amd64_xcode_2015/site-packages/numpy/lib/tests/test_stride_tricks.py | 43 | 14199 | from __future__ import division, absolute_import, print_function
import numpy as np
from numpy.testing import (
run_module_suite, assert_equal, assert_array_equal,
assert_raises, assert_
)
from numpy.lib.stride_tricks import (
as_strided, broadcast_arrays, _broadcast_shape, broadcast_to
)
def assert_shapes_correct(input_shapes, expected_shape):
# Broadcast a list of arrays with the given input shapes and check the
# common output shape.
inarrays = [np.zeros(s) for s in input_shapes]
outarrays = broadcast_arrays(*inarrays)
outshapes = [a.shape for a in outarrays]
expected = [expected_shape] * len(inarrays)
assert_equal(outshapes, expected)
def assert_incompatible_shapes_raise(input_shapes):
# Broadcast a list of arrays with the given (incompatible) input shapes
# and check that they raise a ValueError.
inarrays = [np.zeros(s) for s in input_shapes]
assert_raises(ValueError, broadcast_arrays, *inarrays)
def assert_same_as_ufunc(shape0, shape1, transposed=False, flipped=False):
# Broadcast two shapes against each other and check that the data layout
# is the same as if a ufunc did the broadcasting.
x0 = np.zeros(shape0, dtype=int)
# Note that multiply.reduce's identity element is 1.0, so when shape1==(),
# this gives the desired n==1.
n = int(np.multiply.reduce(shape1))
x1 = np.arange(n).reshape(shape1)
if transposed:
x0 = x0.T
x1 = x1.T
if flipped:
x0 = x0[::-1]
x1 = x1[::-1]
# Use the add ufunc to do the broadcasting. Since we're adding 0s to x1, the
# result should be exactly the same as the broadcasted view of x1.
y = x0 + x1
b0, b1 = broadcast_arrays(x0, x1)
assert_array_equal(y, b1)
def test_same():
x = np.arange(10)
y = np.arange(10)
bx, by = broadcast_arrays(x, y)
assert_array_equal(x, bx)
assert_array_equal(y, by)
def test_one_off():
x = np.array([[1, 2, 3]])
y = np.array([[1], [2], [3]])
bx, by = broadcast_arrays(x, y)
bx0 = np.array([[1, 2, 3], [1, 2, 3], [1, 2, 3]])
by0 = bx0.T
assert_array_equal(bx0, bx)
assert_array_equal(by0, by)
def test_same_input_shapes():
# Check that the final shape is just the input shape.
data = [
(),
(1,),
(3,),
(0, 1),
(0, 3),
(1, 0),
(3, 0),
(1, 3),
(3, 1),
(3, 3),
]
for shape in data:
input_shapes = [shape]
# Single input.
assert_shapes_correct(input_shapes, shape)
# Double input.
input_shapes2 = [shape, shape]
assert_shapes_correct(input_shapes2, shape)
# Triple input.
input_shapes3 = [shape, shape, shape]
assert_shapes_correct(input_shapes3, shape)
def test_two_compatible_by_ones_input_shapes():
# Check that two different input shapes of the same length, but some have
# ones, broadcast to the correct shape.
data = [
[[(1,), (3,)], (3,)],
[[(1, 3), (3, 3)], (3, 3)],
[[(3, 1), (3, 3)], (3, 3)],
[[(1, 3), (3, 1)], (3, 3)],
[[(1, 1), (3, 3)], (3, 3)],
[[(1, 1), (1, 3)], (1, 3)],
[[(1, 1), (3, 1)], (3, 1)],
[[(1, 0), (0, 0)], (0, 0)],
[[(0, 1), (0, 0)], (0, 0)],
[[(1, 0), (0, 1)], (0, 0)],
[[(1, 1), (0, 0)], (0, 0)],
[[(1, 1), (1, 0)], (1, 0)],
[[(1, 1), (0, 1)], (0, 1)],
]
for input_shapes, expected_shape in data:
assert_shapes_correct(input_shapes, expected_shape)
# Reverse the input shapes since broadcasting should be symmetric.
assert_shapes_correct(input_shapes[::-1], expected_shape)
def test_two_compatible_by_prepending_ones_input_shapes():
# Check that two different input shapes (of different lengths) broadcast
# to the correct shape.
data = [
[[(), (3,)], (3,)],
[[(3,), (3, 3)], (3, 3)],
[[(3,), (3, 1)], (3, 3)],
[[(1,), (3, 3)], (3, 3)],
[[(), (3, 3)], (3, 3)],
[[(1, 1), (3,)], (1, 3)],
[[(1,), (3, 1)], (3, 1)],
[[(1,), (1, 3)], (1, 3)],
[[(), (1, 3)], (1, 3)],
[[(), (3, 1)], (3, 1)],
[[(), (0,)], (0,)],
[[(0,), (0, 0)], (0, 0)],
[[(0,), (0, 1)], (0, 0)],
[[(1,), (0, 0)], (0, 0)],
[[(), (0, 0)], (0, 0)],
[[(1, 1), (0,)], (1, 0)],
[[(1,), (0, 1)], (0, 1)],
[[(1,), (1, 0)], (1, 0)],
[[(), (1, 0)], (1, 0)],
[[(), (0, 1)], (0, 1)],
]
for input_shapes, expected_shape in data:
assert_shapes_correct(input_shapes, expected_shape)
# Reverse the input shapes since broadcasting should be symmetric.
assert_shapes_correct(input_shapes[::-1], expected_shape)
def test_incompatible_shapes_raise_valueerror():
# Check that a ValueError is raised for incompatible shapes.
data = [
[(3,), (4,)],
[(2, 3), (2,)],
[(3,), (3,), (4,)],
[(1, 3, 4), (2, 3, 3)],
]
for input_shapes in data:
assert_incompatible_shapes_raise(input_shapes)
# Reverse the input shapes since broadcasting should be symmetric.
assert_incompatible_shapes_raise(input_shapes[::-1])
def test_same_as_ufunc():
# Check that the data layout is the same as if a ufunc did the operation.
data = [
[[(1,), (3,)], (3,)],
[[(1, 3), (3, 3)], (3, 3)],
[[(3, 1), (3, 3)], (3, 3)],
[[(1, 3), (3, 1)], (3, 3)],
[[(1, 1), (3, 3)], (3, 3)],
[[(1, 1), (1, 3)], (1, 3)],
[[(1, 1), (3, 1)], (3, 1)],
[[(1, 0), (0, 0)], (0, 0)],
[[(0, 1), (0, 0)], (0, 0)],
[[(1, 0), (0, 1)], (0, 0)],
[[(1, 1), (0, 0)], (0, 0)],
[[(1, 1), (1, 0)], (1, 0)],
[[(1, 1), (0, 1)], (0, 1)],
[[(), (3,)], (3,)],
[[(3,), (3, 3)], (3, 3)],
[[(3,), (3, 1)], (3, 3)],
[[(1,), (3, 3)], (3, 3)],
[[(), (3, 3)], (3, 3)],
[[(1, 1), (3,)], (1, 3)],
[[(1,), (3, 1)], (3, 1)],
[[(1,), (1, 3)], (1, 3)],
[[(), (1, 3)], (1, 3)],
[[(), (3, 1)], (3, 1)],
[[(), (0,)], (0,)],
[[(0,), (0, 0)], (0, 0)],
[[(0,), (0, 1)], (0, 0)],
[[(1,), (0, 0)], (0, 0)],
[[(), (0, 0)], (0, 0)],
[[(1, 1), (0,)], (1, 0)],
[[(1,), (0, 1)], (0, 1)],
[[(1,), (1, 0)], (1, 0)],
[[(), (1, 0)], (1, 0)],
[[(), (0, 1)], (0, 1)],
]
for input_shapes, expected_shape in data:
assert_same_as_ufunc(input_shapes[0], input_shapes[1],
"Shapes: %s %s" % (input_shapes[0], input_shapes[1]))
# Reverse the input shapes since broadcasting should be symmetric.
assert_same_as_ufunc(input_shapes[1], input_shapes[0])
# Try them transposed, too.
assert_same_as_ufunc(input_shapes[0], input_shapes[1], True)
# ... and flipped for non-rank-0 inputs in order to test negative
# strides.
if () not in input_shapes:
assert_same_as_ufunc(input_shapes[0], input_shapes[1], False, True)
assert_same_as_ufunc(input_shapes[0], input_shapes[1], True, True)
def test_broadcast_to_succeeds():
data = [
[np.array(0), (0,), np.array(0)],
[np.array(0), (1,), np.zeros(1)],
[np.array(0), (3,), np.zeros(3)],
[np.ones(1), (1,), np.ones(1)],
[np.ones(1), (2,), np.ones(2)],
[np.ones(1), (1, 2, 3), np.ones((1, 2, 3))],
[np.arange(3), (3,), np.arange(3)],
[np.arange(3), (1, 3), np.arange(3).reshape(1, -1)],
[np.arange(3), (2, 3), np.array([[0, 1, 2], [0, 1, 2]])],
# test if shape is not a tuple
[np.ones(0), 0, np.ones(0)],
[np.ones(1), 1, np.ones(1)],
[np.ones(1), 2, np.ones(2)],
# these cases with size 0 are strange, but they reproduce the behavior
# of broadcasting with ufuncs (see test_same_as_ufunc above)
[np.ones(1), (0,), np.ones(0)],
[np.ones((1, 2)), (0, 2), np.ones((0, 2))],
[np.ones((2, 1)), (2, 0), np.ones((2, 0))],
]
for input_array, shape, expected in data:
actual = broadcast_to(input_array, shape)
assert_array_equal(expected, actual)
def test_broadcast_to_raises():
data = [
[(0,), ()],
[(1,), ()],
[(3,), ()],
[(3,), (1,)],
[(3,), (2,)],
[(3,), (4,)],
[(1, 2), (2, 1)],
[(1, 1), (1,)],
[(1,), -1],
[(1,), (-1,)],
[(1, 2), (-1, 2)],
]
for orig_shape, target_shape in data:
arr = np.zeros(orig_shape)
assert_raises(ValueError, lambda: broadcast_to(arr, target_shape))
def test_broadcast_shape():
# broadcast_shape is already exercized indirectly by broadcast_arrays
assert_raises(ValueError, _broadcast_shape)
assert_equal(_broadcast_shape([1, 2]), (2,))
assert_equal(_broadcast_shape(np.ones((1, 1))), (1, 1))
assert_equal(_broadcast_shape(np.ones((1, 1)), np.ones((3, 4))), (3, 4))
assert_equal(_broadcast_shape(*([np.ones((1, 2))] * 32)), (1, 2))
assert_equal(_broadcast_shape(*([np.ones((1, 2))] * 100)), (1, 2))
# regression tests for gh-5862
assert_equal(_broadcast_shape(*([np.ones(2)] * 32 + [1])), (2,))
bad_args = [np.ones(2)] * 32 + [np.ones(3)] * 32
assert_raises(ValueError, lambda: _broadcast_shape(*bad_args))
def test_as_strided():
a = np.array([None])
a_view = as_strided(a)
expected = np.array([None])
assert_array_equal(a_view, np.array([None]))
a = np.array([1, 2, 3, 4])
a_view = as_strided(a, shape=(2,), strides=(2 * a.itemsize,))
expected = np.array([1, 3])
assert_array_equal(a_view, expected)
a = np.array([1, 2, 3, 4])
a_view = as_strided(a, shape=(3, 4), strides=(0, 1 * a.itemsize))
expected = np.array([[1, 2, 3, 4], [1, 2, 3, 4], [1, 2, 3, 4]])
assert_array_equal(a_view, expected)
# Regression test for gh-5081
dt = np.dtype([('num', 'i4'), ('obj', 'O')])
a = np.empty((4,), dtype=dt)
a['num'] = np.arange(1, 5)
a_view = as_strided(a, shape=(3, 4), strides=(0, a.itemsize))
expected_num = [[1, 2, 3, 4]] * 3
expected_obj = [[None]*4]*3
assert_equal(a_view.dtype, dt)
assert_array_equal(expected_num, a_view['num'])
assert_array_equal(expected_obj, a_view['obj'])
# Make sure that void types without fields are kept unchanged
a = np.empty((4,), dtype='V4')
a_view = as_strided(a, shape=(3, 4), strides=(0, a.itemsize))
assert_equal(a.dtype, a_view.dtype)
# Make sure that the only type that could fail is properly handled
dt = np.dtype({'names': [''], 'formats': ['V4']})
a = np.empty((4,), dtype=dt)
a_view = as_strided(a, shape=(3, 4), strides=(0, a.itemsize))
assert_equal(a.dtype, a_view.dtype)
class VerySimpleSubClass(np.ndarray):
def __new__(cls, *args, **kwargs):
kwargs['subok'] = True
return np.array(*args, **kwargs).view(cls)
class SimpleSubClass(VerySimpleSubClass):
def __new__(cls, *args, **kwargs):
kwargs['subok'] = True
self = np.array(*args, **kwargs).view(cls)
self.info = 'simple'
return self
def __array_finalize__(self, obj):
self.info = getattr(obj, 'info', '') + ' finalized'
def test_subclasses():
# test that subclass is preserved only if subok=True
a = VerySimpleSubClass([1, 2, 3, 4])
assert_(type(a) is VerySimpleSubClass)
a_view = as_strided(a, shape=(2,), strides=(2 * a.itemsize,))
assert_(type(a_view) is np.ndarray)
a_view = as_strided(a, shape=(2,), strides=(2 * a.itemsize,), subok=True)
assert_(type(a_view) is VerySimpleSubClass)
# test that if a subclass has __array_finalize__, it is used
a = SimpleSubClass([1, 2, 3, 4])
a_view = as_strided(a, shape=(2,), strides=(2 * a.itemsize,), subok=True)
assert_(type(a_view) is SimpleSubClass)
assert_(a_view.info == 'simple finalized')
# similar tests for broadcast_arrays
b = np.arange(len(a)).reshape(-1, 1)
a_view, b_view = broadcast_arrays(a, b)
assert_(type(a_view) is np.ndarray)
assert_(type(b_view) is np.ndarray)
assert_(a_view.shape == b_view.shape)
a_view, b_view = broadcast_arrays(a, b, subok=True)
assert_(type(a_view) is SimpleSubClass)
assert_(a_view.info == 'simple finalized')
assert_(type(b_view) is np.ndarray)
assert_(a_view.shape == b_view.shape)
# and for broadcast_to
shape = (2, 4)
a_view = broadcast_to(a, shape)
assert_(type(a_view) is np.ndarray)
assert_(a_view.shape == shape)
a_view = broadcast_to(a, shape, subok=True)
assert_(type(a_view) is SimpleSubClass)
assert_(a_view.info == 'simple finalized')
assert_(a_view.shape == shape)
def test_writeable():
# broadcast_to should return a readonly array
original = np.array([1, 2, 3])
result = broadcast_to(original, (2, 3))
assert_equal(result.flags.writeable, False)
assert_raises(ValueError, result.__setitem__, slice(None), 0)
# but the result of broadcast_arrays needs to be writeable (for now), to
# preserve backwards compatibility
for results in [broadcast_arrays(original),
broadcast_arrays(0, original)]:
for result in results:
assert_equal(result.flags.writeable, True)
# keep readonly input readonly
original.flags.writeable = False
_, result = broadcast_arrays(0, original)
assert_equal(result.flags.writeable, False)
    # regression test for GH6491
shape = (2,)
strides = [0]
tricky_array = as_strided(np.array(0), shape, strides)
other = np.zeros((1,))
first, second = broadcast_arrays(tricky_array, other)
assert_(first.shape == second.shape)
def test_reference_types():
input_array = np.array('a', dtype=object)
expected = np.array(['a'] * 3, dtype=object)
actual = broadcast_to(input_array, (3,))
assert_array_equal(expected, actual)
actual, _ = broadcast_arrays(input_array, np.ones(3))
assert_array_equal(expected, actual)
if __name__ == "__main__":
run_module_suite()
| bsd-2-clause | -645,732,285,313,370,800 | 33.380145 | 82 | 0.52546 | false |
tensorflow/model-optimization | tensorflow_model_optimization/python/examples/quantization_with_sparsity/keras/mnist_cnn.py | 1 | 5957 | # Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=missing-docstring
"""Prune preserve Quant-Aware Training(pqat) with simple convnet on the MNIST dataset.
As a experimental feature, only `quantize_apply` been enabled with boolean flag
`prune_preserve`
"""
from __future__ import print_function
from absl import app as absl_app
import numpy as np
import tensorflow as tf
from tensorflow_model_optimization.python.core.quantization.keras import quantize
from tensorflow_model_optimization.python.core.quantization.keras.collaborative_optimizations.prune_preserve import (
default_8bit_prune_preserve_quantize_scheme,)
from tensorflow_model_optimization.python.core.sparsity.keras import prune
from tensorflow_model_optimization.python.core.sparsity.keras import pruning_callbacks
from tensorflow_model_optimization.python.core.sparsity.keras import pruning_schedule
layers = tf.keras.layers
def build_sequential_model(input_shape=(28, 28)):
num_classes = 12
return tf.keras.Sequential([
layers.InputLayer(input_shape=input_shape),
layers.Conv2D(32,
5,
padding='same',
activation='relu',
input_shape=input_shape),
layers.MaxPooling2D((2, 2), (2, 2), padding='same'),
layers.Conv2D(64, 5, padding='same', activation='relu'),
layers.MaxPooling2D((2, 2), (2, 2), padding='same'),
layers.Flatten(),
layers.Dense(1024, activation='relu'),
layers.Dropout(0.4),
layers.Dense(num_classes, activation='softmax')
])
def compile_and_fit(model,
image_train,
label_train,
compile_kwargs,
fit_kwargs):
# Compile the model.
compile_args = {
'optimizer': 'adam',
'loss': 'sparse_categorical_crossentropy',
'metrics': ['accuracy'],
}
compile_args.update(compile_kwargs)
model.compile(**compile_args)
# train the model.
fit_args = {'epochs': 4, 'validation_split': 0.1}
fit_args.update(fit_kwargs)
model.fit(image_train, label_train, **fit_args)
def evaluate_and_show_sparsity(model, image_test, label_test):
score = model.evaluate(image_test, label_test, verbose=0)
print('Test loss:', score[0])
print('Test accuracy:', score[1])
for layer in model.layers:
if isinstance(layer,
prune.pruning_wrapper.PruneLowMagnitude) or isinstance(
layer, quantize.quantize_wrapper.QuantizeWrapper):
for weights in layer.trainable_weights:
np_weights = tf.keras.backend.get_value(weights)
sparsity = 1.0 - np.count_nonzero(np_weights) / float(np_weights.size)
print(layer.layer.__class__.__name__, ' (', weights.name,
') sparsity: ', sparsity)
def prune_model(original_model, train_images, train_labels):
batch_size = 256
epochs = 5
pruning_params = {
'pruning_schedule':
pruning_schedule.ConstantSparsity(0.75, begin_step=0, frequency=100)
}
pruning_model = prune.prune_low_magnitude(original_model, **pruning_params)
pruning_model.summary()
callbacks = [pruning_callbacks.UpdatePruningStep()]
fit_kwargs = {
'batch_size': batch_size,
'epochs': epochs,
'callbacks': callbacks,
}
compile_and_fit(pruning_model,
train_images,
train_labels,
compile_kwargs={},
fit_kwargs=fit_kwargs)
return pruning_model
def prune_preserve_quantize_model(pruned_model, train_images, train_labels):
batch_size = 256
epochs = 5
pruned_model = prune.strip_pruning(pruned_model)
# Prune preserve QAT model
quant_aware_annotate_model = quantize.quantize_annotate_model(pruned_model)
quant_aware_model = quantize.quantize_apply(
quant_aware_annotate_model,
scheme=default_8bit_prune_preserve_quantize_scheme
.Default8BitPrunePreserveQuantizeScheme())
quant_aware_model.summary()
fit_kwargs = {
'batch_size': batch_size,
'epochs': epochs,
}
compile_and_fit(quant_aware_model,
train_images,
train_labels,
compile_kwargs={},
fit_kwargs=fit_kwargs)
return quant_aware_model
def main(unused_args):
# Load the MNIST dataset.
mnist = tf.keras.datasets.mnist
(train_images, train_labels), (test_images, test_labels) = mnist.load_data()
# data preprocessing
# normalize the input images so that each pixel value is between 0 and 1.
train_images = train_images / 255.0
test_images = test_images / 255.0
train_images = tf.expand_dims(train_images, axis=-1)
test_images = tf.expand_dims(test_images, axis=-1)
input_shape = train_images.shape[1:]
print('train_images shape:', train_images.shape)
print(train_images.shape[0], 'train samples')
print(test_images.shape[0], 'test samples')
model = build_sequential_model(input_shape)
pruned_model = prune_model(model, train_images, train_labels)
evaluate_and_show_sparsity(pruned_model, test_images, test_labels)
pqat_model = prune_preserve_quantize_model(pruned_model, train_images,
train_labels)
evaluate_and_show_sparsity(pqat_model, test_images, test_labels)
if __name__ == '__main__':
absl_app.run(main)
| apache-2.0 | -6,557,862,777,531,152,000 | 33.633721 | 117 | 0.663085 | false |
matt-gardner/deep_qa | deep_qa/run.py | 1 | 10827 | from typing import List, Tuple, Union
import sys
import logging
import shutil
import os
import random
import pyhocon
import numpy
# pylint: disable=wrong-import-position
from .common.params import Params, replace_none, ConfigurationError
from .common.tee_logger import TeeLogger
logger = logging.getLogger(__name__) # pylint: disable=invalid-name
def prepare_environment(params: Union[Params, dict]):
"""
Sets random seeds for reproducible experiments. This may not work as expected
if you use this from within a python project in which you have already imported Keras.
If you use the scripts/run_model.py entry point to training models with this library,
your experiments should be reproducible. If you are using this from your own project,
you will want to call this function before importing Keras.
Parameters
----------
params: Params object or dict, required.
A ``Params`` object or dict holding the json parameters.
"""
seed = params.pop("random_seed", 13370)
numpy_seed = params.pop("numpy_seed", 1337)
if "keras" in sys.modules:
logger.warning("You have already imported Keras in your code. If you are using DeepQA"
"functionality to set random seeds, they will have no effect, as code"
"prior to calling this function will be non-deterministic. We will not"
"the random seed here.")
seed = None
numpy_seed = None
if seed is not None:
random.seed(seed)
if numpy_seed is not None:
numpy.random.seed(numpy_seed)
from deep_qa.common.checks import log_keras_version_info
log_keras_version_info()
def run_model(param_path: str, model_class=None):
"""
This function is the normal entry point to DeepQA. Use this to run a DeepQA model in
your project. Note that if you care about exactly reproducible experiments,
you should avoid importing Keras before you import and use this function, as
Keras relies on random seeds which can be set in this function via a
JSON specification file.
Note that this function performs training and will also evaluate the trained
model on development and test sets if provided in the parameter json.
Parameters
----------
param_path: str, required.
A json file specifying a DeepQaModel.
model_class: DeepQaModel, optional (default=None).
This option is useful if you have implemented a new model class which
is not one of the ones implemented in this library.
"""
param_dict = pyhocon.ConfigFactory.parse_file(param_path)
params = Params(replace_none(param_dict))
prepare_environment(params)
# These have to be imported _after_ we set the random seed,
# because keras uses the numpy random seed.
from deep_qa.models import concrete_models
import tensorflow
from keras import backend as K
log_dir = params.get("model_serialization_prefix", None) # pylint: disable=no-member
if log_dir is not None:
sys.stdout = TeeLogger(log_dir + "_stdout.log", sys.stdout)
sys.stderr = TeeLogger(log_dir + "_stderr.log", sys.stderr)
handler = logging.FileHandler(log_dir + "_python_logging.log")
handler.setLevel(logging.INFO)
handler.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - %(name)s - %(message)s'))
logging.getLogger().addHandler(handler)
shutil.copyfile(param_path, log_dir + "_model_params.json")
num_threads = os.environ.get('OMP_NUM_THREADS')
config = {
"allow_soft_placement": True,
"log_device_placement": params.pop("log_device_placement", False)
}
if num_threads is not None:
config["intra_op_parallelism_threads"] = int(num_threads)
global_session = tensorflow.Session(config=tensorflow.ConfigProto(**config))
K.set_session(global_session)
if model_class is None:
model_type = params.pop_choice('model_class', concrete_models.keys())
model_class = concrete_models[model_type]
else:
if params.pop('model_class', None) is not None:
raise ConfigurationError("You have specified a local model class and passed a model_class argument"
"in the json specification. These options are mutually exclusive.")
model = model_class(params)
if model.can_train():
logger.info("Training model")
model.train()
K.clear_session()
else:
raise ConfigurationError("The supplied model does not have enough training inputs.")
def load_model(param_path: str, model_class=None):
"""
Loads and returns a model.
Parameters
----------
param_path: str, required
A json file specifying a DeepQaModel.
model_class: DeepQaModel, optional (default=None)
This option is useful if you have implemented a new model
class which is not one of the ones implemented in this library.
Returns
-------
A ``DeepQaModel`` instance.
"""
logger.info("Loading model from parameter file: %s", param_path)
param_dict = pyhocon.ConfigFactory.parse_file(param_path)
params = Params(replace_none(param_dict))
prepare_environment(params)
from deep_qa.models import concrete_models
if model_class is None:
model_type = params.pop_choice('model_class', concrete_models.keys())
model_class = concrete_models[model_type]
else:
if params.pop('model_class', None) is not None:
raise ConfigurationError("You have specified a local model class and passed a model_class argument"
"in the json specification. These options are mutually exclusive.")
model = model_class(params)
model.load_model()
return model
def score_dataset(param_path: str, dataset_files: List[str], model_class=None):
"""
Loads a model from a saved parameter path and scores a dataset with it, returning the
predictions.
Parameters
----------
param_path: str, required
A json file specifying a DeepQaModel.
dataset_files: List[str]
A list of dataset files to score, the same as you would have specified as ``train_files``
or ``test_files`` in your parameter file.
model_class: DeepQaModel, optional (default=None)
This option is useful if you have implemented a new model class which
is not one of the ones implemented in this library.
Returns
-------
predictions: numpy.array
Numpy array of model predictions in the format of model.outputs (typically one array, but
could be List[numpy.array] if your model has multiple outputs).
labels: numpy.array
The labels on the dataset, as read by the model. We return this so you can compute
whatever metrics you want, if the data was labeled.
"""
model = load_model(param_path, model_class=model_class)
dataset = model.load_dataset_from_files(dataset_files)
return model.score_dataset(dataset)
def evaluate_model(param_path: str, dataset_files: List[str]=None, model_class=None):
"""
Loads a model and evaluates it on some test set.
Parameters
----------
param_path: str, required
A json file specifying a DeepQaModel.
dataset_files: List[str], optional, (default=None)
A list of dataset files to evaluate on. If this is ``None``, we'll evaluate from the
``test_files`` parameter in the input files. If that's also ``None``, we'll crash.
model_class: DeepQaModel, optional (default=None)
This option is useful if you have implemented a new model class which
is not one of the ones implemented in this library.
Returns
-------
Numpy arrays of model predictions in the format of model.outputs.
"""
model = load_model(param_path, model_class=model_class)
if dataset_files is None:
dataset_files = model.test_files
model.evaluate_model(dataset_files)
def score_dataset_with_ensemble(param_paths: List[str],
dataset_files: List[str],
model_class=None) -> Tuple[numpy.array, numpy.array]:
"""
Loads all of the models specified in ``param_paths``, uses each of them to score the dataset
specified by ``dataset_files``, and averages their scores, return an array of ensembled model
predictions.
Parameters
----------
param_paths: List[str]
A list of parameter files that were used to train models. You must have already trained
the corresponding model, as we'll load it and use it in an ensemble here.
dataset_files: List[str]
A list of dataset files to score, the same as you would have specified as ``test_files`` in
any one of the model parameter files.
model_class: ``DeepQaModel``, optional (default=None)
This option is useful if you have implemented a new model class which is not one of the
ones implemented in this library.
Returns
-------
predictions: numpy.array
Numpy array of model predictions in the format of model.outputs (typically one array, but
could be List[numpy.array] if your model has multiple outputs).
labels: numpy.array
The labels on the dataset, as read by the first model. We return this so you can compute
whatever metrics you want, if the data was labeled. Note that if your models all represent
that data differently, this will only give the first one. Hopefully the representation of
the labels is consistent across the models, though; if not, the whole idea of ensembling
them this way is moot, anyway.
"""
models = [load_model(param_path, model_class) for param_path in param_paths]
predictions = []
labels_to_return = None
for i, model in enumerate(models):
logger.info("Scoring model %d of %d", i + 1, len(models))
dataset = model.load_dataset_from_files(dataset_files)
model_predictions, labels = model.score_dataset(dataset)
predictions.append(model_predictions)
if labels_to_return is None:
labels_to_return = labels
logger.info("Averaging model predictions")
all_predictions = numpy.stack(predictions)
averaged = numpy.mean(all_predictions, axis=0)
return averaged, labels_to_return
def compute_accuracy(predictions: numpy.array, labels: numpy.array):
"""
Computes a simple categorical accuracy metric, useful if you used ``score_dataset`` to get
predictions.
"""
accuracy = numpy.mean(numpy.equal(numpy.argmax(predictions, axis=-1),
numpy.argmax(labels, axis=-1)))
logger.info("Accuracy: %f", accuracy)
return accuracy
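# Minimal usage sketch of this module (the json path and dataset file below are
# placeholders, not files shipped with the library):
#
# from deep_qa.run import run_model, score_dataset, compute_accuracy
# run_model("experiments/my_model.json")  # trains and evaluates per the spec
# predictions, labels = score_dataset("experiments/my_model.json",
#                                     ["data/validation.tsv"])
# compute_accuracy(predictions, labels)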
| apache-2.0 | 9,043,961,431,607,465,000 | 40.482759 | 111 | 0.670823 | false |
jeandet/meson | mesonbuild/scripts/vcstagger.py | 6 | 1557 | # Copyright 2015-2016 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys, os, subprocess, re
def config_vcs_tag(infile, outfile, fallback, source_dir, replace_string, regex_selector, cmd):
try:
output = subprocess.check_output(cmd, cwd=source_dir)
new_string = re.search(regex_selector, output.decode()).group(1).strip()
except Exception:
new_string = fallback
with open(infile) as f:
new_data = f.read().replace(replace_string, new_string)
if os.path.exists(outfile):
with open(outfile) as f:
needs_update = (f.read() != new_data)
else:
needs_update = True
if needs_update:
with open(outfile, 'w') as f:
f.write(new_data)
def run(args):
infile, outfile, fallback, source_dir, replace_string, regex_selector = args[0:6]
command = args[6:]
config_vcs_tag(infile, outfile, fallback, source_dir, replace_string, regex_selector, command)
return 0
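# Example invocation (paths and the regex are illustrative, not part of Meson's
# real build rules): fill @VCS_TAG@ in version.h.in with the current git tag,
# falling back to 'unknown' when git is unavailable.
#
# run(['version.h.in', 'version.h', 'unknown', '.', '@VCS_TAG@',
#      '(.*)', 'git', 'describe', '--tags'])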
if __name__ == '__main__':
sys.exit(run(sys.argv[1:]))
| apache-2.0 | -6,822,994,307,111,835,000 | 36.071429 | 98 | 0.681439 | false |
Ruide/angr-dev | ana/ana/datalayer.py | 1 | 3029 | import os
import uuid as uuid_module
import weakref
import cPickle as pickle
try:
import pymongo
import bson
except ImportError:
# mongo dependency is optional
pymongo = None
import logging
l = logging.getLogger("ana.datalayer")
class DataLayer:
'''
The DataLayer handles storing and retrieving UUID-identified objects
to/from a central store.
'''
def __init__(self):
self.uuid_cache = weakref.WeakValueDictionary()
self._store_type = None
self.uuid = uuid_module.uuid4()
def store_state(self, uuid, s):
raise NotImplementedError()
def load_state(self, uuid):
raise NotImplementedError()
class SimpleDataLayer(DataLayer):
def __init__(self):
DataLayer.__init__(self)
self._store_type = 'simple'
def store_state(self, uuid, s):
return
def load_state(self, uuid):
raise ANAError("SimpleDataLayer does not support state loading.")
class DirDataLayer(DataLayer):
def __init__(self, pickle_dir):
DataLayer.__init__(self)
self._store_type = 'pickle'
self._dir = pickle_dir
if not os.path.exists(self._dir):
l.warning("Directory '%s' doesn't exit. Creating.", self._dir)
os.makedirs(self._dir)
def store_state(self, uuid, s):
with open(os.path.join(self._dir, str(uuid)+'.p'), 'wb') as f:
pickle.dump(s, f, protocol=pickle.HIGHEST_PROTOCOL)
def load_state(self, uuid):
with open(os.path.join(self._dir, str(uuid)+'.p'), 'rb') as f:
return pickle.load(f)
class MongoDataLayer(DataLayer):
def __init__(self, mongo_args, mongo_db='ana', mongo_collection='storage'):
DataLayer.__init__(self)
if pymongo is None:
raise ImportError("pymongo necessary for ANA mongo backend")
l.debug("Pickling into mongo.")
self._store_type = 'mongo'
self._mongo = pymongo.MongoClient(*mongo_args)[mongo_db][mongo_collection]
def store_state(self, uuid, s):
# TODO: investigate whether check/insert is faster than
# upsert (because of latency) and also deal with the race
# condition here
if self._mongo.find({'_id': uuid}).limit(1).count(with_limit_and_skip=True) == 0:
p = pickle.dumps(s, protocol=pickle.HIGHEST_PROTOCOL)
self._mongo.insert_one({'_id': uuid, 'pickled': bson.binary.Binary(p)})
def load_state(self, uuid):
p = self._mongo.find_one({'_id': uuid})['pickled']
return pickle.loads(p)
class DictDataLayer(DataLayer):
def __init__(self, the_dict=None):
DataLayer.__init__(self)
self._store_type = 'dict'
self._state_store = { } if the_dict is None else the_dict
def store_state(self, uuid, s):
p = pickle.dumps(s, protocol=pickle.HIGHEST_PROTOCOL)
self._state_store[uuid] = p
def load_state(self, uuid):
p = self._state_store[uuid]
return pickle.loads(p)
from .errors import ANAError
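# Illustrative usage of the directory-backed store (the path and key below are
# made up for the example):
#
# layer = DirDataLayer("/tmp/ana_store")
# layer.store_state("some-uuid", {"x": 1})
# assert layer.load_state("some-uuid") == {"x": 1}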
| bsd-2-clause | 7,960,456,664,408,711,000 | 29.59596 | 89 | 0.619676 | false |
marcelomiky/PythonCodes | Intro ML Semcomp/semcomp17_ml/utils/ml_utils.py | 1 | 2164 | """plot_utils.py
tb00083@surrey
"""
import matplotlib.pyplot as plt
import numpy as np
def extract_data(data, src_fields, target_fields):
assert type(src_fields) is list and type(target_fields) is list, "Error! both src_fields and target_fields must be a list"
for field in src_fields + target_fields:
if field not in data.keys():
assert 0, "Error! field {} not exist".format(field)
X = [data[field] for field in src_fields]
X = np.array(X,dtype = np.float32)
Z = [data[field] for field in target_fields]
Z = np.array(Z,dtype = np.float32)
return X, Z
def normalise_data(data, u = None, scale = None):
if u is None:
u = data.mean(axis=1, keepdims = True)
scale = data.max(axis=1,keepdims=True) - data.min(axis=1, keepdims=True) + np.finfo(np.float32).eps
out = (data - u)/scale
return out, u, scale
else:
out = (data-u)/scale
return out
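# Intended train/test pattern (a sketch; the variable names are illustrative):
# normalise the training matrix once, then reuse the returned mean and scale so
# the test matrix shares exactly the same normalisation.
#
# X_train_n, u, scale = normalise_data(X_train)
# X_test_n = normalise_data(X_test, u, scale)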
def plot_loss(plt_axis, x,y, msg = None):
plt_axis.cla()
plt_axis.plot(x,y,'b')
plt_axis.set_xlabel('iterations')
plt_axis.set_ylabel('loss')
if msg:
plt_axis.set_title(msg)
def plot_scatter_and_line(plt_axis, X, Z, W, msg = None):
plt_axis.cla()
X_ = X[0]
Z_ = Z[0]
W_ = W[0]
plt_axis.plot(X_,Z_,'b.')
xmin = min(X_)
xmax = max(X_)
plt_axis.plot([xmin,xmax], [W_[0]*xmin + W_[1], W_[0]*xmax + W_[1]], 'r')
plt_axis.set_xlabel('x')
plt_axis.set_ylabel('y')
if msg:
plt_axis.set_title(msg)
def plot_scatter_with_label_2d(plt_axis, X, Z, W = None, msg = None):
plt_axis.cla()
X_ = X[:-1, :]
Z_ = Z[0]
idx = Z_ == 1
plt_axis.scatter(X_[0,idx], X_[1, idx], marker='o', color = 'b')
plt_axis.scatter(X_[0,np.invert(idx)], X_[1,np.invert(idx)], marker = 'x', color = 'r')
if W is not None: #plot decision boundary
W_ = W[0]
#line form w1*x1 + w2*x2 + w3 = 0
if W_[1]: #w2 != 0
# x2 = -(w1*x1+w3)/w2
x1 = np.array([X_[0].min(), X_[0].max()])
x2 = -(W_[0]*x1+W_[2])/W_[1]
else:
# x1 = -(w2*x2+w3)/w1
x2 = np.array([X_[1].min(), X_[1].max()])
x1 = -(W_[1]*x2 + W_[2])/W_[0]
plt_axis.plot(x1,x2, 'k')
plt_axis.legend(('1','0'))
if msg:
plt_axis.set_title(msg) | mit | 208,630,451,438,331,260 | 26.88 | 123 | 0.577634 | false |
DCSaunders/tensorflow | tensorflow/python/ops/nn_fused_batchnorm_test.py | 19 | 8762 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for fused_batch_norm related functionality in tensorflow.ops.nn."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
class BatchNormalizationTest(tf.test.TestCase):
def _inference_ref(self, x, scale, offset, mean, var, epsilon, data_format):
if data_format not in ['NHWC', 'NCHW']:
raise ValueError('data_format must be NCHW or NHWC, '
'got %s.' % data_format)
if data_format == 'NCHW':
x = tf.transpose(x, [0, 2, 3, 1])
y = tf.nn.batch_normalization(x, mean, var, offset, scale, epsilon)
if data_format == 'NCHW':
y = tf.transpose(y, [0, 3, 1, 2])
return y.eval()
def _test_inference(self,
x_shape,
scale_shape,
use_gpu=True,
data_format='NHWC'):
np.random.seed(1)
x_val = np.random.random_sample(x_shape).astype(np.float32)
scale_val = np.random.random_sample(scale_shape).astype(np.float32)
offset_val = np.random.random_sample(scale_shape).astype(np.float32)
mean_val = np.random.random_sample(scale_shape).astype(np.float32)
var_val = np.random.random_sample(scale_shape).astype(np.float32)
with self.test_session(use_gpu=use_gpu) as sess:
x = tf.constant(x_val, name='x')
scale = tf.constant(scale_val, name='scale')
offset = tf.constant(offset_val, name='offset')
mean = tf.constant(mean_val, name='mean')
var = tf.constant(var_val, name='variance')
epsilon = 0.001
y, _, _ = tf.nn.fused_batch_norm(
x,
scale,
offset,
mean=mean,
variance=var,
epsilon=epsilon,
data_format=data_format,
is_training=False)
y_val = sess.run(y)
y_ref = self._inference_ref(x, scale, offset, mean, var, epsilon,
data_format)
self.assertAllClose(y_ref, y_val, atol=1e-3)
def _training_ref(self, x, scale, offset, epsilon, data_format):
if data_format not in ['NHWC', 'NCHW']:
raise ValueError('data_format must be NCHW or NHWC, '
'got %s.' % data_format)
if data_format == 'NCHW':
x = tf.transpose(x, [0, 2, 3, 1])
mean, var = tf.nn.moments(x, [0, 1, 2], keep_dims=False)
y = tf.nn.batch_normalization(x, mean, var, offset, scale, epsilon)
if data_format == 'NCHW':
y = tf.transpose(y, [0, 3, 1, 2])
return y.eval(), mean.eval(), var.eval()
def _test_training(self,
x_shape,
scale_shape,
use_gpu=True,
data_format='NHWC'):
np.random.seed(1)
x_val = np.random.random_sample(x_shape).astype(np.float32)
scale_val = np.random.random_sample(scale_shape).astype(np.float32)
offset_val = np.random.random_sample(scale_shape).astype(np.float32)
with self.test_session(use_gpu=use_gpu) as sess:
x = tf.constant(x_val, name='x')
scale = tf.constant(scale_val, name='scale')
offset = tf.constant(offset_val, name='offset')
epsilon = 0.001
y, mean, var = tf.nn.fused_batch_norm(
x,
scale,
offset,
epsilon=epsilon,
data_format=data_format,
is_training=True)
y_val, mean_val, var_val = sess.run([y, mean, var])
y_ref, mean_ref, var_ref = self._training_ref(x, scale, offset, epsilon,
data_format)
self.assertAllClose(y_ref, y_val, atol=1e-3)
self.assertAllClose(mean_ref, mean_val, atol=1e-3)
# This is for Bessel's correction. tf.nn.moments uses n, instead of n-1, as
# the denominator in the formula to calculate variance, while
# tf.nn.fused_batch_norm has Bessel's correction built in.
sample_size = x_val.size / scale_val.size
var_ref = var_ref * sample_size / (max(sample_size - 1.0, 1.0))
self.assertAllClose(var_ref, var_val, atol=1e-3)
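  # Added illustration: with sample_size = 6 and a biased (1/n) variance of
  # 2.0 from tf.nn.moments, the corrected value compared above is
  # 2.0 * 6 / (6 - 1) = 2.4.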
def _test_gradient(self,
x_shape,
scale_shape,
use_gpu=True,
data_format='NHWC'):
np.random.seed(1)
x_val = np.random.random_sample(x_shape).astype(np.float32)
scale_val = np.random.random_sample(scale_shape).astype(np.float32)
offset_val = np.random.random_sample(scale_shape).astype(np.float32)
with self.test_session(use_gpu=use_gpu):
x = tf.constant(x_val, name='x')
scale = tf.constant(scale_val, name='scale')
offset = tf.constant(offset_val, name='offset')
y, _, _ = tf.nn.fused_batch_norm(
x, scale, offset, data_format=data_format)
err_x = tf.test.compute_gradient_error(x, x_shape, y, x_shape)
err_scale = tf.test.compute_gradient_error(scale, scale_shape, y, x_shape)
err_offset = tf.test.compute_gradient_error(offset, scale_shape, y,
x_shape)
err_tolerance = 1e-3
self.assertLess(err_x, err_tolerance)
self.assertLess(err_scale, err_tolerance)
self.assertLess(err_offset, err_tolerance)
def testInference(self):
x_shape = [1, 1, 6, 1]
if tf.test.is_gpu_available():
self._test_inference(x_shape, [1], use_gpu=True, data_format='NHWC')
self._test_inference(x_shape, [1], use_gpu=True, data_format='NCHW')
self._test_inference(x_shape, [1], use_gpu=False, data_format='NHWC')
x_shape = [1, 1, 6, 2]
if tf.test.is_gpu_available():
self._test_inference(x_shape, [2], use_gpu=True, data_format='NHWC')
self._test_inference(x_shape, [2], use_gpu=False, data_format='NHWC')
x_shape = [1, 2, 1, 6]
if tf.test.is_gpu_available():
self._test_inference(x_shape, [2], use_gpu=True, data_format='NCHW')
x_shape = [27, 131, 127, 6]
if tf.test.is_gpu_available():
self._test_inference(x_shape, [131], use_gpu=True, data_format='NCHW')
self._test_inference(x_shape, [6], use_gpu=True, data_format='NHWC')
self._test_inference(x_shape, [6], use_gpu=False, data_format='NHWC')
def testTraining(self):
x_shape = [1, 1, 6, 1]
if tf.test.is_gpu_available():
self._test_training(x_shape, [1], use_gpu=True, data_format='NHWC')
self._test_training(x_shape, [1], use_gpu=True, data_format='NCHW')
self._test_training(x_shape, [1], use_gpu=False, data_format='NHWC')
x_shape = [1, 1, 6, 2]
if tf.test.is_gpu_available():
self._test_training(x_shape, [2], use_gpu=True, data_format='NHWC')
self._test_training(x_shape, [2], use_gpu=False, data_format='NHWC')
x_shape = [1, 2, 1, 6]
if tf.test.is_gpu_available():
self._test_training(x_shape, [2], use_gpu=True, data_format='NCHW')
x_shape = [27, 131, 127, 6]
if tf.test.is_gpu_available():
self._test_training(x_shape, [131], use_gpu=True, data_format='NCHW')
self._test_training(x_shape, [6], use_gpu=True, data_format='NHWC')
self._test_training(x_shape, [6], use_gpu=False, data_format='NHWC')
def testBatchNormGrad(self):
x_shape = [1, 1, 6, 1]
if tf.test.is_gpu_available():
self._test_gradient(x_shape, [1], use_gpu=True, data_format='NHWC')
self._test_gradient(x_shape, [1], use_gpu=True, data_format='NCHW')
self._test_gradient(x_shape, [1], use_gpu=False, data_format='NHWC')
x_shape = [1, 1, 6, 2]
if tf.test.is_gpu_available():
self._test_gradient(x_shape, [2], use_gpu=True, data_format='NHWC')
self._test_gradient(x_shape, [2], use_gpu=False, data_format='NHWC')
x_shape = [1, 2, 1, 6]
if tf.test.is_gpu_available():
self._test_gradient(x_shape, [2], use_gpu=True, data_format='NCHW')
x_shape = [7, 9, 13, 6]
if tf.test.is_gpu_available():
self._test_gradient(x_shape, [9], use_gpu=True, data_format='NCHW')
self._test_gradient(x_shape, [6], use_gpu=True, data_format='NHWC')
self._test_gradient(x_shape, [6], use_gpu=False, data_format='NHWC')
if __name__ == '__main__':
tf.test.main()
| apache-2.0 | -1,131,715,948,737,484,500 | 41.125 | 80 | 0.601575 | false |
romaneckert/robot | archive/bionics_old/core.py | 1 | 7117 | #!/usr/bin/env python3
import logging
import time
import os
import urllib.parse
import urllib.request
import urllib.error
from slugify import slugify
from sys import platform
from SimpleWebSocketServer import SimpleWebSocketServer, WebSocket
import threading
import test
import subprocess
from http.server import SimpleHTTPRequestHandler, HTTPServer
from os import curdir, sep
class Controller:
def __init__(self):
Log.setup()
mary_tts_server = MaryTtsServer()
while 5.2 != mary_tts_server.get_version():
pass
self.speaker = Speaker()
self.speaker.start()
self.socketServer = SocketServer()
self.socketServer.start()
self.httpServer = SimpleHTTPServer()
self.httpServer.start()
self.last_time = time.time()
self.start_time = self.last_time
self.delta_time = 0
self.fps = 2
def activate(self):
while True:
current_time = time.time()
self.delta_time = current_time - self.last_time
self.last_time = current_time
self.update()
sleep_time = 1.0/self.fps - time.time() + current_time
if sleep_time > 0:
time.sleep(sleep_time)
def update(self):
print('must implement update function')
# start new process with a message to speak
def say(self, message):
Queues.message.put(message)
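# Usage sketch (added; not part of the original file): Controller is meant to
# be subclassed with update() overridden; activate() then runs the loop at
# self.fps updates per second, and say() queues text for the Speaker thread.
#
#   class MyRobot(Controller):
#       def update(self):
#           pass  # poll sensors here and call self.say(...) as needed
#
#   MyRobot().activate()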
class Log:
log_directory = 'logs'
log_file = 'log.log'
@staticmethod
def latest():
messages = []
file_object = open(Log.log_directory + '/' + Log.log_file)
for line in file_object:
messages.append(line)
file_object.close()
return messages
@staticmethod
def info(message):
logging.info(message)
SocketServer.send(message)
@staticmethod
def error(message):
logging.error(message)
SocketServer.send(message)
@staticmethod
def setup():
os.makedirs(Log.log_directory, exist_ok=True)
logging.basicConfig(format='[%(asctime)s] [%(levelname)s] [%(message)s]',
level=logging.DEBUG,
filename=Log.log_directory + '/' + Log.log_file,
datefmt='%d-%m-%Y %H:%M:%S')
class MaryTtsServer (threading.Thread):
__version = 0
__pid = 0
def __init__(self):
threading.Thread.__init__(self)
self.start()
def run(self):
while True:
if self.get_version() != 5.2:
print(self.get_version())
command = "./vendor/marytts-5.2/bin/marytts-server"
self.__pid = subprocess.Popen(command, preexec_fn=os.setpgrp)
Log.info('Starting MaryTTS')
time.sleep(10)
time.sleep(1)
def get_version(self):
try:
urllib.request.urlopen('http://localhost:59125/version').read().decode('utf-8')
self.__version = 5.2
except urllib.error.URLError:
self.__version = 0
return self.__version
class Speaker (threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
def run(self):
while True:
if not Queues.message.empty():
message = Queues.message.get()
Log.info(message)
error = 0
sound_directory = 'sounds'
os.makedirs(sound_directory, exist_ok=True)
file_path = sound_directory + '/' + slugify(message) + '.wav'
if not (os.path.exists(file_path)):
params = (('INPUT_TEXT', message),
('INPUT_TYPE', 'TEXT'),
('OUTPUT_TYPE', 'AUDIO'),
('AUDIO', 'WAVE_FILE'),
('LOCALE', 'de'),
('effect_Chorus_selected', 'on'),
('effect_Chorus_parameters',
'delay1:466;amp1:0.54;delay2:600;amp2:-0.10;delay3:250;amp3:0.30'))
url = 'http://localhost:59125/process?' + urllib.parse.urlencode(params)
try:
urllib.request.urlretrieve(url, file_path)
except:
errorMessage = 'Can not read: ' + url
error = 1
if not error:
Queues.command.put(message)
if 'darwin' == platform:
os.system('afplay ' + file_path + ' > /dev/null 2>&1')
else:
os.system('mplayer ' + file_path + ' > /dev/null 2>&1')
else:
Log.error('Mary TTS not running. ' + errorMessage)
else:
time.sleep(0.1)
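# For reference (added note): Speaker.run above asks the local MaryTTS HTTP
# server to synthesise each message, i.e. it fetches a URL of the form
#   http://localhost:59125/process?INPUT_TEXT=...&INPUT_TYPE=TEXT&OUTPUT_TYPE=AUDIO&AUDIO=WAVE_FILE&LOCALE=de&...
# and caches the returned WAV under sounds/<slugified-message>.wav before
# playing it with afplay (macOS) or mplayer.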
class SocketServer (threading.Thread):
clients = []
def __init__(self):
threading.Thread.__init__(self)
self.server = SimpleWebSocketServer('', 8001, Socket)
@staticmethod
def send(message):
for client in SocketServer.clients:
client.sendMessage(message)
def run(self):
self.server.serveforever()
class Socket(WebSocket):
def handleMessage(self):
Queues.message.put(self.data)
def handleConnected(self):
Queues.message.put('Externes Gerät verbunden.')
SocketServer.clients.append(self)
for message in Log.latest():
SocketServer.send(message)
def handleClose(self):
print('close')
class HTTPRequestHandler(SimpleHTTPRequestHandler):
def do_GET(self):
if '/' == self.path:
self.path = "robot.html"
try:
mime_type = ''
if self.path.endswith('.html'):
mime_type = 'text/html'
elif self.path.endswith('.jpg'):
mime_type = 'image/jpg'
elif self.path.endswith('.min.js'):
mime_type = 'application/javascript'
elif self.path.endswith('.min.css'):
mime_type = 'text/css'
if '' != mime_type:
f = open(curdir + sep + 'public' + sep + self.path)
self.send_response(200)
self.send_header('Content-type', mime_type)
self.send_header('Cache-Control', 'no-cache, no-store, must-revalidate')
self.send_header('Pragma', 'no-cache')
self.send_header('Expires', '0')
self.end_headers()
self.wfile.write(f.read().encode('utf-8'))
f.close()
return
except IOError:
self.send_error(404, 'File Not Found: %s' % self.path)
return
class SimpleHTTPServer(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
self.server = HTTPServer(('', 8000), HTTPRequestHandler)
def run(self):
self.server.serve_forever()
class Queues:
message = test.Queue()
command = test.Queue()
| mit | -5,501,801,795,403,350,000 | 25.651685 | 98 | 0.526981 | false |
oinopion/django | tests/aggregation/tests.py | 14 | 43163 | from __future__ import unicode_literals
import datetime
import re
from decimal import Decimal
from django.core.exceptions import FieldError
from django.db import connection
from django.db.models import (
F, Aggregate, Avg, Count, DecimalField, DurationField, FloatField, Func,
IntegerField, Max, Min, Sum, Value,
)
from django.test import TestCase, ignore_warnings
from django.test.utils import Approximate, CaptureQueriesContext
from django.utils import six, timezone
from django.utils.deprecation import RemovedInDjango20Warning
from .models import Author, Book, Publisher, Store
class AggregateTestCase(TestCase):
@classmethod
def setUpTestData(cls):
cls.a1 = Author.objects.create(name='Adrian Holovaty', age=34)
cls.a2 = Author.objects.create(name='Jacob Kaplan-Moss', age=35)
cls.a3 = Author.objects.create(name='Brad Dayley', age=45)
cls.a4 = Author.objects.create(name='James Bennett', age=29)
cls.a5 = Author.objects.create(name='Jeffrey Forcier', age=37)
cls.a6 = Author.objects.create(name='Paul Bissex', age=29)
cls.a7 = Author.objects.create(name='Wesley J. Chun', age=25)
cls.a8 = Author.objects.create(name='Peter Norvig', age=57)
cls.a9 = Author.objects.create(name='Stuart Russell', age=46)
cls.a1.friends.add(cls.a2, cls.a4)
cls.a2.friends.add(cls.a1, cls.a7)
cls.a4.friends.add(cls.a1)
cls.a5.friends.add(cls.a6, cls.a7)
cls.a6.friends.add(cls.a5, cls.a7)
cls.a7.friends.add(cls.a2, cls.a5, cls.a6)
cls.a8.friends.add(cls.a9)
cls.a9.friends.add(cls.a8)
cls.p1 = Publisher.objects.create(name='Apress', num_awards=3, duration=datetime.timedelta(days=1))
cls.p2 = Publisher.objects.create(name='Sams', num_awards=1, duration=datetime.timedelta(days=2))
cls.p3 = Publisher.objects.create(name='Prentice Hall', num_awards=7)
cls.p4 = Publisher.objects.create(name='Morgan Kaufmann', num_awards=9)
cls.p5 = Publisher.objects.create(name="Jonno's House of Books", num_awards=0)
cls.b1 = Book.objects.create(
isbn='159059725', name='The Definitive Guide to Django: Web Development Done Right',
pages=447, rating=4.5, price=Decimal('30.00'), contact=cls.a1, publisher=cls.p1,
pubdate=datetime.date(2007, 12, 6)
)
cls.b2 = Book.objects.create(
isbn='067232959', name='Sams Teach Yourself Django in 24 Hours',
pages=528, rating=3.0, price=Decimal('23.09'), contact=cls.a3, publisher=cls.p2,
pubdate=datetime.date(2008, 3, 3)
)
cls.b3 = Book.objects.create(
isbn='159059996', name='Practical Django Projects',
pages=300, rating=4.0, price=Decimal('29.69'), contact=cls.a4, publisher=cls.p1,
pubdate=datetime.date(2008, 6, 23)
)
cls.b4 = Book.objects.create(
isbn='013235613', name='Python Web Development with Django',
pages=350, rating=4.0, price=Decimal('29.69'), contact=cls.a5, publisher=cls.p3,
pubdate=datetime.date(2008, 11, 3)
)
cls.b5 = Book.objects.create(
isbn='013790395', name='Artificial Intelligence: A Modern Approach',
pages=1132, rating=4.0, price=Decimal('82.80'), contact=cls.a8, publisher=cls.p3,
pubdate=datetime.date(1995, 1, 15)
)
cls.b6 = Book.objects.create(
isbn='155860191', name='Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp',
pages=946, rating=5.0, price=Decimal('75.00'), contact=cls.a8, publisher=cls.p4,
pubdate=datetime.date(1991, 10, 15)
)
cls.b1.authors.add(cls.a1, cls.a2)
cls.b2.authors.add(cls.a3)
cls.b3.authors.add(cls.a4)
cls.b4.authors.add(cls.a5, cls.a6, cls.a7)
cls.b5.authors.add(cls.a8, cls.a9)
cls.b6.authors.add(cls.a8)
s1 = Store.objects.create(
name='Amazon.com',
original_opening=datetime.datetime(1994, 4, 23, 9, 17, 42),
friday_night_closing=datetime.time(23, 59, 59)
)
s2 = Store.objects.create(
name='Books.com',
original_opening=datetime.datetime(2001, 3, 15, 11, 23, 37),
friday_night_closing=datetime.time(23, 59, 59)
)
s3 = Store.objects.create(
name="Mamma and Pappa's Books",
original_opening=datetime.datetime(1945, 4, 25, 16, 24, 14),
friday_night_closing=datetime.time(21, 30)
)
s1.books.add(cls.b1, cls.b2, cls.b3, cls.b4, cls.b5, cls.b6)
s2.books.add(cls.b1, cls.b3, cls.b5, cls.b6)
s3.books.add(cls.b3, cls.b4, cls.b6)
def test_empty_aggregate(self):
self.assertEqual(Author.objects.all().aggregate(), {})
def test_single_aggregate(self):
vals = Author.objects.aggregate(Avg("age"))
self.assertEqual(vals, {"age__avg": Approximate(37.4, places=1)})
def test_multiple_aggregates(self):
vals = Author.objects.aggregate(Sum("age"), Avg("age"))
self.assertEqual(vals, {"age__sum": 337, "age__avg": Approximate(37.4, places=1)})
def test_filter_aggregate(self):
vals = Author.objects.filter(age__gt=29).aggregate(Sum("age"))
self.assertEqual(len(vals), 1)
self.assertEqual(vals["age__sum"], 254)
def test_related_aggregate(self):
vals = Author.objects.aggregate(Avg("friends__age"))
self.assertEqual(len(vals), 1)
self.assertAlmostEqual(vals["friends__age__avg"], 34.07, places=2)
vals = Book.objects.filter(rating__lt=4.5).aggregate(Avg("authors__age"))
self.assertEqual(len(vals), 1)
self.assertAlmostEqual(vals["authors__age__avg"], 38.2857, places=2)
vals = Author.objects.all().filter(name__contains="a").aggregate(Avg("book__rating"))
self.assertEqual(len(vals), 1)
self.assertEqual(vals["book__rating__avg"], 4.0)
vals = Book.objects.aggregate(Sum("publisher__num_awards"))
self.assertEqual(len(vals), 1)
self.assertEqual(vals["publisher__num_awards__sum"], 30)
vals = Publisher.objects.aggregate(Sum("book__price"))
self.assertEqual(len(vals), 1)
self.assertEqual(vals["book__price__sum"], Decimal("270.27"))
def test_aggregate_multi_join(self):
vals = Store.objects.aggregate(Max("books__authors__age"))
self.assertEqual(len(vals), 1)
self.assertEqual(vals["books__authors__age__max"], 57)
vals = Author.objects.aggregate(Min("book__publisher__num_awards"))
self.assertEqual(len(vals), 1)
self.assertEqual(vals["book__publisher__num_awards__min"], 1)
def test_aggregate_alias(self):
vals = Store.objects.filter(name="Amazon.com").aggregate(amazon_mean=Avg("books__rating"))
self.assertEqual(len(vals), 1)
self.assertAlmostEqual(vals["amazon_mean"], 4.08, places=2)
def test_annotate_basic(self):
self.assertQuerysetEqual(
Book.objects.annotate().order_by('pk'), [
"The Definitive Guide to Django: Web Development Done Right",
"Sams Teach Yourself Django in 24 Hours",
"Practical Django Projects",
"Python Web Development with Django",
"Artificial Intelligence: A Modern Approach",
"Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp"
],
lambda b: b.name
)
books = Book.objects.annotate(mean_age=Avg("authors__age"))
b = books.get(pk=self.b1.pk)
self.assertEqual(
b.name,
'The Definitive Guide to Django: Web Development Done Right'
)
self.assertEqual(b.mean_age, 34.5)
def test_annotate_defer(self):
qs = Book.objects.annotate(
page_sum=Sum("pages")).defer('name').filter(pk=self.b1.pk)
rows = [
(1, "159059725", 447, "The Definitive Guide to Django: Web Development Done Right")
]
self.assertQuerysetEqual(
qs.order_by('pk'), rows,
lambda r: (r.id, r.isbn, r.page_sum, r.name)
)
def test_annotate_defer_select_related(self):
qs = Book.objects.select_related('contact').annotate(
page_sum=Sum("pages")).defer('name').filter(pk=self.b1.pk)
rows = [
(1, "159059725", 447, "Adrian Holovaty",
"The Definitive Guide to Django: Web Development Done Right")
]
self.assertQuerysetEqual(
qs.order_by('pk'), rows,
lambda r: (r.id, r.isbn, r.page_sum, r.contact.name, r.name)
)
def test_annotate_m2m(self):
books = Book.objects.filter(rating__lt=4.5).annotate(Avg("authors__age")).order_by("name")
self.assertQuerysetEqual(
books, [
('Artificial Intelligence: A Modern Approach', 51.5),
('Practical Django Projects', 29.0),
('Python Web Development with Django', Approximate(30.3, places=1)),
('Sams Teach Yourself Django in 24 Hours', 45.0)
],
lambda b: (b.name, b.authors__age__avg),
)
books = Book.objects.annotate(num_authors=Count("authors")).order_by("name")
self.assertQuerysetEqual(
books, [
('Artificial Intelligence: A Modern Approach', 2),
('Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp', 1),
('Practical Django Projects', 1),
('Python Web Development with Django', 3),
('Sams Teach Yourself Django in 24 Hours', 1),
('The Definitive Guide to Django: Web Development Done Right', 2)
],
lambda b: (b.name, b.num_authors)
)
def test_backwards_m2m_annotate(self):
authors = Author.objects.filter(name__contains="a").annotate(Avg("book__rating")).order_by("name")
self.assertQuerysetEqual(
authors, [
('Adrian Holovaty', 4.5),
('Brad Dayley', 3.0),
('Jacob Kaplan-Moss', 4.5),
('James Bennett', 4.0),
('Paul Bissex', 4.0),
('Stuart Russell', 4.0)
],
lambda a: (a.name, a.book__rating__avg)
)
authors = Author.objects.annotate(num_books=Count("book")).order_by("name")
self.assertQuerysetEqual(
authors, [
('Adrian Holovaty', 1),
('Brad Dayley', 1),
('Jacob Kaplan-Moss', 1),
('James Bennett', 1),
('Jeffrey Forcier', 1),
('Paul Bissex', 1),
('Peter Norvig', 2),
('Stuart Russell', 1),
('Wesley J. Chun', 1)
],
lambda a: (a.name, a.num_books)
)
def test_reverse_fkey_annotate(self):
books = Book.objects.annotate(Sum("publisher__num_awards")).order_by("name")
self.assertQuerysetEqual(
books, [
('Artificial Intelligence: A Modern Approach', 7),
('Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp', 9),
('Practical Django Projects', 3),
('Python Web Development with Django', 7),
('Sams Teach Yourself Django in 24 Hours', 1),
('The Definitive Guide to Django: Web Development Done Right', 3)
],
lambda b: (b.name, b.publisher__num_awards__sum)
)
publishers = Publisher.objects.annotate(Sum("book__price")).order_by("name")
self.assertQuerysetEqual(
publishers, [
('Apress', Decimal("59.69")),
("Jonno's House of Books", None),
('Morgan Kaufmann', Decimal("75.00")),
('Prentice Hall', Decimal("112.49")),
('Sams', Decimal("23.09"))
],
lambda p: (p.name, p.book__price__sum)
)
def test_annotate_values(self):
books = list(Book.objects.filter(pk=self.b1.pk).annotate(mean_age=Avg("authors__age")).values())
self.assertEqual(
books, [
{
"contact_id": 1,
"id": 1,
"isbn": "159059725",
"mean_age": 34.5,
"name": "The Definitive Guide to Django: Web Development Done Right",
"pages": 447,
"price": Approximate(Decimal("30")),
"pubdate": datetime.date(2007, 12, 6),
"publisher_id": 1,
"rating": 4.5,
}
]
)
books = Book.objects.filter(pk=self.b1.pk).annotate(mean_age=Avg('authors__age')).values('pk', 'isbn', 'mean_age')
self.assertEqual(
list(books), [
{
"pk": 1,
"isbn": "159059725",
"mean_age": 34.5,
}
]
)
books = Book.objects.filter(pk=self.b1.pk).annotate(mean_age=Avg("authors__age")).values("name")
self.assertEqual(
list(books), [
{
"name": "The Definitive Guide to Django: Web Development Done Right"
}
]
)
books = Book.objects.filter(pk=self.b1.pk).values().annotate(mean_age=Avg('authors__age'))
self.assertEqual(
list(books), [
{
"contact_id": 1,
"id": 1,
"isbn": "159059725",
"mean_age": 34.5,
"name": "The Definitive Guide to Django: Web Development Done Right",
"pages": 447,
"price": Approximate(Decimal("30")),
"pubdate": datetime.date(2007, 12, 6),
"publisher_id": 1,
"rating": 4.5,
}
]
)
books = Book.objects.values("rating").annotate(n_authors=Count("authors__id"), mean_age=Avg("authors__age")).order_by("rating")
self.assertEqual(
list(books), [
{
"rating": 3.0,
"n_authors": 1,
"mean_age": 45.0,
},
{
"rating": 4.0,
"n_authors": 6,
"mean_age": Approximate(37.16, places=1)
},
{
"rating": 4.5,
"n_authors": 2,
"mean_age": 34.5,
},
{
"rating": 5.0,
"n_authors": 1,
"mean_age": 57.0,
}
]
)
authors = Author.objects.annotate(Avg("friends__age")).order_by("name")
self.assertEqual(len(authors), 9)
self.assertQuerysetEqual(
authors, [
('Adrian Holovaty', 32.0),
('Brad Dayley', None),
('Jacob Kaplan-Moss', 29.5),
('James Bennett', 34.0),
('Jeffrey Forcier', 27.0),
('Paul Bissex', 31.0),
('Peter Norvig', 46.0),
('Stuart Russell', 57.0),
('Wesley J. Chun', Approximate(33.66, places=1))
],
lambda a: (a.name, a.friends__age__avg)
)
def test_count(self):
vals = Book.objects.aggregate(Count("rating"))
self.assertEqual(vals, {"rating__count": 6})
vals = Book.objects.aggregate(Count("rating", distinct=True))
self.assertEqual(vals, {"rating__count": 4})
def test_fkey_aggregate(self):
explicit = list(Author.objects.annotate(Count('book__id')))
implicit = list(Author.objects.annotate(Count('book')))
self.assertEqual(explicit, implicit)
def test_annotate_ordering(self):
books = Book.objects.values('rating').annotate(oldest=Max('authors__age')).order_by('oldest', 'rating')
self.assertEqual(
list(books), [
{
"rating": 4.5,
"oldest": 35,
},
{
"rating": 3.0,
"oldest": 45
},
{
"rating": 4.0,
"oldest": 57,
},
{
"rating": 5.0,
"oldest": 57,
}
]
)
books = Book.objects.values("rating").annotate(oldest=Max("authors__age")).order_by("-oldest", "-rating")
self.assertEqual(
list(books), [
{
"rating": 5.0,
"oldest": 57,
},
{
"rating": 4.0,
"oldest": 57,
},
{
"rating": 3.0,
"oldest": 45,
},
{
"rating": 4.5,
"oldest": 35,
}
]
)
def test_aggregate_annotation(self):
vals = Book.objects.annotate(num_authors=Count("authors__id")).aggregate(Avg("num_authors"))
self.assertEqual(vals, {"num_authors__avg": Approximate(1.66, places=1)})
def test_avg_duration_field(self):
self.assertEqual(
Publisher.objects.aggregate(Avg('duration', output_field=DurationField())),
{'duration__avg': datetime.timedelta(days=1, hours=12)}
)
def test_sum_duration_field(self):
self.assertEqual(
Publisher.objects.aggregate(Sum('duration', output_field=DurationField())),
{'duration__sum': datetime.timedelta(days=3)}
)
def test_sum_distinct_aggregate(self):
"""
        Sum on a distinct() QuerySet should aggregate only the distinct items.
"""
authors = Author.objects.filter(book__in=[5, 6])
self.assertEqual(authors.count(), 3)
distinct_authors = authors.distinct()
self.assertEqual(distinct_authors.count(), 2)
# Selected author ages are 57 and 46
age_sum = distinct_authors.aggregate(Sum('age'))
self.assertEqual(age_sum['age__sum'], 103)
def test_filtering(self):
p = Publisher.objects.create(name='Expensive Publisher', num_awards=0)
Book.objects.create(
name='ExpensiveBook1',
pages=1,
isbn='111',
rating=3.5,
price=Decimal("1000"),
publisher=p,
contact_id=1,
pubdate=datetime.date(2008, 12, 1)
)
Book.objects.create(
name='ExpensiveBook2',
pages=1,
isbn='222',
rating=4.0,
price=Decimal("1000"),
publisher=p,
contact_id=1,
pubdate=datetime.date(2008, 12, 2)
)
Book.objects.create(
name='ExpensiveBook3',
pages=1,
isbn='333',
rating=4.5,
price=Decimal("35"),
publisher=p,
contact_id=1,
pubdate=datetime.date(2008, 12, 3)
)
publishers = Publisher.objects.annotate(num_books=Count("book__id")).filter(num_books__gt=1).order_by("pk")
self.assertQuerysetEqual(
publishers, [
"Apress",
"Prentice Hall",
"Expensive Publisher",
],
lambda p: p.name,
)
publishers = Publisher.objects.filter(book__price__lt=Decimal("40.0")).order_by("pk")
self.assertQuerysetEqual(
publishers, [
"Apress",
"Apress",
"Sams",
"Prentice Hall",
"Expensive Publisher",
],
lambda p: p.name
)
publishers = Publisher.objects.annotate(num_books=Count("book__id")).filter(num_books__gt=1, book__price__lt=Decimal("40.0")).order_by("pk")
self.assertQuerysetEqual(
publishers, [
"Apress",
"Prentice Hall",
"Expensive Publisher",
],
lambda p: p.name,
)
publishers = Publisher.objects.filter(book__price__lt=Decimal("40.0")).annotate(num_books=Count("book__id")).filter(num_books__gt=1).order_by("pk")
self.assertQuerysetEqual(
publishers, [
"Apress",
],
lambda p: p.name
)
publishers = Publisher.objects.annotate(num_books=Count("book")).filter(num_books__range=[1, 3]).order_by("pk")
self.assertQuerysetEqual(
publishers, [
"Apress",
"Sams",
"Prentice Hall",
"Morgan Kaufmann",
"Expensive Publisher",
],
lambda p: p.name
)
publishers = Publisher.objects.annotate(num_books=Count("book")).filter(num_books__range=[1, 2]).order_by("pk")
self.assertQuerysetEqual(
publishers, [
"Apress",
"Sams",
"Prentice Hall",
"Morgan Kaufmann",
],
lambda p: p.name
)
publishers = Publisher.objects.annotate(num_books=Count("book")).filter(num_books__in=[1, 3]).order_by("pk")
self.assertQuerysetEqual(
publishers, [
"Sams",
"Morgan Kaufmann",
"Expensive Publisher",
],
lambda p: p.name,
)
publishers = Publisher.objects.annotate(num_books=Count("book")).filter(num_books__isnull=True)
self.assertEqual(len(publishers), 0)
def test_annotation(self):
vals = Author.objects.filter(pk=self.a1.pk).aggregate(Count("friends__id"))
self.assertEqual(vals, {"friends__id__count": 2})
books = Book.objects.annotate(num_authors=Count("authors__name")).filter(num_authors__exact=2).order_by("pk")
self.assertQuerysetEqual(
books, [
"The Definitive Guide to Django: Web Development Done Right",
"Artificial Intelligence: A Modern Approach",
],
lambda b: b.name
)
authors = Author.objects.annotate(num_friends=Count("friends__id", distinct=True)).filter(num_friends=0).order_by("pk")
self.assertQuerysetEqual(
authors, [
"Brad Dayley",
],
lambda a: a.name
)
publishers = Publisher.objects.annotate(num_books=Count("book__id")).filter(num_books__gt=1).order_by("pk")
self.assertQuerysetEqual(
publishers, [
"Apress",
"Prentice Hall",
],
lambda p: p.name
)
publishers = Publisher.objects.filter(book__price__lt=Decimal("40.0")).annotate(num_books=Count("book__id")).filter(num_books__gt=1)
self.assertQuerysetEqual(
publishers, [
"Apress",
],
lambda p: p.name
)
books = Book.objects.annotate(num_authors=Count("authors__id")).filter(authors__name__contains="Norvig", num_authors__gt=1)
self.assertQuerysetEqual(
books, [
"Artificial Intelligence: A Modern Approach",
],
lambda b: b.name
)
def test_more_aggregation(self):
a = Author.objects.get(name__contains='Norvig')
b = Book.objects.get(name__contains='Done Right')
b.authors.add(a)
b.save()
vals = Book.objects.annotate(num_authors=Count("authors__id")).filter(authors__name__contains="Norvig", num_authors__gt=1).aggregate(Avg("rating"))
self.assertEqual(vals, {"rating__avg": 4.25})
def test_even_more_aggregate(self):
publishers = Publisher.objects.annotate(
earliest_book=Min("book__pubdate"),
).exclude(earliest_book=None).order_by("earliest_book").values(
'earliest_book',
'num_awards',
'id',
'name',
)
self.assertEqual(
list(publishers), [
{
'earliest_book': datetime.date(1991, 10, 15),
'num_awards': 9,
'id': 4,
'name': 'Morgan Kaufmann'
},
{
'earliest_book': datetime.date(1995, 1, 15),
'num_awards': 7,
'id': 3,
'name': 'Prentice Hall'
},
{
'earliest_book': datetime.date(2007, 12, 6),
'num_awards': 3,
'id': 1,
'name': 'Apress'
},
{
'earliest_book': datetime.date(2008, 3, 3),
'num_awards': 1,
'id': 2,
'name': 'Sams'
}
]
)
vals = Store.objects.aggregate(Max("friday_night_closing"), Min("original_opening"))
self.assertEqual(
vals,
{
"friday_night_closing__max": datetime.time(23, 59, 59),
"original_opening__min": datetime.datetime(1945, 4, 25, 16, 24, 14),
}
)
def test_annotate_values_list(self):
books = Book.objects.filter(pk=self.b1.pk).annotate(mean_age=Avg("authors__age")).values_list("pk", "isbn", "mean_age")
self.assertEqual(
list(books), [
(1, "159059725", 34.5),
]
)
books = Book.objects.filter(pk=self.b1.pk).annotate(mean_age=Avg("authors__age")).values_list("isbn")
self.assertEqual(
list(books), [
('159059725',)
]
)
books = Book.objects.filter(pk=self.b1.pk).annotate(mean_age=Avg("authors__age")).values_list("mean_age")
self.assertEqual(
list(books), [
(34.5,)
]
)
books = Book.objects.filter(pk=self.b1.pk).annotate(mean_age=Avg("authors__age")).values_list("mean_age", flat=True)
self.assertEqual(list(books), [34.5])
books = Book.objects.values_list("price").annotate(count=Count("price")).order_by("-count", "price")
self.assertEqual(
list(books), [
(Decimal("29.69"), 2),
(Decimal('23.09'), 1),
(Decimal('30'), 1),
(Decimal('75'), 1),
(Decimal('82.8'), 1),
]
)
def test_dates_with_aggregation(self):
"""
Test that .dates() returns a distinct set of dates when applied to a
QuerySet with aggregation.
Refs #18056. Previously, .dates() would return distinct (date_kind,
aggregation) sets, in this case (year, num_authors), so 2008 would be
returned twice because there are books from 2008 with a different
number of authors.
"""
dates = Book.objects.annotate(num_authors=Count("authors")).dates('pubdate', 'year')
self.assertQuerysetEqual(
dates, [
"datetime.date(1991, 1, 1)",
"datetime.date(1995, 1, 1)",
"datetime.date(2007, 1, 1)",
"datetime.date(2008, 1, 1)"
]
)
def test_values_aggregation(self):
# Refs #20782
max_rating = Book.objects.values('rating').aggregate(max_rating=Max('rating'))
self.assertEqual(max_rating['max_rating'], 5)
max_books_per_rating = Book.objects.values('rating').annotate(
books_per_rating=Count('id')
).aggregate(Max('books_per_rating'))
self.assertEqual(
max_books_per_rating,
{'books_per_rating__max': 3})
def test_ticket17424(self):
"""
Check that doing exclude() on a foreign model after annotate()
doesn't crash.
"""
all_books = list(Book.objects.values_list('pk', flat=True).order_by('pk'))
annotated_books = Book.objects.order_by('pk').annotate(one=Count("id"))
# The value doesn't matter, we just need any negative
# constraint on a related model that's a noop.
excluded_books = annotated_books.exclude(publisher__name="__UNLIKELY_VALUE__")
# Try to generate query tree
str(excluded_books.query)
self.assertQuerysetEqual(excluded_books, all_books, lambda x: x.pk)
# Check internal state
self.assertIsNone(annotated_books.query.alias_map["aggregation_book"].join_type)
self.assertIsNone(excluded_books.query.alias_map["aggregation_book"].join_type)
def test_ticket12886(self):
"""
Check that aggregation over sliced queryset works correctly.
"""
qs = Book.objects.all().order_by('-rating')[0:3]
vals = qs.aggregate(average_top3_rating=Avg('rating'))['average_top3_rating']
self.assertAlmostEqual(vals, 4.5, places=2)
def test_ticket11881(self):
"""
Check that subqueries do not needlessly contain ORDER BY, SELECT FOR UPDATE
or select_related() stuff.
"""
qs = Book.objects.all().select_for_update().order_by(
'pk').select_related('publisher').annotate(max_pk=Max('pk'))
with CaptureQueriesContext(connection) as captured_queries:
qs.aggregate(avg_pk=Avg('max_pk'))
self.assertEqual(len(captured_queries), 1)
qstr = captured_queries[0]['sql'].lower()
self.assertNotIn('for update', qstr)
forced_ordering = connection.ops.force_no_ordering()
if forced_ordering:
# If the backend needs to force an ordering we make sure it's
# the only "ORDER BY" clause present in the query.
self.assertEqual(
re.findall(r'order by (\w+)', qstr),
[', '.join(f[1][0] for f in forced_ordering).lower()]
)
else:
self.assertNotIn('order by', qstr)
self.assertEqual(qstr.count(' join '), 0)
def test_decimal_max_digits_has_no_effect(self):
Book.objects.all().delete()
a1 = Author.objects.first()
p1 = Publisher.objects.first()
thedate = timezone.now()
for i in range(10):
Book.objects.create(
isbn="abcde{}".format(i), name="none", pages=10, rating=4.0,
price=9999.98, contact=a1, publisher=p1, pubdate=thedate)
book = Book.objects.aggregate(price_sum=Sum('price'))
self.assertEqual(book['price_sum'], Decimal("99999.80"))
def test_nonaggregate_aggregation_throws(self):
with six.assertRaisesRegex(self, TypeError, 'fail is not an aggregate expression'):
Book.objects.aggregate(fail=F('price'))
def test_nonfield_annotation(self):
book = Book.objects.annotate(val=Max(Value(2, output_field=IntegerField()))).first()
self.assertEqual(book.val, 2)
book = Book.objects.annotate(val=Max(Value(2), output_field=IntegerField())).first()
self.assertEqual(book.val, 2)
book = Book.objects.annotate(val=Max(2, output_field=IntegerField())).first()
self.assertEqual(book.val, 2)
def test_missing_output_field_raises_error(self):
with six.assertRaisesRegex(self, FieldError, 'Cannot resolve expression type, unknown output_field'):
Book.objects.annotate(val=Max(2)).first()
def test_annotation_expressions(self):
authors = Author.objects.annotate(combined_ages=Sum(F('age') + F('friends__age'))).order_by('name')
authors2 = Author.objects.annotate(combined_ages=Sum('age') + Sum('friends__age')).order_by('name')
for qs in (authors, authors2):
self.assertEqual(len(qs), 9)
self.assertQuerysetEqual(
qs, [
('Adrian Holovaty', 132),
('Brad Dayley', None),
('Jacob Kaplan-Moss', 129),
('James Bennett', 63),
('Jeffrey Forcier', 128),
('Paul Bissex', 120),
('Peter Norvig', 103),
('Stuart Russell', 103),
('Wesley J. Chun', 176)
],
lambda a: (a.name, a.combined_ages)
)
def test_aggregation_expressions(self):
a1 = Author.objects.aggregate(av_age=Sum('age') / Count('*'))
a2 = Author.objects.aggregate(av_age=Sum('age') / Count('age'))
a3 = Author.objects.aggregate(av_age=Avg('age'))
self.assertEqual(a1, {'av_age': 37})
self.assertEqual(a2, {'av_age': 37})
self.assertEqual(a3, {'av_age': Approximate(37.4, places=1)})
def test_avg_decimal_field(self):
v = Book.objects.filter(rating=4).aggregate(avg_price=(Avg('price')))['avg_price']
self.assertIsInstance(v, float)
self.assertEqual(v, Approximate(47.39, places=2))
def test_order_of_precedence(self):
p1 = Book.objects.filter(rating=4).aggregate(avg_price=(Avg('price') + 2) * 3)
self.assertEqual(p1, {'avg_price': Approximate(148.18, places=2)})
p2 = Book.objects.filter(rating=4).aggregate(avg_price=Avg('price') + 2 * 3)
self.assertEqual(p2, {'avg_price': Approximate(53.39, places=2)})
def test_combine_different_types(self):
with six.assertRaisesRegex(self, FieldError, 'Expression contains mixed types. You must set output_field'):
Book.objects.annotate(sums=Sum('rating') + Sum('pages') + Sum('price')).get(pk=self.b4.pk)
b1 = Book.objects.annotate(sums=Sum(F('rating') + F('pages') + F('price'),
output_field=IntegerField())).get(pk=self.b4.pk)
self.assertEqual(b1.sums, 383)
b2 = Book.objects.annotate(sums=Sum(F('rating') + F('pages') + F('price'),
output_field=FloatField())).get(pk=self.b4.pk)
self.assertEqual(b2.sums, 383.69)
b3 = Book.objects.annotate(sums=Sum(F('rating') + F('pages') + F('price'),
output_field=DecimalField())).get(pk=self.b4.pk)
self.assertEqual(b3.sums, Approximate(Decimal("383.69"), places=2))
def test_complex_aggregations_require_kwarg(self):
with six.assertRaisesRegex(self, TypeError, 'Complex annotations require an alias'):
Author.objects.annotate(Sum(F('age') + F('friends__age')))
with six.assertRaisesRegex(self, TypeError, 'Complex aggregates require an alias'):
Author.objects.aggregate(Sum('age') / Count('age'))
with six.assertRaisesRegex(self, TypeError, 'Complex aggregates require an alias'):
Author.objects.aggregate(Sum(1))
def test_aggregate_over_complex_annotation(self):
qs = Author.objects.annotate(
combined_ages=Sum(F('age') + F('friends__age')))
age = qs.aggregate(max_combined_age=Max('combined_ages'))
self.assertEqual(age['max_combined_age'], 176)
age = qs.aggregate(max_combined_age_doubled=Max('combined_ages') * 2)
self.assertEqual(age['max_combined_age_doubled'], 176 * 2)
age = qs.aggregate(
max_combined_age_doubled=Max('combined_ages') + Max('combined_ages'))
self.assertEqual(age['max_combined_age_doubled'], 176 * 2)
age = qs.aggregate(
max_combined_age_doubled=Max('combined_ages') + Max('combined_ages'),
sum_combined_age=Sum('combined_ages'))
self.assertEqual(age['max_combined_age_doubled'], 176 * 2)
self.assertEqual(age['sum_combined_age'], 954)
age = qs.aggregate(
max_combined_age_doubled=Max('combined_ages') + Max('combined_ages'),
sum_combined_age_doubled=Sum('combined_ages') + Sum('combined_ages'))
self.assertEqual(age['max_combined_age_doubled'], 176 * 2)
self.assertEqual(age['sum_combined_age_doubled'], 954 * 2)
def test_values_annotation_with_expression(self):
# ensure the F() is promoted to the group by clause
qs = Author.objects.values('name').annotate(another_age=Sum('age') + F('age'))
a = qs.get(name="Adrian Holovaty")
self.assertEqual(a['another_age'], 68)
qs = qs.annotate(friend_count=Count('friends'))
a = qs.get(name="Adrian Holovaty")
self.assertEqual(a['friend_count'], 2)
qs = qs.annotate(combined_age=Sum('age') + F('friends__age')).filter(
name="Adrian Holovaty").order_by('-combined_age')
self.assertEqual(
list(qs), [
{
"name": 'Adrian Holovaty',
"another_age": 68,
"friend_count": 1,
"combined_age": 69
},
{
"name": 'Adrian Holovaty',
"another_age": 68,
"friend_count": 1,
"combined_age": 63
}
]
)
vals = qs.values('name', 'combined_age')
self.assertEqual(
list(vals), [
{
"name": 'Adrian Holovaty',
"combined_age": 69
},
{
"name": 'Adrian Holovaty',
"combined_age": 63
}
]
)
def test_annotate_values_aggregate(self):
alias_age = Author.objects.annotate(
age_alias=F('age')
).values(
'age_alias',
).aggregate(sum_age=Sum('age_alias'))
age = Author.objects.values('age').aggregate(sum_age=Sum('age'))
self.assertEqual(alias_age['sum_age'], age['sum_age'])
def test_annotate_over_annotate(self):
author = Author.objects.annotate(
age_alias=F('age')
).annotate(
sum_age=Sum('age_alias')
).get(name="Adrian Holovaty")
other_author = Author.objects.annotate(
sum_age=Sum('age')
).get(name="Adrian Holovaty")
self.assertEqual(author.sum_age, other_author.sum_age)
def test_annotated_aggregate_over_annotated_aggregate(self):
with six.assertRaisesRegex(self, FieldError, "Cannot compute Sum\('id__max'\): 'id__max' is an aggregate"):
Book.objects.annotate(Max('id')).annotate(Sum('id__max'))
def test_add_implementation(self):
class MySum(Sum):
pass
# test completely changing how the output is rendered
def lower_case_function_override(self, compiler, connection):
sql, params = compiler.compile(self.source_expressions[0])
substitutions = dict(function=self.function.lower(), expressions=sql)
substitutions.update(self.extra)
return self.template % substitutions, params
setattr(MySum, 'as_' + connection.vendor, lower_case_function_override)
qs = Book.objects.annotate(
sums=MySum(F('rating') + F('pages') + F('price'), output_field=IntegerField())
)
self.assertEqual(str(qs.query).count('sum('), 1)
b1 = qs.get(pk=self.b4.pk)
self.assertEqual(b1.sums, 383)
# test changing the dict and delegating
def lower_case_function_super(self, compiler, connection):
self.extra['function'] = self.function.lower()
return super(MySum, self).as_sql(compiler, connection)
setattr(MySum, 'as_' + connection.vendor, lower_case_function_super)
qs = Book.objects.annotate(
sums=MySum(F('rating') + F('pages') + F('price'), output_field=IntegerField())
)
self.assertEqual(str(qs.query).count('sum('), 1)
b1 = qs.get(pk=self.b4.pk)
self.assertEqual(b1.sums, 383)
# test overriding all parts of the template
def be_evil(self, compiler, connection):
substitutions = dict(function='MAX', expressions='2')
substitutions.update(self.extra)
return self.template % substitutions, ()
setattr(MySum, 'as_' + connection.vendor, be_evil)
qs = Book.objects.annotate(
sums=MySum(F('rating') + F('pages') + F('price'), output_field=IntegerField())
)
self.assertEqual(str(qs.query).count('MAX('), 1)
b1 = qs.get(pk=self.b4.pk)
self.assertEqual(b1.sums, 2)
def test_complex_values_aggregation(self):
max_rating = Book.objects.values('rating').aggregate(
double_max_rating=Max('rating') + Max('rating'))
self.assertEqual(max_rating['double_max_rating'], 5 * 2)
max_books_per_rating = Book.objects.values('rating').annotate(
books_per_rating=Count('id') + 5
).aggregate(Max('books_per_rating'))
self.assertEqual(
max_books_per_rating,
{'books_per_rating__max': 3 + 5})
def test_expression_on_aggregation(self):
# Create a plain expression
class Greatest(Func):
function = 'GREATEST'
def as_sqlite(self, compiler, connection):
return super(Greatest, self).as_sql(compiler, connection, function='MAX')
qs = Publisher.objects.annotate(
price_or_median=Greatest(Avg('book__rating'), Avg('book__price'))
).filter(price_or_median__gte=F('num_awards')).order_by('num_awards')
self.assertQuerysetEqual(
qs, [1, 3, 7, 9], lambda v: v.num_awards)
qs2 = Publisher.objects.annotate(
rating_or_num_awards=Greatest(Avg('book__rating'), F('num_awards'),
output_field=FloatField())
).filter(rating_or_num_awards__gt=F('num_awards')).order_by('num_awards')
self.assertQuerysetEqual(
qs2, [1, 3], lambda v: v.num_awards)
@ignore_warnings(category=RemovedInDjango20Warning)
def test_backwards_compatibility(self):
from django.db.models.sql import aggregates as sql_aggregates
class SqlNewSum(sql_aggregates.Aggregate):
sql_function = 'SUM'
class NewSum(Aggregate):
name = 'Sum'
def add_to_query(self, query, alias, col, source, is_summary):
klass = SqlNewSum
aggregate = klass(
col, source=source, is_summary=is_summary, **self.extra)
query.annotations[alias] = aggregate
qs = Author.objects.values('name').annotate(another_age=NewSum('age') + F('age'))
a = qs.get(name="Adrian Holovaty")
self.assertEqual(a['another_age'], 68)
| bsd-3-clause | 3,155,896,192,590,990,300 | 38.635445 | 155 | 0.539189 | false |
AndroidOpenDevelopment/android_external_chromium_org | tools/telemetry/telemetry/value/merge_values.py | 36 | 4295 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
def MergeLikeValuesFromSamePage(all_values):
"""Merges values that measure the same thing on the same page.
A page may end up being measured multiple times, meaning that we may end up
with something like this:
ScalarValue(page1, 'x', 1)
ScalarValue(page2, 'x', 4)
ScalarValue(page1, 'x', 2)
ScalarValue(page2, 'x', 5)
This function will produce:
ListOfScalarValues(page1, 'x', [1, 2])
ListOfScalarValues(page2, 'x', [4, 5])
The workhorse of this code is Value.MergeLikeValuesFromSamePage.
This requires (but assumes) that the values passed in with the same grouping
key pass the Value.IsMergableWith test. If this is not obeyed, the
results will be undefined.
"""
return _MergeLikeValuesCommon(
all_values,
lambda x: (x.page, x.name),
lambda v0, merge_group: v0.MergeLikeValuesFromSamePage(merge_group))
def MergeLikeValuesFromDifferentPages(all_values, group_by_name_suffix=False):
"""Merges values that measure the same thing on different pages.
After using MergeLikeValuesFromSamePage, one still ends up with values from
different pages:
ScalarValue(page1, 'x', 1)
ScalarValue(page1, 'y', 30)
ScalarValue(page2, 'x', 2)
ScalarValue(page2, 'y', 40)
This function will group the values of the same value_name together:
ListOfScalarValues(None, 'x', [1, 2])
ListOfScalarValues(None, 'y', [30, 40])
If group_by_name_suffix is True, then x.z and y.z are considered to be the
same value and are grouped together. If false, then x.z and y.z are
considered different.
The workhorse of this code is Value.MergeLikeValuesFromDifferentPages.
Not all values that go into this function will come out: not every value can
be merged across pages. Values whose MergeLikeValuesFromDifferentPages returns
None will be omitted from the results.
This requires (but assumes) that the values passed in with the same name pass
the Value.IsMergableWith test. If this is not obeyed, the results
will be undefined.
"""
if group_by_name_suffix:
def key(value):
return value.name_suffix
else:
key = lambda x: x.name
return _MergeLikeValuesCommon(
all_values,
key,
lambda v0, merge_group: v0.MergeLikeValuesFromDifferentPages(
merge_group, group_by_name_suffix=group_by_name_suffix))
def _MergeLikeValuesCommon(all_values, key_func, merge_func):
"""Groups all_values by key_func then applies merge_func to the groups.
This takes the all_values list and groups each item in that using the key
  provided by key_func. This produces groups of values with like keys. These
  are then handed to the merge_func to produce a merged value. If merge_func
  produces a non-None return, it is added to the list of returned values.
"""
# When merging, we want to merge values in a consistent order, e.g. so that
# Scalar(1), Scalar(2) predictably produces ListOfScalarValues([1,2]) rather
# than 2,1.
#
# To do this, the values are sorted by key up front. Then, grouping is
# performed using a dictionary, but as new groups are found, the order in
# which they were found is also noted.
#
# Merging is then performed on groups in group-creation-order. This ensures
# that the returned array is in a stable order, group by group.
#
# Within a group, the order is stable because of the original sort.
all_values = list(all_values)
merge_groups = GroupStably(all_values, key_func)
res = []
for merge_group in merge_groups:
v0 = merge_group[0]
vM = merge_func(v0, merge_group)
if vM:
res.append(vM)
return res
def GroupStably(all_values, key_func):
"""Groups an array by key_func, with the groups returned in a stable order.
Returns a list of groups.
"""
all_values = list(all_values)
merge_groups = {}
merge_groups_in_creation_order = []
for value in all_values:
key = key_func(value)
if key not in merge_groups:
merge_groups[key] = []
merge_groups_in_creation_order.append(merge_groups[key])
merge_groups[key].append(value)
return merge_groups_in_creation_order
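# Minimal illustration (an added sketch, not part of the original module):
# GroupStably groups by key while preserving the order in which keys are
# first seen.
if __name__ == '__main__':
  values = [('a', 1), ('b', 2), ('a', 3)]
  groups = GroupStably(values, key_func=lambda v: v[0])
  assert groups == [[('a', 1), ('a', 3)], [('b', 2)]]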
| bsd-3-clause | -3,833,790,354,794,113,000 | 36.025862 | 80 | 0.70943 | false |
lucc/alot | alot/utils/argparse.py | 4 | 4700 | # encoding=utf-8
# Copyright (C) 2011-2012 Patrick Totzke <[email protected]>
# Copyright © 2017 Dylan Baker
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Custom extensions of the argparse module."""
import argparse
import collections
import functools
import itertools
import os
import stat
_TRUEISH = ['true', 'yes', 'on', '1', 't', 'y']
_FALSISH = ['false', 'no', 'off', '0', 'f', 'n']
class ValidationFailed(Exception):
"""Exception raised when Validation fails in a ValidatedStoreAction."""
pass
def _boolean(string):
string = string.lower()
if string in _FALSISH:
return False
elif string in _TRUEISH:
return True
else:
raise ValueError('Option must be one of: {}'.format(
', '.join(itertools.chain(iter(_TRUEISH), iter(_FALSISH)))))
def _path_factory(check):
"""Create a function that checks paths."""
@functools.wraps(check)
def validator(paths):
if isinstance(paths, str):
check(paths)
elif isinstance(paths, collections.Sequence):
for path in paths:
check(path)
else:
            raise Exception('expected either a str or a sequence of str')
return validator
@_path_factory
def require_file(path):
"""Validator that asserts that a file exists.
This fails if there is nothing at the given path.
"""
if not os.path.isfile(path):
raise ValidationFailed('{} is not a valid file.'.format(path))
@_path_factory
def optional_file_like(path):
"""Validator that ensures that if a file exists it regular, a fifo, or a
character device. The file is not required to exist.
This includes character special devices like /dev/null.
"""
if (os.path.exists(path) and not (os.path.isfile(path) or
stat.S_ISFIFO(os.stat(path).st_mode) or
stat.S_ISCHR(os.stat(path).st_mode))):
raise ValidationFailed(
'{} is not a valid file, character device, or fifo.'.format(path))
@_path_factory
def require_dir(path):
"""Validator that asserts that a directory exists.
This fails if there is nothing at the given path.
"""
if not os.path.isdir(path):
raise ValidationFailed('{} is not a valid directory.'.format(path))
def is_int_or_pm(value):
"""Validator to assert that value is '+', '-', or an integer"""
if value not in ['+', '-']:
try:
value = int(value)
except ValueError:
raise ValidationFailed('value must be an integer or "+" or "-".')
return value
class BooleanAction(argparse.Action):
"""Argparse action that can be used to store boolean values."""
def __init__(self, *args, **kwargs):
kwargs['type'] = _boolean
kwargs['metavar'] = 'BOOL'
argparse.Action.__init__(self, *args, **kwargs)
def __call__(self, parser, namespace, values, option_string=None):
setattr(namespace, self.dest, values)
class ValidatedStoreAction(argparse.Action):
"""An action that allows a validation function to be specificied.
The validator keyword must be a function taking exactly one argument, that
argument is a list of strings or the type specified by the type argument.
It must raise ValidationFailed with a message when validation fails.
"""
def __init__(self, option_strings, dest=None, nargs=None, default=None,
required=False, type=None, metavar=None, help=None,
validator=None):
super(ValidatedStoreAction, self).__init__(
option_strings=option_strings, dest=dest, nargs=nargs,
default=default, required=required, metavar=metavar, type=type,
help=help)
self.validator = validator
def __call__(self, parser, namespace, values, option_string=None):
if self.validator:
try:
self.validator(values)
except ValidationFailed as e:
raise argparse.ArgumentError(self, str(e))
setattr(namespace, self.dest, values)
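# Minimal usage sketch (added; not part of the original module): wiring the
# custom actions and a validator into an ArgumentParser.  The option names
# below are illustrative only.
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--config', action=ValidatedStoreAction,
                        validator=require_file)
    parser.add_argument('--debug', action=BooleanAction, default=False)
    print(parser.parse_args(['--debug', 'yes']))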
| gpl-3.0 | -709,833,601,897,656,400 | 31.631944 | 78 | 0.645244 | false |
batxes/4Cin | SHH_WT_models/SHH_WT_models_final_output_0.1_-0.1_11000/SHH_WT_models49537.py | 4 | 17569 | import _surface
import chimera
try:
import chimera.runCommand
except:
pass
from VolumePath import markerset as ms
try:
from VolumePath import Marker_Set, Link
new_marker_set=Marker_Set
except:
from VolumePath import volume_path_dialog
d= volume_path_dialog(True)
new_marker_set= d.new_marker_set
marker_sets={}
surf_sets={}
if "particle_0 geometry" not in marker_sets:
s=new_marker_set('particle_0 geometry')
marker_sets["particle_0 geometry"]=s
s= marker_sets["particle_0 geometry"]
mark=s.place_marker((3032.28, -1721.8, 6078.56), (0.7, 0.7, 0.7), 890.203)
if "particle_1 geometry" not in marker_sets:
s=new_marker_set('particle_1 geometry')
marker_sets["particle_1 geometry"]=s
s= marker_sets["particle_1 geometry"]
mark=s.place_marker((4315.25, -522.175, 6397.99), (0.7, 0.7, 0.7), 792.956)
if "particle_2 geometry" not in marker_sets:
s=new_marker_set('particle_2 geometry')
marker_sets["particle_2 geometry"]=s
s= marker_sets["particle_2 geometry"]
mark=s.place_marker((3771.07, 1154.7, 7222.84), (0.7, 0.7, 0.7), 856.786)
if "particle_3 geometry" not in marker_sets:
s=new_marker_set('particle_3 geometry')
marker_sets["particle_3 geometry"]=s
s= marker_sets["particle_3 geometry"]
mark=s.place_marker((2815.52, -687.847, 8295.85), (0.7, 0.7, 0.7), 963.679)
if "particle_4 geometry" not in marker_sets:
s=new_marker_set('particle_4 geometry')
marker_sets["particle_4 geometry"]=s
s= marker_sets["particle_4 geometry"]
mark=s.place_marker((1738.03, 229.225, 9362.85), (0.7, 0.7, 0.7), 761.442)
if "particle_5 geometry" not in marker_sets:
s=new_marker_set('particle_5 geometry')
marker_sets["particle_5 geometry"]=s
s= marker_sets["particle_5 geometry"]
mark=s.place_marker((1723.3, 2559.05, 8699.62), (0.7, 0.7, 0.7), 961.183)
if "particle_6 geometry" not in marker_sets:
s=new_marker_set('particle_6 geometry')
marker_sets["particle_6 geometry"]=s
s= marker_sets["particle_6 geometry"]
mark=s.place_marker((1315.5, 3968.84, 7784.43), (0.7, 0.7, 0.7), 753.151)
if "particle_7 geometry" not in marker_sets:
s=new_marker_set('particle_7 geometry')
marker_sets["particle_7 geometry"]=s
s= marker_sets["particle_7 geometry"]
mark=s.place_marker((865.716, 3327.72, 8152.58), (1, 0.7, 0), 1098.07)
if "particle_8 geometry" not in marker_sets:
s=new_marker_set('particle_8 geometry')
marker_sets["particle_8 geometry"]=s
s= marker_sets["particle_8 geometry"]
mark=s.place_marker((1577.73, 5314.52, 6556.98), (0.7, 0.7, 0.7), 1010.42)
if "particle_9 geometry" not in marker_sets:
s=new_marker_set('particle_9 geometry')
marker_sets["particle_9 geometry"]=s
s= marker_sets["particle_9 geometry"]
mark=s.place_marker((450.282, 5725.68, 5373.33), (1, 0.7, 0), 821.043)
if "particle_10 geometry" not in marker_sets:
s=new_marker_set('particle_10 geometry')
marker_sets["particle_10 geometry"]=s
s= marker_sets["particle_10 geometry"]
mark=s.place_marker((1697.58, 6949.97, 4626.94), (0.7, 0.7, 0.7), 873.876)
if "particle_11 geometry" not in marker_sets:
s=new_marker_set('particle_11 geometry')
marker_sets["particle_11 geometry"]=s
s= marker_sets["particle_11 geometry"]
mark=s.place_marker((2546.93, 6121.81, 4181.92), (0.7, 0.7, 0.7), 625.532)
if "particle_12 geometry" not in marker_sets:
s=new_marker_set('particle_12 geometry')
marker_sets["particle_12 geometry"]=s
s= marker_sets["particle_12 geometry"]
mark=s.place_marker((3726.71, 5658.99, 3265.37), (0.7, 0.7, 0.7), 880.474)
if "particle_13 geometry" not in marker_sets:
s=new_marker_set('particle_13 geometry')
marker_sets["particle_13 geometry"]=s
s= marker_sets["particle_13 geometry"]
mark=s.place_marker((3029.75, 4278.18, 3164.6), (0.7, 0.7, 0.7), 659.161)
if "particle_14 geometry" not in marker_sets:
s=new_marker_set('particle_14 geometry')
marker_sets["particle_14 geometry"]=s
s= marker_sets["particle_14 geometry"]
mark=s.place_marker((3671.69, 3420.87, 1176.42), (0.7, 0.7, 0.7), 831.745)
if "particle_15 geometry" not in marker_sets:
s=new_marker_set('particle_15 geometry')
marker_sets["particle_15 geometry"]=s
s= marker_sets["particle_15 geometry"]
mark=s.place_marker((6418.48, 2278.04, 283.293), (0.7, 0.7, 0.7), 803.065)
if "particle_16 geometry" not in marker_sets:
s=new_marker_set('particle_16 geometry')
marker_sets["particle_16 geometry"]=s
s= marker_sets["particle_16 geometry"]
mark=s.place_marker((7411.74, 2489.7, 1954.59), (0.7, 0.7, 0.7), 610.262)
if "particle_17 geometry" not in marker_sets:
s=new_marker_set('particle_17 geometry')
marker_sets["particle_17 geometry"]=s
s= marker_sets["particle_17 geometry"]
mark=s.place_marker((7249.42, 3797.84, 1514.44), (0.7, 0.7, 0.7), 741.265)
if "particle_18 geometry" not in marker_sets:
s=new_marker_set('particle_18 geometry')
marker_sets["particle_18 geometry"]=s
s= marker_sets["particle_18 geometry"]
mark=s.place_marker((5931.09, 4703.3, 2032.25), (0.7, 0.7, 0.7), 748.625)
if "particle_19 geometry" not in marker_sets:
s=new_marker_set('particle_19 geometry')
marker_sets["particle_19 geometry"]=s
s= marker_sets["particle_19 geometry"]
mark=s.place_marker((5100.92, 5765.12, 1382.83), (0.7, 0.7, 0.7), 677.181)
if "particle_20 geometry" not in marker_sets:
s=new_marker_set('particle_20 geometry')
marker_sets["particle_20 geometry"]=s
s= marker_sets["particle_20 geometry"]
mark=s.place_marker((4396.96, 6094, 3676.8), (0.7, 0.7, 0.7), 616.015)
if "particle_21 geometry" not in marker_sets:
s=new_marker_set('particle_21 geometry')
marker_sets["particle_21 geometry"]=s
s= marker_sets["particle_21 geometry"]
mark=s.place_marker((5593.44, 5673.33, 2070.09), (0.7, 0.7, 0.7), 653.154)
if "particle_22 geometry" not in marker_sets:
s=new_marker_set('particle_22 geometry')
marker_sets["particle_22 geometry"]=s
s= marker_sets["particle_22 geometry"]
mark=s.place_marker((6397.21, 5492.05, 2696.45), (0.7, 0.7, 0.7), 595.33)
if "particle_23 geometry" not in marker_sets:
s=new_marker_set('particle_23 geometry')
marker_sets["particle_23 geometry"]=s
s= marker_sets["particle_23 geometry"]
mark=s.place_marker((7463.57, 5059.13, 2268), (0.7, 0.7, 0.7), 627.901)
if "particle_24 geometry" not in marker_sets:
s=new_marker_set('particle_24 geometry')
marker_sets["particle_24 geometry"]=s
s= marker_sets["particle_24 geometry"]
mark=s.place_marker((7476.45, 3677.71, 2223.7), (0.7, 0.7, 0.7), 663.941)
if "particle_25 geometry" not in marker_sets:
s=new_marker_set('particle_25 geometry')
marker_sets["particle_25 geometry"]=s
s= marker_sets["particle_25 geometry"]
mark=s.place_marker((7842.64, 2500.94, 1304.12), (0.7, 0.7, 0.7), 663.899)
if "particle_26 geometry" not in marker_sets:
s=new_marker_set('particle_26 geometry')
marker_sets["particle_26 geometry"]=s
s= marker_sets["particle_26 geometry"]
mark=s.place_marker((6863.85, 3504.17, 1816.39), (0.7, 0.7, 0.7), 644.694)
if "particle_27 geometry" not in marker_sets:
s=new_marker_set('particle_27 geometry')
marker_sets["particle_27 geometry"]=s
s= marker_sets["particle_27 geometry"]
mark=s.place_marker((5168.45, 3861.44, 3137.56), (0.7, 0.7, 0.7), 896.802)
if "particle_28 geometry" not in marker_sets:
s=new_marker_set('particle_28 geometry')
marker_sets["particle_28 geometry"]=s
s= marker_sets["particle_28 geometry"]
mark=s.place_marker((5509.37, 5327.31, 3135.47), (0.7, 0.7, 0.7), 576.38)
if "particle_29 geometry" not in marker_sets:
s=new_marker_set('particle_29 geometry')
marker_sets["particle_29 geometry"]=s
s= marker_sets["particle_29 geometry"]
mark=s.place_marker((4881.57, 6396.57, 3562.6), (0.7, 0.7, 0.7), 635.092)
if "particle_30 geometry" not in marker_sets:
s=new_marker_set('particle_30 geometry')
marker_sets["particle_30 geometry"]=s
s= marker_sets["particle_30 geometry"]
mark=s.place_marker((5656.19, 6426.64, 4102.84), (0.7, 0.7, 0.7), 651.505)
if "particle_31 geometry" not in marker_sets:
s=new_marker_set('particle_31 geometry')
marker_sets["particle_31 geometry"]=s
s= marker_sets["particle_31 geometry"]
mark=s.place_marker((3916.95, 6296.49, 4062.5), (0.7, 0.7, 0.7), 718.042)
if "particle_32 geometry" not in marker_sets:
s=new_marker_set('particle_32 geometry')
marker_sets["particle_32 geometry"]=s
s= marker_sets["particle_32 geometry"]
mark=s.place_marker((5006.96, 7250.86, 3049.92), (0.7, 0.7, 0.7), 726.714)
if "particle_33 geometry" not in marker_sets:
s=new_marker_set('particle_33 geometry')
marker_sets["particle_33 geometry"]=s
s= marker_sets["particle_33 geometry"]
mark=s.place_marker((6302.02, 6567.49, 3328.32), (0.7, 0.7, 0.7), 673.585)
if "particle_34 geometry" not in marker_sets:
s=new_marker_set('particle_34 geometry')
marker_sets["particle_34 geometry"]=s
s= marker_sets["particle_34 geometry"]
mark=s.place_marker((6520.98, 5738.81, 2349.47), (0.7, 0.7, 0.7), 598.418)
if "particle_35 geometry" not in marker_sets:
s=new_marker_set('particle_35 geometry')
marker_sets["particle_35 geometry"]=s
s= marker_sets["particle_35 geometry"]
mark=s.place_marker((6911.65, 5539.4, 1041.36), (0.7, 0.7, 0.7), 693.382)
if "particle_36 geometry" not in marker_sets:
s=new_marker_set('particle_36 geometry')
marker_sets["particle_36 geometry"]=s
s= marker_sets["particle_36 geometry"]
mark=s.place_marker((5484.55, 5957.73, 3014.5), (0.7, 0.7, 0.7), 804.038)
if "particle_37 geometry" not in marker_sets:
s=new_marker_set('particle_37 geometry')
marker_sets["particle_37 geometry"]=s
s= marker_sets["particle_37 geometry"]
mark=s.place_marker((7086.4, 6156.56, 2099.03), (0.7, 0.7, 0.7), 816.178)
if "particle_38 geometry" not in marker_sets:
s=new_marker_set('particle_38 geometry')
marker_sets["particle_38 geometry"]=s
s= marker_sets["particle_38 geometry"]
mark=s.place_marker((6845.5, 5367.71, 2749.99), (0.7, 0.7, 0.7), 776.628)
if "particle_39 geometry" not in marker_sets:
s=new_marker_set('particle_39 geometry')
marker_sets["particle_39 geometry"]=s
s= marker_sets["particle_39 geometry"]
mark=s.place_marker((6817.42, 6844.33, 2155.63), (0.7, 0.7, 0.7), 750.656)
if "particle_40 geometry" not in marker_sets:
s=new_marker_set('particle_40 geometry')
marker_sets["particle_40 geometry"]=s
s= marker_sets["particle_40 geometry"]
mark=s.place_marker((6103.36, 7535.44, 3581.73), (0.7, 0.7, 0.7), 709.625)
if "particle_41 geometry" not in marker_sets:
s=new_marker_set('particle_41 geometry')
marker_sets["particle_41 geometry"]=s
s= marker_sets["particle_41 geometry"]
mark=s.place_marker((4946.6, 9028.19, 3996.56), (0.7, 0.7, 0.7), 927.681)
if "particle_42 geometry" not in marker_sets:
s=new_marker_set('particle_42 geometry')
marker_sets["particle_42 geometry"]=s
s= marker_sets["particle_42 geometry"]
mark=s.place_marker((6639.92, 11097.2, 3716.52), (0.7, 0.7, 0.7), 1088.21)
if "particle_43 geometry" not in marker_sets:
s=new_marker_set('particle_43 geometry')
marker_sets["particle_43 geometry"]=s
s= marker_sets["particle_43 geometry"]
mark=s.place_marker((5061.04, 10736.3, 4679.47), (0.7, 0.7, 0.7), 736.147)
if "particle_44 geometry" not in marker_sets:
s=new_marker_set('particle_44 geometry')
marker_sets["particle_44 geometry"]=s
s= marker_sets["particle_44 geometry"]
mark=s.place_marker((5969.56, 9444.49, 4094.28), (0.7, 0.7, 0.7), 861.101)
if "particle_45 geometry" not in marker_sets:
s=new_marker_set('particle_45 geometry')
marker_sets["particle_45 geometry"]=s
s= marker_sets["particle_45 geometry"]
mark=s.place_marker((5136.48, 7942.27, 4966.86), (0.7, 0.7, 0.7), 924.213)
if "particle_46 geometry" not in marker_sets:
s=new_marker_set('particle_46 geometry')
marker_sets["particle_46 geometry"]=s
s= marker_sets["particle_46 geometry"]
mark=s.place_marker((6862.67, 8050.82, 5856.56), (0.7, 0.7, 0.7), 881.828)
if "particle_47 geometry" not in marker_sets:
s=new_marker_set('particle_47 geometry')
marker_sets["particle_47 geometry"]=s
s= marker_sets["particle_47 geometry"]
mark=s.place_marker((7444.58, 9602.84, 4650.8), (0.7, 0.7, 0.7), 927.681)
if "particle_48 geometry" not in marker_sets:
s=new_marker_set('particle_48 geometry')
marker_sets["particle_48 geometry"]=s
s= marker_sets["particle_48 geometry"]
mark=s.place_marker((6817.44, 8735.28, 6192.74), (0.7, 0.7, 0.7), 831.576)
if "particle_49 geometry" not in marker_sets:
s=new_marker_set('particle_49 geometry')
marker_sets["particle_49 geometry"]=s
s= marker_sets["particle_49 geometry"]
mark=s.place_marker((5509.88, 7635.25, 7020.4), (0.7, 0.7, 0.7), 859.494)
if "particle_50 geometry" not in marker_sets:
s=new_marker_set('particle_50 geometry')
marker_sets["particle_50 geometry"]=s
s= marker_sets["particle_50 geometry"]
mark=s.place_marker((5408.56, 8957.27, 7056.19), (0.7, 0.7, 0.7), 704.845)
if "particle_51 geometry" not in marker_sets:
s=new_marker_set('particle_51 geometry')
marker_sets["particle_51 geometry"]=s
s= marker_sets["particle_51 geometry"]
mark=s.place_marker((4357.31, 7682.26, 6723.3), (0.7, 0.7, 0.7), 804.461)
if "particle_52 geometry" not in marker_sets:
s=new_marker_set('particle_52 geometry')
marker_sets["particle_52 geometry"]=s
s= marker_sets["particle_52 geometry"]
mark=s.place_marker((3663.7, 6069.72, 6356.24), (0.7, 0.7, 0.7), 934.111)
if "particle_53 geometry" not in marker_sets:
s=new_marker_set('particle_53 geometry')
marker_sets["particle_53 geometry"]=s
s= marker_sets["particle_53 geometry"]
mark=s.place_marker((2343.96, 6424.38, 7097.16), (0.7, 0.7, 0.7), 988.339)
if "particle_54 geometry" not in marker_sets:
s=new_marker_set('particle_54 geometry')
marker_sets["particle_54 geometry"]=s
s= marker_sets["particle_54 geometry"]
mark=s.place_marker((2558.37, 7005.2, 7540.94), (1, 0.7, 0), 803.7)
if "particle_55 geometry" not in marker_sets:
s=new_marker_set('particle_55 geometry')
marker_sets["particle_55 geometry"]=s
s= marker_sets["particle_55 geometry"]
mark=s.place_marker((4636.69, 6942.12, 7256.51), (0.7, 0.7, 0.7), 812.118)
if "particle_56 geometry" not in marker_sets:
s=new_marker_set('particle_56 geometry')
marker_sets["particle_56 geometry"]=s
s= marker_sets["particle_56 geometry"]
mark=s.place_marker((5266.77, 5370.49, 8644.24), (0.7, 0.7, 0.7), 1177.93)
if "particle_57 geometry" not in marker_sets:
s=new_marker_set('particle_57 geometry')
marker_sets["particle_57 geometry"]=s
s= marker_sets["particle_57 geometry"]
mark=s.place_marker((7420.18, 4575.19, 9658.03), (0.7, 0.7, 0.7), 1038.21)
if "particle_58 geometry" not in marker_sets:
s=new_marker_set('particle_58 geometry')
marker_sets["particle_58 geometry"]=s
s= marker_sets["particle_58 geometry"]
mark=s.place_marker((7963.65, 4412.73, 9836.43), (1, 0.7, 0), 758.016)
if "particle_59 geometry" not in marker_sets:
s=new_marker_set('particle_59 geometry')
marker_sets["particle_59 geometry"]=s
s= marker_sets["particle_59 geometry"]
mark=s.place_marker((7425.73, 4173.8, 10417.4), (0.7, 0.7, 0.7), 824.046)
if "particle_60 geometry" not in marker_sets:
s=new_marker_set('particle_60 geometry')
marker_sets["particle_60 geometry"]=s
s= marker_sets["particle_60 geometry"]
mark=s.place_marker((7248.74, 4818.42, 9881.85), (0.7, 0.7, 0.7), 793.379)
if "particle_61 geometry" not in marker_sets:
s=new_marker_set('particle_61 geometry')
marker_sets["particle_61 geometry"]=s
s= marker_sets["particle_61 geometry"]
mark=s.place_marker((7280.36, 5063.65, 10724.1), (0.7, 0.7, 0.7), 1011.56)
if "particle_62 geometry" not in marker_sets:
s=new_marker_set('particle_62 geometry')
marker_sets["particle_62 geometry"]=s
s= marker_sets["particle_62 geometry"]
mark=s.place_marker((6282.65, 5022.95, 9108.02), (0.7, 0.7, 0.7), 1097.01)
if "particle_63 geometry" not in marker_sets:
s=new_marker_set('particle_63 geometry')
marker_sets["particle_63 geometry"]=s
s= marker_sets["particle_63 geometry"]
mark=s.place_marker((7387.31, 3410.32, 8758.35), (0.7, 0.7, 0.7), 851.626)
if "particle_64 geometry" not in marker_sets:
s=new_marker_set('particle_64 geometry')
marker_sets["particle_64 geometry"]=s
s= marker_sets["particle_64 geometry"]
mark=s.place_marker((8922.9, 2351.29, 8198.01), (0.7, 0.7, 0.7), 869.434)
if "particle_65 geometry" not in marker_sets:
s=new_marker_set('particle_65 geometry')
marker_sets["particle_65 geometry"]=s
s= marker_sets["particle_65 geometry"]
mark=s.place_marker((8851.13, 3830.85, 7135.4), (0.7, 0.7, 0.7), 818.463)
if "particle_66 geometry" not in marker_sets:
s=new_marker_set('particle_66 geometry')
marker_sets["particle_66 geometry"]=s
s= marker_sets["particle_66 geometry"]
mark=s.place_marker((10333.1, 4173.44, 7834.14), (0.7, 0.7, 0.7), 759.539)
if "particle_67 geometry" not in marker_sets:
s=new_marker_set('particle_67 geometry')
marker_sets["particle_67 geometry"]=s
s= marker_sets["particle_67 geometry"]
mark=s.place_marker((8067.2, 4534.08, 8141.43), (0.7, 0.7, 0.7), 1088.59)
if "particle_68 geometry" not in marker_sets:
s=new_marker_set('particle_68 geometry')
marker_sets["particle_68 geometry"]=s
s= marker_sets["particle_68 geometry"]
mark=s.place_marker((9451.53, 3191.92, 9368.49), (0.7, 0.7, 0.7), 822.312)
if "particle_69 geometry" not in marker_sets:
s=new_marker_set('particle_69 geometry')
marker_sets["particle_69 geometry"]=s
s= marker_sets["particle_69 geometry"]
mark=s.place_marker((10462.8, 3662.04, 8203.23), (0.7, 0.7, 0.7), 749.81)
if "particle_70 geometry" not in marker_sets:
s=new_marker_set('particle_70 geometry')
marker_sets["particle_70 geometry"]=s
s= marker_sets["particle_70 geometry"]
mark=s.place_marker((10216.5, 4082.37, 9513.4), (0.7, 0.7, 0.7), 764.488)
for k in surf_sets.keys():
chimera.openModels.add([surf_sets[k]])
| gpl-3.0 | 6,364,557,481,745,716,000 | 46.101877 | 75 | 0.699641 | false |
dimara/synnefo | snf-pithos-backend/pithos/backends/lib/sqlite/permissions.py | 10 | 8486 | # Copyright (C) 2010-2014 GRNET S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from xfeatures import XFeatures
from groups import Groups
from public import Public
from node import Node
from collections import defaultdict
READ = 0
WRITE = 1
class Permissions(XFeatures, Groups, Public, Node):
def __init__(self, **params):
XFeatures.__init__(self, **params)
Groups.__init__(self, **params)
Public.__init__(self, **params)
Node.__init__(self, **params)
def access_grant(self, path, access, members=()):
"""Grant members with access to path.
Members can also be '*' (all),
or some group specified as 'owner:group'."""
if not members:
return
feature = self.xfeature_create(path)
self.feature_setmany(feature, access, members)
def access_set(self, path, permissions):
"""Set permissions for path. The permissions dict
maps 'read', 'write' keys to member lists."""
r = permissions.get('read', [])
w = permissions.get('write', [])
if not r and not w:
self.xfeature_destroy(path)
return
feature = self.xfeature_create(path)
self.feature_clear(feature, READ)
self.feature_clear(feature, WRITE)
if r:
self.feature_setmany(feature, READ, r)
if w:
self.feature_setmany(feature, WRITE, w)
def access_get_for_bulk(self, perms):
"""Get permissions for paths."""
allowed = None
d = defaultdict(list)
for value, feature_id, key in perms:
d[key].append(value)
permissions = d
if READ in permissions:
allowed = 0
permissions['read'] = permissions[READ]
del(permissions[READ])
if WRITE in permissions:
allowed = 1
permissions['write'] = permissions[WRITE]
del(permissions[WRITE])
return (permissions, allowed)
def access_get(self, path):
"""Get permissions for path."""
feature = self.xfeature_get(path)
if not feature:
return {}
permissions = self.feature_dict(feature)
if READ in permissions:
permissions['read'] = permissions[READ]
del(permissions[READ])
if WRITE in permissions:
permissions['write'] = permissions[WRITE]
del(permissions[WRITE])
return permissions
def access_members(self, path):
feature = self.xfeature_get(path)
if not feature:
return []
permissions = self.feature_dict(feature)
members = set()
members.update(permissions.get(READ, []))
members.update(permissions.get(WRITE, []))
for m in set(members):
parts = m.split(':', 1)
if len(parts) != 2:
continue
user, group = parts
members.remove(m)
members.update(self.group_members(user, group))
return members
def access_clear(self, path):
"""Revoke access to path (both permissions and public)."""
self.xfeature_destroy(path)
self.public_unset(path)
def access_clear_bulk(self, paths):
"""Revoke access to path (both permissions and public)."""
self.xfeature_destroy_bulk(paths)
self.public_unset_bulk(paths)
def access_check(self, path, access, member):
"""Return true if the member has this access to the path."""
feature = self.xfeature_get(path)
if not feature:
return False
members = self.feature_get(feature, access)
if member in members or '*' in members:
return True
for owner, group in self.group_parents(member):
if owner + ':' + group in members:
return True
return False
def access_check_bulk(self, paths, member):
rows = None
q = ("select x.path, xvals.value, xvals.feature_id, xvals.key "
"from xfeaturevals xvals join xfeatures x "
"on xvals.feature_id = x.feature_id "
"where x.path in (%s)") % ','.join('?' for _ in paths)
self.execute(q, paths)
rows = self.fetchall()
if rows:
access_check_paths = {}
for path, value, feature_id, key in rows:
try:
access_check_paths[path].append((value, feature_id, key))
except KeyError:
access_check_paths[path] = [(value, feature_id, key)]
return access_check_paths
return None
def access_inherit(self, path):
"""Return the paths influencing the access for path."""
# r = self.xfeature_inherit(path)
# if not r:
# return []
# # Compute valid.
# return [x[0] for x in r if x[0] in valid]
# Only keep path components.
parts = path.rstrip('/').split('/')
valid = []
for i in range(1, len(parts)):
subp = '/'.join(parts[:i + 1])
valid.append(subp)
if subp != path:
valid.append(subp + '/')
return [x for x in valid if self.xfeature_get(x)]
def access_inherit_bulk(self, paths):
"""Return the paths influencing the access for paths."""
# Only keep path components.
valid = []
for path in paths:
parts = path.rstrip('/').split('/')
for i in range(1, len(parts)):
subp = '/'.join(parts[:i + 1])
valid.append(subp)
if subp != path:
valid.append(subp + '/')
valid = self.xfeature_get_bulk(valid)
return [x[1] for x in valid]
def access_list_paths(self, member, prefix=None, include_owned=False,
include_containers=True):
"""Return the list of paths granted to member.
Keyword arguments:
prefix -- return only paths starting with prefix (default None)
include_owned -- return also paths owned by member (default False)
include_containers -- return also container paths owned by member
(default True)
"""
q = ("select distinct path from xfeatures inner join "
" (select distinct feature_id, key from xfeaturevals inner join "
" (select owner || ':' || name as value from groups "
" where member = ? union select ? union select '*') "
" using (value)) "
"using (feature_id)")
p = (member, member)
if prefix:
q += " where "
paths = self.access_inherit(prefix) or [prefix]
q += ' or '.join("path like ? escape '\\'" for _ in paths)
p += tuple(self.escape_like(path) + '%' for path in paths)
self.execute(q, p)
l = [r[0] for r in self.fetchall()]
if include_owned:
node = self.node_lookup(member)
select_containers = "select node from nodes where parent = ? "
q = ("select path from nodes where parent in (%s) " %
select_containers)
args = [node]
if include_containers:
q += ("or node in (%s)" % select_containers)
args += [node]
self.execute(q, args)
l += [r[0] for r in self.fetchall() if r[0] not in l]
return l
def access_list_shared(self, prefix=''):
"""Return the list of shared paths."""
q = "select path from xfeatures where "
paths = self.access_inherit(prefix) or [prefix]
q += ' or '.join("path like ? escape '\\'" for _ in paths)
p = tuple(self.escape_like(path) + '%' for path in paths)
self.execute(q, p)
return [r[0] for r in self.fetchall()]
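# A minimal sketch of how the helpers above combine (the constructor
# parameters and the object path are hypothetical; READ/WRITE are the
# module-level constants defined at the top of this file):
#
#     perms = Permissions(**db_params)
#     perms.access_set('account/container/obj',
#                      {'read': ['*'], 'write': ['alice', 'owner:devs']})
#     perms.access_check('account/container/obj', READ, 'bob')   # True, via '*'
#     perms.access_check('account/container/obj', WRITE, 'bob')  # False
#     perms.access_clear('account/container/obj')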
| gpl-3.0 | 8,818,997,615,140,552,000 | 34.655462 | 79 | 0.56057 | false |
lion-coin/lioncoin | contrib/linearize/linearize.py | 1 | 3354 | #!/usr/bin/python
#
# linearize.py: Construct a linear, no-fork, best version of the blockchain.
#
#
# Copyright (c) 2013 The Lioncoin developers
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
import json
import struct
import re
import base64
import httplib
import sys
ERR_SLEEP = 15
MAX_NONCE = 1000000L
settings = {}
class LioncoinRPC:
OBJID = 1
def __init__(self, host, port, username, password):
authpair = "%s:%s" % (username, password)
self.authhdr = "Basic %s" % (base64.b64encode(authpair))
self.conn = httplib.HTTPConnection(host, port, False, 30)
def rpc(self, method, params=None):
self.OBJID += 1
obj = { 'version' : '1.1',
'method' : method,
'id' : self.OBJID }
if params is None:
obj['params'] = []
else:
obj['params'] = params
self.conn.request('POST', '/', json.dumps(obj),
{ 'Authorization' : self.authhdr,
'Content-type' : 'application/json' })
resp = self.conn.getresponse()
if resp is None:
print "JSON-RPC: no response"
return None
body = resp.read()
resp_obj = json.loads(body)
if resp_obj is None:
print "JSON-RPC: cannot JSON-decode body"
return None
if 'error' in resp_obj and resp_obj['error'] != None:
return resp_obj['error']
if 'result' not in resp_obj:
print "JSON-RPC: no result in object"
return None
return resp_obj['result']
def getblock(self, hash, verbose=True):
return self.rpc('getblock', [hash, verbose])
def getblockhash(self, index):
return self.rpc('getblockhash', [index])
def getblock(rpc, settings, n):
hash = rpc.getblockhash(n)
hexdata = rpc.getblock(hash, False)
data = hexdata.decode('hex')
return data
def get_blocks(settings):
rpc = LioncoinRPC(settings['host'], settings['port'],
settings['rpcuser'], settings['rpcpassword'])
outf = open(settings['output'], 'ab')
for height in xrange(settings['min_height'], settings['max_height']+1):
data = getblock(rpc, settings, height)
outhdr = settings['netmagic']
outhdr += struct.pack("<i", len(data))
outf.write(outhdr)
outf.write(data)
if (height % 1000) == 0:
sys.stdout.write("Wrote block " + str(height) + "\n")
if __name__ == '__main__':
if len(sys.argv) != 2:
print "Usage: linearize.py CONFIG-FILE"
sys.exit(1)
f = open(sys.argv[1])
for line in f:
# skip comment lines
m = re.search('^\s*#', line)
if m:
continue
# parse key=value lines
m = re.search('^(\w+)\s*=\s*(\S.*)$', line)
if m is None:
continue
settings[m.group(1)] = m.group(2)
f.close()
if 'netmagic' not in settings:
settings['netmagic'] = 'f9beb4d9'
if 'output' not in settings:
settings['output'] = 'bootstrap.dat'
if 'host' not in settings:
settings['host'] = '127.0.0.1'
if 'port' not in settings:
settings['port'] = 8332
if 'min_height' not in settings:
settings['min_height'] = 0
if 'max_height' not in settings:
settings['max_height'] = 279000
if 'rpcuser' not in settings or 'rpcpassword' not in settings:
print "Missing username and/or password in cfg file"
sys.exit(1)
settings['netmagic'] = settings['netmagic'].decode('hex')
settings['port'] = int(settings['port'])
settings['min_height'] = int(settings['min_height'])
settings['max_height'] = int(settings['max_height'])
get_blocks(settings)
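# A minimal example of the CONFIG-FILE this script expects (plain key=value
# lines, '#' comments allowed; the credentials are placeholders and the other
# values simply restate the defaults applied above):
#
#     rpcuser=someuser
#     rpcpassword=somepassword
#     host=127.0.0.1
#     port=8332
#     netmagic=f9beb4d9
#     output=bootstrap.dat
#     min_height=0
#     max_height=279000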
| mit | -1,167,754,724,317,037,600 | 24.409091 | 77 | 0.660405 | false |
repotvsupertuga/tvsupertuga.repository | script.module.livestreamer/lib/livestreamer/plugins/aftonbladet.py | 34 | 3009 | """Plugin for swedish news paper Aftonbladet's streaming service."""
import re
from livestreamer.plugin import Plugin
from livestreamer.plugin.api import http, validate
from livestreamer.stream import HDSStream, HLSStream
PLAYLIST_URL_FORMAT = "http://{address}/{path}/{filename}"
STREAM_TYPES = {
"hds": HDSStream.parse_manifest,
"hls": HLSStream.parse_variant_playlist
}
STREAM_FORMATS = ("m3u8", "f4m")
VIDEO_INFO_URL = "http://aftonbladet-play-static-ext.cdn.drvideo.aptoma.no/actions/video"
METADATA_URL = "http://aftonbladet-play-metadata.cdn.drvideo.aptoma.no/video/{0}.json"
_embed_re = re.compile("<iframe src=\"(http://tv.aftonbladet.se[^\"]+)\"")
_aptoma_id_re = re.compile("<div id=\"drvideo\".+data-aptomaId=\"([^\"]+)\"")
_live_re = re.compile("data-isLive=\"true\"")
_url_re = re.compile("http(s)?://(\w+.)?.aftonbladet.se")
_video_schema = validate.Schema(
{
"formats": validate.all(
{
validate.text: {
validate.text: validate.all(
dict,
validate.filter(lambda k,v: k in STREAM_FORMATS),
{
validate.text: [{
"address": validate.text,
"filename": validate.text,
"path": validate.text
}]
},
)
}
},
validate.filter(lambda k,v: k in STREAM_TYPES)
)
}
)
class Aftonbladet(Plugin):
@classmethod
def can_handle_url(cls, url):
return _url_re.match(url)
def _get_streams(self):
res = http.get(self.url)
match = _embed_re.search(res.text)
if match:
res = http.get(match.group(1))
match = _aptoma_id_re.search(res.text)
if not match:
return
aptoma_id = match.group(1)
if not _live_re.search(res.text):
res = http.get(METADATA_URL.format(aptoma_id))
metadata = http.json(res)
video_id = metadata["videoId"]
else:
video_id = aptoma_id
res = http.get(VIDEO_INFO_URL, params=dict(id=video_id))
video = http.json(res, schema=_video_schema)
streams = {}
for fmt, providers in video["formats"].items():
for name, provider in providers.items():
for ext, playlists in provider.items():
for playlist in playlists:
url = PLAYLIST_URL_FORMAT.format(**playlist)
parser = STREAM_TYPES[fmt]
try:
streams.update(parser(self.session, url))
except IOError as err:
self.logger.error("Failed to extract {0} streams: {1}",
fmt.upper(), err)
return streams
__plugin__ = Aftonbladet
| gpl-2.0 | -5,171,847,559,245,612,000 | 33.193182 | 89 | 0.514457 | false |
mbudiu-vmw/hiero | bin/stop.py | 1 | 1491 | #!/usr/bin/env python
# We attempted to make this program work with both python2 and python3
"""This Python program stops the Hillview service on the machines specified in the
configuration file."""
# pylint: disable=invalid-name
from argparse import ArgumentParser
from hillviewCommon import ClusterConfiguration, get_config, get_logger
logger = get_logger("stop")
def stop_webserver(config):
"""Stops the Hillview web server"""
assert isinstance(config, ClusterConfiguration)
rh = config.get_webserver()
message = "Stopping web server on " + str(rh)
logger.info(message)
rh.run_remote_shell_command(config.service_folder + "/hillview-webserver-manager.sh stop")
def stop_worker(config, rh):
"""Stops a Hillview worker service on a remote machine"""
rh.run_remote_shell_command(config.service_folder + "/hillview-worker-manager.sh stop")
def stop_aggregator(config, rh):
"""Stops a Hillview aggregator service on a remote machine"""
rh.run_remote_shell_command(config.service_folder + "/hillview-aggregator-manager.sh stop")
def main():
"""Main function"""
parser = ArgumentParser()
parser.add_argument("config", help="json cluster configuration file")
args = parser.parse_args()
config = get_config(parser, args)
stop_webserver(config)
config.run_on_all_workers(lambda rh: stop_worker(config, rh))
config.run_on_all_aggregators(lambda rh: stop_aggregator(config, rh))
if __name__ == "__main__":
main()
| apache-2.0 | -4,786,795,724,790,380,000 | 36.275 | 95 | 0.719651 | false |
eeshangarg/oh-mainline | vendor/packages/gdata/tests/gdata_tests/marketplace/live_client_test.py | 39 | 5064 | #!/usr/bin/python
#
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This module is used for version 2 of the Google Data APIs.
# These tests attempt to connect to Google servers.
__author__ = 'Alexandre Vivien <[email protected]>'
import unittest
import gdata.client
import gdata.data
import gdata.gauth
import gdata.marketplace.client
import gdata.marketplace.data
import gdata.test_config as conf
conf.options.register_option(conf.APPS_DOMAIN_OPTION)
class LicensingClientTest(unittest.TestCase):
def __init__(self, *args, **kwargs):
unittest.TestCase.__init__(self, *args, **kwargs)
gdata.test_config.options.register(
'appsid',
'Enter the Application ID of your Marketplace application',
description='The Application ID of your Marketplace application')
gdata.test_config.options.register(
'appsconsumerkey',
'Enter the Consumer Key of your Marketplace application',
description='The Consumer Key of your Marketplace application')
gdata.test_config.options.register(
'appsconsumersecret',
'Enter the Consumer Secret of your Marketplace application',
description='The Consumer Secret of your Marketplace application')
def setUp(self):
self.client = gdata.marketplace.client.LicensingClient(domain='example.com')
if conf.options.get_value('runlive') == 'true':
self.client = gdata.marketplace.client.LicensingClient(domain=conf.options.get_value('appsdomain'))
conf.configure_client(self.client, 'LicensingClientTest', self.client.auth_service, True)
self.client.auth_token = gdata.gauth.TwoLeggedOAuthHmacToken(conf.options.get_value('appsconsumerkey'), conf.options.get_value('appsconsumersecret'), '')
self.client.source = 'GData-Python-Client-Test'
self.client.account_type='HOSTED'
self.client.http_client.debug = True
self.app_id = conf.options.get_value('appsid')
def tearDown(self):
conf.close_client(self.client)
def testGetLicense(self):
if not conf.options.get_value('runlive') == 'true':
return
# Either load the recording or prepare to make a live request.
conf.configure_cache(self.client, 'testGetLicense')
fetched_feed = self.client.GetLicense(app_id=self.app_id)
self.assertTrue(isinstance(fetched_feed, gdata.marketplace.data.LicenseFeed))
self.assertTrue(isinstance(fetched_feed.entry[0], gdata.marketplace.data.LicenseEntry))
entity = fetched_feed.entry[0].content.entity
self.assertTrue(entity is not None)
self.assertNotEqual(entity.id, '')
self.assertNotEqual(entity.enabled, '')
self.assertNotEqual(entity.customer_id, '')
self.assertNotEqual(entity.state, '')
def testGetLicenseNotifications(self):
if not conf.options.get_value('runlive') == 'true':
return
# Either load the recording or prepare to make a live request.
conf.configure_cache(self.client, 'testGetLicenseNotifications')
fetched_feed = self.client.GetLicenseNotifications(app_id=self.app_id, max_results=2)
self.assertTrue(isinstance(fetched_feed, gdata.marketplace.data.LicenseFeed))
self.assertEqual(len(fetched_feed.entry), 2)
for entry in fetched_feed.entry:
entity = entry.content.entity
self.assertTrue(entity is not None)
self.assertNotEqual(entity.id, '')
self.assertNotEqual(entity.domain_name, '')
self.assertNotEqual(entity.installer_email, '')
self.assertNotEqual(entity.tos_acceptance_time, '')
self.assertNotEqual(entity.last_change_time, '')
self.assertNotEqual(entity.product_config_id, '')
self.assertNotEqual(entity.state, '')
next_uri = fetched_feed.find_next_link()
fetched_feed_next = self.client.GetLicenseNotifications(uri=next_uri)
self.assertTrue(isinstance(fetched_feed_next, gdata.marketplace.data.LicenseFeed))
self.assertTrue(len(fetched_feed_next.entry) <= 2)
for entry in fetched_feed_next.entry:
entity = entry.content.entity
self.assertTrue(entity is not None)
self.assertNotEqual(entity.id, '')
self.assertNotEqual(entity.domain_name, '')
self.assertNotEqual(entity.installer_email, '')
self.assertNotEqual(entity.tos_acceptance_time, '')
self.assertNotEqual(entity.last_change_time, '')
self.assertNotEqual(entity.product_config_id, '')
self.assertNotEqual(entity.state, '')
def suite():
return conf.build_suite([LicensingClientTest])
if __name__ == '__main__':
unittest.TextTestRunner().run(suite())
| agpl-3.0 | -2,235,389,325,386,371,600 | 39.190476 | 159 | 0.724921 | false |
anoopcs9/samba | python/samba/tests/blackbox/samba_dnsupdate.py | 7 | 2592 | # Blackbox tests for "samba_dnsupdate" command
# Copyright (C) Kamen Mazdrashki <[email protected]> 2011
# Copyright (C) Andrew Bartlett <[email protected]> 2015
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import samba.tests
class SambaDnsUpdateTests(samba.tests.BlackboxTestCase):
"""Blackbox test case for samba_dnsupdate."""
def setUp(self):
self.server_ip = samba.tests.env_get_var_value("DNS_SERVER_IP")
super(SambaDnsUpdateTests, self).setUp()
try:
out = self.check_output("samba_dnsupdate --verbose")
self.assertTrue("Looking for DNS entry" in out, out)
except samba.tests.BlackboxProcessError:
pass
def test_samba_dnsupate_no_change(self):
out = self.check_output("samba_dnsupdate --verbose")
self.assertTrue("No DNS updates needed" in out, out)
def test_samba_dnsupate_set_ip(self):
try:
out = self.check_output("samba_dnsupdate --verbose --current-ip=10.0.0.1")
self.assertTrue(" DNS updates and" in out, out)
self.assertTrue(" DNS deletes needed" in out, out)
except samba.tests.BlackboxProcessError:
pass
try:
out = self.check_output("samba_dnsupdate --verbose --use-nsupdate --current-ip=10.0.0.1")
except samba.tests.BlackboxProcessError as e:
self.fail("Error calling samba_dnsupdate: %s" % e)
self.assertTrue("No DNS updates needed" in out, out)
try:
rpc_out = self.check_output("samba_dnsupdate --verbose --use-samba-tool --rpc-server-ip=%s" % self.server_ip)
except samba.tests.BlackboxProcessError as e:
self.fail("Error calling samba_dnsupdate: %s" % e)
self.assertTrue(" DNS updates and" in rpc_out, rpc_out)
self.assertTrue(" DNS deletes needed" in rpc_out, rpc_out)
out = self.check_output("samba_dnsupdate --verbose")
self.assertTrue("No DNS updates needed" in out, out + rpc_out)
| gpl-3.0 | 4,249,348,064,831,128,000 | 42.932203 | 121 | 0.671682 | false |
vlegoff/tsunami | src/primaires/temps/constantes.py | 1 | 2157 | # -*-coding:Utf-8 -*
# Copyright (c) 2010-2017 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Ce fichier contient les constantes du module temps."""
NOMS_HEURES = {
0: "minuit {minutes}",
1: "une heure {minutes} {moment}",
2: "deux heures {minutes} {moment}",
3: "trois heures {minutes} {moment}",
4: "quatre heures {minutes} {moment}",
5: "cinq heures {minutes} {moment}",
6: "six heures {minutes} {moment}",
7: "sept heures {minutes} {moment}",
8: "huit heures {minutes} {moment}",
9: "neuf heures {minutes} {moment}",
10: "dix heures {minutes} {moment}",
11: "onze heures {minutes} {moment}",
12: "midi {masc_minutes}",
}
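# Illustrative use of the templates above (the minute/period words are
# produced elsewhere in the temps module; the values here are made up):
#
#     NOMS_HEURES[3].format(minutes="dix", moment="du matin")
#     # -> "trois heures dix du matin"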
| bsd-3-clause | 3,309,715,166,384,590,000 | 44.893617 | 79 | 0.729254 | false |
FreddyPoGo/Monocle | web_sanic.py | 1 | 7721 | #!/usr/bin/env python3
from pkg_resources import resource_filename
from time import time
from sanic import Sanic
from sanic.response import html, json
from jinja2 import Environment, PackageLoader, Markup
from asyncpg import create_pool
from monocle import sanitized as conf
from monocle.bounds import center
from monocle.names import DAMAGE, MOVES, POKEMON
from monocle.web_utils import get_scan_coords, get_worker_markers, Workers, get_args
env = Environment(loader=PackageLoader('monocle', 'templates'))
app = Sanic(__name__)
app.static('/static', resource_filename('monocle', 'static'))
def social_links():
social_links = ''
if conf.FB_PAGE_ID:
social_links = '<a class="map_btn facebook-icon" target="_blank" href="https://www.facebook.com/' + conf.FB_PAGE_ID + '"></a>'
if conf.TWITTER_SCREEN_NAME:
social_links += '<a class="map_btn twitter-icon" target="_blank" href="https://www.twitter.com/' + conf.TWITTER_SCREEN_NAME + '"></a>'
if conf.DISCORD_INVITE_ID:
social_links += '<a class="map_btn discord-icon" target="_blank" href="https://discord.gg/' + conf.DISCORD_INVITE_ID + '"></a>'
if conf.TELEGRAM_USERNAME:
social_links += '<a class="map_btn telegram-icon" target="_blank" href="https://www.telegram.me/' + conf.TELEGRAM_USERNAME + '"></a>'
return Markup(social_links)
def render_map():
css_js = ''
if conf.LOAD_CUSTOM_CSS_FILE:
css_js = '<link rel="stylesheet" href="static/css/custom.css">'
if conf.LOAD_CUSTOM_JS_FILE:
css_js += '<script type="text/javascript" src="static/js/custom.js"></script>'
js_vars = Markup(
"_defaultSettings['FIXED_OPACITY'] = '{:d}'; "
"_defaultSettings['SHOW_TIMER'] = '{:d}'; "
"_defaultSettings['TRASH_IDS'] = [{}]; ".format(conf.FIXED_OPACITY, conf.SHOW_TIMER, ', '.join(str(p_id) for p_id in conf.TRASH_IDS)))
template = env.get_template('custom.html' if conf.LOAD_CUSTOM_HTML_FILE else 'newmap.html')
return html(template.render(
area_name=conf.AREA_NAME,
map_center=center,
map_provider_url=conf.MAP_PROVIDER_URL,
map_provider_attribution=conf.MAP_PROVIDER_ATTRIBUTION,
social_links=social_links(),
init_js_vars=js_vars,
extra_css_js=Markup(css_js)
))
def render_worker_map():
template = env.get_template('workersmap.html')
return html(template.render(
area_name=conf.AREA_NAME,
map_center=center,
map_provider_url=conf.MAP_PROVIDER_URL,
map_provider_attribution=conf.MAP_PROVIDER_ATTRIBUTION,
social_links=social_links()
))
@app.get('/')
async def fullmap(request, html_map=render_map()):
return html_map
if conf.MAP_WORKERS:
workers = Workers()
@app.get('/workers_data')
async def workers_data(request):
return json(get_worker_markers(workers))
@app.get('/workers')
async def workers_map(request, html_map=render_worker_map()):
return html_map
del env
@app.get('/data')
async def pokemon_data(request, _time=time):
last_id = request.args.get('last_id', 0)
async with app.pool.acquire() as conn:
results = await conn.fetch('''
SELECT id, pokemon_id, expire_timestamp, lat, lon, atk_iv, def_iv, sta_iv, move_1, move_2, cp, level, form
FROM sightings
WHERE expire_timestamp > {} AND id > {}
'''.format(_time(), last_id))
return json(list(map(sighting_to_marker, results)))
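# Each element of the JSON list returned above looks roughly like this (values
# are made up; the IV/move/CP fields only appear when move_1 is set - see
# sighting_to_marker below):
#
#     {"id": "pokemon-123", "name": "Pidgey", "pokemon_id": 16, "trash": false,
#      "lat": 51.5074, "lon": -0.1278, "expires_at": 1503000000}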
@app.get('/gym_data')
async def gym_data(request, names=POKEMON, _str=str):
async with app.pool.acquire() as conn:
results = await conn.fetch('''
SELECT
fs.fort_id,
fs.id,
fs.team,
fs.guard_pokemon_id,
fs.last_modified,
fs.in_battle,
fs.slots_available,
fs.time_ocuppied,
f.lat,
f.lon
FROM fort_sightings fs
JOIN forts f ON f.id=fs.fort_id
WHERE (fs.fort_id, fs.last_modified) IN (
SELECT fort_id, MAX(last_modified)
FROM fort_sightings
GROUP BY fort_id
)
''')
return json([{
'id': 'fort-' + _str(fort['fort_id']),
'sighting_id': fort['id'],
'pokemon_id': fort['guard_pokemon_id'],
'pokemon_name': names[fort['guard_pokemon_id']],
'team': fort['team'],
'in_battle': fort['in_battle'],
'slots_available': fort['slots_available'],
'time_ocuppied': fort['time_ocuppied'],
'lat': fort['lat'],
'lon': fort['lon']
} for fort in results])
@app.get('/raid_data')
async def raid_data(request, names=POKEMON, _str=str, _time=time):
async with app.pool.acquire() as conn:
results = await conn.fetch('''
SELECT
f.id,
ri.raid_start,
ri.raid_end,
ri.pokemon_id,
ri.cp,
ri.move_1,
ri.move_2,
ri.raid_level
FROM forts f
JOIN raid_info ri ON ri.fort_id = f.id
WHERE ri.raid_start >= {}
OR ri.raid_end >= {}
'''.format(_time(), _time()))
return json(list(map(raid_to_marker, results)))
@app.get('/spawnpoints')
async def spawn_points(request, _dict=dict):
async with app.pool.acquire() as conn:
results = await conn.fetch('SELECT spawn_id, despawn_time, lat, lon, duration FROM spawnpoints')
return json([_dict(x) for x in results])
@app.get('/pokestops')
async def get_pokestops(request, _dict=dict):
async with app.pool.acquire() as conn:
results = await conn.fetch('SELECT external_id, lat, lon FROM pokestops')
return json([_dict(x) for x in results])
@app.get('/scan_coords')
async def scan_coords(request):
return json(get_scan_coords())
def sighting_to_marker(pokemon, names=POKEMON, moves=MOVES, damage=DAMAGE, trash=conf.TRASH_IDS, _str=str):
pokemon_id = pokemon['pokemon_id']
marker = {
'id': 'pokemon-' + _str(pokemon['id']),
'trash': pokemon_id in trash,
'name': names[pokemon_id],
'pokemon_id': pokemon_id,
'lat': pokemon['lat'],
'lon': pokemon['lon'],
'expires_at': pokemon['expire_timestamp'],
}
move1 = pokemon['move_1']
if pokemon['form']:
marker['form'] = chr(pokemon['form']+64)
if move1:
move2 = pokemon['move_2']
marker['atk'] = pokemon['atk_iv']
marker['def'] = pokemon['def_iv']
marker['sta'] = pokemon['sta_iv']
marker['move1'] = moves[move1]
marker['move2'] = moves[move2]
marker['damage1'] = damage[move1]
marker['damage2'] = damage[move2]
marker['cp'] = pokemon['cp']
marker['level'] = pokemon['level']
return marker
def raid_to_marker(raid, names=POKEMON, moves=MOVES):
marker = {
'fort_id': raid['id'],
'raid_start': raid['raid_start'],
'raid_end': raid['raid_end'],
'raid_level': raid['raid_level']
}
pokemon_id = raid['pokemon_id']
if pokemon_id:
marker['pokemon_id'] = raid['pokemon_id']
marker['pokemon_name'] = names[raid['pokemon_id']]
marker['cp'] = raid['cp']
marker['move_1'] = moves[raid['move_1']]
marker['move_2'] = moves[raid['move_2']]
return marker
@app.listener('before_server_start')
async def register_db(app, loop):
app.pool = await create_pool(**conf.DB, loop=loop)
def main():
args = get_args()
app.run(debug=args.debug, host=args.host, port=args.port)
if __name__ == '__main__':
main()
| mit | -8,481,301,761,078,509,000 | 31.716102 | 142 | 0.586971 | false |
mortcanty/earthengine | src/Crypto/SelfTest/Hash/test_SHA1.py | 5 | 2300 | # -*- coding: utf-8 -*-
#
# SelfTest/Hash/SHA1.py: Self-test for the SHA-1 hash function
#
# Written in 2008 by Dwayne C. Litzenberger <[email protected]>
#
# ===================================================================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================
"""Self-test suite for Crypto.Hash.SHA"""
__revision__ = "$Id$"
from Crypto.Util.py3compat import *
# Test vectors from various sources
# This is a list of (expected_result, input[, description]) tuples.
test_data = [
# FIPS PUB 180-2, A.1 - "One-Block Message"
('a9993e364706816aba3e25717850c26c9cd0d89d', 'abc'),
# FIPS PUB 180-2, A.2 - "Multi-Block Message"
('84983e441c3bd26ebaae4aa1f95129e5e54670f1',
'abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq'),
# FIPS PUB 180-2, A.3 - "Long Message"
# ('34aa973cd4c4daa4f61eeb2bdbad27316534016f',
# 'a' * 10**6,
# '"a" * 10**6'),
# RFC 3174: Section 7.3, "TEST4" (multiple of 512 bits)
('dea356a2cddd90c7a7ecedc5ebb563934f460452',
'01234567' * 80,
'"01234567" * 80'),
]
def get_tests(config={}):
from Crypto.Hash import SHA1
from common import make_hash_tests
return make_hash_tests(SHA1, "SHA1", test_data,
digest_size=20,
oid="1.3.14.3.2.26")
if __name__ == '__main__':
import unittest
suite = lambda: unittest.TestSuite(get_tests())
unittest.main(defaultTest='suite')
# vim:set ts=4 sw=4 sts=4 expandtab:
| mit | -7,136,943,835,792,963,000 | 34.9375 | 69 | 0.655217 | false |
tuomas777/linkedevents | events/attic/models.py | 3 | 1359 | # Assumed imports for this archived snippet (BaseModel is defined elsewhere in
# the project; the Django/reversion imports below follow from the names used):
#
#     from django.contrib.auth.models import User
#     from django.db import models
#     from django.utils.translation import ugettext_lazy as _
#     import reversion

class Person(BaseModel):
description = models.TextField(blank=True)
family_name = models.CharField(max_length=255, null=True, blank=True)
email = models.EmailField(null=True, blank=True)
creator = models.ForeignKey('self', null=True, blank=True,
related_name='person_creators')
editor = models.ForeignKey('self', null=True, blank=True,
related_name='person_editors')
# Custom fields
member_of = models.ForeignKey('Organization', null=True, blank=True)
user = models.ForeignKey(User, null=True, blank=True)
class Meta:
verbose_name = _('person')
verbose_name_plural = _('persons')
reversion.register(Person)
class Organization(BaseModel):
description = models.TextField(blank=True)
base_IRI = models.CharField(max_length=200, null=True, blank=True)
compact_IRI_name = models.CharField(max_length=200, null=True, blank=True)
creator = models.ForeignKey(Person, null=True, blank=True,
related_name='organization_creators')
editor = models.ForeignKey(Person, null=True, blank=True,
related_name='organization_editors')
class Meta:
verbose_name = _('organization')
verbose_name_plural = _('organizations')
reversion.register(Organization)
| bsd-3-clause | 6,320,756,399,223,444,000 | 38.970588 | 78 | 0.643856 | false |
rafaeltg/Deep-Learning-Algorithms | pydl/models/linear/ridge.py | 2 | 1238 | import numpy as np
import sklearn.linear_model as sk_lin
from .base import LinearMixin
class Ridge(sk_lin.Ridge, LinearMixin):
def __init__(self, name='ridge', **kwargs):
super().__init__(**kwargs)
self.name = name
def get_config(self):
config = {
'name': self.name,
'alpha': self.alpha,
'max_iter': self.max_iter,
'tol': self.tol,
'fit_intercept': self.fit_intercept,
'solver': self.solver
}
if self.built:
config['coef_'] = self.coef_.flatten().tolist()
config['intercept_'] = self.intercept_.flatten().tolist() if isinstance(self.intercept_, np.ndarray) else self.intercept_
return config
@classmethod
def from_config(cls, config: dict):
coef = config.pop('coef_', None)
if coef is not None:
coef = np.asarray(coef)
intercept = config.pop('intercept_', None)
c = cls(**config)
if coef is not None:
c.__dict__['coef_'] = coef
if intercept is not None:
c.__dict__['intercept_'] = intercept
return c
@property
def built(self):
return hasattr(self, 'coef_')
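# A minimal round-trip sketch (the training data below is random/made up):
#
#     import numpy as np
#     m = Ridge(alpha=0.5)
#     m.fit(np.random.rand(20, 3), np.random.rand(20))
#     cfg = m.get_config()          # plain dict, includes coef_/intercept_
#     m2 = Ridge.from_config(cfg)   # rebuilt model, ready for m2.predict(...)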
| mit | -3,677,710,417,318,497,300 | 24.791667 | 133 | 0.542003 | false |
WindCanDie/spark | dev/github_jira_sync.py | 31 | 5298 | #!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Utility for updating JIRA's with information about Github pull requests
import json
import os
import re
import sys
import urllib2
try:
import jira.client
except ImportError:
print("This tool requires the jira-python library")
print("Install using 'sudo pip install jira'")
sys.exit(-1)
# User facing configs
GITHUB_API_BASE = os.environ.get("GITHUB_API_BASE", "https://api.github.com/repos/apache/spark")
JIRA_PROJECT_NAME = os.environ.get("JIRA_PROJECT_NAME", "SPARK")
JIRA_API_BASE = os.environ.get("JIRA_API_BASE", "https://issues.apache.org/jira")
JIRA_USERNAME = os.environ.get("JIRA_USERNAME", "apachespark")
JIRA_PASSWORD = os.environ.get("JIRA_PASSWORD", "XXX")
# Maximum number of updates to perform in one run
MAX_UPDATES = int(os.environ.get("MAX_UPDATES", "100000"))
# Cut-off for oldest PR on which to comment. Useful for avoiding
# "notification overload" when running for the first time.
MIN_COMMENT_PR = int(os.environ.get("MIN_COMMENT_PR", "1496"))
# File used as an optimization to store maximum previously seen PR
# Used mostly because accessing ASF JIRA is slow, so we want to avoid checking
# the state of JIRA's that are tied to PR's we've already looked at.
MAX_FILE = ".github-jira-max"
def get_url(url):
try:
return urllib2.urlopen(url)
except urllib2.HTTPError:
print("Unable to fetch URL, exiting: %s" % url)
sys.exit(-1)
def get_json(urllib_response):
return json.load(urllib_response)
# Return a list of (JIRA id, JSON dict) tuples:
# e.g. [('SPARK-1234', {.. json ..}), ('SPARK-5687', {.. json ..})}
def get_jira_prs():
result = []
has_next_page = True
page_num = 0
while has_next_page:
page = get_url(GITHUB_API_BASE + "/pulls?page=%s&per_page=100" % page_num)
page_json = get_json(page)
for pull in page_json:
jiras = re.findall(JIRA_PROJECT_NAME + "-[0-9]{4,5}", pull['title'])
for jira in jiras:
result = result + [(jira, pull)]
# Check if there is another page
link_header = filter(lambda k: k.startswith("Link"), page.info().headers)[0]
if "next" not in link_header:
has_next_page = False
else:
page_num += 1
return result
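# get_jira_prs() yields (issue key, pull-request JSON) tuples such as the
# following (values are illustrative; only the fields used below are shown):
#
#     ('SPARK-1234', {'number': 9999, 'title': '[SPARK-1234] Fix something',
#                     'html_url': 'https://github.com/apache/spark/pull/9999',
#                     'user': {'login': 'someuser'}})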
def set_max_pr(max_val):
f = open(MAX_FILE, 'w')
f.write("%s" % max_val)
f.close()
print("Writing largest PR number seen: %s" % max_val)
def get_max_pr():
if os.path.exists(MAX_FILE):
result = int(open(MAX_FILE, 'r').read())
print("Read largest PR number previously seen: %s" % result)
return result
else:
return 0
jira_client = jira.client.JIRA({'server': JIRA_API_BASE},
basic_auth=(JIRA_USERNAME, JIRA_PASSWORD))
jira_prs = get_jira_prs()
previous_max = get_max_pr()
print("Retrieved %s JIRA PR's from Github" % len(jira_prs))
jira_prs = [(k, v) for k, v in jira_prs if int(v['number']) > previous_max]
print("%s PR's remain after excluding visted ones" % len(jira_prs))
num_updates = 0
considered = []
for issue, pr in sorted(jira_prs, key=lambda kv: int(kv[1]['number'])):
if num_updates >= MAX_UPDATES:
break
pr_num = int(pr['number'])
print("Checking issue %s" % issue)
considered = considered + [pr_num]
url = pr['html_url']
title = "[Github] Pull Request #%s (%s)" % (pr['number'], pr['user']['login'])
try:
existing_links = map(lambda l: l.raw['object']['url'], jira_client.remote_links(issue))
except:
print("Failure reading JIRA %s (does it exist?)" % issue)
print(sys.exc_info()[0])
continue
if url in existing_links:
continue
icon = {"title": "Pull request #%s" % pr['number'],
"url16x16": "https://assets-cdn.github.com/favicon.ico"}
destination = {"title": title, "url": url, "icon": icon}
# For all possible fields see:
# https://developer.atlassian.com/display/JIRADEV/Fields+in+Remote+Issue+Links
# application = {"name": "Github pull requests", "type": "org.apache.spark.jira.github"}
jira_client.add_remote_link(issue, destination)
comment = "User '%s' has created a pull request for this issue:" % pr['user']['login']
comment += "\n%s" % pr['html_url']
if pr_num >= MIN_COMMENT_PR:
jira_client.add_comment(issue, comment)
print("Added link %s <-> PR #%s" % (issue, pr['number']))
num_updates += 1
if len(considered) > 0:
set_max_pr(max(considered))
| apache-2.0 | -6,562,862,029,785,360,000 | 33.627451 | 96 | 0.649868 | false |
Clarissa-Bot/Clarissa | libs/__init__.py | 1 | 1623 | # Enfold Enterprise Server
# Copyright(C), 2004-5, Enfold Systems, LLC - ALL RIGHTS RESERVED
# Enfold Systems, LLC
# 4617 Montrose Blvd., Suite C215
# Houston, Texas 77006 USA
# p. +1 713.942.2377 | f. +1 832.201.8856
# www.enfoldsystems.com
# [email protected]
import sys
if "win32" in sys.platform:
# Use win32process from pywin32
import win32api
import win32con
import win32process
import pywintypes
def _get_handle_for_pid(pid, ro=True):
if pid == 0:
pHandle = win32process.GetCurrentProcess()
else:
flags = win32con.PROCESS_QUERY_INFORMATION
if not ro:
flags |= win32con.PROCESS_SET_INFORMATION
try:
pHandle = win32api.OpenProcess(flags, 0, pid)
except pywintypes.error as e:
raise e
return pHandle
def set_process_affinity_mask(pid, value):
pHandle = _get_handle_for_pid(pid, False)
current = win32process.GetProcessAffinityMask(pHandle)[0]
try:
win32process.SetProcessAffinityMask(pHandle, value)
except win32process.error as e:
raise e
return current
def get_process_affinity_mask(pid):
pHandle = _get_handle_for_pid(pid)
try:
return win32process.GetProcessAffinityMask(pHandle)[0]
except win32process.error as e:
raise e
elif sys.platform.startswith('linux'):
pass
else:
def set_process_affinity_mask(pid, value):
raise NotImplementedError
def get_process_affinity_mask(pid):
raise NotImplementedError
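# A minimal sketch (effective on Windows only; pid 0 refers to the current
# process and the masks shown are made-up examples):
#
#     original = get_process_affinity_mask(0)   # e.g. 0xF on a 4-core machine
#     set_process_affinity_mask(0, 0x1)         # pin the current process to CPU 0
#     set_process_affinity_mask(0, original)    # restore the previous mask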
| apache-2.0 | -6,839,715,249,462,650,000 | 27.473684 | 66 | 0.63093 | false |
ljschumacher/tierpsy-tracker | tierpsy/processing/run_multi_cmd.py | 1 | 6950 | # -*- coding: utf-8 -*-
"""
Created on Fri Oct 16 23:43:30 2015
@author: ajaver
"""
#import os
import sys
import os
import time
import subprocess as sp
from functools import partial
from io import StringIO
from tierpsy.helper.misc import TimeCounter, ReadEnqueue
GUI_CLEAR_SIGNAL = '+++++++++++++++++++++++++++++++++++++++++++++++++'
class CapturingOutput(list):
'''modified from http://stackoverflow.com/questions/1218933/can-i-redirect-the-stdout-in-python-into-some-sort-of-string-buffer'''
def __enter__(self):
self._stdout = sys.stdout
sys.stdout = self._stringio = StringIO()
return self
def __exit__(self, *args):
self.extend([x + '\n' for x in self._stringio.getvalue().splitlines()])
sys.stdout = self._stdout
ON_POSIX = 'posix' in sys.builtin_module_names
class StartProcess():
def __init__(self, cmd, local_obj='', is_debug = True):
self.is_debug = is_debug
self.output = ['Started\n']
if local_obj:
with CapturingOutput() as output:
if cmd[0] == sys.executable:
cmd = cmd[1:]
self.obj_cmd = local_obj(cmd)
self.cmd = self.obj_cmd.start()
self.output += output
else:
self.obj_cmd = ''
self.cmd = cmd
self.output = ['Started\n']
self.output += [cmdlist2str(self.cmd) + '\n']
self.proc = sp.Popen(self.cmd, stdout=sp.PIPE, stderr=sp.PIPE,
bufsize=1, close_fds=ON_POSIX)
        self.buf_reader = ReadEnqueue(self.proc.stdout)
def read_buff(self):
while True:
# read line without blocking
line = self.buf_reader.read()
if line is not None:
self.output.append(line)
else:
break
# store only the last line
self.output = self.output[-1:]
def close(self):
if self.proc.poll() != 0:
error_outputs = self.proc.stderr.read().decode("utf-8")
# print errors details if there was any
self.output[-1] += 'ERROR: \n'
            #I want to add only the last line of the error. No traceback info, in order not to overwhelm the user.
dd = error_outputs.split('\n')
if len(dd) > 1:
self.output[-1] += dd[-2] + '\n'
if self.is_debug:
self.output[-1] += error_outputs
self.output[-1] += cmdlist2str(self.cmd) + '\n'
self.proc.stderr.flush()
if self.obj_cmd and self.proc.poll() == 0:
with CapturingOutput() as output:
self.obj_cmd.clean()
self.output += output
self.proc.wait()
self.proc.stdout.close()
self.proc.stderr.close()
def RunMultiCMD(cmd_list,
local_obj='',
max_num_process=3,
refresh_time=10,
is_debug = True):
'''Start different process using the command is cmd_list'''
start_obj = partial(StartProcess, local_obj=local_obj, is_debug=is_debug)
total_timer = TimeCounter() #timer to meassure the total time
    cmd_list = cmd_list[::-1] # since I am using pop to get the next element I need to invert the list to keep the same order
tot_tasks = len(cmd_list)
if tot_tasks < max_num_process:
max_num_process = tot_tasks
# initialize the first max_number_process in the list
finished_tasks = []
current_tasks = []
for ii in range(max_num_process):
cmd = cmd_list.pop()
current_tasks.append(start_obj(cmd))
# keep loop tasks as long as there are tasks in the list
while current_tasks:
time.sleep(refresh_time)
print(GUI_CLEAR_SIGNAL)
os.system(['clear', 'cls'][os.name == 'nt'])
# print info of the finished tasks
for task_finish_msg in finished_tasks:
sys.stdout.write(task_finish_msg)
# loop along the process list to update output and see if there is any
# task finished
next_tasks = []
        #I want to close the tasks after starting the next tasks. It has the disadvantage of
        #requiring more disk space (required files for the new task + the finished files),
        #but at least it should start a new task while it is copying the old results.
tasks_to_close = []
for task in current_tasks:
task.read_buff()
if task.proc.poll() is None:
# add task to the new list if it hasn't complete
next_tasks.append(task)
sys.stdout.write(task.output[-1])
else:
                # close the task and add its last output to the finished_tasks
# list
tasks_to_close.append(task)
# add new task once the previous one was finished
if cmd_list and len(next_tasks) < max_num_process:
cmd = cmd_list.pop()
next_tasks.append(start_obj(cmd))
        # if there is still space, add new tasks.
while cmd_list and len(next_tasks) < max_num_process:
cmd = cmd_list.pop()
next_tasks.append(start_obj(cmd))
#close tasks (copy finished files to final destination)
for task in tasks_to_close:
task.close()
sys.stdout.write(task.output[-1])
finished_tasks.append(task.output[-1])
#start the new loop
current_tasks = next_tasks
#display progress
n_finished = len(finished_tasks)
n_remaining = len(current_tasks) + len(cmd_list)
progress_str = 'Tasks: {} finished, {} remaining. Total_time {}.'.format(
n_finished, n_remaining, total_timer.get_time_str())
print('*************************************************')
print(progress_str)
print('*************************************************')
#if i don't add this the GUI could terminate before displaying the last text.
sys.stdout.flush()
time.sleep(1)
def cmdlist2str(cmdlist):
# change the format from the list accepted by Popen to a text string
# accepted by the terminal
for ii, dd in enumerate(cmdlist):
if not dd.startswith('-'):
if os.name != 'nt':
dd = "'" + dd + "'"
else:
if dd.endswith(os.sep):
dd = dd[:-1]
dd = '"' + dd + '"'
if ii == 0:
cmd_str = dd
else:
cmd_str += ' ' + dd
return cmd_str
def print_cmd_list(cmd_list_compress):
# print all the commands to be processed
if cmd_list_compress:
for cmd in cmd_list_compress:
cmd_str = cmdlist2str(cmd)
print(cmd_str)
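if __name__ == '__main__':
    # Minimal usage sketch with made-up commands: run two short jobs,
    # at most two at a time, refreshing the progress display every second.
    example_cmds = [[sys.executable, '-c', 'print("task one")'],
                    [sys.executable, '-c', 'print("task two")']]
    print_cmd_list(example_cmds)
    RunMultiCMD(example_cmds, max_num_process=2, refresh_time=1)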
| mit | 3,874,704,655,631,148,000 | 31.02765 | 134 | 0.538705 | false |
seanchen/taiga-back | taiga/projects/models.py | 3 | 36238 | # Copyright (C) 2014 Andrey Antukh <[email protected]>
# Copyright (C) 2014 Jesús Espino <[email protected]>
# Copyright (C) 2014 David Barragán <[email protected]>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import itertools
import uuid
from django.core.exceptions import ValidationError
from django.db import models
from django.db.models import signals
from django.apps import apps
from django.conf import settings
from django.dispatch import receiver
from django.contrib.auth import get_user_model
from django.utils.translation import ugettext_lazy as _
from django.utils import timezone
from django_pgjson.fields import JsonField
from djorm_pgarray.fields import TextArrayField
from taiga.permissions.permissions import ANON_PERMISSIONS, MEMBERS_PERMISSIONS
from taiga.base.tags import TaggedMixin
from taiga.base.utils.slug import slugify_uniquely
from taiga.base.utils.dicts import dict_sum
from taiga.base.utils.sequence import arithmetic_progression
from taiga.base.utils.slug import slugify_uniquely_for_queryset
from . import choices
class Membership(models.Model):
    # This model stores all project memberships. It also
    # stores invitations to memberships that do not have
    # an assigned user yet.
user = models.ForeignKey(settings.AUTH_USER_MODEL, null=True, blank=True, default=None,
related_name="memberships")
project = models.ForeignKey("Project", null=False, blank=False,
related_name="memberships")
role = models.ForeignKey("users.Role", null=False, blank=False,
related_name="memberships")
is_owner = models.BooleanField(default=False, null=False, blank=False)
# Invitation metadata
email = models.EmailField(max_length=255, default=None, null=True, blank=True,
verbose_name=_("email"))
created_at = models.DateTimeField(default=timezone.now,
verbose_name=_("create at"))
token = models.CharField(max_length=60, blank=True, null=True, default=None,
verbose_name=_("token"))
invited_by = models.ForeignKey(settings.AUTH_USER_MODEL, related_name="ihaveinvited+",
null=True, blank=True)
invitation_extra_text = models.TextField(null=True, blank=True,
verbose_name=_("invitation extra text"))
user_order = models.IntegerField(default=10000, null=False, blank=False,
verbose_name=_("user order"))
def clean(self):
# TODO: Review and do it more robust
memberships = Membership.objects.filter(user=self.user, project=self.project)
if self.user and memberships.count() > 0 and memberships[0].id != self.id:
raise ValidationError(_('The user is already member of the project'))
class Meta:
verbose_name = "membership"
verbose_name_plural = "membershipss"
unique_together = ("user", "project",)
ordering = ["project", "user__full_name", "user__username", "user__email", "email"]
permissions = (
("view_membership", "Can view membership"),
)
class ProjectDefaults(models.Model):
default_points = models.OneToOneField("projects.Points", on_delete=models.SET_NULL,
related_name="+", null=True, blank=True,
verbose_name=_("default points"))
default_us_status = models.OneToOneField("projects.UserStoryStatus",
on_delete=models.SET_NULL, related_name="+",
null=True, blank=True,
verbose_name=_("default US status"))
default_task_status = models.OneToOneField("projects.TaskStatus",
on_delete=models.SET_NULL, related_name="+",
null=True, blank=True,
verbose_name=_("default task status"))
default_priority = models.OneToOneField("projects.Priority", on_delete=models.SET_NULL,
related_name="+", null=True, blank=True,
verbose_name=_("default priority"))
default_severity = models.OneToOneField("projects.Severity", on_delete=models.SET_NULL,
related_name="+", null=True, blank=True,
verbose_name=_("default severity"))
default_issue_status = models.OneToOneField("projects.IssueStatus",
on_delete=models.SET_NULL, related_name="+",
null=True, blank=True,
verbose_name=_("default issue status"))
default_issue_type = models.OneToOneField("projects.IssueType",
on_delete=models.SET_NULL, related_name="+",
null=True, blank=True,
verbose_name=_("default issue type"))
class Meta:
abstract = True
class Project(ProjectDefaults, TaggedMixin, models.Model):
name = models.CharField(max_length=250, null=False, blank=False,
verbose_name=_("name"))
slug = models.SlugField(max_length=250, unique=True, null=False, blank=True,
verbose_name=_("slug"))
description = models.TextField(null=False, blank=False,
verbose_name=_("description"))
created_date = models.DateTimeField(null=False, blank=False,
verbose_name=_("created date"),
default=timezone.now)
modified_date = models.DateTimeField(null=False, blank=False,
verbose_name=_("modified date"))
owner = models.ForeignKey(settings.AUTH_USER_MODEL, null=False, blank=False,
related_name="owned_projects", verbose_name=_("owner"))
members = models.ManyToManyField(settings.AUTH_USER_MODEL, related_name="projects",
through="Membership", verbose_name=_("members"),
through_fields=("project", "user"))
total_milestones = models.IntegerField(default=0, null=False, blank=False,
verbose_name=_("total of milestones"))
total_story_points = models.FloatField(default=0, verbose_name=_("total story points"))
is_backlog_activated = models.BooleanField(default=True, null=False, blank=True,
verbose_name=_("active backlog panel"))
is_kanban_activated = models.BooleanField(default=False, null=False, blank=True,
verbose_name=_("active kanban panel"))
is_wiki_activated = models.BooleanField(default=True, null=False, blank=True,
verbose_name=_("active wiki panel"))
is_issues_activated = models.BooleanField(default=True, null=False, blank=True,
verbose_name=_("active issues panel"))
videoconferences = models.CharField(max_length=250, null=True, blank=True,
choices=choices.VIDEOCONFERENCES_CHOICES,
verbose_name=_("videoconference system"))
videoconferences_salt = models.CharField(max_length=250, null=True, blank=True,
verbose_name=_("videoconference room salt"))
creation_template = models.ForeignKey("projects.ProjectTemplate",
related_name="projects", null=True,
blank=True, default=None,
verbose_name=_("creation template"))
anon_permissions = TextArrayField(blank=True, null=True,
default=[],
verbose_name=_("anonymous permissions"),
choices=ANON_PERMISSIONS)
public_permissions = TextArrayField(blank=True, null=True,
default=[],
verbose_name=_("user permissions"),
choices=MEMBERS_PERMISSIONS)
is_private = models.BooleanField(default=True, null=False, blank=True,
verbose_name=_("is private"))
userstories_csv_uuid = models.CharField(max_length=32, editable=False,
null=True, blank=True,
default=None, db_index=True)
tasks_csv_uuid = models.CharField(max_length=32, editable=False, null=True,
blank=True, default=None, db_index=True)
issues_csv_uuid = models.CharField(max_length=32, editable=False,
null=True, blank=True, default=None,
db_index=True)
tags_colors = TextArrayField(dimension=2, null=False, blank=True, verbose_name=_("tags colors"), default=[])
_importing = None
class Meta:
verbose_name = "project"
verbose_name_plural = "projects"
ordering = ["name"]
permissions = (
("view_project", "Can view project"),
)
def __str__(self):
return self.name
def __repr__(self):
return "<Project {0}>".format(self.id)
def save(self, *args, **kwargs):
if not self._importing or not self.modified_date:
self.modified_date = timezone.now()
if not self.slug:
base_name = "{}-{}".format(self.owner.username, self.name)
base_slug = slugify_uniquely(base_name, self.__class__)
slug = base_slug
for i in arithmetic_progression():
if not type(self).objects.filter(slug=slug).exists() or i > 100:
break
slug = "{}-{}".format(base_slug, i)
self.slug = slug
if not self.videoconferences:
self.videoconferences_salt = None
super().save(*args, **kwargs)
def get_roles(self):
return self.roles.all()
def get_users(self):
user_model = get_user_model()
members = self.memberships.values_list("user", flat=True)
return user_model.objects.filter(id__in=list(members))
def update_role_points(self, user_stories=None):
RolePoints = apps.get_model("userstories", "RolePoints")
Role = apps.get_model("users", "Role")
# Get all available roles on this project
roles = self.get_roles().filter(computable=True)
if roles.count() == 0:
return
        # Iterate over all project user stories and create
        # role point instances for newly created roles.
if user_stories is None:
user_stories = self.user_stories.all()
        # Get the point instance that represents a null/undefined value.
        # The current model allows duplicate values, so we fetch all points
        # with None as value and use the first one.
        # If none exists, create one to avoid unexpected errors.
none_points = list(self.points.filter(value=None))
if none_points:
null_points_value = none_points[0]
else:
name = slugify_uniquely_for_queryset("?", self.points.all(), slugfield="name")
null_points_value = Points.objects.create(name=name, value=None, project=self)
for us in user_stories:
usroles = Role.objects.filter(role_points__in=us.role_points.all()).distinct()
new_roles = roles.exclude(id__in=usroles)
new_rolepoints = [RolePoints(role=role, user_story=us, points=null_points_value)
for role in new_roles]
RolePoints.objects.bulk_create(new_rolepoints)
        # Now remove rolepoints associated with roles that no longer exist.
rp_query = RolePoints.objects.filter(user_story__in=self.user_stories.all())
rp_query = rp_query.exclude(role__id__in=roles.values_list("id", flat=True))
rp_query.delete()
def _get_user_stories_points(self, user_stories):
role_points = [us.role_points.all() for us in user_stories]
flat_role_points = itertools.chain(*role_points)
flat_role_dicts = map(lambda x: {x.role_id: x.points.value if x.points.value else 0},
flat_role_points)
return dict_sum(*flat_role_dicts)
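    # For illustration (role ids and values made up): the aggregation above
    # yields one total per role id, e.g. {3: 11.0, 4: 8.5} when two
    # computable roles are summed across the given user stories.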
def _get_points_increment(self, client_requirement, team_requirement):
last_milestones = self.milestones.order_by('-estimated_finish')
last_milestone = last_milestones[0] if last_milestones else None
if last_milestone:
user_stories = self.user_stories.filter(
created_date__gte=last_milestone.estimated_finish,
client_requirement=client_requirement,
team_requirement=team_requirement
)
else:
user_stories = self.user_stories.filter(
client_requirement=client_requirement,
team_requirement=team_requirement
)
user_stories = user_stories.prefetch_related('role_points', 'role_points__points')
return self._get_user_stories_points(user_stories)
@property
def project(self):
return self
@property
def future_team_increment(self):
team_increment = self._get_points_increment(False, True)
shared_increment = {key: value / 2 for key, value in self.future_shared_increment.items()}
return dict_sum(team_increment, shared_increment)
@property
def future_client_increment(self):
client_increment = self._get_points_increment(True, False)
shared_increment = {key: value / 2 for key, value in self.future_shared_increment.items()}
return dict_sum(client_increment, shared_increment)
@property
def future_shared_increment(self):
return self._get_points_increment(True, True)
@property
def closed_points(self):
return self.calculated_points["closed"]
@property
def defined_points(self):
return self.calculated_points["defined"]
@property
def assigned_points(self):
return self.calculated_points["assigned"]
@property
def calculated_points(self):
user_stories = self.user_stories.all().prefetch_related('role_points', 'role_points__points')
closed_user_stories = user_stories.filter(is_closed=True)
assigned_user_stories = user_stories.filter(milestone__isnull=False)
return {
"defined": self._get_user_stories_points(user_stories),
"closed": self._get_user_stories_points(closed_user_stories),
"assigned": self._get_user_stories_points(assigned_user_stories),
}
class ProjectModulesConfig(models.Model):
project = models.OneToOneField("Project", null=False, blank=False,
related_name="modules_config", verbose_name=_("project"))
config = JsonField(null=True, blank=True, verbose_name=_("modules config"))
class Meta:
verbose_name = "project modules config"
verbose_name_plural = "project modules configs"
ordering = ["project"]
# User Stories common Models
class UserStoryStatus(models.Model):
name = models.CharField(max_length=255, null=False, blank=False,
verbose_name=_("name"))
slug = models.SlugField(max_length=255, null=False, blank=True,
verbose_name=_("slug"))
order = models.IntegerField(default=10, null=False, blank=False,
verbose_name=_("order"))
is_closed = models.BooleanField(default=False, null=False, blank=True,
verbose_name=_("is closed"))
is_archived = models.BooleanField(default=False, null=False, blank=True,
verbose_name=_("is archived"))
color = models.CharField(max_length=20, null=False, blank=False, default="#999999",
verbose_name=_("color"))
wip_limit = models.IntegerField(null=True, blank=True, default=None,
verbose_name=_("work in progress limit"))
project = models.ForeignKey("Project", null=False, blank=False,
related_name="us_statuses", verbose_name=_("project"))
class Meta:
verbose_name = "user story status"
verbose_name_plural = "user story statuses"
ordering = ["project", "order", "name"]
unique_together = (("project", "name"), ("project", "slug"))
permissions = (
("view_userstorystatus", "Can view user story status"),
)
def __str__(self):
return self.name
def save(self, *args, **kwargs):
qs = self.project.us_statuses
if self.id:
qs = qs.exclude(id=self.id)
self.slug = slugify_uniquely_for_queryset(self.name, qs)
return super().save(*args, **kwargs)
class Points(models.Model):
name = models.CharField(max_length=255, null=False, blank=False,
verbose_name=_("name"))
order = models.IntegerField(default=10, null=False, blank=False,
verbose_name=_("order"))
value = models.FloatField(default=None, null=True, blank=True,
verbose_name=_("value"))
project = models.ForeignKey("Project", null=False, blank=False,
related_name="points", verbose_name=_("project"))
class Meta:
verbose_name = "points"
verbose_name_plural = "points"
ordering = ["project", "order", "name"]
unique_together = ("project", "name")
permissions = (
("view_points", "Can view points"),
)
def __str__(self):
return self.name
# Tasks common models
class TaskStatus(models.Model):
name = models.CharField(max_length=255, null=False, blank=False,
verbose_name=_("name"))
slug = models.SlugField(max_length=255, null=False, blank=True,
verbose_name=_("slug"))
order = models.IntegerField(default=10, null=False, blank=False,
verbose_name=_("order"))
is_closed = models.BooleanField(default=False, null=False, blank=True,
verbose_name=_("is closed"))
color = models.CharField(max_length=20, null=False, blank=False, default="#999999",
verbose_name=_("color"))
project = models.ForeignKey("Project", null=False, blank=False,
related_name="task_statuses", verbose_name=_("project"))
class Meta:
verbose_name = "task status"
verbose_name_plural = "task statuses"
ordering = ["project", "order", "name"]
unique_together = (("project", "name"), ("project", "slug"))
permissions = (
("view_taskstatus", "Can view task status"),
)
def __str__(self):
return self.name
def save(self, *args, **kwargs):
qs = self.project.task_statuses
if self.id:
qs = qs.exclude(id=self.id)
self.slug = slugify_uniquely_for_queryset(self.name, qs)
return super().save(*args, **kwargs)
# Issue common Models
class Priority(models.Model):
name = models.CharField(max_length=255, null=False, blank=False,
verbose_name=_("name"))
order = models.IntegerField(default=10, null=False, blank=False,
verbose_name=_("order"))
color = models.CharField(max_length=20, null=False, blank=False, default="#999999",
verbose_name=_("color"))
project = models.ForeignKey("Project", null=False, blank=False,
related_name="priorities", verbose_name=_("project"))
class Meta:
verbose_name = "priority"
verbose_name_plural = "priorities"
ordering = ["project", "order", "name"]
unique_together = ("project", "name")
permissions = (
("view_priority", "Can view priority"),
)
def __str__(self):
return self.name
class Severity(models.Model):
name = models.CharField(max_length=255, null=False, blank=False,
verbose_name=_("name"))
order = models.IntegerField(default=10, null=False, blank=False,
verbose_name=_("order"))
color = models.CharField(max_length=20, null=False, blank=False, default="#999999",
verbose_name=_("color"))
project = models.ForeignKey("Project", null=False, blank=False,
related_name="severities", verbose_name=_("project"))
class Meta:
verbose_name = "severity"
verbose_name_plural = "severities"
ordering = ["project", "order", "name"]
unique_together = ("project", "name")
permissions = (
("view_severity", "Can view severity"),
)
def __str__(self):
return self.name
class IssueStatus(models.Model):
name = models.CharField(max_length=255, null=False, blank=False,
verbose_name=_("name"))
slug = models.SlugField(max_length=255, null=False, blank=True,
verbose_name=_("slug"))
order = models.IntegerField(default=10, null=False, blank=False,
verbose_name=_("order"))
is_closed = models.BooleanField(default=False, null=False, blank=True,
verbose_name=_("is closed"))
color = models.CharField(max_length=20, null=False, blank=False, default="#999999",
verbose_name=_("color"))
project = models.ForeignKey("Project", null=False, blank=False,
related_name="issue_statuses", verbose_name=_("project"))
class Meta:
verbose_name = "issue status"
verbose_name_plural = "issue statuses"
ordering = ["project", "order", "name"]
unique_together = (("project", "name"), ("project", "slug"))
permissions = (
("view_issuestatus", "Can view issue status"),
)
def __str__(self):
return self.name
def save(self, *args, **kwargs):
qs = self.project.issue_statuses
if self.id:
qs = qs.exclude(id=self.id)
self.slug = slugify_uniquely_for_queryset(self.name, qs)
return super().save(*args, **kwargs)
class IssueType(models.Model):
name = models.CharField(max_length=255, null=False, blank=False,
verbose_name=_("name"))
order = models.IntegerField(default=10, null=False, blank=False,
verbose_name=_("order"))
color = models.CharField(max_length=20, null=False, blank=False, default="#999999",
verbose_name=_("color"))
project = models.ForeignKey("Project", null=False, blank=False,
related_name="issue_types", verbose_name=_("project"))
class Meta:
verbose_name = "issue type"
verbose_name_plural = "issue types"
ordering = ["project", "order", "name"]
unique_together = ("project", "name")
permissions = (
("view_issuetype", "Can view issue type"),
)
def __str__(self):
return self.name
class ProjectTemplate(models.Model):
name = models.CharField(max_length=250, null=False, blank=False,
verbose_name=_("name"))
slug = models.SlugField(max_length=250, null=False, blank=True,
verbose_name=_("slug"), unique=True)
description = models.TextField(null=False, blank=False,
verbose_name=_("description"))
created_date = models.DateTimeField(null=False, blank=False,
verbose_name=_("created date"),
default=timezone.now)
modified_date = models.DateTimeField(null=False, blank=False,
verbose_name=_("modified date"))
default_owner_role = models.CharField(max_length=50, null=False,
blank=False,
verbose_name=_("default owner's role"))
is_backlog_activated = models.BooleanField(default=True, null=False, blank=True,
verbose_name=_("active backlog panel"))
is_kanban_activated = models.BooleanField(default=False, null=False, blank=True,
verbose_name=_("active kanban panel"))
is_wiki_activated = models.BooleanField(default=True, null=False, blank=True,
verbose_name=_("active wiki panel"))
is_issues_activated = models.BooleanField(default=True, null=False, blank=True,
verbose_name=_("active issues panel"))
videoconferences = models.CharField(max_length=250, null=True, blank=True,
choices=choices.VIDEOCONFERENCES_CHOICES,
verbose_name=_("videoconference system"))
videoconferences_salt = models.CharField(max_length=250, null=True, blank=True,
verbose_name=_("videoconference room salt"))
default_options = JsonField(null=True, blank=True, verbose_name=_("default options"))
us_statuses = JsonField(null=True, blank=True, verbose_name=_("us statuses"))
points = JsonField(null=True, blank=True, verbose_name=_("points"))
task_statuses = JsonField(null=True, blank=True, verbose_name=_("task statuses"))
issue_statuses = JsonField(null=True, blank=True, verbose_name=_("issue statuses"))
issue_types = JsonField(null=True, blank=True, verbose_name=_("issue types"))
priorities = JsonField(null=True, blank=True, verbose_name=_("priorities"))
severities = JsonField(null=True, blank=True, verbose_name=_("severities"))
roles = JsonField(null=True, blank=True, verbose_name=_("roles"))
_importing = None
class Meta:
verbose_name = "project template"
verbose_name_plural = "project templates"
ordering = ["name"]
def __str__(self):
return self.name
def __repr__(self):
return "<Project Template {0}>".format(self.slug)
def save(self, *args, **kwargs):
if not self._importing or not self.modified_date:
self.modified_date = timezone.now()
super().save(*args, **kwargs)
def load_data_from_project(self, project):
self.is_backlog_activated = project.is_backlog_activated
self.is_kanban_activated = project.is_kanban_activated
self.is_wiki_activated = project.is_wiki_activated
self.is_issues_activated = project.is_issues_activated
self.videoconferences = project.videoconferences
self.videoconferences_salt = project.videoconferences_salt
self.default_options = {
"points": getattr(project.default_points, "name", None),
"us_status": getattr(project.default_us_status, "name", None),
"task_status": getattr(project.default_task_status, "name", None),
"issue_status": getattr(project.default_issue_status, "name", None),
"issue_type": getattr(project.default_issue_type, "name", None),
"priority": getattr(project.default_priority, "name", None),
"severity": getattr(project.default_severity, "name", None)
}
self.us_statuses = []
for us_status in project.us_statuses.all():
self.us_statuses.append({
"name": us_status.name,
"slug": us_status.slug,
"is_closed": us_status.is_closed,
"is_archived": us_status.is_archived,
"color": us_status.color,
"wip_limit": us_status.wip_limit,
"order": us_status.order,
})
self.points = []
for us_point in project.points.all():
self.points.append({
"name": us_point.name,
"value": us_point.value,
"order": us_point.order,
})
self.task_statuses = []
for task_status in project.task_statuses.all():
self.task_statuses.append({
"name": task_status.name,
"slug": task_status.slug,
"is_closed": task_status.is_closed,
"color": task_status.color,
"order": task_status.order,
})
self.issue_statuses = []
for issue_status in project.issue_statuses.all():
self.issue_statuses.append({
"name": issue_status.name,
"slug": issue_status.slug,
"is_closed": issue_status.is_closed,
"color": issue_status.color,
"order": issue_status.order,
})
self.issue_types = []
for issue_type in project.issue_types.all():
self.issue_types.append({
"name": issue_type.name,
"color": issue_type.color,
"order": issue_type.order,
})
self.priorities = []
for priority in project.priorities.all():
self.priorities.append({
"name": priority.name,
"color": priority.color,
"order": priority.order,
})
self.severities = []
for severity in project.severities.all():
self.severities.append({
"name": severity.name,
"color": severity.color,
"order": severity.order,
})
self.roles = []
for role in project.roles.all():
self.roles.append({
"name": role.name,
"slug": role.slug,
"permissions": role.permissions,
"order": role.order,
"computable": role.computable
})
try:
owner_membership = Membership.objects.get(project=project, user=project.owner)
self.default_owner_role = owner_membership.role.slug
except Membership.DoesNotExist:
self.default_owner_role = self.roles[0].get("slug", None)
def apply_to_project(self, project):
Role = apps.get_model("users", "Role")
if project.id is None:
raise Exception("Project need an id (must be a saved project)")
project.creation_template = self
project.is_backlog_activated = self.is_backlog_activated
project.is_kanban_activated = self.is_kanban_activated
project.is_wiki_activated = self.is_wiki_activated
project.is_issues_activated = self.is_issues_activated
project.videoconferences = self.videoconferences
project.videoconferences_salt = self.videoconferences_salt
for us_status in self.us_statuses:
UserStoryStatus.objects.create(
name=us_status["name"],
slug=us_status["slug"],
is_closed=us_status["is_closed"],
is_archived=us_status["is_archived"],
color=us_status["color"],
wip_limit=us_status["wip_limit"],
order=us_status["order"],
project=project
)
for point in self.points:
Points.objects.create(
name=point["name"],
value=point["value"],
order=point["order"],
project=project
)
for task_status in self.task_statuses:
TaskStatus.objects.create(
name=task_status["name"],
slug=task_status["slug"],
is_closed=task_status["is_closed"],
color=task_status["color"],
order=task_status["order"],
project=project
)
for issue_status in self.issue_statuses:
IssueStatus.objects.create(
name=issue_status["name"],
slug=issue_status["slug"],
is_closed=issue_status["is_closed"],
color=issue_status["color"],
order=issue_status["order"],
project=project
)
for issue_type in self.issue_types:
IssueType.objects.create(
name=issue_type["name"],
color=issue_type["color"],
order=issue_type["order"],
project=project
)
for priority in self.priorities:
Priority.objects.create(
name=priority["name"],
color=priority["color"],
order=priority["order"],
project=project
)
for severity in self.severities:
Severity.objects.create(
name=severity["name"],
color=severity["color"],
order=severity["order"],
project=project
)
for role in self.roles:
Role.objects.create(
name=role["name"],
slug=role["slug"],
order=role["order"],
computable=role["computable"],
project=project,
permissions=role['permissions']
)
if self.points:
project.default_points = Points.objects.get(name=self.default_options["points"],
project=project)
if self.us_statuses:
project.default_us_status = UserStoryStatus.objects.get(name=self.default_options["us_status"],
project=project)
if self.task_statuses:
project.default_task_status = TaskStatus.objects.get(name=self.default_options["task_status"],
project=project)
if self.issue_statuses:
project.default_issue_status = IssueStatus.objects.get(name=self.default_options["issue_status"],
project=project)
if self.issue_types:
project.default_issue_type = IssueType.objects.get(name=self.default_options["issue_type"],
project=project)
if self.priorities:
project.default_priority = Priority.objects.get(name=self.default_options["priority"], project=project)
if self.severities:
project.default_severity = Severity.objects.get(name=self.default_options["severity"], project=project)
return project
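# Usage sketch (the helper below is illustrative, not part of the app):
# copy the structure of an existing, saved project onto another saved
# project by round-tripping through a ProjectTemplate.
def clone_project_structure(source_project, new_project, template_name="copied-template"):
    template = ProjectTemplate(name=template_name, slug=template_name,
                               description=source_project.description)
    template.load_data_from_project(source_project)
    template.save()
    new_project = template.apply_to_project(new_project)
    new_project.save()
    return new_project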
| agpl-3.0 | 5,982,643,201,889,653,000 | 43.190244 | 115 | 0.560741 | false |
StudentOrganisationForAreospaceResearch/GroundControl | tester.py | 1 | 2656 | # -*- coding: utf-8 -*-
"""
Student Organisation for Aerospace Research
University of Calgary
Canada
Developers:
Nathan Meulenbroek
Description:
Tester file. Gives bogus data to the main file to run algorithm tests
"""
import random
class Tester():
"""
Very much overengineered to support future improvements.
"""
time = 0.0
altitude = 0.0
pressure = 0.0
acceleration_x = 0.0
acceleration_y = 0.0
acceleration_z = 0.0
ang_acceleration_x = 0.0
ang_acceleration_y = 0.0
ang_acceleration_z = 0.0
magnetic_field_x = 0.0
magnetic_field_y = 0.0
magnetic_field_z = 0.0
pitch = 0.0
yaw = 0.0
roll = 0.0
temperature = 0.0
longitude = 0.0
latitude = 0.0
def __init__(self, *args, **kwargs):
return
def get_data(self):
self.time = random.randrange(0, 30, 1)
self.altitude = random.randrange(0, 30, 1)
self.pressure = random.randrange(0, 30, 1)
self.acceleration_x = random.randrange(0, 30, 1)
self.acceleration_y = random.randrange(0, 30, 1)
self.acceleration_z = random.randrange(0, 30, 1)
self.ang_acceleration_x = random.randrange(0, 30, 1)
self.ang_acceleration_y = random.randrange(0, 30, 1)
self.ang_acceleration_z = random.randrange(0, 30, 1)
self.magnetic_field_x = random.randrange(0, 30, 1)
self.magnetic_field_y = random.randrange(0, 30, 1)
self.magnetic_field_z = random.randrange(0, 30, 1)
self.pitch = random.randrange(0, 30, 1)
self.yaw = random.randrange(0, 30, 1)
self.roll = random.randrange(0, 30, 1)
self.temperature = random.randrange(0, 30, 1)
self.longitude = random.randrange(0, 30, 1)
self.latitude = random.randrange(0, 30, 1)
return (str(self.time) + '|' + str(self.altitude) + '|' +
str(self.pressure) + '|' + str(self.acceleration_x) + '|' +
str(self.acceleration_y) + '|' + str(self.acceleration_z)
+ '|' + str(self.ang_acceleration_x) + '|' +
str(self.ang_acceleration_y) + '|' +
str(self.ang_acceleration_z) + '|' + str(self.magnetic_field_x)
+ '|' + str(self.magnetic_field_y) + '|' +
str(self.magnetic_field_z) + '|' + str(self.pitch) + '|' +
str(self.yaw) + '|' + str(self.roll) + '|' +
str(self.temperature) + '|' + str(self.longitude) + '|' +
str(self.latitude))
def __init__():
temp = Tester()
for i in range(10):
print(temp.get_data())
return | apache-2.0 | 3,864,576,654,864,803,000 | 32.632911 | 79 | 0.558358 | false |
FlightGear/flightgear | utils/Modeller/uv_import_svg.py | 1 | 7964 | #!BPY
# """
# Name: 'UV: (Re)Import UV from SVG'
# Blender: 245
# Group: 'Image'
# Tooltip: 'Re-import UV layout from SVG file'
# """
__author__ = "Melchior FRANZ < mfranz # aon : at >"
__url__ = ["http://www.flightgear.org/", "http://cvs.flightgear.org/viewvc/source/utils/Modeller/uv_import_svg.py"]
__version__ = "0.2"
__bpydoc__ = """\
Imports an SVG file containing UV maps, which has been saved by the
uv_export.svg script. This allows to move, scale, and rotate object
mappings in SVG editors like Inkscape. Note that all contained UV maps
will be set, no matter which objects are actually selected at the moment.
The choice has been made when the file was saved!
"""
#--------------------------------------------------------------------------------
# Copyright (C) 2008 Melchior FRANZ < mfranz # aon : at >
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#--------------------------------------------------------------------------------
ID_SEPARATOR = '_.:._'
import Blender, BPyMessages, sys, math, re, xml.sax
numwsp = re.compile('(?<=[\d.])\s+(?=[-+.\d])')
commawsp = re.compile('\s+|\s*,\s*')
istrans = re.compile('^\s*(skewX|skewY|scale|translate|rotate|matrix)\s*\(([^\)]*)\)\s*')
isnumber = re.compile('^[-+]?(\d+(\.\d*)?|\.\d+)([eE][-+]?\d+)?$')
class Abort(Exception):
def __init__(self, msg):
self.msg = msg
class Matrix:
def __init__(self, a = 1, b = 0, c = 0, d = 1, e = 0, f = 0):
self.a = a; self.b = b; self.c = c; self.d = d; self.e = e; self.f = f
def __str__(self):
return "[Matrix %f %f %f %f %f %f]" % (self.a, self.b, self.c, self.d, self.e, self.f)
def multiply(self, mat):
a = mat.a * self.a + mat.c * self.b
b = mat.b * self.a + mat.d * self.b
c = mat.a * self.c + mat.c * self.d
d = mat.b * self.c + mat.d * self.d
e = mat.a * self.e + mat.c * self.f + mat.e
f = mat.b * self.e + mat.d * self.f + mat.f
self.a = a; self.b = b; self.c = c; self.d = d; self.e = e; self.f = f
def transform(self, u, v):
return u * self.a + v * self.c + self.e, u * self.b + v * self.d + self.f
def translate(self, dx, dy):
self.multiply(Matrix(1, 0, 0, 1, dx, dy))
def scale(self, sx, sy):
self.multiply(Matrix(sx, 0, 0, sy, 0, 0))
def rotate(self, a):
a *= math.pi / 180
self.multiply(Matrix(math.cos(a), math.sin(a), -math.sin(a), math.cos(a), 0, 0))
def skewX(self, a):
a *= math.pi / 180
self.multiply(Matrix(1, 0, math.tan(a), 1, 0, 0))
def skewY(self, a):
a *= math.pi / 180
self.multiply(Matrix(1, math.tan(a), 0, 1, 0, 0))
def parse_transform(s):
matrix = Matrix()
while True:
match = istrans.match(s)
if not match:
break
cmd = match.group(1)
values = commawsp.split(match.group(2).strip())
s = s[len(match.group(0)):]
arg = []
for value in values:
match = isnumber.match(value)
if not match:
raise Abort("bad transform value")
arg.append(float(match.group(0)))
num = len(arg)
if cmd == "skewX":
if num == 1:
matrix.skewX(arg[0])
continue
elif cmd == "skewY":
if num == 1:
matrix.skewY(arg[0])
continue
elif cmd == "scale":
if num == 1:
matrix.scale(arg[0], arg[0])
continue
if num == 2:
matrix.scale(arg[0], arg[1])
continue
elif cmd == "translate":
if num == 1:
matrix.translate(arg[0], 0)
continue
if num == 2:
matrix.translate(arg[0], arg[1])
continue
elif cmd == "rotate":
if num == 1:
matrix.rotate(arg[0])
continue
if num == 3:
matrix.translate(-arg[1], -arg[2])
matrix.rotate(arg[0])
matrix.translate(arg[1], arg[2])
continue
elif cmd == "matrix":
if num == 6:
matrix.multiply(Matrix(*arg))
continue
else:
print "ERROR: unknown transform", cmd
continue
print "ERROR: '%s' with wrong argument number (%d)" % (cmd, num)
if len(s):
print "ERROR: transform with trailing garbage (%s)" % s
return matrix
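# Illustrative sketch of what parse_transform produces (numbers made up):
#
#   m = parse_transform("matrix(2,0,0,2,10,20)")
#   m.transform(1, 1)    # -> (12.0, 22.0)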
class import_svg(xml.sax.handler.ContentHandler):
# err_handler
def error(self, exception):
raise Abort(str(exception))
def fatalError(self, exception):
raise Abort(str(exception))
def warning(self, exception):
print "WARNING: " + str(exception)
# doc_handler
def setDocumentLocator(self, whatever):
pass
def startDocument(self):
self.verified = False
self.scandesc = False
self.matrices = [None]
self.meshes = {}
for o in Blender.Scene.GetCurrent().objects:
if o.type != "Mesh":
continue
mesh = o.getData(mesh = 1)
if not mesh.faceUV:
continue
if mesh.name in self.meshes:
continue
self.meshes[mesh.name] = mesh
def endDocument(self):
pass
def characters(self, data):
if not self.scandesc:
return
if data.startswith("uv_export_svg.py"):
self.verified = True
def ignorableWhitespace(self, data, start, length):
pass
def processingInstruction(self, target, data):
pass
def startElement(self, name, attrs):
currmat = self.matrices[-1]
if "transform" in attrs:
m = parse_transform(attrs["transform"])
if currmat is not None:
m.multiply(currmat)
self.matrices.append(m)
else:
self.matrices.append(currmat)
if name == "polygon":
self.handlePolygon(attrs)
elif name == "svg":
if "viewBox" in attrs:
x, y, w, h = commawsp.split(attrs["viewBox"], 4)
if int(x) or int(y):
raise Abort("bad viewBox")
self.width = int(w)
self.height = int(h)
if self.width != self.height:
raise Abort("viewBox isn't a square")
else:
raise Abort("no viewBox")
elif name == "desc" and not self.verified:
self.scandesc = True
def endElement(self, name):
self.scandesc = False
self.matrices.pop()
def handlePolygon(self, attrs):
if not self.verified:
raise Abort("this file wasn't written by uv_export_svg.py")
ident = attrs.get("id", None)
points = attrs.get("points", None)
if not ident or not points:
print('bad polygon "%s"' % ident)
return
try:
meshname, num = ident.strip().split(ID_SEPARATOR, 2)
except:
print('broken id "%s"' % ident)
return
if not meshname in self.meshes:
print('unknown mesh "%s"' % meshname)
return
#print 'mesh %s face %d: ' % (meshname, num)
matrix = self.matrices[-1]
transuv = []
for p in numwsp.split(points.strip()):
u, v = commawsp.split(p.strip(), 2)
u = float(u)
v = float(v)
if matrix:
u, v = matrix.transform(u, v)
transuv.append((u / self.width, 1 - v / self.height))
for i, uv in enumerate(self.meshes[meshname].faces[int(num)].uv):
uv[0] = transuv[i][0]
uv[1] = transuv[i][1]
def run_parser(path):
if BPyMessages.Error_NoFile(path):
return
editmode = Blender.Window.EditMode()
if editmode:
Blender.Window.EditMode(0)
Blender.Window.WaitCursor(1)
try:
xml.sax.parse(path, import_svg(), import_svg())
Blender.Registry.SetKey("UVImportExportSVG", { "path" : path }, False)
except Abort, e:
print "Error:", e.msg, " -> aborting ...\n"
Blender.Draw.PupMenu("Error%t|" + e.msg)
Blender.Window.RedrawAll()
Blender.Window.WaitCursor(0)
if editmode:
Blender.Window.EditMode(1)
registry = Blender.Registry.GetKey("UVImportExportSVG", False)
if registry and "path" in registry and Blender.sys.exists(Blender.sys.expandpath(registry["path"])):
path = registry["path"]
else:
path = ""
Blender.Window.FileSelector(run_parser, "Import SVG", path)
| gpl-2.0 | 3,930,523,951,224,759,000 | 24.857143 | 115 | 0.624686 | false |
rogueinabox/rogueinabox | rogueinabox/history.py | 1 | 5964 | #Copyright (C) 2017 Andrea Asperti, Carlo De Pieri, Gianmaria Pedrini
#
#This file is part of Rogueinabox.
#
#Rogueinabox is free software: you can redistribute it and/or modify
#it under the terms of the GNU General Public License as published by
#the Free Software Foundation, either version 3 of the License, or
#(at your option) any later version.
#
#Rogueinabox is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#
#You should have received a copy of the GNU General Public License
#along with this program. If not, see <http://www.gnu.org/licenses/>.
from abc import ABC, abstractmethod
import pickle
import random
import numpy as np
import os
from collections import deque
class HistoryManager(ABC):
"""A class responsible for saving history and loading batch of it for training purposes."""
def __init__(self, agent):
"""Constructor for History"""
self.agent = agent
self._history = None
@property
def history(self):
"""Return the history"""
return self._history
def hist_len(self):
"""Return the history length"""
return len(self._history)
def save_history_on_file(self, filename):
"""Save the history on file"""
print("Saving history...")
with open(filename, "wb") as history:
pickle.dump(self._history, history)
print("History saved!")
def load_history_from_file(self, filename):
"""Load the history from the filesystem"""
if os.path.isfile(filename):
print("History found, loading...")
with open(filename, "rb") as history:
self._history = pickle.load(history)
print("History loaded!")
@abstractmethod
def update_history(self):
"""Method responsible for saving the new state into the history"""
pass
@abstractmethod
def pick_batch(self):
"""Method responsible for picking a batch of states from the history to train"""
pass
class FIFORandomPickHM(HistoryManager):
"""Simple fifo queue history implementation"""
def __init__(self, agent):
super().__init__(agent)
self._history = deque()
def update_history(self, action_index, reward, terminal):
"""Update the fifo history queue
return True if an item was added, False otherwise
"""
self._history.appendleft((self.agent.old_state, action_index, reward, self.agent.state, terminal))
if len(self._history) > self.agent.configs["histsize"]:
self._history.pop()
return True
def pick_batch(self, batch_dimension):
return random.sample(list(self._history), batch_dimension)
class NearDoorRandomPickHM(HistoryManager):
"""A more balanced history implementation for ExitRoom"""
def __init__(self, agent):
super().__init__(agent)
self._history = deque()
def _distance_from_door(self, state):
# warning: the rogue may cover the door
rogue_pos = np.argwhere(state[1] == 255)
if rogue_pos.shape[0] == 0: return 1000
rx,ry = rogue_pos[0][0],rogue_pos[0][1]
doors = np.argwhere(state[2] == 255)
dl = []
for dpos in doors:
dx,dy = dpos[0],dpos[1]
dl.append(abs(dx-rx)+abs(dy-ry))
if dl == []: return 1000
mind = min(dl)
print("distance = %", mind)
return mind
def update_history(self, action_index, reward, terminal):
"""Update the balanced history queue
return True if an item was added, False otherwise
"""
item_added = False
if (reward > 0) or (random.random() < self._distance_from_door(self.agent.state[0])**-2.):
self._history.appendleft((self.agent.old_state, action_index, reward, self.agent.state, terminal))
item_added = True
if len(self._history) > self.agent.configs["histsize"]:
self._history.pop()
return item_added
def pick_batch(self, batch_dimension):
return random.sample(list(self._history), batch_dimension)
class StatisticBalanceRandomPickHM(HistoryManager):
"""Simple balanced history implementation"""
def __init__(self, agent):
super().__init__(agent)
self._history = deque()
def update_history(self, action_index, reward, terminal):
"""Update the balanced history queue
return True if an item was added, False otherwise
"""
item_added = False
if (reward >= 0) or (self.agent.configs["iteration"] % 7 == 0):
self._history.appendleft((self.agent.old_state, action_index, reward, self.agent.state, terminal))
item_added = True
if len(self._history) > self.agent.configs["histsize"]:
self._history.pop()
return item_added
def pick_batch(self, batch_dimension):
return random.sample(list(self._history), batch_dimension)
class StatisticBalance2RandomPickHM(HistoryManager):
"""Simple balanced history implementation"""
def __init__(self, agent):
super().__init__(agent)
self._history = deque()
def update_history(self, action_index, reward, terminal):
"""Update the balanced history queue
return True if an item was added, False otherwise
"""
item_added = False
if reward > 0 or (reward < 0 and random.random() < 0.2):
self._history.appendleft((self.agent.old_state, action_index, reward, self.agent.state, terminal))
item_added = True
if len(self._history) > self.agent.configs["histsize"]:
self._history.pop()
return item_added
def pick_batch(self, batch_dimension):
return random.sample(list(self._history), batch_dimension)
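# Usage sketch (the agent object is hypothetical; it must expose .configs,
# .state and .old_state as the managers above expect):
#
#   manager = FIFORandomPickHM(agent)
#   manager.update_history(action_index=2, reward=1.0, terminal=False)
#   batch = manager.pick_batch(32)   # 32 random (s, a, r, s', terminal) tuples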
| gpl-3.0 | 4,185,820,969,041,403,400 | 34.082353 | 110 | 0.632126 | false |
mou4e/zirconium | tools/perf/profile_creators/history_profile_extender_unittest.py | 3 | 1630 | # Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import shutil
import tempfile
import unittest
from profile_creators.history_profile_extender import HistoryProfileExtender
from telemetry.core import util
from telemetry import decorators
from telemetry.unittest_util import options_for_unittests
util.AddDirToPythonPath(util.GetTelemetryDir(), 'third_party', 'mock')
import mock
# Testing private method.
# pylint: disable=protected-access
class HistoryProfileExtenderTest(unittest.TestCase):
# The profile extender does not work on Android or ChromeOS.
@decorators.Disabled('android', 'chromeos')
def testFullFunctionality(self):
extender = HistoryProfileExtender()
# Stop the extender at the earliest possible opportunity.
extender.ShouldExitAfterBatchNavigation = mock.MagicMock(return_value=True)
# Normally, the number of tabs depends on the number of cores. Use a
# static, small number to increase the speed of the test.
extender._NUM_TABS = 3
options = options_for_unittests.GetCopy()
options.output_profile_path = tempfile.mkdtemp()
try:
extender.Run(options)
self.assertEquals(extender.profile_path, options.output_profile_path)
self.assertTrue(os.path.exists(extender.profile_path))
history_db_path = os.path.join(extender.profile_path, "Default",
"History")
stat_info = os.stat(history_db_path)
self.assertGreater(stat_info.st_size, 1000)
finally:
shutil.rmtree(options.output_profile_path)
| bsd-3-clause | -1,564,218,973,125,563,600 | 36.045455 | 79 | 0.754601 | false |